From 500eb920e4498c0573ceadd22f65117fd55fd075 Mon Sep 17 00:00:00 2001 From: Manuel Hermann Date: Tue, 7 Aug 2012 17:04:03 +0200 Subject: [PATCH 001/464] Use info from getlasterror whether a document has been updated or created. --- mongoengine/document.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index f8bf769..bb5a60f 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -208,11 +208,20 @@ class Document(BaseDocument): actual_key = self._db_field_map.get(k, k) select_dict[actual_key] = doc[actual_key] + def is_new_object(last_error): + if last_error is not None: + updated = last_error.get("updatedExisting") + if updated is not None: + return not updated + return created + upsert = self._created if updates: - collection.update(select_dict, {"$set": updates}, upsert=upsert, safe=safe, **write_options) + last_error = collection.update(select_dict, {"$set": updates}, upsert=upsert, safe=safe, **write_options) + created = is_new_object(last_error) if removals: - collection.update(select_dict, {"$unset": removals}, upsert=upsert, safe=safe, **write_options) + last_error = collection.update(select_dict, {"$unset": removals}, upsert=upsert, safe=safe, **write_options) + created = created or is_new_object(last_error) cascade = self._meta.get('cascade', True) if cascade is None else cascade if cascade: From 6a31736644452ec6598dff8851f7847aef63da11 Mon Sep 17 00:00:00 2001 From: Luis Araujo Date: Wed, 26 Sep 2012 14:43:59 -0300 Subject: [PATCH 002/464] Initial support to Group and Permission. The /admin can't be exec login in MongoDB yet. Only SQLsDB (SQLite,...) This code work with django-mongoadmin pluggin. 
--- mongoengine/django/auth.py | 187 +++++++++++++++++++++++++++++++++++++ 1 file changed, 187 insertions(+) diff --git a/mongoengine/django/auth.py b/mongoengine/django/auth.py index a30fc57..d9f0584 100644 --- a/mongoengine/django/auth.py +++ b/mongoengine/django/auth.py @@ -3,6 +3,8 @@ import datetime from mongoengine import * from django.utils.encoding import smart_str +from django.db import models +from django.contrib.contenttypes.models import ContentTypeManager from django.contrib.auth.models import AnonymousUser from django.utils.translation import ugettext_lazy as _ @@ -34,6 +36,191 @@ except ImportError: REDIRECT_FIELD_NAME = 'next' +class ContentType(Document): + name = StringField(max_length=100) + app_label = StringField(max_length=100) + model = StringField(max_length=100, verbose_name=_('python model class name'), + unique_with='app_label') + objects = ContentTypeManager() + + class Meta: + verbose_name = _('content type') + verbose_name_plural = _('content types') + # db_table = 'django_content_type' + # ordering = ('name',) + # unique_together = (('app_label', 'model'),) + + def __unicode__(self): + return self.name + + def model_class(self): + "Returns the Python model class for this type of content." + from django.db import models + return models.get_model(self.app_label, self.model) + + def get_object_for_this_type(self, **kwargs): + """ + Returns an object of this type for the keyword arguments given. + Basically, this is a proxy around this object_type's get_object() model + method. The ObjectNotExist exception, if thrown, will not be caught, + so code that calls this method should catch it. 
+ """ + return self.model_class()._default_manager.using(self._state.db).get(**kwargs) + + def natural_key(self): + return (self.app_label, self.model) + +class SiteProfileNotAvailable(Exception): + pass + +class PermissionManager(models.Manager): + def get_by_natural_key(self, codename, app_label, model): + return self.get( + codename=codename, + content_type=ContentType.objects.get_by_natural_key(app_label, model) + ) + +class Permission(Document): + """The permissions system provides a way to assign permissions to specific users and groups of users. + + The permission system is used by the Django admin site, but may also be useful in your own code. The Django admin site uses permissions as follows: + + - The "add" permission limits the user's ability to view the "add" form and add an object. + - The "change" permission limits a user's ability to view the change list, view the "change" form and change an object. + - The "delete" permission limits the ability to delete an object. + + Permissions are set globally per type of object, not per specific object instance. It is possible to say "Mary may change news stories," but it's not currently possible to say "Mary may change news stories, but only the ones she created herself" or "Mary may only change news stories that have a certain status or publication date." + + Three basic permissions -- add, change and delete -- are automatically created for each Django model. 
+ """ + name = StringField(max_length=50, verbose_name=_('username')) + content_type = ReferenceField(ContentType) + codename = StringField(max_length=100, verbose_name=_('codename')) + # FIXME: don't access field of the other class + # unique_with=['content_type__app_label', 'content_type__model']) + + objects = PermissionManager() + + class Meta: + verbose_name = _('permission') + verbose_name_plural = _('permissions') + # unique_together = (('content_type', 'codename'),) + # ordering = ('content_type__app_label', 'content_type__model', 'codename') + + def __unicode__(self): + return u"%s | %s | %s" % ( + unicode(self.content_type.app_label), + unicode(self.content_type), + unicode(self.name)) + + def natural_key(self): + return (self.codename,) + self.content_type.natural_key() + natural_key.dependencies = ['contenttypes.contenttype'] + +class Group(Document): + """Groups are a generic way of categorizing users to apply permissions, or some other label, to those users. A user can belong to any number of groups. + + A user in a group automatically has all the permissions granted to that group. For example, if the group Site editors has the permission can_edit_home_page, any user in that group will have that permission. + + Beyond permissions, groups are a convenient way to categorize users to apply some label, or extended functionality, to them. For example, you could create a group 'Special users', and you could write code that would do special things to those users -- such as giving them access to a members-only portion of your site, or sending them members-only e-mail messages. 
+ """ + name = StringField(max_length=80, unique=True, verbose_name=_('name')) + # permissions = models.ManyToManyField(Permission, verbose_name=_('permissions'), blank=True) + permissions = ListField(ReferenceField(Permission, verbose_name=_('permissions'), required=False)) + + class Meta: + verbose_name = _('group') + verbose_name_plural = _('groups') + + def __unicode__(self): + return self.name + +class UserManager(models.Manager): + def create_user(self, username, email, password=None): + """ + Creates and saves a User with the given username, e-mail and password. + """ + now = datetime.datetime.now() + + # Normalize the address by lowercasing the domain part of the email + # address. + try: + email_name, domain_part = email.strip().split('@', 1) + except ValueError: + pass + else: + email = '@'.join([email_name, domain_part.lower()]) + + user = self.model(username=username, email=email, is_staff=False, + is_active=True, is_superuser=False, last_login=now, + date_joined=now) + + user.set_password(password) + user.save(using=self._db) + return user + + def create_superuser(self, username, email, password): + u = self.create_user(username, email, password) + u.is_staff = True + u.is_active = True + u.is_superuser = True + u.save(using=self._db) + return u + + def make_random_password(self, length=10, allowed_chars='abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'): + "Generates a random password with the given length and given allowed_chars" + # Note that default value of allowed_chars does not have "I" or letters + # that look like it -- just to avoid confusion. + from random import choice + return ''.join([choice(allowed_chars) for i in range(length)]) + + +# A few helper functions for common logic between User and AnonymousUser. 
+def _user_get_all_permissions(user, obj): + permissions = set() + anon = user.is_anonymous() + for backend in auth.get_backends(): + if not anon or backend.supports_anonymous_user: + if hasattr(backend, "get_all_permissions"): + if obj is not None: + if backend.supports_object_permissions: + permissions.update( + backend.get_all_permissions(user, obj) + ) + else: + permissions.update(backend.get_all_permissions(user)) + return permissions + + +def _user_has_perm(user, perm, obj): + anon = user.is_anonymous() + active = user.is_active + for backend in auth.get_backends(): + if (not active and not anon and backend.supports_inactive_user) or \ + (not anon or backend.supports_anonymous_user): + if hasattr(backend, "has_perm"): + if obj is not None: + if (backend.supports_object_permissions and + backend.has_perm(user, perm, obj)): + return True + else: + if backend.has_perm(user, perm): + return True + return False + + +def _user_has_module_perms(user, app_label): + anon = user.is_anonymous() + active = user.is_active + for backend in auth.get_backends(): + if (not active and not anon and backend.supports_inactive_user) or \ + (not anon or backend.supports_anonymous_user): + if hasattr(backend, "has_module_perms"): + if backend.has_module_perms(user, app_label): + return True + return False + + class User(Document): """A User document that aims to mirror most of the API specified by Django at http://docs.djangoproject.com/en/dev/topics/auth/#users From 3425574ddcd63d126bc96556e182cc0e7cf18bcb Mon Sep 17 00:00:00 2001 From: Luis Araujo Date: Thu, 27 Sep 2012 14:30:59 -0300 Subject: [PATCH 003/464] Adding, adjust and transplant more methods to auth.User model --- mongoengine/django/auth.py | 150 ++++++++++++++++++++++++++----------- 1 file changed, 105 insertions(+), 45 deletions(-) diff --git a/mongoengine/django/auth.py b/mongoengine/django/auth.py index d9f0584..0b6ffb9 100644 --- a/mongoengine/django/auth.py +++ b/mongoengine/django/auth.py @@ -5,6 +5,7 @@ from 
mongoengine import * from django.utils.encoding import smart_str from django.db import models from django.contrib.contenttypes.models import ContentTypeManager +from django.contrib import auth from django.contrib.auth.models import AnonymousUser from django.utils.translation import ugettext_lazy as _ @@ -175,51 +176,6 @@ class UserManager(models.Manager): return ''.join([choice(allowed_chars) for i in range(length)]) -# A few helper functions for common logic between User and AnonymousUser. -def _user_get_all_permissions(user, obj): - permissions = set() - anon = user.is_anonymous() - for backend in auth.get_backends(): - if not anon or backend.supports_anonymous_user: - if hasattr(backend, "get_all_permissions"): - if obj is not None: - if backend.supports_object_permissions: - permissions.update( - backend.get_all_permissions(user, obj) - ) - else: - permissions.update(backend.get_all_permissions(user)) - return permissions - - -def _user_has_perm(user, perm, obj): - anon = user.is_anonymous() - active = user.is_active - for backend in auth.get_backends(): - if (not active and not anon and backend.supports_inactive_user) or \ - (not anon or backend.supports_anonymous_user): - if hasattr(backend, "has_perm"): - if obj is not None: - if (backend.supports_object_permissions and - backend.has_perm(user, perm, obj)): - return True - else: - if backend.has_perm(user, perm): - return True - return False - - -def _user_has_module_perms(user, app_label): - anon = user.is_anonymous() - active = user.is_active - for backend in auth.get_backends(): - if (not active and not anon and backend.supports_inactive_user) or \ - (not anon or backend.supports_anonymous_user): - if hasattr(backend, "has_module_perms"): - if backend.has_module_perms(user, app_label): - return True - return False - class User(Document): """A User document that aims to mirror most of the API specified by Django @@ -313,9 +269,111 @@ class User(Document): user.save() return user + def 
get_all_permissions(self, obj=None): + permissions = set() + anon = self.is_anonymous() + for backend in auth.get_backends(): + if not anon or backend.supports_anonymous_user: + if hasattr(backend, "get_all_permissions"): + if obj is not None: + if backend.supports_object_permissions: + permissions.update( + backend.get_all_permissions(user, obj) + ) + else: + permissions.update(backend.get_all_permissions(self)) + return permissions + def get_and_delete_messages(self): return [] + def has_perm(self, perm, obj=None): + anon = self.is_anonymous() + active = self.is_active + for backend in auth.get_backends(): + if (not active and not anon and backend.supports_inactive_user) or \ + (not anon or backend.supports_anonymous_user): + if hasattr(backend, "has_perm"): + if obj is not None: + if (backend.supports_object_permissions and + backend.has_perm(self, perm, obj)): + return True + else: + if backend.has_perm(self, perm): + return True + return False + + def has_perms(self, perm_list, obj=None): + """ + Returns True if the user has each of the specified permissions. + If object is passed, it checks if the user has all required perms + for this object. + """ + for perm in perm_list: + if not self.has_perm(perm, obj): + return False + return True + + def has_module_perms(self, app_label): + anon = self.is_anonymous() + active = self.is_active + for backend in auth.get_backends(): + if (not active and not anon and backend.supports_inactive_user) or \ + (not anon or backend.supports_anonymous_user): + if hasattr(backend, "has_module_perms"): + if backend.has_module_perms(self, app_label): + return True + return False + + def get_and_delete_messages(self): + messages = [] + for m in self.message_set.all(): + messages.append(m.message) + m.delete() + return messages + + def email_user(self, subject, message, from_email=None): + "Sends an e-mail to this User." 
+ from django.core.mail import send_mail + send_mail(subject, message, from_email, [self.email]) + + def get_profile(self): + """ + Returns site-specific profile for this user. Raises + SiteProfileNotAvailable if this site does not allow profiles. + """ + if not hasattr(self, '_profile_cache'): + from django.conf import settings + if not getattr(settings, 'AUTH_PROFILE_MODULE', False): + raise SiteProfileNotAvailable('You need to set AUTH_PROFILE_MO' + 'DULE in your project settings') + try: + app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.') + except ValueError: + raise SiteProfileNotAvailable('app_label and model_name should' + ' be separated by a dot in the AUTH_PROFILE_MODULE set' + 'ting') + + try: + model = models.get_model(app_label, model_name) + if model is None: + raise SiteProfileNotAvailable('Unable to load the profile ' + 'model, check AUTH_PROFILE_MODULE in your project sett' + 'ings') + self._profile_cache = model._default_manager.using(self._state.db).get(user__id__exact=self.id) + self._profile_cache.user = self + except (ImportError, ImproperlyConfigured): + raise SiteProfileNotAvailable + return self._profile_cache + + def _get_message_set(self): + import warnings + warnings.warn('The user messaging API is deprecated. Please update' + ' your code to use the new messages framework.', + category=DeprecationWarning) + return self._message_set + message_set = property(_get_message_set) + class MongoEngineBackend(object): """Authenticate using MongoEngine and mongoengine.django.auth.User. 
@@ -329,6 +387,8 @@ class MongoEngineBackend(object): user = User.objects(username=username).first() if user: if password and user.check_password(password): + backend = auth.get_backends()[0] + user.backend = "%s.%s" % (backend.__module__, backend.__class__.__name__) return user return None From 0bfc96e459d19d188397db73fa557abbc8ecdc6d Mon Sep 17 00:00:00 2001 From: Luis Araujo Date: Thu, 27 Sep 2012 14:32:50 -0300 Subject: [PATCH 004/464] exposing mongoengine.django module --- mongoengine/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index 9044e61..a4c56b8 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -8,6 +8,7 @@ import queryset from queryset import * import signals from signals import * +import django __all__ = (document.__all__ + fields.__all__ + connection.__all__ + queryset.__all__ + signals.__all__) From e4af0e361ab5b91fad7ca9dbc8f5ed631f0b2c77 Mon Sep 17 00:00:00 2001 From: Aleksey Porfirov Date: Mon, 15 Oct 2012 02:11:01 +0400 Subject: [PATCH 005/464] Add session expiration test (with django timezone support activated) --- tests/test_django.py | 45 +++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 44 insertions(+), 1 deletion(-) diff --git a/tests/test_django.py b/tests/test_django.py index 398fd3e..3b0b04f 100644 --- a/tests/test_django.py +++ b/tests/test_django.py @@ -12,7 +12,7 @@ try: from django.conf import settings from django.core.paginator import Paginator - settings.configure() + settings.configure(USE_TZ=True) from django.contrib.sessions.tests import SessionTestsMixin from mongoengine.django.sessions import SessionStore, MongoSession @@ -24,6 +24,37 @@ except Exception, err: raise err +from datetime import tzinfo, timedelta +ZERO = timedelta(0) + +class FixedOffset(tzinfo): + """Fixed offset in minutes east from UTC.""" + + def __init__(self, offset, name): + self.__offset = timedelta(minutes = offset) + self.__name = name + + def 
utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return ZERO + + +def activate_timezone(tz): + """Activate Django timezone support if it is available. + """ + try: + from django.utils import timezone + timezone.deactivate() + timezone.activate(tz) + except ImportError: + pass + + class QuerySetTest(unittest.TestCase): def setUp(self): @@ -120,3 +151,15 @@ class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): session['test'] = True session.save() self.assertTrue('test' in session) + + def test_session_expiration_tz(self): + activate_timezone(FixedOffset(60, 'UTC+1')) + # create and save new session + session = SessionStore() + session.set_expiry(600) # expire in 600 seconds + session['test_expire'] = True + session.save() + # reload session with key + key = session.session_key + session = SessionStore(key) + self.assertTrue('test_expire' in session, 'Session has expired before it is expected') From 0a89899ad088e712b6bca54e32f30aae64ab2bf7 Mon Sep 17 00:00:00 2001 From: Aleksey Porfirov Date: Mon, 15 Oct 2012 02:13:52 +0400 Subject: [PATCH 006/464] Fix django timezone support --- mongoengine/django/auth.py | 9 ++++----- mongoengine/django/sessions.py | 6 +++--- mongoengine/django/utils.py | 6 ++++++ 3 files changed, 13 insertions(+), 8 deletions(-) create mode 100644 mongoengine/django/utils.py diff --git a/mongoengine/django/auth.py b/mongoengine/django/auth.py index 65afacf..3776d54 100644 --- a/mongoengine/django/auth.py +++ b/mongoengine/django/auth.py @@ -1,5 +1,3 @@ -import datetime - from mongoengine import * from django.utils.encoding import smart_str @@ -33,6 +31,7 @@ except ImportError: hash = get_hexdigest(algo, salt, raw_password) return '%s$%s$%s' % (algo, salt, hash) +from .utils import datetime_now REDIRECT_FIELD_NAME = 'next' @@ -62,9 +61,9 @@ class User(Document): is_superuser = BooleanField(default=False, verbose_name=_('superuser status'), help_text=_("Designates that this 
user has all permissions without explicitly assigning them.")) - last_login = DateTimeField(default=datetime.datetime.now, + last_login = DateTimeField(default=datetime_now, verbose_name=_('last login')) - date_joined = DateTimeField(default=datetime.datetime.now, + date_joined = DateTimeField(default=datetime_now, verbose_name=_('date joined')) meta = { @@ -130,7 +129,7 @@ class User(Document): """Create (and save) a new user with the given username, password and email address. """ - now = datetime.datetime.now() + now = datetime_now() # Normalize the address by lowercasing the domain part of the email # address. diff --git a/mongoengine/django/sessions.py b/mongoengine/django/sessions.py index f178342..6e964a7 100644 --- a/mongoengine/django/sessions.py +++ b/mongoengine/django/sessions.py @@ -1,5 +1,3 @@ -from datetime import datetime - from django.conf import settings from django.contrib.sessions.backends.base import SessionBase, CreateError from django.core.exceptions import SuspiciousOperation @@ -10,6 +8,8 @@ from mongoengine import fields from mongoengine.queryset import OperationError from mongoengine.connection import DEFAULT_CONNECTION_NAME +from .utils import datetime_now + MONGOENGINE_SESSION_DB_ALIAS = getattr( settings, 'MONGOENGINE_SESSION_DB_ALIAS', @@ -33,7 +33,7 @@ class SessionStore(SessionBase): def load(self): try: s = MongoSession.objects(session_key=self.session_key, - expire_date__gt=datetime.now())[0] + expire_date__gt=datetime_now())[0] return self.decode(force_unicode(s.session_data)) except (IndexError, SuspiciousOperation): self.create() diff --git a/mongoengine/django/utils.py b/mongoengine/django/utils.py new file mode 100644 index 0000000..d3ef8a4 --- /dev/null +++ b/mongoengine/django/utils.py @@ -0,0 +1,6 @@ +try: + # django >= 1.4 + from django.utils.timezone import now as datetime_now +except ImportError: + from datetime import datetime + datetime_now = datetime.now From 6f29d1238642d395b197645f6770daeab42495d3 Mon Sep 17 
00:00:00 2001 From: Ross Lawley Date: Tue, 9 Oct 2012 15:02:13 +0000 Subject: [PATCH 007/464] Changed the inheritance model to remove types The inheritance model has changed, we no longer need to store an array of `types` with the model we can just use the classname in `_cls`. See the upgrade docs for information on how to upgrade MongoEngine/mongoengine#148 --- docs/changelog.rst | 5 + docs/guide/defining-documents.rst | 23 +- docs/upgrade.rst | 39 + mongoengine/__init__.py | 6 +- mongoengine/base.py | 1523 -------------- mongoengine/base/__init__.py | 5 + mongoengine/base/common.py | 25 + mongoengine/base/datastructures.py | 124 ++ mongoengine/base/document.py | 644 ++++++ mongoengine/base/fields.py | 371 ++++ mongoengine/base/metaclasses.py | 388 ++++ mongoengine/common.py | 35 + mongoengine/dereference.py | 2 +- mongoengine/django/shortcuts.py | 2 +- mongoengine/document.py | 28 +- mongoengine/errors.py | 124 ++ mongoengine/fields.py | 6 +- mongoengine/queryset/__init__.py | 11 + mongoengine/queryset/field_list.py | 51 + mongoengine/queryset/manager.py | 61 + mongoengine/{ => queryset}/queryset.py | 818 +------- mongoengine/queryset/transform.py | 237 +++ mongoengine/queryset/visitor.py | 237 +++ setup.cfg | 2 +- tests/__init__.py | 2 + .../__init__.py} | 12 +- tests/document/__init__.py | 11 + tests/document/class_methods.py | 183 ++ tests/document/delta.py | 688 +++++++ tests/document/dynamic.py | 270 +++ tests/document/indexes.py | 637 ++++++ tests/document/inheritance.py | 395 ++++ .../instance.py} | 1752 +---------------- tests/{ => document}/mongoengine.png | Bin tests/migration/__init__.py | 4 + .../test_convert_to_new_inheritance_model.py | 51 + tests/migration/turn_off_inheritance.py | 62 + tests/test_dynamic_document.py | 533 ----- tests/test_fields.py | 5 +- tests/test_queryset.py | 106 +- 40 files changed, 4856 insertions(+), 4622 deletions(-) delete mode 100644 mongoengine/base.py create mode 100644 mongoengine/base/__init__.py create mode 100644 
mongoengine/base/common.py create mode 100644 mongoengine/base/datastructures.py create mode 100644 mongoengine/base/document.py create mode 100644 mongoengine/base/fields.py create mode 100644 mongoengine/base/metaclasses.py create mode 100644 mongoengine/common.py create mode 100644 mongoengine/errors.py create mode 100644 mongoengine/queryset/__init__.py create mode 100644 mongoengine/queryset/field_list.py create mode 100644 mongoengine/queryset/manager.py rename mongoengine/{ => queryset}/queryset.py (59%) create mode 100644 mongoengine/queryset/transform.py create mode 100644 mongoengine/queryset/visitor.py rename tests/{test_all_warnings.py => all_warnings/__init__.py} (91%) create mode 100644 tests/document/__init__.py create mode 100644 tests/document/class_methods.py create mode 100644 tests/document/delta.py create mode 100644 tests/document/dynamic.py create mode 100644 tests/document/indexes.py create mode 100644 tests/document/inheritance.py rename tests/{test_document.py => document/instance.py} (50%) rename tests/{ => document}/mongoengine.png (100%) create mode 100644 tests/migration/__init__.py create mode 100644 tests/migration/test_convert_to_new_inheritance_model.py create mode 100644 tests/migration/turn_off_inheritance.py delete mode 100644 tests/test_dynamic_document.py diff --git a/docs/changelog.rst b/docs/changelog.rst index b2a855d..8388b05 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -2,6 +2,11 @@ Changelog ========= +Changes in 0.8 +============== +- Remove _types and just use _cls for inheritance (MongoEngine/mongoengine#148) + + Changes in 0.7.X ================ - Unicode fix for repr (MongoEngine/mongoengine#133) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 3ee7796..cf3b5a6 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -461,9 +461,10 @@ If a dictionary is passed then the following options are available: :attr:`fields` (Default: 
None) The fields to index. Specified in the same format as described above. -:attr:`types` (Default: True) - Whether the index should have the :attr:`_types` field added automatically - to the start of the index. +:attr:`cls` (Default: True) + If you have polymorphic models that inherit and have `allow_inheritance` + turned on, you can configure whether the index should have the + :attr:`_cls` field added automatically to the start of the index. :attr:`sparse` (Default: False) Whether the index should be sparse. @@ -590,14 +591,14 @@ convenient and efficient retrieval of related documents:: Working with existing data -------------------------- To enable correct retrieval of documents involved in this kind of heirarchy, -two extra attributes are stored on each document in the database: :attr:`_cls` -and :attr:`_types`. These are hidden from the user through the MongoEngine -interface, but may not be present if you are trying to use MongoEngine with -an existing database. For this reason, you may disable this inheritance -mechansim, removing the dependency of :attr:`_cls` and :attr:`_types`, enabling -you to work with existing databases. To disable inheritance on a document -class, set :attr:`allow_inheritance` to ``False`` in the :attr:`meta` -dictionary:: +an extra attribute is stored on each document in the database: :attr:`_cls`. +These are hidden from the user through the MongoEngine interface, but may not +be present if you are trying to use MongoEngine with an existing database. + +For this reason, you may disable this inheritance mechansim, removing the +dependency of :attr:`_cls`, enabling you to work with existing databases. 
+To disable inheritance on a document class, set :attr:`allow_inheritance` to +``False`` in the :attr:`meta` dictionary:: # Will work with data in an existing collection named 'cmsPage' class Page(Document): diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 82ac7ca..99e3078 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -2,6 +2,45 @@ Upgrading ========= +0.7 to 0.8 +========== + +Inheritance +----------- + +The inheritance model has changed, we no longer need to store an array of +`types` with the model we can just use the classname in `_cls`. This means +that you will have to update your indexes for each of your inherited classes +like so: + + # 1. Declaration of the class + class Animal(Document): + name = StringField() + meta = { + 'allow_inheritance': True, + 'indexes': ['name'] + } + + # 2. Remove _types + collection = Animal._get_collection() + collection.update({}, {"$unset": {"_types": 1}}, multi=True) + + # 3. Confirm extra data is removed + count = collection.find({'_types': {"$exists": True}}).count() + assert count == 0 + + # 4. Remove indexes + info = collection.index_information() + indexes_to_drop = [key for key, value in info.iteritems() + if '_types' in dict(value['key'])] + for index in indexes_to_drop: + collection.drop_index(index) + + # 5. 
Recreate indexes + Animal.objects._ensure_indexes() + + + 0.6 to 0.7 ========== diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index 9044e61..d92165c 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -9,10 +9,10 @@ from queryset import * import signals from signals import * -__all__ = (document.__all__ + fields.__all__ + connection.__all__ + - queryset.__all__ + signals.__all__) +__all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + + list(queryset.__all__) + signals.__all__) -VERSION = (0, 7, 5) +VERSION = (0, 8, 0, '+') def get_version(): diff --git a/mongoengine/base.py b/mongoengine/base.py deleted file mode 100644 index fa12e35..0000000 --- a/mongoengine/base.py +++ /dev/null @@ -1,1523 +0,0 @@ -import operator -import sys -import warnings -import weakref - -from collections import defaultdict -from functools import partial - -from queryset import QuerySet, QuerySetManager -from queryset import DoesNotExist, MultipleObjectsReturned -from queryset import DO_NOTHING - -from mongoengine import signals -from mongoengine.python_support import (PY3, UNICODE_KWARGS, txt_type, - to_str_keys_recursive) - -import pymongo -from bson import ObjectId -from bson.dbref import DBRef - -ALLOW_INHERITANCE = True - -_document_registry = {} -_class_registry = {} - - -class NotRegistered(Exception): - pass - - -class InvalidDocumentError(Exception): - pass - - -class ValidationError(AssertionError): - """Validation exception. - - May represent an error validating a field or a - document containing fields with validation errors. - - :ivar errors: A dictionary of errors for fields within this - document or list, or None if the error is for an - individual field. 
- """ - - errors = {} - field_name = None - _message = None - - def __init__(self, message="", **kwargs): - self.errors = kwargs.get('errors', {}) - self.field_name = kwargs.get('field_name') - self.message = message - - def __str__(self): - return txt_type(self.message) - - def __repr__(self): - return '%s(%s,)' % (self.__class__.__name__, self.message) - - def __getattribute__(self, name): - message = super(ValidationError, self).__getattribute__(name) - if name == 'message': - if self.field_name: - message = '%s' % message - if self.errors: - message = '%s(%s)' % (message, self._format_errors()) - return message - - def _get_message(self): - return self._message - - def _set_message(self, message): - self._message = message - - message = property(_get_message, _set_message) - - def to_dict(self): - """Returns a dictionary of all errors within a document - - Keys are field names or list indices and values are the - validation error messages, or a nested dictionary of - errors for an embedded document or list. 
- """ - - def build_dict(source): - errors_dict = {} - if not source: - return errors_dict - if isinstance(source, dict): - for field_name, error in source.iteritems(): - errors_dict[field_name] = build_dict(error) - elif isinstance(source, ValidationError) and source.errors: - return build_dict(source.errors) - else: - return unicode(source) - return errors_dict - if not self.errors: - return {} - return build_dict(self.errors) - - def _format_errors(self): - """Returns a string listing all errors within a document""" - - def generate_key(value, prefix=''): - if isinstance(value, list): - value = ' '.join([generate_key(k) for k in value]) - if isinstance(value, dict): - value = ' '.join( - [generate_key(v, k) for k, v in value.iteritems()]) - - results = "%s.%s" % (prefix, value) if prefix else value - return results - - error_dict = defaultdict(list) - for k, v in self.to_dict().iteritems(): - error_dict[generate_key(v)].append(k) - return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()]) - - -def get_document(name): - doc = _document_registry.get(name, None) - if not doc: - # Possible old style names - end = ".%s" % name - possible_match = [k for k in _document_registry.keys() - if k.endswith(end)] - if len(possible_match) == 1: - doc = _document_registry.get(possible_match.pop(), None) - if not doc: - raise NotRegistered(""" - `%s` has not been registered in the document registry. - Importing the document class automatically registers it, has it - been imported? - """.strip() % name) - return doc - - -class BaseField(object): - """A base class for fields in a MongoDB document. Instances of this class - may be added to subclasses of `Document` to define a document's schema. - - .. versionchanged:: 0.5 - added verbose and help text - """ - - name = None - - # Fields may have _types inserted into indexes by default - _index_with_types = True - _geo_index = False - - # These track each time a Field instance is created. Used to retain order. 
- # The auto_creation_counter is used for fields that MongoEngine implicitly - # creates, creation_counter is used for all user-specified fields. - creation_counter = 0 - auto_creation_counter = -1 - - def __init__(self, db_field=None, name=None, required=False, default=None, - unique=False, unique_with=None, primary_key=False, - validation=None, choices=None, verbose_name=None, - help_text=None): - self.db_field = (db_field or name) if not primary_key else '_id' - if name: - msg = "Fields' 'name' attribute deprecated in favour of 'db_field'" - warnings.warn(msg, DeprecationWarning) - self.name = None - self.required = required or primary_key - self.default = default - self.unique = bool(unique or unique_with) - self.unique_with = unique_with - self.primary_key = primary_key - self.validation = validation - self.choices = choices - self.verbose_name = verbose_name - self.help_text = help_text - - # Adjust the appropriate creation counter, and save our local copy. - if self.db_field == '_id': - self.creation_counter = BaseField.auto_creation_counter - BaseField.auto_creation_counter -= 1 - else: - self.creation_counter = BaseField.creation_counter - BaseField.creation_counter += 1 - - def __get__(self, instance, owner): - """Descriptor for retrieving a value from a field in a document. Do - any necessary conversion between Python and MongoDB types. - """ - if instance is None: - # Document class being used rather than a document object - return self - - # Get value from document instance if available, if not use default - value = instance._data.get(self.name) - - if value is None: - value = self.default - # Allow callable default values - if callable(value): - value = value() - - return value - - def __set__(self, instance, value): - """Descriptor for assigning a value to a field in a document. 
- """ - instance._data[self.name] = value - if instance._initialised: - instance._mark_as_changed(self.name) - - def error(self, message="", errors=None, field_name=None): - """Raises a ValidationError. - """ - field_name = field_name if field_name else self.name - raise ValidationError(message, errors=errors, field_name=field_name) - - def to_python(self, value): - """Convert a MongoDB-compatible type to a Python type. - """ - return value - - def to_mongo(self, value): - """Convert a Python type to a MongoDB-compatible type. - """ - return self.to_python(value) - - def prepare_query_value(self, op, value): - """Prepare a value that is being used in a query for PyMongo. - """ - return value - - def validate(self, value): - """Perform validation on a value. - """ - pass - - def _validate(self, value): - Document = _import_class('Document') - EmbeddedDocument = _import_class('EmbeddedDocument') - # check choices - if self.choices: - is_cls = isinstance(value, (Document, EmbeddedDocument)) - value_to_check = value.__class__ if is_cls else value - err_msg = 'an instance' if is_cls else 'one' - if isinstance(self.choices[0], (list, tuple)): - option_keys = [k for k, v in self.choices] - if value_to_check not in option_keys: - msg = ('Value must be %s of %s' % - (err_msg, unicode(option_keys))) - self.error(msg) - elif value_to_check not in self.choices: - msg = ('Value must be %s of %s' % - (err_msg, unicode(self.choices))) - self.error() - - # check validation argument - if self.validation is not None: - if callable(self.validation): - if not self.validation(value): - self.error('Value does not match custom validation method') - else: - raise ValueError('validation argument for "%s" must be a ' - 'callable.' % self.name) - - self.validate(value) - - -class ComplexBaseField(BaseField): - """Handles complex fields, such as lists / dictionaries. - - Allows for nesting of embedded documents inside complex types. 
- Handles the lazy dereferencing of a queryset by lazily dereferencing all - items in a list / dict rather than one at a time. - - .. versionadded:: 0.5 - """ - - field = None - __dereference = False - - def __get__(self, instance, owner): - """Descriptor to automatically dereference references. - """ - if instance is None: - # Document class being used rather than a document object - return self - - ReferenceField = _import_class('ReferenceField') - GenericReferenceField = _import_class('GenericReferenceField') - dereference = self.field is None or isinstance(self.field, - (GenericReferenceField, ReferenceField)) - if not self._dereference and instance._initialised and dereference: - instance._data[self.name] = self._dereference( - instance._data.get(self.name), max_depth=1, instance=instance, - name=self.name - ) - - value = super(ComplexBaseField, self).__get__(instance, owner) - - # Convert lists / values so we can watch for any changes on them - if (isinstance(value, (list, tuple)) and - not isinstance(value, BaseList)): - value = BaseList(value, instance, self.name) - instance._data[self.name] = value - elif isinstance(value, dict) and not isinstance(value, BaseDict): - value = BaseDict(value, instance, self.name) - instance._data[self.name] = value - - if (instance._initialised and isinstance(value, (BaseList, BaseDict)) - and not value._dereferenced): - value = self._dereference( - value, max_depth=1, instance=instance, name=self.name - ) - value._dereferenced = True - instance._data[self.name] = value - - return value - - def __set__(self, instance, value): - """Descriptor for assigning a value to a field in a document. - """ - instance._data[self.name] = value - instance._mark_as_changed(self.name) - - def to_python(self, value): - """Convert a MongoDB-compatible type to a Python type. 
- """ - Document = _import_class('Document') - - if isinstance(value, basestring): - return value - - if hasattr(value, 'to_python'): - return value.to_python() - - is_list = False - if not hasattr(value, 'items'): - try: - is_list = True - value = dict([(k, v) for k, v in enumerate(value)]) - except TypeError: # Not iterable return the value - return value - - if self.field: - value_dict = dict([(key, self.field.to_python(item)) - for key, item in value.items()]) - else: - value_dict = {} - for k, v in value.items(): - if isinstance(v, Document): - # We need the id from the saved object to create the DBRef - if v.pk is None: - self.error('You can only reference documents once they' - ' have been saved to the database') - collection = v._get_collection_name() - value_dict[k] = DBRef(collection, v.pk) - elif hasattr(v, 'to_python'): - value_dict[k] = v.to_python() - else: - value_dict[k] = self.to_python(v) - - if is_list: # Convert back to a list - return [v for k, v in sorted(value_dict.items(), - key=operator.itemgetter(0))] - return value_dict - - def to_mongo(self, value): - """Convert a Python type to a MongoDB-compatible type. 
- """ - Document = _import_class("Document") - - if isinstance(value, basestring): - return value - - if hasattr(value, 'to_mongo'): - return value.to_mongo() - - is_list = False - if not hasattr(value, 'items'): - try: - is_list = True - value = dict([(k, v) for k, v in enumerate(value)]) - except TypeError: # Not iterable return the value - return value - - if self.field: - value_dict = dict([(key, self.field.to_mongo(item)) - for key, item in value.items()]) - else: - value_dict = {} - for k, v in value.items(): - if isinstance(v, Document): - # We need the id from the saved object to create the DBRef - if v.pk is None: - self.error('You can only reference documents once they' - ' have been saved to the database') - - # If its a document that is not inheritable it won't have - # _types / _cls data so make it a generic reference allows - # us to dereference - meta = getattr(v, '_meta', {}) - allow_inheritance = ( - meta.get('allow_inheritance', ALLOW_INHERITANCE) - == False) - if allow_inheritance and not self.field: - GenericReferenceField = _import_class("GenericReferenceField") - value_dict[k] = GenericReferenceField().to_mongo(v) - else: - collection = v._get_collection_name() - value_dict[k] = DBRef(collection, v.pk) - elif hasattr(v, 'to_mongo'): - value_dict[k] = v.to_mongo() - else: - value_dict[k] = self.to_mongo(v) - - if is_list: # Convert back to a list - return [v for k, v in sorted(value_dict.items(), - key=operator.itemgetter(0))] - return value_dict - - def validate(self, value): - """If field is provided ensure the value is valid. 
- """ - errors = {} - if self.field: - if hasattr(value, 'iteritems') or hasattr(value, 'items'): - sequence = value.iteritems() - else: - sequence = enumerate(value) - for k, v in sequence: - try: - self.field._validate(v) - except ValidationError, error: - errors[k] = error.errors or error - except (ValueError, AssertionError), error: - errors[k] = error - - if errors: - field_class = self.field.__class__.__name__ - self.error('Invalid %s item (%s)' % (field_class, value), - errors=errors) - # Don't allow empty values if required - if self.required and not value: - self.error('Field is required and cannot be empty') - - def prepare_query_value(self, op, value): - return self.to_mongo(value) - - def lookup_member(self, member_name): - if self.field: - return self.field.lookup_member(member_name) - return None - - def _set_owner_document(self, owner_document): - if self.field: - self.field.owner_document = owner_document - self._owner_document = owner_document - - def _get_owner_document(self, owner_document): - self._owner_document = owner_document - - owner_document = property(_get_owner_document, _set_owner_document) - - @property - def _dereference(self,): - if not self.__dereference: - DeReference = _import_class("DeReference") - self.__dereference = DeReference() # Cached - return self.__dereference - - -class ObjectIdField(BaseField): - """An field wrapper around MongoDB's ObjectIds. 
- """ - - def to_python(self, value): - if not isinstance(value, ObjectId): - value = ObjectId(value) - return value - - def to_mongo(self, value): - if not isinstance(value, ObjectId): - try: - return ObjectId(unicode(value)) - except Exception, e: - # e.message attribute has been deprecated since Python 2.6 - self.error(unicode(e)) - return value - - def prepare_query_value(self, op, value): - return self.to_mongo(value) - - def validate(self, value): - try: - ObjectId(unicode(value)) - except: - self.error('Invalid Object ID') - - -class DocumentMetaclass(type): - """Metaclass for all documents. - """ - - def __new__(cls, name, bases, attrs): - flattened_bases = cls._get_bases(bases) - super_new = super(DocumentMetaclass, cls).__new__ - - # If a base class just call super - metaclass = attrs.get('my_metaclass') - if metaclass and issubclass(metaclass, DocumentMetaclass): - return super_new(cls, name, bases, attrs) - - attrs['_is_document'] = attrs.get('_is_document', False) - - # EmbeddedDocuments could have meta data for inheritance - if 'meta' in attrs: - attrs['_meta'] = attrs.pop('meta') - - # Handle document Fields - - # Merge all fields from subclasses - doc_fields = {} - for base in flattened_bases[::-1]: - if hasattr(base, '_fields'): - doc_fields.update(base._fields) - - # Standard object mixin - merge in any Fields - if not hasattr(base, '_meta'): - base_fields = {} - for attr_name, attr_value in base.__dict__.iteritems(): - if not isinstance(attr_value, BaseField): - continue - attr_value.name = attr_name - if not attr_value.db_field: - attr_value.db_field = attr_name - base_fields[attr_name] = attr_value - doc_fields.update(base_fields) - - # Discover any document fields - field_names = {} - for attr_name, attr_value in attrs.iteritems(): - if not isinstance(attr_value, BaseField): - continue - attr_value.name = attr_name - if not attr_value.db_field: - attr_value.db_field = attr_name - doc_fields[attr_name] = attr_value - - # Count names to ensure 
no db_field redefinitions - field_names[attr_value.db_field] = field_names.get( - attr_value.db_field, 0) + 1 - - # Ensure no duplicate db_fields - duplicate_db_fields = [k for k, v in field_names.items() if v > 1] - if duplicate_db_fields: - msg = ("Multiple db_fields defined for: %s " % - ", ".join(duplicate_db_fields)) - raise InvalidDocumentError(msg) - - # Set _fields and db_field maps - attrs['_fields'] = doc_fields - attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k)) - for k, v in doc_fields.iteritems()]) - attrs['_reverse_db_field_map'] = dict( - (v, k) for k, v in attrs['_db_field_map'].iteritems()) - - # - # Set document hierarchy - # - superclasses = {} - class_name = [name] - for base in flattened_bases: - if (not getattr(base, '_is_base_cls', True) and - not getattr(base, '_meta', {}).get('abstract', True)): - # Collate heirarchy for _cls and _types - class_name.append(base.__name__) - - # Get superclasses from superclass - superclasses[base._class_name] = base - superclasses.update(base._superclasses) - - if hasattr(base, '_meta'): - # Warn if allow_inheritance isn't set and prevent - # inheritance of classes where inheritance is set to False - allow_inheritance = base._meta.get('allow_inheritance', - ALLOW_INHERITANCE) - if (not getattr(base, '_is_base_cls', True) - and allow_inheritance is None): - warnings.warn( - "%s uses inheritance, the default for " - "allow_inheritance is changing to off by default. " - "Please add it to the document meta." 
% name, - FutureWarning - ) - elif (allow_inheritance == False and - not base._meta.get('abstract')): - raise ValueError('Document %s may not be subclassed' % - base.__name__) - - attrs['_class_name'] = '.'.join(reversed(class_name)) - attrs['_superclasses'] = superclasses - - # Create the new_class - new_class = super_new(cls, name, bases, attrs) - - # Handle delete rules - Document, EmbeddedDocument, DictField = cls._import_classes() - for field in new_class._fields.itervalues(): - f = field - f.owner_document = new_class - delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING) - if isinstance(f, ComplexBaseField) and hasattr(f, 'field'): - delete_rule = getattr(f.field, - 'reverse_delete_rule', - DO_NOTHING) - if isinstance(f, DictField) and delete_rule != DO_NOTHING: - msg = ("Reverse delete rules are not supported " - "for %s (field: %s)" % - (field.__class__.__name__, field.name)) - raise InvalidDocumentError(msg) - - f = field.field - - if delete_rule != DO_NOTHING: - if issubclass(new_class, EmbeddedDocument): - msg = ("Reverse delete rules are not supported for " - "EmbeddedDocuments (field: %s)" % field.name) - raise InvalidDocumentError(msg) - f.document_type.register_delete_rule(new_class, - field.name, delete_rule) - - if (field.name and hasattr(Document, field.name) and - EmbeddedDocument not in new_class.mro()): - msg = ("%s is a document method and not a valid " - "field name" % field.name) - raise InvalidDocumentError(msg) - - # Add class to the _document_registry - _document_registry[new_class._class_name] = new_class - - # In Python 2, User-defined methods objects have special read-only - # attributes 'im_func' and 'im_self' which contain the function obj - # and class instance object respectively. With Python 3 these special - # attributes have been replaced by __func__ and __self__. 
The Blinker - # module continues to use im_func and im_self, so the code below - # copies __func__ into im_func and __self__ into im_self for - # classmethod objects in Document derived classes. - if PY3: - for key, val in new_class.__dict__.items(): - if isinstance(val, classmethod): - f = val.__get__(new_class) - if hasattr(f, '__func__') and not hasattr(f, 'im_func'): - f.__dict__.update({'im_func': getattr(f, '__func__')}) - if hasattr(f, '__self__') and not hasattr(f, 'im_self'): - f.__dict__.update({'im_self': getattr(f, '__self__')}) - - return new_class - - def add_to_class(self, name, value): - setattr(self, name, value) - - @classmethod - def _get_bases(cls, bases): - if isinstance(bases, BasesTuple): - return bases - seen = [] - bases = cls.__get_bases(bases) - unique_bases = (b for b in bases if not (b in seen or seen.append(b))) - return BasesTuple(unique_bases) - - @classmethod - def __get_bases(cls, bases): - for base in bases: - if base is object: - continue - yield base - for child_base in cls.__get_bases(base.__bases__): - yield child_base - - @classmethod - def _import_classes(cls): - Document = _import_class('Document') - EmbeddedDocument = _import_class('EmbeddedDocument') - DictField = _import_class('DictField') - return (Document, EmbeddedDocument, DictField) - - -class TopLevelDocumentMetaclass(DocumentMetaclass): - """Metaclass for top-level documents (i.e. documents that have their own - collection in the database. 
- """ - - def __new__(cls, name, bases, attrs): - flattened_bases = cls._get_bases(bases) - super_new = super(TopLevelDocumentMetaclass, cls).__new__ - - # Set default _meta data if base class, otherwise get user defined meta - if (attrs.get('my_metaclass') == TopLevelDocumentMetaclass): - # defaults - attrs['_meta'] = { - 'abstract': True, - 'max_documents': None, - 'max_size': None, - 'ordering': [], # default ordering applied at runtime - 'indexes': [], # indexes to be ensured at runtime - 'id_field': None, - 'index_background': False, - 'index_drop_dups': False, - 'index_opts': None, - 'delete_rules': None, - 'allow_inheritance': None, - } - attrs['_is_base_cls'] = True - attrs['_meta'].update(attrs.get('meta', {})) - else: - attrs['_meta'] = attrs.get('meta', {}) - # Explictly set abstract to false unless set - attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False) - attrs['_is_base_cls'] = False - - # Set flag marking as document class - as opposed to an object mixin - attrs['_is_document'] = True - - # Ensure queryset_class is inherited - if 'objects' in attrs: - manager = attrs['objects'] - if hasattr(manager, 'queryset_class'): - attrs['_meta']['queryset_class'] = manager.queryset_class - - # Clean up top level meta - if 'meta' in attrs: - del(attrs['meta']) - - # Find the parent document class - parent_doc_cls = [b for b in flattened_bases - if b.__class__ == TopLevelDocumentMetaclass] - parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0] - - # Prevent classes setting collection different to their parents - # If parent wasn't an abstract class - if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) - and not parent_doc_cls._meta.get('abstract', True)): - msg = "Trying to set a collection on a subclass (%s)" % name - warnings.warn(msg, SyntaxWarning) - del(attrs['_meta']['collection']) - - # Ensure abstract documents have abstract bases - if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'): - if (parent_doc_cls 
and - not parent_doc_cls._meta.get('abstract', False)): - msg = "Abstract document cannot have non-abstract base" - raise ValueError(msg) - return super_new(cls, name, bases, attrs) - - # Merge base class metas. - # Uses a special MetaDict that handles various merging rules - meta = MetaDict() - for base in flattened_bases[::-1]: - # Add any mixin metadata from plain objects - if hasattr(base, 'meta'): - meta.merge(base.meta) - elif hasattr(base, '_meta'): - meta.merge(base._meta) - - # Set collection in the meta if its callable - if (getattr(base, '_is_document', False) and - not base._meta.get('abstract')): - collection = meta.get('collection', None) - if callable(collection): - meta['collection'] = collection(base) - - meta.merge(attrs.get('_meta', {})) # Top level meta - - # Only simple classes (direct subclasses of Document) - # may set allow_inheritance to False - simple_class = all([b._meta.get('abstract') - for b in flattened_bases if hasattr(b, '_meta')]) - if (not simple_class and meta['allow_inheritance'] == False and - not meta['abstract']): - raise ValueError('Only direct subclasses of Document may set ' - '"allow_inheritance" to False') - - # Set default collection name - if 'collection' not in meta: - meta['collection'] = ''.join('_%s' % c if c.isupper() else c - for c in name).strip('_').lower() - attrs['_meta'] = meta - - # Call super and get the new class - new_class = super_new(cls, name, bases, attrs) - - meta = new_class._meta - - # Set index specifications - meta['index_specs'] = [QuerySet._build_index_spec(new_class, spec) - for spec in meta['indexes']] - unique_indexes = cls._unique_with_indexes(new_class) - new_class._meta['unique_indexes'] = unique_indexes - - # If collection is a callable - call it and set the value - collection = meta.get('collection') - if callable(collection): - new_class._meta['collection'] = collection(new_class) - - # Provide a default queryset unless one has been set - manager = attrs.get('objects', 
QuerySetManager()) - new_class.objects = manager - - # Validate the fields and set primary key if needed - for field_name, field in new_class._fields.iteritems(): - if field.primary_key: - # Ensure only one primary key is set - current_pk = new_class._meta.get('id_field') - if current_pk and current_pk != field_name: - raise ValueError('Cannot override primary key field') - - # Set primary key - if not current_pk: - new_class._meta['id_field'] = field_name - new_class.id = field - - # Set primary key if not defined by the document - if not new_class._meta.get('id_field'): - new_class._meta['id_field'] = 'id' - new_class._fields['id'] = ObjectIdField(db_field='_id') - new_class.id = new_class._fields['id'] - - # Merge in exceptions with parent hierarchy - exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) - module = attrs.get('__module__') - for exc in exceptions_to_merge: - name = exc.__name__ - parents = tuple(getattr(base, name) for base in flattened_bases - if hasattr(base, name)) or (exc,) - # Create new exception and set to new_class - exception = type(name, parents, {'__module__': module}) - setattr(new_class, name, exception) - - return new_class - - @classmethod - def _unique_with_indexes(cls, new_class, namespace=""): - """ - Find and set unique indexes - """ - unique_indexes = [] - for field_name, field in new_class._fields.items(): - # Generate a list of indexes needed by uniqueness constraints - if field.unique: - field.required = True - unique_fields = [field.db_field] - - # Add any unique_with fields to the back of the index spec - if field.unique_with: - if isinstance(field.unique_with, basestring): - field.unique_with = [field.unique_with] - - # Convert unique_with field names to real field names - unique_with = [] - for other_name in field.unique_with: - parts = other_name.split('.') - # Lookup real name - parts = QuerySet._lookup_field(new_class, parts) - name_parts = [part.db_field for part in parts] - 
unique_with.append('.'.join(name_parts)) - # Unique field should be required - parts[-1].required = True - unique_fields += unique_with - - # Add the new index to the list - index = [("%s%s" % (namespace, f), pymongo.ASCENDING) - for f in unique_fields] - unique_indexes.append(index) - - # Grab any embedded document field unique indexes - if (field.__class__.__name__ == "EmbeddedDocumentField" and - field.document_type != new_class): - field_namespace = "%s." % field_name - unique_indexes += cls._unique_with_indexes(field.document_type, - field_namespace) - - return unique_indexes - - -class MetaDict(dict): - """Custom dictionary for meta classes. - Handles the merging of set indexes - """ - _merge_options = ('indexes',) - - def merge(self, new_options): - for k, v in new_options.iteritems(): - if k in self._merge_options: - self[k] = self.get(k, []) + v - else: - self[k] = v - - -class BaseDocument(object): - - _dynamic = False - _created = True - _dynamic_lock = True - _initialised = False - - def __init__(self, **values): - signals.pre_init.send(self.__class__, document=self, values=values) - - self._data = {} - - # Assign default values to instance - for key, field in self._fields.iteritems(): - if self._db_field_map.get(key, key) in values: - continue - value = getattr(self, key, None) - setattr(self, key, value) - - # Set passed values after initialisation - if self._dynamic: - self._dynamic_fields = {} - dynamic_data = {} - for key, value in values.iteritems(): - if key in self._fields or key == '_id': - setattr(self, key, value) - elif self._dynamic: - dynamic_data[key] = value - else: - for key, value in values.iteritems(): - key = self._reverse_db_field_map.get(key, key) - setattr(self, key, value) - - # Set any get_fieldname_display methods - self.__set_field_display() - - if self._dynamic: - self._dynamic_lock = False - for key, value in dynamic_data.iteritems(): - setattr(self, key, value) - - # Flag initialised - self._initialised = True - 
signals.post_init.send(self.__class__, document=self) - - def __setattr__(self, name, value): - # Handle dynamic data only if an initialised dynamic document - if self._dynamic and not self._dynamic_lock: - - field = None - if not hasattr(self, name) and not name.startswith('_'): - DynamicField = _import_class("DynamicField") - field = DynamicField(db_field=name) - field.name = name - self._dynamic_fields[name] = field - - if not name.startswith('_'): - value = self.__expand_dynamic_values(name, value) - - # Handle marking data as changed - if name in self._dynamic_fields: - self._data[name] = value - if hasattr(self, '_changed_fields'): - self._mark_as_changed(name) - - if (self._is_document and not self._created and - name in self._meta.get('shard_key', tuple()) and - self._data.get(name) != value): - OperationError = _import_class('OperationError') - msg = "Shard Keys are immutable. Tried to update %s" % name - raise OperationError(msg) - - super(BaseDocument, self).__setattr__(name, value) - - def __expand_dynamic_values(self, name, value): - """expand any dynamic values to their correct types / values""" - if not isinstance(value, (dict, list, tuple)): - return value - - is_list = False - if not hasattr(value, 'items'): - is_list = True - value = dict([(k, v) for k, v in enumerate(value)]) - - if not is_list and '_cls' in value: - cls = get_document(value['_cls']) - return cls(**value) - - data = {} - for k, v in value.items(): - key = name if is_list else k - data[k] = self.__expand_dynamic_values(key, v) - - if is_list: # Convert back to a list - data_items = sorted(data.items(), key=operator.itemgetter(0)) - value = [v for k, v in data_items] - else: - value = data - - # Convert lists / values so we can watch for any changes on them - if (isinstance(value, (list, tuple)) and - not isinstance(value, BaseList)): - value = BaseList(value, self, name) - elif isinstance(value, dict) and not isinstance(value, BaseDict): - value = BaseDict(value, self, name) - - 
return value - - def validate(self): - """Ensure that all fields' values are valid and that required fields - are present. - """ - # Get a list of tuples of field names and their current values - fields = [(field, getattr(self, name)) - for name, field in self._fields.items()] - - # Ensure that each field is matched to a valid value - errors = {} - for field, value in fields: - if value is not None: - try: - field._validate(value) - except ValidationError, error: - errors[field.name] = error.errors or error - except (ValueError, AttributeError, AssertionError), error: - errors[field.name] = error - elif field.required: - errors[field.name] = ValidationError('Field is required', - field_name=field.name) - if errors: - raise ValidationError('ValidationError', errors=errors) - - def to_mongo(self): - """Return data dictionary ready for use with MongoDB. - """ - data = {} - for field_name, field in self._fields.items(): - value = getattr(self, field_name, None) - if value is not None: - data[field.db_field] = field.to_mongo(value) - # Only add _cls and _types if allow_inheritance is not False - if not (hasattr(self, '_meta') and - self._meta.get('allow_inheritance', ALLOW_INHERITANCE) == False): - data['_cls'] = self._class_name - data['_types'] = self._superclasses.keys() + [self._class_name] - if '_id' in data and data['_id'] is None: - del data['_id'] - - if not self._dynamic: - return data - - for name, field in self._dynamic_fields.items(): - data[name] = field.to_mongo(self._data.get(name, None)) - return data - - @classmethod - def _get_collection_name(cls): - """Returns the collection name for this class. - """ - return cls._meta.get('collection', None) - - @classmethod - def _from_son(cls, son): - """Create an instance of a Document (subclass) from a PyMongo SON. 
- """ - # get the class name from the document, falling back to the given - # class if unavailable - class_name = son.get('_cls', cls._class_name) - data = dict(("%s" % key, value) for key, value in son.items()) - if not UNICODE_KWARGS: - # python 2.6.4 and lower cannot handle unicode keys - # passed to class constructor example: cls(**data) - to_str_keys_recursive(data) - - if '_types' in data: - del data['_types'] - - if '_cls' in data: - del data['_cls'] - - # Return correct subclass for document type - if class_name != cls._class_name: - cls = get_document(class_name) - - changed_fields = [] - errors_dict = {} - - for field_name, field in cls._fields.items(): - if field.db_field in data: - value = data[field.db_field] - try: - data[field_name] = (value if value is None - else field.to_python(value)) - if field_name != field.db_field: - del data[field.db_field] - except (AttributeError, ValueError), e: - errors_dict[field_name] = e - elif field.default: - default = field.default - if callable(default): - default = default() - if isinstance(default, BaseDocument): - changed_fields.append(field_name) - - if errors_dict: - errors = "\n".join(["%s - %s" % (k, v) - for k, v in errors_dict.items()]) - msg = ("Invalid data to create a `%s` instance.\n%s" - % (cls._class_name, errors)) - raise InvalidDocumentError(msg) - - obj = cls(**data) - obj._changed_fields = changed_fields - obj._created = False - return obj - - def _mark_as_changed(self, key): - """Marks a key as explicitly changed by the user - """ - if not key: - return - key = self._db_field_map.get(key, key) - if (hasattr(self, '_changed_fields') and - key not in self._changed_fields): - self._changed_fields.append(key) - - def _get_changed_fields(self, key='', inspected=None): - """Returns a list of all fields that have explicitly been changed. 
- """ - EmbeddedDocument = _import_class("EmbeddedDocument") - DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument") - _changed_fields = [] - _changed_fields += getattr(self, '_changed_fields', []) - - inspected = inspected or set() - if hasattr(self, 'id'): - if self.id in inspected: - return _changed_fields - inspected.add(self.id) - - field_list = self._fields.copy() - if self._dynamic: - field_list.update(self._dynamic_fields) - - for field_name in field_list: - - db_field_name = self._db_field_map.get(field_name, field_name) - key = '%s.' % db_field_name - field = self._data.get(field_name, None) - if hasattr(field, 'id'): - if field.id in inspected: - continue - inspected.add(field.id) - - if (isinstance(field, (EmbeddedDocument, DynamicEmbeddedDocument)) - and db_field_name not in _changed_fields): - # Find all embedded fields that have been changed - changed = field._get_changed_fields(key, inspected) - _changed_fields += ["%s%s" % (key, k) for k in changed if k] - elif (isinstance(field, (list, tuple, dict)) and - db_field_name not in _changed_fields): - # Loop list / dict fields as they contain documents - # Determine the iterator to use - if not hasattr(field, 'items'): - iterator = enumerate(field) - else: - iterator = field.iteritems() - for index, value in iterator: - if not hasattr(value, '_get_changed_fields'): - continue - list_key = "%s%s." % (key, index) - changed = value._get_changed_fields(list_key, inspected) - _changed_fields += ["%s%s" % (list_key, k) - for k in changed if k] - return _changed_fields - - def _delta(self): - """Returns the delta (set, unset) of the changes for a document. - Gets any values that have been explicitly changed. 
- """ - # Handles cases where not loaded from_son but has _id - doc = self.to_mongo() - set_fields = self._get_changed_fields() - set_data = {} - unset_data = {} - parts = [] - if hasattr(self, '_changed_fields'): - set_data = {} - # Fetch each set item from its path - for path in set_fields: - parts = path.split('.') - d = doc - new_path = [] - for p in parts: - if isinstance(d, DBRef): - break - elif p.isdigit(): - d = d[int(p)] - elif hasattr(d, 'get'): - d = d.get(p) - new_path.append(p) - path = '.'.join(new_path) - set_data[path] = d - else: - set_data = doc - if '_id' in set_data: - del(set_data['_id']) - - # Determine if any changed items were actually unset. - for path, value in set_data.items(): - if value or isinstance(value, bool): - continue - - # If we've set a value that ain't the default value dont unset it. - default = None - if (self._dynamic and len(parts) and - parts[0] in self._dynamic_fields): - del(set_data[path]) - unset_data[path] = 1 - continue - elif path in self._fields: - default = self._fields[path].default - else: # Perform a full lookup for lists / embedded lookups - d = self - parts = path.split('.') - db_field_name = parts.pop() - for p in parts: - if p.isdigit(): - d = d[int(p)] - elif (hasattr(d, '__getattribute__') and - not isinstance(d, dict)): - real_path = d._reverse_db_field_map.get(p, p) - d = getattr(d, real_path) - else: - d = d.get(p) - - if hasattr(d, '_fields'): - field_name = d._reverse_db_field_map.get(db_field_name, - db_field_name) - - if field_name in d._fields: - default = d._fields.get(field_name).default - else: - default = None - - if default is not None: - if callable(default): - default = default() - if default != value: - continue - - del(set_data[path]) - unset_data[path] = 1 - return set_data, unset_data - - @classmethod - def _geo_indices(cls, inspected=None): - inspected = inspected or [] - geo_indices = [] - inspected.append(cls) - - EmbeddedDocumentField = _import_class("EmbeddedDocumentField") - 
GeoPointField = _import_class("GeoPointField") - - for field in cls._fields.values(): - if not isinstance(field, (EmbeddedDocumentField, GeoPointField)): - continue - if hasattr(field, 'document_type'): - field_cls = field.document_type - if field_cls in inspected: - continue - if hasattr(field_cls, '_geo_indices'): - geo_indices += field_cls._geo_indices(inspected) - elif field._geo_index: - geo_indices.append(field) - return geo_indices - - def __getstate__(self): - removals = ("get_%s_display" % k - for k, v in self._fields.items() if v.choices) - for k in removals: - if hasattr(self, k): - delattr(self, k) - return self.__dict__ - - def __setstate__(self, __dict__): - self.__dict__ = __dict__ - self.__set_field_display() - - def __set_field_display(self): - """Dynamically set the display value for a field with choices""" - for attr_name, field in self._fields.items(): - if field.choices: - setattr(self, - 'get_%s_display' % attr_name, - partial(self.__get_field_display, field=field)) - - def __get_field_display(self, field): - """Returns the display value for a choice field""" - value = getattr(self, field.name) - if field.choices and isinstance(field.choices[0], (list, tuple)): - return dict(field.choices).get(value, value) - return value - - def __iter__(self): - return iter(self._fields) - - def __getitem__(self, name): - """Dictionary-style field access, return a field's value if present. - """ - try: - if name in self._fields: - return getattr(self, name) - except AttributeError: - pass - raise KeyError(name) - - def __setitem__(self, name, value): - """Dictionary-style field access, set a field's value. 
- """ - # Ensure that the field exists before settings its value - if name not in self._fields: - raise KeyError(name) - return setattr(self, name, value) - - def __contains__(self, name): - try: - val = getattr(self, name) - return val is not None - except AttributeError: - return False - - def __len__(self): - return len(self._data) - - def __repr__(self): - try: - u = self.__str__() - except (UnicodeEncodeError, UnicodeDecodeError): - u = '[Bad Unicode data]' - repr_type = type(u) - return repr_type('<%s: %s>' % (self.__class__.__name__, u)) - - def __str__(self): - if hasattr(self, '__unicode__'): - if PY3: - return self.__unicode__() - else: - return unicode(self).encode('utf-8') - return txt_type('%s object' % self.__class__.__name__) - - def __eq__(self, other): - if isinstance(other, self.__class__) and hasattr(other, 'id'): - if self.id == other.id: - return True - return False - - def __ne__(self, other): - return not self.__eq__(other) - - def __hash__(self): - if self.pk is None: - # For new object - return super(BaseDocument, self).__hash__() - else: - return hash(self.pk) - - -class BasesTuple(tuple): - """Special class to handle introspection of bases tuple in __new__""" - pass - - -class BaseList(list): - """A special list so we can watch any changes - """ - - _dereferenced = False - _instance = None - _name = None - - def __init__(self, list_items, instance, name): - self._instance = weakref.proxy(instance) - self._name = name - return super(BaseList, self).__init__(list_items) - - def __setitem__(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseList, self).__setitem__(*args, **kwargs) - - def __delitem__(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseList, self).__delitem__(*args, **kwargs) - - def __getstate__(self): - self.observer = None - return self - - def __setstate__(self, state): - self = state - return self - - def append(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseList, 
self).append(*args, **kwargs) - - def extend(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseList, self).extend(*args, **kwargs) - - def insert(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseList, self).insert(*args, **kwargs) - - def pop(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseList, self).pop(*args, **kwargs) - - def remove(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseList, self).remove(*args, **kwargs) - - def reverse(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseList, self).reverse(*args, **kwargs) - - def sort(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseList, self).sort(*args, **kwargs) - - def _mark_as_changed(self): - if hasattr(self._instance, '_mark_as_changed'): - self._instance._mark_as_changed(self._name) - - -class BaseDict(dict): - """A special dict so we can watch any changes - """ - - _dereferenced = False - _instance = None - _name = None - - def __init__(self, dict_items, instance, name): - self._instance = weakref.proxy(instance) - self._name = name - return super(BaseDict, self).__init__(dict_items) - - def __setitem__(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseDict, self).__setitem__(*args, **kwargs) - - def __delete__(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseDict, self).__delete__(*args, **kwargs) - - def __delitem__(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseDict, self).__delitem__(*args, **kwargs) - - def __delattr__(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseDict, self).__delattr__(*args, **kwargs) - - def __getstate__(self): - self.instance = None - self._dereferenced = False - return self - - def __setstate__(self, state): - self = state - return self - - def clear(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseDict, self).clear(*args, **kwargs) - - def pop(self, *args, 
**kwargs): - self._mark_as_changed() - return super(BaseDict, self).pop(*args, **kwargs) - - def popitem(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseDict, self).popitem(*args, **kwargs) - - def update(self, *args, **kwargs): - self._mark_as_changed() - return super(BaseDict, self).update(*args, **kwargs) - - def _mark_as_changed(self): - if hasattr(self._instance, '_mark_as_changed'): - self._instance._mark_as_changed(self._name) - - -def _import_class(cls_name): - """Cached mechanism for imports""" - if cls_name in _class_registry: - return _class_registry.get(cls_name) - - doc_classes = ['Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument'] - field_classes = ['DictField', 'DynamicField', 'EmbeddedDocumentField', - 'GenericReferenceField', 'GeoPointField', - 'ReferenceField'] - queryset_classes = ['OperationError'] - deref_classes = ['DeReference'] - - if cls_name in doc_classes: - from mongoengine import document as module - import_classes = doc_classes - elif cls_name in field_classes: - from mongoengine import fields as module - import_classes = field_classes - elif cls_name in queryset_classes: - from mongoengine import queryset as module - import_classes = queryset_classes - elif cls_name in deref_classes: - from mongoengine import dereference as module - import_classes = deref_classes - else: - raise ValueError('No import set for: ' % cls_name) - - for cls in import_classes: - _class_registry[cls] = getattr(module, cls) - - return _class_registry.get(cls_name) diff --git a/mongoengine/base/__init__.py b/mongoengine/base/__init__.py new file mode 100644 index 0000000..1d4a6eb --- /dev/null +++ b/mongoengine/base/__init__.py @@ -0,0 +1,5 @@ +from .common import * +from .datastructures import * +from .document import * +from .fields import * +from .metaclasses import * diff --git a/mongoengine/base/common.py b/mongoengine/base/common.py new file mode 100644 index 0000000..648561b --- /dev/null +++ b/mongoengine/base/common.py @@ 
-0,0 +1,25 @@ +from mongoengine.errors import NotRegistered + +__all__ = ('ALLOW_INHERITANCE', 'get_document', '_document_registry') + +ALLOW_INHERITANCE = True + +_document_registry = {} + + +def get_document(name): + doc = _document_registry.get(name, None) + if not doc: + # Possible old style names + end = ".%s" % name + possible_match = [k for k in _document_registry.keys() + if k.endswith(end)] + if len(possible_match) == 1: + doc = _document_registry.get(possible_match.pop(), None) + if not doc: + raise NotRegistered(""" + `%s` has not been registered in the document registry. + Importing the document class automatically registers it, has it + been imported? + """.strip() % name) + return doc diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py new file mode 100644 index 0000000..9a7620e --- /dev/null +++ b/mongoengine/base/datastructures.py @@ -0,0 +1,124 @@ +import weakref + +__all__ = ("BaseDict", "BaseList") + + +class BaseDict(dict): + """A special dict so we can watch any changes + """ + + _dereferenced = False + _instance = None + _name = None + + def __init__(self, dict_items, instance, name): + self._instance = weakref.proxy(instance) + self._name = name + return super(BaseDict, self).__init__(dict_items) + + def __setitem__(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseDict, self).__setitem__(*args, **kwargs) + + def __delete__(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseDict, self).__delete__(*args, **kwargs) + + def __delitem__(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseDict, self).__delitem__(*args, **kwargs) + + def __delattr__(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseDict, self).__delattr__(*args, **kwargs) + + def __getstate__(self): + self.instance = None + self._dereferenced = False + return self + + def __setstate__(self, state): + self = state + return self + + def clear(self, *args, **kwargs): + 
self._mark_as_changed() + return super(BaseDict, self).clear(*args, **kwargs) + + def pop(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseDict, self).pop(*args, **kwargs) + + def popitem(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseDict, self).popitem(*args, **kwargs) + + def update(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseDict, self).update(*args, **kwargs) + + def _mark_as_changed(self): + if hasattr(self._instance, '_mark_as_changed'): + self._instance._mark_as_changed(self._name) + + +class BaseList(list): + """A special list so we can watch any changes + """ + + _dereferenced = False + _instance = None + _name = None + + def __init__(self, list_items, instance, name): + self._instance = weakref.proxy(instance) + self._name = name + return super(BaseList, self).__init__(list_items) + + def __setitem__(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseList, self).__setitem__(*args, **kwargs) + + def __delitem__(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseList, self).__delitem__(*args, **kwargs) + + def __getstate__(self): + self.observer = None + return self + + def __setstate__(self, state): + self = state + return self + + def append(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseList, self).append(*args, **kwargs) + + def extend(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseList, self).extend(*args, **kwargs) + + def insert(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseList, self).insert(*args, **kwargs) + + def pop(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseList, self).pop(*args, **kwargs) + + def remove(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseList, self).remove(*args, **kwargs) + + def reverse(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseList, self).reverse(*args, **kwargs) + + def sort(self, 
*args, **kwargs): + self._mark_as_changed() + return super(BaseList, self).sort(*args, **kwargs) + + def _mark_as_changed(self): + if hasattr(self._instance, '_mark_as_changed'): + self._instance._mark_as_changed(self._name) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py new file mode 100644 index 0000000..af97e1f --- /dev/null +++ b/mongoengine/base/document.py @@ -0,0 +1,644 @@ +import operator +from functools import partial + +import pymongo +from bson.dbref import DBRef + +from mongoengine import signals +from mongoengine.common import _import_class +from mongoengine.errors import (ValidationError, InvalidDocumentError, + LookUpError) +from mongoengine.python_support import (PY3, UNICODE_KWARGS, txt_type, + to_str_keys_recursive) + +from .common import get_document, ALLOW_INHERITANCE +from .datastructures import BaseDict, BaseList +from .fields import ComplexBaseField + +__all__ = ('BaseDocument', ) + + +class BaseDocument(object): + + _dynamic = False + _created = True + _dynamic_lock = True + _initialised = False + + def __init__(self, **values): + signals.pre_init.send(self.__class__, document=self, values=values) + + self._data = {} + + # Assign default values to instance + for key, field in self._fields.iteritems(): + if self._db_field_map.get(key, key) in values: + continue + value = getattr(self, key, None) + setattr(self, key, value) + + # Set passed values after initialisation + if self._dynamic: + self._dynamic_fields = {} + dynamic_data = {} + for key, value in values.iteritems(): + if key in self._fields or key == '_id': + setattr(self, key, value) + elif self._dynamic: + dynamic_data[key] = value + else: + for key, value in values.iteritems(): + key = self._reverse_db_field_map.get(key, key) + setattr(self, key, value) + + # Set any get_fieldname_display methods + self.__set_field_display() + + if self._dynamic: + self._dynamic_lock = False + for key, value in dynamic_data.iteritems(): + setattr(self, key, value) + + # 
Flag initialised + self._initialised = True + signals.post_init.send(self.__class__, document=self) + + def __setattr__(self, name, value): + # Handle dynamic data only if an initialised dynamic document + if self._dynamic and not self._dynamic_lock: + + field = None + if not hasattr(self, name) and not name.startswith('_'): + DynamicField = _import_class("DynamicField") + field = DynamicField(db_field=name) + field.name = name + self._dynamic_fields[name] = field + + if not name.startswith('_'): + value = self.__expand_dynamic_values(name, value) + + # Handle marking data as changed + if name in self._dynamic_fields: + self._data[name] = value + if hasattr(self, '_changed_fields'): + self._mark_as_changed(name) + + if (self._is_document and not self._created and + name in self._meta.get('shard_key', tuple()) and + self._data.get(name) != value): + OperationError = _import_class('OperationError') + msg = "Shard Keys are immutable. Tried to update %s" % name + raise OperationError(msg) + + super(BaseDocument, self).__setattr__(name, value) + + def __getstate__(self): + removals = ("get_%s_display" % k + for k, v in self._fields.items() if v.choices) + for k in removals: + if hasattr(self, k): + delattr(self, k) + return self.__dict__ + + def __setstate__(self, __dict__): + self.__dict__ = __dict__ + self.__set_field_display() + + def __iter__(self): + return iter(self._fields) + + def __getitem__(self, name): + """Dictionary-style field access, return a field's value if present. + """ + try: + if name in self._fields: + return getattr(self, name) + except AttributeError: + pass + raise KeyError(name) + + def __setitem__(self, name, value): + """Dictionary-style field access, set a field's value. 
+ """ + # Ensure that the field exists before settings its value + if name not in self._fields: + raise KeyError(name) + return setattr(self, name, value) + + def __contains__(self, name): + try: + val = getattr(self, name) + return val is not None + except AttributeError: + return False + + def __len__(self): + return len(self._data) + + def __repr__(self): + try: + u = self.__str__() + except (UnicodeEncodeError, UnicodeDecodeError): + u = '[Bad Unicode data]' + repr_type = type(u) + return repr_type('<%s: %s>' % (self.__class__.__name__, u)) + + def __str__(self): + if hasattr(self, '__unicode__'): + if PY3: + return self.__unicode__() + else: + return unicode(self).encode('utf-8') + return txt_type('%s object' % self.__class__.__name__) + + def __eq__(self, other): + if isinstance(other, self.__class__) and hasattr(other, 'id'): + if self.id == other.id: + return True + return False + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + if self.pk is None: + # For new object + return super(BaseDocument, self).__hash__() + else: + return hash(self.pk) + + def to_mongo(self): + """Return data dictionary ready for use with MongoDB. + """ + data = {} + for field_name, field in self._fields.items(): + value = getattr(self, field_name, None) + if value is not None: + data[field.db_field] = field.to_mongo(value) + # Only add _cls if allow_inheritance is not False + if not (hasattr(self, '_meta') and + self._meta.get('allow_inheritance', ALLOW_INHERITANCE) == False): + data['_cls'] = self._class_name + if '_id' in data and data['_id'] is None: + del data['_id'] + + if not self._dynamic: + return data + + for name, field in self._dynamic_fields.items(): + data[name] = field.to_mongo(self._data.get(name, None)) + return data + + def validate(self): + """Ensure that all fields' values are valid and that required fields + are present. 
+ """ + # Get a list of tuples of field names and their current values + fields = [(field, getattr(self, name)) + for name, field in self._fields.items()] + + # Ensure that each field is matched to a valid value + errors = {} + for field, value in fields: + if value is not None: + try: + field._validate(value) + except ValidationError, error: + errors[field.name] = error.errors or error + except (ValueError, AttributeError, AssertionError), error: + errors[field.name] = error + elif field.required: + errors[field.name] = ValidationError('Field is required', + field_name=field.name) + if errors: + raise ValidationError('ValidationError', errors=errors) + + def __expand_dynamic_values(self, name, value): + """expand any dynamic values to their correct types / values""" + if not isinstance(value, (dict, list, tuple)): + return value + + is_list = False + if not hasattr(value, 'items'): + is_list = True + value = dict([(k, v) for k, v in enumerate(value)]) + + if not is_list and '_cls' in value: + cls = get_document(value['_cls']) + return cls(**value) + + data = {} + for k, v in value.items(): + key = name if is_list else k + data[k] = self.__expand_dynamic_values(key, v) + + if is_list: # Convert back to a list + data_items = sorted(data.items(), key=operator.itemgetter(0)) + value = [v for k, v in data_items] + else: + value = data + + # Convert lists / values so we can watch for any changes on them + if (isinstance(value, (list, tuple)) and + not isinstance(value, BaseList)): + value = BaseList(value, self, name) + elif isinstance(value, dict) and not isinstance(value, BaseDict): + value = BaseDict(value, self, name) + + return value + + def _mark_as_changed(self, key): + """Marks a key as explicitly changed by the user + """ + if not key: + return + key = self._db_field_map.get(key, key) + if (hasattr(self, '_changed_fields') and + key not in self._changed_fields): + self._changed_fields.append(key) + + def _get_changed_fields(self, key='', inspected=None): + 
"""Returns a list of all fields that have explicitly been changed. + """ + EmbeddedDocument = _import_class("EmbeddedDocument") + DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument") + _changed_fields = [] + _changed_fields += getattr(self, '_changed_fields', []) + + inspected = inspected or set() + if hasattr(self, 'id'): + if self.id in inspected: + return _changed_fields + inspected.add(self.id) + + field_list = self._fields.copy() + if self._dynamic: + field_list.update(self._dynamic_fields) + + for field_name in field_list: + + db_field_name = self._db_field_map.get(field_name, field_name) + key = '%s.' % db_field_name + field = self._data.get(field_name, None) + if hasattr(field, 'id'): + if field.id in inspected: + continue + inspected.add(field.id) + + if (isinstance(field, (EmbeddedDocument, DynamicEmbeddedDocument)) + and db_field_name not in _changed_fields): + # Find all embedded fields that have been changed + changed = field._get_changed_fields(key, inspected) + _changed_fields += ["%s%s" % (key, k) for k in changed if k] + elif (isinstance(field, (list, tuple, dict)) and + db_field_name not in _changed_fields): + # Loop list / dict fields as they contain documents + # Determine the iterator to use + if not hasattr(field, 'items'): + iterator = enumerate(field) + else: + iterator = field.iteritems() + for index, value in iterator: + if not hasattr(value, '_get_changed_fields'): + continue + list_key = "%s%s." % (key, index) + changed = value._get_changed_fields(list_key, inspected) + _changed_fields += ["%s%s" % (list_key, k) + for k in changed if k] + return _changed_fields + + def _delta(self): + """Returns the delta (set, unset) of the changes for a document. + Gets any values that have been explicitly changed. 
+ """ + # Handles cases where not loaded from_son but has _id + doc = self.to_mongo() + set_fields = self._get_changed_fields() + set_data = {} + unset_data = {} + parts = [] + if hasattr(self, '_changed_fields'): + set_data = {} + # Fetch each set item from its path + for path in set_fields: + parts = path.split('.') + d = doc + new_path = [] + for p in parts: + if isinstance(d, DBRef): + break + elif p.isdigit(): + d = d[int(p)] + elif hasattr(d, 'get'): + d = d.get(p) + new_path.append(p) + path = '.'.join(new_path) + set_data[path] = d + else: + set_data = doc + if '_id' in set_data: + del(set_data['_id']) + + # Determine if any changed items were actually unset. + for path, value in set_data.items(): + if value or isinstance(value, bool): + continue + + # If we've set a value that ain't the default value dont unset it. + default = None + if (self._dynamic and len(parts) and + parts[0] in self._dynamic_fields): + del(set_data[path]) + unset_data[path] = 1 + continue + elif path in self._fields: + default = self._fields[path].default + else: # Perform a full lookup for lists / embedded lookups + d = self + parts = path.split('.') + db_field_name = parts.pop() + for p in parts: + if p.isdigit(): + d = d[int(p)] + elif (hasattr(d, '__getattribute__') and + not isinstance(d, dict)): + real_path = d._reverse_db_field_map.get(p, p) + d = getattr(d, real_path) + else: + d = d.get(p) + + if hasattr(d, '_fields'): + field_name = d._reverse_db_field_map.get(db_field_name, + db_field_name) + + if field_name in d._fields: + default = d._fields.get(field_name).default + else: + default = None + + if default is not None: + if callable(default): + default = default() + if default != value: + continue + + del(set_data[path]) + unset_data[path] = 1 + return set_data, unset_data + + @classmethod + def _get_collection_name(cls): + """Returns the collection name for this class. 
+ """ + return cls._meta.get('collection', None) + + @classmethod + def _from_son(cls, son): + """Create an instance of a Document (subclass) from a PyMongo SON. + """ + # get the class name from the document, falling back to the given + # class if unavailable + class_name = son.get('_cls', cls._class_name) + data = dict(("%s" % key, value) for key, value in son.items()) + if not UNICODE_KWARGS: + # python 2.6.4 and lower cannot handle unicode keys + # passed to class constructor example: cls(**data) + to_str_keys_recursive(data) + + if '_cls' in data: + del data['_cls'] + + # Return correct subclass for document type + if class_name != cls._class_name: + cls = get_document(class_name) + + changed_fields = [] + errors_dict = {} + + for field_name, field in cls._fields.items(): + if field.db_field in data: + value = data[field.db_field] + try: + data[field_name] = (value if value is None + else field.to_python(value)) + if field_name != field.db_field: + del data[field.db_field] + except (AttributeError, ValueError), e: + errors_dict[field_name] = e + elif field.default: + default = field.default + if callable(default): + default = default() + if isinstance(default, BaseDocument): + changed_fields.append(field_name) + + if errors_dict: + errors = "\n".join(["%s - %s" % (k, v) + for k, v in errors_dict.items()]) + msg = ("Invalid data to create a `%s` instance.\n%s" + % (cls._class_name, errors)) + raise InvalidDocumentError(msg) + + obj = cls(**data) + obj._changed_fields = changed_fields + obj._created = False + return obj + + @classmethod + def _build_index_spec(cls, spec): + """Build a PyMongo index spec from a MongoEngine index spec. 
+ """ + if isinstance(spec, basestring): + spec = {'fields': [spec]} + elif isinstance(spec, (list, tuple)): + spec = {'fields': list(spec)} + elif isinstance(spec, dict): + spec = dict(spec) + + index_list = [] + direction = None + + # Check to see if we need to include _cls + allow_inheritance = cls._meta.get('allow_inheritance', + ALLOW_INHERITANCE) != False + include_cls = allow_inheritance and not spec.get('sparse', False) + + for key in spec['fields']: + # If inherited spec continue + if isinstance(key, (list, tuple)): + continue + + # ASCENDING from +, + # DESCENDING from - + # GEO2D from * + direction = pymongo.ASCENDING + if key.startswith("-"): + direction = pymongo.DESCENDING + elif key.startswith("*"): + direction = pymongo.GEO2D + if key.startswith(("+", "-", "*")): + key = key[1:] + + # Use real field name, do it manually because we need field + # objects for the next part (list field checking) + parts = key.split('.') + if parts in (['pk'], ['id'], ['_id']): + key = '_id' + fields = [] + else: + fields = cls._lookup_field(parts) + parts = [field if field == '_id' else field.db_field + for field in fields] + key = '.'.join(parts) + index_list.append((key, direction)) + + # Don't add cls to a geo index + if include_cls and direction is not pymongo.GEO2D: + index_list.insert(0, ('_cls', 1)) + + spec['fields'] = index_list + if spec.get('sparse', False) and len(spec['fields']) > 1: + raise ValueError( + 'Sparse indexes can only have one field in them. 
' + 'See https://jira.mongodb.org/browse/SERVER-2193') + + return spec + + @classmethod + def _unique_with_indexes(cls, namespace=""): + """ + Find and set unique indexes + """ + unique_indexes = [] + for field_name, field in cls._fields.items(): + # Generate a list of indexes needed by uniqueness constraints + if field.unique: + field.required = True + unique_fields = [field.db_field] + + # Add any unique_with fields to the back of the index spec + if field.unique_with: + if isinstance(field.unique_with, basestring): + field.unique_with = [field.unique_with] + + # Convert unique_with field names to real field names + unique_with = [] + for other_name in field.unique_with: + parts = other_name.split('.') + # Lookup real name + parts = cls._lookup_field(parts) + name_parts = [part.db_field for part in parts] + unique_with.append('.'.join(name_parts)) + # Unique field should be required + parts[-1].required = True + unique_fields += unique_with + + # Add the new index to the list + index = [("%s%s" % (namespace, f), pymongo.ASCENDING) + for f in unique_fields] + unique_indexes.append(index) + + # Grab any embedded document field unique indexes + if (field.__class__.__name__ == "EmbeddedDocumentField" and + field.document_type != cls): + field_namespace = "%s." % field_name + doc_cls = field.document_type + unique_indexes += doc_cls._unique_with_indexes(field_namespace) + + return unique_indexes + + @classmethod + def _lookup_field(cls, parts): + """Lookup a field based on its attribute and return a list containing + the field's parents and the field. 
+ """ + if not isinstance(parts, (list, tuple)): + parts = [parts] + fields = [] + field = None + + for field_name in parts: + # Handle ListField indexing: + if field_name.isdigit(): + new_field = field.field + fields.append(field_name) + continue + + if field is None: + # Look up first field from the document + if field_name == 'pk': + # Deal with "primary key" alias + field_name = cls._meta['id_field'] + if field_name in cls._fields: + field = cls._fields[field_name] + elif cls._dynamic: + DynamicField = _import_class('DynamicField') + field = DynamicField(db_field=field_name) + else: + raise LookUpError('Cannot resolve field "%s"' + % field_name) + else: + ReferenceField = _import_class('ReferenceField') + GenericReferenceField = _import_class('GenericReferenceField') + if isinstance(field, (ReferenceField, GenericReferenceField)): + raise LookUpError('Cannot perform join in mongoDB: %s' % + '__'.join(parts)) + if hasattr(getattr(field, 'field', None), 'lookup_member'): + new_field = field.field.lookup_member(field_name) + else: + # Look up subfield on the previous field + new_field = field.lookup_member(field_name) + if not new_field and isinstance(field, ComplexBaseField): + fields.append(field_name) + continue + elif not new_field: + raise LookUpError('Cannot resolve field "%s"' + % field_name) + field = new_field # update field to the new field type + fields.append(field) + return fields + + @classmethod + def _translate_field_name(cls, field, sep='.'): + """Translate a field attribute name to a database field name. 
+ """ + parts = field.split(sep) + parts = [f.db_field for f in cls._lookup_field(parts)] + return '.'.join(parts) + + @classmethod + def _geo_indices(cls, inspected=None): + inspected = inspected or [] + geo_indices = [] + inspected.append(cls) + + EmbeddedDocumentField = _import_class("EmbeddedDocumentField") + GeoPointField = _import_class("GeoPointField") + + for field in cls._fields.values(): + if not isinstance(field, (EmbeddedDocumentField, GeoPointField)): + continue + if hasattr(field, 'document_type'): + field_cls = field.document_type + if field_cls in inspected: + continue + if hasattr(field_cls, '_geo_indices'): + geo_indices += field_cls._geo_indices(inspected) + elif field._geo_index: + geo_indices.append(field) + return geo_indices + + def __set_field_display(self): + """Dynamically set the display value for a field with choices""" + for attr_name, field in self._fields.items(): + if field.choices: + setattr(self, + 'get_%s_display' % attr_name, + partial(self.__get_field_display, field=field)) + + def __get_field_display(self, field): + """Returns the display value for a choice field""" + value = getattr(self, field.name) + if field.choices and isinstance(field.choices[0], (list, tuple)): + return dict(field.choices).get(value, value) + return value diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py new file mode 100644 index 0000000..44f5e13 --- /dev/null +++ b/mongoengine/base/fields.py @@ -0,0 +1,371 @@ +import operator +import warnings + +from bson import DBRef, ObjectId + +from mongoengine.common import _import_class +from mongoengine.errors import ValidationError + +from .common import ALLOW_INHERITANCE +from .datastructures import BaseDict, BaseList + +__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField") + + +class BaseField(object): + """A base class for fields in a MongoDB document. Instances of this class + may be added to subclasses of `Document` to define a document's schema. + + .. 
versionchanged:: 0.5 - added verbose and help text + """ + + name = None + _geo_index = False + + # These track each time a Field instance is created. Used to retain order. + # The auto_creation_counter is used for fields that MongoEngine implicitly + # creates, creation_counter is used for all user-specified fields. + creation_counter = 0 + auto_creation_counter = -1 + + def __init__(self, db_field=None, name=None, required=False, default=None, + unique=False, unique_with=None, primary_key=False, + validation=None, choices=None, verbose_name=None, + help_text=None): + self.db_field = (db_field or name) if not primary_key else '_id' + if name: + msg = "Fields' 'name' attribute deprecated in favour of 'db_field'" + warnings.warn(msg, DeprecationWarning) + self.name = None + self.required = required or primary_key + self.default = default + self.unique = bool(unique or unique_with) + self.unique_with = unique_with + self.primary_key = primary_key + self.validation = validation + self.choices = choices + self.verbose_name = verbose_name + self.help_text = help_text + + # Adjust the appropriate creation counter, and save our local copy. + if self.db_field == '_id': + self.creation_counter = BaseField.auto_creation_counter + BaseField.auto_creation_counter -= 1 + else: + self.creation_counter = BaseField.creation_counter + BaseField.creation_counter += 1 + + def __get__(self, instance, owner): + """Descriptor for retrieving a value from a field in a document. Do + any necessary conversion between Python and MongoDB types. + """ + if instance is None: + # Document class being used rather than a document object + return self + + # Get value from document instance if available, if not use default + value = instance._data.get(self.name) + + if value is None: + value = self.default + # Allow callable default values + if callable(value): + value = value() + + return value + + def __set__(self, instance, value): + """Descriptor for assigning a value to a field in a document. 
+ """ + instance._data[self.name] = value + if instance._initialised: + instance._mark_as_changed(self.name) + + def error(self, message="", errors=None, field_name=None): + """Raises a ValidationError. + """ + field_name = field_name if field_name else self.name + raise ValidationError(message, errors=errors, field_name=field_name) + + def to_python(self, value): + """Convert a MongoDB-compatible type to a Python type. + """ + return value + + def to_mongo(self, value): + """Convert a Python type to a MongoDB-compatible type. + """ + return self.to_python(value) + + def prepare_query_value(self, op, value): + """Prepare a value that is being used in a query for PyMongo. + """ + return value + + def validate(self, value): + """Perform validation on a value. + """ + pass + + def _validate(self, value): + Document = _import_class('Document') + EmbeddedDocument = _import_class('EmbeddedDocument') + # check choices + if self.choices: + is_cls = isinstance(value, (Document, EmbeddedDocument)) + value_to_check = value.__class__ if is_cls else value + err_msg = 'an instance' if is_cls else 'one' + if isinstance(self.choices[0], (list, tuple)): + option_keys = [k for k, v in self.choices] + if value_to_check not in option_keys: + msg = ('Value must be %s of %s' % + (err_msg, unicode(option_keys))) + self.error(msg) + elif value_to_check not in self.choices: + msg = ('Value must be %s of %s' % + (err_msg, unicode(self.choices))) + self.error() + + # check validation argument + if self.validation is not None: + if callable(self.validation): + if not self.validation(value): + self.error('Value does not match custom validation method') + else: + raise ValueError('validation argument for "%s" must be a ' + 'callable.' % self.name) + + self.validate(value) + + +class ComplexBaseField(BaseField): + """Handles complex fields, such as lists / dictionaries. + + Allows for nesting of embedded documents inside complex types. 
+ Handles the lazy dereferencing of a queryset by lazily dereferencing all + items in a list / dict rather than one at a time. + + .. versionadded:: 0.5 + """ + + field = None + __dereference = False + + def __get__(self, instance, owner): + """Descriptor to automatically dereference references. + """ + if instance is None: + # Document class being used rather than a document object + return self + + ReferenceField = _import_class('ReferenceField') + GenericReferenceField = _import_class('GenericReferenceField') + dereference = self.field is None or isinstance(self.field, + (GenericReferenceField, ReferenceField)) + if not self._dereference and instance._initialised and dereference: + instance._data[self.name] = self._dereference( + instance._data.get(self.name), max_depth=1, instance=instance, + name=self.name + ) + + value = super(ComplexBaseField, self).__get__(instance, owner) + + # Convert lists / values so we can watch for any changes on them + if (isinstance(value, (list, tuple)) and + not isinstance(value, BaseList)): + value = BaseList(value, instance, self.name) + instance._data[self.name] = value + elif isinstance(value, dict) and not isinstance(value, BaseDict): + value = BaseDict(value, instance, self.name) + instance._data[self.name] = value + + if (instance._initialised and isinstance(value, (BaseList, BaseDict)) + and not value._dereferenced): + value = self._dereference( + value, max_depth=1, instance=instance, name=self.name + ) + value._dereferenced = True + instance._data[self.name] = value + + return value + + def __set__(self, instance, value): + """Descriptor for assigning a value to a field in a document. + """ + instance._data[self.name] = value + instance._mark_as_changed(self.name) + + def to_python(self, value): + """Convert a MongoDB-compatible type to a Python type. 
+ """ + Document = _import_class('Document') + + if isinstance(value, basestring): + return value + + if hasattr(value, 'to_python'): + return value.to_python() + + is_list = False + if not hasattr(value, 'items'): + try: + is_list = True + value = dict([(k, v) for k, v in enumerate(value)]) + except TypeError: # Not iterable return the value + return value + + if self.field: + value_dict = dict([(key, self.field.to_python(item)) + for key, item in value.items()]) + else: + value_dict = {} + for k, v in value.items(): + if isinstance(v, Document): + # We need the id from the saved object to create the DBRef + if v.pk is None: + self.error('You can only reference documents once they' + ' have been saved to the database') + collection = v._get_collection_name() + value_dict[k] = DBRef(collection, v.pk) + elif hasattr(v, 'to_python'): + value_dict[k] = v.to_python() + else: + value_dict[k] = self.to_python(v) + + if is_list: # Convert back to a list + return [v for k, v in sorted(value_dict.items(), + key=operator.itemgetter(0))] + return value_dict + + def to_mongo(self, value): + """Convert a Python type to a MongoDB-compatible type. 
+ """ + Document = _import_class("Document") + + if isinstance(value, basestring): + return value + + if hasattr(value, 'to_mongo'): + return value.to_mongo() + + is_list = False + if not hasattr(value, 'items'): + try: + is_list = True + value = dict([(k, v) for k, v in enumerate(value)]) + except TypeError: # Not iterable return the value + return value + + if self.field: + value_dict = dict([(key, self.field.to_mongo(item)) + for key, item in value.items()]) + else: + value_dict = {} + for k, v in value.items(): + if isinstance(v, Document): + # We need the id from the saved object to create the DBRef + if v.pk is None: + self.error('You can only reference documents once they' + ' have been saved to the database') + + # If its a document that is not inheritable it won't have + # any _cls data so make it a generic reference allows + # us to dereference + meta = getattr(v, '_meta', {}) + allow_inheritance = ( + meta.get('allow_inheritance', ALLOW_INHERITANCE) + == False) + if allow_inheritance and not self.field: + GenericReferenceField = _import_class( + "GenericReferenceField") + value_dict[k] = GenericReferenceField().to_mongo(v) + else: + collection = v._get_collection_name() + value_dict[k] = DBRef(collection, v.pk) + elif hasattr(v, 'to_mongo'): + value_dict[k] = v.to_mongo() + else: + value_dict[k] = self.to_mongo(v) + + if is_list: # Convert back to a list + return [v for k, v in sorted(value_dict.items(), + key=operator.itemgetter(0))] + return value_dict + + def validate(self, value): + """If field is provided ensure the value is valid. 
+ """ + errors = {} + if self.field: + if hasattr(value, 'iteritems') or hasattr(value, 'items'): + sequence = value.iteritems() + else: + sequence = enumerate(value) + for k, v in sequence: + try: + self.field._validate(v) + except ValidationError, error: + errors[k] = error.errors or error + except (ValueError, AssertionError), error: + errors[k] = error + + if errors: + field_class = self.field.__class__.__name__ + self.error('Invalid %s item (%s)' % (field_class, value), + errors=errors) + # Don't allow empty values if required + if self.required and not value: + self.error('Field is required and cannot be empty') + + def prepare_query_value(self, op, value): + return self.to_mongo(value) + + def lookup_member(self, member_name): + if self.field: + return self.field.lookup_member(member_name) + return None + + def _set_owner_document(self, owner_document): + if self.field: + self.field.owner_document = owner_document + self._owner_document = owner_document + + def _get_owner_document(self, owner_document): + self._owner_document = owner_document + + owner_document = property(_get_owner_document, _set_owner_document) + + @property + def _dereference(self,): + if not self.__dereference: + DeReference = _import_class("DeReference") + self.__dereference = DeReference() # Cached + return self.__dereference + + +class ObjectIdField(BaseField): + """A field wrapper around MongoDB's ObjectIds. 
+ """ + + def to_python(self, value): + if not isinstance(value, ObjectId): + value = ObjectId(value) + return value + + def to_mongo(self, value): + if not isinstance(value, ObjectId): + try: + return ObjectId(unicode(value)) + except Exception, e: + # e.message attribute has been deprecated since Python 2.6 + self.error(unicode(e)) + return value + + def prepare_query_value(self, op, value): + return self.to_mongo(value) + + def validate(self, value): + try: + ObjectId(unicode(value)) + except: + self.error('Invalid Object ID') diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py new file mode 100644 index 0000000..f87b03e --- /dev/null +++ b/mongoengine/base/metaclasses.py @@ -0,0 +1,388 @@ +import warnings + +import pymongo + +from mongoengine.common import _import_class +from mongoengine.errors import InvalidDocumentError +from mongoengine.python_support import PY3 +from mongoengine.queryset import (DO_NOTHING, DoesNotExist, + MultipleObjectsReturned, + QuerySet, QuerySetManager) + +from .common import _document_registry, ALLOW_INHERITANCE +from .fields import BaseField, ComplexBaseField, ObjectIdField + +__all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass') + + +class DocumentMetaclass(type): + """Metaclass for all documents. 
+ """ + + def __new__(cls, name, bases, attrs): + flattened_bases = cls._get_bases(bases) + super_new = super(DocumentMetaclass, cls).__new__ + + # If a base class just call super + metaclass = attrs.get('my_metaclass') + if metaclass and issubclass(metaclass, DocumentMetaclass): + return super_new(cls, name, bases, attrs) + + attrs['_is_document'] = attrs.get('_is_document', False) + + # EmbeddedDocuments could have meta data for inheritance + if 'meta' in attrs: + attrs['_meta'] = attrs.pop('meta') + + # Handle document Fields + + # Merge all fields from subclasses + doc_fields = {} + for base in flattened_bases[::-1]: + if hasattr(base, '_fields'): + doc_fields.update(base._fields) + + # Standard object mixin - merge in any Fields + if not hasattr(base, '_meta'): + base_fields = {} + for attr_name, attr_value in base.__dict__.iteritems(): + if not isinstance(attr_value, BaseField): + continue + attr_value.name = attr_name + if not attr_value.db_field: + attr_value.db_field = attr_name + base_fields[attr_name] = attr_value + doc_fields.update(base_fields) + + # Discover any document fields + field_names = {} + for attr_name, attr_value in attrs.iteritems(): + if not isinstance(attr_value, BaseField): + continue + attr_value.name = attr_name + if not attr_value.db_field: + attr_value.db_field = attr_name + doc_fields[attr_name] = attr_value + + # Count names to ensure no db_field redefinitions + field_names[attr_value.db_field] = field_names.get( + attr_value.db_field, 0) + 1 + + # Ensure no duplicate db_fields + duplicate_db_fields = [k for k, v in field_names.items() if v > 1] + if duplicate_db_fields: + msg = ("Multiple db_fields defined for: %s " % + ", ".join(duplicate_db_fields)) + raise InvalidDocumentError(msg) + + # Set _fields and db_field maps + attrs['_fields'] = doc_fields + attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k)) + for k, v in doc_fields.iteritems()]) + attrs['_reverse_db_field_map'] = dict( + (v, k) for k, v in 
attrs['_db_field_map'].iteritems()) + + # + # Set document hierarchy + # + superclasses = () + class_name = [name] + for base in flattened_bases: + if (not getattr(base, '_is_base_cls', True) and + not getattr(base, '_meta', {}).get('abstract', True)): + # Collate heirarchy for _cls and _subclasses + class_name.append(base.__name__) + + if hasattr(base, '_meta'): + # Warn if allow_inheritance isn't set and prevent + # inheritance of classes where inheritance is set to False + allow_inheritance = base._meta.get('allow_inheritance', + ALLOW_INHERITANCE) + if (not getattr(base, '_is_base_cls', True) + and allow_inheritance is None): + warnings.warn( + "%s uses inheritance, the default for " + "allow_inheritance is changing to off by default. " + "Please add it to the document meta." % name, + FutureWarning + ) + elif (allow_inheritance == False and + not base._meta.get('abstract')): + raise ValueError('Document %s may not be subclassed' % + base.__name__) + + # Get superclasses from last base superclass + document_bases = [b for b in flattened_bases + if hasattr(b, '_class_name')] + if document_bases: + superclasses = document_bases[0]._superclasses + superclasses += (document_bases[0]._class_name, ) + + _cls = '.'.join(reversed(class_name)) + attrs['_class_name'] = _cls + attrs['_superclasses'] = superclasses + attrs['_subclasses'] = (_cls, ) + attrs['_types'] = attrs['_subclasses'] # TODO depreciate _types + + # Create the new_class + new_class = super_new(cls, name, bases, attrs) + + # Set _subclasses + for base in document_bases: + if _cls not in base._subclasses: + base._subclasses += (_cls,) + base._types = base._subclasses # TODO depreciate _types + + # Handle delete rules + Document, EmbeddedDocument, DictField = cls._import_classes() + for field in new_class._fields.itervalues(): + f = field + f.owner_document = new_class + delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING) + if isinstance(f, ComplexBaseField) and hasattr(f, 'field'): + delete_rule = 
getattr(f.field, + 'reverse_delete_rule', + DO_NOTHING) + if isinstance(f, DictField) and delete_rule != DO_NOTHING: + msg = ("Reverse delete rules are not supported " + "for %s (field: %s)" % + (field.__class__.__name__, field.name)) + raise InvalidDocumentError(msg) + + f = field.field + + if delete_rule != DO_NOTHING: + if issubclass(new_class, EmbeddedDocument): + msg = ("Reverse delete rules are not supported for " + "EmbeddedDocuments (field: %s)" % field.name) + raise InvalidDocumentError(msg) + f.document_type.register_delete_rule(new_class, + field.name, delete_rule) + + if (field.name and hasattr(Document, field.name) and + EmbeddedDocument not in new_class.mro()): + msg = ("%s is a document method and not a valid " + "field name" % field.name) + raise InvalidDocumentError(msg) + + # Add class to the _document_registry + _document_registry[new_class._class_name] = new_class + + # In Python 2, User-defined methods objects have special read-only + # attributes 'im_func' and 'im_self' which contain the function obj + # and class instance object respectively. With Python 3 these special + # attributes have been replaced by __func__ and __self__. The Blinker + # module continues to use im_func and im_self, so the code below + # copies __func__ into im_func and __self__ into im_self for + # classmethod objects in Document derived classes. 
+ if PY3: + for key, val in new_class.__dict__.items(): + if isinstance(val, classmethod): + f = val.__get__(new_class) + if hasattr(f, '__func__') and not hasattr(f, 'im_func'): + f.__dict__.update({'im_func': getattr(f, '__func__')}) + if hasattr(f, '__self__') and not hasattr(f, 'im_self'): + f.__dict__.update({'im_self': getattr(f, '__self__')}) + + return new_class + + def add_to_class(self, name, value): + setattr(self, name, value) + + @classmethod + def _get_bases(cls, bases): + if isinstance(bases, BasesTuple): + return bases + seen = [] + bases = cls.__get_bases(bases) + unique_bases = (b for b in bases if not (b in seen or seen.append(b))) + return BasesTuple(unique_bases) + + @classmethod + def __get_bases(cls, bases): + for base in bases: + if base is object: + continue + yield base + for child_base in cls.__get_bases(base.__bases__): + yield child_base + + @classmethod + def _import_classes(cls): + Document = _import_class('Document') + EmbeddedDocument = _import_class('EmbeddedDocument') + DictField = _import_class('DictField') + return (Document, EmbeddedDocument, DictField) + + +class TopLevelDocumentMetaclass(DocumentMetaclass): + """Metaclass for top-level documents (i.e. documents that have their own + collection in the database. 
+ """ + + def __new__(cls, name, bases, attrs): + flattened_bases = cls._get_bases(bases) + super_new = super(TopLevelDocumentMetaclass, cls).__new__ + + # Set default _meta data if base class, otherwise get user defined meta + if (attrs.get('my_metaclass') == TopLevelDocumentMetaclass): + # defaults + attrs['_meta'] = { + 'abstract': True, + 'max_documents': None, + 'max_size': None, + 'ordering': [], # default ordering applied at runtime + 'indexes': [], # indexes to be ensured at runtime + 'id_field': None, + 'index_background': False, + 'index_drop_dups': False, + 'index_opts': None, + 'delete_rules': None, + 'allow_inheritance': None, + } + attrs['_is_base_cls'] = True + attrs['_meta'].update(attrs.get('meta', {})) + else: + attrs['_meta'] = attrs.get('meta', {}) + # Explictly set abstract to false unless set + attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False) + attrs['_is_base_cls'] = False + + # Set flag marking as document class - as opposed to an object mixin + attrs['_is_document'] = True + + # Ensure queryset_class is inherited + if 'objects' in attrs: + manager = attrs['objects'] + if hasattr(manager, 'queryset_class'): + attrs['_meta']['queryset_class'] = manager.queryset_class + + # Clean up top level meta + if 'meta' in attrs: + del(attrs['meta']) + + # Find the parent document class + parent_doc_cls = [b for b in flattened_bases + if b.__class__ == TopLevelDocumentMetaclass] + parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0] + + # Prevent classes setting collection different to their parents + # If parent wasn't an abstract class + if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) + and not parent_doc_cls._meta.get('abstract', True)): + msg = "Trying to set a collection on a subclass (%s)" % name + warnings.warn(msg, SyntaxWarning) + del(attrs['_meta']['collection']) + + # Ensure abstract documents have abstract bases + if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'): + if (parent_doc_cls 
and + not parent_doc_cls._meta.get('abstract', False)): + msg = "Abstract document cannot have non-abstract base" + raise ValueError(msg) + return super_new(cls, name, bases, attrs) + + # Merge base class metas. + # Uses a special MetaDict that handles various merging rules + meta = MetaDict() + for base in flattened_bases[::-1]: + # Add any mixin metadata from plain objects + if hasattr(base, 'meta'): + meta.merge(base.meta) + elif hasattr(base, '_meta'): + meta.merge(base._meta) + + # Set collection in the meta if its callable + if (getattr(base, '_is_document', False) and + not base._meta.get('abstract')): + collection = meta.get('collection', None) + if callable(collection): + meta['collection'] = collection(base) + + meta.merge(attrs.get('_meta', {})) # Top level meta + + # Only simple classes (direct subclasses of Document) + # may set allow_inheritance to False + simple_class = all([b._meta.get('abstract') + for b in flattened_bases if hasattr(b, '_meta')]) + if (not simple_class and meta['allow_inheritance'] == False and + not meta['abstract']): + raise ValueError('Only direct subclasses of Document may set ' + '"allow_inheritance" to False') + + # Set default collection name + if 'collection' not in meta: + meta['collection'] = ''.join('_%s' % c if c.isupper() else c + for c in name).strip('_').lower() + attrs['_meta'] = meta + + # Call super and get the new class + new_class = super_new(cls, name, bases, attrs) + + meta = new_class._meta + + # Set index specifications + meta['index_specs'] = [new_class._build_index_spec(spec) + for spec in meta['indexes']] + unique_indexes = new_class._unique_with_indexes() + new_class._meta['unique_indexes'] = unique_indexes + + # If collection is a callable - call it and set the value + collection = meta.get('collection') + if callable(collection): + new_class._meta['collection'] = collection(new_class) + + # Provide a default queryset unless one has been set + manager = attrs.get('objects', QuerySetManager()) + 
new_class.objects = manager + + # Validate the fields and set primary key if needed + for field_name, field in new_class._fields.iteritems(): + if field.primary_key: + # Ensure only one primary key is set + current_pk = new_class._meta.get('id_field') + if current_pk and current_pk != field_name: + raise ValueError('Cannot override primary key field') + + # Set primary key + if not current_pk: + new_class._meta['id_field'] = field_name + new_class.id = field + + # Set primary key if not defined by the document + if not new_class._meta.get('id_field'): + new_class._meta['id_field'] = 'id' + new_class._fields['id'] = ObjectIdField(db_field='_id') + new_class.id = new_class._fields['id'] + + # Merge in exceptions with parent hierarchy + exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) + module = attrs.get('__module__') + for exc in exceptions_to_merge: + name = exc.__name__ + parents = tuple(getattr(base, name) for base in flattened_bases + if hasattr(base, name)) or (exc,) + # Create new exception and set to new_class + exception = type(name, parents, {'__module__': module}) + setattr(new_class, name, exception) + + return new_class + + +class MetaDict(dict): + """Custom dictionary for meta classes. 
+    Handles the merging of set indexes
+    """
+    _merge_options = ('indexes',)
+
+    def merge(self, new_options):
+        for k, v in new_options.iteritems():
+            if k in self._merge_options:
+                self[k] = self.get(k, []) + v
+            else:
+                self[k] = v
+
+
+class BasesTuple(tuple):
+    """Special class to handle introspection of bases tuple in __new__"""
+    pass
diff --git a/mongoengine/common.py b/mongoengine/common.py
new file mode 100644
index 0000000..c284777
--- /dev/null
+++ b/mongoengine/common.py
@@ -0,0 +1,35 @@
+_class_registry_cache = {}
+
+
+def _import_class(cls_name):
+    """Cached mechanism for imports"""
+    if cls_name in _class_registry_cache:
+        return _class_registry_cache.get(cls_name)
+
+    doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument',
+                   'MapReduceDocument')
+    field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField',
+                     'GenericReferenceField', 'GeoPointField',
+                     'ReferenceField', 'StringField')
+    queryset_classes = ('OperationError',)
+    deref_classes = ('DeReference',)
+
+    if cls_name in doc_classes:
+        from mongoengine import document as module
+        import_classes = doc_classes
+    elif cls_name in field_classes:
+        from mongoengine import fields as module
+        import_classes = field_classes
+    elif cls_name in queryset_classes:
+        from mongoengine import queryset as module
+        import_classes = queryset_classes
+    elif cls_name in deref_classes:
+        from mongoengine import dereference as module
+        import_classes = deref_classes
+    else:
+        raise ValueError('No import set for: %s' % cls_name)
+
+    for cls in import_classes:
+        _class_registry_cache[cls] = getattr(module, cls)
+
+    return _class_registry_cache.get(cls_name)
\ No newline at end of file
diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py
index 386dbf4..59cc0a5 100644
--- a/mongoengine/dereference.py
+++ b/mongoengine/dereference.py
@@ -164,7 +164,7 @@ class DeReference(object):
         if isinstance(items, (dict, SON)):
             if '_ref' in items:
                 return self.object_map.get(items['_ref'].id,
items) - elif '_types' in items and '_cls' in items: + elif '_cls' in items: doc = get_document(items['_cls'])._from_son(items) doc._data = self._attach_objects(doc._data, depth, doc, None) return doc diff --git a/mongoengine/django/shortcuts.py b/mongoengine/django/shortcuts.py index 637cee1..9cc8370 100644 --- a/mongoengine/django/shortcuts.py +++ b/mongoengine/django/shortcuts.py @@ -1,6 +1,6 @@ from mongoengine.queryset import QuerySet from mongoengine.base import BaseDocument -from mongoengine.base import ValidationError +from mongoengine.errors import ValidationError def _get_queryset(cls): """Inspired by django.shortcuts.*""" diff --git a/mongoengine/document.py b/mongoengine/document.py index 7b3afaf..b1ce13a 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -11,9 +11,9 @@ from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, from queryset import OperationError, NotUniqueError from connection import get_db, DEFAULT_CONNECTION_NAME -__all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument', +__all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument', 'DynamicEmbeddedDocument', 'OperationError', - 'InvalidCollectionError', 'NotUniqueError'] + 'InvalidCollectionError', 'NotUniqueError', 'MapReduceDocument') class InvalidCollectionError(Exception): @@ -28,11 +28,11 @@ class EmbeddedDocument(BaseDocument): A :class:`~mongoengine.EmbeddedDocument` subclass may be itself subclassed, to create a specialised version of the embedded document that will be - stored in the same collection. To facilitate this behaviour, `_cls` and - `_types` fields are added to documents (hidden though the MongoEngine - interface though). To disable this behaviour and remove the dependence on - the presence of `_cls` and `_types`, set :attr:`allow_inheritance` to - ``False`` in the :attr:`meta` dictionary. + stored in the same collection. 
To facilitate this behaviour a `_cls` + field is added to documents (hidden though the MongoEngine interface). + To disable this behaviour and remove the dependence on the presence of + `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta` + dictionary. """ # The __metaclass__ attribute is removed by 2to3 when running with Python3 @@ -76,11 +76,11 @@ class Document(BaseDocument): A :class:`~mongoengine.Document` subclass may be itself subclassed, to create a specialised version of the document that will be stored in the - same collection. To facilitate this behaviour, `_cls` and `_types` - fields are added to documents (hidden though the MongoEngine interface - though). To disable this behaviour and remove the dependence on the - presence of `_cls` and `_types`, set :attr:`allow_inheritance` to - ``False`` in the :attr:`meta` dictionary. + same collection. To facilitate this behaviour a `_cls` + field is added to documents (hidden though the MongoEngine interface). + To disable this behaviour and remove the dependence on the presence of + `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta` + dictionary. A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` @@ -101,10 +101,10 @@ class Document(BaseDocument): production systems where index creation is performed as part of a deployment system. - By default, _types will be added to the start of every index (that + By default, _cls will be added to the start of every index (that doesn't contain a list) if allow_inheritance is True. This can be disabled by either setting types to False on the specific index or - by setting index_types to False on the meta dictionary for the document. + by setting index_cls to False on the meta dictionary for the document. 
""" # The __metaclass__ attribute is removed by 2to3 when running with Python3 diff --git a/mongoengine/errors.py b/mongoengine/errors.py new file mode 100644 index 0000000..eb72503 --- /dev/null +++ b/mongoengine/errors.py @@ -0,0 +1,124 @@ +from collections import defaultdict + +from .python_support import txt_type + + +__all__ = ('NotRegistered', 'InvalidDocumentError', 'ValidationError') + + +class NotRegistered(Exception): + pass + + +class InvalidDocumentError(Exception): + pass + + +class LookUpError(AttributeError): + pass + + +class DoesNotExist(Exception): + pass + + +class MultipleObjectsReturned(Exception): + pass + + +class InvalidQueryError(Exception): + pass + + +class OperationError(Exception): + pass + + +class NotUniqueError(OperationError): + pass + + +class ValidationError(AssertionError): + """Validation exception. + + May represent an error validating a field or a + document containing fields with validation errors. + + :ivar errors: A dictionary of errors for fields within this + document or list, or None if the error is for an + individual field. 
+ """ + + errors = {} + field_name = None + _message = None + + def __init__(self, message="", **kwargs): + self.errors = kwargs.get('errors', {}) + self.field_name = kwargs.get('field_name') + self.message = message + + def __str__(self): + return txt_type(self.message) + + def __repr__(self): + return '%s(%s,)' % (self.__class__.__name__, self.message) + + def __getattribute__(self, name): + message = super(ValidationError, self).__getattribute__(name) + if name == 'message': + if self.field_name: + message = '%s' % message + if self.errors: + message = '%s(%s)' % (message, self._format_errors()) + return message + + def _get_message(self): + return self._message + + def _set_message(self, message): + self._message = message + + message = property(_get_message, _set_message) + + def to_dict(self): + """Returns a dictionary of all errors within a document + + Keys are field names or list indices and values are the + validation error messages, or a nested dictionary of + errors for an embedded document or list. 
+ """ + + def build_dict(source): + errors_dict = {} + if not source: + return errors_dict + if isinstance(source, dict): + for field_name, error in source.iteritems(): + errors_dict[field_name] = build_dict(error) + elif isinstance(source, ValidationError) and source.errors: + return build_dict(source.errors) + else: + return unicode(source) + return errors_dict + if not self.errors: + return {} + return build_dict(self.errors) + + def _format_errors(self): + """Returns a string listing all errors within a document""" + + def generate_key(value, prefix=''): + if isinstance(value, list): + value = ' '.join([generate_key(k) for k in value]) + if isinstance(value, dict): + value = ' '.join( + [generate_key(v, k) for k, v in value.iteritems()]) + + results = "%s.%s" % (prefix, value) if prefix else value + return results + + error_dict = defaultdict(list) + for k, v in self.to_dict().iteritems(): + error_dict[generate_key(v)].append(k) + return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()]) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 01d3fc6..9bcba9f 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -12,10 +12,11 @@ from operator import itemgetter import gridfs from bson import Binary, DBRef, SON, ObjectId +from mongoengine.errors import ValidationError from mongoengine.python_support import (PY3, bin_type, txt_type, str_types, StringIO) from base import (BaseField, ComplexBaseField, ObjectIdField, - ValidationError, get_document, BaseDocument) + get_document, BaseDocument) from queryset import DO_NOTHING, QuerySet from document import Document, EmbeddedDocument from connection import get_db, DEFAULT_CONNECTION_NAME @@ -568,9 +569,6 @@ class ListField(ComplexBaseField): Required means it cannot be empty - as the default for ListFields is [] """ - # ListFields cannot be indexed with _types - MongoDB doesn't support this - _index_with_types = False - def __init__(self, field=None, **kwargs): self.field = field 
kwargs.setdefault('default', lambda: []) diff --git a/mongoengine/queryset/__init__.py b/mongoengine/queryset/__init__.py new file mode 100644 index 0000000..f6feeab --- /dev/null +++ b/mongoengine/queryset/__init__.py @@ -0,0 +1,11 @@ +from mongoengine.errors import (DoesNotExist, MultipleObjectsReturned, + InvalidQueryError, OperationError, + NotUniqueError) +from .field_list import * +from .manager import * +from .queryset import * +from .transform import * +from .visitor import * + +__all__ = (field_list.__all__ + manager.__all__ + queryset.__all__ + + transform.__all__ + visitor.__all__) diff --git a/mongoengine/queryset/field_list.py b/mongoengine/queryset/field_list.py new file mode 100644 index 0000000..1c825fa --- /dev/null +++ b/mongoengine/queryset/field_list.py @@ -0,0 +1,51 @@ + +__all__ = ('QueryFieldList',) + + +class QueryFieldList(object): + """Object that handles combinations of .only() and .exclude() calls""" + ONLY = 1 + EXCLUDE = 0 + + def __init__(self, fields=[], value=ONLY, always_include=[]): + self.value = value + self.fields = set(fields) + self.always_include = set(always_include) + self._id = None + + def __add__(self, f): + if not self.fields: + self.fields = f.fields + self.value = f.value + elif self.value is self.ONLY and f.value is self.ONLY: + self.fields = self.fields.intersection(f.fields) + elif self.value is self.EXCLUDE and f.value is self.EXCLUDE: + self.fields = self.fields.union(f.fields) + elif self.value is self.ONLY and f.value is self.EXCLUDE: + self.fields -= f.fields + elif self.value is self.EXCLUDE and f.value is self.ONLY: + self.value = self.ONLY + self.fields = f.fields - self.fields + + if '_id' in f.fields: + self._id = f.value + + if self.always_include: + if self.value is self.ONLY and self.fields: + self.fields = self.fields.union(self.always_include) + else: + self.fields -= self.always_include + return self + + def __nonzero__(self): + return bool(self.fields) + + def as_dict(self): + field_list = 
dict((field, self.value) for field in self.fields) + if self._id is not None: + field_list['_id'] = self._id + return field_list + + def reset(self): + self.fields = set([]) + self.value = self.ONLY diff --git a/mongoengine/queryset/manager.py b/mongoengine/queryset/manager.py new file mode 100644 index 0000000..7376e3c --- /dev/null +++ b/mongoengine/queryset/manager.py @@ -0,0 +1,61 @@ +from functools import partial +from .queryset import QuerySet + +__all__ = ('queryset_manager', 'QuerySetManager') + + +class QuerySetManager(object): + """ + The default QuerySet Manager. + + Custom QuerySet Manager functions can extend this class and users can + add extra queryset functionality. Any custom manager methods must accept a + :class:`~mongoengine.Document` class as its first argument, and a + :class:`~mongoengine.queryset.QuerySet` as its second argument. + + The method function should return a :class:`~mongoengine.queryset.QuerySet` + , probably the same one that was passed in, but modified in some way. + """ + + get_queryset = None + + def __init__(self, queryset_func=None): + if queryset_func: + self.get_queryset = queryset_func + self._collections = {} + + def __get__(self, instance, owner): + """Descriptor for instantiating a new QuerySet object when + Document.objects is accessed. 
+ """ + if instance is not None: + # Document class being used rather than a document object + return self + + # owner is the document that contains the QuerySetManager + queryset_class = owner._meta.get('queryset_class') or QuerySet + queryset = queryset_class(owner, owner._get_collection()) + if self.get_queryset: + arg_count = self.get_queryset.func_code.co_argcount + if arg_count == 1: + queryset = self.get_queryset(queryset) + elif arg_count == 2: + queryset = self.get_queryset(owner, queryset) + else: + queryset = partial(self.get_queryset, owner, queryset) + return queryset + + +def queryset_manager(func): + """Decorator that allows you to define custom QuerySet managers on + :class:`~mongoengine.Document` classes. The manager must be a function that + accepts a :class:`~mongoengine.Document` class as its first argument, and a + :class:`~mongoengine.queryset.QuerySet` as its second argument. The method + function should return a :class:`~mongoengine.queryset.QuerySet`, probably + the same one that was passed in, but modified in some way. 
+ """ + if func.func_code.co_argcount == 1: + import warnings + msg = 'Methods decorated with queryset_manager should take 2 arguments' + warnings.warn(msg, DeprecationWarning) + return QuerySetManager(func) diff --git a/mongoengine/queryset.py b/mongoengine/queryset/queryset.py similarity index 59% rename from mongoengine/queryset.py rename to mongoengine/queryset/queryset.py index c774322..5108066 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -4,20 +4,21 @@ import copy import itertools import operator -from collections import defaultdict -from functools import partial - -from mongoengine.python_support import product, reduce - import pymongo from bson.code import Code from mongoengine import signals +from mongoengine.common import _import_class +from mongoengine.errors import (OperationError, NotUniqueError, + InvalidQueryError) -__all__ = ['queryset_manager', 'Q', 'InvalidQueryError', - 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL'] +from . import transform +from .field_list import QueryFieldList +from .visitor import Q +__all__ = ('QuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL') + # The maximum number of items to display in a QuerySet.__repr__ REPR_OUTPUT_SIZE = 20 @@ -28,308 +29,9 @@ CASCADE = 2 DENY = 3 PULL = 4 - -class DoesNotExist(Exception): - pass - - -class MultipleObjectsReturned(Exception): - pass - - -class InvalidQueryError(Exception): - pass - - -class OperationError(Exception): - pass - - -class NotUniqueError(OperationError): - pass - - RE_TYPE = type(re.compile('')) -class QNodeVisitor(object): - """Base visitor class for visiting Q-object nodes in a query tree. - """ - - def visit_combination(self, combination): - """Called by QCombination objects. - """ - return combination - - def visit_query(self, query): - """Called by (New)Q objects. 
- """ - return query - - -class SimplificationVisitor(QNodeVisitor): - """Simplifies query trees by combinging unnecessary 'and' connection nodes - into a single Q-object. - """ - - def visit_combination(self, combination): - if combination.operation == combination.AND: - # The simplification only applies to 'simple' queries - if all(isinstance(node, Q) for node in combination.children): - queries = [node.query for node in combination.children] - return Q(**self._query_conjunction(queries)) - return combination - - def _query_conjunction(self, queries): - """Merges query dicts - effectively &ing them together. - """ - query_ops = set() - combined_query = {} - for query in queries: - ops = set(query.keys()) - # Make sure that the same operation isn't applied more than once - # to a single field - intersection = ops.intersection(query_ops) - if intersection: - msg = 'Duplicate query conditions: ' - raise InvalidQueryError(msg + ', '.join(intersection)) - - query_ops.update(ops) - combined_query.update(copy.deepcopy(query)) - return combined_query - - -class QueryTreeTransformerVisitor(QNodeVisitor): - """Transforms the query tree in to a form that may be used with MongoDB. - """ - - def visit_combination(self, combination): - if combination.operation == combination.AND: - # MongoDB doesn't allow us to have too many $or operations in our - # queries, so the aim is to move the ORs up the tree to one - # 'master' $or. Firstly, we must find all the necessary parts (part - # of an AND combination or just standard Q object), and store them - # separately from the OR parts. 
- or_groups = [] - and_parts = [] - for node in combination.children: - if isinstance(node, QCombination): - if node.operation == node.OR: - # Any of the children in an $or component may cause - # the query to succeed - or_groups.append(node.children) - elif node.operation == node.AND: - and_parts.append(node) - elif isinstance(node, Q): - and_parts.append(node) - - # Now we combine the parts into a usable query. AND together all of - # the necessary parts. Then for each $or part, create a new query - # that ANDs the necessary part with the $or part. - clauses = [] - for or_group in product(*or_groups): - q_object = reduce(lambda a, b: a & b, and_parts, Q()) - q_object = reduce(lambda a, b: a & b, or_group, q_object) - clauses.append(q_object) - # Finally, $or the generated clauses in to one query. Each of the - # clauses is sufficient for the query to succeed. - return reduce(lambda a, b: a | b, clauses, Q()) - - if combination.operation == combination.OR: - children = [] - # Crush any nested ORs in to this combination as MongoDB doesn't - # support nested $or operations - for node in combination.children: - if (isinstance(node, QCombination) and - node.operation == combination.OR): - children += node.children - else: - children.append(node) - combination.children = children - - return combination - - -class QueryCompilerVisitor(QNodeVisitor): - """Compiles the nodes in a query tree to a PyMongo-compatible query - dictionary. - """ - - def __init__(self, document): - self.document = document - - def visit_combination(self, combination): - if combination.operation == combination.OR: - return {'$or': combination.children} - elif combination.operation == combination.AND: - return self._mongo_query_conjunction(combination.children) - return combination - - def visit_query(self, query): - return QuerySet._transform_query(self.document, **query.query) - - def _mongo_query_conjunction(self, queries): - """Merges Mongo query dicts - effectively &ing them together. 
- """ - combined_query = {} - for query in queries: - for field, ops in query.items(): - if field not in combined_query: - combined_query[field] = ops - else: - # The field is already present in the query the only way - # we can merge is if both the existing value and the new - # value are operation dicts, reject anything else - if (not isinstance(combined_query[field], dict) or - not isinstance(ops, dict)): - message = 'Conflicting values for ' + field - raise InvalidQueryError(message) - - current_ops = set(combined_query[field].keys()) - new_ops = set(ops.keys()) - # Make sure that the same operation isn't applied more than - # once to a single field - intersection = current_ops.intersection(new_ops) - if intersection: - msg = 'Duplicate query conditions: ' - raise InvalidQueryError(msg + ', '.join(intersection)) - - # Right! We've got two non-overlapping dicts of operations! - combined_query[field].update(copy.deepcopy(ops)) - return combined_query - - -class QNode(object): - """Base class for nodes in query trees. - """ - - AND = 0 - OR = 1 - - def to_query(self, document): - query = self.accept(SimplificationVisitor()) - query = query.accept(QueryTreeTransformerVisitor()) - query = query.accept(QueryCompilerVisitor(document)) - return query - - def accept(self, visitor): - raise NotImplementedError - - def _combine(self, other, operation): - """Combine this node with another node into a QCombination object. - """ - if getattr(other, 'empty', True): - return self - - if self.empty: - return other - - return QCombination(operation, [self, other]) - - @property - def empty(self): - return False - - def __or__(self, other): - return self._combine(other, self.OR) - - def __and__(self, other): - return self._combine(other, self.AND) - - -class QCombination(QNode): - """Represents the combination of several conditions by a given logical - operator. 
- """ - - def __init__(self, operation, children): - self.operation = operation - self.children = [] - for node in children: - # If the child is a combination of the same type, we can merge its - # children directly into this combinations children - if isinstance(node, QCombination) and node.operation == operation: - self.children += node.children - else: - self.children.append(node) - - def accept(self, visitor): - for i in range(len(self.children)): - if isinstance(self.children[i], QNode): - self.children[i] = self.children[i].accept(visitor) - - return visitor.visit_combination(self) - - @property - def empty(self): - return not bool(self.children) - - -class Q(QNode): - """A simple query object, used in a query tree to build up more complex - query structures. - """ - - def __init__(self, **query): - self.query = query - - def accept(self, visitor): - return visitor.visit_query(self) - - @property - def empty(self): - return not bool(self.query) - - -class QueryFieldList(object): - """Object that handles combinations of .only() and .exclude() calls""" - ONLY = 1 - EXCLUDE = 0 - - def __init__(self, fields=[], value=ONLY, always_include=[]): - self.value = value - self.fields = set(fields) - self.always_include = set(always_include) - self._id = None - - def as_dict(self): - field_list = dict((field, self.value) for field in self.fields) - if self._id is not None: - field_list['_id'] = self._id - return field_list - - def __add__(self, f): - if not self.fields: - self.fields = f.fields - self.value = f.value - elif self.value is self.ONLY and f.value is self.ONLY: - self.fields = self.fields.intersection(f.fields) - elif self.value is self.EXCLUDE and f.value is self.EXCLUDE: - self.fields = self.fields.union(f.fields) - elif self.value is self.ONLY and f.value is self.EXCLUDE: - self.fields -= f.fields - elif self.value is self.EXCLUDE and f.value is self.ONLY: - self.value = self.ONLY - self.fields = f.fields - self.fields - - if '_id' in f.fields: - self._id 
= f.value - - if self.always_include: - if self.value is self.ONLY and self.fields: - self.fields = self.fields.union(self.always_include) - else: - self.fields -= self.always_include - return self - - def reset(self): - self.fields = set([]) - self.value = self.ONLY - - def __nonzero__(self): - return bool(self.fields) - - class QuerySet(object): """A set of results returned from a query. Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as the results. @@ -357,7 +59,7 @@ class QuerySet(object): # If inheritance is allowed, only return instances and instances of # subclasses of the class being used if document._meta.get('allow_inheritance') != False: - self._initial_query = {'_types': self._document._class_name} + self._initial_query = {"_cls": {"$in": self._document._subclasses}} self._loaded_fields = QueryFieldList(always_include=['_cls']) self._cursor_obj = None self._limit = None @@ -397,7 +99,7 @@ class QuerySet(object): construct a multi-field index); keys may be prefixed with a **+** or a **-** to determine the index ordering """ - index_spec = QuerySet._build_index_spec(self._document, key_or_list) + index_spec = self._document._build_index_spec(key_or_list) index_spec = index_spec.copy() fields = index_spec.pop('fields') index_spec['drop_dups'] = drop_dups @@ -448,26 +150,26 @@ class QuerySet(object): background = self._document._meta.get('index_background', False) drop_dups = self._document._meta.get('index_drop_dups', False) index_opts = self._document._meta.get('index_opts') or {} - index_types = self._document._meta.get('index_types', True) + index_cls = self._document._meta.get('index_cls', True) # determine if an index which we are creating includes - # _type as its first field; if so, we can avoid creating - # an extra index on _type, as mongodb will use the existing - # index to service queries against _type - types_indexed = False + # _cls as its first field; if so, we can avoid creating + # an extra index on _cls, as 
mongodb will use the existing + # index to service queries against _cls + cls_indexed = False - def includes_types(fields): + def includes_cls(fields): first_field = None if len(fields): if isinstance(fields[0], basestring): first_field = fields[0] elif isinstance(fields[0], (list, tuple)) and len(fields[0]): first_field = fields[0][0] - return first_field == '_types' + return first_field == '_cls' # Ensure indexes created by uniqueness constraints for index in self._document._meta['unique_indexes']: - types_indexed = types_indexed or includes_types(index) + cls_indexed = cls_indexed or includes_cls(index) self._collection.ensure_index(index, unique=True, background=background, drop_dups=drop_dups, **index_opts) @@ -477,16 +179,16 @@ class QuerySet(object): for spec in index_spec: spec = spec.copy() fields = spec.pop('fields') - types_indexed = types_indexed or includes_types(fields) + cls_indexed = cls_indexed or includes_cls(fields) opts = index_opts.copy() opts.update(spec) self._collection.ensure_index(fields, background=background, **opts) - # If _types is being used (for polymorphism), it needs an index, - # only if another index doesn't begin with _types - if index_types and '_types' in self._query and not types_indexed: - self._collection.ensure_index('_types', + # If _cls is being used (for polymorphism), it needs an index, + # only if another index doesn't begin with _cls + if index_cls and '_cls' in self._query and not cls_indexed: + self._collection.ensure_index('_cls', background=background, **index_opts) # Add geo indicies @@ -495,79 +197,14 @@ class QuerySet(object): self._collection.ensure_index(index_spec, background=background, **index_opts) - @classmethod - def _build_index_spec(cls, doc_cls, spec): - """Build a PyMongo index spec from a MongoEngine index spec. 
- """ - if isinstance(spec, basestring): - spec = {'fields': [spec]} - elif isinstance(spec, (list, tuple)): - spec = {'fields': list(spec)} - elif isinstance(spec, dict): - spec = dict(spec) - - index_list = [] - direction = None - - allow_inheritance = doc_cls._meta.get('allow_inheritance') != False - - # If sparse - dont include types - use_types = allow_inheritance and not spec.get('sparse', False) - - for key in spec['fields']: - # If inherited spec continue - if isinstance(key, (list, tuple)): - continue - - # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from * - direction = pymongo.ASCENDING - if key.startswith("-"): - direction = pymongo.DESCENDING - elif key.startswith("*"): - direction = pymongo.GEO2D - if key.startswith(("+", "-", "*")): - key = key[1:] - - # Use real field name, do it manually because we need field - # objects for the next part (list field checking) - parts = key.split('.') - if parts in (['pk'], ['id'], ['_id']): - key = '_id' - fields = [] - else: - fields = QuerySet._lookup_field(doc_cls, parts) - parts = [field if field == '_id' else field.db_field - for field in fields] - key = '.'.join(parts) - index_list.append((key, direction)) - - # Check if a list field is being used, don't use _types if it is - if use_types and not all(f._index_with_types for f in fields): - use_types = False - - # If _types is being used, prepend it to every specified index - index_types = doc_cls._meta.get('index_types', True) - - if (spec.get('types', index_types) and use_types - and direction is not pymongo.GEO2D): - index_list.insert(0, ('_types', 1)) - - spec['fields'] = index_list - if spec.get('sparse', False) and len(spec['fields']) > 1: - raise ValueError( - 'Sparse indexes can only have one field in them. 
' - 'See https://jira.mongodb.org/browse/SERVER-2193') - - return spec - @classmethod def _reset_already_indexed(cls, document=None): - """Helper to reset already indexed, can be useful for testing purposes""" + """Helper to reset already indexed, can be useful for testing purposes + """ if document: cls.__already_indexed.discard(document) cls.__already_indexed.clear() - @property def _collection(self): """Property that returns the collection object. This allows us to @@ -624,195 +261,12 @@ class QuerySet(object): self._cursor_obj.hint(self._hint) return self._cursor_obj - @classmethod - def _lookup_field(cls, document, parts): - """Lookup a field based on its attribute and return a list containing - the field's parents and the field. - """ - if not isinstance(parts, (list, tuple)): - parts = [parts] - fields = [] - field = None - - for field_name in parts: - # Handle ListField indexing: - if field_name.isdigit(): - try: - new_field = field.field - except AttributeError, err: - raise InvalidQueryError( - "Can't use index on unsubscriptable field (%s)" % err) - fields.append(field_name) - continue - - if field is None: - # Look up first field from the document - if field_name == 'pk': - # Deal with "primary key" alias - field_name = document._meta['id_field'] - if field_name in document._fields: - field = document._fields[field_name] - elif document._dynamic: - from fields import DynamicField - field = DynamicField(db_field=field_name) - else: - raise InvalidQueryError('Cannot resolve field "%s"' - % field_name) - else: - from mongoengine.fields import ReferenceField, GenericReferenceField - if isinstance(field, (ReferenceField, GenericReferenceField)): - raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts)) - if hasattr(getattr(field, 'field', None), 'lookup_member'): - new_field = field.field.lookup_member(field_name) - else: - # Look up subfield on the previous field - new_field = field.lookup_member(field_name) - from base import 
ComplexBaseField - if not new_field and isinstance(field, ComplexBaseField): - fields.append(field_name) - continue - elif not new_field: - raise InvalidQueryError('Cannot resolve field "%s"' - % field_name) - field = new_field # update field to the new field type - fields.append(field) - return fields - - @classmethod - def _translate_field_name(cls, doc_cls, field, sep='.'): - """Translate a field attribute name to a database field name. - """ - parts = field.split(sep) - parts = [f.db_field for f in QuerySet._lookup_field(doc_cls, parts)] - return '.'.join(parts) - - @classmethod - def _transform_query(cls, _doc_cls=None, _field_operation=False, **query): - """Transform a query from Django-style format to Mongo format. - """ - operators = ['ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', - 'all', 'size', 'exists', 'not'] - geo_operators = ['within_distance', 'within_spherical_distance', 'within_box', 'within_polygon', 'near', 'near_sphere'] - match_operators = ['contains', 'icontains', 'startswith', - 'istartswith', 'endswith', 'iendswith', - 'exact', 'iexact'] - custom_operators = ['match'] - - mongo_query = {} - merge_query = defaultdict(list) - for key, value in query.items(): - if key == "__raw__": - mongo_query.update(value) - continue - - parts = key.split('__') - indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()] - parts = [part for part in parts if not part.isdigit()] - # Check for an operator and transform to mongo-style if there is - op = None - if parts[-1] in operators + match_operators + geo_operators + custom_operators: - op = parts.pop() - - negate = False - if parts[-1] == 'not': - parts.pop() - negate = True - - if _doc_cls: - # Switch field names to proper names [set in Field(name='foo')] - fields = QuerySet._lookup_field(_doc_cls, parts) - parts = [] - - cleaned_fields = [] - for field in fields: - append_field = True - if isinstance(field, basestring): - parts.append(field) - append_field = False - else: - 
parts.append(field.db_field) - if append_field: - cleaned_fields.append(field) - - # Convert value to proper value - field = cleaned_fields[-1] - - singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] - singular_ops += match_operators - if op in singular_ops: - if isinstance(field, basestring): - if op in match_operators and isinstance(value, basestring): - from mongoengine import StringField - value = StringField.prepare_query_value(op, value) - else: - value = field - else: - value = field.prepare_query_value(op, value) - elif op in ('in', 'nin', 'all', 'near'): - # 'in', 'nin' and 'all' require a list of values - value = [field.prepare_query_value(op, v) for v in value] - - # if op and op not in match_operators: - if op: - if op in geo_operators: - if op == "within_distance": - value = {'$within': {'$center': value}} - elif op == "within_spherical_distance": - value = {'$within': {'$centerSphere': value}} - elif op == "within_polygon": - value = {'$within': {'$polygon': value}} - elif op == "near": - value = {'$near': value} - elif op == "near_sphere": - value = {'$nearSphere': value} - elif op == 'within_box': - value = {'$within': {'$box': value}} - else: - raise NotImplementedError("Geo method '%s' has not " - "been implemented" % op) - elif op in custom_operators: - if op == 'match': - value = {"$elemMatch": value} - else: - NotImplementedError("Custom method '%s' has not " - "been implemented" % op) - elif op not in match_operators: - value = {'$' + op: value} - - if negate: - value = {'$not': value} - - for i, part in indices: - parts.insert(i, part) - key = '.'.join(parts) - if op is None or key not in mongo_query: - mongo_query[key] = value - elif key in mongo_query: - if key in mongo_query and isinstance(mongo_query[key], dict): - mongo_query[key].update(value) - else: - # Store for manually merging later - merge_query[key].append(value) - - # The queryset has been filter in such a way we must manually merge - for k, v in merge_query.items(): - 
merge_query[k].append(mongo_query[k]) - del mongo_query[k] - if isinstance(v, list): - value = [{k:val} for val in v] - if '$and' in mongo_query.keys(): - mongo_query['$and'].append(value) - else: - mongo_query['$and'] = value - - return mongo_query - def get(self, *q_objs, **query): """Retrieve the the matching object raising :class:`~mongoengine.queryset.MultipleObjectsReturned` or - `DocumentName.MultipleObjectsReturned` exception if multiple results and - :class:`~mongoengine.queryset.DoesNotExist` or `DocumentName.DoesNotExist` - if no results are found. + `DocumentName.MultipleObjectsReturned` exception if multiple results + and :class:`~mongoengine.queryset.DoesNotExist` or + `DocumentName.DoesNotExist` if no results are found. .. versionadded:: 0.3 """ @@ -910,7 +364,7 @@ class QuerySet(object): .. versionadded:: 0.5 """ - from document import Document + Document = _import_class('Document') if not write_options: write_options = {} @@ -1064,7 +518,7 @@ class QuerySet(object): .. versionadded:: 0.3 """ - from document import MapReduceDocument + MapReduceDocument = _import_class('MapReduceDocument') if not hasattr(self._collection, "map_reduce"): raise NotImplementedError("Requires MongoDB >= 1.7.1") @@ -1267,14 +721,16 @@ class QuerySet(object): .. 
versionadded:: 0.5 """ - self._loaded_fields = QueryFieldList(always_include=self._loaded_fields.always_include) + self._loaded_fields = QueryFieldList( + always_include=self._loaded_fields.always_include) return self def _fields_to_dbfields(self, fields): """Translate fields paths to its db equivalents""" ret = [] for field in fields: - field = ".".join(f.db_field for f in QuerySet._lookup_field(self._document, field.split('.'))) + field = ".".join(f.db_field for f in + self._document._lookup_field(field.split('.'))) ret.append(field) return ret @@ -1288,7 +744,8 @@ class QuerySet(object): """ key_list = [] for key in keys: - if not key: continue + if not key: + continue direction = pymongo.ASCENDING if key[0] == '-': direction = pymongo.DESCENDING @@ -1296,7 +753,7 @@ class QuerySet(object): key = key[1:] key = key.replace('__', '.') try: - key = QuerySet._translate_field_name(self._document, key) + key = self._document._translate_field_name(key) except: pass key_list.append((key, direction)) @@ -1389,107 +846,6 @@ class QuerySet(object): self._collection.remove(self._query, safe=safe) - @classmethod - def _transform_update(cls, _doc_cls=None, **update): - """Transform an update spec from Django-style format to Mongo format. 
- """ - operators = ['set', 'unset', 'inc', 'dec', 'pop', 'push', 'push_all', - 'pull', 'pull_all', 'add_to_set'] - match_operators = ['ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', - 'all', 'size', 'exists', 'not'] - - mongo_update = {} - for key, value in update.items(): - if key == "__raw__": - mongo_update.update(value) - continue - parts = key.split('__') - # Check for an operator and transform to mongo-style if there is - op = None - if parts[0] in operators: - op = parts.pop(0) - # Convert Pythonic names to Mongo equivalents - if op in ('push_all', 'pull_all'): - op = op.replace('_all', 'All') - elif op == 'dec': - # Support decrement by flipping a positive value's sign - # and using 'inc' - op = 'inc' - if value > 0: - value = -value - elif op == 'add_to_set': - op = op.replace('_to_set', 'ToSet') - - match = None - if parts[-1] in match_operators: - match = parts.pop() - - if _doc_cls: - # Switch field names to proper names [set in Field(name='foo')] - fields = QuerySet._lookup_field(_doc_cls, parts) - parts = [] - - cleaned_fields = [] - for field in fields: - append_field = True - if isinstance(field, basestring): - # Convert the S operator to $ - if field == 'S': - field = '$' - parts.append(field) - append_field = False - else: - parts.append(field.db_field) - if append_field: - cleaned_fields.append(field) - - # Convert value to proper value - field = cleaned_fields[-1] - - if op in (None, 'set', 'push', 'pull'): - if field.required or value is not None: - value = field.prepare_query_value(op, value) - elif op in ('pushAll', 'pullAll'): - value = [field.prepare_query_value(op, v) for v in value] - elif op == 'addToSet': - if isinstance(value, (list, tuple, set)): - value = [field.prepare_query_value(op, v) for v in value] - elif field.required or value is not None: - value = field.prepare_query_value(op, value) - - if match: - match = '$' + match - value = {match: value} - - key = '.'.join(parts) - - if not op: - raise InvalidQueryError("Updates 
must supply an operation " - "eg: set__FIELD=value") - - if 'pull' in op and '.' in key: - # Dot operators don't work on pull operations - # it uses nested dict syntax - if op == 'pullAll': - raise InvalidQueryError("pullAll operations only support " - "a single field depth") - - parts.reverse() - for key in parts: - value = {key: value} - elif op == 'addToSet' and isinstance(value, list): - value = {key: {"$each": value}} - else: - value = {key: value} - key = '$' + op - - if key not in mongo_update: - mongo_update[key] = value - elif key in mongo_update and isinstance(mongo_update[key], dict): - mongo_update[key].update(value) - - return mongo_update - def update(self, safe_update=True, upsert=False, multi=True, write_options=None, **update): """Perform an atomic update on the fields matched by the query. When ``safe_update`` is used, the number of affected documents is returned. @@ -1506,14 +862,9 @@ class QuerySet(object): if not write_options: write_options = {} - update = QuerySet._transform_update(self._document, **update) + update = transform.update(self._document, **update) query = self._query - # SERVER-5247 hack - remove_types = "_types" in query and ".$." in unicode(update) - if remove_types: - del query["_types"] - try: ret = self._collection.update(query, update, multi=multi, upsert=upsert, safe=safe_update, @@ -1537,30 +888,8 @@ class QuerySet(object): .. versionadded:: 0.2 """ - if not update: - raise OperationError("No update parameters, would remove data") - - if not write_options: - write_options = {} - update = QuerySet._transform_update(self._document, **update) - query = self._query - - # SERVER-5247 hack - remove_types = "_types" in query and ".$." 
in unicode(update) - if remove_types: - del query["_types"] - - try: - # Explicitly provide 'multi=False' to newer versions of PyMongo - # as the default may change to 'True' - ret = self._collection.update(query, update, multi=False, - upsert=upsert, safe=safe_update, - **write_options) - - if ret is not None and 'n' in ret: - return ret['n'] - except pymongo.errors.OperationFailure, e: - raise OperationError(u'Update failed [%s]' % unicode(e)) + return self.update(safe_update=True, upsert=False, multi=False, + write_options=None, **update) def __iter__(self): self.rewind() @@ -1611,14 +940,14 @@ class QuerySet(object): def field_sub(match): # Extract just the field name, and look up the field objects field_name = match.group(1).split('.') - fields = QuerySet._lookup_field(self._document, field_name) + fields = self._document._lookup_field(field_name) # Substitute the correct name for the field into the javascript return u'["%s"]' % fields[-1].db_field def field_path_sub(match): # Extract just the field name, and look up the field objects field_name = match.group(1).split('.') - fields = QuerySet._lookup_field(self._document, field_name) + fields = self._document._lookup_field(field_name) # Substitute the correct name for the field into the javascript return ".".join([f.db_field for f in fields]) @@ -1650,8 +979,7 @@ class QuerySet(object): """ code = self._sub_js_fields(code) - fields = [QuerySet._translate_field_name(self._document, f) - for f in fields] + fields = [self._document._translate_field_name(f) for f in fields] collection = self._document._get_collection_name() scope = { @@ -1925,63 +1253,5 @@ class QuerySet(object): @property def _dereference(self): if not self.__dereference: - from dereference import DeReference - self.__dereference = DeReference() # Cached + self.__dereference = _import_class('DeReference')() return self.__dereference - - -class QuerySetManager(object): - """ - The default QuerySet Manager. 
- - Custom QuerySet Manager functions can extend this class and users can - add extra queryset functionality. Any custom manager methods must accept a - :class:`~mongoengine.Document` class as its first argument, and a - :class:`~mongoengine.queryset.QuerySet` as its second argument. - - The method function should return a :class:`~mongoengine.queryset.QuerySet` - , probably the same one that was passed in, but modified in some way. - """ - - get_queryset = None - - def __init__(self, queryset_func=None): - if queryset_func: - self.get_queryset = queryset_func - self._collections = {} - - def __get__(self, instance, owner): - """Descriptor for instantiating a new QuerySet object when - Document.objects is accessed. - """ - if instance is not None: - # Document class being used rather than a document object - return self - - # owner is the document that contains the QuerySetManager - queryset_class = owner._meta.get('queryset_class') or QuerySet - queryset = queryset_class(owner, owner._get_collection()) - if self.get_queryset: - arg_count = self.get_queryset.func_code.co_argcount - if arg_count == 1: - queryset = self.get_queryset(queryset) - elif arg_count == 2: - queryset = self.get_queryset(owner, queryset) - else: - queryset = partial(self.get_queryset, owner, queryset) - return queryset - - -def queryset_manager(func): - """Decorator that allows you to define custom QuerySet managers on - :class:`~mongoengine.Document` classes. The manager must be a function that - accepts a :class:`~mongoengine.Document` class as its first argument, and a - :class:`~mongoengine.queryset.QuerySet` as its second argument. The method - function should return a :class:`~mongoengine.queryset.QuerySet`, probably - the same one that was passed in, but modified in some way. 
- """ - if func.func_code.co_argcount == 1: - import warnings - msg = 'Methods decorated with queryset_manager should take 2 arguments' - warnings.warn(msg, DeprecationWarning) - return QuerySetManager(func) diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py new file mode 100644 index 0000000..8ee84ee --- /dev/null +++ b/mongoengine/queryset/transform.py @@ -0,0 +1,237 @@ +from collections import defaultdict + +from mongoengine.common import _import_class +from mongoengine.errors import InvalidQueryError, LookUpError + +__all__ = ('query', 'update') + + +COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', + 'all', 'size', 'exists', 'not') +GEO_OPERATORS = ('within_distance', 'within_spherical_distance', + 'within_box', 'within_polygon', 'near', 'near_sphere') +STRING_OPERATORS = ('contains', 'icontains', 'startswith', + 'istartswith', 'endswith', 'iendswith', + 'exact', 'iexact') +CUSTOM_OPERATORS = ('match',) +MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS + + STRING_OPERATORS + CUSTOM_OPERATORS) + +UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push', + 'push_all', 'pull', 'pull_all', 'add_to_set') + + +def query(_doc_cls=None, _field_operation=False, **query): + """Transform a query from Django-style format to Mongo format. 
+ """ + mongo_query = {} + merge_query = defaultdict(list) + for key, value in query.items(): + if key == "__raw__": + mongo_query.update(value) + continue + + parts = key.split('__') + indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()] + parts = [part for part in parts if not part.isdigit()] + # Check for an operator and transform to mongo-style if there is + op = None + if parts[-1] in MATCH_OPERATORS: + op = parts.pop() + + negate = False + if parts[-1] == 'not': + parts.pop() + negate = True + + if _doc_cls: + # Switch field names to proper names [set in Field(name='foo')] + try: + fields = _doc_cls._lookup_field(parts) + except Exception, e: + raise InvalidQueryError(e) + parts = [] + + cleaned_fields = [] + for field in fields: + append_field = True + if isinstance(field, basestring): + parts.append(field) + append_field = False + else: + parts.append(field.db_field) + if append_field: + cleaned_fields.append(field) + + # Convert value to proper value + field = cleaned_fields[-1] + + singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] + singular_ops += STRING_OPERATORS + if op in singular_ops: + if isinstance(field, basestring): + if (op in STRING_OPERATORS and + isinstance(value, basestring)): + StringField = _import_class('StringField') + value = StringField.prepare_query_value(op, value) + else: + value = field + else: + value = field.prepare_query_value(op, value) + elif op in ('in', 'nin', 'all', 'near'): + # 'in', 'nin' and 'all' require a list of values + value = [field.prepare_query_value(op, v) for v in value] + + # if op and op not in COMPARISON_OPERATORS: + if op: + if op in GEO_OPERATORS: + if op == "within_distance": + value = {'$within': {'$center': value}} + elif op == "within_spherical_distance": + value = {'$within': {'$centerSphere': value}} + elif op == "within_polygon": + value = {'$within': {'$polygon': value}} + elif op == "near": + value = {'$near': value} + elif op == "near_sphere": + value = {'$nearSphere': value} 
+ elif op == 'within_box': + value = {'$within': {'$box': value}} + else: + raise NotImplementedError("Geo method '%s' has not " + "been implemented" % op) + elif op in CUSTOM_OPERATORS: + if op == 'match': + value = {"$elemMatch": value} + else: + NotImplementedError("Custom method '%s' has not " + "been implemented" % op) + elif op not in STRING_OPERATORS: + value = {'$' + op: value} + + if negate: + value = {'$not': value} + + for i, part in indices: + parts.insert(i, part) + key = '.'.join(parts) + if op is None or key not in mongo_query: + mongo_query[key] = value + elif key in mongo_query: + if key in mongo_query and isinstance(mongo_query[key], dict): + mongo_query[key].update(value) + else: + # Store for manually merging later + merge_query[key].append(value) + + # The queryset has been filter in such a way we must manually merge + for k, v in merge_query.items(): + merge_query[k].append(mongo_query[k]) + del mongo_query[k] + if isinstance(v, list): + value = [{k:val} for val in v] + if '$and' in mongo_query.keys(): + mongo_query['$and'].append(value) + else: + mongo_query['$and'] = value + + return mongo_query + + +def update(_doc_cls=None, **update): + """Transform an update spec from Django-style format to Mongo format. 
+ """ + mongo_update = {} + for key, value in update.items(): + if key == "__raw__": + mongo_update.update(value) + continue + parts = key.split('__') + # Check for an operator and transform to mongo-style if there is + op = None + if parts[0] in UPDATE_OPERATORS: + op = parts.pop(0) + # Convert Pythonic names to Mongo equivalents + if op in ('push_all', 'pull_all'): + op = op.replace('_all', 'All') + elif op == 'dec': + # Support decrement by flipping a positive value's sign + # and using 'inc' + op = 'inc' + if value > 0: + value = -value + elif op == 'add_to_set': + op = op.replace('_to_set', 'ToSet') + + match = None + if parts[-1] in COMPARISON_OPERATORS: + match = parts.pop() + + if _doc_cls: + # Switch field names to proper names [set in Field(name='foo')] + try: + fields = _doc_cls._lookup_field(parts) + except Exception, e: + raise InvalidQueryError(e) + parts = [] + + cleaned_fields = [] + for field in fields: + append_field = True + if isinstance(field, basestring): + # Convert the S operator to $ + if field == 'S': + field = '$' + parts.append(field) + append_field = False + else: + parts.append(field.db_field) + if append_field: + cleaned_fields.append(field) + + # Convert value to proper value + field = cleaned_fields[-1] + + if op in (None, 'set', 'push', 'pull'): + if field.required or value is not None: + value = field.prepare_query_value(op, value) + elif op in ('pushAll', 'pullAll'): + value = [field.prepare_query_value(op, v) for v in value] + elif op == 'addToSet': + if isinstance(value, (list, tuple, set)): + value = [field.prepare_query_value(op, v) for v in value] + elif field.required or value is not None: + value = field.prepare_query_value(op, value) + + if match: + match = '$' + match + value = {match: value} + + key = '.'.join(parts) + + if not op: + raise InvalidQueryError("Updates must supply an operation " + "eg: set__FIELD=value") + + if 'pull' in op and '.' 
in key: + # Dot operators don't work on pull operations + # it uses nested dict syntax + if op == 'pullAll': + raise InvalidQueryError("pullAll operations only support " + "a single field depth") + + parts.reverse() + for key in parts: + value = {key: value} + elif op == 'addToSet' and isinstance(value, list): + value = {key: {"$each": value}} + else: + value = {key: value} + key = '$' + op + + if key not in mongo_update: + mongo_update[key] = value + elif key in mongo_update and isinstance(mongo_update[key], dict): + mongo_update[key].update(value) + + return mongo_update diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py new file mode 100644 index 0000000..94d6a5e --- /dev/null +++ b/mongoengine/queryset/visitor.py @@ -0,0 +1,237 @@ +import copy + +from mongoengine.errors import InvalidQueryError +from mongoengine.python_support import product, reduce + +from mongoengine.queryset import transform + +__all__ = ('Q',) + + +class QNodeVisitor(object): + """Base visitor class for visiting Q-object nodes in a query tree. + """ + + def visit_combination(self, combination): + """Called by QCombination objects. + """ + return combination + + def visit_query(self, query): + """Called by (New)Q objects. + """ + return query + + +class SimplificationVisitor(QNodeVisitor): + """Simplifies query trees by combinging unnecessary 'and' connection nodes + into a single Q-object. + """ + + def visit_combination(self, combination): + if combination.operation == combination.AND: + # The simplification only applies to 'simple' queries + if all(isinstance(node, Q) for node in combination.children): + queries = [node.query for node in combination.children] + return Q(**self._query_conjunction(queries)) + return combination + + def _query_conjunction(self, queries): + """Merges query dicts - effectively &ing them together. 
+ """ + query_ops = set() + combined_query = {} + for query in queries: + ops = set(query.keys()) + # Make sure that the same operation isn't applied more than once + # to a single field + intersection = ops.intersection(query_ops) + if intersection: + msg = 'Duplicate query conditions: ' + raise InvalidQueryError(msg + ', '.join(intersection)) + + query_ops.update(ops) + combined_query.update(copy.deepcopy(query)) + return combined_query + + +class QueryTreeTransformerVisitor(QNodeVisitor): + """Transforms the query tree in to a form that may be used with MongoDB. + """ + + def visit_combination(self, combination): + if combination.operation == combination.AND: + # MongoDB doesn't allow us to have too many $or operations in our + # queries, so the aim is to move the ORs up the tree to one + # 'master' $or. Firstly, we must find all the necessary parts (part + # of an AND combination or just standard Q object), and store them + # separately from the OR parts. + or_groups = [] + and_parts = [] + for node in combination.children: + if isinstance(node, QCombination): + if node.operation == node.OR: + # Any of the children in an $or component may cause + # the query to succeed + or_groups.append(node.children) + elif node.operation == node.AND: + and_parts.append(node) + elif isinstance(node, Q): + and_parts.append(node) + + # Now we combine the parts into a usable query. AND together all of + # the necessary parts. Then for each $or part, create a new query + # that ANDs the necessary part with the $or part. + clauses = [] + for or_group in product(*or_groups): + q_object = reduce(lambda a, b: a & b, and_parts, Q()) + q_object = reduce(lambda a, b: a & b, or_group, q_object) + clauses.append(q_object) + # Finally, $or the generated clauses in to one query. Each of the + # clauses is sufficient for the query to succeed. 
+ return reduce(lambda a, b: a | b, clauses, Q()) + + if combination.operation == combination.OR: + children = [] + # Crush any nested ORs in to this combination as MongoDB doesn't + # support nested $or operations + for node in combination.children: + if (isinstance(node, QCombination) and + node.operation == combination.OR): + children += node.children + else: + children.append(node) + combination.children = children + + return combination + + +class QueryCompilerVisitor(QNodeVisitor): + """Compiles the nodes in a query tree to a PyMongo-compatible query + dictionary. + """ + + def __init__(self, document): + self.document = document + + def visit_combination(self, combination): + if combination.operation == combination.OR: + return {'$or': combination.children} + elif combination.operation == combination.AND: + return self._mongo_query_conjunction(combination.children) + return combination + + def visit_query(self, query): + return transform.query(self.document, **query.query) + + def _mongo_query_conjunction(self, queries): + """Merges Mongo query dicts - effectively &ing them together. + """ + combined_query = {} + for query in queries: + for field, ops in query.items(): + if field not in combined_query: + combined_query[field] = ops + else: + # The field is already present in the query the only way + # we can merge is if both the existing value and the new + # value are operation dicts, reject anything else + if (not isinstance(combined_query[field], dict) or + not isinstance(ops, dict)): + message = 'Conflicting values for ' + field + raise InvalidQueryError(message) + + current_ops = set(combined_query[field].keys()) + new_ops = set(ops.keys()) + # Make sure that the same operation isn't applied more than + # once to a single field + intersection = current_ops.intersection(new_ops) + if intersection: + msg = 'Duplicate query conditions: ' + raise InvalidQueryError(msg + ', '.join(intersection)) + + # Right! We've got two non-overlapping dicts of operations! 
+ combined_query[field].update(copy.deepcopy(ops)) + return combined_query + + +class QNode(object): + """Base class for nodes in query trees. + """ + + AND = 0 + OR = 1 + + def to_query(self, document): + query = self.accept(SimplificationVisitor()) + query = query.accept(QueryTreeTransformerVisitor()) + query = query.accept(QueryCompilerVisitor(document)) + return query + + def accept(self, visitor): + raise NotImplementedError + + def _combine(self, other, operation): + """Combine this node with another node into a QCombination object. + """ + if getattr(other, 'empty', True): + return self + + if self.empty: + return other + + return QCombination(operation, [self, other]) + + @property + def empty(self): + return False + + def __or__(self, other): + return self._combine(other, self.OR) + + def __and__(self, other): + return self._combine(other, self.AND) + + +class QCombination(QNode): + """Represents the combination of several conditions by a given logical + operator. + """ + + def __init__(self, operation, children): + self.operation = operation + self.children = [] + for node in children: + # If the child is a combination of the same type, we can merge its + # children directly into this combinations children + if isinstance(node, QCombination) and node.operation == operation: + self.children += node.children + else: + self.children.append(node) + + def accept(self, visitor): + for i in range(len(self.children)): + if isinstance(self.children[i], QNode): + self.children[i] = self.children[i].accept(visitor) + + return visitor.visit_combination(self) + + @property + def empty(self): + return not bool(self.children) + + +class Q(QNode): + """A simple query object, used in a query tree to build up more complex + query structures. 
+ """ + + def __init__(self, **query): + self.query = query + + def accept(self, visitor): + return visitor.visit_query(self) + + @property + def empty(self): + return not bool(self.query) diff --git a/setup.cfg b/setup.cfg index d95a917..3f3faa8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -8,4 +8,4 @@ detailed-errors = 1 #cover-package = mongoengine py3where = build where = tests -#tests = test_bugfix.py \ No newline at end of file +#tests = document/__init__.py \ No newline at end of file diff --git a/tests/__init__.py b/tests/__init__.py index e69de29..f2a43b0 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -0,0 +1,2 @@ +from .all_warnings import AllWarnings +from .document import * \ No newline at end of file diff --git a/tests/test_all_warnings.py b/tests/all_warnings/__init__.py similarity index 91% rename from tests/test_all_warnings.py rename to tests/all_warnings/__init__.py index 9b38fa6..72de822 100644 --- a/tests/test_all_warnings.py +++ b/tests/all_warnings/__init__.py @@ -1,11 +1,19 @@ +""" +This test has been put into a module. This is because it tests warnings that +only get triggered on first hit. This way we can ensure its imported into the +top level and called first by the test suite. 
+""" + import unittest import warnings from mongoengine import * -from mongoengine.tests import query_counter -class TestWarnings(unittest.TestCase): +__all__ = ('AllWarnings', ) + + +class AllWarnings(unittest.TestCase): def setUp(self): conn = connect(db='mongoenginetest') diff --git a/tests/document/__init__.py b/tests/document/__init__.py new file mode 100644 index 0000000..1ef2520 --- /dev/null +++ b/tests/document/__init__.py @@ -0,0 +1,11 @@ +# TODO EXPLICT IMPORTS + +from class_methods import * +from delta import * +from dynamic import * +from indexes import * +from inheritance import * +from instance import * + +if __name__ == '__main__': + unittest.main() diff --git a/tests/document/class_methods.py b/tests/document/class_methods.py new file mode 100644 index 0000000..8050998 --- /dev/null +++ b/tests/document/class_methods.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- +from __future__ import with_statement +import unittest + +from mongoengine import * + +from mongoengine.queryset import NULLIFY +from mongoengine.connection import get_db + +__all__ = ("ClassMethodsTest", ) + + +class ClassMethodsTest(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + self.db = get_db() + + class Person(Document): + name = StringField() + age = IntField() + + non_field = True + + meta = {"allow_inheritance": True} + + self.Person = Person + + def tearDown(self): + for collection in self.db.collection_names(): + if 'system.' in collection: + continue + self.db.drop_collection(collection) + + def test_definition(self): + """Ensure that document may be defined using fields. + """ + self.assertEqual(['age', 'name', 'id'], self.Person._fields.keys()) + self.assertEqual([IntField, StringField, ObjectIdField], + [x.__class__ for x in self.Person._fields.values()]) + + def test_get_db(self): + """Ensure that get_db returns the expected db. 
+ """ + db = self.Person._get_db() + self.assertEqual(self.db, db) + + def test_get_collection_name(self): + """Ensure that get_collection_name returns the expected collection + name. + """ + collection_name = 'person' + self.assertEqual(collection_name, self.Person._get_collection_name()) + + def test_get_collection(self): + """Ensure that get_collection returns the expected collection. + """ + collection_name = 'person' + collection = self.Person._get_collection() + self.assertEqual(self.db[collection_name], collection) + + def test_drop_collection(self): + """Ensure that the collection may be dropped from the database. + """ + collection_name = 'person' + self.Person(name='Test').save() + self.assertTrue(collection_name in self.db.collection_names()) + + self.Person.drop_collection() + self.assertFalse(collection_name in self.db.collection_names()) + + def test_register_delete_rule(self): + """Ensure that register delete rule adds a delete rule to the document + meta. + """ + class Job(Document): + employee = ReferenceField(self.Person) + + self.assertEqual(self.Person._meta.get('delete_rules'), None) + + self.Person.register_delete_rule(Job, 'employee', NULLIFY) + self.assertEqual(self.Person._meta['delete_rules'], + {(Job, 'employee'): NULLIFY}) + + def test_collection_naming(self): + """Ensure that a collection with a specified name may be used. 
+ """ + + class DefaultNamingTest(Document): + pass + self.assertEqual('default_naming_test', + DefaultNamingTest._get_collection_name()) + + class CustomNamingTest(Document): + meta = {'collection': 'pimp_my_collection'} + + self.assertEqual('pimp_my_collection', + CustomNamingTest._get_collection_name()) + + class DynamicNamingTest(Document): + meta = {'collection': lambda c: "DYNAMO"} + self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name()) + + # Use Abstract class to handle backwards compatibility + class BaseDocument(Document): + meta = { + 'abstract': True, + 'collection': lambda c: c.__name__.lower() + } + + class OldNamingConvention(BaseDocument): + pass + self.assertEqual('oldnamingconvention', + OldNamingConvention._get_collection_name()) + + class InheritedAbstractNamingTest(BaseDocument): + meta = {'collection': 'wibble'} + self.assertEqual('wibble', + InheritedAbstractNamingTest._get_collection_name()) + + # Mixin tests + class BaseMixin(object): + meta = { + 'collection': lambda c: c.__name__.lower() + } + + class OldMixinNamingConvention(Document, BaseMixin): + pass + self.assertEqual('oldmixinnamingconvention', + OldMixinNamingConvention._get_collection_name()) + + class BaseMixin(object): + meta = { + 'collection': lambda c: c.__name__.lower() + } + + class BaseDocument(Document, BaseMixin): + meta = {'allow_inheritance': True} + + class MyDocument(BaseDocument): + pass + + self.assertEqual('basedocument', MyDocument._get_collection_name()) + + def test_custom_collection_name_operations(self): + """Ensure that a collection with a specified name is used as expected. 
+ """ + collection_name = 'personCollTest' + + class Person(Document): + name = StringField() + meta = {'collection': collection_name} + + Person(name="Test User").save() + self.assertTrue(collection_name in self.db.collection_names()) + + user_obj = self.db[collection_name].find_one() + self.assertEqual(user_obj['name'], "Test User") + + user_obj = Person.objects[0] + self.assertEqual(user_obj.name, "Test User") + + Person.drop_collection() + self.assertFalse(collection_name in self.db.collection_names()) + + def test_collection_name_and_primary(self): + """Ensure that a collection with a specified name may be used. + """ + + class Person(Document): + name = StringField(primary_key=True) + meta = {'collection': 'app'} + + Person(name="Test User").save() + + user_obj = Person.objects.first() + self.assertEqual(user_obj.name, "Test User") + + Person.drop_collection() + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/document/delta.py b/tests/document/delta.py new file mode 100644 index 0000000..f8a071d --- /dev/null +++ b/tests/document/delta.py @@ -0,0 +1,688 @@ +# -*- coding: utf-8 -*- +import unittest + +from mongoengine import * +from mongoengine.connection import get_db + +__all__ = ("DeltaTest",) + + +class DeltaTest(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + self.db = get_db() + + class Person(Document): + name = StringField() + age = IntField() + + non_field = True + + meta = {"allow_inheritance": True} + + self.Person = Person + + def tearDown(self): + for collection in self.db.collection_names(): + if 'system.' 
in collection: + continue + self.db.drop_collection(collection) + + def test_delta(self): + self.delta(Document) + self.delta(DynamicDocument) + + def delta(self, DocClass): + + class Doc(DocClass): + string_field = StringField() + int_field = IntField() + dict_field = DictField() + list_field = ListField() + + Doc.drop_collection() + doc = Doc() + doc.save() + + doc = Doc.objects.first() + self.assertEqual(doc._get_changed_fields(), []) + self.assertEqual(doc._delta(), ({}, {})) + + doc.string_field = 'hello' + self.assertEqual(doc._get_changed_fields(), ['string_field']) + self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {})) + + doc._changed_fields = [] + doc.int_field = 1 + self.assertEqual(doc._get_changed_fields(), ['int_field']) + self.assertEqual(doc._delta(), ({'int_field': 1}, {})) + + doc._changed_fields = [] + dict_value = {'hello': 'world', 'ping': 'pong'} + doc.dict_field = dict_value + self.assertEqual(doc._get_changed_fields(), ['dict_field']) + self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {})) + + doc._changed_fields = [] + list_value = ['1', 2, {'hello': 'world'}] + doc.list_field = list_value + self.assertEqual(doc._get_changed_fields(), ['list_field']) + self.assertEqual(doc._delta(), ({'list_field': list_value}, {})) + + # Test unsetting + doc._changed_fields = [] + doc.dict_field = {} + self.assertEqual(doc._get_changed_fields(), ['dict_field']) + self.assertEqual(doc._delta(), ({}, {'dict_field': 1})) + + doc._changed_fields = [] + doc.list_field = [] + self.assertEqual(doc._get_changed_fields(), ['list_field']) + self.assertEqual(doc._delta(), ({}, {'list_field': 1})) + + def test_delta_recursive(self): + self.delta_recursive(Document, EmbeddedDocument) + self.delta_recursive(DynamicDocument, EmbeddedDocument) + self.delta_recursive(Document, DynamicEmbeddedDocument) + self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument) + + def delta_recursive(self, DocClass, EmbeddedClass): + + class 
Embedded(EmbeddedClass): + string_field = StringField() + int_field = IntField() + dict_field = DictField() + list_field = ListField() + + class Doc(DocClass): + string_field = StringField() + int_field = IntField() + dict_field = DictField() + list_field = ListField() + embedded_field = EmbeddedDocumentField(Embedded) + + Doc.drop_collection() + doc = Doc() + doc.save() + + doc = Doc.objects.first() + self.assertEqual(doc._get_changed_fields(), []) + self.assertEqual(doc._delta(), ({}, {})) + + embedded_1 = Embedded() + embedded_1.string_field = 'hello' + embedded_1.int_field = 1 + embedded_1.dict_field = {'hello': 'world'} + embedded_1.list_field = ['1', 2, {'hello': 'world'}] + doc.embedded_field = embedded_1 + + self.assertEqual(doc._get_changed_fields(), ['embedded_field']) + + embedded_delta = { + 'string_field': 'hello', + 'int_field': 1, + 'dict_field': {'hello': 'world'}, + 'list_field': ['1', 2, {'hello': 'world'}] + } + self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) + embedded_delta.update({ + '_cls': 'Embedded', + }) + self.assertEqual(doc._delta(), + ({'embedded_field': embedded_delta}, {})) + + doc.save() + doc = doc.reload(10) + + doc.embedded_field.dict_field = {} + self.assertEqual(doc._get_changed_fields(), + ['embedded_field.dict_field']) + self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1})) + self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1})) + doc.save() + doc = doc.reload(10) + self.assertEqual(doc.embedded_field.dict_field, {}) + + doc.embedded_field.list_field = [] + self.assertEqual(doc._get_changed_fields(), + ['embedded_field.list_field']) + self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1})) + self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1})) + doc.save() + doc = doc.reload(10) + self.assertEqual(doc.embedded_field.list_field, []) + + embedded_2 = Embedded() + embedded_2.string_field = 'hello' + embedded_2.int_field = 1 + 
embedded_2.dict_field = {'hello': 'world'} + embedded_2.list_field = ['1', 2, {'hello': 'world'}] + + doc.embedded_field.list_field = ['1', 2, embedded_2] + self.assertEqual(doc._get_changed_fields(), + ['embedded_field.list_field']) + self.assertEqual(doc.embedded_field._delta(), ({ + 'list_field': ['1', 2, { + '_cls': 'Embedded', + 'string_field': 'hello', + 'dict_field': {'hello': 'world'}, + 'int_field': 1, + 'list_field': ['1', 2, {'hello': 'world'}], + }] + }, {})) + + self.assertEqual(doc._delta(), ({ + 'embedded_field.list_field': ['1', 2, { + '_cls': 'Embedded', + 'string_field': 'hello', + 'dict_field': {'hello': 'world'}, + 'int_field': 1, + 'list_field': ['1', 2, {'hello': 'world'}], + }] + }, {})) + doc.save() + doc = doc.reload(10) + + self.assertEqual(doc.embedded_field.list_field[0], '1') + self.assertEqual(doc.embedded_field.list_field[1], 2) + for k in doc.embedded_field.list_field[2]._fields: + self.assertEqual(doc.embedded_field.list_field[2][k], + embedded_2[k]) + + doc.embedded_field.list_field[2].string_field = 'world' + self.assertEqual(doc._get_changed_fields(), + ['embedded_field.list_field.2.string_field']) + self.assertEqual(doc.embedded_field._delta(), + ({'list_field.2.string_field': 'world'}, {})) + self.assertEqual(doc._delta(), + ({'embedded_field.list_field.2.string_field': 'world'}, {})) + doc.save() + doc = doc.reload(10) + self.assertEqual(doc.embedded_field.list_field[2].string_field, + 'world') + + # Test multiple assignments + doc.embedded_field.list_field[2].string_field = 'hello world' + doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] + self.assertEqual(doc._get_changed_fields(), + ['embedded_field.list_field']) + self.assertEqual(doc.embedded_field._delta(), ({ + 'list_field': ['1', 2, { + '_cls': 'Embedded', + 'string_field': 'hello world', + 'int_field': 1, + 'list_field': ['1', 2, {'hello': 'world'}], + 'dict_field': {'hello': 'world'}}]}, {})) + self.assertEqual(doc._delta(), ({ + 
'embedded_field.list_field': ['1', 2, { + '_cls': 'Embedded', + 'string_field': 'hello world', + 'int_field': 1, + 'list_field': ['1', 2, {'hello': 'world'}], + 'dict_field': {'hello': 'world'}} + ]}, {})) + doc.save() + doc = doc.reload(10) + self.assertEqual(doc.embedded_field.list_field[2].string_field, + 'hello world') + + # Test list native methods + doc.embedded_field.list_field[2].list_field.pop(0) + self.assertEqual(doc._delta(), + ({'embedded_field.list_field.2.list_field': + [2, {'hello': 'world'}]}, {})) + doc.save() + doc = doc.reload(10) + + doc.embedded_field.list_field[2].list_field.append(1) + self.assertEqual(doc._delta(), + ({'embedded_field.list_field.2.list_field': + [2, {'hello': 'world'}, 1]}, {})) + doc.save() + doc = doc.reload(10) + self.assertEqual(doc.embedded_field.list_field[2].list_field, + [2, {'hello': 'world'}, 1]) + + doc.embedded_field.list_field[2].list_field.sort(key=str) + doc.save() + doc = doc.reload(10) + self.assertEqual(doc.embedded_field.list_field[2].list_field, + [1, 2, {'hello': 'world'}]) + + del(doc.embedded_field.list_field[2].list_field[2]['hello']) + self.assertEqual(doc._delta(), + ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {})) + doc.save() + doc = doc.reload(10) + + del(doc.embedded_field.list_field[2].list_field) + self.assertEqual(doc._delta(), + ({}, {'embedded_field.list_field.2.list_field': 1})) + + doc.save() + doc = doc.reload(10) + + doc.dict_field['Embedded'] = embedded_1 + doc.save() + doc = doc.reload(10) + + doc.dict_field['Embedded'].string_field = 'Hello World' + self.assertEqual(doc._get_changed_fields(), + ['dict_field.Embedded.string_field']) + self.assertEqual(doc._delta(), + ({'dict_field.Embedded.string_field': 'Hello World'}, {})) + + def test_circular_reference_deltas(self): + self.circular_reference_deltas(Document, Document) + self.circular_reference_deltas(Document, DynamicDocument) + self.circular_reference_deltas(DynamicDocument, Document) + 
self.circular_reference_deltas(DynamicDocument, DynamicDocument) + + def circular_reference_deltas(self, DocClass1, DocClass2): + + class Person(DocClass1): + name = StringField() + owns = ListField(ReferenceField('Organization')) + + class Organization(DocClass2): + name = StringField() + owner = ReferenceField('Person') + + person = Person(name="owner") + person.save() + organization = Organization(name="company") + organization.save() + + person.owns.append(organization) + organization.owner = person + + person.save() + organization.save() + + p = Person.objects[0].select_related() + o = Organization.objects.first() + self.assertEqual(p.owns[0], o) + self.assertEqual(o.owner, p) + + def test_circular_reference_deltas_2(self): + self.circular_reference_deltas_2(Document, Document) + self.circular_reference_deltas_2(Document, DynamicDocument) + self.circular_reference_deltas_2(DynamicDocument, Document) + self.circular_reference_deltas_2(DynamicDocument, DynamicDocument) + + def circular_reference_deltas_2(self, DocClass1, DocClass2): + + class Person(DocClass1): + name = StringField() + owns = ListField(ReferenceField('Organization')) + employer = ReferenceField('Organization') + + class Organization(DocClass2): + name = StringField() + owner = ReferenceField('Person') + employees = ListField(ReferenceField('Person')) + + Person.drop_collection() + Organization.drop_collection() + + person = Person(name="owner") + person.save() + + employee = Person(name="employee") + employee.save() + + organization = Organization(name="company") + organization.save() + + person.owns.append(organization) + organization.owner = person + + organization.employees.append(employee) + employee.employer = organization + + person.save() + organization.save() + employee.save() + + p = Person.objects.get(name="owner") + e = Person.objects.get(name="employee") + o = Organization.objects.first() + + self.assertEqual(p.owns[0], o) + self.assertEqual(o.owner, p) + self.assertEqual(e.employer, 
o) + + def test_delta_db_field(self): + self.delta_db_field(Document) + self.delta_db_field(DynamicDocument) + + def delta_db_field(self, DocClass): + + class Doc(DocClass): + string_field = StringField(db_field='db_string_field') + int_field = IntField(db_field='db_int_field') + dict_field = DictField(db_field='db_dict_field') + list_field = ListField(db_field='db_list_field') + + Doc.drop_collection() + doc = Doc() + doc.save() + + doc = Doc.objects.first() + self.assertEqual(doc._get_changed_fields(), []) + self.assertEqual(doc._delta(), ({}, {})) + + doc.string_field = 'hello' + self.assertEqual(doc._get_changed_fields(), ['db_string_field']) + self.assertEqual(doc._delta(), ({'db_string_field': 'hello'}, {})) + + doc._changed_fields = [] + doc.int_field = 1 + self.assertEqual(doc._get_changed_fields(), ['db_int_field']) + self.assertEqual(doc._delta(), ({'db_int_field': 1}, {})) + + doc._changed_fields = [] + dict_value = {'hello': 'world', 'ping': 'pong'} + doc.dict_field = dict_value + self.assertEqual(doc._get_changed_fields(), ['db_dict_field']) + self.assertEqual(doc._delta(), ({'db_dict_field': dict_value}, {})) + + doc._changed_fields = [] + list_value = ['1', 2, {'hello': 'world'}] + doc.list_field = list_value + self.assertEqual(doc._get_changed_fields(), ['db_list_field']) + self.assertEqual(doc._delta(), ({'db_list_field': list_value}, {})) + + # Test unsetting + doc._changed_fields = [] + doc.dict_field = {} + self.assertEqual(doc._get_changed_fields(), ['db_dict_field']) + self.assertEqual(doc._delta(), ({}, {'db_dict_field': 1})) + + doc._changed_fields = [] + doc.list_field = [] + self.assertEqual(doc._get_changed_fields(), ['db_list_field']) + self.assertEqual(doc._delta(), ({}, {'db_list_field': 1})) + + # Test it saves that data + doc = Doc() + doc.save() + + doc.string_field = 'hello' + doc.int_field = 1 + doc.dict_field = {'hello': 'world'} + doc.list_field = ['1', 2, {'hello': 'world'}] + doc.save() + doc = doc.reload(10) + + 
self.assertEqual(doc.string_field, 'hello') + self.assertEqual(doc.int_field, 1) + self.assertEqual(doc.dict_field, {'hello': 'world'}) + self.assertEqual(doc.list_field, ['1', 2, {'hello': 'world'}]) + + def test_delta_recursive_db_field(self): + self.delta_recursive_db_field(Document, EmbeddedDocument) + self.delta_recursive_db_field(Document, DynamicEmbeddedDocument) + self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument) + self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument) + + def delta_recursive_db_field(self, DocClass, EmbeddedClass): + + class Embedded(EmbeddedClass): + string_field = StringField(db_field='db_string_field') + int_field = IntField(db_field='db_int_field') + dict_field = DictField(db_field='db_dict_field') + list_field = ListField(db_field='db_list_field') + + class Doc(DocClass): + string_field = StringField(db_field='db_string_field') + int_field = IntField(db_field='db_int_field') + dict_field = DictField(db_field='db_dict_field') + list_field = ListField(db_field='db_list_field') + embedded_field = EmbeddedDocumentField(Embedded, + db_field='db_embedded_field') + + Doc.drop_collection() + doc = Doc() + doc.save() + + doc = Doc.objects.first() + self.assertEqual(doc._get_changed_fields(), []) + self.assertEqual(doc._delta(), ({}, {})) + + embedded_1 = Embedded() + embedded_1.string_field = 'hello' + embedded_1.int_field = 1 + embedded_1.dict_field = {'hello': 'world'} + embedded_1.list_field = ['1', 2, {'hello': 'world'}] + doc.embedded_field = embedded_1 + + self.assertEqual(doc._get_changed_fields(), ['db_embedded_field']) + + embedded_delta = { + 'db_string_field': 'hello', + 'db_int_field': 1, + 'db_dict_field': {'hello': 'world'}, + 'db_list_field': ['1', 2, {'hello': 'world'}] + } + self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) + embedded_delta.update({ + '_cls': 'Embedded', + }) + self.assertEqual(doc._delta(), + ({'db_embedded_field': embedded_delta}, {})) + + doc.save() + doc = 
doc.reload(10) + + doc.embedded_field.dict_field = {} + self.assertEqual(doc._get_changed_fields(), + ['db_embedded_field.db_dict_field']) + self.assertEqual(doc.embedded_field._delta(), + ({}, {'db_dict_field': 1})) + self.assertEqual(doc._delta(), + ({}, {'db_embedded_field.db_dict_field': 1})) + doc.save() + doc = doc.reload(10) + self.assertEqual(doc.embedded_field.dict_field, {}) + + doc.embedded_field.list_field = [] + self.assertEqual(doc._get_changed_fields(), + ['db_embedded_field.db_list_field']) + self.assertEqual(doc.embedded_field._delta(), + ({}, {'db_list_field': 1})) + self.assertEqual(doc._delta(), + ({}, {'db_embedded_field.db_list_field': 1})) + doc.save() + doc = doc.reload(10) + self.assertEqual(doc.embedded_field.list_field, []) + + embedded_2 = Embedded() + embedded_2.string_field = 'hello' + embedded_2.int_field = 1 + embedded_2.dict_field = {'hello': 'world'} + embedded_2.list_field = ['1', 2, {'hello': 'world'}] + + doc.embedded_field.list_field = ['1', 2, embedded_2] + self.assertEqual(doc._get_changed_fields(), + ['db_embedded_field.db_list_field']) + self.assertEqual(doc.embedded_field._delta(), ({ + 'db_list_field': ['1', 2, { + '_cls': 'Embedded', + 'db_string_field': 'hello', + 'db_dict_field': {'hello': 'world'}, + 'db_int_field': 1, + 'db_list_field': ['1', 2, {'hello': 'world'}], + }] + }, {})) + + self.assertEqual(doc._delta(), ({ + 'db_embedded_field.db_list_field': ['1', 2, { + '_cls': 'Embedded', + 'db_string_field': 'hello', + 'db_dict_field': {'hello': 'world'}, + 'db_int_field': 1, + 'db_list_field': ['1', 2, {'hello': 'world'}], + }] + }, {})) + doc.save() + doc = doc.reload(10) + + self.assertEqual(doc.embedded_field.list_field[0], '1') + self.assertEqual(doc.embedded_field.list_field[1], 2) + for k in doc.embedded_field.list_field[2]._fields: + self.assertEqual(doc.embedded_field.list_field[2][k], + embedded_2[k]) + + doc.embedded_field.list_field[2].string_field = 'world' + self.assertEqual(doc._get_changed_fields(), + 
['db_embedded_field.db_list_field.2.db_string_field']) + self.assertEqual(doc.embedded_field._delta(), + ({'db_list_field.2.db_string_field': 'world'}, {})) + self.assertEqual(doc._delta(), + ({'db_embedded_field.db_list_field.2.db_string_field': 'world'}, + {})) + doc.save() + doc = doc.reload(10) + self.assertEqual(doc.embedded_field.list_field[2].string_field, + 'world') + + # Test multiple assignments + doc.embedded_field.list_field[2].string_field = 'hello world' + doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] + self.assertEqual(doc._get_changed_fields(), + ['db_embedded_field.db_list_field']) + self.assertEqual(doc.embedded_field._delta(), ({ + 'db_list_field': ['1', 2, { + '_cls': 'Embedded', + 'db_string_field': 'hello world', + 'db_int_field': 1, + 'db_list_field': ['1', 2, {'hello': 'world'}], + 'db_dict_field': {'hello': 'world'}}]}, {})) + self.assertEqual(doc._delta(), ({ + 'db_embedded_field.db_list_field': ['1', 2, { + '_cls': 'Embedded', + 'db_string_field': 'hello world', + 'db_int_field': 1, + 'db_list_field': ['1', 2, {'hello': 'world'}], + 'db_dict_field': {'hello': 'world'}} + ]}, {})) + doc.save() + doc = doc.reload(10) + self.assertEqual(doc.embedded_field.list_field[2].string_field, + 'hello world') + + # Test list native methods + doc.embedded_field.list_field[2].list_field.pop(0) + self.assertEqual(doc._delta(), + ({'db_embedded_field.db_list_field.2.db_list_field': + [2, {'hello': 'world'}]}, {})) + doc.save() + doc = doc.reload(10) + + doc.embedded_field.list_field[2].list_field.append(1) + self.assertEqual(doc._delta(), + ({'db_embedded_field.db_list_field.2.db_list_field': + [2, {'hello': 'world'}, 1]}, {})) + doc.save() + doc = doc.reload(10) + self.assertEqual(doc.embedded_field.list_field[2].list_field, + [2, {'hello': 'world'}, 1]) + + doc.embedded_field.list_field[2].list_field.sort(key=str) + doc.save() + doc = doc.reload(10) + self.assertEqual(doc.embedded_field.list_field[2].list_field, + [1, 2, {'hello': 
'world'}]) + + del(doc.embedded_field.list_field[2].list_field[2]['hello']) + self.assertEqual(doc._delta(), + ({'db_embedded_field.db_list_field.2.db_list_field': + [1, 2, {}]}, {})) + doc.save() + doc = doc.reload(10) + + del(doc.embedded_field.list_field[2].list_field) + self.assertEqual(doc._delta(), ({}, + {'db_embedded_field.db_list_field.2.db_list_field': 1})) + + def test_delta_for_dynamic_documents(self): + class Person(DynamicDocument): + name = StringField() + meta = {'allow_inheritance': True} + + Person.drop_collection() + + p = Person(name="James", age=34) + self.assertEqual(p._delta(), ({'age': 34, 'name': 'James', + '_cls': 'Person'}, {})) + + p.doc = 123 + del(p.doc) + self.assertEqual(p._delta(), ({'age': 34, 'name': 'James', + '_cls': 'Person'}, {'doc': 1})) + + p = Person() + p.name = "Dean" + p.age = 22 + p.save() + + p.age = 24 + self.assertEqual(p.age, 24) + self.assertEqual(p._get_changed_fields(), ['age']) + self.assertEqual(p._delta(), ({'age': 24}, {})) + + p = self.Person.objects(age=22).get() + p.age = 24 + self.assertEqual(p.age, 24) + self.assertEqual(p._get_changed_fields(), ['age']) + self.assertEqual(p._delta(), ({'age': 24}, {})) + + p.save() + self.assertEqual(1, self.Person.objects(age=24).count()) + + def test_dynamic_delta(self): + + class Doc(DynamicDocument): + pass + + Doc.drop_collection() + doc = Doc() + doc.save() + + doc = Doc.objects.first() + self.assertEqual(doc._get_changed_fields(), []) + self.assertEqual(doc._delta(), ({}, {})) + + doc.string_field = 'hello' + self.assertEqual(doc._get_changed_fields(), ['string_field']) + self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {})) + + doc._changed_fields = [] + doc.int_field = 1 + self.assertEqual(doc._get_changed_fields(), ['int_field']) + self.assertEqual(doc._delta(), ({'int_field': 1}, {})) + + doc._changed_fields = [] + dict_value = {'hello': 'world', 'ping': 'pong'} + doc.dict_field = dict_value + self.assertEqual(doc._get_changed_fields(), 
['dict_field']) + self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {})) + + doc._changed_fields = [] + list_value = ['1', 2, {'hello': 'world'}] + doc.list_field = list_value + self.assertEqual(doc._get_changed_fields(), ['list_field']) + self.assertEqual(doc._delta(), ({'list_field': list_value}, {})) + + # Test unsetting + doc._changed_fields = [] + doc.dict_field = {} + self.assertEqual(doc._get_changed_fields(), ['dict_field']) + self.assertEqual(doc._delta(), ({}, {'dict_field': 1})) + + doc._changed_fields = [] + doc.list_field = [] + self.assertEqual(doc._get_changed_fields(), ['list_field']) + self.assertEqual(doc._delta(), ({}, {'list_field': 1})) diff --git a/tests/document/dynamic.py b/tests/document/dynamic.py new file mode 100644 index 0000000..ef27917 --- /dev/null +++ b/tests/document/dynamic.py @@ -0,0 +1,270 @@ +import unittest + +from mongoengine import * +from mongoengine.connection import get_db + +__all__ = ("DynamicTest", ) + + +class DynamicTest(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + self.db = get_db() + + class Person(DynamicDocument): + name = StringField() + meta = {'allow_inheritance': True} + + Person.drop_collection() + + self.Person = Person + + def test_simple_dynamic_document(self): + """Ensures simple dynamic documents are saved correctly""" + + p = self.Person() + p.name = "James" + p.age = 34 + + self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James", + "age": 34}) + + p.save() + + self.assertEqual(self.Person.objects.first().age, 34) + + # Confirm no changes to self.Person + self.assertFalse(hasattr(self.Person, 'age')) + + def test_change_scope_of_variable(self): + """Test changing the scope of a dynamic field has no adverse effects""" + p = self.Person() + p.name = "Dean" + p.misc = 22 + p.save() + + p = self.Person.objects.get() + p.misc = {'hello': 'world'} + p.save() + + p = self.Person.objects.get() + self.assertEqual(p.misc, {'hello': 'world'}) + + def 
test_delete_dynamic_field(self): + """Test deleting a dynamic field works""" + self.Person.drop_collection() + p = self.Person() + p.name = "Dean" + p.misc = 22 + p.save() + + p = self.Person.objects.get() + p.misc = {'hello': 'world'} + p.save() + + p = self.Person.objects.get() + self.assertEqual(p.misc, {'hello': 'world'}) + collection = self.db[self.Person._get_collection_name()] + obj = collection.find_one() + self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name']) + + del(p.misc) + p.save() + + p = self.Person.objects.get() + self.assertFalse(hasattr(p, 'misc')) + + obj = collection.find_one() + self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name']) + + def test_dynamic_document_queries(self): + """Ensure we can query dynamic fields""" + p = self.Person() + p.name = "Dean" + p.age = 22 + p.save() + + self.assertEqual(1, self.Person.objects(age=22).count()) + p = self.Person.objects(age=22) + p = p.get() + self.assertEqual(22, p.age) + + def test_complex_dynamic_document_queries(self): + class Person(DynamicDocument): + name = StringField() + + Person.drop_collection() + + p = Person(name="test") + p.age = "ten" + p.save() + + p1 = Person(name="test1") + p1.age = "less then ten and a half" + p1.save() + + p2 = Person(name="test2") + p2.age = 10 + p2.save() + + self.assertEqual(Person.objects(age__icontains='ten').count(), 2) + self.assertEqual(Person.objects(age__gte=10).count(), 1) + + def test_complex_data_lookups(self): + """Ensure you can query dynamic document dynamic fields""" + p = self.Person() + p.misc = {'hello': 'world'} + p.save() + + self.assertEqual(1, self.Person.objects(misc__hello='world').count()) + + def test_inheritance(self): + """Ensure that dynamic document plays nice with inheritance""" + class Employee(self.Person): + salary = IntField() + + Employee.drop_collection() + + self.assertTrue('name' in Employee._fields) + self.assertTrue('salary' in Employee._fields) + self.assertEqual(Employee._get_collection_name(), + 
self.Person._get_collection_name()) + + joe_bloggs = Employee() + joe_bloggs.name = "Joe Bloggs" + joe_bloggs.salary = 10 + joe_bloggs.age = 20 + joe_bloggs.save() + + self.assertEqual(1, self.Person.objects(age=20).count()) + self.assertEqual(1, Employee.objects(age=20).count()) + + joe_bloggs = self.Person.objects.first() + self.assertTrue(isinstance(joe_bloggs, Employee)) + + def test_embedded_dynamic_document(self): + """Test dynamic embedded documents""" + class Embedded(DynamicEmbeddedDocument): + pass + + class Doc(DynamicDocument): + pass + + Doc.drop_collection() + doc = Doc() + + embedded_1 = Embedded() + embedded_1.string_field = 'hello' + embedded_1.int_field = 1 + embedded_1.dict_field = {'hello': 'world'} + embedded_1.list_field = ['1', 2, {'hello': 'world'}] + doc.embedded_field = embedded_1 + + self.assertEqual(doc.to_mongo(), {"_cls": "Doc", + "embedded_field": { + "_cls": "Embedded", + "string_field": "hello", + "int_field": 1, + "dict_field": {"hello": "world"}, + "list_field": ['1', 2, {'hello': 'world'}] + } + }) + doc.save() + + doc = Doc.objects.first() + self.assertEqual(doc.embedded_field.__class__, Embedded) + self.assertEqual(doc.embedded_field.string_field, "hello") + self.assertEqual(doc.embedded_field.int_field, 1) + self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) + self.assertEqual(doc.embedded_field.list_field, + ['1', 2, {'hello': 'world'}]) + + def test_complex_embedded_documents(self): + """Test complex dynamic embedded documents setups""" + class Embedded(DynamicEmbeddedDocument): + pass + + class Doc(DynamicDocument): + pass + + Doc.drop_collection() + doc = Doc() + + embedded_1 = Embedded() + embedded_1.string_field = 'hello' + embedded_1.int_field = 1 + embedded_1.dict_field = {'hello': 'world'} + + embedded_2 = Embedded() + embedded_2.string_field = 'hello' + embedded_2.int_field = 1 + embedded_2.dict_field = {'hello': 'world'} + embedded_2.list_field = ['1', 2, {'hello': 'world'}] + + 
embedded_1.list_field = ['1', 2, embedded_2] + doc.embedded_field = embedded_1 + + self.assertEqual(doc.to_mongo(), {"_cls": "Doc", + "embedded_field": { + "_cls": "Embedded", + "string_field": "hello", + "int_field": 1, + "dict_field": {"hello": "world"}, + "list_field": ['1', 2, + {"_cls": "Embedded", + "string_field": "hello", + "int_field": 1, + "dict_field": {"hello": "world"}, + "list_field": ['1', 2, {'hello': 'world'}]} + ] + } + }) + doc.save() + doc = Doc.objects.first() + self.assertEqual(doc.embedded_field.__class__, Embedded) + self.assertEqual(doc.embedded_field.string_field, "hello") + self.assertEqual(doc.embedded_field.int_field, 1) + self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) + self.assertEqual(doc.embedded_field.list_field[0], '1') + self.assertEqual(doc.embedded_field.list_field[1], 2) + + embedded_field = doc.embedded_field.list_field[2] + + self.assertEqual(embedded_field.__class__, Embedded) + self.assertEqual(embedded_field.string_field, "hello") + self.assertEqual(embedded_field.int_field, 1) + self.assertEqual(embedded_field.dict_field, {'hello': 'world'}) + self.assertEqual(embedded_field.list_field, ['1', 2, + {'hello': 'world'}]) + + def test_dynamic_and_embedded(self): + """Ensure embedded documents play nicely""" + + class Address(EmbeddedDocument): + city = StringField() + + class Person(DynamicDocument): + name = StringField() + meta = {'allow_inheritance': True} + + Person.drop_collection() + + Person(name="Ross", address=Address(city="London")).save() + + person = Person.objects.first() + person.address.city = "Lundenne" + person.save() + + self.assertEqual(Person.objects.first().address.city, "Lundenne") + + person = Person.objects.first() + person.address = Address(city="Londinium") + person.save() + + self.assertEqual(Person.objects.first().address.city, "Londinium") + + person = Person.objects.first() + person.age = 35 + person.save() + self.assertEqual(Person.objects.first().age, 35) diff --git 
a/tests/document/indexes.py b/tests/document/indexes.py new file mode 100644 index 0000000..a6b74cd --- /dev/null +++ b/tests/document/indexes.py @@ -0,0 +1,637 @@ +# -*- coding: utf-8 -*- +from __future__ import with_statement +import bson +import os +import pickle +import pymongo +import sys +import unittest +import uuid +import warnings + +from nose.plugins.skip import SkipTest +from datetime import datetime + +from tests.fixtures import Base, Mixin, PickleEmbedded, PickleTest + +from mongoengine import * +from mongoengine.errors import (NotRegistered, InvalidDocumentError, + InvalidQueryError) +from mongoengine.queryset import NULLIFY, Q +from mongoengine.connection import get_db, get_connection + +TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') + +__all__ = ("InstanceTest", ) + + +class InstanceTest(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + self.db = get_db() + + class Person(Document): + name = StringField() + age = IntField() + + non_field = True + + meta = {"allow_inheritance": True} + + self.Person = Person + + def tearDown(self): + for collection in self.db.collection_names(): + if 'system.' 
in collection: + continue + self.db.drop_collection(collection) + + def test_indexes_document(self, ): + """Ensure that indexes are used when meta[indexes] is specified for + Documents + """ + index_test(Document) + + def test_indexes_dynamic_document(self, ): + """Ensure that indexes are used when meta[indexes] is specified for + Dynamic Documents + """ + index_test(DynamicDocument) + + def index_test(self, InheritFrom): + + class BlogPost(InheritFrom): + date = DateTimeField(db_field='addDate', default=datetime.now) + category = StringField() + tags = ListField(StringField()) + meta = { + 'indexes': [ + '-date', + 'tags', + ('category', '-date') + ], + 'allow_inheritance': True + } + + expected_specs = [{'fields': [('_cls', 1), ('addDate', -1)]}, + {'fields': [('_cls', 1), ('tags', 1)]}, + {'fields': [('_cls', 1), ('category', 1), + ('addDate', -1)]}] + self.assertEqual(expected_specs, BlogPost._meta['index_specs']) + + BlogPost.objects._ensure_indexes() + info = BlogPost.objects._collection.index_information() + # _id, '-date', 'tags', ('cat', 'date') + # NB: there is no index on _cls by itself, since + # the indices on -date and tags will both contain + # _cls as first element in the key + self.assertEqual(len(info), 4) + info = [value['key'] for key, value in info.iteritems()] + for expected in expected_specs: + self.assertTrue(expected['fields'] in info) + + class ExtendedBlogPost(BlogPost): + title = StringField() + meta = {'indexes': ['title']} + + expected_specs.append({'fields': [('_cls', 1), ('title', 1)]}) + self.assertEqual(expected_specs, ExtendedBlogPost._meta['index_specs']) + + BlogPost.drop_collection() + + ExtendedBlogPost.objects._ensure_indexes() + info = ExtendedBlogPost.objects._collection.index_information() + info = [value['key'] for key, value in info.iteritems()] + for expected in expected_specs: + self.assertTrue(expected['fields'] in info) + + def test_inherited_index(self): + """Ensure index specs are inhertited correctly""" + + class 
A(Document): + title = StringField() + meta = { + 'indexes': [ + { + 'fields': ('title',), + }, + ], + 'allow_inheritance': True, + } + + class B(A): + description = StringField() + + self.assertEqual(A._meta['index_specs'], B._meta['index_specs']) + self.assertEqual([{'fields': [('_cls', 1), ('title', 1)]}], + A._meta['index_specs']) + + def test_build_index_spec_is_not_destructive(self): + + class MyDoc(Document): + keywords = StringField() + + meta = { + 'indexes': ['keywords'], + 'allow_inheritance': False + } + + self.assertEqual(MyDoc._meta['index_specs'], + [{'fields': [('keywords', 1)]}]) + + # Force index creation + MyDoc.objects._ensure_indexes() + + self.assertEqual(MyDoc._meta['index_specs'], + [{'fields': [('keywords', 1)]}]) + + def test_embedded_document_index_meta(self): + """Ensure that embedded document indexes are created explicitly + """ + class Rank(EmbeddedDocument): + title = StringField(required=True) + + class Person(Document): + name = StringField(required=True) + rank = EmbeddedDocumentField(Rank, required=False) + + meta = { + 'indexes': [ + 'rank.title', + ], + 'allow_inheritance': False + } + + self.assertEqual([{'fields': [('rank.title', 1)]}], + Person._meta['index_specs']) + + Person.drop_collection() + + # Indexes are lazy so use list() to perform query + list(Person.objects) + info = Person.objects._collection.index_information() + info = [value['key'] for key, value in info.iteritems()] + self.assertTrue([('rank.title', 1)] in info) + + def test_explicit_geo2d_index(self): + """Ensure that geo2d indexes work when created via meta[indexes] + """ + class Place(Document): + location = DictField() + meta = { + 'allow_inheritance': True, + 'indexes': [ + '*location.point', + ] + } + + self.assertEqual([{'fields': [('location.point', '2d')]}], + Place._meta['index_specs']) + + Place.objects()._ensure_indexes() + info = Place._get_collection().index_information() + info = [value['key'] for key, value in info.iteritems()] + 
self.assertTrue([('location.point', '2d')] in info) + + def test_dictionary_indexes(self): + """Ensure that indexes are used when meta[indexes] contains + dictionaries instead of lists. + """ + class BlogPost(Document): + date = DateTimeField(db_field='addDate', default=datetime.now) + category = StringField() + tags = ListField(StringField()) + meta = { + 'indexes': [ + {'fields': ['-date'], 'unique': True, + 'sparse': True, 'types': False}, + ], + } + + self.assertEqual([{'fields': [('addDate', -1)], 'unique': True, + 'sparse': True, 'types': False}], + BlogPost._meta['index_specs']) + + BlogPost.drop_collection() + + info = BlogPost.objects._collection.index_information() + # _id, '-date' + self.assertEqual(len(info), 3) + + # Indexes are lazy so use list() to perform query + list(BlogPost.objects) + info = BlogPost.objects._collection.index_information() + info = [(value['key'], + value.get('unique', False), + value.get('sparse', False)) + for key, value in info.iteritems()] + self.assertTrue(([('addDate', -1)], True, True) in info) + + BlogPost.drop_collection() + + def test_abstract_index_inheritance(self): + + class UserBase(Document): + user_guid = StringField(required=True) + meta = { + 'abstract': True, + 'indexes': ['user_guid'], + 'allow_inheritance': True + } + + class Person(UserBase): + name = StringField() + + meta = { + 'indexes': ['name'], + } + + Person(name="test", user_guid='123').save() + + self.assertEqual(1, Person.objects.count()) + info = Person.objects._collection.index_information() + self.assertEqual(info.keys(), ['_cls_1_name_1', '_cls_1_user_guid_1', + '_id_']) + + def test_disable_index_creation(self): + """Tests setting auto_create_index to False on the connection will + disable any index generation. 
+ """ + class User(Document): + meta = { + 'indexes': ['user_guid'], + 'auto_create_index': False + } + user_guid = StringField(required=True) + + + User.drop_collection() + + u = User(user_guid='123') + u.save() + + self.assertEqual(1, User.objects.count()) + info = User.objects._collection.index_information() + self.assertEqual(info.keys(), ['_id_']) + User.drop_collection() + + def test_embedded_document_index(self): + """Tests settings an index on an embedded document + """ + class Date(EmbeddedDocument): + year = IntField(db_field='yr') + + class BlogPost(Document): + title = StringField() + date = EmbeddedDocumentField(Date) + + meta = { + 'indexes': [ + '-date.year' + ], + } + + BlogPost.drop_collection() + + info = BlogPost.objects._collection.index_information() + self.assertEqual(info.keys(), ['_cls_1_date.yr_-1', '_id_']) + BlogPost.drop_collection() + + def test_list_embedded_document_index(self): + """Ensure list embedded documents can be indexed + """ + class Tag(EmbeddedDocument): + name = StringField(db_field='tag') + + class BlogPost(Document): + title = StringField() + tags = ListField(EmbeddedDocumentField(Tag)) + + meta = { + 'indexes': [ + 'tags.name' + ] + } + + BlogPost.drop_collection() + + info = BlogPost.objects._collection.index_information() + # we don't use _cls in with list fields by default + self.assertEqual(info.keys(), ['_id_', '_cls_1_tags.tag_1']) + + post1 = BlogPost(title="Embedded Indexes tests in place", + tags=[Tag(name="about"), Tag(name="time")] + ) + post1.save() + BlogPost.drop_collection() + + def test_recursive_embedded_objects_dont_break_indexes(self): + + class RecursiveObject(EmbeddedDocument): + obj = EmbeddedDocumentField('self') + + class RecursiveDocument(Document): + recursive_obj = EmbeddedDocumentField(RecursiveObject) + meta = {'allow_inheritance': True} + + RecursiveDocument.objects._ensure_indexes() + info = RecursiveDocument._get_collection().index_information() + self.assertEqual(info.keys(), ['_id_', 
'_cls_1']) + + def test_geo_indexes_recursion(self): + + class Location(Document): + name = StringField() + location = GeoPointField() + + class Parent(Document): + name = StringField() + location = ReferenceField(Location) + + Location.drop_collection() + Parent.drop_collection() + + list(Parent.objects) + + collection = Parent._get_collection() + info = collection.index_information() + + self.assertFalse('location_2d' in info) + + self.assertEqual(len(Parent._geo_indices()), 0) + self.assertEqual(len(Location._geo_indices()), 1) + + def test_covered_index(self): + """Ensure that covered indexes can be used + """ + + class Test(Document): + a = IntField() + + meta = { + 'indexes': ['a'], + 'allow_inheritance': False + } + + Test.drop_collection() + + obj = Test(a=1) + obj.save() + + # Need to be explicit about covered indexes as mongoDB doesn't know if + # the documents returned might have more keys in that here. + query_plan = Test.objects(id=obj.id).exclude('a').explain() + self.assertFalse(query_plan['indexOnly']) + + query_plan = Test.objects(id=obj.id).only('id').explain() + self.assertTrue(query_plan['indexOnly']) + + query_plan = Test.objects(a=1).only('a').exclude('id').explain() + self.assertTrue(query_plan['indexOnly']) + + def test_index_on_id(self): + + class BlogPost(Document): + meta = { + 'indexes': [ + ['categories', 'id'] + ], + 'allow_inheritance': False + } + + title = StringField(required=True) + description = StringField(required=True) + categories = ListField() + + BlogPost.drop_collection() + + indexes = BlogPost.objects._collection.index_information() + self.assertEqual(indexes['categories_1__id_1']['key'], + [('categories', 1), ('_id', 1)]) + + def test_hint(self): + + class BlogPost(Document): + tags = ListField(StringField()) + meta = { + 'indexes': [ + 'tags', + ], + } + + BlogPost.drop_collection() + + for i in xrange(0, 10): + tags = [("tag %i" % n) for n in xrange(0, i % 2)] + BlogPost(tags=tags).save() + + 
self.assertEqual(BlogPost.objects.count(), 10) + self.assertEqual(BlogPost.objects.hint().count(), 10) + self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10) + + self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10) + + def invalid_index(): + BlogPost.objects.hint('tags') + self.assertRaises(TypeError, invalid_index) + + def invalid_index_2(): + return BlogPost.objects.hint(('tags', 1)) + self.assertRaises(TypeError, invalid_index_2) + + def test_unique(self): + """Ensure that uniqueness constraints are applied to fields. + """ + class BlogPost(Document): + title = StringField() + slug = StringField(unique=True) + + BlogPost.drop_collection() + + post1 = BlogPost(title='test1', slug='test') + post1.save() + + # Two posts with the same slug is not allowed + post2 = BlogPost(title='test2', slug='test') + self.assertRaises(NotUniqueError, post2.save) + + # Ensure backwards compatibilty for errors + self.assertRaises(OperationError, post2.save) + + def test_unique_with(self): + """Ensure that unique_with constraints are applied to fields. + """ + class Date(EmbeddedDocument): + year = IntField(db_field='yr') + + class BlogPost(Document): + title = StringField() + date = EmbeddedDocumentField(Date) + slug = StringField(unique_with='date.year') + + BlogPost.drop_collection() + + post1 = BlogPost(title='test1', date=Date(year=2009), slug='test') + post1.save() + + # day is different so won't raise exception + post2 = BlogPost(title='test2', date=Date(year=2010), slug='test') + post2.save() + + # Now there will be two docs with the same slug and the same day: fail + post3 = BlogPost(title='test3', date=Date(year=2010), slug='test') + self.assertRaises(OperationError, post3.save) + + BlogPost.drop_collection() + + def test_unique_embedded_document(self): + """Ensure that uniqueness constraints are applied to fields on embedded documents. 
+ """ + class SubDocument(EmbeddedDocument): + year = IntField(db_field='yr') + slug = StringField(unique=True) + + class BlogPost(Document): + title = StringField() + sub = EmbeddedDocumentField(SubDocument) + + BlogPost.drop_collection() + + post1 = BlogPost(title='test1', sub=SubDocument(year=2009, slug="test")) + post1.save() + + # sub.slug is different so won't raise exception + post2 = BlogPost(title='test2', sub=SubDocument(year=2010, slug='another-slug')) + post2.save() + + # Now there will be two docs with the same sub.slug + post3 = BlogPost(title='test3', sub=SubDocument(year=2010, slug='test')) + self.assertRaises(NotUniqueError, post3.save) + + BlogPost.drop_collection() + + def test_unique_with_embedded_document_and_embedded_unique(self): + """Ensure that uniqueness constraints are applied to fields on + embedded documents. And work with unique_with as well. + """ + class SubDocument(EmbeddedDocument): + year = IntField(db_field='yr') + slug = StringField(unique=True) + + class BlogPost(Document): + title = StringField(unique_with='sub.year') + sub = EmbeddedDocumentField(SubDocument) + + BlogPost.drop_collection() + + post1 = BlogPost(title='test1', sub=SubDocument(year=2009, slug="test")) + post1.save() + + # sub.slug is different so won't raise exception + post2 = BlogPost(title='test2', sub=SubDocument(year=2010, slug='another-slug')) + post2.save() + + # Now there will be two docs with the same sub.slug + post3 = BlogPost(title='test3', sub=SubDocument(year=2010, slug='test')) + self.assertRaises(NotUniqueError, post3.save) + + # Now there will be two docs with the same title and year + post3 = BlogPost(title='test1', sub=SubDocument(year=2009, slug='test-1')) + self.assertRaises(NotUniqueError, post3.save) + + BlogPost.drop_collection() + + def test_ttl_indexes(self): + + class Log(Document): + created = DateTimeField(default=datetime.now) + meta = { + 'indexes': [ + {'fields': ['created'], 'expireAfterSeconds': 3600} + ] + } + + 
Log.drop_collection() + + if pymongo.version_tuple[0] < 2 and pymongo.version_tuple[1] < 3: + raise SkipTest('pymongo needs to be 2.3 or higher for this test') + + connection = get_connection() + version_array = connection.server_info()['versionArray'] + if version_array[0] < 2 and version_array[1] < 2: + raise SkipTest('MongoDB needs to be 2.2 or higher for this test') + + # Indexes are lazy so use list() to perform query + list(Log.objects) + info = Log.objects._collection.index_information() + self.assertEqual(3600, + info['_cls_1_created_1']['expireAfterSeconds']) + + def test_unique_and_indexes(self): + """Ensure that 'unique' constraints aren't overridden by + meta.indexes. + """ + class Customer(Document): + cust_id = IntField(unique=True, required=True) + meta = { + 'indexes': ['cust_id'], + 'allow_inheritance': False, + } + + Customer.drop_collection() + cust = Customer(cust_id=1) + cust.save() + + cust_dupe = Customer(cust_id=1) + try: + cust_dupe.save() + raise AssertionError, "We saved a dupe!" + except NotUniqueError: + pass + Customer.drop_collection() + + def test_unique_and_primary(self): + """If you set a field as primary, then unexpected behaviour can occur. + You won't create a duplicate but you will update an existing document. 
+ """ + + class User(Document): + name = StringField(primary_key=True, unique=True) + password = StringField() + + User.drop_collection() + + user = User(name='huangz', password='secret') + user.save() + + user = User(name='huangz', password='secret2') + user.save() + + self.assertEqual(User.objects.count(), 1) + self.assertEqual(User.objects.get().password, 'secret2') + + User.drop_collection() + + def test_types_index_with_pk(self): + """Ensure you can use `pk` as part of a query""" + + class Comment(EmbeddedDocument): + comment_id = IntField(required=True) + + try: + class BlogPost(Document): + comments = EmbeddedDocumentField(Comment) + meta = {'indexes': [ + {'fields': ['pk', 'comments.comment_id'], + 'unique': True}]} + except UnboundLocalError: + self.fail('Unbound local error at types index + pk definition') + + info = BlogPost.objects._collection.index_information() + info = [value['key'] for key, value in info.iteritems()] + index_item = [('_cls', 1), ('_id', 1), ('comments.comment_id', 1)] + self.assertTrue(index_item in info) + +if __name__ == '__main__': + unittest.main() diff --git a/tests/document/inheritance.py b/tests/document/inheritance.py new file mode 100644 index 0000000..d269ac0 --- /dev/null +++ b/tests/document/inheritance.py @@ -0,0 +1,395 @@ +# -*- coding: utf-8 -*- +import unittest +import warnings + +from datetime import datetime + +from tests.fixtures import Base + +from mongoengine import Document, EmbeddedDocument, connect +from mongoengine.connection import get_db +from mongoengine.fields import (BooleanField, GenericReferenceField, + IntField, StringField) + +__all__ = ('InheritanceTest', ) + + +class InheritanceTest(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + self.db = get_db() + + def tearDown(self): + for collection in self.db.collection_names(): + if 'system.' 
in collection: + continue + self.db.drop_collection(collection) + + def test_superclasses(self): + """Ensure that the correct list of superclasses is assembled. + """ + class Animal(Document): + meta = {'allow_inheritance': True} + class Fish(Animal): pass + class Guppy(Fish): pass + class Mammal(Animal): pass + class Dog(Mammal): pass + class Human(Mammal): pass + + self.assertEqual(Animal._superclasses, ()) + self.assertEqual(Fish._superclasses, ('Animal',)) + self.assertEqual(Guppy._superclasses, ('Animal', 'Animal.Fish')) + self.assertEqual(Mammal._superclasses, ('Animal',)) + self.assertEqual(Dog._superclasses, ('Animal', 'Animal.Mammal')) + self.assertEqual(Human._superclasses, ('Animal', 'Animal.Mammal')) + + def test_external_superclasses(self): + """Ensure that the correct list of super classes is assembled when + importing part of the model. + """ + class Animal(Base): pass + class Fish(Animal): pass + class Guppy(Fish): pass + class Mammal(Animal): pass + class Dog(Mammal): pass + class Human(Mammal): pass + + self.assertEqual(Animal._superclasses, ('Base', )) + self.assertEqual(Fish._superclasses, ('Base', 'Base.Animal',)) + self.assertEqual(Guppy._superclasses, ('Base', 'Base.Animal', + 'Base.Animal.Fish')) + self.assertEqual(Mammal._superclasses, ('Base', 'Base.Animal',)) + self.assertEqual(Dog._superclasses, ('Base', 'Base.Animal', + 'Base.Animal.Mammal')) + self.assertEqual(Human._superclasses, ('Base', 'Base.Animal', + 'Base.Animal.Mammal')) + + def test_subclasses(self): + """Ensure that the correct list of _subclasses (subclasses) is + assembled. 
+ """ + class Animal(Document): + meta = {'allow_inheritance': True} + class Fish(Animal): pass + class Guppy(Fish): pass + class Mammal(Animal): pass + class Dog(Mammal): pass + class Human(Mammal): pass + + self.assertEqual(Animal._subclasses, ('Animal', + 'Animal.Fish', + 'Animal.Fish.Guppy', + 'Animal.Mammal', + 'Animal.Mammal.Dog', + 'Animal.Mammal.Human')) + self.assertEqual(Fish._subclasses, ('Animal.Fish', + 'Animal.Fish.Guppy',)) + self.assertEqual(Guppy._subclasses, ('Animal.Fish.Guppy',)) + self.assertEqual(Mammal._subclasses, ('Animal.Mammal', + 'Animal.Mammal.Dog', + 'Animal.Mammal.Human')) + self.assertEqual(Human._subclasses, ('Animal.Mammal.Human',)) + + def test_external_subclasses(self): + """Ensure that the correct list of _subclasses (subclasses) is + assembled when importing part of the model. + """ + class Animal(Base): pass + class Fish(Animal): pass + class Guppy(Fish): pass + class Mammal(Animal): pass + class Dog(Mammal): pass + class Human(Mammal): pass + + self.assertEqual(Animal._subclasses, ('Base.Animal', + 'Base.Animal.Fish', + 'Base.Animal.Fish.Guppy', + 'Base.Animal.Mammal', + 'Base.Animal.Mammal.Dog', + 'Base.Animal.Mammal.Human')) + self.assertEqual(Fish._subclasses, ('Base.Animal.Fish', + 'Base.Animal.Fish.Guppy',)) + self.assertEqual(Guppy._subclasses, ('Base.Animal.Fish.Guppy',)) + self.assertEqual(Mammal._subclasses, ('Base.Animal.Mammal', + 'Base.Animal.Mammal.Dog', + 'Base.Animal.Mammal.Human')) + self.assertEqual(Human._subclasses, ('Base.Animal.Mammal.Human',)) + + def test_dynamic_declarations(self): + """Test that declaring an extra class updates meta data""" + + class Animal(Document): + meta = {'allow_inheritance': True} + + self.assertEqual(Animal._superclasses, ()) + self.assertEqual(Animal._subclasses, ('Animal',)) + + # Test dynamically adding a class changes the meta data + class Fish(Animal): + pass + + self.assertEqual(Animal._superclasses, ()) + self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish')) + 
+ self.assertEqual(Fish._superclasses, ('Animal', )) + self.assertEqual(Fish._subclasses, ('Animal.Fish',)) + + # Test dynamically adding an inherited class changes the meta data + class Pike(Fish): + pass + + self.assertEqual(Animal._superclasses, ()) + self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish', + 'Animal.Fish.Pike')) + + self.assertEqual(Fish._superclasses, ('Animal', )) + self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike')) + + self.assertEqual(Pike._superclasses, ('Animal', 'Animal.Fish')) + self.assertEqual(Pike._subclasses, ('Animal.Fish.Pike',)) + + def test_inheritance_meta_data(self): + """Ensure that document may inherit fields from a superclass document. + """ + class Person(Document): + name = StringField() + age = IntField() + + meta = {'allow_inheritance': True} + + class Employee(Person): + salary = IntField() + + self.assertEqual(['salary', 'age', 'name', 'id'], + Employee._fields.keys()) + self.assertEqual(Employee._get_collection_name(), + Person._get_collection_name()) + + + def test_polymorphic_queries(self): + """Ensure that the correct subclasses are returned from a query + """ + + class Animal(Document): + meta = {'allow_inheritance': True} + class Fish(Animal): pass + class Mammal(Animal): pass + class Dog(Mammal): pass + class Human(Mammal): pass + + Animal.drop_collection() + + Animal().save() + Fish().save() + Mammal().save() + Dog().save() + Human().save() + + classes = [obj.__class__ for obj in Animal.objects] + self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human]) + + classes = [obj.__class__ for obj in Mammal.objects] + self.assertEqual(classes, [Mammal, Dog, Human]) + + classes = [obj.__class__ for obj in Human.objects] + self.assertEqual(classes, [Human]) + + + def test_allow_inheritance(self): + """Ensure that inheritance may be disabled on simple classes and that + _cls and _subclasses will not be used. 
+ """ + + class Animal(Document): + name = StringField() + meta = {'allow_inheritance': False} + + def create_dog_class(): + class Dog(Animal): + pass + + self.assertRaises(ValueError, create_dog_class) + + # Check that _cls etc aren't present on simple documents + dog = Animal(name='dog') + dog.save() + + collection = self.db[Animal._get_collection_name()] + obj = collection.find_one() + self.assertFalse('_cls' in obj) + + def test_cant_turn_off_inheritance_on_subclass(self): + """Ensure if inheritance is on in a subclass you cant turn it off + """ + + class Animal(Document): + name = StringField() + meta = {'allow_inheritance': True} + + def create_mammal_class(): + class Mammal(Animal): + meta = {'allow_inheritance': False} + self.assertRaises(ValueError, create_mammal_class) + + def test_allow_inheritance_abstract_document(self): + """Ensure that abstract documents can set inheritance rules and that + _cls will not be used. + """ + class FinalDocument(Document): + meta = {'abstract': True, + 'allow_inheritance': False} + + class Animal(FinalDocument): + name = StringField() + + def create_mammal_class(): + class Mammal(Animal): + pass + self.assertRaises(ValueError, create_mammal_class) + + # Check that _cls isn't present in simple documents + doc = Animal(name='dog') + self.assertFalse('_cls' in doc.to_mongo()) + + def test_allow_inheritance_embedded_document(self): + """Ensure embedded documents respect inheritance + """ + + class Comment(EmbeddedDocument): + content = StringField() + meta = {'allow_inheritance': False} + + def create_special_comment(): + class SpecialComment(Comment): + pass + + self.assertRaises(ValueError, create_special_comment) + + doc = Comment(content='test') + self.assertFalse('_cls' in doc.to_mongo()) + + class Comment(EmbeddedDocument): + content = StringField() + meta = {'allow_inheritance': True} + + doc = Comment(content='test') + self.assertTrue('_cls' in doc.to_mongo()) + + def test_document_inheritance(self): + """Ensure 
mutliple inheritance of abstract documents + """ + class DateCreatedDocument(Document): + meta = { + 'allow_inheritance': True, + 'abstract': True, + } + + class DateUpdatedDocument(Document): + meta = { + 'allow_inheritance': True, + 'abstract': True, + } + + try: + class MyDocument(DateCreatedDocument, DateUpdatedDocument): + pass + except: + self.assertTrue(False, "Couldn't create MyDocument class") + + def test_abstract_documents(self): + """Ensure that a document superclass can be marked as abstract + thereby not using it as the name for the collection.""" + + defaults = {'index_background': True, + 'index_drop_dups': True, + 'index_opts': {'hello': 'world'}, + 'allow_inheritance': True, + 'queryset_class': 'QuerySet', + 'db_alias': 'myDB', + 'shard_key': ('hello', 'world')} + + meta_settings = {'abstract': True} + meta_settings.update(defaults) + + class Animal(Document): + name = StringField() + meta = meta_settings + + class Fish(Animal): pass + class Guppy(Fish): pass + + class Mammal(Animal): + meta = {'abstract': True} + class Human(Mammal): pass + + for k, v in defaults.iteritems(): + for cls in [Animal, Fish, Guppy]: + self.assertEqual(cls._meta[k], v) + + self.assertFalse('collection' in Animal._meta) + self.assertFalse('collection' in Mammal._meta) + + self.assertEqual(Animal._get_collection_name(), None) + self.assertEqual(Mammal._get_collection_name(), None) + + self.assertEqual(Fish._get_collection_name(), 'fish') + self.assertEqual(Guppy._get_collection_name(), 'fish') + self.assertEqual(Human._get_collection_name(), 'human') + + def create_bad_abstract(): + class EvilHuman(Human): + evil = BooleanField(default=True) + meta = {'abstract': True} + self.assertRaises(ValueError, create_bad_abstract) + + def test_inherited_collections(self): + """Ensure that subclassed documents don't override parents' + collections + """ + + class Drink(Document): + name = StringField() + meta = {'allow_inheritance': True} + + class Drinker(Document): + drink = 
GenericReferenceField() + + try: + warnings.simplefilter("error") + + class AcloholicDrink(Drink): + meta = {'collection': 'booze'} + + except SyntaxWarning: + warnings.simplefilter("ignore") + + class AlcoholicDrink(Drink): + meta = {'collection': 'booze'} + + else: + raise AssertionError("SyntaxWarning should be triggered") + + warnings.resetwarnings() + + Drink.drop_collection() + AlcoholicDrink.drop_collection() + Drinker.drop_collection() + + red_bull = Drink(name='Red Bull') + red_bull.save() + + programmer = Drinker(drink=red_bull) + programmer.save() + + beer = AlcoholicDrink(name='Beer') + beer.save() + real_person = Drinker(drink=beer) + real_person.save() + + self.assertEqual(Drinker.objects[0].drink.name, red_bull.name) + self.assertEqual(Drinker.objects[1].drink.name, beer.name) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_document.py b/tests/document/instance.py similarity index 50% rename from tests/test_document.py rename to tests/document/instance.py index a09aaec..95f37d9 100644 --- a/tests/test_document.py +++ b/tests/document/instance.py @@ -15,14 +15,17 @@ from datetime import datetime from tests.fixtures import Base, Mixin, PickleEmbedded, PickleTest from mongoengine import * -from mongoengine.base import NotRegistered, InvalidDocumentError -from mongoengine.queryset import InvalidQueryError +from mongoengine.errors import (NotRegistered, InvalidDocumentError, + InvalidQueryError) +from mongoengine.queryset import NULLIFY, Q from mongoengine.connection import get_db, get_connection TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') +__all__ = ("InstanceTest",) -class DocumentTest(unittest.TestCase): + +class InstanceTest(unittest.TestCase): def setUp(self): connect(db='mongoenginetest') @@ -32,59 +35,57 @@ class DocumentTest(unittest.TestCase): name = StringField() age = IntField() - meta = {'allow_inheritance': True} + non_field = True + + meta = {"allow_inheritance": True} self.Person = 
Person def tearDown(self): - self.Person.drop_collection() + for collection in self.db.collection_names(): + if 'system.' in collection: + continue + self.db.drop_collection(collection) - def test_drop_collection(self): - """Ensure that the collection may be dropped from the database. + def test_capped_collection(self): + """Ensure that capped collections work properly. """ - self.Person(name='Test').save() + class Log(Document): + date = DateTimeField(default=datetime.now) + meta = { + 'max_documents': 10, + 'max_size': 90000, + } - collection = self.Person._get_collection_name() - self.assertTrue(collection in self.db.collection_names()) + Log.drop_collection() - self.Person.drop_collection() - self.assertFalse(collection in self.db.collection_names()) + # Ensure that the collection handles up to its maximum + for _ in range(10): + Log().save() - def test_queryset_resurrects_dropped_collection(self): + self.assertEqual(len(Log.objects), 10) - self.Person.objects().item_frequencies('name') - self.Person.drop_collection() + # Check that extra documents don't increase the size + Log().save() + self.assertEqual(len(Log.objects), 10) - self.assertEqual({}, self.Person.objects().item_frequencies('name')) + options = Log.objects._collection.options() + self.assertEqual(options['capped'], True) + self.assertEqual(options['max'], 10) + self.assertEqual(options['size'], 90000) - class Actor(self.Person): - pass + # Check that the document cannot be redefined with different options + def recreate_log_document(): + class Log(Document): + date = DateTimeField(default=datetime.now) + meta = { + 'max_documents': 11, + } + # Create the collection by accessing Document.objects + Log.objects + self.assertRaises(InvalidCollectionError, recreate_log_document) - # Ensure works correctly with inhertited classes - Actor.objects().item_frequencies('name') - self.Person.drop_collection() - self.assertEqual({}, Actor.objects().item_frequencies('name')) - - def test_definition(self): - 
"""Ensure that document may be defined using fields. - """ - name_field = StringField() - age_field = IntField() - - class Person(Document): - name = name_field - age = age_field - non_field = True - - self.assertEqual(Person._fields['name'], name_field) - self.assertEqual(Person._fields['age'], age_field) - self.assertFalse('non_field' in Person._fields) - self.assertTrue('id' in Person._fields) - # Test iteration over fields - fields = list(Person()) - self.assertTrue('name' in fields and 'age' in fields) - # Ensure Document isn't treated like an actual document - self.assertFalse(hasattr(Document, '_fields')) + Log.drop_collection() def test_repr(self): """Ensure that unicode representation works @@ -95,146 +96,22 @@ class DocumentTest(unittest.TestCase): def __unicode__(self): return self.title - Article.drop_collection() + doc = Article(title=u'привет мир') - Article(title=u'привет мир').save() + self.assertEqual('', repr(doc)) - self.assertEqual('', repr(Article.objects.first())) - self.assertEqual('[]', repr(Article.objects.all())) + def test_queryset_resurrects_dropped_collection(self): + self.Person.drop_collection() - def test_collection_naming(self): - """Ensure that a collection with a specified name may be used. 
- """ + self.assertEqual([], list(self.Person.objects())) - class DefaultNamingTest(Document): - pass - self.assertEqual('default_naming_test', DefaultNamingTest._get_collection_name()) - - class CustomNamingTest(Document): - meta = {'collection': 'pimp_my_collection'} - - self.assertEqual('pimp_my_collection', CustomNamingTest._get_collection_name()) - - class DynamicNamingTest(Document): - meta = {'collection': lambda c: "DYNAMO"} - self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name()) - - # Use Abstract class to handle backwards compatibility - class BaseDocument(Document): - meta = { - 'abstract': True, - 'collection': lambda c: c.__name__.lower() - } - - class OldNamingConvention(BaseDocument): - pass - self.assertEqual('oldnamingconvention', OldNamingConvention._get_collection_name()) - - class InheritedAbstractNamingTest(BaseDocument): - meta = {'collection': 'wibble'} - self.assertEqual('wibble', InheritedAbstractNamingTest._get_collection_name()) - - - # Mixin tests - class BaseMixin(object): - meta = { - 'collection': lambda c: c.__name__.lower() - } - - class OldMixinNamingConvention(Document, BaseMixin): - pass - self.assertEqual('oldmixinnamingconvention', OldMixinNamingConvention._get_collection_name()) - - class BaseMixin(object): - meta = { - 'collection': lambda c: c.__name__.lower() - } - - class BaseDocument(Document, BaseMixin): - meta = {'allow_inheritance': True} - - class MyDocument(BaseDocument): + class Actor(self.Person): pass - self.assertEqual('basedocument', MyDocument._get_collection_name()) - - def test_get_superclasses(self): - """Ensure that the correct list of superclasses is assembled. 
- """ - class Animal(Document): - meta = {'allow_inheritance': True} - class Fish(Animal): pass - class Mammal(Animal): pass - class Human(Mammal): pass - class Dog(Mammal): pass - - mammal_superclasses = {'Animal': Animal} - self.assertEqual(Mammal._superclasses, mammal_superclasses) - - dog_superclasses = { - 'Animal': Animal, - 'Animal.Mammal': Mammal, - } - self.assertEqual(Dog._superclasses, dog_superclasses) - - def test_external_superclasses(self): - """Ensure that the correct list of sub and super classes is assembled. - when importing part of the model - """ - class Animal(Base): pass - class Fish(Animal): pass - class Mammal(Animal): pass - class Human(Mammal): pass - class Dog(Mammal): pass - - mammal_superclasses = {'Base': Base, 'Base.Animal': Animal} - self.assertEqual(Mammal._superclasses, mammal_superclasses) - - dog_superclasses = { - 'Base': Base, - 'Base.Animal': Animal, - 'Base.Animal.Mammal': Mammal, - } - self.assertEqual(Dog._superclasses, dog_superclasses) - - Base.drop_collection() - - h = Human() - h.save() - - self.assertEqual(Human.objects.count(), 1) - self.assertEqual(Mammal.objects.count(), 1) - self.assertEqual(Animal.objects.count(), 1) - self.assertEqual(Base.objects.count(), 1) - Base.drop_collection() - - def test_polymorphic_queries(self): - """Ensure that the correct subclasses are returned from a query""" - class Animal(Document): - meta = {'allow_inheritance': True} - class Fish(Animal): pass - class Mammal(Animal): pass - class Human(Mammal): pass - class Dog(Mammal): pass - - Animal.drop_collection() - - Animal().save() - Fish().save() - Mammal().save() - Human().save() - Dog().save() - - classes = [obj.__class__ for obj in Animal.objects] - self.assertEqual(classes, [Animal, Fish, Mammal, Human, Dog]) - - classes = [obj.__class__ for obj in Mammal.objects] - self.assertEqual(classes, [Mammal, Human, Dog]) - - classes = [obj.__class__ for obj in Human.objects] - self.assertEqual(classes, [Human]) - - 
Animal.drop_collection() + # Ensure works correctly with inhertited classes + Actor.objects() + self.Person.drop_collection() + self.assertEqual([], list(Actor.objects())) def test_polymorphic_references(self): """Ensure that the correct subclasses are returned from a query when @@ -244,8 +121,8 @@ class DocumentTest(unittest.TestCase): meta = {'allow_inheritance': True} class Fish(Animal): pass class Mammal(Animal): pass - class Human(Mammal): pass class Dog(Mammal): pass + class Human(Mammal): pass class Zoo(Document): animals = ListField(ReferenceField(Animal)) @@ -256,8 +133,8 @@ class DocumentTest(unittest.TestCase): Animal().save() Fish().save() Mammal().save() - Human().save() Dog().save() + Human().save() # Save a reference to each animal zoo = Zoo(animals=Animal.objects) @@ -265,7 +142,7 @@ class DocumentTest(unittest.TestCase): zoo.reload() classes = [a.__class__ for a in Zoo.objects.first().animals] - self.assertEqual(classes, [Animal, Fish, Mammal, Human, Dog]) + self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human]) Zoo.drop_collection() @@ -278,7 +155,7 @@ class DocumentTest(unittest.TestCase): zoo.reload() classes = [a.__class__ for a in Zoo.objects.first().animals] - self.assertEqual(classes, [Animal, Fish, Mammal, Human, Dog]) + self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human]) Zoo.drop_collection() Animal.drop_collection() @@ -308,466 +185,8 @@ class DocumentTest(unittest.TestCase): self.assertEqual(list_stats, CompareStats.objects.first().stats) - def test_inheritance(self): - """Ensure that document may inherit fields from a superclass document. 
- """ - class Employee(self.Person): - salary = IntField() - self.assertTrue('name' in Employee._fields) - self.assertTrue('salary' in Employee._fields) - self.assertEqual(Employee._get_collection_name(), - self.Person._get_collection_name()) - # Ensure that MRO error is not raised - class A(Document): - meta = {'allow_inheritance': True} - class B(A): pass - class C(B): pass - - def test_allow_inheritance(self): - """Ensure that inheritance may be disabled on simple classes and that - _cls and _types will not be used. - """ - - class Animal(Document): - name = StringField() - meta = {'allow_inheritance': False} - - Animal.drop_collection() - def create_dog_class(): - class Dog(Animal): - pass - self.assertRaises(ValueError, create_dog_class) - - # Check that _cls etc aren't present on simple documents - dog = Animal(name='dog') - dog.save() - collection = self.db[Animal._get_collection_name()] - obj = collection.find_one() - self.assertFalse('_cls' in obj) - self.assertFalse('_types' in obj) - - Animal.drop_collection() - - def create_employee_class(): - class Employee(self.Person): - meta = {'allow_inheritance': False} - self.assertRaises(ValueError, create_employee_class) - - def test_allow_inheritance_abstract_document(self): - """Ensure that abstract documents can set inheritance rules and that - _cls and _types will not be used. 
- """ - class FinalDocument(Document): - meta = {'abstract': True, - 'allow_inheritance': False} - - class Animal(FinalDocument): - name = StringField() - - Animal.drop_collection() - def create_dog_class(): - class Dog(Animal): - pass - self.assertRaises(ValueError, create_dog_class) - - # Check that _cls etc aren't present on simple documents - dog = Animal(name='dog') - dog.save() - collection = self.db[Animal._get_collection_name()] - obj = collection.find_one() - self.assertFalse('_cls' in obj) - self.assertFalse('_types' in obj) - - Animal.drop_collection() - - def test_allow_inheritance_embedded_document(self): - - # Test the same for embedded documents - class Comment(EmbeddedDocument): - content = StringField() - meta = {'allow_inheritance': False} - - def create_special_comment(): - class SpecialComment(Comment): - pass - - self.assertRaises(ValueError, create_special_comment) - - comment = Comment(content='test') - self.assertFalse('_cls' in comment.to_mongo()) - self.assertFalse('_types' in comment.to_mongo()) - - class Comment(EmbeddedDocument): - content = StringField() - meta = {'allow_inheritance': True} - - comment = Comment(content='test') - self.assertTrue('_cls' in comment.to_mongo()) - self.assertTrue('_types' in comment.to_mongo()) - - def test_document_inheritance(self): - """Ensure mutliple inheritance of abstract docs works - """ - class DateCreatedDocument(Document): - meta = { - 'allow_inheritance': True, - 'abstract': True, - } - - class DateUpdatedDocument(Document): - meta = { - 'allow_inheritance': True, - 'abstract': True, - } - - try: - class MyDocument(DateCreatedDocument, DateUpdatedDocument): - pass - except: - self.assertTrue(False, "Couldn't create MyDocument class") - - def test_how_to_turn_off_inheritance(self): - """Demonstrates migrating from allow_inheritance = True to False. 
- """ - class Animal(Document): - name = StringField() - meta = { - 'indexes': ['name'] - } - - self.assertEqual(Animal._meta['index_specs'], - [{'fields': [('_types', 1), ('name', 1)]}]) - - Animal.drop_collection() - - dog = Animal(name='dog') - dog.save() - - collection = self.db[Animal._get_collection_name()] - obj = collection.find_one() - self.assertTrue('_cls' in obj) - self.assertTrue('_types' in obj) - - info = collection.index_information() - info = [value['key'] for key, value in info.iteritems()] - self.assertEqual([[(u'_id', 1)], [(u'_types', 1), (u'name', 1)]], info) - - # Turn off inheritance - class Animal(Document): - name = StringField() - meta = { - 'allow_inheritance': False, - 'indexes': ['name'] - } - - self.assertEqual(Animal._meta['index_specs'], - [{'fields': [('name', 1)]}]) - collection.update({}, {"$unset": {"_types": 1, "_cls": 1}}, multi=True) - - # Confirm extra data is removed - obj = collection.find_one() - self.assertFalse('_cls' in obj) - self.assertFalse('_types' in obj) - - info = collection.index_information() - info = [value['key'] for key, value in info.iteritems()] - self.assertEqual([[(u'_id', 1)], [(u'_types', 1), (u'name', 1)]], info) - - info = collection.index_information() - indexes_to_drop = [key for key, value in info.iteritems() if '_types' in dict(value['key'])] - for index in indexes_to_drop: - collection.drop_index(index) - - info = collection.index_information() - info = [value['key'] for key, value in info.iteritems()] - self.assertEqual([[(u'_id', 1)]], info) - - # Recreate indexes - dog = Animal.objects.first() - dog.save() - info = collection.index_information() - info = [value['key'] for key, value in info.iteritems()] - self.assertEqual([[(u'_id', 1)], [(u'name', 1),]], info) - - Animal.drop_collection() - - def test_abstract_documents(self): - """Ensure that a document superclass can be marked as abstract - thereby not using it as the name for the collection.""" - - defaults = {'index_background': True, - 
'index_drop_dups': True, - 'index_opts': {'hello': 'world'}, - 'allow_inheritance': True, - 'queryset_class': 'QuerySet', - 'db_alias': 'myDB', - 'shard_key': ('hello', 'world')} - - meta_settings = {'abstract': True} - meta_settings.update(defaults) - - class Animal(Document): - name = StringField() - meta = meta_settings - - class Fish(Animal): pass - class Guppy(Fish): pass - - class Mammal(Animal): - meta = {'abstract': True} - class Human(Mammal): pass - - for k, v in defaults.iteritems(): - for cls in [Animal, Fish, Guppy]: - self.assertEqual(cls._meta[k], v) - - self.assertFalse('collection' in Animal._meta) - self.assertFalse('collection' in Mammal._meta) - - self.assertEqual(Animal._get_collection_name(), None) - self.assertEqual(Mammal._get_collection_name(), None) - - self.assertEqual(Fish._get_collection_name(), 'fish') - self.assertEqual(Guppy._get_collection_name(), 'fish') - self.assertEqual(Human._get_collection_name(), 'human') - - def create_bad_abstract(): - class EvilHuman(Human): - evil = BooleanField(default=True) - meta = {'abstract': True} - self.assertRaises(ValueError, create_bad_abstract) - - def test_collection_name(self): - """Ensure that a collection with a specified name may be used. - """ - collection = 'personCollTest' - if collection in self.db.collection_names(): - self.db.drop_collection(collection) - - class Person(Document): - name = StringField() - meta = {'collection': collection} - - user = Person(name="Test User") - user.save() - self.assertTrue(collection in self.db.collection_names()) - - user_obj = self.db[collection].find_one() - self.assertEqual(user_obj['name'], "Test User") - - user_obj = Person.objects[0] - self.assertEqual(user_obj.name, "Test User") - - Person.drop_collection() - self.assertFalse(collection in self.db.collection_names()) - - def test_collection_name_and_primary(self): - """Ensure that a collection with a specified name may be used. 
- """ - - class Person(Document): - name = StringField(primary_key=True) - meta = {'collection': 'app'} - - user = Person(name="Test User") - user.save() - - user_obj = Person.objects[0] - self.assertEqual(user_obj.name, "Test User") - - Person.drop_collection() - - def test_inherited_collections(self): - """Ensure that subclassed documents don't override parents' collections. - """ - - class Drink(Document): - name = StringField() - meta = {'allow_inheritance': True} - - class Drinker(Document): - drink = GenericReferenceField() - - try: - warnings.simplefilter("error") - - class AcloholicDrink(Drink): - meta = {'collection': 'booze'} - - except SyntaxWarning, w: - warnings.simplefilter("ignore") - - class AlcoholicDrink(Drink): - meta = {'collection': 'booze'} - - else: - raise AssertionError("SyntaxWarning should be triggered") - - warnings.resetwarnings() - - Drink.drop_collection() - AlcoholicDrink.drop_collection() - Drinker.drop_collection() - - red_bull = Drink(name='Red Bull') - red_bull.save() - - programmer = Drinker(drink=red_bull) - programmer.save() - - beer = AlcoholicDrink(name='Beer') - beer.save() - real_person = Drinker(drink=beer) - real_person.save() - - self.assertEqual(Drinker.objects[0].drink.name, red_bull.name) - self.assertEqual(Drinker.objects[1].drink.name, beer.name) - - def test_capped_collection(self): - """Ensure that capped collections work properly. 
- """ - class Log(Document): - date = DateTimeField(default=datetime.now) - meta = { - 'max_documents': 10, - 'max_size': 90000, - } - - Log.drop_collection() - - # Ensure that the collection handles up to its maximum - for i in range(10): - Log().save() - - self.assertEqual(len(Log.objects), 10) - - # Check that extra documents don't increase the size - Log().save() - self.assertEqual(len(Log.objects), 10) - - options = Log.objects._collection.options() - self.assertEqual(options['capped'], True) - self.assertEqual(options['max'], 10) - self.assertEqual(options['size'], 90000) - - # Check that the document cannot be redefined with different options - def recreate_log_document(): - class Log(Document): - date = DateTimeField(default=datetime.now) - meta = { - 'max_documents': 11, - } - # Create the collection by accessing Document.objects - Log.objects - self.assertRaises(InvalidCollectionError, recreate_log_document) - - Log.drop_collection() - - def test_indexes(self): - """Ensure that indexes are used when meta[indexes] is specified. 
- """ - class BlogPost(Document): - date = DateTimeField(db_field='addDate', default=datetime.now) - category = StringField() - tags = ListField(StringField()) - meta = { - 'indexes': [ - '-date', - 'tags', - ('category', '-date') - ], - 'allow_inheritance': True - } - - self.assertEqual(BlogPost._meta['index_specs'], - [{'fields': [('_types', 1), ('addDate', -1)]}, - {'fields': [('tags', 1)]}, - {'fields': [('_types', 1), ('category', 1), - ('addDate', -1)]}]) - - BlogPost.drop_collection() - - info = BlogPost.objects._collection.index_information() - # _id, '-date', 'tags', ('cat', 'date') - # NB: there is no index on _types by itself, since - # the indices on -date and tags will both contain - # _types as first element in the key - self.assertEqual(len(info), 4) - - # Indexes are lazy so use list() to perform query - list(BlogPost.objects) - info = BlogPost.objects._collection.index_information() - info = [value['key'] for key, value in info.iteritems()] - self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)] - in info) - self.assertTrue([('_types', 1), ('addDate', -1)] in info) - # tags is a list field so it shouldn't have _types in the index - self.assertTrue([('tags', 1)] in info) - - class ExtendedBlogPost(BlogPost): - title = StringField() - meta = {'indexes': ['title']} - - self.assertEqual(ExtendedBlogPost._meta['index_specs'], - [{'fields': [('_types', 1), ('addDate', -1)]}, - {'fields': [('tags', 1)]}, - {'fields': [('_types', 1), ('category', 1), - ('addDate', -1)]}, - {'fields': [('_types', 1), ('title', 1)]}]) - - BlogPost.drop_collection() - - list(ExtendedBlogPost.objects) - info = ExtendedBlogPost.objects._collection.index_information() - info = [value['key'] for key, value in info.iteritems()] - self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)] - in info) - self.assertTrue([('_types', 1), ('addDate', -1)] in info) - self.assertTrue([('_types', 1), ('title', 1)] in info) - - BlogPost.drop_collection() - - def 
test_inherited_index(self): - """Ensure index specs are inhertited correctly""" - - class A(Document): - title = StringField() - meta = { - 'indexes': [ - { - 'fields': ('title',), - }, - ], - 'allow_inheritance': True, - } - - class B(A): - description = StringField() - - self.assertEqual(A._meta['index_specs'], B._meta['index_specs']) - self.assertEqual([{'fields': [('_types', 1), ('title', 1)]}], - A._meta['index_specs']) - - def test_build_index_spec_is_not_destructive(self): - - class MyDoc(Document): - keywords = StringField() - - meta = { - 'indexes': ['keywords'], - 'allow_inheritance': False - } - - self.assertEqual(MyDoc._meta['index_specs'], - [{'fields': [('keywords', 1)]}]) - - # Force index creation - MyDoc.objects._ensure_indexes() - - self.assertEqual(MyDoc._meta['index_specs'], - [{'fields': [('keywords', 1)]}]) def test_db_field_load(self): """Ensure we load data correctly @@ -812,477 +231,8 @@ class DocumentTest(unittest.TestCase): self.assertEqual(Person.objects.get(name="Jack").rank, "Corporal") self.assertEqual(Person.objects.get(name="Fred").rank, "Private") - def test_embedded_document_index_meta(self): - """Ensure that embedded document indexes are created explicitly - """ - class Rank(EmbeddedDocument): - title = StringField(required=True) - class Person(Document): - name = StringField(required=True) - rank = EmbeddedDocumentField(Rank, required=False) - meta = { - 'indexes': [ - 'rank.title', - ], - 'allow_inheritance': False - } - - self.assertEqual([{'fields': [('rank.title', 1)]}], - Person._meta['index_specs']) - - Person.drop_collection() - - # Indexes are lazy so use list() to perform query - list(Person.objects) - info = Person.objects._collection.index_information() - info = [value['key'] for key, value in info.iteritems()] - self.assertTrue([('rank.title', 1)] in info) - - def test_explicit_geo2d_index(self): - """Ensure that geo2d indexes work when created via meta[indexes] - """ - class Place(Document): - location = DictField() 
- meta = { - 'indexes': [ - '*location.point', - ], - } - - self.assertEqual([{'fields': [('location.point', '2d')]}], - Place._meta['index_specs']) - - Place.drop_collection() - - info = Place.objects._collection.index_information() - # Indexes are lazy so use list() to perform query - list(Place.objects) - info = Place.objects._collection.index_information() - info = [value['key'] for key, value in info.iteritems()] - - self.assertTrue([('location.point', '2d')] in info) - - def test_dictionary_indexes(self): - """Ensure that indexes are used when meta[indexes] contains dictionaries - instead of lists. - """ - class BlogPost(Document): - date = DateTimeField(db_field='addDate', default=datetime.now) - category = StringField() - tags = ListField(StringField()) - meta = { - 'indexes': [ - {'fields': ['-date'], 'unique': True, - 'sparse': True, 'types': False }, - ], - } - - self.assertEqual([{'fields': [('addDate', -1)], 'unique': True, - 'sparse': True, 'types': False}], - BlogPost._meta['index_specs']) - - BlogPost.drop_collection() - - info = BlogPost.objects._collection.index_information() - # _id, '-date' - self.assertEqual(len(info), 3) - - # Indexes are lazy so use list() to perform query - list(BlogPost.objects) - info = BlogPost.objects._collection.index_information() - info = [(value['key'], - value.get('unique', False), - value.get('sparse', False)) - for key, value in info.iteritems()] - self.assertTrue(([('addDate', -1)], True, True) in info) - - BlogPost.drop_collection() - - def test_abstract_index_inheritance(self): - - class UserBase(Document): - meta = { - 'abstract': True, - 'indexes': ['user_guid'] - } - - user_guid = StringField(required=True) - - class Person(UserBase): - meta = { - 'indexes': ['name'], - } - - name = StringField() - - Person.drop_collection() - - p = Person(name="test", user_guid='123') - p.save() - - self.assertEqual(1, Person.objects.count()) - info = Person.objects._collection.index_information() - 
self.assertEqual(info.keys(), ['_types_1_user_guid_1', '_id_', '_types_1_name_1']) - Person.drop_collection() - - def test_disable_index_creation(self): - """Tests setting auto_create_index to False on the connection will - disable any index generation. - """ - class User(Document): - meta = { - 'indexes': ['user_guid'], - 'auto_create_index': False - } - user_guid = StringField(required=True) - - - User.drop_collection() - - u = User(user_guid='123') - u.save() - - self.assertEqual(1, User.objects.count()) - info = User.objects._collection.index_information() - self.assertEqual(info.keys(), ['_id_']) - User.drop_collection() - - def test_embedded_document_index(self): - """Tests settings an index on an embedded document - """ - class Date(EmbeddedDocument): - year = IntField(db_field='yr') - - class BlogPost(Document): - title = StringField() - date = EmbeddedDocumentField(Date) - - meta = { - 'indexes': [ - '-date.year' - ], - } - - BlogPost.drop_collection() - - info = BlogPost.objects._collection.index_information() - self.assertEqual(info.keys(), ['_types_1_date.yr_-1', '_id_']) - BlogPost.drop_collection() - - def test_list_embedded_document_index(self): - """Ensure list embedded documents can be indexed - """ - class Tag(EmbeddedDocument): - name = StringField(db_field='tag') - - class BlogPost(Document): - title = StringField() - tags = ListField(EmbeddedDocumentField(Tag)) - - meta = { - 'indexes': [ - 'tags.name' - ], - } - - BlogPost.drop_collection() - - info = BlogPost.objects._collection.index_information() - # we don't use _types in with list fields by default - self.assertEqual(info.keys(), ['_id_', '_types_1', 'tags.tag_1']) - - post1 = BlogPost(title="Embedded Indexes tests in place", - tags=[Tag(name="about"), Tag(name="time")] - ) - post1.save() - BlogPost.drop_collection() - - def test_recursive_embedded_objects_dont_break_indexes(self): - - class RecursiveObject(EmbeddedDocument): - obj = EmbeddedDocumentField('self') - - class 
RecursiveDocument(Document): - recursive_obj = EmbeddedDocumentField(RecursiveObject) - - info = RecursiveDocument.objects._collection.index_information() - self.assertEqual(info.keys(), ['_id_', '_types_1']) - - def test_geo_indexes_recursion(self): - - class Location(Document): - name = StringField() - location = GeoPointField() - - class Parent(Document): - name = StringField() - location = ReferenceField(Location) - - Location.drop_collection() - Parent.drop_collection() - - list(Parent.objects) - - collection = Parent._get_collection() - info = collection.index_information() - - self.assertFalse('location_2d' in info) - - self.assertEqual(len(Parent._geo_indices()), 0) - self.assertEqual(len(Location._geo_indices()), 1) - - def test_covered_index(self): - """Ensure that covered indexes can be used - """ - - class Test(Document): - a = IntField() - - meta = { - 'indexes': ['a'], - 'allow_inheritance': False - } - - Test.drop_collection() - - obj = Test(a=1) - obj.save() - - # Need to be explicit about covered indexes as mongoDB doesn't know if - # the documents returned might have more keys in that here. 
- query_plan = Test.objects(id=obj.id).exclude('a').explain() - self.assertFalse(query_plan['indexOnly']) - - query_plan = Test.objects(id=obj.id).only('id').explain() - self.assertTrue(query_plan['indexOnly']) - - query_plan = Test.objects(a=1).only('a').exclude('id').explain() - self.assertTrue(query_plan['indexOnly']) - - def test_index_on_id(self): - - class BlogPost(Document): - meta = { - 'indexes': [ - ['categories', 'id'] - ], - 'allow_inheritance': False - } - - title = StringField(required=True) - description = StringField(required=True) - categories = ListField() - - BlogPost.drop_collection() - - indexes = BlogPost.objects._collection.index_information() - self.assertEqual(indexes['categories_1__id_1']['key'], - [('categories', 1), ('_id', 1)]) - - def test_hint(self): - - class BlogPost(Document): - tags = ListField(StringField()) - meta = { - 'indexes': [ - 'tags', - ], - } - - BlogPost.drop_collection() - - for i in xrange(0, 10): - tags = [("tag %i" % n) for n in xrange(0, i % 2)] - BlogPost(tags=tags).save() - - self.assertEqual(BlogPost.objects.count(), 10) - self.assertEqual(BlogPost.objects.hint().count(), 10) - self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10) - - self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10) - - def invalid_index(): - BlogPost.objects.hint('tags') - self.assertRaises(TypeError, invalid_index) - - def invalid_index_2(): - return BlogPost.objects.hint(('tags', 1)) - self.assertRaises(TypeError, invalid_index_2) - - def test_unique(self): - """Ensure that uniqueness constraints are applied to fields. 
- """ - class BlogPost(Document): - title = StringField() - slug = StringField(unique=True) - - BlogPost.drop_collection() - - post1 = BlogPost(title='test1', slug='test') - post1.save() - - # Two posts with the same slug is not allowed - post2 = BlogPost(title='test2', slug='test') - self.assertRaises(NotUniqueError, post2.save) - - # Ensure backwards compatibilty for errors - self.assertRaises(OperationError, post2.save) - - def test_unique_with(self): - """Ensure that unique_with constraints are applied to fields. - """ - class Date(EmbeddedDocument): - year = IntField(db_field='yr') - - class BlogPost(Document): - title = StringField() - date = EmbeddedDocumentField(Date) - slug = StringField(unique_with='date.year') - - BlogPost.drop_collection() - - post1 = BlogPost(title='test1', date=Date(year=2009), slug='test') - post1.save() - - # day is different so won't raise exception - post2 = BlogPost(title='test2', date=Date(year=2010), slug='test') - post2.save() - - # Now there will be two docs with the same slug and the same day: fail - post3 = BlogPost(title='test3', date=Date(year=2010), slug='test') - self.assertRaises(OperationError, post3.save) - - BlogPost.drop_collection() - - def test_unique_embedded_document(self): - """Ensure that uniqueness constraints are applied to fields on embedded documents. 
- """ - class SubDocument(EmbeddedDocument): - year = IntField(db_field='yr') - slug = StringField(unique=True) - - class BlogPost(Document): - title = StringField() - sub = EmbeddedDocumentField(SubDocument) - - BlogPost.drop_collection() - - post1 = BlogPost(title='test1', sub=SubDocument(year=2009, slug="test")) - post1.save() - - # sub.slug is different so won't raise exception - post2 = BlogPost(title='test2', sub=SubDocument(year=2010, slug='another-slug')) - post2.save() - - # Now there will be two docs with the same sub.slug - post3 = BlogPost(title='test3', sub=SubDocument(year=2010, slug='test')) - self.assertRaises(NotUniqueError, post3.save) - - BlogPost.drop_collection() - - def test_unique_with_embedded_document_and_embedded_unique(self): - """Ensure that uniqueness constraints are applied to fields on - embedded documents. And work with unique_with as well. - """ - class SubDocument(EmbeddedDocument): - year = IntField(db_field='yr') - slug = StringField(unique=True) - - class BlogPost(Document): - title = StringField(unique_with='sub.year') - sub = EmbeddedDocumentField(SubDocument) - - BlogPost.drop_collection() - - post1 = BlogPost(title='test1', sub=SubDocument(year=2009, slug="test")) - post1.save() - - # sub.slug is different so won't raise exception - post2 = BlogPost(title='test2', sub=SubDocument(year=2010, slug='another-slug')) - post2.save() - - # Now there will be two docs with the same sub.slug - post3 = BlogPost(title='test3', sub=SubDocument(year=2010, slug='test')) - self.assertRaises(NotUniqueError, post3.save) - - # Now there will be two docs with the same title and year - post3 = BlogPost(title='test1', sub=SubDocument(year=2009, slug='test-1')) - self.assertRaises(NotUniqueError, post3.save) - - BlogPost.drop_collection() - - def test_ttl_indexes(self): - - class Log(Document): - created = DateTimeField(default=datetime.now) - meta = { - 'indexes': [ - {'fields': ['created'], 'expireAfterSeconds': 3600} - ] - } - - 
Log.drop_collection() - - if pymongo.version_tuple[0] < 2 and pymongo.version_tuple[1] < 3: - raise SkipTest('pymongo needs to be 2.3 or higher for this test') - - connection = get_connection() - version_array = connection.server_info()['versionArray'] - if version_array[0] < 2 and version_array[1] < 2: - raise SkipTest('MongoDB needs to be 2.2 or higher for this test') - - # Indexes are lazy so use list() to perform query - list(Log.objects) - info = Log.objects._collection.index_information() - self.assertEqual(3600, - info['_types_1_created_1']['expireAfterSeconds']) - - def test_unique_and_indexes(self): - """Ensure that 'unique' constraints aren't overridden by - meta.indexes. - """ - class Customer(Document): - cust_id = IntField(unique=True, required=True) - meta = { - 'indexes': ['cust_id'], - 'allow_inheritance': False, - } - - Customer.drop_collection() - cust = Customer(cust_id=1) - cust.save() - - cust_dupe = Customer(cust_id=1) - try: - cust_dupe.save() - raise AssertionError, "We saved a dupe!" - except NotUniqueError: - pass - Customer.drop_collection() - - def test_unique_and_primary(self): - """If you set a field as primary, then unexpected behaviour can occur. - You won't create a duplicate but you will update an existing document. - """ - - class User(Document): - name = StringField(primary_key=True, unique=True) - password = StringField() - - User.drop_collection() - - user = User(name='huangz', password='secret') - user.save() - - user = User(name='huangz', password='secret2') - user.save() - - self.assertEqual(User.objects.count(), 1) - self.assertEqual(User.objects.get().password, 'secret2') - - User.drop_collection() def test_custom_id_field(self): """Ensure that documents may be created with custom primary keys. 
@@ -1876,7 +826,6 @@ class DocumentTest(unittest.TestCase): class Site(Document): page = EmbeddedDocumentField(Page) - Site.drop_collection() site = Site(page=Page(log_message="Warning: Dummy message")) site.save() @@ -1903,7 +852,6 @@ class DocumentTest(unittest.TestCase): class Site(Document): page = EmbeddedDocumentField(Page) - Site.drop_collection() site = Site(page=Page(log_message="Warning: Dummy message")) @@ -1917,519 +865,6 @@ class DocumentTest(unittest.TestCase): site = Site.objects.first() self.assertEqual(site.page.log_message, "Error: Dummy message") - def test_circular_reference_deltas(self): - - class Person(Document): - name = StringField() - owns = ListField(ReferenceField('Organization')) - - class Organization(Document): - name = StringField() - owner = ReferenceField('Person') - - Person.drop_collection() - Organization.drop_collection() - - person = Person(name="owner") - person.save() - organization = Organization(name="company") - organization.save() - - person.owns.append(organization) - organization.owner = person - - person.save() - organization.save() - - p = Person.objects[0].select_related() - o = Organization.objects.first() - self.assertEqual(p.owns[0], o) - self.assertEqual(o.owner, p) - - def test_circular_reference_deltas_2(self): - - class Person(Document): - name = StringField() - owns = ListField( ReferenceField( 'Organization' ) ) - employer = ReferenceField( 'Organization' ) - - class Organization( Document ): - name = StringField() - owner = ReferenceField( 'Person' ) - employees = ListField( ReferenceField( 'Person' ) ) - - Person.drop_collection() - Organization.drop_collection() - - person = Person( name="owner" ) - person.save() - - employee = Person( name="employee" ) - employee.save() - - organization = Organization( name="company" ) - organization.save() - - person.owns.append( organization ) - organization.owner = person - - organization.employees.append( employee ) - employee.employer = organization - - 
person.save() - organization.save() - employee.save() - - p = Person.objects.get(name="owner") - e = Person.objects.get(name="employee") - o = Organization.objects.first() - - self.assertEqual(p.owns[0], o) - self.assertEqual(o.owner, p) - self.assertEqual(e.employer, o) - - def test_delta(self): - - class Doc(Document): - string_field = StringField() - int_field = IntField() - dict_field = DictField() - list_field = ListField() - - Doc.drop_collection() - doc = Doc() - doc.save() - - doc = Doc.objects.first() - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) - - doc.string_field = 'hello' - self.assertEqual(doc._get_changed_fields(), ['string_field']) - self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {})) - - doc._changed_fields = [] - doc.int_field = 1 - self.assertEqual(doc._get_changed_fields(), ['int_field']) - self.assertEqual(doc._delta(), ({'int_field': 1}, {})) - - doc._changed_fields = [] - dict_value = {'hello': 'world', 'ping': 'pong'} - doc.dict_field = dict_value - self.assertEqual(doc._get_changed_fields(), ['dict_field']) - self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {})) - - doc._changed_fields = [] - list_value = ['1', 2, {'hello': 'world'}] - doc.list_field = list_value - self.assertEqual(doc._get_changed_fields(), ['list_field']) - self.assertEqual(doc._delta(), ({'list_field': list_value}, {})) - - # Test unsetting - doc._changed_fields = [] - doc.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ['dict_field']) - self.assertEqual(doc._delta(), ({}, {'dict_field': 1})) - - doc._changed_fields = [] - doc.list_field = [] - self.assertEqual(doc._get_changed_fields(), ['list_field']) - self.assertEqual(doc._delta(), ({}, {'list_field': 1})) - - def test_delta_recursive(self): - - class Embedded(EmbeddedDocument): - string_field = StringField() - int_field = IntField() - dict_field = DictField() - list_field = ListField() - - class Doc(Document): - string_field = 
StringField() - int_field = IntField() - dict_field = DictField() - list_field = ListField() - embedded_field = EmbeddedDocumentField(Embedded) - - Doc.drop_collection() - doc = Doc() - doc.save() - - doc = Doc.objects.first() - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) - - embedded_1 = Embedded() - embedded_1.string_field = 'hello' - embedded_1.int_field = 1 - embedded_1.dict_field = {'hello': 'world'} - embedded_1.list_field = ['1', 2, {'hello': 'world'}] - doc.embedded_field = embedded_1 - - self.assertEqual(doc._get_changed_fields(), ['embedded_field']) - - embedded_delta = { - 'string_field': 'hello', - 'int_field': 1, - 'dict_field': {'hello': 'world'}, - 'list_field': ['1', 2, {'hello': 'world'}] - } - self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) - embedded_delta.update({ - '_types': ['Embedded'], - '_cls': 'Embedded', - }) - self.assertEqual(doc._delta(), ({'embedded_field': embedded_delta}, {})) - - doc.save() - doc = doc.reload(10) - - doc.embedded_field.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ['embedded_field.dict_field']) - self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1})) - self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.dict_field, {}) - - doc.embedded_field.list_field = [] - self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field']) - self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1})) - self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field, []) - - embedded_2 = Embedded() - embedded_2.string_field = 'hello' - embedded_2.int_field = 1 - embedded_2.dict_field = {'hello': 'world'} - embedded_2.list_field = ['1', 2, {'hello': 'world'}] - - doc.embedded_field.list_field = ['1', 2, embedded_2] - 
self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field']) - self.assertEqual(doc.embedded_field._delta(), ({ - 'list_field': ['1', 2, { - '_cls': 'Embedded', - '_types': ['Embedded'], - 'string_field': 'hello', - 'dict_field': {'hello': 'world'}, - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - }] - }, {})) - - self.assertEqual(doc._delta(), ({ - 'embedded_field.list_field': ['1', 2, { - '_cls': 'Embedded', - '_types': ['Embedded'], - 'string_field': 'hello', - 'dict_field': {'hello': 'world'}, - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - }] - }, {})) - doc.save() - doc = doc.reload(10) - - self.assertEqual(doc.embedded_field.list_field[0], '1') - self.assertEqual(doc.embedded_field.list_field[1], 2) - for k in doc.embedded_field.list_field[2]._fields: - self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k]) - - doc.embedded_field.list_field[2].string_field = 'world' - self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field']) - self.assertEqual(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {})) - self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, 'world') - - # Test multiple assignments - doc.embedded_field.list_field[2].string_field = 'hello world' - doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] - self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field']) - self.assertEqual(doc.embedded_field._delta(), ({ - 'list_field': ['1', 2, { - '_types': ['Embedded'], - '_cls': 'Embedded', - 'string_field': 'hello world', - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - 'dict_field': {'hello': 'world'}}]}, {})) - self.assertEqual(doc._delta(), ({ - 'embedded_field.list_field': ['1', 2, { - '_types': ['Embedded'], - '_cls': 'Embedded', - 'string_field': 
'hello world', - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - 'dict_field': {'hello': 'world'}} - ]}, {})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, 'hello world') - - # Test list native methods - doc.embedded_field.list_field[2].list_field.pop(0) - self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {})) - doc.save() - doc = doc.reload(10) - - doc.embedded_field.list_field[2].list_field.append(1) - self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1]) - - doc.embedded_field.list_field[2].list_field.sort(key=str) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}]) - - del(doc.embedded_field.list_field[2].list_field[2]['hello']) - self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {})) - doc.save() - doc = doc.reload(10) - - del(doc.embedded_field.list_field[2].list_field) - self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1})) - - doc.save() - doc = doc.reload(10) - - doc.dict_field['Embedded'] = embedded_1 - doc.save() - doc = doc.reload(10) - - doc.dict_field['Embedded'].string_field = 'Hello World' - self.assertEqual(doc._get_changed_fields(), ['dict_field.Embedded.string_field']) - self.assertEqual(doc._delta(), ({'dict_field.Embedded.string_field': 'Hello World'}, {})) - - - def test_delta_db_field(self): - - class Doc(Document): - string_field = StringField(db_field='db_string_field') - int_field = IntField(db_field='db_int_field') - dict_field = DictField(db_field='db_dict_field') - list_field = ListField(db_field='db_list_field') - - Doc.drop_collection() - doc = Doc() - doc.save() - - doc = Doc.objects.first() 
- self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) - - doc.string_field = 'hello' - self.assertEqual(doc._get_changed_fields(), ['db_string_field']) - self.assertEqual(doc._delta(), ({'db_string_field': 'hello'}, {})) - - doc._changed_fields = [] - doc.int_field = 1 - self.assertEqual(doc._get_changed_fields(), ['db_int_field']) - self.assertEqual(doc._delta(), ({'db_int_field': 1}, {})) - - doc._changed_fields = [] - dict_value = {'hello': 'world', 'ping': 'pong'} - doc.dict_field = dict_value - self.assertEqual(doc._get_changed_fields(), ['db_dict_field']) - self.assertEqual(doc._delta(), ({'db_dict_field': dict_value}, {})) - - doc._changed_fields = [] - list_value = ['1', 2, {'hello': 'world'}] - doc.list_field = list_value - self.assertEqual(doc._get_changed_fields(), ['db_list_field']) - self.assertEqual(doc._delta(), ({'db_list_field': list_value}, {})) - - # Test unsetting - doc._changed_fields = [] - doc.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ['db_dict_field']) - self.assertEqual(doc._delta(), ({}, {'db_dict_field': 1})) - - doc._changed_fields = [] - doc.list_field = [] - self.assertEqual(doc._get_changed_fields(), ['db_list_field']) - self.assertEqual(doc._delta(), ({}, {'db_list_field': 1})) - - # Test it saves that data - doc = Doc() - doc.save() - - doc.string_field = 'hello' - doc.int_field = 1 - doc.dict_field = {'hello': 'world'} - doc.list_field = ['1', 2, {'hello': 'world'}] - doc.save() - doc = doc.reload(10) - - self.assertEqual(doc.string_field, 'hello') - self.assertEqual(doc.int_field, 1) - self.assertEqual(doc.dict_field, {'hello': 'world'}) - self.assertEqual(doc.list_field, ['1', 2, {'hello': 'world'}]) - - def test_delta_recursive_db_field(self): - - class Embedded(EmbeddedDocument): - string_field = StringField(db_field='db_string_field') - int_field = IntField(db_field='db_int_field') - dict_field = DictField(db_field='db_dict_field') - list_field = 
ListField(db_field='db_list_field') - - class Doc(Document): - string_field = StringField(db_field='db_string_field') - int_field = IntField(db_field='db_int_field') - dict_field = DictField(db_field='db_dict_field') - list_field = ListField(db_field='db_list_field') - embedded_field = EmbeddedDocumentField(Embedded, db_field='db_embedded_field') - - Doc.drop_collection() - doc = Doc() - doc.save() - - doc = Doc.objects.first() - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) - - embedded_1 = Embedded() - embedded_1.string_field = 'hello' - embedded_1.int_field = 1 - embedded_1.dict_field = {'hello': 'world'} - embedded_1.list_field = ['1', 2, {'hello': 'world'}] - doc.embedded_field = embedded_1 - - self.assertEqual(doc._get_changed_fields(), ['db_embedded_field']) - - embedded_delta = { - 'db_string_field': 'hello', - 'db_int_field': 1, - 'db_dict_field': {'hello': 'world'}, - 'db_list_field': ['1', 2, {'hello': 'world'}] - } - self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) - embedded_delta.update({ - '_types': ['Embedded'], - '_cls': 'Embedded', - }) - self.assertEqual(doc._delta(), ({'db_embedded_field': embedded_delta}, {})) - - doc.save() - doc = doc.reload(10) - - doc.embedded_field.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ['db_embedded_field.db_dict_field']) - self.assertEqual(doc.embedded_field._delta(), ({}, {'db_dict_field': 1})) - self.assertEqual(doc._delta(), ({}, {'db_embedded_field.db_dict_field': 1})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.dict_field, {}) - - doc.embedded_field.list_field = [] - self.assertEqual(doc._get_changed_fields(), ['db_embedded_field.db_list_field']) - self.assertEqual(doc.embedded_field._delta(), ({}, {'db_list_field': 1})) - self.assertEqual(doc._delta(), ({}, {'db_embedded_field.db_list_field': 1})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field, []) - - embedded_2 
= Embedded() - embedded_2.string_field = 'hello' - embedded_2.int_field = 1 - embedded_2.dict_field = {'hello': 'world'} - embedded_2.list_field = ['1', 2, {'hello': 'world'}] - - doc.embedded_field.list_field = ['1', 2, embedded_2] - self.assertEqual(doc._get_changed_fields(), ['db_embedded_field.db_list_field']) - self.assertEqual(doc.embedded_field._delta(), ({ - 'db_list_field': ['1', 2, { - '_cls': 'Embedded', - '_types': ['Embedded'], - 'db_string_field': 'hello', - 'db_dict_field': {'hello': 'world'}, - 'db_int_field': 1, - 'db_list_field': ['1', 2, {'hello': 'world'}], - }] - }, {})) - - self.assertEqual(doc._delta(), ({ - 'db_embedded_field.db_list_field': ['1', 2, { - '_cls': 'Embedded', - '_types': ['Embedded'], - 'db_string_field': 'hello', - 'db_dict_field': {'hello': 'world'}, - 'db_int_field': 1, - 'db_list_field': ['1', 2, {'hello': 'world'}], - }] - }, {})) - doc.save() - doc = doc.reload(10) - - self.assertEqual(doc.embedded_field.list_field[0], '1') - self.assertEqual(doc.embedded_field.list_field[1], 2) - for k in doc.embedded_field.list_field[2]._fields: - self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k]) - - doc.embedded_field.list_field[2].string_field = 'world' - self.assertEqual(doc._get_changed_fields(), ['db_embedded_field.db_list_field.2.db_string_field']) - self.assertEqual(doc.embedded_field._delta(), ({'db_list_field.2.db_string_field': 'world'}, {})) - self.assertEqual(doc._delta(), ({'db_embedded_field.db_list_field.2.db_string_field': 'world'}, {})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, 'world') - - # Test multiple assignments - doc.embedded_field.list_field[2].string_field = 'hello world' - doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] - self.assertEqual(doc._get_changed_fields(), ['db_embedded_field.db_list_field']) - self.assertEqual(doc.embedded_field._delta(), ({ - 'db_list_field': ['1', 2, { - '_types': ['Embedded'], - 
'_cls': 'Embedded', - 'db_string_field': 'hello world', - 'db_int_field': 1, - 'db_list_field': ['1', 2, {'hello': 'world'}], - 'db_dict_field': {'hello': 'world'}}]}, {})) - self.assertEqual(doc._delta(), ({ - 'db_embedded_field.db_list_field': ['1', 2, { - '_types': ['Embedded'], - '_cls': 'Embedded', - 'db_string_field': 'hello world', - 'db_int_field': 1, - 'db_list_field': ['1', 2, {'hello': 'world'}], - 'db_dict_field': {'hello': 'world'}} - ]}, {})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, 'hello world') - - # Test list native methods - doc.embedded_field.list_field[2].list_field.pop(0) - self.assertEqual(doc._delta(), ({'db_embedded_field.db_list_field.2.db_list_field': [2, {'hello': 'world'}]}, {})) - doc.save() - doc = doc.reload(10) - - doc.embedded_field.list_field[2].list_field.append(1) - self.assertEqual(doc._delta(), ({'db_embedded_field.db_list_field.2.db_list_field': [2, {'hello': 'world'}, 1]}, {})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1]) - - doc.embedded_field.list_field[2].list_field.sort(key=str) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}]) - - del(doc.embedded_field.list_field[2].list_field[2]['hello']) - self.assertEqual(doc._delta(), ({'db_embedded_field.db_list_field.2.db_list_field': [1, 2, {}]}, {})) - doc.save() - doc = doc.reload(10) - - del(doc.embedded_field.list_field[2].list_field) - self.assertEqual(doc._delta(), ({}, {'db_embedded_field.db_list_field.2.db_list_field': 1})) - def test_save_only_changed_fields(self): """Ensure save only sets / unsets changed fields """ @@ -2437,7 +872,6 @@ class DocumentTest(unittest.TestCase): class User(self.Person): active = BooleanField(default=True) - User.drop_collection() # Create person object and save it to the database @@ -2697,29 +1131,6 @@ class 
DocumentTest(unittest.TestCase): promoted_employee.reload() self.assertEqual(promoted_employee.details, None) - def test_mixins_dont_add_to_types(self): - - class Mixin(object): - name = StringField() - - class Person(Document, Mixin): - pass - - Person.drop_collection() - - self.assertEqual(Person._fields.keys(), ['name', 'id']) - - Person(name="Rozza").save() - - collection = self.db[Person._get_collection_name()] - obj = collection.find_one() - self.assertEqual(obj['_cls'], 'Person') - self.assertEqual(obj['_types'], ['Person']) - - self.assertEqual(Person.objects.count(), 1) - - Person.drop_collection() - def test_object_mixins(self): class NameMixin(object): @@ -2795,22 +1206,6 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() - def test_cannot_perform_joins_references(self): - - class BlogPost(Document): - author = ReferenceField(self.Person) - author2 = GenericReferenceField() - - def test_reference(): - list(BlogPost.objects(author__name="test")) - - self.assertRaises(InvalidQueryError, test_reference) - - def test_generic_reference(): - list(BlogPost.objects(author2__name="test")) - - self.assertRaises(InvalidQueryError, test_generic_reference) - def test_duplicate_db_fields_raise_invalid_document_error(self): """Ensure a InvalidDocumentError is thrown if duplicate fields declare the same db_field""" @@ -3082,17 +1477,17 @@ class DocumentTest(unittest.TestCase): for u in User.objects.all(): all_user_dic[u] = "OK" - self.assertEqual(all_user_dic.get(u1, False), "OK" ) - self.assertEqual(all_user_dic.get(u2, False), "OK" ) - self.assertEqual(all_user_dic.get(u3, False), "OK" ) - self.assertEqual(all_user_dic.get(u4, False), False ) # New object - self.assertEqual(all_user_dic.get(b1, False), False ) # Other object - self.assertEqual(all_user_dic.get(b2, False), False ) # Other object + self.assertEqual(all_user_dic.get(u1, False), "OK") + self.assertEqual(all_user_dic.get(u2, False), "OK") + self.assertEqual(all_user_dic.get(u3, False), 
"OK") + self.assertEqual(all_user_dic.get(u4, False), False) # New object + self.assertEqual(all_user_dic.get(b1, False), False) # Other object + self.assertEqual(all_user_dic.get(b2, False), False) # Other object # in Set all_user_set = set(User.objects.all()) - self.assertTrue(u1 in all_user_set ) + self.assertTrue(u1 in all_user_set) def test_picklable(self): @@ -3313,7 +1708,7 @@ class DocumentTest(unittest.TestCase): # Bob Book.objects.create(name="1", author=bob, extra={"a": bob.to_dbref(), "b": [karl.to_dbref(), susan.to_dbref()]}) - Book.objects.create(name="2", author=bob, extra={"a": bob.to_dbref(), "b": karl.to_dbref()} ) + Book.objects.create(name="2", author=bob, extra={"a": bob.to_dbref(), "b": karl.to_dbref()}) Book.objects.create(name="3", author=bob, extra={"a": bob.to_dbref(), "c": [jon.to_dbref(), peter.to_dbref()]}) Book.objects.create(name="4", author=bob) @@ -3325,20 +1720,20 @@ class DocumentTest(unittest.TestCase): Book.objects.create(name="9", author=jon, extra={"a": peter.to_dbref()}) # Checks - self.assertEqual(u",".join([str(b) for b in Book.objects.all()] ) , "1,2,3,4,5,6,7,8,9" ) + self.assertEqual(u",".join([str(b) for b in Book.objects.all()]) , "1,2,3,4,5,6,7,8,9") # bob related books self.assertEqual(u",".join([str(b) for b in Book.objects.filter( - Q(extra__a=bob ) | + Q(extra__a=bob) | Q(author=bob) | Q(extra__b=bob))]) , "1,2,3,4") # Susan & Karl related books self.assertEqual(u",".join([str(b) for b in Book.objects.filter( - Q(extra__a__all=[karl, susan] ) | - Q(author__all=[karl, susan ] ) | - Q(extra__b__all=[karl.to_dbref(), susan.to_dbref()] ) - ) ] ) , "1" ) + Q(extra__a__all=[karl, susan]) | + Q(author__all=[karl, susan ]) | + Q(extra__b__all=[karl.to_dbref(), susan.to_dbref()]) + ) ]) , "1") # $Where self.assertEqual(u",".join([str(b) for b in Book.objects.filter( @@ -3348,7 +1743,7 @@ class DocumentTest(unittest.TestCase): return this.name == '1' || this.name == '2';}""" } - ) ]), "1,2") + ) ]), "1,2") class 
ValidatorErrorTest(unittest.TestCase): @@ -3504,5 +1899,6 @@ class ValidatorErrorTest(unittest.TestCase): self.assertRaises(OperationError, change_shard_key) + if __name__ == '__main__': unittest.main() diff --git a/tests/mongoengine.png b/tests/document/mongoengine.png similarity index 100% rename from tests/mongoengine.png rename to tests/document/mongoengine.png diff --git a/tests/migration/__init__.py b/tests/migration/__init__.py new file mode 100644 index 0000000..882e737 --- /dev/null +++ b/tests/migration/__init__.py @@ -0,0 +1,4 @@ +from turn_off_inheritance import * + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/migration/test_convert_to_new_inheritance_model.py b/tests/migration/test_convert_to_new_inheritance_model.py new file mode 100644 index 0000000..0ef37f7 --- /dev/null +++ b/tests/migration/test_convert_to_new_inheritance_model.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +import unittest + +from mongoengine import Document, connect +from mongoengine.connection import get_db +from mongoengine.fields import StringField + +__all__ = ('ConvertToNewInheritanceModel', ) + + +class ConvertToNewInheritanceModel(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + self.db = get_db() + + def tearDown(self): + for collection in self.db.collection_names(): + if 'system.' in collection: + continue + self.db.drop_collection(collection) + + def test_how_to_convert_to_the_new_inheritance_model(self): + """Demonstrates migrating from 0.7 to 0.8 + """ + + # 1. Declaration of the class + class Animal(Document): + name = StringField() + meta = { + 'allow_inheritance': True, + 'indexes': ['name'] + } + + # 2. Remove _types + collection = Animal._get_collection() + collection.update({}, {"$unset": {"_types": 1}}, multi=True) + + # 3. Confirm extra data is removed + count = collection.find({'_types': {"$exists": True}}).count() + assert count == 0 + + # 4. 
Remove indexes + info = collection.index_information() + indexes_to_drop = [key for key, value in info.iteritems() + if '_types' in dict(value['key'])] + for index in indexes_to_drop: + collection.drop_index(index) + + # 5. Recreate indexes + Animal.objects._ensure_indexes() diff --git a/tests/migration/turn_off_inheritance.py b/tests/migration/turn_off_inheritance.py new file mode 100644 index 0000000..5d0f7d7 --- /dev/null +++ b/tests/migration/turn_off_inheritance.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +import unittest + +from mongoengine import Document, connect +from mongoengine.connection import get_db +from mongoengine.fields import StringField + +__all__ = ('TurnOffInheritanceTest', ) + + +class TurnOffInheritanceTest(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + self.db = get_db() + + def tearDown(self): + for collection in self.db.collection_names(): + if 'system.' in collection: + continue + self.db.drop_collection(collection) + + def test_how_to_turn_off_inheritance(self): + """Demonstrates migrating from allow_inheritance = True to False. + """ + + # 1. Old declaration of the class + + class Animal(Document): + name = StringField() + meta = { + 'allow_inheritance': True, + 'indexes': ['name'] + } + + # 2. Turn off inheritance + class Animal(Document): + name = StringField() + meta = { + 'allow_inheritance': False, + 'indexes': ['name'] + } + + # 3. Remove _types and _cls + collection = Animal._get_collection() + collection.update({}, {"$unset": {"_types": 1, "_cls": 1}}, multi=True) + + # 3. Confirm extra data is removed + count = collection.find({"$or": [{'_types': {"$exists": True}}, + {'_cls': {"$exists": True}}]}).count() + assert count == 0 + + # 4. Remove indexes + info = collection.index_information() + indexes_to_drop = [key for key, value in info.iteritems() + if '_types' in dict(value['key']) + or '_cls' in dict(value['key'])] + for index in indexes_to_drop: + collection.drop_index(index) + + # 5. 
Recreate indexes + Animal.objects._ensure_indexes() diff --git a/tests/test_dynamic_document.py b/tests/test_dynamic_document.py deleted file mode 100644 index 23762a3..0000000 --- a/tests/test_dynamic_document.py +++ /dev/null @@ -1,533 +0,0 @@ -import unittest - -from mongoengine import * -from mongoengine.connection import get_db - - -class DynamicDocTest(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - self.db = get_db() - - class Person(DynamicDocument): - name = StringField() - meta = {'allow_inheritance': True} - - Person.drop_collection() - - self.Person = Person - - def test_simple_dynamic_document(self): - """Ensures simple dynamic documents are saved correctly""" - - p = self.Person() - p.name = "James" - p.age = 34 - - self.assertEqual(p.to_mongo(), - {"_types": ["Person"], "_cls": "Person", - "name": "James", "age": 34} - ) - - p.save() - - self.assertEqual(self.Person.objects.first().age, 34) - - # Confirm no changes to self.Person - self.assertFalse(hasattr(self.Person, 'age')) - - def test_dynamic_document_delta(self): - """Ensures simple dynamic documents can delta correctly""" - p = self.Person(name="James", age=34) - self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {})) - - p.doc = 123 - del(p.doc) - self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1})) - - def test_change_scope_of_variable(self): - """Test changing the scope of a dynamic field has no adverse effects""" - p = self.Person() - p.name = "Dean" - p.misc = 22 - p.save() - - p = self.Person.objects.get() - p.misc = {'hello': 'world'} - p.save() - - p = self.Person.objects.get() - self.assertEqual(p.misc, {'hello': 'world'}) - - def test_delete_dynamic_field(self): - """Test deleting a dynamic field works""" - self.Person.drop_collection() - p = self.Person() - p.name = "Dean" - p.misc = 22 - p.save() - - p = self.Person.objects.get() - p.misc = 
{'hello': 'world'} - p.save() - - p = self.Person.objects.get() - self.assertEqual(p.misc, {'hello': 'world'}) - collection = self.db[self.Person._get_collection_name()] - obj = collection.find_one() - self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name']) - - del(p.misc) - p.save() - - p = self.Person.objects.get() - self.assertFalse(hasattr(p, 'misc')) - - obj = collection.find_one() - self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'name']) - - def test_dynamic_document_queries(self): - """Ensure we can query dynamic fields""" - p = self.Person() - p.name = "Dean" - p.age = 22 - p.save() - - self.assertEqual(1, self.Person.objects(age=22).count()) - p = self.Person.objects(age=22) - p = p.get() - self.assertEqual(22, p.age) - - def test_complex_dynamic_document_queries(self): - class Person(DynamicDocument): - name = StringField() - - Person.drop_collection() - - p = Person(name="test") - p.age = "ten" - p.save() - - p1 = Person(name="test1") - p1.age = "less then ten and a half" - p1.save() - - p2 = Person(name="test2") - p2.age = 10 - p2.save() - - self.assertEqual(Person.objects(age__icontains='ten').count(), 2) - self.assertEqual(Person.objects(age__gte=10).count(), 1) - - def test_complex_data_lookups(self): - """Ensure you can query dynamic document dynamic fields""" - p = self.Person() - p.misc = {'hello': 'world'} - p.save() - - self.assertEqual(1, self.Person.objects(misc__hello='world').count()) - - def test_inheritance(self): - """Ensure that dynamic document plays nice with inheritance""" - class Employee(self.Person): - salary = IntField() - - Employee.drop_collection() - - self.assertTrue('name' in Employee._fields) - self.assertTrue('salary' in Employee._fields) - self.assertEqual(Employee._get_collection_name(), - self.Person._get_collection_name()) - - joe_bloggs = Employee() - joe_bloggs.name = "Joe Bloggs" - joe_bloggs.salary = 10 - joe_bloggs.age = 20 - joe_bloggs.save() - - self.assertEqual(1, 
self.Person.objects(age=20).count()) - self.assertEqual(1, Employee.objects(age=20).count()) - - joe_bloggs = self.Person.objects.first() - self.assertTrue(isinstance(joe_bloggs, Employee)) - - def test_embedded_dynamic_document(self): - """Test dynamic embedded documents""" - class Embedded(DynamicEmbeddedDocument): - pass - - class Doc(DynamicDocument): - pass - - Doc.drop_collection() - doc = Doc() - - embedded_1 = Embedded() - embedded_1.string_field = 'hello' - embedded_1.int_field = 1 - embedded_1.dict_field = {'hello': 'world'} - embedded_1.list_field = ['1', 2, {'hello': 'world'}] - doc.embedded_field = embedded_1 - - self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", - "embedded_field": { - "_types": ['Embedded'], "_cls": "Embedded", - "string_field": "hello", - "int_field": 1, - "dict_field": {"hello": "world"}, - "list_field": ['1', 2, {'hello': 'world'}] - } - }) - doc.save() - - doc = Doc.objects.first() - self.assertEqual(doc.embedded_field.__class__, Embedded) - self.assertEqual(doc.embedded_field.string_field, "hello") - self.assertEqual(doc.embedded_field.int_field, 1) - self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) - self.assertEqual(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}]) - - def test_complex_embedded_documents(self): - """Test complex dynamic embedded documents setups""" - class Embedded(DynamicEmbeddedDocument): - pass - - class Doc(DynamicDocument): - pass - - Doc.drop_collection() - doc = Doc() - - embedded_1 = Embedded() - embedded_1.string_field = 'hello' - embedded_1.int_field = 1 - embedded_1.dict_field = {'hello': 'world'} - - embedded_2 = Embedded() - embedded_2.string_field = 'hello' - embedded_2.int_field = 1 - embedded_2.dict_field = {'hello': 'world'} - embedded_2.list_field = ['1', 2, {'hello': 'world'}] - - embedded_1.list_field = ['1', 2, embedded_2] - doc.embedded_field = embedded_1 - - self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", - 
"embedded_field": { - "_types": ['Embedded'], "_cls": "Embedded", - "string_field": "hello", - "int_field": 1, - "dict_field": {"hello": "world"}, - "list_field": ['1', 2, - {"_types": ['Embedded'], "_cls": "Embedded", - "string_field": "hello", - "int_field": 1, - "dict_field": {"hello": "world"}, - "list_field": ['1', 2, {'hello': 'world'}]} - ] - } - }) - doc.save() - doc = Doc.objects.first() - self.assertEqual(doc.embedded_field.__class__, Embedded) - self.assertEqual(doc.embedded_field.string_field, "hello") - self.assertEqual(doc.embedded_field.int_field, 1) - self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) - self.assertEqual(doc.embedded_field.list_field[0], '1') - self.assertEqual(doc.embedded_field.list_field[1], 2) - - embedded_field = doc.embedded_field.list_field[2] - - self.assertEqual(embedded_field.__class__, Embedded) - self.assertEqual(embedded_field.string_field, "hello") - self.assertEqual(embedded_field.int_field, 1) - self.assertEqual(embedded_field.dict_field, {'hello': 'world'}) - self.assertEqual(embedded_field.list_field, ['1', 2, {'hello': 'world'}]) - - def test_delta_for_dynamic_documents(self): - p = self.Person() - p.name = "Dean" - p.age = 22 - p.save() - - p.age = 24 - self.assertEqual(p.age, 24) - self.assertEqual(p._get_changed_fields(), ['age']) - self.assertEqual(p._delta(), ({'age': 24}, {})) - - p = self.Person.objects(age=22).get() - p.age = 24 - self.assertEqual(p.age, 24) - self.assertEqual(p._get_changed_fields(), ['age']) - self.assertEqual(p._delta(), ({'age': 24}, {})) - - p.save() - self.assertEqual(1, self.Person.objects(age=24).count()) - - def test_delta(self): - - class Doc(DynamicDocument): - pass - - Doc.drop_collection() - doc = Doc() - doc.save() - - doc = Doc.objects.first() - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) - - doc.string_field = 'hello' - self.assertEqual(doc._get_changed_fields(), ['string_field']) - 
self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {})) - - doc._changed_fields = [] - doc.int_field = 1 - self.assertEqual(doc._get_changed_fields(), ['int_field']) - self.assertEqual(doc._delta(), ({'int_field': 1}, {})) - - doc._changed_fields = [] - dict_value = {'hello': 'world', 'ping': 'pong'} - doc.dict_field = dict_value - self.assertEqual(doc._get_changed_fields(), ['dict_field']) - self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {})) - - doc._changed_fields = [] - list_value = ['1', 2, {'hello': 'world'}] - doc.list_field = list_value - self.assertEqual(doc._get_changed_fields(), ['list_field']) - self.assertEqual(doc._delta(), ({'list_field': list_value}, {})) - - # Test unsetting - doc._changed_fields = [] - doc.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ['dict_field']) - self.assertEqual(doc._delta(), ({}, {'dict_field': 1})) - - doc._changed_fields = [] - doc.list_field = [] - self.assertEqual(doc._get_changed_fields(), ['list_field']) - self.assertEqual(doc._delta(), ({}, {'list_field': 1})) - - def test_delta_recursive(self): - """Testing deltaing works with dynamic documents""" - class Embedded(DynamicEmbeddedDocument): - pass - - class Doc(DynamicDocument): - pass - - Doc.drop_collection() - doc = Doc() - doc.save() - - doc = Doc.objects.first() - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) - - embedded_1 = Embedded() - embedded_1.string_field = 'hello' - embedded_1.int_field = 1 - embedded_1.dict_field = {'hello': 'world'} - embedded_1.list_field = ['1', 2, {'hello': 'world'}] - doc.embedded_field = embedded_1 - - self.assertEqual(doc._get_changed_fields(), ['embedded_field']) - - embedded_delta = { - 'string_field': 'hello', - 'int_field': 1, - 'dict_field': {'hello': 'world'}, - 'list_field': ['1', 2, {'hello': 'world'}] - } - self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) - embedded_delta.update({ - '_types': ['Embedded'], - '_cls': 
'Embedded', - }) - self.assertEqual(doc._delta(), ({'embedded_field': embedded_delta}, {})) - - doc.save() - doc.reload() - - doc.embedded_field.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ['embedded_field.dict_field']) - self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1})) - - self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1})) - doc.save() - doc.reload() - - doc.embedded_field.list_field = [] - self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field']) - self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1})) - self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1})) - doc.save() - doc.reload() - - embedded_2 = Embedded() - embedded_2.string_field = 'hello' - embedded_2.int_field = 1 - embedded_2.dict_field = {'hello': 'world'} - embedded_2.list_field = ['1', 2, {'hello': 'world'}] - - doc.embedded_field.list_field = ['1', 2, embedded_2] - self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field']) - self.assertEqual(doc.embedded_field._delta(), ({ - 'list_field': ['1', 2, { - '_cls': 'Embedded', - '_types': ['Embedded'], - 'string_field': 'hello', - 'dict_field': {'hello': 'world'}, - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - }] - }, {})) - - self.assertEqual(doc._delta(), ({ - 'embedded_field.list_field': ['1', 2, { - '_cls': 'Embedded', - '_types': ['Embedded'], - 'string_field': 'hello', - 'dict_field': {'hello': 'world'}, - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - }] - }, {})) - doc.save() - doc.reload() - - self.assertEqual(doc.embedded_field.list_field[2]._changed_fields, []) - self.assertEqual(doc.embedded_field.list_field[0], '1') - self.assertEqual(doc.embedded_field.list_field[1], 2) - for k in doc.embedded_field.list_field[2]._fields: - self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k]) - - doc.embedded_field.list_field[2].string_field = 'world' - 
self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field']) - self.assertEqual(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {})) - self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {})) - doc.save() - doc.reload() - self.assertEqual(doc.embedded_field.list_field[2].string_field, 'world') - - # Test multiple assignments - doc.embedded_field.list_field[2].string_field = 'hello world' - doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] - self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field']) - self.assertEqual(doc.embedded_field._delta(), ({ - 'list_field': ['1', 2, { - '_types': ['Embedded'], - '_cls': 'Embedded', - 'string_field': 'hello world', - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - 'dict_field': {'hello': 'world'}}]}, {})) - self.assertEqual(doc._delta(), ({ - 'embedded_field.list_field': ['1', 2, { - '_types': ['Embedded'], - '_cls': 'Embedded', - 'string_field': 'hello world', - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - 'dict_field': {'hello': 'world'}} - ]}, {})) - doc.save() - doc.reload() - self.assertEqual(doc.embedded_field.list_field[2].string_field, 'hello world') - - # Test list native methods - doc.embedded_field.list_field[2].list_field.pop(0) - self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {})) - doc.save() - doc.reload() - - doc.embedded_field.list_field[2].list_field.append(1) - self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {})) - doc.save() - doc.reload() - self.assertEqual(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1]) - - doc.embedded_field.list_field[2].list_field.sort(key=str)# use str as a key to allow comparing uncomperable types - doc.save() - doc.reload() - self.assertEqual(doc.embedded_field.list_field[2].list_field, [1, 2, 
{'hello': 'world'}]) - - del(doc.embedded_field.list_field[2].list_field[2]['hello']) - self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {})) - doc.save() - doc.reload() - - del(doc.embedded_field.list_field[2].list_field) - self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1})) - - doc.save() - doc.reload() - - doc.dict_field = {'embedded': embedded_1} - doc.save() - doc.reload() - - doc.dict_field['embedded'].string_field = 'Hello World' - self.assertEqual(doc._get_changed_fields(), ['dict_field.embedded.string_field']) - self.assertEqual(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {})) - - def test_indexes(self): - """Ensure that indexes are used when meta[indexes] is specified. - """ - class BlogPost(DynamicDocument): - meta = { - 'indexes': [ - '-date', - ('category', '-date') - ], - } - - BlogPost.drop_collection() - - info = BlogPost.objects._collection.index_information() - # _id, '-date', ('cat', 'date') - # NB: there is no index on _types by itself, since - # the indices on -date and tags will both contain - # _types as first element in the key - self.assertEqual(len(info), 3) - - # Indexes are lazy so use list() to perform query - list(BlogPost.objects) - info = BlogPost.objects._collection.index_information() - info = [value['key'] for key, value in info.iteritems()] - self.assertTrue([('_types', 1), ('category', 1), ('date', -1)] - in info) - self.assertTrue([('_types', 1), ('date', -1)] in info) - - def test_dynamic_and_embedded(self): - """Ensure embedded documents play nicely""" - - class Address(EmbeddedDocument): - city = StringField() - - class Person(DynamicDocument): - name = StringField() - meta = {'allow_inheritance': True} - - Person.drop_collection() - - Person(name="Ross", address=Address(city="London")).save() - - person = Person.objects.first() - person.address.city = "Lundenne" - person.save() - - 
self.assertEqual(Person.objects.first().address.city, "Lundenne") - - person = Person.objects.first() - person.address = Address(city="Londinium") - person.save() - - self.assertEqual(Person.objects.first().address.city, "Londinium") - - person = Person.objects.first() - person.age = 35 - person.save() - self.assertEqual(Person.objects.first().age, 35) diff --git a/tests/test_fields.py b/tests/test_fields.py index 9806550..118521f 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -14,10 +14,11 @@ import gridfs from nose.plugins.skip import SkipTest from mongoengine import * from mongoengine.connection import get_db -from mongoengine.base import _document_registry, NotRegistered +from mongoengine.base import _document_registry +from mongoengine.errors import NotRegistered from mongoengine.python_support import PY3, b, StringIO, bin_type -TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') +TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'document/mongoengine.png') class FieldTest(unittest.TestCase): diff --git a/tests/test_queryset.py b/tests/test_queryset.py index 690df5e..cdabadb 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -11,9 +11,12 @@ from mongoengine import * from mongoengine.connection import get_connection from mongoengine.python_support import PY3 from mongoengine.tests import query_counter -from mongoengine.queryset import (QuerySet, QuerySetManager, +from mongoengine.queryset import (Q, QuerySet, QuerySetManager, MultipleObjectsReturned, DoesNotExist, - QueryFieldList) + QueryFieldList, queryset_manager) +from mongoengine.queryset import transform +from mongoengine.errors import InvalidQueryError + class QuerySetTest(unittest.TestCase): @@ -40,19 +43,34 @@ class QuerySetTest(unittest.TestCase): def test_transform_query(self): """Ensure that the _transform_query function operates correctly. 
""" - self.assertEqual(QuerySet._transform_query(name='test', age=30), + self.assertEqual(transform.query(name='test', age=30), {'name': 'test', 'age': 30}) - self.assertEqual(QuerySet._transform_query(age__lt=30), + self.assertEqual(transform.query(age__lt=30), {'age': {'$lt': 30}}) - self.assertEqual(QuerySet._transform_query(age__gt=20, age__lt=50), + self.assertEqual(transform.query(age__gt=20, age__lt=50), {'age': {'$gt': 20, '$lt': 50}}) - self.assertEqual(QuerySet._transform_query(age=20, age__gt=50), + self.assertEqual(transform.query(age=20, age__gt=50), {'age': 20}) - self.assertEqual(QuerySet._transform_query(friend__age__gte=30), + self.assertEqual(transform.query(friend__age__gte=30), {'friend.age': {'$gte': 30}}) - self.assertEqual(QuerySet._transform_query(name__exists=True), + self.assertEqual(transform.query(name__exists=True), {'name': {'$exists': True}}) + def test_cannot_perform_joins_references(self): + + class BlogPost(Document): + author = ReferenceField(self.Person) + author2 = GenericReferenceField() + + def test_reference(): + list(BlogPost.objects(author__name="test")) + + self.assertRaises(InvalidQueryError, test_reference) + + def test_generic_reference(): + list(BlogPost.objects(author2__name="test")) + + def test_find(self): """Ensure that a query returns a valid set of results. 
""" @@ -921,10 +939,9 @@ class QuerySetTest(unittest.TestCase): # find all published blog posts before 2010-01-07 published_posts = BlogPost.published() published_posts = published_posts.filter( - published_date__lt=datetime(2010, 1, 7, 0, 0 ,0)) + published_date__lt=datetime(2010, 1, 7, 0, 0, 0)) self.assertEqual(published_posts.count(), 2) - blog_posts = BlogPost.objects blog_posts = blog_posts.filter(blog__in=[blog_1, blog_2]) blog_posts = blog_posts.filter(blog=blog_3) @@ -935,7 +952,7 @@ class QuerySetTest(unittest.TestCase): def test_raw_and_merging(self): class Doc(Document): - pass + meta = {'allow_inheritance': False} raw_query = Doc.objects(__raw__={'deleted': False, 'scraped': 'yes', @@ -943,7 +960,7 @@ class QuerySetTest(unittest.TestCase): {'attachments.views.extracted':'no'}] })._query - expected = {'deleted': False, '_types': 'Doc', 'scraped': 'yes', + expected = {'deleted': False, 'scraped': 'yes', '$nor': [{'views.extracted': 'no'}, {'attachments.views.extracted': 'no'}]} self.assertEqual(expected, raw_query) @@ -2598,68 +2615,6 @@ class QuerySetTest(unittest.TestCase): Group.drop_collection() - def test_types_index(self): - """Ensure that and index is used when '_types' is being used in a - query. - """ - class BlogPost(Document): - date = DateTimeField() - meta = {'indexes': ['-date']} - - # Indexes are lazy so use list() to perform query - list(BlogPost.objects) - info = BlogPost.objects._collection.index_information() - info = [value['key'] for key, value in info.iteritems()] - self.assertTrue([('_types', 1)] in info) - self.assertTrue([('_types', 1), ('date', -1)] in info) - - def test_dont_index_types(self): - """Ensure that index_types will, when disabled, prevent _types - being added to all indices. 
- """ - class BloggPost(Document): - date = DateTimeField() - meta = {'index_types': False, - 'indexes': ['-date']} - - # Indexes are lazy so use list() to perform query - list(BloggPost.objects) - info = BloggPost.objects._collection.index_information() - info = [value['key'] for key, value in info.iteritems()] - self.assertTrue([('_types', 1)] not in info) - self.assertTrue([('date', -1)] in info) - - BloggPost.drop_collection() - - class BloggPost(Document): - title = StringField() - meta = {'allow_inheritance': False} - - # _types is not used on objects where allow_inheritance is False - list(BloggPost.objects) - info = BloggPost.objects._collection.index_information() - self.assertFalse([('_types', 1)] in info.values()) - - BloggPost.drop_collection() - - def test_types_index_with_pk(self): - - class Comment(EmbeddedDocument): - comment_id = IntField(required=True) - - try: - class BlogPost(Document): - comments = EmbeddedDocumentField(Comment) - meta = {'indexes': [{'fields': ['pk', 'comments.comment_id'], - 'unique': True}]} - except UnboundLocalError: - self.fail('Unbound local error at types index + pk definition') - - info = BlogPost.objects._collection.index_information() - info = [value['key'] for key, value in info.iteritems()] - index_item = [(u'_types', 1), (u'_id', 1), (u'comments.comment_id', 1)] - self.assertTrue(index_item in info) - def test_dict_with_custom_baseclass(self): """Ensure DictField working with custom base clases. 
""" @@ -3116,6 +3071,7 @@ class QuerySetTest(unittest.TestCase): """ class Comment(Document): message = StringField() + meta = {'allow_inheritance': True} Comment.objects.ensure_index('message') @@ -3124,7 +3080,7 @@ class QuerySetTest(unittest.TestCase): value.get('unique', False), value.get('sparse', False)) for key, value in info.iteritems()] - self.assertTrue(([('_types', 1), ('message', 1)], False, False) in info) + self.assertTrue(([('_cls', 1), ('message', 1)], False, False) in info) def test_where(self): """Ensure that where clauses work. From 59826c8cfd42a13bb0e4cf9f9e0f62ab1d27a543 Mon Sep 17 00:00:00 2001 From: Marcelo Anton Date: Thu, 18 Oct 2012 11:44:18 -0300 Subject: [PATCH 008/464] This change in how the variable is declared DESCRIPTION corrects problems when running the command ``python setup.py bdist_rpm`` --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 6d9b51b..35ca579 100644 --- a/setup.py +++ b/setup.py @@ -8,8 +8,8 @@ try: except ImportError: pass -DESCRIPTION = """MongoEngine is a Python Object-Document -Mapper for working with MongoDB.""" +DESCRIPTION = 'MongoEngine is a Python Object-Document ' + \ +'Mapper for working with MongoDB.' 
LONG_DESCRIPTION = None try: LONG_DESCRIPTION = open('README.rst').read() From 3d5b6ae332291821d60d09e761f5e5b02529a404 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 17 Oct 2012 11:36:18 +0000 Subject: [PATCH 009/464] Inheritance is off by default (MongoEngine/mongoengine#122) --- docs/changelog.rst | 1 + docs/guide/defining-documents.rst | 30 +++++++------- docs/tutorial.rst | 5 ++- docs/upgrade.rst | 24 ++++++++++-- mongoengine/base/common.py | 2 +- mongoengine/base/document.py | 42 +++++++++++++------- mongoengine/base/fields.py | 31 ++++++++++----- mongoengine/base/metaclasses.py | 23 ++++++----- mongoengine/dereference.py | 5 ++- mongoengine/document.py | 19 ++++++--- mongoengine/fields.py | 51 ++++++++++++++---------- mongoengine/queryset/queryset.py | 2 +- tests/all_warnings/__init__.py | 18 +-------- tests/document/delta.py | 29 +++++++------- tests/document/dynamic.py | 12 ++++-- tests/document/inheritance.py | 2 - tests/document/instance.py | 65 +++++++++++++++++-------------- tests/test_dereference.py | 12 ++---- tests/test_fields.py | 32 +++++++++------ tests/test_queryset.py | 17 ++++---- 20 files changed, 245 insertions(+), 177 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 8388b05..1970bf0 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8 ============== +- Inheritance is off by default (MongoEngine/mongoengine#122) - Remove _types and just use _cls for inheritance (MongoEngine/mongoengine#148) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index cf3b5a6..ea8e05b 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -462,9 +462,10 @@ If a dictionary is passed then the following options are available: The fields to index. Specified in the same format as described above. 
:attr:`cls` (Default: True) - If you have polymorphic models that inherit and have `allow_inheritance` - turned on, you can configure whether the index should have the - :attr:`_cls` field added automatically to the start of the index. + If you have polymorphic models that inherit and have + :attr:`allow_inheritance` turned on, you can configure whether the index + should have the :attr:`_cls` field added automatically to the start of the + index. :attr:`sparse` (Default: False) Whether the index should be sparse. @@ -573,7 +574,9 @@ defined, you may subclass it and add any extra fields or methods you may need. As this is new class is not a direct subclass of :class:`~mongoengine.Document`, it will not be stored in its own collection; it will use the same collection as its superclass uses. This allows for more -convenient and efficient retrieval of related documents:: +convenient and efficient retrieval of related documents - all you need do is +set :attr:`allow_inheritance` to True in the :attr:`meta` data for a +document.:: # Stored in a collection named 'page' class Page(Document): @@ -585,25 +588,20 @@ convenient and efficient retrieval of related documents:: class DatedPage(Page): date = DateTimeField() -.. note:: From 0.7 onwards you must declare `allow_inheritance` in the document meta. +.. note:: From 0.8 onwards you must declare :attr:`allow_inheritance` defaults + to False, meaning you must set it to True to use inheritance. Working with existing data -------------------------- -To enable correct retrieval of documents involved in this kind of heirarchy, -an extra attribute is stored on each document in the database: :attr:`_cls`. -These are hidden from the user through the MongoEngine interface, but may not -be present if you are trying to use MongoEngine with an existing database. - -For this reason, you may disable this inheritance mechansim, removing the -dependency of :attr:`_cls`, enabling you to work with existing databases. 
-To disable inheritance on a document class, set :attr:`allow_inheritance` to -``False`` in the :attr:`meta` dictionary:: +As MongoEngine no longer defaults to needing :attr:`_cls` you can quickly and +easily get working with existing data. Just define the document to match +the expected schema in your database. If you have wildly varying schemas then +a :class:`~mongoengine.DynamicDocument` might be more appropriate. # Will work with data in an existing collection named 'cmsPage' class Page(Document): title = StringField(max_length=200, required=True) meta = { - 'collection': 'cmsPage', - 'allow_inheritance': False, + 'collection': 'cmsPage' } diff --git a/docs/tutorial.rst b/docs/tutorial.rst index a5284c8..c2fb5b9 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -84,12 +84,15 @@ using* the new fields we need to support video posts. This fits with the Object-Oriented principle of *inheritance* nicely. We can think of :class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and :class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports -this kind of modelling out of the box:: +this kind of modelling out of the box - all you need do is turn on inheritance +by setting :attr:`allow_inheritance` to True in the :attr:`meta`:: class Post(Document): title = StringField(max_length=120, required=True) author = ReferenceField(User) + meta = {'allow_inheritance': True} + class TextPost(Post): content = StringField() diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 99e3078..bf0a842 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -8,10 +8,13 @@ Upgrading Inheritance ----------- +Data Model +~~~~~~~~~~ + The inheritance model has changed, we no longer need to store an array of -`types` with the model we can just use the classname in `_cls`. This means -that you will have to update your indexes for each of your inherited classes -like so: +:attr:`types` with the model we can just use the classname in :attr:`_cls`. 
+This means that you will have to update your indexes for each of your +inherited classes like so: # 1. Declaration of the class class Animal(Document): @@ -40,6 +43,19 @@ like so: Animal.objects._ensure_indexes() +Document Definition +~~~~~~~~~~~~~~~~~~~ + +The default for inheritance has changed - its now off by default and +:attr:`_cls` will not be stored automatically with the class. So if you extend +your :class:`~mongoengine.Document` or :class:`~mongoengine.EmbeddedDocuments` +you will need to declare :attr:`allow_inheritance` in the meta data like so: + + class Animal(Document): + name = StringField() + + meta = {'allow_inheritance': True} + 0.6 to 0.7 ========== @@ -123,7 +139,7 @@ Document.objects.with_id - now raises an InvalidQueryError if used with a filter. FutureWarning - A future warning has been added to all inherited classes that -don't define `allow_inheritance` in their meta. +don't define :attr:`allow_inheritance` in their meta. You may need to update pyMongo to 2.0 for use with Sharding. 
diff --git a/mongoengine/base/common.py b/mongoengine/base/common.py index 648561b..82728d1 100644 --- a/mongoengine/base/common.py +++ b/mongoengine/base/common.py @@ -2,7 +2,7 @@ from mongoengine.errors import NotRegistered __all__ = ('ALLOW_INHERITANCE', 'get_document', '_document_registry') -ALLOW_INHERITANCE = True +ALLOW_INHERITANCE = False _document_registry = {} diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index af97e1f..bc509af 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -50,7 +50,6 @@ class BaseDocument(object): for key, value in values.iteritems(): key = self._reverse_db_field_map.get(key, key) setattr(self, key, value) - # Set any get_fieldname_display methods self.__set_field_display() @@ -83,6 +82,11 @@ class BaseDocument(object): if hasattr(self, '_changed_fields'): self._mark_as_changed(name) + # Check if the user has created a new instance of a class + if (self._is_document and self._initialised + and self._created and name == self._meta['id_field']): + super(BaseDocument, self).__setattr__('_created', False) + if (self._is_document and not self._created and name in self._meta.get('shard_key', tuple()) and self._data.get(name) != value): @@ -171,14 +175,24 @@ class BaseDocument(object): """Return data dictionary ready for use with MongoDB. 
""" data = {} - for field_name, field in self._fields.items(): - value = getattr(self, field_name, None) + for field_name, field in self._fields.iteritems(): + value = self._data.get(field_name, None) if value is not None: - data[field.db_field] = field.to_mongo(value) - # Only add _cls if allow_inheritance is not False - if not (hasattr(self, '_meta') and - self._meta.get('allow_inheritance', ALLOW_INHERITANCE) == False): + value = field.to_mongo(value) + + # Handle self generating fields + if value is None and field._auto_gen: + value = field.generate() + self._data[field_name] = value + + if value is not None: + data[field.db_field] = value + + # Only add _cls if allow_inheritance is True + if (hasattr(self, '_meta') and + self._meta.get('allow_inheritance', ALLOW_INHERITANCE) == True): data['_cls'] = self._class_name + if '_id' in data and data['_id'] is None: del data['_id'] @@ -194,7 +208,7 @@ class BaseDocument(object): are present. """ # Get a list of tuples of field names and their current values - fields = [(field, getattr(self, name)) + fields = [(field, self._data.get(name)) for name, field in self._fields.items()] # Ensure that each field is matched to a valid value @@ -207,7 +221,7 @@ class BaseDocument(object): errors[field.name] = error.errors or error except (ValueError, AttributeError, AssertionError), error: errors[field.name] = error - elif field.required: + elif field.required and not getattr(field, '_auto_gen', False): errors[field.name] = ValidationError('Field is required', field_name=field.name) if errors: @@ -313,6 +327,7 @@ class BaseDocument(object): """ # Handles cases where not loaded from_son but has _id doc = self.to_mongo() + set_fields = self._get_changed_fields() set_data = {} unset_data = {} @@ -370,7 +385,6 @@ class BaseDocument(object): if hasattr(d, '_fields'): field_name = d._reverse_db_field_map.get(db_field_name, db_field_name) - if field_name in d._fields: default = d._fields.get(field_name).default else: @@ -379,6 +393,7 
@@ class BaseDocument(object): if default is not None: if callable(default): default = default() + if default != value: continue @@ -399,15 +414,12 @@ class BaseDocument(object): # get the class name from the document, falling back to the given # class if unavailable class_name = son.get('_cls', cls._class_name) - data = dict(("%s" % key, value) for key, value in son.items()) + data = dict(("%s" % key, value) for key, value in son.iteritems()) if not UNICODE_KWARGS: # python 2.6.4 and lower cannot handle unicode keys # passed to class constructor example: cls(**data) to_str_keys_recursive(data) - if '_cls' in data: - del data['_cls'] - # Return correct subclass for document type if class_name != cls._class_name: cls = get_document(class_name) @@ -415,7 +427,7 @@ class BaseDocument(object): changed_fields = [] errors_dict = {} - for field_name, field in cls._fields.items(): + for field_name, field in cls._fields.iteritems(): if field.db_field in data: value = data[field.db_field] try: diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index 44f5e13..00e040c 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -21,6 +21,7 @@ class BaseField(object): name = None _geo_index = False + _auto_gen = False # Call `generate` to generate a value # These track each time a Field instance is created. Used to retain order. 
# The auto_creation_counter is used for fields that MongoEngine implicitly @@ -36,7 +37,6 @@ class BaseField(object): if name: msg = "Fields' 'name' attribute deprecated in favour of 'db_field'" warnings.warn(msg, DeprecationWarning) - self.name = None self.required = required or primary_key self.default = default self.unique = bool(unique or unique_with) @@ -62,7 +62,6 @@ class BaseField(object): if instance is None: # Document class being used rather than a document object return self - # Get value from document instance if available, if not use default value = instance._data.get(self.name) @@ -241,12 +240,21 @@ class ComplexBaseField(BaseField): """Convert a Python type to a MongoDB-compatible type. """ Document = _import_class("Document") + EmbeddedDocument = _import_class("EmbeddedDocument") + GenericReferenceField = _import_class("GenericReferenceField") if isinstance(value, basestring): return value if hasattr(value, 'to_mongo'): - return value.to_mongo() + if isinstance(value, Document): + return GenericReferenceField().to_mongo(value) + cls = value.__class__ + val = value.to_mongo() + # If we its a document thats not inherited add _cls + if (isinstance(value, EmbeddedDocument)): + val['_cls'] = cls.__name__ + return val is_list = False if not hasattr(value, 'items'): @@ -258,10 +266,10 @@ class ComplexBaseField(BaseField): if self.field: value_dict = dict([(key, self.field.to_mongo(item)) - for key, item in value.items()]) + for key, item in value.iteritems()]) else: value_dict = {} - for k, v in value.items(): + for k, v in value.iteritems(): if isinstance(v, Document): # We need the id from the saved object to create the DBRef if v.pk is None: @@ -274,16 +282,19 @@ class ComplexBaseField(BaseField): meta = getattr(v, '_meta', {}) allow_inheritance = ( meta.get('allow_inheritance', ALLOW_INHERITANCE) - == False) - if allow_inheritance and not self.field: - GenericReferenceField = _import_class( - "GenericReferenceField") + == True) + if not 
allow_inheritance and not self.field: value_dict[k] = GenericReferenceField().to_mongo(v) else: collection = v._get_collection_name() value_dict[k] = DBRef(collection, v.pk) elif hasattr(v, 'to_mongo'): - value_dict[k] = v.to_mongo() + cls = v.__class__ + val = v.to_mongo() + # If we its a document thats not inherited add _cls + if (isinstance(v, (Document, EmbeddedDocument))): + val['_cls'] = cls.__name__ + value_dict[k] = val else: value_dict[k] = self.to_mongo(v) diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index f87b03e..e68ec13 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -34,6 +34,17 @@ class DocumentMetaclass(type): if 'meta' in attrs: attrs['_meta'] = attrs.pop('meta') + # EmbeddedDocuments should inherit meta data + if '_meta' not in attrs: + meta = MetaDict() + for base in flattened_bases[::-1]: + # Add any mixin metadata from plain objects + if hasattr(base, 'meta'): + meta.merge(base.meta) + elif hasattr(base, '_meta'): + meta.merge(base._meta) + attrs['_meta'] = meta + # Handle document Fields # Merge all fields from subclasses @@ -52,6 +63,7 @@ class DocumentMetaclass(type): if not attr_value.db_field: attr_value.db_field = attr_name base_fields[attr_name] = attr_value + doc_fields.update(base_fields) # Discover any document fields @@ -98,15 +110,7 @@ class DocumentMetaclass(type): # inheritance of classes where inheritance is set to False allow_inheritance = base._meta.get('allow_inheritance', ALLOW_INHERITANCE) - if (not getattr(base, '_is_base_cls', True) - and allow_inheritance is None): - warnings.warn( - "%s uses inheritance, the default for " - "allow_inheritance is changing to off by default. " - "Please add it to the document meta." 
% name, - FutureWarning - ) - elif (allow_inheritance == False and + if (allow_inheritance != True and not base._meta.get('abstract')): raise ValueError('Document %s may not be subclassed' % base.__name__) @@ -353,6 +357,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): if not new_class._meta.get('id_field'): new_class._meta['id_field'] = 'id' new_class._fields['id'] = ObjectIdField(db_field='_id') + new_class._fields['id'].name = 'id' new_class.id = new_class._fields['id'] # Merge in exceptions with parent hierarchy diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index 59cc0a5..25d46b4 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -121,7 +121,10 @@ class DeReference(object): for key, doc in references.iteritems(): object_map[key] = doc else: # Generic reference: use the refs data to convert to document - if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,) ): + if isinstance(doc_type, (ListField, DictField, MapField,)): + continue + + if doc_type: references = doc_type._get_db()[col].find({'_id': {'$in': refs}}) for ref in references: doc = doc_type._from_son(ref) diff --git a/mongoengine/document.py b/mongoengine/document.py index b1ce13a..95dd624 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -117,6 +117,7 @@ class Document(BaseDocument): """ def fget(self): return getattr(self, self._meta['id_field']) + def fset(self, value): return setattr(self, self._meta['id_field'], value) return property(fget, fset) @@ -125,7 +126,7 @@ class Document(BaseDocument): @classmethod def _get_db(cls): """Some Model using other db_alias""" - return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME )) + return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)) @classmethod def _get_collection(cls): @@ -212,11 +213,11 @@ class Document(BaseDocument): doc = self.to_mongo() - created = force_insert or '_id' not in doc + find_delta = ('_id' not in doc or self._created 
or force_insert) try: collection = self.__class__.objects._collection - if created: + if find_delta: if force_insert: object_id = collection.insert(doc, safe=safe, **write_options) @@ -271,7 +272,8 @@ class Document(BaseDocument): self._changed_fields = [] self._created = False - signals.post_save.send(self.__class__, document=self, created=created) + signals.post_save.send(self.__class__, document=self, + created=find_delta) return self def cascade_save(self, warn_cascade=None, *args, **kwargs): @@ -373,6 +375,7 @@ class Document(BaseDocument): for name in self._dynamic_fields.keys(): setattr(self, name, self._reload(name, obj._data[name])) self._changed_fields = obj._changed_fields + self._created = False return obj def _reload(self, key, value): @@ -464,7 +467,13 @@ class DynamicEmbeddedDocument(EmbeddedDocument): """Deletes the attribute by setting to None and allowing _delta to unset it""" field_name = args[0] - setattr(self, field_name, None) + if field_name in self._fields: + default = self._fields[field_name].default + if callable(default): + default = default() + setattr(self, field_name, default) + else: + setattr(self, field_name, None) class MapReduceDocument(object): diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 9bcba9f..15e1626 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -16,12 +16,11 @@ from mongoengine.errors import ValidationError from mongoengine.python_support import (PY3, bin_type, txt_type, str_types, StringIO) from base import (BaseField, ComplexBaseField, ObjectIdField, - get_document, BaseDocument) + get_document, BaseDocument, ALLOW_INHERITANCE) from queryset import DO_NOTHING, QuerySet from document import Document, EmbeddedDocument from connection import get_db, DEFAULT_CONNECTION_NAME - try: from PIL import Image, ImageOps except ImportError: @@ -314,16 +313,16 @@ class DateTimeField(BaseField): usecs = 0 kwargs = {'microsecond': usecs} try: # Seconds are optional, so try converting seconds first. 
- return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M:%S')[:6], - **kwargs) + return datetime.datetime(*time.strptime(value, + '%Y-%m-%d %H:%M:%S')[:6], **kwargs) except ValueError: try: # Try without seconds. - return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M')[:5], - **kwargs) + return datetime.datetime(*time.strptime(value, + '%Y-%m-%d %H:%M')[:5], **kwargs) except ValueError: # Try without hour/minutes/seconds. try: - return datetime.datetime(*time.strptime(value, '%Y-%m-%d')[:3], - **kwargs) + return datetime.datetime(*time.strptime(value, + '%Y-%m-%d')[:3], **kwargs) except ValueError: return None @@ -410,6 +409,7 @@ class ComplexDateTimeField(StringField): return super(ComplexDateTimeField, self).__set__(instance, value) def validate(self, value): + value = self.to_python(value) if not isinstance(value, datetime.datetime): self.error('Only datetime objects may used in a ' 'ComplexDateTimeField') @@ -422,6 +422,7 @@ class ComplexDateTimeField(StringField): return original_value def to_mongo(self, value): + value = self.to_python(value) return self._convert_from_datetime(value) def prepare_query_value(self, op, value): @@ -529,7 +530,12 @@ class DynamicField(BaseField): return value if hasattr(value, 'to_mongo'): - return value.to_mongo() + cls = value.__class__ + val = value.to_mongo() + # If we its a document thats not inherited add _cls + if (isinstance(value, (Document, EmbeddedDocument))): + val['_cls'] = cls.__name__ + return val if not isinstance(value, (dict, list, tuple)): return value @@ -540,13 +546,12 @@ class DynamicField(BaseField): value = dict([(k, v) for k, v in enumerate(value)]) data = {} - for k, v in value.items(): + for k, v in value.iteritems(): data[k] = self.to_mongo(v) + value = data if is_list: # Convert back to a list - value = [v for k, v in sorted(data.items(), key=itemgetter(0))] - else: - value = data + value = [v for k, v in sorted(data.iteritems(), key=itemgetter(0))] return value def 
lookup_member(self, member_name): @@ -666,7 +671,6 @@ class DictField(ComplexBaseField): if op in match_operators and isinstance(value, basestring): return StringField().prepare_query_value(op, value) - return super(DictField, self).prepare_query_value(op, value) @@ -1323,7 +1327,8 @@ class GeoPointField(BaseField): class SequenceField(IntField): - """Provides a sequental counter (see http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers) + """Provides a sequental counter see: + http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers .. note:: @@ -1335,17 +1340,21 @@ class SequenceField(IntField): .. versionadded:: 0.5 """ - def __init__(self, collection_name=None, db_alias = None, sequence_name = None, *args, **kwargs): + _auto_gen = True + + def __init__(self, collection_name=None, db_alias=None, + sequence_name=None, *args, **kwargs): self.collection_name = collection_name or 'mongoengine.counters' self.db_alias = db_alias or DEFAULT_CONNECTION_NAME self.sequence_name = sequence_name return super(SequenceField, self).__init__(*args, **kwargs) - def generate_new_value(self): + def generate(self): """ Generate and Increment the counter """ - sequence_name = self.sequence_name or self.owner_document._get_collection_name() + sequence_name = (self.sequence_name or + self.owner_document._get_collection_name()) sequence_id = "%s.%s" % (sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] counter = collection.find_and_modify(query={"_id": sequence_id}, @@ -1365,7 +1374,7 @@ class SequenceField(IntField): value = instance._data.get(self.name) if not value and instance._initialised: - value = self.generate_new_value() + value = self.generate() instance._data[self.name] = value instance._mark_as_changed(self.name) @@ -1374,13 +1383,13 @@ class SequenceField(IntField): def __set__(self, instance, value): if value is None and instance._initialised: - value = self.generate_new_value() + value = 
self.generate() return super(SequenceField, self).__set__(instance, value) def to_python(self, value): if value is None: - value = self.generate_new_value() + value = self.generate() return value diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 5108066..dd7200b 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -58,7 +58,7 @@ class QuerySet(object): # If inheritance is allowed, only return instances and instances of # subclasses of the class being used - if document._meta.get('allow_inheritance') != False: + if document._meta.get('allow_inheritance') == True: self._initial_query = {"_cls": {"$in": self._document._subclasses}} self._loaded_fields = QueryFieldList(always_include=['_cls']) self._cursor_obj = None diff --git a/tests/all_warnings/__init__.py b/tests/all_warnings/__init__.py index 72de822..4609c5a 100644 --- a/tests/all_warnings/__init__.py +++ b/tests/all_warnings/__init__.py @@ -29,22 +29,6 @@ class AllWarnings(unittest.TestCase): # restore default handling of warnings warnings.showwarning = self.showwarning_default - def test_allow_inheritance_future_warning(self): - """Add FutureWarning for future allow_inhertiance default change. 
- """ - - class SimpleBase(Document): - a = IntField() - - class InheritedClass(SimpleBase): - b = IntField() - - InheritedClass() - self.assertEqual(len(self.warning_list), 1) - warning = self.warning_list[0] - self.assertEqual(FutureWarning, warning["category"]) - self.assertTrue("InheritedClass" in str(warning["message"])) - def test_dbref_reference_field_future_warning(self): class Person(Document): @@ -93,7 +77,7 @@ class AllWarnings(unittest.TestCase): def test_document_collection_syntax_warning(self): class NonAbstractBase(Document): - pass + meta = {'allow_inheritance': True} class InheritedDocumentFailTest(NonAbstractBase): meta = {'collection': 'fail'} diff --git a/tests/document/delta.py b/tests/document/delta.py index f8a071d..c6191d9 100644 --- a/tests/document/delta.py +++ b/tests/document/delta.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +import sys +sys.path[0:0] = [""] import unittest from mongoengine import * @@ -126,9 +128,6 @@ class DeltaTest(unittest.TestCase): 'list_field': ['1', 2, {'hello': 'world'}] } self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) - embedded_delta.update({ - '_cls': 'Embedded', - }) self.assertEqual(doc._delta(), ({'embedded_field': embedded_delta}, {})) @@ -162,6 +161,7 @@ class DeltaTest(unittest.TestCase): doc.embedded_field.list_field = ['1', 2, embedded_2] self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field']) + self.assertEqual(doc.embedded_field._delta(), ({ 'list_field': ['1', 2, { '_cls': 'Embedded', @@ -175,10 +175,10 @@ class DeltaTest(unittest.TestCase): self.assertEqual(doc._delta(), ({ 'embedded_field.list_field': ['1', 2, { '_cls': 'Embedded', - 'string_field': 'hello', - 'dict_field': {'hello': 'world'}, - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], + 'string_field': 'hello', + 'dict_field': {'hello': 'world'}, + 'int_field': 1, + 'list_field': ['1', 2, {'hello': 'world'}], }] }, {})) doc.save() @@ -467,9 +467,6 @@ class DeltaTest(unittest.TestCase): 
'db_list_field': ['1', 2, {'hello': 'world'}] } self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) - embedded_delta.update({ - '_cls': 'Embedded', - }) self.assertEqual(doc._delta(), ({'db_embedded_field': embedded_delta}, {})) @@ -520,10 +517,10 @@ class DeltaTest(unittest.TestCase): self.assertEqual(doc._delta(), ({ 'db_embedded_field.db_list_field': ['1', 2, { '_cls': 'Embedded', - 'db_string_field': 'hello', - 'db_dict_field': {'hello': 'world'}, - 'db_int_field': 1, - 'db_list_field': ['1', 2, {'hello': 'world'}], + 'db_string_field': 'hello', + 'db_dict_field': {'hello': 'world'}, + 'db_int_field': 1, + 'db_list_field': ['1', 2, {'hello': 'world'}], }] }, {})) doc.save() @@ -686,3 +683,7 @@ class DeltaTest(unittest.TestCase): doc.list_field = [] self.assertEqual(doc._get_changed_fields(), ['list_field']) self.assertEqual(doc._delta(), ({}, {'list_field': 1})) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/document/dynamic.py b/tests/document/dynamic.py index ef27917..d879b54 100644 --- a/tests/document/dynamic.py +++ b/tests/document/dynamic.py @@ -1,4 +1,7 @@ import unittest +import sys + +sys.path[0:0] = [""] from mongoengine import * from mongoengine.connection import get_db @@ -161,7 +164,7 @@ class DynamicTest(unittest.TestCase): embedded_1.list_field = ['1', 2, {'hello': 'world'}] doc.embedded_field = embedded_1 - self.assertEqual(doc.to_mongo(), {"_cls": "Doc", + self.assertEqual(doc.to_mongo(), { "embedded_field": { "_cls": "Embedded", "string_field": "hello", @@ -205,7 +208,7 @@ class DynamicTest(unittest.TestCase): embedded_1.list_field = ['1', 2, embedded_2] doc.embedded_field = embedded_1 - self.assertEqual(doc.to_mongo(), {"_cls": "Doc", + self.assertEqual(doc.to_mongo(), { "embedded_field": { "_cls": "Embedded", "string_field": "hello", @@ -246,7 +249,6 @@ class DynamicTest(unittest.TestCase): class Person(DynamicDocument): name = StringField() - meta = {'allow_inheritance': True} Person.drop_collection() 
@@ -268,3 +270,7 @@ class DynamicTest(unittest.TestCase): person.age = 35 person.save() self.assertEqual(Person.objects.first().age, 35) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/document/inheritance.py b/tests/document/inheritance.py index d269ac0..08e2904 100644 --- a/tests/document/inheritance.py +++ b/tests/document/inheritance.py @@ -203,7 +203,6 @@ class InheritanceTest(unittest.TestCase): class Animal(Document): name = StringField() - meta = {'allow_inheritance': False} def create_dog_class(): class Dog(Animal): @@ -258,7 +257,6 @@ class InheritanceTest(unittest.TestCase): class Comment(EmbeddedDocument): content = StringField() - meta = {'allow_inheritance': False} def create_special_comment(): class SpecialComment(Comment): diff --git a/tests/document/instance.py b/tests/document/instance.py index 95f37d9..fcc43ba 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -1,24 +1,22 @@ # -*- coding: utf-8 -*- from __future__ import with_statement +import sys +sys.path[0:0] = [""] + import bson import os import pickle -import pymongo -import sys import unittest import uuid -import warnings -from nose.plugins.skip import SkipTest from datetime import datetime - -from tests.fixtures import Base, Mixin, PickleEmbedded, PickleTest +from tests.fixtures import PickleEmbedded, PickleTest from mongoengine import * from mongoengine.errors import (NotRegistered, InvalidDocumentError, InvalidQueryError) from mongoengine.queryset import NULLIFY, Q -from mongoengine.connection import get_db, get_connection +from mongoengine.connection import get_db TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') @@ -461,7 +459,7 @@ class InstanceTest(unittest.TestCase): doc.validate() keys = doc._data.keys() self.assertEqual(2, len(keys)) - self.assertTrue(None in keys) + self.assertTrue('id' in keys) self.assertTrue('e' in keys) def test_save(self): @@ -656,8 +654,8 @@ class InstanceTest(unittest.TestCase): 
self.assertEqual(p1.name, p.parent.name) def test_update(self): - """Ensure that an existing document is updated instead of be overwritten. - """ + """Ensure that an existing document is updated instead of be + overwritten.""" # Create person object and save it to the database person = self.Person(name='Test User', age=30) person.save() @@ -753,30 +751,33 @@ class InstanceTest(unittest.TestCase): float_field = FloatField(default=1.1) boolean_field = BooleanField(default=True) datetime_field = DateTimeField(default=datetime.now) - embedded_document_field = EmbeddedDocumentField(EmbeddedDoc, default=lambda: EmbeddedDoc()) + embedded_document_field = EmbeddedDocumentField(EmbeddedDoc, + default=lambda: EmbeddedDoc()) list_field = ListField(default=lambda: [1, 2, 3]) dict_field = DictField(default=lambda: {"hello": "world"}) objectid_field = ObjectIdField(default=bson.ObjectId) - reference_field = ReferenceField(Simple, default=lambda: Simple().save()) + reference_field = ReferenceField(Simple, default=lambda: + Simple().save()) map_field = MapField(IntField(), default=lambda: {"simple": 1}) decimal_field = DecimalField(default=1.0) complex_datetime_field = ComplexDateTimeField(default=datetime.now) url_field = URLField(default="http://mongoengine.org") dynamic_field = DynamicField(default=1) - generic_reference_field = GenericReferenceField(default=lambda: Simple().save()) - sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3]) + generic_reference_field = GenericReferenceField( + default=lambda: Simple().save()) + sorted_list_field = SortedListField(IntField(), + default=lambda: [1, 2, 3]) email_field = EmailField(default="ross@example.com") geo_point_field = GeoPointField(default=lambda: [1, 2]) sequence_field = SequenceField() uuid_field = UUIDField(default=uuid.uuid4) - generic_embedded_document_field = GenericEmbeddedDocumentField(default=lambda: EmbeddedDoc()) - + generic_embedded_document_field = GenericEmbeddedDocumentField( + 
default=lambda: EmbeddedDoc()) Simple.drop_collection() Doc.drop_collection() Doc().save() - my_doc = Doc.objects.only("string_field").first() my_doc.string_field = "string" my_doc.save() @@ -1707,9 +1708,12 @@ class InstanceTest(unittest.TestCase): peter = User.objects.create(name="Peter") # Bob - Book.objects.create(name="1", author=bob, extra={"a": bob.to_dbref(), "b": [karl.to_dbref(), susan.to_dbref()]}) - Book.objects.create(name="2", author=bob, extra={"a": bob.to_dbref(), "b": karl.to_dbref()}) - Book.objects.create(name="3", author=bob, extra={"a": bob.to_dbref(), "c": [jon.to_dbref(), peter.to_dbref()]}) + Book.objects.create(name="1", author=bob, extra={ + "a": bob.to_dbref(), "b": [karl.to_dbref(), susan.to_dbref()]}) + Book.objects.create(name="2", author=bob, extra={ + "a": bob.to_dbref(), "b": karl.to_dbref()}) + Book.objects.create(name="3", author=bob, extra={ + "a": bob.to_dbref(), "c": [jon.to_dbref(), peter.to_dbref()]}) Book.objects.create(name="4", author=bob) # Jon @@ -1717,23 +1721,26 @@ class InstanceTest(unittest.TestCase): Book.objects.create(name="6", author=peter) Book.objects.create(name="7", author=jon) Book.objects.create(name="8", author=jon) - Book.objects.create(name="9", author=jon, extra={"a": peter.to_dbref()}) + Book.objects.create(name="9", author=jon, + extra={"a": peter.to_dbref()}) # Checks - self.assertEqual(u",".join([str(b) for b in Book.objects.all()]) , "1,2,3,4,5,6,7,8,9") + self.assertEqual(",".join([str(b) for b in Book.objects.all()]), + "1,2,3,4,5,6,7,8,9") # bob related books - self.assertEqual(u",".join([str(b) for b in Book.objects.filter( + self.assertEqual(",".join([str(b) for b in Book.objects.filter( Q(extra__a=bob) | Q(author=bob) | - Q(extra__b=bob))]) , + Q(extra__b=bob))]), "1,2,3,4") # Susan & Karl related books - self.assertEqual(u",".join([str(b) for b in Book.objects.filter( + self.assertEqual(",".join([str(b) for b in Book.objects.filter( Q(extra__a__all=[karl, susan]) | - Q(author__all=[karl, 
susan ]) | - Q(extra__b__all=[karl.to_dbref(), susan.to_dbref()]) - ) ]) , "1") + Q(author__all=[karl, susan]) | + Q(extra__b__all=[ + karl.to_dbref(), susan.to_dbref()])) + ]), "1") # $Where self.assertEqual(u",".join([str(b) for b in Book.objects.filter( @@ -1743,7 +1750,7 @@ class InstanceTest(unittest.TestCase): return this.name == '1' || this.name == '2';}""" } - ) ]), "1,2") + )]), "1,2") class ValidatorErrorTest(unittest.TestCase): diff --git a/tests/test_dereference.py b/tests/test_dereference.py index 7b149db..c9631eb 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -331,14 +331,10 @@ class FieldTest(unittest.TestCase): return "" % self.name Person.drop_collection() - paul = Person(name="Paul") - paul.save() - maria = Person(name="Maria") - maria.save() - julia = Person(name='Julia') - julia.save() - anna = Person(name='Anna') - anna.save() + paul = Person(name="Paul").save() + maria = Person(name="Maria").save() + julia = Person(name='Julia').save() + anna = Person(name='Anna').save() paul.other.friends = [maria, julia, anna] paul.other.name = "Paul's friends" diff --git a/tests/test_fields.py b/tests/test_fields.py index 118521f..1c13a58 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -727,7 +727,7 @@ class FieldTest(unittest.TestCase): """Ensure that the list fields can handle the complex types.""" class SettingBase(EmbeddedDocument): - pass + meta = {'allow_inheritance': True} class StringSetting(SettingBase): value = StringField() @@ -743,8 +743,9 @@ class FieldTest(unittest.TestCase): e.mapping.append(StringSetting(value='foo')) e.mapping.append(IntegerSetting(value=42)) e.mapping.append({'number': 1, 'string': 'Hi!', 'float': 1.001, - 'complex': IntegerSetting(value=42), 'list': - [IntegerSetting(value=42), StringSetting(value='foo')]}) + 'complex': IntegerSetting(value=42), + 'list': [IntegerSetting(value=42), + StringSetting(value='foo')]}) e.save() e2 = Simple.objects.get(id=e.id) @@ -844,7 +845,7 @@ class 
FieldTest(unittest.TestCase): """Ensure that the dict field can handle the complex types.""" class SettingBase(EmbeddedDocument): - pass + meta = {'allow_inheritance': True} class StringSetting(SettingBase): value = StringField() @@ -859,9 +860,11 @@ class FieldTest(unittest.TestCase): e = Simple() e.mapping['somestring'] = StringSetting(value='foo') e.mapping['someint'] = IntegerSetting(value=42) - e.mapping['nested_dict'] = {'number': 1, 'string': 'Hi!', 'float': 1.001, - 'complex': IntegerSetting(value=42), 'list': - [IntegerSetting(value=42), StringSetting(value='foo')]} + e.mapping['nested_dict'] = {'number': 1, 'string': 'Hi!', + 'float': 1.001, + 'complex': IntegerSetting(value=42), + 'list': [IntegerSetting(value=42), + StringSetting(value='foo')]} e.save() e2 = Simple.objects.get(id=e.id) @@ -915,7 +918,7 @@ class FieldTest(unittest.TestCase): """Ensure that the MapField can handle complex declared types.""" class SettingBase(EmbeddedDocument): - pass + meta = {"allow_inheritance": True} class StringSetting(SettingBase): value = StringField() @@ -951,7 +954,8 @@ class FieldTest(unittest.TestCase): number = IntField(default=0, db_field='i') class Test(Document): - my_map = MapField(field=EmbeddedDocumentField(Embedded), db_field='x') + my_map = MapField(field=EmbeddedDocumentField(Embedded), + db_field='x') Test.drop_collection() @@ -1038,6 +1042,8 @@ class FieldTest(unittest.TestCase): class User(EmbeddedDocument): name = StringField() + meta = {'allow_inheritance': True} + class PowerUser(User): power = IntField() @@ -1046,8 +1052,10 @@ class FieldTest(unittest.TestCase): author = EmbeddedDocumentField(User) post = BlogPost(content='What I did today...') - post.author = User(name='Test User') post.author = PowerUser(name='Test User', power=47) + post.save() + + self.assertEqual(47, BlogPost.objects.first().author.power) def test_reference_validation(self): """Ensure that invalid docment objects cannot be assigned to reference @@ -2117,12 +2125,12 @@ class 
FieldTest(unittest.TestCase): def test_sequence_fields_reload(self): class Animal(Document): counter = SequenceField() - type = StringField() + name = StringField() self.db['mongoengine.counters'].drop() Animal.drop_collection() - a = Animal(type="Boi") + a = Animal(name="Boi") a.save() self.assertEqual(a.counter, 1) diff --git a/tests/test_queryset.py b/tests/test_queryset.py index cdabadb..e9e78b4 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -647,7 +647,8 @@ class QuerySetTest(unittest.TestCase): self.assertRaises(NotUniqueError, throw_operation_error_not_unique) self.assertEqual(Blog.objects.count(), 2) - Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True}) + Blog.objects.insert([blog2, blog3], write_options={ + 'continue_on_error': True}) self.assertEqual(Blog.objects.count(), 3) def test_get_changed_fields_query_count(self): @@ -673,7 +674,7 @@ class QuerySetTest(unittest.TestCase): r2 = Project(name="r2").save() r3 = Project(name="r3").save() p1 = Person(name="p1", projects=[r1, r2]).save() - p2 = Person(name="p2", projects=[r2]).save() + p2 = Person(name="p2", projects=[r2, r3]).save() o1 = Organization(name="o1", employees=[p1]).save() with query_counter() as q: @@ -688,24 +689,24 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(q, 0) fresh_o1 = Organization.objects.get(id=o1.id) - fresh_o1.save() + fresh_o1.save() # No changes, does nothing - self.assertEqual(q, 2) + self.assertEqual(q, 1) with query_counter() as q: self.assertEqual(q, 0) fresh_o1 = Organization.objects.get(id=o1.id) - fresh_o1.save(cascade=False) + fresh_o1.save(cascade=False) # No changes, does nothing - self.assertEqual(q, 2) + self.assertEqual(q, 1) with query_counter() as q: self.assertEqual(q, 0) fresh_o1 = Organization.objects.get(id=o1.id) - fresh_o1.employees.append(p2) - fresh_o1.save(cascade=False) + fresh_o1.employees.append(p2) # Dereferences + fresh_o1.save(cascade=False) # Saves self.assertEqual(q, 3) From 
c31488add9340e7b4bf85e4d2507a1dac78620e2 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 5 Nov 2012 11:14:02 +0000 Subject: [PATCH 010/464] Version bump --- docs/changelog.rst | 2 +- mongoengine/__init__.py | 2 +- python-mongoengine.spec | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b2a855d..aac24c6 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -2,7 +2,7 @@ Changelog ========= -Changes in 0.7.X +Changes in 0.7.6 ================ - Unicode fix for repr (MongoEngine/mongoengine#133) - Allow updates with match operators (MongoEngine/mongoengine#144) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index 9044e61..cdfbfff 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -12,7 +12,7 @@ from signals import * __all__ = (document.__all__ + fields.__all__ + connection.__all__ + queryset.__all__ + signals.__all__) -VERSION = (0, 7, 5) +VERSION = (0, 7, 6) def get_version(): diff --git a/python-mongoengine.spec b/python-mongoengine.spec index 641b3de..d796f99 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.7.5 +Version: 0.7.6 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB From 7d90aa76ff7116269dea42f2c6629ea6b868b0de Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 6 Nov 2012 16:04:23 +0000 Subject: [PATCH 011/464] Add _instance to Embedded Documents Fixes MongoEngine/mongoengine#139 --- mongoengine/base/datastructures.py | 20 +++++++++++- mongoengine/base/fields.py | 4 +++ mongoengine/document.py | 2 ++ mongoengine/fields.py | 7 +++-- tests/document/instance.py | 50 ++++++++++++++++++++++++------ 5 files changed, 70 insertions(+), 13 deletions(-) diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index 9a7620e..c750b5b 100644 --- a/mongoengine/base/datastructures.py +++ 
b/mongoengine/base/datastructures.py @@ -1,4 +1,5 @@ import weakref +from mongoengine.common import _import_class __all__ = ("BaseDict", "BaseList") @@ -16,6 +17,14 @@ class BaseDict(dict): self._name = name return super(BaseDict, self).__init__(dict_items) + def __getitem__(self, *args, **kwargs): + value = super(BaseDict, self).__getitem__(*args, **kwargs) + + EmbeddedDocument = _import_class('EmbeddedDocument') + if isinstance(value, EmbeddedDocument) and value._instance is None: + value._instance = self._instance + return value + def __setitem__(self, *args, **kwargs): self._mark_as_changed() return super(BaseDict, self).__setitem__(*args, **kwargs) @@ -75,6 +84,14 @@ class BaseList(list): self._name = name return super(BaseList, self).__init__(list_items) + def __getitem__(self, *args, **kwargs): + value = super(BaseList, self).__getitem__(*args, **kwargs) + + EmbeddedDocument = _import_class('EmbeddedDocument') + if isinstance(value, EmbeddedDocument) and value._instance is None: + value._instance = self._instance + return value + def __setitem__(self, *args, **kwargs): self._mark_as_changed() return super(BaseList, self).__setitem__(*args, **kwargs) @@ -84,7 +101,8 @@ class BaseList(list): return super(BaseList, self).__delitem__(*args, **kwargs) def __getstate__(self): - self.observer = None + self.instance = None + self._dereferenced = False return self def __setstate__(self, state): diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index 00e040c..fc1a076 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -1,5 +1,6 @@ import operator import warnings +import weakref from bson import DBRef, ObjectId @@ -71,6 +72,9 @@ class BaseField(object): if callable(value): value = value() + EmbeddedDocument = _import_class('EmbeddedDocument') + if isinstance(value, EmbeddedDocument) and value._instance is None: + value._instance = weakref.proxy(instance) return value def __set__(self, instance, value): diff --git 
a/mongoengine/document.py b/mongoengine/document.py index 95dd624..adbdcca 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -40,6 +40,8 @@ class EmbeddedDocument(BaseDocument): my_metaclass = DocumentMetaclass __metaclass__ = DocumentMetaclass + _instance = None + def __init__(self, *args, **kwargs): super(EmbeddedDocument, self).__init__(*args, **kwargs) self._changed_fields = [] diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 15e1626..94e1155 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -625,7 +625,8 @@ class SortedListField(ListField): def to_mongo(self, value): value = super(SortedListField, self).to_mongo(value) if self._ordering is not None: - return sorted(value, key=itemgetter(self._ordering), reverse=self._order_reverse) + return sorted(value, key=itemgetter(self._ordering), + reverse=self._order_reverse) return sorted(value, reverse=self._order_reverse) @@ -655,7 +656,9 @@ class DictField(ComplexBaseField): self.error('Only dictionaries may be used in a DictField') if any(k for k in value.keys() if not isinstance(k, basestring)): - self.error('Invalid dictionary key - documents must have only string keys') + msg = ("Invalid dictionary key - documents must " + "have only string keys") + self.error(msg) if any(('.' 
in k or '$' in k) for k in value.keys()): self.error('Invalid dictionary key name - keys may not contain "."' ' or "$" characters') diff --git a/tests/document/instance.py b/tests/document/instance.py index fcc43ba..48ddc10 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -183,9 +183,6 @@ class InstanceTest(unittest.TestCase): self.assertEqual(list_stats, CompareStats.objects.first().stats) - - - def test_db_field_load(self): """Ensure we load data correctly """ @@ -214,24 +211,24 @@ class InstanceTest(unittest.TestCase): class Person(Document): name = StringField(required=True) - rank_ = EmbeddedDocumentField(Rank, required=False, db_field='rank') + rank_ = EmbeddedDocumentField(Rank, + required=False, + db_field='rank') @property def rank(self): - return self.rank_.title if self.rank_ is not None else "Private" + if self.rank_ is None: + return "Private" + return self.rank_.title Person.drop_collection() Person(name="Jack", rank_=Rank(title="Corporal")).save() - Person(name="Fred").save() self.assertEqual(Person.objects.get(name="Jack").rank, "Corporal") self.assertEqual(Person.objects.get(name="Fred").rank, "Private") - - - def test_custom_id_field(self): """Ensure that documents may be created with custom primary keys. 
""" @@ -247,7 +244,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual(User._meta['id_field'], 'username') def create_invalid_user(): - User(name='test').save() # no primary key field + User(name='test').save() # no primary key field self.assertRaises(ValidationError, create_invalid_user) def define_invalid_user(): @@ -424,6 +421,36 @@ class InstanceTest(unittest.TestCase): self.assertTrue('content' in Comment._fields) self.assertFalse('id' in Comment._fields) + def test_embedded_document_instance(self): + """Ensure that embedded documents can reference parent instance + """ + class Embedded(EmbeddedDocument): + string = StringField() + + class Doc(Document): + embedded_field = EmbeddedDocumentField(Embedded) + + Doc.drop_collection() + Doc(embedded_field=Embedded(string="Hi")).save() + + doc = Doc.objects.get() + self.assertEqual(doc, doc.embedded_field._instance) + + def test_embedded_document_complex_instance(self): + """Ensure that embedded documents in complex fields can reference + parent instance""" + class Embedded(EmbeddedDocument): + string = StringField() + + class Doc(Document): + embedded_field = ListField(EmbeddedDocumentField(Embedded)) + + Doc.drop_collection() + Doc(embedded_field=[Embedded(string="Hi")]).save() + + doc = Doc.objects.get() + self.assertEqual(doc, doc.embedded_field[0]._instance) + def test_embedded_document_validation(self): """Ensure that embedded documents may be validated. 
""" @@ -442,6 +469,7 @@ class InstanceTest(unittest.TestCase): comment.date = datetime.now() comment.validate() + self.assertEqual(comment._instance, None) def test_embedded_db_field_validate(self): @@ -475,11 +503,13 @@ class InstanceTest(unittest.TestCase): self.assertEqual(person_obj['age'], 30) self.assertEqual(person_obj['_id'], person.id) # Test skipping validation on save + class Recipient(Document): email = EmailField(required=True) recipient = Recipient(email='root@localhost') self.assertRaises(ValidationError, recipient.save) + try: recipient.save(validate=False) except ValidationError: From f0f1308465773545a6b3cead3abe2c1f82b2f2e8 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 6 Nov 2012 16:06:54 +0000 Subject: [PATCH 012/464] Updated changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index ecd487f..33d22e1 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8 ============== +- Added _instance to EmbeddedDocuments pointing to the parent (MongoEngine/mongoengine#139) - Inheritance is off by default (MongoEngine/mongoengine#122) - Remove _types and just use _cls for inheritance (MongoEngine/mongoengine#148) From f2049e9c1896eae2e34b9a25d6bdbf82fa8375e2 Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Tue, 6 Nov 2012 18:55:13 +0000 Subject: [PATCH 013/464] Adding QuerySet(read_preference=pymongo.ReadPreference.X) and QuerySet().read_preference() method to override connection-level read_preference on a per-query basis. 
--- mongoengine/queryset/queryset.py | 24 +++++++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index dd7200b..0437395 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -53,6 +53,7 @@ class QuerySet(object): self._timeout = True self._class_check = True self._slave_okay = False + self._read_preference = None self._iter = False self._scalar = [] @@ -75,7 +76,8 @@ class QuerySet(object): copy_props = ('_initial_query', '_query_obj', '_where_clause', '_loaded_fields', '_ordering', '_snapshot', - '_timeout', '_limit', '_skip', '_slave_okay', '_hint') + '_timeout', '_limit', '_skip', '_slave_okay', '_hint', + '_read_preference') for prop in copy_props: val = getattr(self, prop) @@ -109,7 +111,8 @@ class QuerySet(object): self._collection.ensure_index(fields, **index_spec) return self - def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query): + def __call__(self, q_obj=None, class_check=True, slave_okay=False, read_preference=None, + **query): """Filter the selected documents by calling the :class:`~mongoengine.queryset.QuerySet` with a query. @@ -121,6 +124,8 @@ class QuerySet(object): querying collection :param slave_okay: if True, allows this query to be run against a replica secondary. + :params read_preference: if set, overrides connection-level + read_preference from `ReplicaSetConnection`. 
:param query: Django-style query keyword arguments """ query = Q(**query) @@ -129,6 +134,8 @@ class QuerySet(object): self._query_obj &= query self._mongo_query = None self._cursor_obj = None + if read_preference is not None: + self._read_preference = read_preference self._class_check = class_check return self @@ -229,8 +236,10 @@ class QuerySet(object): cursor_args = { 'snapshot': self._snapshot, 'timeout': self._timeout, - 'slave_okay': self._slave_okay + 'slave_okay': self._slave_okay, } + if self._read_preference is not None: + cursor_args['read_preference'] = self._read_preference if self._loaded_fields: cursor_args['fields'] = self._loaded_fields.as_dict() return cursor_args @@ -802,6 +811,15 @@ class QuerySet(object): self._slave_okay = enabled return self + def read_preference(self, read_preference): + """Change the read_preference when querying. + + :param read_preference: override ReplicaSetConnection-level + preference. + """ + self._read_preference = read_preference + return self + def delete(self, safe=False): """Delete the documents matched by the query. 
From 7073b9d395429a753f95ecdb26decb4344784691 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 6 Nov 2012 18:55:14 +0000 Subject: [PATCH 014/464] Added validation and tests --- docs/changelog.rst | 1 + docs/guide/connecting.rst | 6 ++ mongoengine/queryset/queryset.py | 118 ++++++++++++++++++------------- tests/test_queryset.py | 16 +++++ 4 files changed, 93 insertions(+), 48 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 33d22e1..5ea1e4f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8 ============== +- Added support setting for read prefrence at a query level (MongoEngine/mongoengine#157) - Added _instance to EmbeddedDocuments pointing to the parent (MongoEngine/mongoengine#139) - Inheritance is off by default (MongoEngine/mongoengine#122) - Remove _types and just use _cls for inheritance (MongoEngine/mongoengine#148) diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index bc45dbf..657c46c 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -33,6 +33,12 @@ MongoEngine now supports :func:`~pymongo.replica_set_connection.ReplicaSetConnec to use them please use a URI style connection and provide the `replicaSet` name in the connection kwargs. 
+Read preferences are supported throught the connection or via individual +queries by passing the read_preference :: + + Bar.objects().read_preference(ReadPreference.PRIMARY) + Bar.objects(read_preference=ReadPreference.PRIMARY) + Multiple Databases ================== diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 0437395..cf4b4f8 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -6,6 +6,7 @@ import operator import pymongo from bson.code import Code +from pymongo.common import validate_read_preference from mongoengine import signals from mongoengine.common import _import_class @@ -68,7 +69,8 @@ class QuerySet(object): self._hint = -1 # Using -1 as None is a valid value for hint def clone(self): - """Creates a copy of the current :class:`~mongoengine.queryset.QuerySet` + """Creates a copy of the current + :class:`~mongoengine.queryset.QuerySet` .. versionadded:: 0.5 """ @@ -111,8 +113,8 @@ class QuerySet(object): self._collection.ensure_index(fields, **index_spec) return self - def __call__(self, q_obj=None, class_check=True, slave_okay=False, read_preference=None, - **query): + def __call__(self, q_obj=None, class_check=True, slave_okay=False, + read_preference=None, **query): """Filter the selected documents by calling the :class:`~mongoengine.queryset.QuerySet` with a query. @@ -124,7 +126,7 @@ class QuerySet(object): querying collection :param slave_okay: if True, allows this query to be run against a replica secondary. - :params read_preference: if set, overrides connection-level + :params read_preference: if set, overrides connection-level read_preference from `ReplicaSetConnection`. 
:param query: Django-style query keyword arguments """ @@ -135,7 +137,7 @@ class QuerySet(object): self._mongo_query = None self._cursor_obj = None if read_preference is not None: - self._read_preference = read_preference + self.read_preference(read_preference) self._class_check = class_check return self @@ -282,39 +284,43 @@ class QuerySet(object): self.limit(2) self.__call__(*q_objs, **query) try: - result1 = self.next() + result = self.next() except StopIteration: - raise self._document.DoesNotExist("%s matching query does not exist." - % self._document._class_name) + msg = ("%s matching query does not exist." + % self._document._class_name) + raise self._document.DoesNotExist(msg) try: - result2 = self.next() + self.next() except StopIteration: - return result1 + return result self.rewind() message = u'%d items returned, instead of 1' % self.count() raise self._document.MultipleObjectsReturned(message) - def get_or_create(self, write_options=None, auto_save=True, *q_objs, **query): - """Retrieve unique object or create, if it doesn't exist. Returns a tuple of - ``(object, created)``, where ``object`` is the retrieved or created object - and ``created`` is a boolean specifying whether a new object was created. Raises + def get_or_create(self, write_options=None, auto_save=True, + *q_objs, **query): + """Retrieve unique object or create, if it doesn't exist. Returns a + tuple of ``(object, created)``, where ``object`` is the retrieved or + created object and ``created`` is a boolean specifying whether a new + object was created. Raises :class:`~mongoengine.queryset.MultipleObjectsReturned` or `DocumentName.MultipleObjectsReturned` if multiple results are found. A new document will be created if the document doesn't exists; a dictionary of default values for the new document may be provided as a keyword argument called :attr:`defaults`. - .. note:: This requires two separate operations and therefore a - race condition exists. 
Because there are no transactions in mongoDB - other approaches should be investigated, to ensure you don't - accidently duplicate data when using this method. + .. warning:: This requires two separate operations and therefore a + race condition exists. Because there are no transactions in + mongoDB other approaches should be investigated, to ensure you + don't accidently duplicate data when using this method. :param write_options: optional extra keyword arguments used if we have to create a new document. Passes any write_options onto :meth:`~mongoengine.Document.save` - :param auto_save: if the object is to be saved automatically if not found. + :param auto_save: if the object is to be saved automatically if + not found. .. versionchanged:: 0.6 - added `auto_save` .. versionadded:: 0.3 @@ -352,21 +358,24 @@ class QuerySet(object): result = None return result - def insert(self, doc_or_docs, load_bulk=True, safe=False, write_options=None): + def insert(self, doc_or_docs, load_bulk=True, safe=False, + write_options=None): """bulk insert documents If ``safe=True`` and the operation is unsuccessful, an :class:`~mongoengine.OperationError` will be raised. :param docs_or_doc: a document or list of documents to be inserted - :param load_bulk (optional): If True returns the list of document instances + :param load_bulk (optional): If True returns the list of document + instances :param safe: check if the operation succeeded before returning :param write_options: Extra keyword arguments are passed down to :meth:`~pymongo.collection.Collection.insert` - which will be used as options for the resultant ``getLastError`` command. - For example, ``insert(..., {w: 2, fsync: True})`` will wait until at least two - servers have recorded the write and will force an fsync on each server being - written to. + which will be used as options for the resultant + ``getLastError`` command. 
For example, + ``insert(..., {w: 2, fsync: True})`` will wait until at least + two servers have recorded the write and will force an fsync on + each server being written to. By default returns document instances, set ``load_bulk`` to False to return just ``ObjectIds`` @@ -388,7 +397,8 @@ class QuerySet(object): raw = [] for doc in docs: if not isinstance(doc, self._document): - msg = "Some documents inserted aren't instances of %s" % str(self._document) + msg = ("Some documents inserted aren't instances of %s" + % str(self._document)) raise OperationError(msg) if doc.pk: msg = "Some documents have ObjectIds use doc.update() instead" @@ -429,7 +439,8 @@ class QuerySet(object): .. versionchanged:: 0.6 Raises InvalidQueryError if filter has been set """ if not self._query_obj.empty: - raise InvalidQueryError("Cannot use a filter whilst using `with_id`") + msg = "Cannot use a filter whilst using `with_id`" + raise InvalidQueryError(msg) return self.filter(pk=object_id).first() def in_bulk(self, object_ids): @@ -503,9 +514,9 @@ class QuerySet(object): :param reduce_f: reduce function, as :class:`~bson.code.Code` or string :param output: output collection name, if set to 'inline' will try to - use :class:`~pymongo.collection.Collection.inline_map_reduce` - This can also be a dictionary containing output options - see: http://docs.mongodb.org/manual/reference/commands/#mapReduce + use :class:`~pymongo.collection.Collection.inline_map_reduce` + This can also be a dictionary containing output options + see: http://docs.mongodb.org/manual/reference/commands/#mapReduce :param finalize_f: finalize function, an optional function that performs any post-reduction processing. :param scope: values to insert into map/reduce global scope. Optional. 
@@ -568,7 +579,8 @@ class QuerySet(object): map_reduce_function = 'map_reduce' mr_args['out'] = output - results = getattr(self._collection, map_reduce_function)(map_f, reduce_f, **mr_args) + results = getattr(self._collection, map_reduce_function)( + map_f, reduce_f, **mr_args) if map_reduce_function == 'map_reduce': results = results.find() @@ -609,9 +621,9 @@ class QuerySet(object): """Added 'hint' support, telling Mongo the proper index to use for the query. - Judicious use of hints can greatly improve query performance. When doing - a query on multiple fields (at least one of which is indexed) pass the - indexed field as a hint to the query. + Judicious use of hints can greatly improve query performance. When + doing a query on multiple fields (at least one of which is indexed) + pass the indexed field as a hint to the query. Hinting will not do anything if the corresponding index does not exist. The last hint applied to this cursor takes precedence over all others. @@ -695,9 +707,9 @@ class QuerySet(object): Retrieving a Subrange of Array Elements: You can use the $slice operator to retrieve a subrange of elements in - an array :: + an array. For example to get the first 5 comments:: - post = BlogPost.objects(...).fields(slice__comments=5) // first 5 comments + post = BlogPost.objects(...).fields(slice__comments=5) :param kwargs: A dictionary identifying what to include @@ -724,9 +736,10 @@ class QuerySet(object): return self def all_fields(self): - """Include all fields. Reset all previously calls of .only() and .exclude(). :: + """Include all fields. Reset all previously calls of .only() or + .exclude(). :: - post = BlogPost.objects(...).exclude("comments").only("title").all_fields() + post = BlogPost.objects.exclude("comments").all_fields() .. versionadded:: 0.5 """ @@ -817,6 +830,7 @@ class QuerySet(object): :param read_preference: override ReplicaSetConnection-level preference. 
""" + validate_read_preference('read_preference', read_preference) self._read_preference = read_preference return self @@ -839,9 +853,10 @@ class QuerySet(object): for rule_entry in delete_rules: document_cls, field_name = rule_entry rule = doc._meta['delete_rules'][rule_entry] - if rule == DENY and document_cls.objects(**{field_name + '__in': self}).count() > 0: - msg = u'Could not delete document (at least %s.%s refers to it)' % \ - (document_cls.__name__, field_name) + if rule == DENY and document_cls.objects( + **{field_name + '__in': self}).count() > 0: + msg = ("Could not delete document (%s.%s refers to it)" + % (document_cls.__name__, field_name)) raise OperationError(msg) for rule_entry in delete_rules: @@ -864,13 +879,15 @@ class QuerySet(object): self._collection.remove(self._query, safe=safe) - def update(self, safe_update=True, upsert=False, multi=True, write_options=None, **update): + def update(self, safe_update=True, upsert=False, multi=True, + write_options=None, **update): """Perform an atomic update on the fields matched by the query. When ``safe_update`` is used, the number of affected documents is returned. :param safe_update: check if the operation succeeded before returning :param upsert: Any existing document with that "_id" is overwritten. - :param write_options: extra keyword arguments for :meth:`~pymongo.collection.Collection.update` + :param write_options: extra keyword arguments for + :meth:`~pymongo.collection.Collection.update` .. versionadded:: 0.2 """ @@ -895,13 +912,15 @@ class QuerySet(object): raise OperationError(message) raise OperationError(u'Update failed (%s)' % unicode(err)) - def update_one(self, safe_update=True, upsert=False, write_options=None, **update): + def update_one(self, safe_update=True, upsert=False, write_options=None, + **update): """Perform an atomic update on first field matched by the query. When ``safe_update`` is used, the number of affected documents is returned. 
:param safe_update: check if the operation succeeded before returning :param upsert: Any existing document with that "_id" is overwritten. - :param write_options: extra keyword arguments for :meth:`~pymongo.collection.Collection.update` + :param write_options: extra keyword arguments for + :meth:`~pymongo.collection.Collection.update` :param update: Django-style update keyword arguments .. versionadded:: 0.2 @@ -970,7 +989,8 @@ class QuerySet(object): return ".".join([f.db_field for f in fields]) code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) - code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, code) + code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, + code) return code def exec_js(self, code, *fields, **options): @@ -1094,7 +1114,8 @@ class QuerySet(object): } """) - for result in self.map_reduce(map_func, reduce_func, finalize_f=finalize_func, output='inline'): + for result in self.map_reduce(map_func, reduce_func, + finalize_f=finalize_func, output='inline'): return result.value else: return 0 @@ -1122,7 +1143,8 @@ class QuerySet(object): document lookups """ if map_reduce: - return self._item_frequencies_map_reduce(field, normalize=normalize) + return self._item_frequencies_map_reduce(field, + normalize=normalize) return self._item_frequencies_exec_js(field, normalize=normalize) def _item_frequencies_map_reduce(self, field, normalize=False): diff --git a/tests/test_queryset.py b/tests/test_queryset.py index e9e78b4..dcb2524 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -1,9 +1,13 @@ from __future__ import with_statement +import sys +sys.path[0:0] = [""] import unittest from datetime import datetime, timedelta import pymongo +from pymongo.errors import ConfigurationError +from pymongo.read_preferences import ReadPreference from bson import ObjectId @@ -3648,6 +3652,18 @@ class QueryFieldListTest(unittest.TestCase): ak = list(Bar.objects(foo__match={'shape': "square", "color": 
"purple"})) self.assertEqual([b1], ak) + def test_read_preference(self): + class Bar(Document): + pass + + Bar.drop_collection() + bars = list(Bar.objects(read_preference=ReadPreference.PRIMARY)) + self.assertEqual([], bars) + + self.assertRaises(ConfigurationError, Bar.objects, + read_preference='Primary') + + if __name__ == '__main__': unittest.main() From 1986e82783ba7432728ee1a21a45dae7d8971d34 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 7 Nov 2012 12:12:28 +0000 Subject: [PATCH 015/464] Added clean method to documents for pre validation data cleaning (MongoEngine/mongoengine#60) --- docs/changelog.rst | 1 + docs/guide/document-instances.rst | 28 ++++++++++++ mongoengine/base/document.py | 46 +++++++++++++++----- mongoengine/base/fields.py | 6 +-- mongoengine/common.py | 4 +- mongoengine/document.py | 10 +++-- mongoengine/fields.py | 8 ++-- tests/document/instance.py | 72 ++++++++++++++++++++++++++++++- 8 files changed, 150 insertions(+), 25 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 5ea1e4f..ca18d3e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8 ============== +- Added clean method to documents for pre validation data cleaning (MongoEngine/mongoengine#60) - Added support setting for read prefrence at a query level (MongoEngine/mongoengine#157) - Added _instance to EmbeddedDocuments pointing to the parent (MongoEngine/mongoengine#139) - Inheritance is off by default (MongoEngine/mongoengine#122) diff --git a/docs/guide/document-instances.rst b/docs/guide/document-instances.rst index 54fa804..b3bf687 100644 --- a/docs/guide/document-instances.rst +++ b/docs/guide/document-instances.rst @@ -38,6 +38,34 @@ already exist, then any changes will be updated atomically. For example:: .. 
seealso:: :ref:`guide-atomic-updates` +Pre save data validation and cleaning +------------------------------------- +MongoEngine allows you to create custom cleaning rules for your documents when +calling :meth:`~mongoengine.Document.save`. By providing a custom +:meth:`~mongoengine.Document.clean` method you can do any pre validation / data +cleaning. + +This might be useful if you want to ensure a default value based on other +document values for example:: + + class Essay(Document): + status = StringField(choices=('Published', 'Draft'), required=True) + pub_date = DateTimeField() + + def clean(self): + """Ensures that only published essays have a `pub_date` and + automatically sets the pub_date if published and not set""" + if self.status == 'Draft' and self.pub_date is not None: + msg = 'Draft entries should not have a publication date.' + raise ValidationError(msg) + # Set the pub_date for published items if not set. + if self.status == 'Published' and self.pub_date is None: + self.pub_date = datetime.now() + +.. note:: + Cleaning is only called if validation is turned on and when calling +:meth:`~mongoengine.Document.save`. 
+ Cascading Saves --------------- If your document contains :class:`~mongoengine.ReferenceField` or diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index bc509af..46f5320 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -15,7 +15,9 @@ from .common import get_document, ALLOW_INHERITANCE from .datastructures import BaseDict, BaseList from .fields import ComplexBaseField -__all__ = ('BaseDocument', ) +__all__ = ('BaseDocument', 'NON_FIELD_ERRORS') + +NON_FIELD_ERRORS = '__all__' class BaseDocument(object): @@ -82,11 +84,6 @@ class BaseDocument(object): if hasattr(self, '_changed_fields'): self._mark_as_changed(name) - # Check if the user has created a new instance of a class - if (self._is_document and self._initialised - and self._created and name == self._meta['id_field']): - super(BaseDocument, self).__setattr__('_created', False) - if (self._is_document and not self._created and name in self._meta.get('shard_key', tuple()) and self._data.get(name) != value): @@ -94,6 +91,11 @@ class BaseDocument(object): msg = "Shard Keys are immutable. Tried to update %s" % name raise OperationError(msg) + # Check if the user has created a new instance of a class + if (self._is_document and self._initialised + and self._created and name == self._meta['id_field']): + super(BaseDocument, self).__setattr__('_created', False) + super(BaseDocument, self).__setattr__(name, value) def __getstate__(self): @@ -171,6 +173,16 @@ class BaseDocument(object): else: return hash(self.pk) + def clean(self): + """ + Hook for doing document level data cleaning before validation is run. + + Any ValidationError raised by this method will not be associated with + a particular field; it will have a special-case association with the + field defined by NON_FIELD_ERRORS. + """ + pass + def to_mongo(self): """Return data dictionary ready for use with MongoDB. 
""" @@ -203,20 +215,33 @@ class BaseDocument(object): data[name] = field.to_mongo(self._data.get(name, None)) return data - def validate(self): + def validate(self, clean=True): """Ensure that all fields' values are valid and that required fields are present. """ + # Ensure that each field is matched to a valid value + errors = {} + if clean: + try: + self.clean() + except ValidationError, error: + errors[NON_FIELD_ERRORS] = error + # Get a list of tuples of field names and their current values fields = [(field, self._data.get(name)) for name, field in self._fields.items()] - # Ensure that each field is matched to a valid value - errors = {} + EmbeddedDocumentField = _import_class("EmbeddedDocumentField") + GenericEmbeddedDocumentField = _import_class("GenericEmbeddedDocumentField") + for field, value in fields: if value is not None: try: - field._validate(value) + if isinstance(field, (EmbeddedDocumentField, + GenericEmbeddedDocumentField)): + field._validate(value, clean=clean) + else: + field._validate(value) except ValidationError, error: errors[field.name] = error.errors or error except (ValueError, AttributeError, AssertionError), error: @@ -224,6 +249,7 @@ class BaseDocument(object): elif field.required and not getattr(field, '_auto_gen', False): errors[field.name] = ValidationError('Field is required', field_name=field.name) + if errors: raise ValidationError('ValidationError', errors=errors) diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index fc1a076..11719b5 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -105,12 +105,12 @@ class BaseField(object): """ return value - def validate(self, value): + def validate(self, value, clean=True): """Perform validation on a value. 
""" pass - def _validate(self, value): + def _validate(self, value, **kwargs): Document = _import_class('Document') EmbeddedDocument = _import_class('EmbeddedDocument') # check choices @@ -138,7 +138,7 @@ class BaseField(object): raise ValueError('validation argument for "%s" must be a ' 'callable.' % self.name) - self.validate(value) + self.validate(value, **kwargs) class ComplexBaseField(BaseField): diff --git a/mongoengine/common.py b/mongoengine/common.py index c284777..c76801c 100644 --- a/mongoengine/common.py +++ b/mongoengine/common.py @@ -9,8 +9,8 @@ def _import_class(cls_name): doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument', 'MapReduceDocument') field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField', - 'GenericReferenceField', 'GeoPointField', - 'ReferenceField', 'StringField') + 'GenericReferenceField', 'GenericEmbeddedDocumentField', + 'GeoPointField', 'ReferenceField', 'StringField') queryset_classes = ('OperationError',) deref_classes = ('DeReference',) diff --git a/mongoengine/document.py b/mongoengine/document.py index adbdcca..fcf8256 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -100,8 +100,8 @@ class Document(BaseDocument): Automatic index creation can be disabled by specifying attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to False then indexes will not be created by MongoEngine. This is useful in - production systems where index creation is performed as part of a deployment - system. + production systems where index creation is performed as part of a + deployment system. By default, _cls will be added to the start of every index (that doesn't contain a list) if allow_inheritance is True. 
This can be @@ -165,7 +165,7 @@ class Document(BaseDocument): cls._collection = db[collection_name] return cls._collection - def save(self, safe=True, force_insert=False, validate=True, + def save(self, safe=True, force_insert=False, validate=True, clean=True, write_options=None, cascade=None, cascade_kwargs=None, _refs=None): """Save the :class:`~mongoengine.Document` to the database. If the @@ -179,6 +179,8 @@ class Document(BaseDocument): :param force_insert: only try to create a new document, don't allow updates of existing documents :param validate: validates the document; set to ``False`` to skip. + :param clean: call the document clean method, requires `validate` to be + True. :param write_options: Extra keyword arguments are passed down to :meth:`~pymongo.collection.Collection.save` OR :meth:`~pymongo.collection.Collection.insert` @@ -208,7 +210,7 @@ class Document(BaseDocument): signals.pre_save.send(self.__class__, document=self) if validate: - self.validate() + self.validate(clean=clean) if not write_options: write_options = {} diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 94e1155..8aa7f64 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -461,7 +461,7 @@ class EmbeddedDocumentField(BaseField): return value return self.document_type.to_mongo(value) - def validate(self, value): + def validate(self, value, clean=True): """Make sure that the document instance is an instance of the EmbeddedDocument subclass provided when the document was defined. 
""" @@ -469,7 +469,7 @@ class EmbeddedDocumentField(BaseField): if not isinstance(value, self.document_type): self.error('Invalid embedded document instance provided to an ' 'EmbeddedDocumentField') - self.document_type.validate(value) + self.document_type.validate(value, clean) def lookup_member(self, member_name): return self.document_type._fields.get(member_name) @@ -499,12 +499,12 @@ class GenericEmbeddedDocumentField(BaseField): return value - def validate(self, value): + def validate(self, value, clean=True): if not isinstance(value, EmbeddedDocument): self.error('Invalid embedded document instance provided to an ' 'GenericEmbeddedDocumentField') - value.validate() + value.validate(clean=clean) def to_mongo(self, document): if document is None: diff --git a/tests/document/instance.py b/tests/document/instance.py index 48ddc10..2e07eb2 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -490,6 +490,76 @@ class InstanceTest(unittest.TestCase): self.assertTrue('id' in keys) self.assertTrue('e' in keys) + def test_document_clean(self): + class TestDocument(Document): + status = StringField() + pub_date = DateTimeField() + + def clean(self): + if self.status == 'draft' and self.pub_date is not None: + msg = 'Draft entries may not have a publication date.' + raise ValidationError(msg) + # Set the pub_date for published items if not set. + if self.status == 'published' and self.pub_date is None: + self.pub_date = datetime.now() + + TestDocument.drop_collection() + + t = TestDocument(status="draft", pub_date=datetime.now()) + + try: + t.save() + except ValidationError, e: + expect_msg = "Draft entries may not have a publication date." 
+ self.assertTrue(expect_msg in e.message) + self.assertEqual(e.to_dict(), {'__all__': expect_msg}) + + t = TestDocument(status="published") + t.save(clean=False) + + self.assertEquals(t.pub_date, None) + + t = TestDocument(status="published") + t.save(clean=True) + + self.assertEquals(type(t.pub_date), datetime) + + def test_document_embedded_clean(self): + class TestEmbeddedDocument(EmbeddedDocument): + x = IntField(required=True) + y = IntField(required=True) + z = IntField(required=True) + + meta = {'allow_inheritance': False} + + def clean(self): + if self.z: + if self.z != self.x + self.y: + raise ValidationError('Value of z != x + y') + else: + self.z = self.x + self.y + + class TestDocument(Document): + doc = EmbeddedDocumentField(TestEmbeddedDocument) + status = StringField() + + TestDocument.drop_collection() + + t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25, z=15)) + try: + t.save() + except ValidationError, e: + expect_msg = "Value of z != x + y" + self.assertTrue(expect_msg in e.message) + self.assertEqual(e.to_dict(), {'doc': {'__all__': expect_msg}}) + + t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25)).save() + self.assertEquals(t.doc.z, 35) + + # Asserts not raises + t = TestDocument(doc=TestEmbeddedDocument(x=15, y=35, z=5)) + t.save(clean=False) + def test_save(self): """Ensure that a document may be saved in the database. """ @@ -1935,7 +2005,5 @@ class ValidatorErrorTest(unittest.TestCase): self.assertRaises(OperationError, change_shard_key) - - if __name__ == '__main__': unittest.main() From 99fe1da34564b29cb2505fe938b09ca4253a884c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 7 Nov 2012 13:20:34 +0000 Subject: [PATCH 016/464] Add value_decorator into SequenceField Allows post processing of the calculated counter value. 
--- docs/changelog.rst | 1 + docs/upgrade.rst | 7 +++++++ mongoengine/fields.py | 33 ++++++++++++++++++--------------- tests/test_fields.py | 24 ++++++++++++++++++++++++ 4 files changed, 50 insertions(+), 15 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index ca18d3e..550cc8d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8 ============== +- Updated SequenceFields to allow post processing of the calculated counter value (MongoEngine/mongoengine#141) - Added clean method to documents for pre validation data cleaning (MongoEngine/mongoengine#60) - Added support setting for read prefrence at a query level (MongoEngine/mongoengine#157) - Added _instance to EmbeddedDocuments pointing to the parent (MongoEngine/mongoengine#139) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index bf0a842..daf0912 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -57,6 +57,13 @@ you will need to declare :attr:`allow_inheritance` in the meta data like so: meta = {'allow_inheritance': True} +SequenceFields +-------------- + +:class:`~mongoengine.fields.SequenceField`s now inherit from `BaseField` to +allow flexible storage of the calculated value. As such MIN and MAX settings +are no longer handled. + 0.6 to 0.7 ========== diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 8aa7f64..e2ce33c 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1329,7 +1329,7 @@ class GeoPointField(BaseField): self.error('Both values in point must be float or int') -class SequenceField(IntField): +class SequenceField(BaseField): """Provides a sequental counter see: http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers @@ -1341,15 +1341,26 @@ class SequenceField(IntField): cluster of machines, it is easier to create an object ID than have global, uniformly increasing sequence numbers. 
+ Use any callable as `value_decorator` to transform calculated counter into + any value suitable for your needs, e.g. string or hexadecimal + representation of the default integer counter value. + .. versionadded:: 0.5 + + .. versionchanged:: 0.8 added `value_decorator` """ + _auto_gen = True + COLLECTION_NAME = 'mongoengine.counters' + VALUE_DECORATOR = int def __init__(self, collection_name=None, db_alias=None, - sequence_name=None, *args, **kwargs): - self.collection_name = collection_name or 'mongoengine.counters' + sequence_name=None, value_decorator=None, *args, **kwargs): + self.collection_name = collection_name or self.COLLECTION_NAME self.db_alias = db_alias or DEFAULT_CONNECTION_NAME self.sequence_name = sequence_name + self.value_decorator = (callable(value_decorator) and + value_decorator or self.VALUE_DECORATOR) return super(SequenceField, self).__init__(*args, **kwargs) def generate(self): @@ -1364,24 +1375,16 @@ class SequenceField(IntField): update={"$inc": {"next": 1}}, new=True, upsert=True) - return counter['next'] + return self.value_decorator(counter['next']) def __get__(self, instance, owner): - - if instance is None: - return self - - if not instance._data: - return - - value = instance._data.get(self.name) - - if not value and instance._initialised: + value = super(SequenceField, self).__get__(instance, owner) + if value is None and instance._initialised: value = self.generate() instance._data[self.name] = value instance._mark_as_changed(self.name) - return int(value) if value else None + return value def __set__(self, instance, value): diff --git a/tests/test_fields.py b/tests/test_fields.py index 1c13a58..f1a36ed 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -1,5 +1,8 @@ # -*- coding: utf-8 -*- from __future__ import with_statement +import sys +sys.path[0:0] = [""] + import datetime import os import unittest @@ -2184,6 +2187,27 @@ class FieldTest(unittest.TestCase): c = self.db['mongoengine.counters'].find_one({'_id': 
'animal.id'}) self.assertEqual(c['next'], 10) + def test_sequence_field_value_decorator(self): + class Person(Document): + id = SequenceField(primary_key=True, value_decorator=str) + name = StringField() + + self.db['mongoengine.counters'].drop() + Person.drop_collection() + + for x in xrange(10): + p = Person(name="Person %s" % x) + p.save() + + c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) + self.assertEqual(c['next'], 10) + + ids = [i.id for i in Person.objects] + self.assertEqual(ids, map(str, range(1, 11))) + + c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) + self.assertEqual(c['next'], 10) + def test_generic_embedded_document(self): class Car(EmbeddedDocument): name = StringField() From 9ca96e4e17f4d954abd4163c5a3e13b0ff094f96 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 7 Nov 2012 13:51:02 +0000 Subject: [PATCH 017/464] Added none() to queryset (MongoEngine/mongoengine#127) --- docs/changelog.rst | 1 + mongoengine/queryset/queryset.py | 4 ++++ tests/test_queryset.py | 8 ++++++++ 3 files changed, 13 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 550cc8d..9bd822b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8 ============== +- Added none() to queryset (MongoEngine/mongoengine#127) - Updated SequenceFields to allow post processing of the calculated counter value (MongoEngine/mongoengine#141) - Added clean method to documents for pre validation data cleaning (MongoEngine/mongoengine#60) - Added support setting for read prefrence at a query level (MongoEngine/mongoengine#157) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index cf4b4f8..65c71e1 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -489,6 +489,10 @@ class QuerySet(object): self._iter = False self._cursor.rewind() + def none(self): + """Helper that just returns a list""" + return [] + def count(self): 
"""Count the selected elements in the query. """ diff --git a/tests/test_queryset.py b/tests/test_queryset.py index dcb2524..a3e64d2 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -252,6 +252,14 @@ class QuerySetTest(unittest.TestCase): Blog.drop_collection() + def test_none(self): + class A(Document): + pass + + A.drop_collection() + A().save() + self.assertEqual(A.objects.none(), []) + def test_chaining(self): class A(Document): pass From 8706fbe461c1bb3f5ea8d9ee23434a6aeaf86fc5 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 7 Nov 2012 15:04:45 +0000 Subject: [PATCH 018/464] Updated index creation now tied to Document class ((MongoEngine/mongoengine#102) --- docs/changelog.rst | 1 + docs/upgrade.rst | 9 +- mongoengine/document.py | 83 ++++++++++++- mongoengine/queryset/queryset.py | 114 +++--------------- tests/document/indexes.py | 10 +- .../test_convert_to_new_inheritance_model.py | 2 +- tests/migration/turn_off_inheritance.py | 2 +- tests/test_queryset.py | 3 +- 8 files changed, 115 insertions(+), 109 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 9bd822b..ca450f1 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8 ============== +- Updated index creation now tied to Document class ((MongoEngine/mongoengine#102) - Added none() to queryset (MongoEngine/mongoengine#127) - Updated SequenceFields to allow post processing of the calculated counter value (MongoEngine/mongoengine#141) - Added clean method to documents for pre validation data cleaning (MongoEngine/mongoengine#60) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index daf0912..44c69be 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -40,7 +40,7 @@ inherited classes like so: collection.drop_index(index) # 5. 
Recreate indexes - Animal.objects._ensure_indexes() + Animal.ensure_indexes() Document Definition @@ -56,6 +56,13 @@ you will need to declare :attr:`allow_inheritance` in the meta data like so: meta = {'allow_inheritance': True} +Indexes +------- + +Index methods are no longer tied to querysets but rather to the document class. +Although `QuerySet._ensure_indexes` and `QuerySet.ensure_index` still exist. +They should be replaced with :func:`~mongoengine.Document.ensure_indexes` / +:func:`~mongoengine.Document.ensure_index`. SequenceFields -------------- diff --git a/mongoengine/document.py b/mongoengine/document.py index fcf8256..cda3a9c 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -7,7 +7,7 @@ from bson.dbref import DBRef from mongoengine import signals, queryset from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, - BaseDict, BaseList) + BaseDict, BaseList, ALLOW_INHERITANCE) from queryset import OperationError, NotUniqueError from connection import get_db, DEFAULT_CONNECTION_NAME @@ -163,6 +163,8 @@ class Document(BaseDocument): ) else: cls._collection = db[collection_name] + if cls._meta.get('auto_create_index', True): + cls.ensure_indexes() return cls._collection def save(self, safe=True, force_insert=False, validate=True, clean=True, @@ -418,9 +420,86 @@ class Document(BaseDocument): """Drops the entire collection associated with this :class:`~mongoengine.Document` type from the database. """ + cls._collection = None db = cls._get_db() db.drop_collection(cls._get_collection_name()) - queryset.QuerySet._reset_already_indexed(cls) + + @classmethod + def ensure_index(cls, key_or_list, drop_dups=False, background=False, + **kwargs): + """Ensure that the given indexes are in place. 
+ + :param key_or_list: a single index key or a list of index keys (to + construct a multi-field index); keys may be prefixed with a **+** + or a **-** to determine the index ordering + """ + index_spec = cls._build_index_spec(key_or_list) + index_spec = index_spec.copy() + fields = index_spec.pop('fields') + index_spec['drop_dups'] = drop_dups + index_spec['background'] = background + index_spec.update(kwargs) + + return cls._get_collection().ensure_index(fields, **index_spec) + + @classmethod + def ensure_indexes(cls): + """Checks the document meta data and ensures all the indexes exist. + + .. note:: You can disable automatic index creation by setting + `auto_create_index` to False in the documents meta data + """ + background = cls._meta.get('index_background', False) + drop_dups = cls._meta.get('index_drop_dups', False) + index_opts = cls._meta.get('index_opts') or {} + index_cls = cls._meta.get('index_cls', True) + + collection = cls._get_collection() + + # determine if an index which we are creating includes + # _cls as its first field; if so, we can avoid creating + # an extra index on _cls, as mongodb will use the existing + # index to service queries against _cls + cls_indexed = False + + def includes_cls(fields): + first_field = None + if len(fields): + if isinstance(fields[0], basestring): + first_field = fields[0] + elif isinstance(fields[0], (list, tuple)) and len(fields[0]): + first_field = fields[0][0] + return first_field == '_cls' + + # Ensure indexes created by uniqueness constraints + for index in cls._meta['unique_indexes']: + cls_indexed = cls_indexed or includes_cls(index) + collection.ensure_index(index, unique=True, background=background, + drop_dups=drop_dups, **index_opts) + + # Ensure document-defined indexes are created + if cls._meta['index_specs']: + index_spec = cls._meta['index_specs'] + for spec in index_spec: + spec = spec.copy() + fields = spec.pop('fields') + cls_indexed = cls_indexed or includes_cls(fields) + opts = 
index_opts.copy() + opts.update(spec) + collection.ensure_index(fields, background=background, **opts) + + # If _cls is being used (for polymorphism), it needs an index, + # only if another index doesn't begin with _cls + if (index_cls and not cls_indexed and + cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) == True): + collection.ensure_index('_cls', background=background, + **index_opts) + + # Add geo indicies + for field in cls._geo_indices(): + index_spec = [(field.db_field, pymongo.GEO2D)] + collection.ensure_index(index_spec, background=background, + **index_opts) class DynamicDocument(Document): diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 65c71e1..1122123 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1,11 +1,12 @@ -import pprint -import re import copy import itertools import operator +import pprint +import re +import warnings -import pymongo from bson.code import Code +import pymongo from pymongo.common import validate_read_preference from mongoengine import signals @@ -37,8 +38,6 @@ class QuerySet(object): """A set of results returned from a query. Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as the results. """ - - __already_indexed = set() __dereference = False def __init__(self, document, collection): @@ -95,24 +94,6 @@ class QuerySet(object): self._mongo_query.update(self._initial_query) return self._mongo_query - def ensure_index(self, key_or_list, drop_dups=False, background=False, - **kwargs): - """Ensure that the given indexes are in place. 
- - :param key_or_list: a single index key or a list of index keys (to - construct a multi-field index); keys may be prefixed with a **+** - or a **-** to determine the index ordering - """ - index_spec = self._document._build_index_spec(key_or_list) - index_spec = index_spec.copy() - fields = index_spec.pop('fields') - index_spec['drop_dups'] = drop_dups - index_spec['background'] = background - index_spec.update(kwargs) - - self._collection.ensure_index(fields, **index_spec) - return self - def __call__(self, q_obj=None, class_check=True, slave_okay=False, read_preference=None, **query): """Filter the selected documents by calling the @@ -150,87 +131,26 @@ class QuerySet(object): """Returns all documents.""" return self.__call__() + def ensure_index(self, **kwargs): + """Deprecated use :func:`~Document.ensure_index`""" + msg = ("Doc.objects()._ensure_index() is deprecated. " + "Use Doc.ensure_index() instead.") + warnings.warn(msg, DeprecationWarning) + self._document.__class__.ensure_index(**kwargs) + return self + def _ensure_indexes(self): - """Checks the document meta data and ensures all the indexes exist. - - .. 
note:: You can disable automatic index creation by setting - `auto_create_index` to False in the documents meta data - """ - background = self._document._meta.get('index_background', False) - drop_dups = self._document._meta.get('index_drop_dups', False) - index_opts = self._document._meta.get('index_opts') or {} - index_cls = self._document._meta.get('index_cls', True) - - # determine if an index which we are creating includes - # _cls as its first field; if so, we can avoid creating - # an extra index on _cls, as mongodb will use the existing - # index to service queries against _cls - cls_indexed = False - - def includes_cls(fields): - first_field = None - if len(fields): - if isinstance(fields[0], basestring): - first_field = fields[0] - elif isinstance(fields[0], (list, tuple)) and len(fields[0]): - first_field = fields[0][0] - return first_field == '_cls' - - # Ensure indexes created by uniqueness constraints - for index in self._document._meta['unique_indexes']: - cls_indexed = cls_indexed or includes_cls(index) - self._collection.ensure_index(index, unique=True, - background=background, drop_dups=drop_dups, **index_opts) - - # Ensure document-defined indexes are created - if self._document._meta['index_specs']: - index_spec = self._document._meta['index_specs'] - for spec in index_spec: - spec = spec.copy() - fields = spec.pop('fields') - cls_indexed = cls_indexed or includes_cls(fields) - opts = index_opts.copy() - opts.update(spec) - self._collection.ensure_index(fields, - background=background, **opts) - - # If _cls is being used (for polymorphism), it needs an index, - # only if another index doesn't begin with _cls - if index_cls and '_cls' in self._query and not cls_indexed: - self._collection.ensure_index('_cls', - background=background, **index_opts) - - # Add geo indicies - for field in self._document._geo_indices(): - index_spec = [(field.db_field, pymongo.GEO2D)] - self._collection.ensure_index(index_spec, - background=background, **index_opts) - 
- @classmethod - def _reset_already_indexed(cls, document=None): - """Helper to reset already indexed, can be useful for testing purposes - """ - if document: - cls.__already_indexed.discard(document) - cls.__already_indexed.clear() + """Deprecated use :func:`~Document.ensure_indexes`""" + msg = ("Doc.objects()._ensure_indexes() is deprecated. " + "Use Doc.ensure_indexes() instead.") + warnings.warn(msg, DeprecationWarning) + self._document.__class__.ensure_indexes() @property def _collection(self): """Property that returns the collection object. This allows us to perform operations only if the collection is accessed. """ - if self._document not in QuerySet.__already_indexed: - # Ensure collection exists - db = self._document._get_db() - if self._collection_obj.name not in db.collection_names(): - self._document._collection = None - self._collection_obj = self._document._get_collection() - - QuerySet.__already_indexed.add(self._document) - - if self._document._meta.get('auto_create_index', True): - self._ensure_indexes() - return self._collection_obj @property diff --git a/tests/document/indexes.py b/tests/document/indexes.py index a6b74cd..8f83afc 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -80,7 +80,7 @@ class InstanceTest(unittest.TestCase): ('addDate', -1)]}] self.assertEqual(expected_specs, BlogPost._meta['index_specs']) - BlogPost.objects._ensure_indexes() + BlogPost.ensure_indexes() info = BlogPost.objects._collection.index_information() # _id, '-date', 'tags', ('cat', 'date') # NB: there is no index on _cls by itself, since @@ -100,7 +100,7 @@ class InstanceTest(unittest.TestCase): BlogPost.drop_collection() - ExtendedBlogPost.objects._ensure_indexes() + ExtendedBlogPost.ensure_indexes() info = ExtendedBlogPost.objects._collection.index_information() info = [value['key'] for key, value in info.iteritems()] for expected in expected_specs: @@ -141,7 +141,7 @@ class InstanceTest(unittest.TestCase): [{'fields': [('keywords', 1)]}]) 
# Force index creation - MyDoc.objects._ensure_indexes() + MyDoc.ensure_indexes() self.assertEqual(MyDoc._meta['index_specs'], [{'fields': [('keywords', 1)]}]) @@ -189,7 +189,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual([{'fields': [('location.point', '2d')]}], Place._meta['index_specs']) - Place.objects()._ensure_indexes() + Place.ensure_indexes() info = Place._get_collection().index_information() info = [value['key'] for key, value in info.iteritems()] self.assertTrue([('location.point', '2d')] in info) @@ -335,7 +335,7 @@ class InstanceTest(unittest.TestCase): recursive_obj = EmbeddedDocumentField(RecursiveObject) meta = {'allow_inheritance': True} - RecursiveDocument.objects._ensure_indexes() + RecursiveDocument.ensure_indexes() info = RecursiveDocument._get_collection().index_information() self.assertEqual(info.keys(), ['_id_', '_cls_1']) diff --git a/tests/migration/test_convert_to_new_inheritance_model.py b/tests/migration/test_convert_to_new_inheritance_model.py index 0ef37f7..d4337bf 100644 --- a/tests/migration/test_convert_to_new_inheritance_model.py +++ b/tests/migration/test_convert_to_new_inheritance_model.py @@ -48,4 +48,4 @@ class ConvertToNewInheritanceModel(unittest.TestCase): collection.drop_index(index) # 5. Recreate indexes - Animal.objects._ensure_indexes() + Animal.ensure_indexes() diff --git a/tests/migration/turn_off_inheritance.py b/tests/migration/turn_off_inheritance.py index 5d0f7d7..ee461a8 100644 --- a/tests/migration/turn_off_inheritance.py +++ b/tests/migration/turn_off_inheritance.py @@ -59,4 +59,4 @@ class TurnOffInheritanceTest(unittest.TestCase): collection.drop_index(index) # 5. 
Recreate indexes - Animal.objects._ensure_indexes() + Animal.ensure_indexes() diff --git a/tests/test_queryset.py b/tests/test_queryset.py index a3e64d2..378b489 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -3078,7 +3078,6 @@ class QuerySetTest(unittest.TestCase): self.assertEqual([1, 2, 3], numbers) Number.drop_collection() - def test_ensure_index(self): """Ensure that manual creation of indexes works. """ @@ -3086,7 +3085,7 @@ class QuerySetTest(unittest.TestCase): message = StringField() meta = {'allow_inheritance': True} - Comment.objects.ensure_index('message') + Comment.ensure_index('message') info = Comment.objects._collection.index_information() info = [(value['key'], From e7c0da38c2b512fabe1e3dc885a4aaf9919bcdef Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 7 Nov 2012 15:09:11 +0000 Subject: [PATCH 019/464] Better implementation for none - MongoEngine/mongoengine#127 --- mongoengine/queryset/queryset.py | 6 ++++-- tests/test_queryset.py | 4 +++- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 1122123..058bdd8 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -39,6 +39,7 @@ class QuerySet(object): providing :class:`~mongoengine.Document` objects as the results. """ __dereference = False + __none = False def __init__(self, document, collection): self._document = document @@ -391,7 +392,7 @@ class QuerySet(object): """ self._iter = True try: - if self._limit == 0: + if self._limit == 0 or self.__none: raise StopIteration if self._scalar: return self._get_scalar(self._document._from_son( @@ -411,7 +412,8 @@ class QuerySet(object): def none(self): """Helper that just returns a list""" - return [] + self.__none = True + return self def count(self): """Count the selected elements in the query. 
diff --git a/tests/test_queryset.py b/tests/test_queryset.py index 378b489..09b6b3f 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -258,7 +258,9 @@ class QuerySetTest(unittest.TestCase): A.drop_collection() A().save() - self.assertEqual(A.objects.none(), []) + + self.assertEqual(list(A.objects.none()), []) + self.assertEqual(list(A.objects.none().all()), []) def test_chaining(self): class A(Document): From 4b45c0cd14182b3abf4b1af8cc9f2296e7bdbcda Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 7 Nov 2012 15:15:04 +0000 Subject: [PATCH 020/464] Removed deprecation warning #55 --- mongoengine/queryset/manager.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/mongoengine/queryset/manager.py b/mongoengine/queryset/manager.py index 7376e3c..08d4d3a 100644 --- a/mongoengine/queryset/manager.py +++ b/mongoengine/queryset/manager.py @@ -54,8 +54,4 @@ def queryset_manager(func): function should return a :class:`~mongoengine.queryset.QuerySet`, probably the same one that was passed in, but modified in some way. 
""" - if func.func_code.co_argcount == 1: - import warnings - msg = 'Methods decorated with queryset_manager should take 2 arguments' - warnings.warn(msg, DeprecationWarning) return QuerySetManager(func) From b8d53a6f0d69c81e0566f5cadbe9237e1f530131 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 8 Nov 2012 12:04:14 +0000 Subject: [PATCH 021/464] Added json serialisation support - Added to_json and from_json to Document (MongoEngine/mongoengine#1) - Added to_json and from_json to QuerySet (MongoEngine/mongoengine#131) --- docs/changelog.rst | 4 +- mongoengine/base/document.py | 10 ++++ mongoengine/queryset/queryset.py | 10 ++++ tests/document/__init__.py | 5 +- tests/document/instance.py | 2 +- tests/document/json_serialisation.py | 81 ++++++++++++++++++++++++++++ tests/test_fields.py | 39 ++++++++------ tests/test_queryset.py | 70 +++++++++++++++++++++++- 8 files changed, 201 insertions(+), 20 deletions(-) create mode 100644 tests/document/json_serialisation.py diff --git a/docs/changelog.rst b/docs/changelog.rst index ca450f1..26108b5 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,7 +4,9 @@ Changelog Changes in 0.8 ============== -- Updated index creation now tied to Document class ((MongoEngine/mongoengine#102) +- Added to_json and from_json to Document (MongoEngine/mongoengine#1) +- Added to_json and from_json to QuerySet (MongoEngine/mongoengine#131) +- Updated index creation now tied to Document class (MongoEngine/mongoengine#102) - Added none() to queryset (MongoEngine/mongoengine#127) - Updated SequenceFields to allow post processing of the calculated counter value (MongoEngine/mongoengine#141) - Added clean method to documents for pre validation data cleaning (MongoEngine/mongoengine#60) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 46f5320..939c9fb 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -2,6 +2,7 @@ import operator from functools import partial import pymongo 
+from bson import json_util from bson.dbref import DBRef from mongoengine import signals @@ -253,6 +254,15 @@ class BaseDocument(object): if errors: raise ValidationError('ValidationError', errors=errors) + def to_json(self): + """Converts a document to JSON""" + return json_util.dumps(self.to_mongo()) + + @classmethod + def from_json(cls, json_data): + """Converts json data to an unsaved document instance""" + return cls._from_son(json_util.loads(json_data)) + def __expand_dynamic_values(self, name, value): """expand any dynamic values to their correct types / values""" if not isinstance(value, (dict, list, tuple)): diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 058bdd8..3c44f01 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -6,6 +6,7 @@ import re import warnings from bson.code import Code +from bson import json_util import pymongo from pymongo.common import validate_read_preference @@ -1216,6 +1217,15 @@ class QuerySet(object): max_depth += 1 return self._dereference(self, max_depth=max_depth) + def to_json(self): + """Converts a queryset to JSON""" + return json_util.dumps(self._collection_obj.find(self._query)) + + def from_json(self, json_data): + """Converts json data to unsaved objects""" + son_data = json_util.loads(json_data) + return [self._document._from_son(data) for data in son_data] + @property def _dereference(self): if not self.__dereference: diff --git a/tests/document/__init__.py b/tests/document/__init__.py index 1ef2520..7774ee1 100644 --- a/tests/document/__init__.py +++ b/tests/document/__init__.py @@ -1,4 +1,6 @@ -# TODO EXPLICT IMPORTS +import sys +sys.path[0:0] = [""] +import unittest from class_methods import * from delta import * @@ -6,6 +8,7 @@ from dynamic import * from indexes import * from inheritance import * from instance import * +from json_serialisation import * if __name__ == '__main__': unittest.main() diff --git a/tests/document/instance.py 
b/tests/document/instance.py index 2e07eb2..2118575 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -346,7 +346,7 @@ class InstanceTest(unittest.TestCase): meta = {'shard_key': ('superphylum',)} Animal.drop_collection() - doc = Animal(superphylum = 'Deuterostomia') + doc = Animal(superphylum='Deuterostomia') doc.save() doc.reload() Animal.drop_collection() diff --git a/tests/document/json_serialisation.py b/tests/document/json_serialisation.py new file mode 100644 index 0000000..dbc09d8 --- /dev/null +++ b/tests/document/json_serialisation.py @@ -0,0 +1,81 @@ +import sys +sys.path[0:0] = [""] + +import unittest +import uuid + +from nose.plugins.skip import SkipTest +from datetime import datetime +from bson import ObjectId + +import pymongo + +from mongoengine import * + +__all__ = ("TestJson",) + + +class TestJson(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + + def test_json_simple(self): + + class Embedded(EmbeddedDocument): + string = StringField() + + class Doc(Document): + string = StringField() + embedded_field = EmbeddedDocumentField(Embedded) + + doc = Doc(string="Hi", embedded_field=Embedded(string="Hi")) + + self.assertEqual(doc, Doc.from_json(doc.to_json())) + + def test_json_complex(self): + + if pymongo.version_tuple[0] <= 2 and pymongo.version_tuple[1] <= 3: + raise SkipTest("Need pymongo 2.4 as has a fix for DBRefs") + + class EmbeddedDoc(EmbeddedDocument): + pass + + class Simple(Document): + pass + + class Doc(Document): + string_field = StringField(default='1') + int_field = IntField(default=1) + float_field = FloatField(default=1.1) + boolean_field = BooleanField(default=True) + datetime_field = DateTimeField(default=datetime.now) + embedded_document_field = EmbeddedDocumentField(EmbeddedDoc, + default=lambda: EmbeddedDoc()) + list_field = ListField(default=lambda: [1, 2, 3]) + dict_field = DictField(default=lambda: {"hello": "world"}) + objectid_field = ObjectIdField(default=ObjectId) + 
reference_field = ReferenceField(Simple, default=lambda: + Simple().save()) + map_field = MapField(IntField(), default=lambda: {"simple": 1}) + decimal_field = DecimalField(default=1.0) + complex_datetime_field = ComplexDateTimeField(default=datetime.now) + url_field = URLField(default="http://mongoengine.org") + dynamic_field = DynamicField(default=1) + generic_reference_field = GenericReferenceField( + default=lambda: Simple().save()) + sorted_list_field = SortedListField(IntField(), + default=lambda: [1, 2, 3]) + email_field = EmailField(default="ross@example.com") + geo_point_field = GeoPointField(default=lambda: [1, 2]) + sequence_field = SequenceField() + uuid_field = UUIDField(default=uuid.uuid4) + generic_embedded_document_field = GenericEmbeddedDocumentField( + default=lambda: EmbeddedDoc()) + + doc = Doc() + self.assertEqual(doc, Doc.from_json(doc.to_json())) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_fields.py b/tests/test_fields.py index f1a36ed..69cce87 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -606,7 +606,8 @@ class FieldTest(unittest.TestCase): name = StringField() class CategoryList(Document): - categories = SortedListField(EmbeddedDocumentField(Category), ordering='count', reverse=True) + categories = SortedListField(EmbeddedDocumentField(Category), + ordering='count', reverse=True) name = StringField() catlist = CategoryList(name="Top categories") @@ -1616,8 +1617,9 @@ class FieldTest(unittest.TestCase): """Ensure that value is in a container of allowed values. 
""" class Shirt(Document): - size = StringField(max_length=3, choices=(('S', 'Small'), ('M', 'Medium'), ('L', 'Large'), - ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large'))) + size = StringField(max_length=3, choices=( + ('S', 'Small'), ('M', 'Medium'), ('L', 'Large'), + ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large'))) Shirt.drop_collection() @@ -1633,12 +1635,15 @@ class FieldTest(unittest.TestCase): Shirt.drop_collection() def test_choices_get_field_display(self): - """Test dynamic helper for returning the display value of a choices field. + """Test dynamic helper for returning the display value of a choices + field. """ class Shirt(Document): - size = StringField(max_length=3, choices=(('S', 'Small'), ('M', 'Medium'), ('L', 'Large'), - ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large'))) - style = StringField(max_length=3, choices=(('S', 'Small'), ('B', 'Baggy'), ('W', 'wide')), default='S') + size = StringField(max_length=3, choices=( + ('S', 'Small'), ('M', 'Medium'), ('L', 'Large'), + ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large'))) + style = StringField(max_length=3, choices=( + ('S', 'Small'), ('B', 'Baggy'), ('W', 'wide')), default='S') Shirt.drop_collection() @@ -1665,7 +1670,8 @@ class FieldTest(unittest.TestCase): """Ensure that value is in a container of allowed values. """ class Shirt(Document): - size = StringField(max_length=3, choices=('S', 'M', 'L', 'XL', 'XXL')) + size = StringField(max_length=3, + choices=('S', 'M', 'L', 'XL', 'XXL')) Shirt.drop_collection() @@ -1681,11 +1687,15 @@ class FieldTest(unittest.TestCase): Shirt.drop_collection() def test_simple_choices_get_field_display(self): - """Test dynamic helper for returning the display value of a choices field. + """Test dynamic helper for returning the display value of a choices + field. 
""" class Shirt(Document): - size = StringField(max_length=3, choices=('S', 'M', 'L', 'XL', 'XXL')) - style = StringField(max_length=3, choices=('Small', 'Baggy', 'wide'), default='Small') + size = StringField(max_length=3, + choices=('S', 'M', 'L', 'XL', 'XXL')) + style = StringField(max_length=3, + choices=('Small', 'Baggy', 'wide'), + default='Small') Shirt.drop_collection() @@ -1736,7 +1746,7 @@ class FieldTest(unittest.TestCase): self.assertTrue(putfile == result) self.assertEqual(result.the_file.read(), text) self.assertEqual(result.the_file.content_type, content_type) - result.the_file.delete() # Remove file from GridFS + result.the_file.delete() # Remove file from GridFS PutFile.objects.delete() # Ensure file-like objects are stored @@ -1801,7 +1811,6 @@ class FieldTest(unittest.TestCase): the_file = FileField() DemoFile.objects.create() - def test_file_field_no_default(self): class GridDocument(Document): @@ -1817,7 +1826,6 @@ class FieldTest(unittest.TestCase): doc_a = GridDocument() doc_a.save() - doc_b = GridDocument.objects.with_id(doc_a.id) doc_b.the_file.replace(f, filename='doc_b') doc_b.save() @@ -1859,7 +1867,7 @@ class FieldTest(unittest.TestCase): # Second instance test_file_dupe = TestFile() - data = test_file_dupe.the_file.read() # Should be None + data = test_file_dupe.the_file.read() # Should be None self.assertTrue(test_file.name != test_file_dupe.name) self.assertTrue(test_file.the_file.read() != data) @@ -2328,7 +2336,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(error_dict['comments'][1]['content'], u'Field is required') - post.comments[1].content = 'here we go' post.validate() diff --git a/tests/test_queryset.py b/tests/test_queryset.py index 09b6b3f..9dfe9a2 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -1,7 +1,10 @@ from __future__ import with_statement import sys sys.path[0:0] = [""] + import unittest +import uuid +from nose.plugins.skip import SkipTest from datetime import datetime, timedelta @@ 
-74,7 +77,6 @@ class QuerySetTest(unittest.TestCase): def test_generic_reference(): list(BlogPost.objects(author2__name="test")) - def test_find(self): """Ensure that a query returns a valid set of results. """ @@ -3672,6 +3674,72 @@ class QueryFieldListTest(unittest.TestCase): self.assertRaises(ConfigurationError, Bar.objects, read_preference='Primary') + def test_json_simple(self): + + class Embedded(EmbeddedDocument): + string = StringField() + + class Doc(Document): + string = StringField() + embedded_field = EmbeddedDocumentField(Embedded) + + Doc.drop_collection() + Doc(string="Hi", embedded_field=Embedded(string="Hi")).save() + Doc(string="Bye", embedded_field=Embedded(string="Bye")).save() + + Doc().save() + json_data = Doc.objects.to_json() + doc_objects = list(Doc.objects) + + self.assertEqual(doc_objects, Doc.objects.from_json(json_data)) + + def test_json_complex(self): + if pymongo.version_tuple[0] <= 2 and pymongo.version_tuple[1] <= 3: + raise SkipTest("Need pymongo 2.4 as has a fix for DBRefs") + + class EmbeddedDoc(EmbeddedDocument): + pass + + class Simple(Document): + pass + + class Doc(Document): + string_field = StringField(default='1') + int_field = IntField(default=1) + float_field = FloatField(default=1.1) + boolean_field = BooleanField(default=True) + datetime_field = DateTimeField(default=datetime.now) + embedded_document_field = EmbeddedDocumentField(EmbeddedDoc, + default=lambda: EmbeddedDoc()) + list_field = ListField(default=lambda: [1, 2, 3]) + dict_field = DictField(default=lambda: {"hello": "world"}) + objectid_field = ObjectIdField(default=ObjectId) + reference_field = ReferenceField(Simple, default=lambda: + Simple().save()) + map_field = MapField(IntField(), default=lambda: {"simple": 1}) + decimal_field = DecimalField(default=1.0) + complex_datetime_field = ComplexDateTimeField(default=datetime.now) + url_field = URLField(default="http://mongoengine.org") + dynamic_field = DynamicField(default=1) + generic_reference_field = 
GenericReferenceField( + default=lambda: Simple().save()) + sorted_list_field = SortedListField(IntField(), + default=lambda: [1, 2, 3]) + email_field = EmailField(default="ross@example.com") + geo_point_field = GeoPointField(default=lambda: [1, 2]) + sequence_field = SequenceField() + uuid_field = UUIDField(default=uuid.uuid4) + generic_embedded_document_field = GenericEmbeddedDocumentField( + default=lambda: EmbeddedDoc()) + + Simple.drop_collection() + Doc.drop_collection() + + Doc().save() + json_data = Doc.objects.to_json() + doc_objects = list(Doc.objects) + + self.assertEqual(doc_objects, Doc.objects.from_json(json_data)) if __name__ == '__main__': From 363e50abbe1db318472de82ad583c98cef3e61c3 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 8 Nov 2012 14:46:56 +0000 Subject: [PATCH 022/464] Updated documents with embedded documents can be created in a single operation (MongoEngine/mongoengine#6) --- docs/changelog.rst | 1 + mongoengine/base/document.py | 18 ++++++++++++++++-- mongoengine/common.py | 5 +++-- tests/document/instance.py | 36 ++++++++++++++++++++++++++++++++++++ 4 files changed, 56 insertions(+), 4 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 26108b5..778a047 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8 ============== +- Added support for creating documents with embedded documents in a single operation (MongoEngine/mongoengine#6) - Added to_json and from_json to Document (MongoEngine/mongoengine#1) - Added to_json and from_json to QuerySet (MongoEngine/mongoengine#131) - Updated index creation now tied to Document class (MongoEngine/mongoengine#102) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 939c9fb..2dd4b03 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -28,7 +28,14 @@ class BaseDocument(object): _dynamic_lock = True _initialised = False - def __init__(self, **values): + def __init__(self, 
__auto_convert=True, **values): + """ + Initialise a document or embedded document + + :param __auto_convert: Try and will cast python objects to Object types + :param values: A dictionary of values for the document + """ + signals.pre_init.send(self.__class__, document=self, values=values) self._data = {} @@ -50,9 +57,16 @@ class BaseDocument(object): elif self._dynamic: dynamic_data[key] = value else: + FileField = _import_class('FileField') for key, value in values.iteritems(): key = self._reverse_db_field_map.get(key, key) + if (value is not None and __auto_convert and + key in self._fields): + field = self._fields.get(key) + if not isinstance(field, FileField): + value = field.to_python(value) setattr(self, key, value) + # Set any get_fieldname_display methods self.__set_field_display() @@ -487,7 +501,7 @@ class BaseDocument(object): % (cls._class_name, errors)) raise InvalidDocumentError(msg) - obj = cls(**data) + obj = cls(__auto_convert=False, **data) obj._changed_fields = changed_fields obj._created = False return obj diff --git a/mongoengine/common.py b/mongoengine/common.py index c76801c..a8422c0 100644 --- a/mongoengine/common.py +++ b/mongoengine/common.py @@ -9,8 +9,9 @@ def _import_class(cls_name): doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument', 'MapReduceDocument') field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField', - 'GenericReferenceField', 'GenericEmbeddedDocumentField', - 'GeoPointField', 'ReferenceField', 'StringField') + 'FileField', 'GenericReferenceField', + 'GenericEmbeddedDocumentField', 'GeoPointField', + 'ReferenceField', 'StringField') queryset_classes = ('OperationError',) deref_classes = ('DeReference',) diff --git a/tests/document/instance.py b/tests/document/instance.py index 2118575..8fb4fd7 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -2005,5 +2005,41 @@ class ValidatorErrorTest(unittest.TestCase): self.assertRaises(OperationError, change_shard_key) + def 
test_kwargs_simple(self): + + class Embedded(EmbeddedDocument): + name = StringField() + + class Doc(Document): + doc_name = StringField() + doc = EmbeddedDocumentField(Embedded) + + classic_doc = Doc(doc_name="my doc", doc=Embedded(name="embedded doc")) + dict_doc = Doc(**{"doc_name": "my doc", + "doc": {"name": "embedded doc"}}) + + self.assertEqual(classic_doc, dict_doc) + self.assertEqual(classic_doc._data, dict_doc._data) + + def test_kwargs_complex(self): + + class Embedded(EmbeddedDocument): + name = StringField() + + class Doc(Document): + doc_name = StringField() + docs = ListField(EmbeddedDocumentField(Embedded)) + + classic_doc = Doc(doc_name="my doc", docs=[ + Embedded(name="embedded doc1"), + Embedded(name="embedded doc2")]) + dict_doc = Doc(**{"doc_name": "my doc", + "docs": [{"name": "embedded doc1"}, + {"name": "embedded doc2"}]}) + + self.assertEqual(classic_doc, dict_doc) + self.assertEqual(classic_doc._data, dict_doc._data) + + if __name__ == '__main__': unittest.main() From 1a93b9b2263a71f179a35760b84b5d740fdd4f57 Mon Sep 17 00:00:00 2001 From: helduel Date: Thu, 8 Nov 2012 16:30:29 +0100 Subject: [PATCH 023/464] More precise "created" keyword argument signals If a document has a user given id value, the post_save signal always got the "created" keyword argument with False value (unless force_insert is True). This patch uses the result of getlasterror to check whether the save was an update or not. 
--- mongoengine/document.py | 19 +++++++++++++++---- tests/test_signals.py | 29 +++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+), 4 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 7b3afaf..694d1ed 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -233,13 +233,24 @@ class Document(BaseDocument): actual_key = self._db_field_map.get(k, k) select_dict[actual_key] = doc[actual_key] + def is_new_object(last_error): + if last_error is not None: + updated = last_error.get("updatedExisting") + if updated is not None: + return not updated + return created + upsert = self._created if updates: - collection.update(select_dict, {"$set": updates}, - upsert=upsert, safe=safe, **write_options) + last_error = collection.update(select_dict, + {"$set": updates}, upsert=upsert, safe=safe, + **write_options) + created = is_new_object(last_error) if removals: - collection.update(select_dict, {"$unset": removals}, - upsert=upsert, safe=safe, **write_options) + last_error = collection.update(select_dict, + {"$unset": removals}, upsert=upsert, safe=safe, + **write_options) + created = created or is_new_object(last_error) warn_cascade = not cascade and 'cascade' not in self._meta cascade = (self._meta.get('cascade', True) diff --git a/tests/test_signals.py b/tests/test_signals.py index d119924..2ca820d 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -108,6 +108,20 @@ class SignalTests(unittest.TestCase): signal_output.append('post_delete Another signal, %s' % document) self.Another = Another + + class ExplicitId(Document): + id = IntField(primary_key=True) + + @classmethod + def post_save(cls, sender, document, **kwargs): + if 'created' in kwargs: + if kwargs['created']: + signal_output.append('Is created') + else: + signal_output.append('Is updated') + + self.ExplicitId = ExplicitId + self.ExplicitId.objects.delete() # Save up the number of connected signals so that we can check at the end # that all the 
signals we register get properly unregistered self.pre_signals = ( @@ -137,6 +151,8 @@ class SignalTests(unittest.TestCase): signals.pre_delete.connect(Another.pre_delete, sender=Another) signals.post_delete.connect(Another.post_delete, sender=Another) + signals.post_save.connect(ExplicitId.post_save, sender=ExplicitId) + def tearDown(self): signals.pre_init.disconnect(self.Author.pre_init) signals.post_init.disconnect(self.Author.post_init) @@ -154,6 +170,8 @@ class SignalTests(unittest.TestCase): signals.post_save.disconnect(self.Another.post_save) signals.pre_save.disconnect(self.Another.pre_save) + signals.post_save.disconnect(self.ExplicitId.post_save) + # Check that all our signals got disconnected properly. post_signals = ( len(signals.pre_init.receivers), @@ -166,6 +184,8 @@ class SignalTests(unittest.TestCase): len(signals.post_bulk_insert.receivers), ) + self.ExplicitId.objects.delete() + self.assertEqual(self.pre_signals, post_signals) def test_model_signals(self): @@ -228,3 +248,12 @@ class SignalTests(unittest.TestCase): ]) self.Author.objects.delete() + + def test_signals_with_explicit_doc_ids(self): + """ Model saves must have a created flag the first time.""" + ei = self.ExplicitId(id=123) + # post save must received the created flag, even if there's already + # an object id present + self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + # second time, it must be an update + self.assertEqual(self.get_signal_output(ei.save), ['Is updated']) From f265915aa227f941fa07fae2df85155be9e0f3d1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 8 Nov 2012 16:35:20 +0000 Subject: [PATCH 024/464] Updated inheritable objects created by upsert now contain _cls (MongoEngine/mongoengine#118) --- docs/changelog.rst | 1 + mongoengine/queryset/queryset.py | 12 ++++++++++-- tests/test_queryset.py | 19 +++++++++++++++++++ 3 files changed, 30 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 778a047..e8d3d57 100644 
--- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8 ============== +- Updated inheritable objects created by upsert now contain _cls (MongoEngine/mongoengine#118) - Added support for creating documents with embedded documents in a single operation (MongoEngine/mongoengine#6) - Added to_json and from_json to Document (MongoEngine/mongoengine#1) - Added to_json and from_json to QuerySet (MongoEngine/mongoengine#131) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 3c44f01..bfd15a8 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -824,8 +824,16 @@ class QuerySet(object): if not write_options: write_options = {} - update = transform.update(self._document, **update) query = self._query + update = transform.update(self._document, **update) + + # If doing an atomic upsert on an inheritable class + # then ensure we add _cls to the update operation + if upsert and '_cls' in query: + if '$set' in update: + update["$set"]["_cls"] = self._document._class_name + else: + update["$set"] = {"_cls": self._document._class_name} try: ret = self._collection.update(query, update, multi=multi, @@ -852,7 +860,7 @@ class QuerySet(object): .. 
versionadded:: 0.2 """ - return self.update(safe_update=True, upsert=False, multi=False, + return self.update(safe_update=True, upsert=upsert, multi=False, write_options=None, **update) def __iter__(self): diff --git a/tests/test_queryset.py b/tests/test_queryset.py index 9dfe9a2..a86920e 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -3741,6 +3741,25 @@ class QueryFieldListTest(unittest.TestCase): self.assertEqual(doc_objects, Doc.objects.from_json(json_data)) + def test_upsert_includes_cls(self): + """Upserts should include _cls information for inheritable classes + """ + + class Test(Document): + test = StringField() + + Test.drop_collection() + Test.objects(test='foo').update_one(upsert=True, set__test='foo') + self.assertFalse('_cls' in Test._collection.find_one()) + + class Test(Document): + meta = {'allow_inheritance': True} + test = StringField() + + Test.drop_collection() + + Test.objects(test='foo').update_one(upsert=True, set__test='foo') + self.assertTrue('_cls' in Test._collection.find_one()) if __name__ == '__main__': unittest.main() From dfdc0d92c3f95e9b6788c14c8fb9facaa4bfbfc9 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 8 Nov 2012 16:40:58 +0000 Subject: [PATCH 025/464] Updated docs --- docs/guide/defining-documents.rst | 2 +- docs/guide/document-instances.rst | 2 +- docs/upgrade.rst | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index ea8e05b..9abea9b 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -597,7 +597,7 @@ Working with existing data As MongoEngine no longer defaults to needing :attr:`_cls` you can quickly and easily get working with existing data. Just define the document to match the expected schema in your database. If you have wildly varying schemas then -a :class:`~mongoengine.DynamicDocument` might be more appropriate. 
+a :class:`~mongoengine.DynamicDocument` might be more appropriate. :: # Will work with data in an existing collection named 'cmsPage' class Page(Document): diff --git a/docs/guide/document-instances.rst b/docs/guide/document-instances.rst index b3bf687..e8e7d63 100644 --- a/docs/guide/document-instances.rst +++ b/docs/guide/document-instances.rst @@ -64,7 +64,7 @@ document values for example:: .. note:: Cleaning is only called if validation is turned on and when calling -:meth:`~mongoengine.Document.save`. + :meth:`~mongoengine.Document.save`. Cascading Saves --------------- diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 44c69be..bf48527 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -14,7 +14,7 @@ Data Model The inheritance model has changed, we no longer need to store an array of :attr:`types` with the model we can just use the classname in :attr:`_cls`. This means that you will have to update your indexes for each of your -inherited classes like so: +inherited classes like so: :: # 1. Declaration of the class class Animal(Document): @@ -49,7 +49,7 @@ Document Definition The default for inheritance has changed - its now off by default and :attr:`_cls` will not be stored automatically with the class. So if you extend your :class:`~mongoengine.Document` or :class:`~mongoengine.EmbeddedDocuments` -you will need to declare :attr:`allow_inheritance` in the meta data like so: +you will need to declare :attr:`allow_inheritance` in the meta data like so: :: class Animal(Document): name = StringField() @@ -67,7 +67,7 @@ They should be replaced with :func:`~mongoengine.Document.ensure_indexes` / SequenceFields -------------- -:class:`~mongoengine.fields.SequenceField`s now inherit from `BaseField` to +:class:`~mongoengine.fields.SequenceField` now inherits from `BaseField` to allow flexible storage of the calculated value. As such MIN and MAX settings are no longer handled. 
From 787fc1cd8ba115aa1186952833a29e8ee3b0d45e Mon Sep 17 00:00:00 2001 From: yak Date: Tue, 13 Nov 2012 13:02:07 +0100 Subject: [PATCH 026/464] bug fix for RefferenceField.to_mongo when dbref=False --- mongoengine/fields.py | 2 +- tests/test_fields.py | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 01d3fc6..ee02906 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -777,7 +777,7 @@ class ReferenceField(BaseField): def to_mongo(self, document): if isinstance(document, DBRef): if not self.dbref: - return DBRef.id + return document.id return document elif not self.dbref and isinstance(document, basestring): return document diff --git a/tests/test_fields.py b/tests/test_fields.py index 9806550..abc50a3 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -1104,6 +1104,15 @@ class FieldTest(unittest.TestCase): p = Person.objects.get(name="Ross") self.assertEqual(p.parent, p1) + + def test_dbref_to_mongo(self): + class Person(Document): + name = StringField() + parent = ReferenceField('self', dbref=False) + + p1 = Person._from_son({'name':"Yakxxx", 'parent': "50a234ea469ac1eda42d347d"}) + mongoed = p1.to_mongo() + self.assertIsInstance(mongoed['parent'], ObjectId) def test_objectid_reference_fields(self): From 0da2dfd191143dcf11d3281cea7b3da2c790c99d Mon Sep 17 00:00:00 2001 From: yak Date: Tue, 13 Nov 2012 13:04:05 +0100 Subject: [PATCH 027/464] addition to AUTHORS --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 6ba2f88..bae1dcb 100644 --- a/AUTHORS +++ b/AUTHORS @@ -124,3 +124,4 @@ that much better: * Stefan Wójcik * dimonb * Garry Polley + * Jakub Kot From 28ef54986d0847dac129f5bb4b66e644e4fe947b Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 21 Nov 2012 16:53:06 +0000 Subject: [PATCH 028/464] Deprecated `get_or_create` (MongoEngine/mongoengine#35) --- docs/changelog.rst | 1 + mongoengine/queryset/queryset.py | 10 
++++++++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index e8d3d57..c3c6340 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8 ============== +- Deprecated `get_or_create` (MongoEngine/mongoengine#35) - Updated inheritable objects created by upsert now contain _cls (MongoEngine/mongoengine#118) - Added support for creating documents with embedded documents in a single operation (MongoEngine/mongoengine#6) - Added to_json and from_json to Document (MongoEngine/mongoengine#1) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index bfd15a8..dde7d55 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -232,10 +232,11 @@ class QuerySet(object): dictionary of default values for the new document may be provided as a keyword argument called :attr:`defaults`. - .. warning:: This requires two separate operations and therefore a + .. note:: This requires two separate operations and therefore a race condition exists. Because there are no transactions in mongoDB other approaches should be investigated, to ensure you - don't accidently duplicate data when using this method. + don't accidently duplicate data when using this method. This is + now scheduled to be removed before 1.0 :param write_options: optional extra keyword arguments used if we have to create a new document. @@ -244,9 +245,14 @@ class QuerySet(object): :param auto_save: if the object is to be saved automatically if not found. + .. deprecated:: 0.8 .. versionchanged:: 0.6 - added `auto_save` .. versionadded:: 0.3 """ + msg = ("get_or_create is scheduled to be deprecated. The approach is " + "flawed without transactions. 
Upserts should be preferred.") + raise DeprecationWarning(msg) + defaults = query.get('defaults', {}) if 'defaults' in query: del query['defaults'] From aa5a9ff1f428027a5eb6adec54fa6f596ae4e32d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 21 Nov 2012 17:03:32 +0000 Subject: [PATCH 029/464] Documentation update for document errors (MongoEngine/mongoengine#124) --- docs/changelog.rst | 1 + docs/guide/querying.rst | 8 +++++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index c3c6340..a13a5f1 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8 ============== +- Documentation update for document errors (MongoEngine/mongoengine#124) - Deprecated `get_or_create` (MongoEngine/mongoengine#35) - Updated inheritable objects created by upsert now contain _cls (MongoEngine/mongoengine#118) - Added support for creating documents with embedded documents in a single operation (MongoEngine/mongoengine#6) diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 1449801..d582943 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -179,9 +179,11 @@ Retrieving unique results ------------------------- To retrieve a result that should be unique in the collection, use :meth:`~mongoengine.queryset.QuerySet.get`. This will raise -:class:`~mongoengine.queryset.DoesNotExist` if no document matches the query, -and :class:`~mongoengine.queryset.MultipleObjectsReturned` if more than one -document matched the query. +:class:`~mongoengine.queryset.DoesNotExist` if +no document matches the query, and +:class:`~mongoengine.queryset.MultipleObjectsReturned` +if more than one document matched the query. 
These exceptions are merged into +your document defintions eg: `MyDoc.DoesNotExist` A variation of this method exists, :meth:`~mongoengine.queryset.Queryset.get_or_create`, that will create a new From 003454573ce86614344e11e6252d9accaa392101 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 21 Nov 2012 17:14:53 +0000 Subject: [PATCH 030/464] Making django user sparse (MongoEngine/mongoengine#128) --- mongoengine/django/auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/django/auth.py b/mongoengine/django/auth.py index 65afacf..1685f6f 100644 --- a/mongoengine/django/auth.py +++ b/mongoengine/django/auth.py @@ -70,7 +70,7 @@ class User(Document): meta = { 'allow_inheritance': True, 'indexes': [ - {'fields': ['username'], 'unique': True} + {'fields': ['username'], 'unique': True, 'sparse': True} ] } From 2c0fc142a385f791e6cc67b5bf624eb6e2df82ed Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 26 Nov 2012 21:04:06 +0000 Subject: [PATCH 031/464] Updated travis.yml --- .travis.yml | 2 +- tests/test_dereference.py | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index c10a1f3..1aa9774 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,4 +26,4 @@ notifications: branches: only: - master - - 0.7 \ No newline at end of file + - 0.8 \ No newline at end of file diff --git a/tests/test_dereference.py b/tests/test_dereference.py index 7b149db..0eb891c 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -42,6 +42,12 @@ class FieldTest(unittest.TestCase): group_obj = Group.objects.first() self.assertEqual(q, 1) + len(group_obj._data['members']) + self.assertEqual(q, 1) + + len(group_obj.members) + self.assertEqual(q, 2) + [m for m in group_obj.members] self.assertEqual(q, 2) From 66c6d14f7ac5838f48664b48a1903cb8e72b558f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 27 Nov 2012 10:50:22 +0000 Subject: [PATCH 032/464] Trying to fix seesaw test on travis --- 
tests/test_all_warnings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_all_warnings.py b/tests/test_all_warnings.py index 9b38fa6..8297e45 100644 --- a/tests/test_all_warnings.py +++ b/tests/test_all_warnings.py @@ -53,7 +53,7 @@ class TestWarnings(unittest.TestCase): p2.parent = p1 p2.save(cascade=False) - self.assertEqual(len(self.warning_list), 1) + self.assertTrue(len(self.warning_list) > 0) warning = self.warning_list[0] self.assertEqual(FutureWarning, warning["category"]) self.assertTrue("ReferenceFields will default to using ObjectId" From 9f5ab8149f4085a305f9a7b86ce3d4e9ab497514 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 27 Nov 2012 11:06:55 +0000 Subject: [PATCH 033/464] Adding some debugging --- tests/test_all_warnings.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/test_all_warnings.py b/tests/test_all_warnings.py index 8297e45..7ef1f21 100644 --- a/tests/test_all_warnings.py +++ b/tests/test_all_warnings.py @@ -77,6 +77,8 @@ class TestWarnings(unittest.TestCase): p2.save() self.assertEqual(len(self.warning_list), 1) + if len(self.warning_list) > 1: + print self.warning_list warning = self.warning_list[0] self.assertEqual(FutureWarning, warning["category"]) self.assertTrue("Cascading saves will default to off in 0.8" From 653c4259eebc0f37e1e2b82d5510d5c4c9475f87 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 27 Nov 2012 11:59:34 +0000 Subject: [PATCH 034/464] Fixed handling for old style types --- docs/changelog.rst | 4 ++++ mongoengine/__init__.py | 2 +- mongoengine/base.py | 9 ++++----- python-mongoengine.spec | 2 +- 4 files changed, 10 insertions(+), 7 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index aac24c6..7457eeb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -2,6 +2,10 @@ Changelog ========= +Changes in 0.7.7 +================ +- Fix handling for old style _types + Changes in 0.7.6 ================ - Unicode fix for repr 
(MongoEngine/mongoengine#133) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index cdfbfff..9f1f552 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -12,7 +12,7 @@ from signals import * __all__ = (document.__all__ + fields.__all__ + connection.__all__ + queryset.__all__ + signals.__all__) -VERSION = (0, 7, 6) +VERSION = (0, 7, 7) def get_version(): diff --git a/mongoengine/base.py b/mongoengine/base.py index fa12e35..208e0e5 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -121,11 +121,10 @@ class ValidationError(AssertionError): def get_document(name): doc = _document_registry.get(name, None) if not doc: - # Possible old style names - end = ".%s" % name - possible_match = [k for k in _document_registry.keys() - if k.endswith(end)] - if len(possible_match) == 1: + # Possible old style name + end = name.split('.')[-1] + possible_match = [k for k in _document_registry.keys() if k == end] + if len(possible_match) == 1 and end != name: doc = _document_registry.get(possible_match.pop(), None) if not doc: raise NotRegistered(""" diff --git a/python-mongoengine.spec b/python-mongoengine.spec index d796f99..9a376ec 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.7.6 +Version: 0.7.7 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB From 027b3d36de9a42d31757353150dd39a94a6da584 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 27 Nov 2012 14:01:58 +0000 Subject: [PATCH 035/464] Fixed deprecation warning --- mongoengine/queryset/queryset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index dde7d55..3acee36 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -251,7 +251,7 @@ class QuerySet(object): """ msg = ("get_or_create is scheduled to be deprecated. 
The approach is " "flawed without transactions. Upserts should be preferred.") - raise DeprecationWarning(msg) + warnings.warn(msg, DeprecationWarning) defaults = query.get('defaults', {}) if 'defaults' in query: From b5e868655e5ff056150b358510f8aea4a8125881 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 27 Nov 2012 14:02:49 +0000 Subject: [PATCH 036/464] Updated travis.yml --- .travis.yml | 2 +- tests/test_dereference.py | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index c10a1f3..1aa9774 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,4 +26,4 @@ notifications: branches: only: - master - - 0.7 \ No newline at end of file + - 0.8 \ No newline at end of file diff --git a/tests/test_dereference.py b/tests/test_dereference.py index c9631eb..8f61792 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -42,6 +42,12 @@ class FieldTest(unittest.TestCase): group_obj = Group.objects.first() self.assertEqual(q, 1) + len(group_obj._data['members']) + self.assertEqual(q, 1) + + len(group_obj.members) + self.assertEqual(q, 2) + [m for m in group_obj.members] self.assertEqual(q, 2) From 59e7617e82a89a0afe61f24180f8873efbb316bf Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 27 Nov 2012 14:02:49 +0000 Subject: [PATCH 037/464] Trying to fix seesaw test on travis --- tests/all_warnings/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/all_warnings/__init__.py b/tests/all_warnings/__init__.py index 4609c5a..7d4db0d 100644 --- a/tests/all_warnings/__init__.py +++ b/tests/all_warnings/__init__.py @@ -45,7 +45,7 @@ class AllWarnings(unittest.TestCase): p2.parent = p1 p2.save(cascade=False) - self.assertEqual(len(self.warning_list), 1) + self.assertTrue(len(self.warning_list) > 0) warning = self.warning_list[0] self.assertEqual(FutureWarning, warning["category"]) self.assertTrue("ReferenceFields will default to using ObjectId" From 
b849c719a8c336d388a5a341e2ffb9afd9bf86eb Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 27 Nov 2012 14:02:50 +0000 Subject: [PATCH 038/464] Adding some debugging --- tests/all_warnings/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/all_warnings/__init__.py b/tests/all_warnings/__init__.py index 7d4db0d..220b0bb 100644 --- a/tests/all_warnings/__init__.py +++ b/tests/all_warnings/__init__.py @@ -69,6 +69,8 @@ class AllWarnings(unittest.TestCase): p2.save() self.assertEqual(len(self.warning_list), 1) + if len(self.warning_list) > 1: + print self.warning_list warning = self.warning_list[0] self.assertEqual(FutureWarning, warning["category"]) self.assertTrue("Cascading saves will default to off in 0.8" From 68e4a27aaff24ca60b851da71f5c4aff45e87341 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 27 Nov 2012 14:02:50 +0000 Subject: [PATCH 039/464] Fixed handling for old style types --- docs/changelog.rst | 4 ++++ python-mongoengine.spec | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index a13a5f1..756b1cd 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -19,6 +19,10 @@ Changes in 0.8 - Inheritance is off by default (MongoEngine/mongoengine#122) - Remove _types and just use _cls for inheritance (MongoEngine/mongoengine#148) +Changes in 0.7.7 +================ +- Fix handling for old style _types + Changes in 0.7.6 ================ - Unicode fix for repr (MongoEngine/mongoengine#133) diff --git a/python-mongoengine.spec b/python-mongoengine.spec index d796f99..9a376ec 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.7.6 +Version: 0.7.7 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB From f9dd051ec90c9fc17d2cc1221d43e6549427b915 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 27 Nov 2012 14:02:50 +0000 Subject: [PATCH 040/464] 
Merged get_document fix --- mongoengine/base/common.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/mongoengine/base/common.py b/mongoengine/base/common.py index 82728d1..dc43d40 100644 --- a/mongoengine/base/common.py +++ b/mongoengine/base/common.py @@ -9,11 +9,10 @@ _document_registry = {} def get_document(name): doc = _document_registry.get(name, None) - if not doc: - # Possible old style names - end = ".%s" % name - possible_match = [k for k in _document_registry.keys() - if k.endswith(end)] + if not doc and '.' in name: + # Possible old style name + end = name.split('.')[-1] + possible_match = [k for k in _document_registry.keys() if k == end] if len(possible_match) == 1: doc = _document_registry.get(possible_match.pop(), None) if not doc: From 3598fe0fb45d969ef936fedc33db4a63371266a8 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 27 Nov 2012 14:02:50 +0000 Subject: [PATCH 041/464] Adding _collection to _cls --- mongoengine/base/metaclasses.py | 3 +++ tests/document/instance.py | 25 +++++++++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index e68ec13..c6c4db1 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -169,6 +169,9 @@ class DocumentMetaclass(type): "field name" % field.name) raise InvalidDocumentError(msg) + if issubclass(new_class, Document): + new_class._collection = None + # Add class to the _document_registry _document_registry[new_class._class_name] = new_class diff --git a/tests/document/instance.py b/tests/document/instance.py index 8fb4fd7..de677e2 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -1762,6 +1762,31 @@ class InstanceTest(unittest.TestCase): self.assertEqual(Book._get_collection(), get_db("testdb-2")[Book._get_collection_name()]) self.assertEqual(AuthorBooks._get_collection(), get_db("testdb-3")[AuthorBooks._get_collection_name()]) + def 
test_db_alias_overrides(self): + """db_alias can be overriden + """ + # Register a connection with db_alias testdb-2 + register_connection('testdb-2', 'mongoenginetest2') + + class A(Document): + """Uses default db_alias + """ + name = StringField() + meta = {"allow_inheritance": True} + + class B(A): + """Uses testdb-2 db_alias + """ + meta = {"db_alias": "testdb-2"} + + A.objects.all() + + self.assertEquals('testdb-2', B._meta.get('db_alias')) + self.assertEquals('mongoenginetest', + A._get_collection().database.name) + self.assertEquals('mongoenginetest2', + B._get_collection().database.name) + def test_db_alias_propagates(self): """db_alias propagates? """ From 219b28c97b846b90e0d755558be526bb51773a92 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 27 Nov 2012 14:04:57 +0000 Subject: [PATCH 042/464] Updated docs regarding 3598fe0fb45d969ef936fedc33db4a63371266a8 Fixed db_alias and inherited Documents (MongoEngine/mongoengine#143) --- AUTHORS | 1 + docs/changelog.rst | 1 + 2 files changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index 6ba2f88..5801981 100644 --- a/AUTHORS +++ b/AUTHORS @@ -124,3 +124,4 @@ that much better: * Stefan Wójcik * dimonb * Garry Polley + * James Slagle diff --git a/docs/changelog.rst b/docs/changelog.rst index 756b1cd..a56f33a 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8 ============== +- Fixed db_alias and inherited Documents (MongoEngine/mongoengine#143) - Documentation update for document errors (MongoEngine/mongoengine#124) - Deprecated `get_or_create` (MongoEngine/mongoengine#35) - Updated inheritable objects created by upsert now contain _cls (MongoEngine/mongoengine#118) From f6f7c12f0ecadb7ae50f29287622b9a42507dacb Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 27 Nov 2012 14:37:13 +0000 Subject: [PATCH 043/464] Added test case checking type with dbref=False Ensures when dbref=False the data is stored as the same type as the primary key of the item stored. 
MongoEngine/mongoengine#160 --- tests/test_dereference.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/tests/test_dereference.py b/tests/test_dereference.py index 8f61792..41f8aeb 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -123,6 +123,27 @@ class FieldTest(unittest.TestCase): User.drop_collection() Group.drop_collection() + def test_list_item_dereference_dref_false_stores_as_type(self): + """Ensure that DBRef items are stored as their type + """ + class User(Document): + my_id = IntField(primary_key=True) + name = StringField() + + class Group(Document): + members = ListField(ReferenceField(User, dbref=False)) + + User.drop_collection() + Group.drop_collection() + + user = User(my_id=1, name='user 1').save() + + Group(members=User.objects).save() + group = Group.objects.first() + + self.assertEqual(Group._get_collection().find_one()['members'], [1]) + self.assertEqual(group.members, [user]) + def test_handle_old_style_references(self): """Ensure that DBRef items in ListFields are dereferenced. """ From 9d52e1865931025ec4c10212db70b0b727e2a91a Mon Sep 17 00:00:00 2001 From: Peter Teichman Date: Wed, 21 Nov 2012 13:22:10 -0500 Subject: [PATCH 044/464] Don't freeze the current query state when calling .order_by() This changes order_by() to eliminate its reference to self._cursor. This meant that any parameters built by QuerySet that followed an order_by() clause were ignored. 
--- mongoengine/queryset.py | 6 ++++-- tests/test_queryset.py | 16 ++++++++++++++++ 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 5c7b9c8..58b1959 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -586,11 +586,13 @@ class QuerySet(object): if self._where_clause: self._cursor_obj.where(self._where_clause) - # apply default ordering if self._ordering: + # Apply query ordering self._cursor_obj.sort(self._ordering) elif self._document._meta['ordering']: + # Otherwise, apply the ordering from the document model self.order_by(*self._document._meta['ordering']) + self._cursor_obj.sort(self._ordering) if self._limit is not None: self._cursor_obj.limit(self._limit - (self._skip or 0)) @@ -1274,7 +1276,7 @@ class QuerySet(object): key_list.append((key, direction)) self._ordering = key_list - self._cursor.sort(key_list) + return self def explain(self, format=False): diff --git a/tests/test_queryset.py b/tests/test_queryset.py index 8f846ea..48d2a26 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -1793,6 +1793,22 @@ class QuerySetTest(unittest.TestCase): ages = [p.age for p in self.Person.objects.order_by('-name')] self.assertEqual(ages, [30, 40, 20]) + def test_order_by_chaining(self): + """Ensure that an order_by query chains properly and allows .only() + """ + self.Person(name="User A", age=20).save() + self.Person(name="User B", age=40).save() + self.Person(name="User C", age=30).save() + + only_age = self.Person.objects.order_by('-age').only('age') + + names = [p.name for p in only_age] + ages = [p.age for p in only_age] + + # The .only('age') clause should mean that all names are None + self.assertEqual(names, [None, None, None]) + self.assertEqual(ages, [40, 30, 20]) + def test_confirm_order_by_reference_wont_work(self): """Ordering by reference is not possible. Use map / reduce.. 
or denormalise""" From 3bdc9a2f0940cf03d6f72212e4db3148f0a0cfb7 Mon Sep 17 00:00:00 2001 From: Adrian Scott Date: Thu, 29 Nov 2012 20:53:09 -0500 Subject: [PATCH 045/464] session collection parameter; encoding optional Added a parameter for the name of the session collection; Added the option to not encode session_data, which is useful for expiring sessions of users when a password is changed, etc.; these upgrades provided by SocialVilla Inc. --- mongoengine/django/sessions.py | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/mongoengine/django/sessions.py b/mongoengine/django/sessions.py index f178342..0330ee1 100644 --- a/mongoengine/django/sessions.py +++ b/mongoengine/django/sessions.py @@ -15,13 +15,20 @@ MONGOENGINE_SESSION_DB_ALIAS = getattr( settings, 'MONGOENGINE_SESSION_DB_ALIAS', DEFAULT_CONNECTION_NAME) +MONGOENGINE_SESSION_COLLECTION = getattr( + settings, 'MONGOENGINE_SESSION_COLLECTION', + 'django_session') + +MONGOENGINE_SESSION_DATA_ENCODE = getattr( + settings, 'MONGOENGINE_SESSION_DATA_ENCODE', + True) class MongoSession(Document): session_key = fields.StringField(primary_key=True, max_length=40) - session_data = fields.StringField() + session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE else fields.DictField() expire_date = fields.DateTimeField() - meta = {'collection': 'django_session', + meta = {'collection': MONGOENGINE_SESSION_COLLECTION, 'db_alias': MONGOENGINE_SESSION_DB_ALIAS, 'allow_inheritance': False} @@ -34,7 +41,10 @@ class SessionStore(SessionBase): try: s = MongoSession.objects(session_key=self.session_key, expire_date__gt=datetime.now())[0] - return self.decode(force_unicode(s.session_data)) + if MONGOENGINE_SESSION_DATA_ENCODE: + return self.decode(force_unicode(s.session_data)) + else: + return s.session_data except (IndexError, SuspiciousOperation): self.create() return {} @@ -57,7 +67,10 @@ class SessionStore(SessionBase): if self.session_key is None: self._session_key = 
self._get_new_session_key() s = MongoSession(session_key=self.session_key) - s.session_data = self.encode(self._get_session(no_load=must_create)) + if MONGOENGINE_SESSION_DATA_ENCODE: + s.session_data = self.encode(self._get_session(no_load=must_create)) + else: + s.session_data = self._get_session(no_load=must_create) s.expire_date = self.get_expiry_date() try: s.save(force_insert=must_create, safe=True) From b10d76cf4b396edf5473a386717a31831b749500 Mon Sep 17 00:00:00 2001 From: Adrian Scott Date: Thu, 29 Nov 2012 21:28:03 -0500 Subject: [PATCH 046/464] split line to meet 79 char max line limit --- mongoengine/django/sessions.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mongoengine/django/sessions.py b/mongoengine/django/sessions.py index 0330ee1..ca7b01f 100644 --- a/mongoengine/django/sessions.py +++ b/mongoengine/django/sessions.py @@ -25,7 +25,8 @@ MONGOENGINE_SESSION_DATA_ENCODE = getattr( class MongoSession(Document): session_key = fields.StringField(primary_key=True, max_length=40) - session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE else fields.DictField() + session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE \ + else fields.DictField() expire_date = fields.DateTimeField() meta = {'collection': MONGOENGINE_SESSION_COLLECTION, From 4fe87b40da989bdd8caecc56fe48e025f0061ade Mon Sep 17 00:00:00 2001 From: Adrian Scott Date: Thu, 29 Nov 2012 21:49:54 -0500 Subject: [PATCH 047/464] added comments --- mongoengine/django/sessions.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mongoengine/django/sessions.py b/mongoengine/django/sessions.py index ca7b01f..810b626 100644 --- a/mongoengine/django/sessions.py +++ b/mongoengine/django/sessions.py @@ -15,10 +15,12 @@ MONGOENGINE_SESSION_DB_ALIAS = getattr( settings, 'MONGOENGINE_SESSION_DB_ALIAS', DEFAULT_CONNECTION_NAME) +# a setting for the name of the collection used to store sessions MONGOENGINE_SESSION_COLLECTION = getattr( settings, 
'MONGOENGINE_SESSION_COLLECTION', 'django_session') +# a setting for whether session data is stored encoded or not MONGOENGINE_SESSION_DATA_ENCODE = getattr( settings, 'MONGOENGINE_SESSION_DATA_ENCODE', True) From 376d1c97ab9bce3df6f060bc18e9edd6c64a3e87 Mon Sep 17 00:00:00 2001 From: Shaun Duncan Date: Tue, 4 Dec 2012 13:08:49 -0500 Subject: [PATCH 048/464] EmailField should honor StringField validation as well --- mongoengine/fields.py | 1 + tests/test_fields.py | 12 ++++++++++++ 2 files changed, 13 insertions(+) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 1f86560..368c000 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -141,6 +141,7 @@ class EmailField(StringField): def validate(self, value): if not EmailField.EMAIL_REGEX.match(value): self.error('Invalid Mail-address: %s' % value) + super(EmailField, self).validate(value) class IntField(BaseField): diff --git a/tests/test_fields.py b/tests/test_fields.py index 68c79b5..db23d93 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -2114,6 +2114,18 @@ class FieldTest(unittest.TestCase): post.comments[1].content = 'here we go' post.validate() + def test_email_field_honors_regex(self): + class User(Document): + email = EmailField(regex=r'\w+@example.com') + + # Fails regex validation + user = User(email='me@foo.com') + self.assertRaises(ValidationError, user.validate) + + # Passes regex validation + user = User(email='me@example.com') + self.assertTrue(user.validate() is None) + if __name__ == '__main__': unittest.main() From 94adc207ad86883eb22d592a5844fb82e62c4b6f Mon Sep 17 00:00:00 2001 From: Jorge Bastida Date: Fri, 7 Dec 2012 11:20:27 +0000 Subject: [PATCH 049/464] First as_pymongo implementation --- mongoengine/queryset.py | 12 ++++++++++++ tests/test_queryset.py | 16 ++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index c774322..4a27caf 100644 --- a/mongoengine/queryset.py +++ 
b/mongoengine/queryset.py @@ -353,6 +353,7 @@ class QuerySet(object): self._slave_okay = False self._iter = False self._scalar = [] + self._as_pymongo = False # If inheritance is allowed, only return instances and instances of # subclasses of the class being used @@ -1002,6 +1003,10 @@ class QuerySet(object): if self._scalar: return self._get_scalar(self._document._from_son( self._cursor.next())) + + if self._as_pymongo: + return self._cursor.next() + return self._document._from_son(self._cursor.next()) except StopIteration, e: self.rewind() @@ -1602,6 +1607,13 @@ class QuerySet(object): """An alias for scalar""" return self.scalar(*fields) + def as_pymongo(self): + """Instead of returning Document instances, return raw values from + pymongo. + """ + self._as_pymongo = True + return self + def _sub_js_fields(self, code): """When fields are specified with [~fieldname] syntax, where *fieldname* is the Python name of a field, *fieldname* will be diff --git a/tests/test_queryset.py b/tests/test_queryset.py index 690df5e..1920d2f 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -3691,6 +3691,22 @@ class QueryFieldListTest(unittest.TestCase): ak = list(Bar.objects(foo__match={'shape': "square", "color": "purple"})) self.assertEqual([b1], ak) + def test_as_pymongo(self): + + class User(Document): + id = ObjectIdField('_id') + name = StringField() + age = IntField() + + User.drop_collection() + User(name="Bob Dole", age=89).save() + User(name="Barack Obama", age=51).save() + + users = [u for u in User.objects.only('name').as_pymongo()] + self.assertTrue(isinstance(users[0], dict)) + self.assertTrue(isinstance(users[1], dict)) + self.assertEqual(users[0]['name'], 'Bob Dole') + self.assertEqual(users[1]['name'], 'Barack Obama') if __name__ == '__main__': unittest.main() From bb15bf8d1349fdcef597f35517ed92939eb559e2 Mon Sep 17 00:00:00 2001 From: Adrian Scott Date: Fri, 7 Dec 2012 10:02:12 -0500 Subject: [PATCH 050/464] Update AUTHORS added me (Adrian 
Scott, issues 180, 181) --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 6ba2f88..80ea613 100644 --- a/AUTHORS +++ b/AUTHORS @@ -124,3 +124,4 @@ that much better: * Stefan Wójcik * dimonb * Garry Polley + * Adrian Scott From ad983dc279c25b0305dcd32f07cb380a87609f3e Mon Sep 17 00:00:00 2001 From: Jorge Bastida Date: Fri, 7 Dec 2012 15:42:10 +0000 Subject: [PATCH 051/464] Implement _get_as_pymongo --- mongoengine/queryset.py | 51 ++++++++++++++++++++++++++++++++++++++--- 1 file changed, 48 insertions(+), 3 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 4a27caf..be60571 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -354,6 +354,7 @@ class QuerySet(object): self._iter = False self._scalar = [] self._as_pymongo = False + self._as_pymongo_coerce = False # If inheritance is allowed, only return instances and instances of # subclasses of the class being used @@ -1003,9 +1004,8 @@ class QuerySet(object): if self._scalar: return self._get_scalar(self._document._from_son( self._cursor.next())) - if self._as_pymongo: - return self._cursor.next() + return self._get_as_pymongo(self._cursor.next()) return self._document._from_son(self._cursor.next()) except StopIteration, e: @@ -1585,6 +1585,48 @@ class QuerySet(object): return tuple(data) + def _get_as_pymongo(self, row): + # Extract which fields paths we should follow if .fields(...) was + # used. If not, handle all fields. + if not getattr(self, '__as_pymongo_fields', None): + self.__as_pymongo_fields = [] + for field in self._loaded_fields.fields - set(['_cls', '_id', '_types']): + self.__as_pymongo_fields.append(field) + while '.' 
in field: + field, _ = field.rsplit('.', 1) + self.__as_pymongo_fields.append(field) + + all_fields = not self.__as_pymongo_fields + + def clean(data, path=None): + path = path or '' + + if isinstance(data, dict): + new_data = {} + for key, value in data.iteritems(): + new_path = '%s.%s' % (path, key) if path else key + if all_fields or new_path in self.__as_pymongo_fields: + new_data[key] = clean(value, path=new_path) + data = new_data + elif isinstance(data, list): + data = [clean(d, path=path) for d in data] + else: + if self._as_pymongo_coerce: + # If we need to coerce types, we need to determine the + # type of this field and use the corresponding .to_python(...) + from mongoengine.fields import EmbeddedDocumentField + obj = self._document + for chunk in path.split('.'): + obj = getattr(obj, chunk, None) + if obj is None: + break + elif isinstance(obj, EmbeddedDocumentField): + obj = obj.document_type + if obj and data is not None: + data = obj.to_python(data) + return data + return clean(row) + def scalar(self, *fields): """Instead of returning Document instances, return either a specific value or a tuple of values in order. @@ -1607,11 +1649,14 @@ class QuerySet(object): """An alias for scalar""" return self.scalar(*fields) - def as_pymongo(self): + def as_pymongo(self, coerce_types=False): """Instead of returning Document instances, return raw values from pymongo. + + :param coerce_type: Field types (if applicable) would be use to coerce types. 
""" self._as_pymongo = True + self._as_pymongo_coerce = coerce_types return self def _sub_js_fields(self, code): From d5ec3c6a31f0409731f11a3e2fefc4b9b7184a4c Mon Sep 17 00:00:00 2001 From: Jorge Bastida Date: Fri, 7 Dec 2012 15:59:09 +0000 Subject: [PATCH 052/464] Add as_pymongo to __getitem__ and in_bulk --- mongoengine/queryset.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index be60571..d12f3bb 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -988,6 +988,9 @@ class QuerySet(object): for doc in docs: doc_map[doc['_id']] = self._get_scalar( self._document._from_son(doc)) + elif self._as_pymongo: + for doc in docs: + doc_map[doc['_id']] = self._get_as_pymongo(doc) else: for doc in docs: doc_map[doc['_id']] = self._document._from_son(doc) @@ -1189,6 +1192,8 @@ class QuerySet(object): if self._scalar: return self._get_scalar(self._document._from_son( self._cursor[key])) + if self._as_pymongo: + return self._get_as_pymongo(self._cursor.next()) return self._document._from_son(self._cursor[key]) raise AttributeError From e62c35b040b9bcc3b9d99e6ba87b1fa34b5a5af1 Mon Sep 17 00:00:00 2001 From: Jorge Bastida Date: Fri, 7 Dec 2012 16:21:31 +0000 Subject: [PATCH 053/464] Add more tests --- tests/test_queryset.py | 30 +++++++++++++++++++++++------- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/tests/test_queryset.py b/tests/test_queryset.py index 1920d2f..09a4823 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -3693,20 +3693,36 @@ class QueryFieldListTest(unittest.TestCase): def test_as_pymongo(self): + from decimal import Decimal + class User(Document): id = ObjectIdField('_id') name = StringField() age = IntField() + price = DecimalField() User.drop_collection() - User(name="Bob Dole", age=89).save() - User(name="Barack Obama", age=51).save() + User(name="Bob Dole", age=89, price=Decimal('1.11')).save() + User(name="Barack Obama", age=51, 
price=Decimal('2.22')).save() - users = [u for u in User.objects.only('name').as_pymongo()] - self.assertTrue(isinstance(users[0], dict)) - self.assertTrue(isinstance(users[1], dict)) - self.assertEqual(users[0]['name'], 'Bob Dole') - self.assertEqual(users[1]['name'], 'Barack Obama') + users = User.objects.only('name', 'price').as_pymongo() + results = list(users) + self.assertTrue(isinstance(results[0], dict)) + self.assertTrue(isinstance(results[1], dict)) + self.assertEqual(results[0]['name'], 'Bob Dole') + self.assertEqual(results[0]['price'], '1.11') + self.assertEqual(results[1]['name'], 'Barack Obama') + self.assertEqual(results[1]['price'], '2.22') + + # Test coerce_types + users = User.objects.only('name', 'price').as_pymongo(coerce_types=True) + results = list(users) + self.assertTrue(isinstance(results[0], dict)) + self.assertTrue(isinstance(results[1], dict)) + self.assertEqual(results[0]['name'], 'Bob Dole') + self.assertEqual(results[0]['price'], Decimal('1.11')) + self.assertEqual(results[1]['name'], 'Barack Obama') + self.assertEqual(results[1]['price'], Decimal('2.22')) if __name__ == '__main__': unittest.main() From 452cd125faf161feff10f79fe080405074d21650 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 10 Dec 2012 08:11:35 +0000 Subject: [PATCH 054/464] Updated Changelog --- docs/changelog.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 7457eeb..355449f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -2,6 +2,11 @@ Changelog ========= + +Changes in 0.7.8 +================ +- Added as_pymongo method to return raw or cast results from pymongo (MongoEngine/mongoengine#193) + Changes in 0.7.7 ================ - Fix handling for old style _types From 6997e0247665605ba872f82254db11328739829d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 10 Dec 2012 08:23:41 +0000 Subject: [PATCH 055/464] Fixed EmailField so can add extra validation (MongoEngine/mongoengine#173, 
MongoEngine/mongoengine#174, MongoEngine/mongoengine#187) --- AUTHORS | 2 +- docs/changelog.rst | 2 ++ mongoengine/queryset.py | 2 +- tests/test_queryset.py | 4 ++++ 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/AUTHORS b/AUTHORS index 6ba2f88..5e6f9e7 100644 --- a/AUTHORS +++ b/AUTHORS @@ -106,7 +106,7 @@ that much better: * Adam Reeve * Anthony Nemitz * deignacio - * shaunduncan + * Shaun Duncan * Meir Kriheli * Andrey Fedoseev * aparajita diff --git a/docs/changelog.rst b/docs/changelog.rst index 355449f..3dff3bd 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,8 @@ Changelog Changes in 0.7.8 ================ +- Fixed EmailField so can add extra validation (MongoEngine/mongoengine#173, MongoEngine/mongoengine#174, MongoEngine/mongoengine#187) +- Fixed bulk inserts can now handle custom pk's (MongoEngine/mongoengine#192) - Added as_pymongo method to return raw or cast results from pymongo (MongoEngine/mongoengine#193) Changes in 0.7.7 diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index d12f3bb..f0609ff 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -929,7 +929,7 @@ class QuerySet(object): if not isinstance(doc, self._document): msg = "Some documents inserted aren't instances of %s" % str(self._document) raise OperationError(msg) - if doc.pk: + if doc.pk and not doc._created: msg = "Some documents have ObjectIds use doc.update() instead" raise OperationError(msg) raw.append(doc.to_mongo()) diff --git a/tests/test_queryset.py b/tests/test_queryset.py index 09a4823..aed606d 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -591,6 +591,10 @@ class QuerySetTest(unittest.TestCase): self.assertRaises(OperationError, throw_operation_error) + # Test can insert new doc + new_post = Blog(title="code", id=ObjectId()) + Blog.objects.insert(new_post) + # test handles other classes being inserted def throw_operation_error_wrong_doc(): class Author(Document): From 
260d9377f537590e3d0c1cfe0dfe08dff5401784 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 10 Dec 2012 08:26:42 +0000 Subject: [PATCH 056/464] Updated Changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 3dff3bd..5fcf619 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.7.8 ================ +- Added optional encoding and collection config for Django sessions (MongoEngine/mongoengine#180, MongoEngine/mongoengine#181, MongoEngine/mongoengine#183) - Fixed EmailField so can add extra validation (MongoEngine/mongoengine#173, MongoEngine/mongoengine#174, MongoEngine/mongoengine#187) - Fixed bulk inserts can now handle custom pk's (MongoEngine/mongoengine#192) - Added as_pymongo method to return raw or cast results from pymongo (MongoEngine/mongoengine#193) From 90d22c2a28dc0946457565ca250b443c89fbe220 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 10 Dec 2012 08:50:21 +0000 Subject: [PATCH 057/464] Update AUTHORS & Changelog (MongoEngine/mongoengine#176) --- AUTHORS | 1 + docs/changelog.rst | 1 + 2 files changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index 1c57463..330256c 100644 --- a/AUTHORS +++ b/AUTHORS @@ -125,3 +125,4 @@ that much better: * dimonb * Garry Polley * Adrian Scott + * Peter Teichman diff --git a/docs/changelog.rst b/docs/changelog.rst index 5fcf619..c9535a0 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.7.8 ================ +- Fix query chaining with .order_by() (MongoEngine/mongoengine#176) - Added optional encoding and collection config for Django sessions (MongoEngine/mongoengine#180, MongoEngine/mongoengine#181, MongoEngine/mongoengine#183) - Fixed EmailField so can add extra validation (MongoEngine/mongoengine#173, MongoEngine/mongoengine#174, MongoEngine/mongoengine#187) - Fixed bulk inserts can now handle custom pk's (MongoEngine/mongoengine#192) From 
9236f365fa95b5006fe50702c29dd118d888f2e5 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 10 Dec 2012 09:11:31 +0000 Subject: [PATCH 058/464] Fix sequence fields in embedded documents (MongoEngine/mongoengine#166) --- docs/changelog.rst | 1 + mongoengine/fields.py | 13 +++++++++++-- tests/test_fields.py | 22 ++++++++++++++++++++++ 3 files changed, 34 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index c9535a0..92770af 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.7.8 ================ +- Fix sequence fields in embedded documents (MongoEngine/mongoengine#166) - Fix query chaining with .order_by() (MongoEngine/mongoengine#176) - Added optional encoding and collection config for Django sessions (MongoEngine/mongoengine#180, MongoEngine/mongoengine#181, MongoEngine/mongoengine#183) - Fixed EmailField so can add extra validation (MongoEngine/mongoengine#173, MongoEngine/mongoengine#174, MongoEngine/mongoengine#187) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 9c0bede..3f413b2 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1338,7 +1338,7 @@ class SequenceField(IntField): .. 
versionadded:: 0.5 """ - def __init__(self, collection_name=None, db_alias = None, sequence_name = None, *args, **kwargs): + def __init__(self, collection_name=None, db_alias=None, sequence_name=None, *args, **kwargs): self.collection_name = collection_name or 'mongoengine.counters' self.db_alias = db_alias or DEFAULT_CONNECTION_NAME self.sequence_name = sequence_name @@ -1348,7 +1348,7 @@ class SequenceField(IntField): """ Generate and Increment the counter """ - sequence_name = self.sequence_name or self.owner_document._get_collection_name() + sequence_name = self.get_sequence_name() sequence_id = "%s.%s" % (sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] counter = collection.find_and_modify(query={"_id": sequence_id}, @@ -1357,6 +1357,15 @@ class SequenceField(IntField): upsert=True) return counter['next'] + def get_sequence_name(self): + if self.sequence_name: + return self.sequence_name + owner = self.owner_document + if issubclass(owner, Document): + return owner._get_collection_name() + else: + return owner._class_name + def __get__(self, instance, owner): if instance is None: diff --git a/tests/test_fields.py b/tests/test_fields.py index 88e82f1..fdcc308 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -2175,6 +2175,28 @@ class FieldTest(unittest.TestCase): c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'}) self.assertEqual(c['next'], 10) + def test_embedded_sequence_field(self): + class Comment(EmbeddedDocument): + id = SequenceField() + content = StringField(required=True) + + class Post(Document): + title = StringField(required=True) + comments = ListField(EmbeddedDocumentField(Comment)) + + self.db['mongoengine.counters'].drop() + Post.drop_collection() + + Post(title="MongoEngine", + comments=[Comment(content="NoSQL Rocks"), + Comment(content="MongoEngine Rocks")]).save() + + c = self.db['mongoengine.counters'].find_one({'_id': 'Comment.id'}) + self.assertEqual(c['next'], 2) + post 
= Post.objects.first() + self.assertEqual(1, post.comments[0].id) + self.assertEqual(2, post.comments[1].id) + def test_generic_embedded_document(self): class Car(EmbeddedDocument): name = StringField() From 1bc2d2ec37ffe608ad56d7e790d087e6dfeda1bc Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 10 Dec 2012 09:54:20 +0000 Subject: [PATCH 059/464] Version Bump --- mongoengine/__init__.py | 2 +- python-mongoengine.spec | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index 9f1f552..08cabaa 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -12,7 +12,7 @@ from signals import * __all__ = (document.__all__ + fields.__all__ + connection.__all__ + queryset.__all__ + signals.__all__) -VERSION = (0, 7, 7) +VERSION = (0, 7, 8) def get_version(): diff --git a/python-mongoengine.spec b/python-mongoengine.spec index 9a376ec..f175546 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.7.7 +Version: 0.7.8 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB From 6ff1bd9b3cb623bc6c864e0856a7ec37b427fd65 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johnny=20Bergstr=C3=B6m?= Date: Mon, 10 Dec 2012 11:01:08 +0100 Subject: [PATCH 060/464] Corrected user guide link in README --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index ae6bd0e..5eab502 100644 --- a/README.rst +++ b/README.rst @@ -14,7 +14,7 @@ About MongoEngine is a Python Object-Document Mapper for working with MongoDB. Documentation available at http://mongoengine-odm.rtfd.org - there is currently a `tutorial `_, a `user guide -`_ and an `API reference +`_ and an `API reference `_. 
Installation From b15c3f6a3f1f54023cb7ca9855db938373f012c2 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 10 Dec 2012 10:33:55 +0000 Subject: [PATCH 061/464] Update AUTHORS Sorry Jorge! --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 864c95c..82a1dfa 100644 --- a/AUTHORS +++ b/AUTHORS @@ -127,3 +127,4 @@ that much better: * Adrian Scott * Peter Teichman * Jakub Kot + * Jorge Bastida From 3b3738b36ba007460a9085472c5013cabc664956 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 10 Dec 2012 15:16:31 +0000 Subject: [PATCH 062/464] 0.7.9 --- docs/changelog.rst | 4 ++++ mongoengine/__init__.py | 2 +- mongoengine/base.py | 8 +++++--- mongoengine/fields.py | 3 ++- python-mongoengine.spec | 2 +- tests/test_document.py | 16 ++++++++++++++-- tests/test_fields.py | 2 +- 7 files changed, 28 insertions(+), 9 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 92770af..d93bf13 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -2,6 +2,10 @@ Changelog ========= +Changes in 0.7.9 +================ +- Better fix handling for old style _types +- Embedded SequenceFields follow collection naming convention Changes in 0.7.8 ================ diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index 08cabaa..b67512d 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -12,7 +12,7 @@ from signals import * __all__ = (document.__all__ + fields.__all__ + connection.__all__ + queryset.__all__ + signals.__all__) -VERSION = (0, 7, 8) +VERSION = (0, 7, 9) def get_version(): diff --git a/mongoengine/base.py b/mongoengine/base.py index 208e0e5..013afe7 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -122,9 +122,11 @@ def get_document(name): doc = _document_registry.get(name, None) if not doc: # Possible old style name - end = name.split('.')[-1] - possible_match = [k for k in _document_registry.keys() if k == end] - if len(possible_match) == 1 and end != name: + single_end = 
name.split('.')[-1] + compound_end = '.%s' % single_end + possible_match = [k for k in _document_registry.keys() + if k.endswith(compound_end) or k == single_end] + if len(possible_match) == 1: doc = _document_registry.get(possible_match.pop(), None) if not doc: raise NotRegistered(""" diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 213c214..de484a1 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1364,7 +1364,8 @@ class SequenceField(IntField): if issubclass(owner, Document): return owner._get_collection_name() else: - return owner._class_name + return ''.join('_%s' % c if c.isupper() else c + for c in owner._class_name).strip('_').lower() def __get__(self, instance, owner): diff --git a/python-mongoengine.spec b/python-mongoengine.spec index f175546..b1ec336 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.7.8 +Version: 0.7.9 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB diff --git a/tests/test_document.py b/tests/test_document.py index a09aaec..cd0ab8f 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -15,7 +15,7 @@ from datetime import datetime from tests.fixtures import Base, Mixin, PickleEmbedded, PickleTest from mongoengine import * -from mongoengine.base import NotRegistered, InvalidDocumentError +from mongoengine.base import NotRegistered, InvalidDocumentError, get_document from mongoengine.queryset import InvalidQueryError from mongoengine.connection import get_db, get_connection @@ -1336,7 +1336,6 @@ class DocumentTest(unittest.TestCase): User.drop_collection() - def test_document_not_registered(self): class Place(Document): @@ -1361,6 +1360,19 @@ class DocumentTest(unittest.TestCase): print Place.objects.all() self.assertRaises(NotRegistered, query_without_importing_nice_place) + def test_document_registry_regressions(self): + + class Location(Document): + name = 
StringField() + meta = {'allow_inheritance': True} + + class Area(Location): + location = ReferenceField('Location', dbref=True) + + Location.drop_collection() + + self.assertEquals(Area, get_document("Area")) + self.assertEquals(Area, get_document("Location.Area")) def test_creation(self): """Ensure that document may be created using keyword arguments. diff --git a/tests/test_fields.py b/tests/test_fields.py index 0483519..28af1b2 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -2201,7 +2201,7 @@ class FieldTest(unittest.TestCase): comments=[Comment(content="NoSQL Rocks"), Comment(content="MongoEngine Rocks")]).save() - c = self.db['mongoengine.counters'].find_one({'_id': 'Comment.id'}) + c = self.db['mongoengine.counters'].find_one({'_id': 'comment.id'}) self.assertEqual(c['next'], 2) post = Post.objects.first() self.assertEqual(1, post.comments[0].id) From 148f8b8a3a2f53b2e086b48b2294b537224c4d33 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Mon, 17 Dec 2012 21:13:45 -0800 Subject: [PATCH 063/464] Only allow QNode instances to be passed as query objects --- mongoengine/queryset.py | 3 +++ tests/test_queryset.py | 8 ++++++++ 2 files changed, 11 insertions(+) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 5a1aa71..e987da9 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -423,6 +423,9 @@ class QuerySet(object): """ query = Q(**query) if q_obj: + # make sure proper query object is passed + if not isinstance(q_obj, QNode): + raise InvalidQueryError('Not a query object: %s. Did you intend to use key=value?' 
% q_obj) query &= q_obj self._query_obj &= query self._mongo_query = None diff --git a/tests/test_queryset.py b/tests/test_queryset.py index 55531a1..c49566c 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -1289,6 +1289,14 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(len(self.Person.objects(Q(age__in=[20]))), 2) self.assertEqual(len(self.Person.objects(Q(age__in=[20, 30]))), 3) + # Test invalid query objs + def wrong_query_objs(): + self.Person.objects('user1') + def wrong_query_objs_filter(): + self.Person.objects('user1') + self.assertRaises(InvalidQueryError, wrong_query_objs) + self.assertRaises(InvalidQueryError, wrong_query_objs_filter) + def test_q_regex(self): """Ensure that Q objects can be queried using regexes. """ From 3e8f02c64babd024c39a913a946f3b6e3dc6fd75 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 19 Dec 2012 11:39:19 +0000 Subject: [PATCH 064/464] Merge sequence field changes --- mongoengine/fields.py | 3 +-- tests/test_fields.py | 1 - 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 65996a4..fe94d35 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1368,8 +1368,7 @@ class SequenceField(BaseField): """ Generate and Increment the counter """ - sequence_name = (self.sequence_name or - self.owner_document._get_collection_name()) + sequence_name = self.get_sequence_name() sequence_id = "%s.%s" % (sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] counter = collection.find_and_modify(query={"_id": sequence_id}, diff --git a/tests/test_fields.py b/tests/test_fields.py index 6e3dc8b..97a2d5f 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -2241,7 +2241,6 @@ class FieldTest(unittest.TestCase): Post(title="MongoEngine", comments=[Comment(content="NoSQL Rocks"), Comment(content="MongoEngine Rocks")]).save() - import ipdb; ipdb.set_trace(); c = 
self.db['mongoengine.counters'].find_one({'_id': 'comment.id'}) self.assertEqual(c['next'], 2) post = Post.objects.first() From 1a131ff1207cc116e5b90bcf3f70268dd6f0061f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 19 Dec 2012 12:16:12 +0000 Subject: [PATCH 065/464] Only allow QNode instances to be passed as query objects (MongoEngine/mongoengine#199) --- AUTHORS | 1 + docs/changelog.rst | 1 + 2 files changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index 23afe6f..2519c89 100644 --- a/AUTHORS +++ b/AUTHORS @@ -129,3 +129,4 @@ that much better: * Peter Teichman * Jakub Kot * Jorge Bastida + * Stefan Wójcik \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 109940e..845f3a9 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -19,6 +19,7 @@ Changes in 0.8 - Added _instance to EmbeddedDocuments pointing to the parent (MongoEngine/mongoengine#139) - Inheritance is off by default (MongoEngine/mongoengine#122) - Remove _types and just use _cls for inheritance (MongoEngine/mongoengine#148) +- Only allow QNode instances to be passed as query objects (MongoEngine/mongoengine#199) Changes in 0.7.9 ================ From c528ac09d6ab25fdb2bf7972d51da42995a464e2 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 19 Dec 2012 12:29:46 +0000 Subject: [PATCH 066/464] Fix merge for QNode checks --- mongoengine/queryset/queryset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 24836eb..fda8a75 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -17,7 +17,7 @@ from mongoengine.errors import (OperationError, NotUniqueError, from . 
import transform from .field_list import QueryFieldList -from .visitor import Q +from .visitor import Q, QNode __all__ = ('QuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL') From 9cc02d4dbe27b6a8b12f958b4fae58573264dde0 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 19 Dec 2012 12:32:06 +0000 Subject: [PATCH 067/464] Dynamic fields are now validated on save (MongoEngine/mongoengine#153) (MongoEngine/mongoengine#154) --- AUTHORS | 3 ++- docs/changelog.rst | 1 + mongoengine/base/document.py | 3 +++ mongoengine/fields.py | 4 ++++ tests/document/dynamic.py | 23 +++++++++++++++++++++++ 5 files changed, 33 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 2519c89..794f297 100644 --- a/AUTHORS +++ b/AUTHORS @@ -129,4 +129,5 @@ that much better: * Peter Teichman * Jakub Kot * Jorge Bastida - * Stefan Wójcik \ No newline at end of file + * Stefan Wójcik + * Pete Campton \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 845f3a9..fa0fe10 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -20,6 +20,7 @@ Changes in 0.8 - Inheritance is off by default (MongoEngine/mongoengine#122) - Remove _types and just use _cls for inheritance (MongoEngine/mongoengine#148) - Only allow QNode instances to be passed as query objects (MongoEngine/mongoengine#199) +- Dynamic fields are now validated on save (MongoEngine/mongoengine#153) (MongoEngine/mongoengine#154) Changes in 0.7.9 ================ diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 2dd4b03..a3f10f5 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -245,6 +245,9 @@ class BaseDocument(object): # Get a list of tuples of field names and their current values fields = [(field, self._data.get(name)) for name, field in self._fields.items()] + if self._dynamic: + fields += [(field, self._data.get(name)) + for name, field in self._dynamic_fields.items()] EmbeddedDocumentField = 
_import_class("EmbeddedDocumentField") GenericEmbeddedDocumentField = _import_class("GenericEmbeddedDocumentField") diff --git a/mongoengine/fields.py b/mongoengine/fields.py index fe94d35..73c0db4 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -564,6 +564,10 @@ class DynamicField(BaseField): return StringField().prepare_query_value(op, value) return self.to_mongo(value) + def validate(self, value, clean=True): + if hasattr(value, "validate"): + value.validate(clean=clean) + class ListField(ComplexBaseField): """A list field that wraps a standard field, allowing multiple instances diff --git a/tests/document/dynamic.py b/tests/document/dynamic.py index d879b54..ca0db0a 100644 --- a/tests/document/dynamic.py +++ b/tests/document/dynamic.py @@ -122,6 +122,29 @@ class DynamicTest(unittest.TestCase): self.assertEqual(1, self.Person.objects(misc__hello='world').count()) + def test_complex_embedded_document_validation(self): + """Ensure embedded dynamic documents may be validated""" + class Embedded(DynamicEmbeddedDocument): + content = URLField() + + class Doc(DynamicDocument): + pass + + Doc.drop_collection() + doc = Doc() + + embedded_doc_1 = Embedded(content='http://mongoengine.org') + embedded_doc_1.validate() + + embedded_doc_2 = Embedded(content='this is not a url') + with self.assertRaises(ValidationError): + embedded_doc_2.validate() + + doc.embedded_field_1 = embedded_doc_1 + doc.embedded_field_2 = embedded_doc_2 + with self.assertRaises(ValidationError): + doc.validate() + def test_inheritance(self): """Ensure that dynamic document plays nice with inheritance""" class Employee(self.Person): From 7f732459a130d2c0a870c1803c9694002f325e17 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 19 Dec 2012 12:34:02 +0000 Subject: [PATCH 068/464] Updated tickets links as now default to MongoEngine/mongoengine --- docs/changelog.rst | 104 ++++++++++++++++++++++----------------------- 1 file changed, 52 insertions(+), 52 deletions(-) diff --git 
a/docs/changelog.rst b/docs/changelog.rst index fa0fe10..8fc279e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,23 +4,23 @@ Changelog Changes in 0.8 ============== -- Fixed db_alias and inherited Documents (MongoEngine/mongoengine#143) -- Documentation update for document errors (MongoEngine/mongoengine#124) -- Deprecated `get_or_create` (MongoEngine/mongoengine#35) -- Updated inheritable objects created by upsert now contain _cls (MongoEngine/mongoengine#118) -- Added support for creating documents with embedded documents in a single operation (MongoEngine/mongoengine#6) -- Added to_json and from_json to Document (MongoEngine/mongoengine#1) -- Added to_json and from_json to QuerySet (MongoEngine/mongoengine#131) -- Updated index creation now tied to Document class (MongoEngine/mongoengine#102) -- Added none() to queryset (MongoEngine/mongoengine#127) -- Updated SequenceFields to allow post processing of the calculated counter value (MongoEngine/mongoengine#141) -- Added clean method to documents for pre validation data cleaning (MongoEngine/mongoengine#60) -- Added support setting for read prefrence at a query level (MongoEngine/mongoengine#157) -- Added _instance to EmbeddedDocuments pointing to the parent (MongoEngine/mongoengine#139) -- Inheritance is off by default (MongoEngine/mongoengine#122) -- Remove _types and just use _cls for inheritance (MongoEngine/mongoengine#148) -- Only allow QNode instances to be passed as query objects (MongoEngine/mongoengine#199) -- Dynamic fields are now validated on save (MongoEngine/mongoengine#153) (MongoEngine/mongoengine#154) +- Fixed db_alias and inherited Documents (#143) +- Documentation update for document errors (#124) +- Deprecated `get_or_create` (#35) +- Updated inheritable objects created by upsert now contain _cls (#118) +- Added support for creating documents with embedded documents in a single operation (#6) +- Added to_json and from_json to Document (#1) +- Added to_json and from_json to 
QuerySet (#131) +- Updated index creation now tied to Document class (#102) +- Added none() to queryset (#127) +- Updated SequenceFields to allow post processing of the calculated counter value (#141) +- Added clean method to documents for pre validation data cleaning (#60) +- Added support setting for read prefrence at a query level (#157) +- Added _instance to EmbeddedDocuments pointing to the parent (#139) +- Inheritance is off by default (#122) +- Remove _types and just use _cls for inheritance (#148) +- Only allow QNode instances to be passed as query objects (#199) +- Dynamic fields are now validated on save (#153) (#154) Changes in 0.7.9 ================ @@ -29,12 +29,12 @@ Changes in 0.7.9 Changes in 0.7.8 ================ -- Fix sequence fields in embedded documents (MongoEngine/mongoengine#166) -- Fix query chaining with .order_by() (MongoEngine/mongoengine#176) -- Added optional encoding and collection config for Django sessions (MongoEngine/mongoengine#180, MongoEngine/mongoengine#181, MongoEngine/mongoengine#183) -- Fixed EmailField so can add extra validation (MongoEngine/mongoengine#173, MongoEngine/mongoengine#174, MongoEngine/mongoengine#187) -- Fixed bulk inserts can now handle custom pk's (MongoEngine/mongoengine#192) -- Added as_pymongo method to return raw or cast results from pymongo (MongoEngine/mongoengine#193) +- Fix sequence fields in embedded documents (#166) +- Fix query chaining with .order_by() (#176) +- Added optional encoding and collection config for Django sessions (#180, #181, #183) +- Fixed EmailField so can add extra validation (#173, #174, #187) +- Fixed bulk inserts can now handle custom pk's (#192) +- Added as_pymongo method to return raw or cast results from pymongo (#193) Changes in 0.7.7 ================ @@ -42,70 +42,70 @@ Changes in 0.7.7 Changes in 0.7.6 ================ -- Unicode fix for repr (MongoEngine/mongoengine#133) -- Allow updates with match operators (MongoEngine/mongoengine#144) -- Updated URLField - now can 
have a override the regex (MongoEngine/mongoengine#136) +- Unicode fix for repr (#133) +- Allow updates with match operators (#144) +- Updated URLField - now can have a override the regex (#136) - Allow Django AuthenticationBackends to work with Django user (hmarr/mongoengine#573) -- Fixed reload issue with ReferenceField where dbref=False (MongoEngine/mongoengine#138) +- Fixed reload issue with ReferenceField where dbref=False (#138) Changes in 0.7.5 ================ -- ReferenceFields with dbref=False use ObjectId instead of strings (MongoEngine/mongoengine#134) - See ticket for upgrade notes (https://github.com/MongoEngine/mongoengine/issues/134) +- ReferenceFields with dbref=False use ObjectId instead of strings (#134) + See ticket for upgrade notes (#134) Changes in 0.7.4 ================ -- Fixed index inheritance issues - firmed up testcases (MongoEngine/mongoengine#123) (MongoEngine/mongoengine#125) +- Fixed index inheritance issues - firmed up testcases (#123) (#125) Changes in 0.7.3 ================ -- Reverted EmbeddedDocuments meta handling - now can turn off inheritance (MongoEngine/mongoengine#119) +- Reverted EmbeddedDocuments meta handling - now can turn off inheritance (#119) Changes in 0.7.2 ================ -- Update index spec generation so its not destructive (MongoEngine/mongoengine#113) +- Update index spec generation so its not destructive (#113) Changes in 0.7.1 ================= -- Fixed index spec inheritance (MongoEngine/mongoengine#111) +- Fixed index spec inheritance (#111) Changes in 0.7.0 ================= -- Updated queryset.delete so you can use with skip / limit (MongoEngine/mongoengine#107) -- Updated index creation allows kwargs to be passed through refs (MongoEngine/mongoengine#104) -- Fixed Q object merge edge case (MongoEngine/mongoengine#109) +- Updated queryset.delete so you can use with skip / limit (#107) +- Updated index creation allows kwargs to be passed through refs (#104) +- Fixed Q object merge edge case (#109) - 
Fixed reloading on sharded documents (hmarr/mongoengine#569) -- Added NotUniqueError for duplicate keys (MongoEngine/mongoengine#62) -- Added custom collection / sequence naming for SequenceFields (MongoEngine/mongoengine#92) -- Fixed UnboundLocalError in composite index with pk field (MongoEngine/mongoengine#88) +- Added NotUniqueError for duplicate keys (#62) +- Added custom collection / sequence naming for SequenceFields (#92) +- Fixed UnboundLocalError in composite index with pk field (#88) - Updated ReferenceField's to optionally store ObjectId strings - this will become the default in 0.8 (MongoEngine/mongoengine#89) + this will become the default in 0.8 (#89) - Added FutureWarning - save will default to `cascade=False` in 0.8 -- Added example of indexing embedded document fields (MongoEngine/mongoengine#75) -- Fixed ImageField resizing when forcing size (MongoEngine/mongoengine#80) -- Add flexibility for fields handling bad data (MongoEngine/mongoengine#78) +- Added example of indexing embedded document fields (#75) +- Fixed ImageField resizing when forcing size (#80) +- Add flexibility for fields handling bad data (#78) - Embedded Documents no longer handle meta definitions -- Use weakref proxies in base lists / dicts (MongoEngine/mongoengine#74) +- Use weakref proxies in base lists / dicts (#74) - Improved queryset filtering (hmarr/mongoengine#554) - Fixed Dynamic Documents and Embedded Documents (hmarr/mongoengine#561) -- Fixed abstract classes and shard keys (MongoEngine/mongoengine#64) +- Fixed abstract classes and shard keys (#64) - Fixed Python 2.5 support - Added Python 3 support (thanks to Laine Heron) Changes in 0.6.20 ================= -- Added support for distinct and db_alias (MongoEngine/mongoengine#59) +- Added support for distinct and db_alias (#59) - Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554) -- Fixed BinaryField lookup re (MongoEngine/mongoengine#48) +- Fixed BinaryField lookup re (#48) 
Changes in 0.6.19 ================= -- Added Binary support to UUID (MongoEngine/mongoengine#47) -- Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46) -- Fixed BinaryField python value issue (MongoEngine/mongoengine#48) -- Fixed SequenceField non numeric value lookup (MongoEngine/mongoengine#41) -- Fixed queryset manager issue (MongoEngine/mongoengine#52) +- Added Binary support to UUID (#47) +- Fixed MapField lookup for fields without declared lookups (#46) +- Fixed BinaryField python value issue (#48) +- Fixed SequenceField non numeric value lookup (#41) +- Fixed queryset manager issue (#52) - Fixed FileField comparision (hmarr/mongoengine#547) Changes in 0.6.18 From 25cdf16cc0ed045d6f69c6495d37dfe51b86c563 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 19 Dec 2012 12:37:37 +0000 Subject: [PATCH 069/464] Updated travis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 1aa9774..c7cc271 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,8 +9,8 @@ python: - 3.2 env: - PYMONGO=dev + - PYMONGO=2.4.1 - PYMONGO=2.3 - - PYMONGO=2.2 install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install zlib1g zlib1g-dev; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/; fi From 420c3e0073c9a055b4db062863352b93a4b2911f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 19 Dec 2012 12:51:42 +0000 Subject: [PATCH 070/464] Fixing for python2.5 closes #188 --- mongoengine/base/__init__.py | 10 +++++----- mongoengine/base/document.py | 6 +++--- mongoengine/base/fields.py | 4 ++-- mongoengine/base/metaclasses.py | 4 ++-- mongoengine/errors.py | 2 +- mongoengine/queryset/__init__.py | 10 +++++----- mongoengine/queryset/manager.py | 2 +- mongoengine/queryset/queryset.py | 8 +++++--- tests/__init__.py | 4 ++-- tests/document/dynamic.py | 6 ++---- 10 files changed, 28 insertions(+), 28 deletions(-) diff --git 
a/mongoengine/base/__init__.py b/mongoengine/base/__init__.py index 1d4a6eb..ce119b3 100644 --- a/mongoengine/base/__init__.py +++ b/mongoengine/base/__init__.py @@ -1,5 +1,5 @@ -from .common import * -from .datastructures import * -from .document import * -from .fields import * -from .metaclasses import * +from mongoengine.base.common import * +from mongoengine.base.datastructures import * +from mongoengine.base.document import * +from mongoengine.base.fields import * +from mongoengine.base.metaclasses import * diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index a3f10f5..affc20e 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -12,9 +12,9 @@ from mongoengine.errors import (ValidationError, InvalidDocumentError, from mongoengine.python_support import (PY3, UNICODE_KWARGS, txt_type, to_str_keys_recursive) -from .common import get_document, ALLOW_INHERITANCE -from .datastructures import BaseDict, BaseList -from .fields import ComplexBaseField +from mongoengine.base.common import get_document, ALLOW_INHERITANCE +from mongoengine.base.datastructures import BaseDict, BaseList +from mongoengine.base.fields import ComplexBaseField __all__ = ('BaseDocument', 'NON_FIELD_ERRORS') diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index 11719b5..a892fbd 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -7,8 +7,8 @@ from bson import DBRef, ObjectId from mongoengine.common import _import_class from mongoengine.errors import ValidationError -from .common import ALLOW_INHERITANCE -from .datastructures import BaseDict, BaseList +from mongoengine.base.common import ALLOW_INHERITANCE +from mongoengine.base.datastructures import BaseDict, BaseList __all__ = ("BaseField", "ComplexBaseField", "ObjectIdField") diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index c6c4db1..af39e14 100644 --- a/mongoengine/base/metaclasses.py +++ 
b/mongoengine/base/metaclasses.py @@ -9,8 +9,8 @@ from mongoengine.queryset import (DO_NOTHING, DoesNotExist, MultipleObjectsReturned, QuerySet, QuerySetManager) -from .common import _document_registry, ALLOW_INHERITANCE -from .fields import BaseField, ComplexBaseField, ObjectIdField +from mongoengine.base.common import _document_registry, ALLOW_INHERITANCE +from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField __all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass') diff --git a/mongoengine/errors.py b/mongoengine/errors.py index eb72503..9cfcd1d 100644 --- a/mongoengine/errors.py +++ b/mongoengine/errors.py @@ -1,6 +1,6 @@ from collections import defaultdict -from .python_support import txt_type +from mongoengine.python_support import txt_type __all__ = ('NotRegistered', 'InvalidDocumentError', 'ValidationError') diff --git a/mongoengine/queryset/__init__.py b/mongoengine/queryset/__init__.py index f6feeab..026a7ac 100644 --- a/mongoengine/queryset/__init__.py +++ b/mongoengine/queryset/__init__.py @@ -1,11 +1,11 @@ from mongoengine.errors import (DoesNotExist, MultipleObjectsReturned, InvalidQueryError, OperationError, NotUniqueError) -from .field_list import * -from .manager import * -from .queryset import * -from .transform import * -from .visitor import * +from mongoengine.queryset.field_list import * +from mongoengine.queryset.manager import * +from mongoengine.queryset.queryset import * +from mongoengine.queryset.transform import * +from mongoengine.queryset.visitor import * __all__ = (field_list.__all__ + manager.__all__ + queryset.__all__ + transform.__all__ + visitor.__all__) diff --git a/mongoengine/queryset/manager.py b/mongoengine/queryset/manager.py index 08d4d3a..d9f9992 100644 --- a/mongoengine/queryset/manager.py +++ b/mongoengine/queryset/manager.py @@ -1,5 +1,5 @@ from functools import partial -from .queryset import QuerySet +from mongoengine.queryset.queryset import QuerySet __all__ = ('queryset_manager', 
'QuerySetManager') diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index fda8a75..3ea9f23 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1,3 +1,5 @@ +from __future__ import absolute_import + import copy import itertools import operator @@ -15,9 +17,9 @@ from mongoengine.common import _import_class from mongoengine.errors import (OperationError, NotUniqueError, InvalidQueryError) -from . import transform -from .field_list import QueryFieldList -from .visitor import Q, QNode +from mongoengine.queryset import transform +from mongoengine.queryset.field_list import QueryFieldList +from mongoengine.queryset.visitor import Q, QNode __all__ = ('QuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL') diff --git a/tests/__init__.py b/tests/__init__.py index f2a43b0..ccc90f7 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,2 +1,2 @@ -from .all_warnings import AllWarnings -from .document import * \ No newline at end of file +from all_warnings import AllWarnings +from document import * \ No newline at end of file diff --git a/tests/document/dynamic.py b/tests/document/dynamic.py index ca0db0a..4848b8f 100644 --- a/tests/document/dynamic.py +++ b/tests/document/dynamic.py @@ -137,13 +137,11 @@ class DynamicTest(unittest.TestCase): embedded_doc_1.validate() embedded_doc_2 = Embedded(content='this is not a url') - with self.assertRaises(ValidationError): - embedded_doc_2.validate() + self.assertRaises(ValidationError, embedded_doc_2.validate) doc.embedded_field_1 = embedded_doc_1 doc.embedded_field_2 = embedded_doc_2 - with self.assertRaises(ValidationError): - doc.validate() + self.assertRaises(ValidationError, doc.validate) def test_inheritance(self): """Ensure that dynamic document plays nice with inheritance""" From 50b755db0c535fd96b374be88d79410ebc356838 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 19 Dec 2012 13:35:37 +0000 Subject: [PATCH 071/464] Split out queryset tests 
--- tests/__init__.py | 3 +- tests/queryset/__init__.py | 5 + tests/queryset/field_list.py | 67 ++ .../queryset.py} | 571 +----------------- tests/queryset/transform.py | 148 +++++ tests/queryset/visitor.py | 310 ++++++++++ 6 files changed, 565 insertions(+), 539 deletions(-) create mode 100644 tests/queryset/__init__.py create mode 100644 tests/queryset/field_list.py rename tests/{test_queryset.py => queryset/queryset.py} (85%) create mode 100644 tests/queryset/transform.py create mode 100644 tests/queryset/visitor.py diff --git a/tests/__init__.py b/tests/__init__.py index ccc90f7..152a8ce 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,2 +1,3 @@ from all_warnings import AllWarnings -from document import * \ No newline at end of file +from document import * +from queryset import * \ No newline at end of file diff --git a/tests/queryset/__init__.py b/tests/queryset/__init__.py new file mode 100644 index 0000000..93cb8c2 --- /dev/null +++ b/tests/queryset/__init__.py @@ -0,0 +1,5 @@ + +from transform import * +from field_list import * +from queryset import * +from visitor import * \ No newline at end of file diff --git a/tests/queryset/field_list.py b/tests/queryset/field_list.py new file mode 100644 index 0000000..f3b457b --- /dev/null +++ b/tests/queryset/field_list.py @@ -0,0 +1,67 @@ +import sys +sys.path[0:0] = [""] + +import unittest + +from mongoengine import * +from mongoengine.queryset import QueryFieldList + +__all__ = ("QueryFieldListTest",) + +class QueryFieldListTest(unittest.TestCase): + + def test_empty(self): + q = QueryFieldList() + self.assertFalse(q) + + q = QueryFieldList(always_include=['_cls']) + self.assertFalse(q) + + def test_include_include(self): + q = QueryFieldList() + q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {'a': True, 'b': True}) + q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {'b': True}) + + def 
test_include_exclude(self): + q = QueryFieldList() + q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {'a': True, 'b': True}) + q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) + self.assertEqual(q.as_dict(), {'a': True}) + + def test_exclude_exclude(self): + q = QueryFieldList() + q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) + self.assertEqual(q.as_dict(), {'a': False, 'b': False}) + q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) + self.assertEqual(q.as_dict(), {'a': False, 'b': False, 'c': False}) + + def test_exclude_include(self): + q = QueryFieldList() + q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) + self.assertEqual(q.as_dict(), {'a': False, 'b': False}) + q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {'c': True}) + + def test_always_include(self): + q = QueryFieldList(always_include=['x', 'y']) + q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) + q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'c': True}) + + def test_reset(self): + q = QueryFieldList(always_include=['x', 'y']) + q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) + q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'c': True}) + q.reset() + self.assertFalse(q) + q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'b': True, 'c': True}) + + def test_using_a_slice(self): + q = QueryFieldList() + q += QueryFieldList(fields=['a'], value={"$slice": 5}) + self.assertEqual(q.as_dict(), {'a': {"$slice": 5}}) diff --git a/tests/test_queryset.py b/tests/queryset/queryset.py similarity index 85% rename from tests/test_queryset.py rename to 
tests/queryset/queryset.py index 07725d4..e3e0215 100644 --- a/tests/test_queryset.py +++ b/tests/queryset/queryset.py @@ -18,12 +18,13 @@ from mongoengine import * from mongoengine.connection import get_connection from mongoengine.python_support import PY3 from mongoengine.tests import query_counter -from mongoengine.queryset import (Q, QuerySet, QuerySetManager, +from mongoengine.queryset import (QuerySet, QuerySetManager, MultipleObjectsReturned, DoesNotExist, QueryFieldList, queryset_manager) -from mongoengine.queryset import transform from mongoengine.errors import InvalidQueryError +__all__ = ("QuerySetTest",) + class QuerySetTest(unittest.TestCase): @@ -47,22 +48,6 @@ class QuerySetTest(unittest.TestCase): self.assertTrue(isinstance(self.Person.objects._collection, pymongo.collection.Collection)) - def test_transform_query(self): - """Ensure that the _transform_query function operates correctly. - """ - self.assertEqual(transform.query(name='test', age=30), - {'name': 'test', 'age': 30}) - self.assertEqual(transform.query(age__lt=30), - {'age': {'$lt': 30}}) - self.assertEqual(transform.query(age__gt=20, age__lt=50), - {'age': {'$gt': 20, '$lt': 50}}) - self.assertEqual(transform.query(age=20, age__gt=50), - {'age': 20}) - self.assertEqual(transform.query(friend__age__gte=30), - {'friend.age': {'$gte': 30}}) - self.assertEqual(transform.query(name__exists=True), - {'name': {'$exists': True}}) - def test_cannot_perform_joins_references(self): class BlogPost(Document): @@ -264,30 +249,6 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(list(A.objects.none()), []) self.assertEqual(list(A.objects.none().all()), []) - def test_chaining(self): - class A(Document): - pass - - class B(Document): - a = ReferenceField(A) - - A.drop_collection() - B.drop_collection() - - a1 = A().save() - a2 = A().save() - - B(a=a1).save() - - # Works - q1 = B.objects.filter(a__in=[a1, a2], a=a1)._query - - # Doesn't work - q2 = B.objects.filter(a__in=[a1, a2]) - q2 = 
q2.filter(a=a1)._query - - self.assertEqual(q1, q2) - def test_update_write_options(self): """Test that passing write_options works""" @@ -830,48 +791,30 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(obj, person) obj = self.Person.objects(name__contains='Van').first() self.assertEqual(obj, None) - obj = self.Person.objects(Q(name__contains='van')).first() - self.assertEqual(obj, person) - obj = self.Person.objects(Q(name__contains='Van')).first() - self.assertEqual(obj, None) # Test icontains obj = self.Person.objects(name__icontains='Van').first() self.assertEqual(obj, person) - obj = self.Person.objects(Q(name__icontains='Van')).first() - self.assertEqual(obj, person) # Test startswith obj = self.Person.objects(name__startswith='Guido').first() self.assertEqual(obj, person) obj = self.Person.objects(name__startswith='guido').first() self.assertEqual(obj, None) - obj = self.Person.objects(Q(name__startswith='Guido')).first() - self.assertEqual(obj, person) - obj = self.Person.objects(Q(name__startswith='guido')).first() - self.assertEqual(obj, None) # Test istartswith obj = self.Person.objects(name__istartswith='guido').first() self.assertEqual(obj, person) - obj = self.Person.objects(Q(name__istartswith='guido')).first() - self.assertEqual(obj, person) # Test endswith obj = self.Person.objects(name__endswith='Rossum').first() self.assertEqual(obj, person) obj = self.Person.objects(name__endswith='rossuM').first() self.assertEqual(obj, None) - obj = self.Person.objects(Q(name__endswith='Rossum')).first() - self.assertEqual(obj, person) - obj = self.Person.objects(Q(name__endswith='rossuM')).first() - self.assertEqual(obj, None) # Test iendswith obj = self.Person.objects(name__iendswith='rossuM').first() self.assertEqual(obj, person) - obj = self.Person.objects(Q(name__iendswith='rossuM')).first() - self.assertEqual(obj, person) # Test exact obj = self.Person.objects(name__exact='Guido van Rossum').first() @@ -880,28 +823,18 @@ class 
QuerySetTest(unittest.TestCase): self.assertEqual(obj, None) obj = self.Person.objects(name__exact='Guido van Rossu').first() self.assertEqual(obj, None) - obj = self.Person.objects(Q(name__exact='Guido van Rossum')).first() - self.assertEqual(obj, person) - obj = self.Person.objects(Q(name__exact='Guido van rossum')).first() - self.assertEqual(obj, None) - obj = self.Person.objects(Q(name__exact='Guido van Rossu')).first() - self.assertEqual(obj, None) # Test iexact obj = self.Person.objects(name__iexact='gUIDO VAN rOSSUM').first() self.assertEqual(obj, person) obj = self.Person.objects(name__iexact='gUIDO VAN rOSSU').first() self.assertEqual(obj, None) - obj = self.Person.objects(Q(name__iexact='gUIDO VAN rOSSUM')).first() - self.assertEqual(obj, person) - obj = self.Person.objects(Q(name__iexact='gUIDO VAN rOSSU')).first() - self.assertEqual(obj, None) # Test unsafe expressions person = self.Person(name='Guido van Rossum [.\'Geek\']') person.save() - obj = self.Person.objects(Q(name__icontains='[.\'Geek')).first() + obj = self.Person.objects(name__icontains='[.\'Geek').first() self.assertEqual(obj, person) def test_not(self): @@ -944,14 +877,14 @@ class QuerySetTest(unittest.TestCase): blog_3.save() blog_post_1 = BlogPost(blog=blog_1, title="Blog Post #1", - is_published = True, - published_date=datetime(2010, 1, 5, 0, 0 ,0)) + is_published=True, + published_date=datetime(2010, 1, 5, 0, 0, 0)) blog_post_2 = BlogPost(blog=blog_2, title="Blog Post #2", - is_published = True, - published_date=datetime(2010, 1, 6, 0, 0 ,0)) + is_published=True, + published_date=datetime(2010, 1, 6, 0, 0, 0)) blog_post_3 = BlogPost(blog=blog_3, title="Blog Post #3", - is_published = True, - published_date=datetime(2010, 1, 7, 0, 0 ,0)) + is_published=True, + published_date=datetime(2010, 1, 7, 0, 0, 0)) blog_post_1.save() blog_post_2.save() @@ -971,21 +904,6 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() Blog.drop_collection() - def test_raw_and_merging(self): - 
class Doc(Document): - meta = {'allow_inheritance': False} - - raw_query = Doc.objects(__raw__={'deleted': False, - 'scraped': 'yes', - '$nor': [{'views.extracted': 'no'}, - {'attachments.views.extracted':'no'}] - })._query - - expected = {'deleted': False, 'scraped': 'yes', - '$nor': [{'views.extracted': 'no'}, - {'attachments.views.extracted': 'no'}]} - self.assertEqual(expected, raw_query) - def test_ordering(self): """Ensure default ordering is applied and can be overridden. """ @@ -1000,11 +918,11 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() blog_post_1 = BlogPost(title="Blog Post #1", - published_date=datetime(2010, 1, 5, 0, 0 ,0)) + published_date=datetime(2010, 1, 5, 0, 0, 0)) blog_post_2 = BlogPost(title="Blog Post #2", - published_date=datetime(2010, 1, 6, 0, 0 ,0)) + published_date=datetime(2010, 1, 6, 0, 0, 0)) blog_post_3 = BlogPost(title="Blog Post #3", - published_date=datetime(2010, 1, 7, 0, 0 ,0)) + published_date=datetime(2010, 1, 7, 0, 0, 0)) blog_post_1.save() blog_post_2.save() @@ -1310,151 +1228,6 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - def test_q(self): - """Ensure that Q objects may be used to query for documents. 
- """ - class BlogPost(Document): - title = StringField() - publish_date = DateTimeField() - published = BooleanField() - - BlogPost.drop_collection() - - post1 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 8), published=False) - post1.save() - - post2 = BlogPost(title='Test 2', publish_date=datetime(2010, 1, 15), published=True) - post2.save() - - post3 = BlogPost(title='Test 3', published=True) - post3.save() - - post4 = BlogPost(title='Test 4', publish_date=datetime(2010, 1, 8)) - post4.save() - - post5 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 15)) - post5.save() - - post6 = BlogPost(title='Test 1', published=False) - post6.save() - - # Check ObjectId lookup works - obj = BlogPost.objects(id=post1.id).first() - self.assertEqual(obj, post1) - - # Check Q object combination with one does not exist - q = BlogPost.objects(Q(title='Test 5') | Q(published=True)) - posts = [post.id for post in q] - - published_posts = (post2, post3) - self.assertTrue(all(obj.id in posts for obj in published_posts)) - - q = BlogPost.objects(Q(title='Test 1') | Q(published=True)) - posts = [post.id for post in q] - published_posts = (post1, post2, post3, post5, post6) - self.assertTrue(all(obj.id in posts for obj in published_posts)) - - # Check Q object combination - date = datetime(2010, 1, 10) - q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True)) - posts = [post.id for post in q] - - published_posts = (post1, post2, post3, post4) - self.assertTrue(all(obj.id in posts for obj in published_posts)) - - self.assertFalse(any(obj.id in posts for obj in [post5, post6])) - - BlogPost.drop_collection() - - # Check the 'in' operator - self.Person(name='user1', age=20).save() - self.Person(name='user2', age=20).save() - self.Person(name='user3', age=30).save() - self.Person(name='user4', age=40).save() - - self.assertEqual(len(self.Person.objects(Q(age__in=[20]))), 2) - self.assertEqual(len(self.Person.objects(Q(age__in=[20, 30]))), 3) - - # Test 
invalid query objs - def wrong_query_objs(): - self.Person.objects('user1') - def wrong_query_objs_filter(): - self.Person.objects('user1') - self.assertRaises(InvalidQueryError, wrong_query_objs) - self.assertRaises(InvalidQueryError, wrong_query_objs_filter) - - def test_q_regex(self): - """Ensure that Q objects can be queried using regexes. - """ - person = self.Person(name='Guido van Rossum') - person.save() - - import re - obj = self.Person.objects(Q(name=re.compile('^Gui'))).first() - self.assertEqual(obj, person) - obj = self.Person.objects(Q(name=re.compile('^gui'))).first() - self.assertEqual(obj, None) - - obj = self.Person.objects(Q(name=re.compile('^gui', re.I))).first() - self.assertEqual(obj, person) - - obj = self.Person.objects(Q(name__not=re.compile('^bob'))).first() - self.assertEqual(obj, person) - - obj = self.Person.objects(Q(name__not=re.compile('^Gui'))).first() - self.assertEqual(obj, None) - - def test_q_lists(self): - """Ensure that Q objects query ListFields correctly. 
- """ - class BlogPost(Document): - tags = ListField(StringField()) - - BlogPost.drop_collection() - - BlogPost(tags=['python', 'mongo']).save() - BlogPost(tags=['python']).save() - - self.assertEqual(len(BlogPost.objects(Q(tags='mongo'))), 1) - self.assertEqual(len(BlogPost.objects(Q(tags='python'))), 2) - - BlogPost.drop_collection() - - def test_raw_query_and_Q_objects(self): - """ - Test raw plays nicely - """ - class Foo(Document): - name = StringField() - a = StringField() - b = StringField() - c = StringField() - - meta = { - 'allow_inheritance': False - } - - query = Foo.objects(__raw__={'$nor': [{'name': 'bar'}]})._query - self.assertEqual(query, {'$nor': [{'name': 'bar'}]}) - - q1 = {'$or': [{'a': 1}, {'b': 1}]} - query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query - self.assertEqual(query, {'$or': [{'a': 1}, {'b': 1}], 'c': 1}) - - def test_q_merge_queries_edge_case(self): - - class User(Document): - email = EmailField(required=False) - name = StringField() - - User.drop_collection() - pk = ObjectId() - User(email='example@example.com', pk=pk).save() - - self.assertEqual(1, User.objects.filter( - Q(email='example@example.com') | - Q(name='John Doe') - ).limit(2).filter(pk=pk).count()) def test_exec_js_query(self): """Ensure that queries are properly formed for use in exec_js. @@ -1491,13 +1264,6 @@ class QuerySetTest(unittest.TestCase): c = BlogPost.objects(published=False).exec_js(js_func, 'hits') self.assertEqual(c, 1) - # Ensure that Q object queries work - c = BlogPost.objects(Q(published=True)).exec_js(js_func, 'hits') - self.assertEqual(c, 2) - - c = BlogPost.objects(Q(published=False)).exec_js(js_func, 'hits') - self.assertEqual(c, 1) - BlogPost.drop_collection() def test_exec_js_field_sub(self): @@ -2558,56 +2324,6 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - def test_query_field_name(self): - """Ensure that the correct field name is used when querying. 
- """ - class Comment(EmbeddedDocument): - content = StringField(db_field='commentContent') - - class BlogPost(Document): - title = StringField(db_field='postTitle') - comments = ListField(EmbeddedDocumentField(Comment), - db_field='postComments') - - - BlogPost.drop_collection() - - data = {'title': 'Post 1', 'comments': [Comment(content='test')]} - post = BlogPost(**data) - post.save() - - self.assertTrue('postTitle' in - BlogPost.objects(title=data['title'])._query) - self.assertFalse('title' in - BlogPost.objects(title=data['title'])._query) - self.assertEqual(len(BlogPost.objects(title=data['title'])), 1) - - self.assertTrue('_id' in BlogPost.objects(pk=post.id)._query) - self.assertEqual(len(BlogPost.objects(pk=post.id)), 1) - - self.assertTrue('postComments.commentContent' in - BlogPost.objects(comments__content='test')._query) - self.assertEqual(len(BlogPost.objects(comments__content='test')), 1) - - BlogPost.drop_collection() - - def test_query_pk_field_name(self): - """Ensure that the correct "primary key" field name is used when querying - """ - class BlogPost(Document): - title = StringField(primary_key=True, db_field='postTitle') - - BlogPost.drop_collection() - - data = { 'title':'Post 1' } - post = BlogPost(**data) - post.save() - - self.assertTrue('_id' in BlogPost.objects(pk=data['title'])._query) - self.assertTrue('_id' in BlogPost.objects(title=data['title'])._query) - self.assertEqual(len(BlogPost.objects(pk=data['title'])), 1) - - BlogPost.drop_collection() def test_query_value_conversion(self): """Ensure that query values are properly converted when necessary. @@ -3446,227 +3162,6 @@ class QuerySetTest(unittest.TestCase): else: self.assertEqual("[u'A1', u'A2']", "%s" % sorted(self.Person.objects.scalar('name').in_bulk(list(pks)).values())) - -class QTest(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - - def test_empty_q(self): - """Ensure that empty Q objects won't hurt. 
- """ - q1 = Q() - q2 = Q(age__gte=18) - q3 = Q() - q4 = Q(name='test') - q5 = Q() - - class Person(Document): - name = StringField() - age = IntField() - - query = {'$or': [{'age': {'$gte': 18}}, {'name': 'test'}]} - self.assertEqual((q1 | q2 | q3 | q4 | q5).to_query(Person), query) - - query = {'age': {'$gte': 18}, 'name': 'test'} - self.assertEqual((q1 & q2 & q3 & q4 & q5).to_query(Person), query) - - def test_q_with_dbref(self): - """Ensure Q objects handle DBRefs correctly""" - connect(db='mongoenginetest') - - class User(Document): - pass - - class Post(Document): - created_user = ReferenceField(User) - - user = User.objects.create() - Post.objects.create(created_user=user) - - self.assertEqual(Post.objects.filter(created_user=user).count(), 1) - self.assertEqual(Post.objects.filter(Q(created_user=user)).count(), 1) - - def test_and_combination(self): - """Ensure that Q-objects correctly AND together. - """ - class TestDoc(Document): - x = IntField() - y = StringField() - - # Check than an error is raised when conflicting queries are anded - def invalid_combination(): - query = Q(x__lt=7) & Q(x__lt=3) - query.to_query(TestDoc) - self.assertRaises(InvalidQueryError, invalid_combination) - - # Check normal cases work without an error - query = Q(x__lt=7) & Q(x__gt=3) - - q1 = Q(x__lt=7) - q2 = Q(x__gt=3) - query = (q1 & q2).to_query(TestDoc) - self.assertEqual(query, {'x': {'$lt': 7, '$gt': 3}}) - - # More complex nested example - query = Q(x__lt=100) & Q(y__ne='NotMyString') - query &= Q(y__in=['a', 'b', 'c']) & Q(x__gt=-100) - mongo_query = { - 'x': {'$lt': 100, '$gt': -100}, - 'y': {'$ne': 'NotMyString', '$in': ['a', 'b', 'c']}, - } - self.assertEqual(query.to_query(TestDoc), mongo_query) - - def test_or_combination(self): - """Ensure that Q-objects correctly OR together. 
- """ - class TestDoc(Document): - x = IntField() - - q1 = Q(x__lt=3) - q2 = Q(x__gt=7) - query = (q1 | q2).to_query(TestDoc) - self.assertEqual(query, { - '$or': [ - {'x': {'$lt': 3}}, - {'x': {'$gt': 7}}, - ] - }) - - def test_and_or_combination(self): - """Ensure that Q-objects handle ANDing ORed components. - """ - class TestDoc(Document): - x = IntField() - y = BooleanField() - - query = (Q(x__gt=0) | Q(x__exists=False)) - query &= Q(x__lt=100) - self.assertEqual(query.to_query(TestDoc), { - '$or': [ - {'x': {'$lt': 100, '$gt': 0}}, - {'x': {'$lt': 100, '$exists': False}}, - ] - }) - - q1 = (Q(x__gt=0) | Q(x__exists=False)) - q2 = (Q(x__lt=100) | Q(y=True)) - query = (q1 & q2).to_query(TestDoc) - - self.assertEqual(['$or'], query.keys()) - conditions = [ - {'x': {'$lt': 100, '$gt': 0}}, - {'x': {'$lt': 100, '$exists': False}}, - {'x': {'$gt': 0}, 'y': True}, - {'x': {'$exists': False}, 'y': True}, - ] - self.assertEqual(len(conditions), len(query['$or'])) - for condition in conditions: - self.assertTrue(condition in query['$or']) - - def test_or_and_or_combination(self): - """Ensure that Q-objects handle ORing ANDed ORed components. 
:) - """ - class TestDoc(Document): - x = IntField() - y = BooleanField() - - q1 = (Q(x__gt=0) & (Q(y=True) | Q(y__exists=False))) - q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False))) - query = (q1 | q2).to_query(TestDoc) - - self.assertEqual(['$or'], query.keys()) - conditions = [ - {'x': {'$gt': 0}, 'y': True}, - {'x': {'$gt': 0}, 'y': {'$exists': False}}, - {'x': {'$lt': 100}, 'y':False}, - {'x': {'$lt': 100}, 'y': {'$exists': False}}, - ] - self.assertEqual(len(conditions), len(query['$or'])) - for condition in conditions: - self.assertTrue(condition in query['$or']) - - - def test_q_clone(self): - - class TestDoc(Document): - x = IntField() - - TestDoc.drop_collection() - for i in xrange(1, 101): - t = TestDoc(x=i) - t.save() - - # Check normal cases work without an error - test = TestDoc.objects(Q(x__lt=7) & Q(x__gt=3)) - - self.assertEqual(test.count(), 3) - - test2 = test.clone() - self.assertEqual(test2.count(), 3) - self.assertFalse(test2 == test) - - test2.filter(x=6) - self.assertEqual(test2.count(), 1) - self.assertEqual(test.count(), 3) - -class QueryFieldListTest(unittest.TestCase): - def test_empty(self): - q = QueryFieldList() - self.assertFalse(q) - - q = QueryFieldList(always_include=['_cls']) - self.assertFalse(q) - - def test_include_include(self): - q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'a': True, 'b': True}) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'b': True}) - - def test_include_exclude(self): - q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'a': True, 'b': True}) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {'a': True}) - - def test_exclude_exclude(self): - q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) - 
self.assertEqual(q.as_dict(), {'a': False, 'b': False}) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {'a': False, 'b': False, 'c': False}) - - def test_exclude_include(self): - q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {'a': False, 'b': False}) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'c': True}) - - def test_always_include(self): - q = QueryFieldList(always_include=['x', 'y']) - q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'c': True}) - - def test_reset(self): - q = QueryFieldList(always_include=['x', 'y']) - q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'c': True}) - q.reset() - self.assertFalse(q) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'b': True, 'c': True}) - - def test_using_a_slice(self): - q = QueryFieldList() - q += QueryFieldList(fields=['a'], value={"$slice": 5}) - self.assertEqual(q.as_dict(), {'a': {"$slice": 5}}) - def test_elem_match(self): class Foo(EmbeddedDocument): shape = StringField() @@ -3691,6 +3186,26 @@ class QueryFieldListTest(unittest.TestCase): ak = list(Bar.objects(foo__match={'shape': "square", "color": "purple"})) self.assertEqual([b1], ak) + def test_upsert_includes_cls(self): + """Upserts should include _cls information for inheritable classes + """ + + class Test(Document): + test = StringField() + + Test.drop_collection() + Test.objects(test='foo').update_one(upsert=True, set__test='foo') + self.assertFalse('_cls' in Test._collection.find_one()) + + class 
Test(Document): + meta = {'allow_inheritance': True} + test = StringField() + + Test.drop_collection() + + Test.objects(test='foo').update_one(upsert=True, set__test='foo') + self.assertTrue('_cls' in Test._collection.find_one()) + def test_read_preference(self): class Bar(Document): pass @@ -3769,26 +3284,6 @@ class QueryFieldListTest(unittest.TestCase): self.assertEqual(doc_objects, Doc.objects.from_json(json_data)) - def test_upsert_includes_cls(self): - """Upserts should include _cls information for inheritable classes - """ - - class Test(Document): - test = StringField() - - Test.drop_collection() - Test.objects(test='foo').update_one(upsert=True, set__test='foo') - self.assertFalse('_cls' in Test._collection.find_one()) - - class Test(Document): - meta = {'allow_inheritance': True} - test = StringField() - - Test.drop_collection() - - Test.objects(test='foo').update_one(upsert=True, set__test='foo') - self.assertTrue('_cls' in Test._collection.find_one()) - def test_as_pymongo(self): from decimal import Decimal diff --git a/tests/queryset/transform.py b/tests/queryset/transform.py new file mode 100644 index 0000000..666b345 --- /dev/null +++ b/tests/queryset/transform.py @@ -0,0 +1,148 @@ +from __future__ import with_statement +import sys +sys.path[0:0] = [""] + +import unittest + +from mongoengine import * +from mongoengine.queryset import Q +from mongoengine.queryset import transform + +__all__ = ("TransformTest",) + + +class TransformTest(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + + def test_transform_query(self): + """Ensure that the _transform_query function operates correctly. 
+ """ + self.assertEqual(transform.query(name='test', age=30), + {'name': 'test', 'age': 30}) + self.assertEqual(transform.query(age__lt=30), + {'age': {'$lt': 30}}) + self.assertEqual(transform.query(age__gt=20, age__lt=50), + {'age': {'$gt': 20, '$lt': 50}}) + self.assertEqual(transform.query(age=20, age__gt=50), + {'age': 20}) + self.assertEqual(transform.query(friend__age__gte=30), + {'friend.age': {'$gte': 30}}) + self.assertEqual(transform.query(name__exists=True), + {'name': {'$exists': True}}) + + def test_query_field_name(self): + """Ensure that the correct field name is used when querying. + """ + class Comment(EmbeddedDocument): + content = StringField(db_field='commentContent') + + class BlogPost(Document): + title = StringField(db_field='postTitle') + comments = ListField(EmbeddedDocumentField(Comment), + db_field='postComments') + + BlogPost.drop_collection() + + data = {'title': 'Post 1', 'comments': [Comment(content='test')]} + post = BlogPost(**data) + post.save() + + self.assertTrue('postTitle' in + BlogPost.objects(title=data['title'])._query) + self.assertFalse('title' in + BlogPost.objects(title=data['title'])._query) + self.assertEqual(len(BlogPost.objects(title=data['title'])), 1) + + self.assertTrue('_id' in BlogPost.objects(pk=post.id)._query) + self.assertEqual(len(BlogPost.objects(pk=post.id)), 1) + + self.assertTrue('postComments.commentContent' in + BlogPost.objects(comments__content='test')._query) + self.assertEqual(len(BlogPost.objects(comments__content='test')), 1) + + BlogPost.drop_collection() + + def test_query_pk_field_name(self): + """Ensure that the correct "primary key" field name is used when + querying + """ + class BlogPost(Document): + title = StringField(primary_key=True, db_field='postTitle') + + BlogPost.drop_collection() + + data = {'title': 'Post 1'} + post = BlogPost(**data) + post.save() + + self.assertTrue('_id' in BlogPost.objects(pk=data['title'])._query) + self.assertTrue('_id' in 
BlogPost.objects(title=data['title'])._query) + self.assertEqual(len(BlogPost.objects(pk=data['title'])), 1) + + BlogPost.drop_collection() + + def test_chaining(self): + class A(Document): + pass + + class B(Document): + a = ReferenceField(A) + + A.drop_collection() + B.drop_collection() + + a1 = A().save() + a2 = A().save() + + B(a=a1).save() + + # Works + q1 = B.objects.filter(a__in=[a1, a2], a=a1)._query + + # Doesn't work + q2 = B.objects.filter(a__in=[a1, a2]) + q2 = q2.filter(a=a1)._query + + self.assertEqual(q1, q2) + + def test_raw_query_and_Q_objects(self): + """ + Test raw plays nicely + """ + class Foo(Document): + name = StringField() + a = StringField() + b = StringField() + c = StringField() + + meta = { + 'allow_inheritance': False + } + + query = Foo.objects(__raw__={'$nor': [{'name': 'bar'}]})._query + self.assertEqual(query, {'$nor': [{'name': 'bar'}]}) + + q1 = {'$or': [{'a': 1}, {'b': 1}]} + query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query + self.assertEqual(query, {'$or': [{'a': 1}, {'b': 1}], 'c': 1}) + + def test_raw_and_merging(self): + class Doc(Document): + meta = {'allow_inheritance': False} + + raw_query = Doc.objects(__raw__={'deleted': False, + 'scraped': 'yes', + '$nor': [{'views.extracted': 'no'}, + {'attachments.views.extracted':'no'}] + })._query + + expected = {'deleted': False, 'scraped': 'yes', + '$nor': [{'views.extracted': 'no'}, + {'attachments.views.extracted': 'no'}]} + self.assertEqual(expected, raw_query) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/queryset/visitor.py b/tests/queryset/visitor.py new file mode 100644 index 0000000..71c3561 --- /dev/null +++ b/tests/queryset/visitor.py @@ -0,0 +1,310 @@ +from __future__ import with_statement +import sys +sys.path[0:0] = [""] + +import unittest + +from bson import ObjectId +from datetime import datetime + +from mongoengine import * +from mongoengine.queryset import Q +from mongoengine.errors import InvalidQueryError + +__all__ = ("QTest",) + + 
+class QTest(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + + class Person(Document): + name = StringField() + age = IntField() + meta = {'allow_inheritance': True} + + Person.drop_collection() + self.Person = Person + + def test_empty_q(self): + """Ensure that empty Q objects won't hurt. + """ + q1 = Q() + q2 = Q(age__gte=18) + q3 = Q() + q4 = Q(name='test') + q5 = Q() + + class Person(Document): + name = StringField() + age = IntField() + + query = {'$or': [{'age': {'$gte': 18}}, {'name': 'test'}]} + self.assertEqual((q1 | q2 | q3 | q4 | q5).to_query(Person), query) + + query = {'age': {'$gte': 18}, 'name': 'test'} + self.assertEqual((q1 & q2 & q3 & q4 & q5).to_query(Person), query) + + def test_q_with_dbref(self): + """Ensure Q objects handle DBRefs correctly""" + connect(db='mongoenginetest') + + class User(Document): + pass + + class Post(Document): + created_user = ReferenceField(User) + + user = User.objects.create() + Post.objects.create(created_user=user) + + self.assertEqual(Post.objects.filter(created_user=user).count(), 1) + self.assertEqual(Post.objects.filter(Q(created_user=user)).count(), 1) + + def test_and_combination(self): + """Ensure that Q-objects correctly AND together. 
+ """ + class TestDoc(Document): + x = IntField() + y = StringField() + + # Check than an error is raised when conflicting queries are anded + def invalid_combination(): + query = Q(x__lt=7) & Q(x__lt=3) + query.to_query(TestDoc) + self.assertRaises(InvalidQueryError, invalid_combination) + + # Check normal cases work without an error + query = Q(x__lt=7) & Q(x__gt=3) + + q1 = Q(x__lt=7) + q2 = Q(x__gt=3) + query = (q1 & q2).to_query(TestDoc) + self.assertEqual(query, {'x': {'$lt': 7, '$gt': 3}}) + + # More complex nested example + query = Q(x__lt=100) & Q(y__ne='NotMyString') + query &= Q(y__in=['a', 'b', 'c']) & Q(x__gt=-100) + mongo_query = { + 'x': {'$lt': 100, '$gt': -100}, + 'y': {'$ne': 'NotMyString', '$in': ['a', 'b', 'c']}, + } + self.assertEqual(query.to_query(TestDoc), mongo_query) + + def test_or_combination(self): + """Ensure that Q-objects correctly OR together. + """ + class TestDoc(Document): + x = IntField() + + q1 = Q(x__lt=3) + q2 = Q(x__gt=7) + query = (q1 | q2).to_query(TestDoc) + self.assertEqual(query, { + '$or': [ + {'x': {'$lt': 3}}, + {'x': {'$gt': 7}}, + ] + }) + + def test_and_or_combination(self): + """Ensure that Q-objects handle ANDing ORed components. 
+ """ + class TestDoc(Document): + x = IntField() + y = BooleanField() + + query = (Q(x__gt=0) | Q(x__exists=False)) + query &= Q(x__lt=100) + self.assertEqual(query.to_query(TestDoc), { + '$or': [ + {'x': {'$lt': 100, '$gt': 0}}, + {'x': {'$lt': 100, '$exists': False}}, + ] + }) + + q1 = (Q(x__gt=0) | Q(x__exists=False)) + q2 = (Q(x__lt=100) | Q(y=True)) + query = (q1 & q2).to_query(TestDoc) + + self.assertEqual(['$or'], query.keys()) + conditions = [ + {'x': {'$lt': 100, '$gt': 0}}, + {'x': {'$lt': 100, '$exists': False}}, + {'x': {'$gt': 0}, 'y': True}, + {'x': {'$exists': False}, 'y': True}, + ] + self.assertEqual(len(conditions), len(query['$or'])) + for condition in conditions: + self.assertTrue(condition in query['$or']) + + def test_or_and_or_combination(self): + """Ensure that Q-objects handle ORing ANDed ORed components. :) + """ + class TestDoc(Document): + x = IntField() + y = BooleanField() + + q1 = (Q(x__gt=0) & (Q(y=True) | Q(y__exists=False))) + q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False))) + query = (q1 | q2).to_query(TestDoc) + + self.assertEqual(['$or'], query.keys()) + conditions = [ + {'x': {'$gt': 0}, 'y': True}, + {'x': {'$gt': 0}, 'y': {'$exists': False}}, + {'x': {'$lt': 100}, 'y':False}, + {'x': {'$lt': 100}, 'y': {'$exists': False}}, + ] + self.assertEqual(len(conditions), len(query['$or'])) + for condition in conditions: + self.assertTrue(condition in query['$or']) + + def test_q_clone(self): + + class TestDoc(Document): + x = IntField() + + TestDoc.drop_collection() + for i in xrange(1, 101): + t = TestDoc(x=i) + t.save() + + # Check normal cases work without an error + test = TestDoc.objects(Q(x__lt=7) & Q(x__gt=3)) + + self.assertEqual(test.count(), 3) + + test2 = test.clone() + self.assertEqual(test2.count(), 3) + self.assertFalse(test2 == test) + + test2.filter(x=6) + self.assertEqual(test2.count(), 1) + self.assertEqual(test.count(), 3) + + def test_q(self): + """Ensure that Q objects may be used to query for documents. 
+ """ + class BlogPost(Document): + title = StringField() + publish_date = DateTimeField() + published = BooleanField() + + BlogPost.drop_collection() + + post1 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 8), published=False) + post1.save() + + post2 = BlogPost(title='Test 2', publish_date=datetime(2010, 1, 15), published=True) + post2.save() + + post3 = BlogPost(title='Test 3', published=True) + post3.save() + + post4 = BlogPost(title='Test 4', publish_date=datetime(2010, 1, 8)) + post4.save() + + post5 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 15)) + post5.save() + + post6 = BlogPost(title='Test 1', published=False) + post6.save() + + # Check ObjectId lookup works + obj = BlogPost.objects(id=post1.id).first() + self.assertEqual(obj, post1) + + # Check Q object combination with one does not exist + q = BlogPost.objects(Q(title='Test 5') | Q(published=True)) + posts = [post.id for post in q] + + published_posts = (post2, post3) + self.assertTrue(all(obj.id in posts for obj in published_posts)) + + q = BlogPost.objects(Q(title='Test 1') | Q(published=True)) + posts = [post.id for post in q] + published_posts = (post1, post2, post3, post5, post6) + self.assertTrue(all(obj.id in posts for obj in published_posts)) + + # Check Q object combination + date = datetime(2010, 1, 10) + q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True)) + posts = [post.id for post in q] + + published_posts = (post1, post2, post3, post4) + self.assertTrue(all(obj.id in posts for obj in published_posts)) + + self.assertFalse(any(obj.id in posts for obj in [post5, post6])) + + BlogPost.drop_collection() + + # Check the 'in' operator + self.Person(name='user1', age=20).save() + self.Person(name='user2', age=20).save() + self.Person(name='user3', age=30).save() + self.Person(name='user4', age=40).save() + + self.assertEqual(len(self.Person.objects(Q(age__in=[20]))), 2) + self.assertEqual(len(self.Person.objects(Q(age__in=[20, 30]))), 3) + + # Test 
invalid query objs + def wrong_query_objs(): + self.Person.objects('user1') + def wrong_query_objs_filter(): + self.Person.objects('user1') + self.assertRaises(InvalidQueryError, wrong_query_objs) + self.assertRaises(InvalidQueryError, wrong_query_objs_filter) + + def test_q_regex(self): + """Ensure that Q objects can be queried using regexes. + """ + person = self.Person(name='Guido van Rossum') + person.save() + + import re + obj = self.Person.objects(Q(name=re.compile('^Gui'))).first() + self.assertEqual(obj, person) + obj = self.Person.objects(Q(name=re.compile('^gui'))).first() + self.assertEqual(obj, None) + + obj = self.Person.objects(Q(name=re.compile('^gui', re.I))).first() + self.assertEqual(obj, person) + + obj = self.Person.objects(Q(name__not=re.compile('^bob'))).first() + self.assertEqual(obj, person) + + obj = self.Person.objects(Q(name__not=re.compile('^Gui'))).first() + self.assertEqual(obj, None) + + def test_q_lists(self): + """Ensure that Q objects query ListFields correctly. 
+ """ + class BlogPost(Document): + tags = ListField(StringField()) + + BlogPost.drop_collection() + + BlogPost(tags=['python', 'mongo']).save() + BlogPost(tags=['python']).save() + + self.assertEqual(len(BlogPost.objects(Q(tags='mongo'))), 1) + self.assertEqual(len(BlogPost.objects(Q(tags='python'))), 2) + + BlogPost.drop_collection() + + def test_q_merge_queries_edge_case(self): + + class User(Document): + email = EmailField(required=False) + name = StringField() + + User.drop_collection() + pk = ObjectId() + User(email='example@example.com', pk=pk).save() + + self.assertEqual(1, User.objects.filter( + Q(email='example@example.com') | + Q(name='John Doe') + ).limit(2).filter(pk=pk).count()) \ No newline at end of file From 42f506adc6875f31c5081b1067b13c16f71bdb29 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 19 Dec 2012 13:58:04 +0000 Subject: [PATCH 072/464] Updates to test suite --- tests/queryset/field_list.py | 271 ++++++++++++++++++++++++++++++++++- tests/queryset/queryset.py | 251 -------------------------------- tests/queryset/visitor.py | 5 +- 3 files changed, 274 insertions(+), 253 deletions(-) diff --git a/tests/queryset/field_list.py b/tests/queryset/field_list.py index f3b457b..6a9c6a9 100644 --- a/tests/queryset/field_list.py +++ b/tests/queryset/field_list.py @@ -6,7 +6,8 @@ import unittest from mongoengine import * from mongoengine.queryset import QueryFieldList -__all__ = ("QueryFieldListTest",) +__all__ = ("QueryFieldListTest", "OnlyExcludeAllTest") + class QueryFieldListTest(unittest.TestCase): @@ -65,3 +66,271 @@ class QueryFieldListTest(unittest.TestCase): q = QueryFieldList() q += QueryFieldList(fields=['a'], value={"$slice": 5}) self.assertEqual(q.as_dict(), {'a': {"$slice": 5}}) + + +class OnlyExcludeAllTest(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + + class Person(Document): + name = StringField() + age = IntField() + meta = {'allow_inheritance': True} + + Person.drop_collection() + self.Person = 
Person + + def test_only(self): + """Ensure that QuerySet.only only returns the requested fields. + """ + person = self.Person(name='test', age=25) + person.save() + + obj = self.Person.objects.only('name').get() + self.assertEqual(obj.name, person.name) + self.assertEqual(obj.age, None) + + obj = self.Person.objects.only('age').get() + self.assertEqual(obj.name, None) + self.assertEqual(obj.age, person.age) + + obj = self.Person.objects.only('name', 'age').get() + self.assertEqual(obj.name, person.name) + self.assertEqual(obj.age, person.age) + + # Check polymorphism still works + class Employee(self.Person): + salary = IntField(db_field='wage') + + employee = Employee(name='test employee', age=40, salary=30000) + employee.save() + + obj = self.Person.objects(id=employee.id).only('age').get() + self.assertTrue(isinstance(obj, Employee)) + + # Check field names are looked up properly + obj = Employee.objects(id=employee.id).only('salary').get() + self.assertEqual(obj.salary, employee.salary) + self.assertEqual(obj.name, None) + + def test_only_with_subfields(self): + class User(EmbeddedDocument): + name = StringField() + email = StringField() + + class Comment(EmbeddedDocument): + title = StringField() + text = StringField() + + class BlogPost(Document): + content = StringField() + author = EmbeddedDocumentField(User) + comments = ListField(EmbeddedDocumentField(Comment)) + + BlogPost.drop_collection() + + post = BlogPost(content='Had a good coffee today...') + post.author = User(name='Test User') + post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] + post.save() + + obj = BlogPost.objects.only('author.name',).get() + self.assertEqual(obj.content, None) + self.assertEqual(obj.author.email, None) + self.assertEqual(obj.author.name, 'Test User') + self.assertEqual(obj.comments, []) + + obj = BlogPost.objects.only('content', 'comments.title',).get() + self.assertEqual(obj.content, 'Had a good coffee 
today...') + self.assertEqual(obj.author, None) + self.assertEqual(obj.comments[0].title, 'I aggree') + self.assertEqual(obj.comments[1].title, 'Coffee') + self.assertEqual(obj.comments[0].text, None) + self.assertEqual(obj.comments[1].text, None) + + obj = BlogPost.objects.only('comments',).get() + self.assertEqual(obj.content, None) + self.assertEqual(obj.author, None) + self.assertEqual(obj.comments[0].title, 'I aggree') + self.assertEqual(obj.comments[1].title, 'Coffee') + self.assertEqual(obj.comments[0].text, 'Great post!') + self.assertEqual(obj.comments[1].text, 'I hate coffee') + + BlogPost.drop_collection() + + def test_exclude(self): + class User(EmbeddedDocument): + name = StringField() + email = StringField() + + class Comment(EmbeddedDocument): + title = StringField() + text = StringField() + + class BlogPost(Document): + content = StringField() + author = EmbeddedDocumentField(User) + comments = ListField(EmbeddedDocumentField(Comment)) + + BlogPost.drop_collection() + + post = BlogPost(content='Had a good coffee today...') + post.author = User(name='Test User') + post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] + post.save() + + obj = BlogPost.objects.exclude('author', 'comments.text').get() + self.assertEqual(obj.author, None) + self.assertEqual(obj.content, 'Had a good coffee today...') + self.assertEqual(obj.comments[0].title, 'I aggree') + self.assertEqual(obj.comments[0].text, None) + + BlogPost.drop_collection() + + def test_exclude_only_combining(self): + class Attachment(EmbeddedDocument): + name = StringField() + content = StringField() + + class Email(Document): + sender = StringField() + to = StringField() + subject = StringField() + body = StringField() + content_type = StringField() + attachments = ListField(EmbeddedDocumentField(Attachment)) + + Email.drop_collection() + email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', 
content_type='text/plain') + email.attachments = [ + Attachment(name='file1.doc', content='ABC'), + Attachment(name='file2.doc', content='XYZ'), + ] + email.save() + + obj = Email.objects.exclude('content_type').exclude('body').get() + self.assertEqual(obj.sender, 'me') + self.assertEqual(obj.to, 'you') + self.assertEqual(obj.subject, 'From Russia with Love') + self.assertEqual(obj.body, None) + self.assertEqual(obj.content_type, None) + + obj = Email.objects.only('sender', 'to').exclude('body', 'sender').get() + self.assertEqual(obj.sender, None) + self.assertEqual(obj.to, 'you') + self.assertEqual(obj.subject, None) + self.assertEqual(obj.body, None) + self.assertEqual(obj.content_type, None) + + obj = Email.objects.exclude('attachments.content').exclude('body').only('to', 'attachments.name').get() + self.assertEqual(obj.attachments[0].name, 'file1.doc') + self.assertEqual(obj.attachments[0].content, None) + self.assertEqual(obj.sender, None) + self.assertEqual(obj.to, 'you') + self.assertEqual(obj.subject, None) + self.assertEqual(obj.body, None) + self.assertEqual(obj.content_type, None) + + Email.drop_collection() + + def test_all_fields(self): + + class Email(Document): + sender = StringField() + to = StringField() + subject = StringField() + body = StringField() + content_type = StringField() + + Email.drop_collection() + + email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain') + email.save() + + obj = Email.objects.exclude('content_type', 'body').only('to', 'body').all_fields().get() + self.assertEqual(obj.sender, 'me') + self.assertEqual(obj.to, 'you') + self.assertEqual(obj.subject, 'From Russia with Love') + self.assertEqual(obj.body, 'Hello!') + self.assertEqual(obj.content_type, 'text/plain') + + Email.drop_collection() + + def test_slicing_fields(self): + """Ensure that query slicing an array works. 
+ """ + class Numbers(Document): + n = ListField(IntField()) + + Numbers.drop_collection() + + numbers = Numbers(n=[0,1,2,3,4,5,-5,-4,-3,-2,-1]) + numbers.save() + + # first three + numbers = Numbers.objects.fields(slice__n=3).get() + self.assertEqual(numbers.n, [0, 1, 2]) + + # last three + numbers = Numbers.objects.fields(slice__n=-3).get() + self.assertEqual(numbers.n, [-3, -2, -1]) + + # skip 2, limit 3 + numbers = Numbers.objects.fields(slice__n=[2, 3]).get() + self.assertEqual(numbers.n, [2, 3, 4]) + + # skip to fifth from last, limit 4 + numbers = Numbers.objects.fields(slice__n=[-5, 4]).get() + self.assertEqual(numbers.n, [-5, -4, -3, -2]) + + # skip to fifth from last, limit 10 + numbers = Numbers.objects.fields(slice__n=[-5, 10]).get() + self.assertEqual(numbers.n, [-5, -4, -3, -2, -1]) + + # skip to fifth from last, limit 10 dict method + numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get() + self.assertEqual(numbers.n, [-5, -4, -3, -2, -1]) + + def test_slicing_nested_fields(self): + """Ensure that query slicing an embedded array works. 
+ """ + + class EmbeddedNumber(EmbeddedDocument): + n = ListField(IntField()) + + class Numbers(Document): + embedded = EmbeddedDocumentField(EmbeddedNumber) + + Numbers.drop_collection() + + numbers = Numbers() + numbers.embedded = EmbeddedNumber(n=[0,1,2,3,4,5,-5,-4,-3,-2,-1]) + numbers.save() + + # first three + numbers = Numbers.objects.fields(slice__embedded__n=3).get() + self.assertEqual(numbers.embedded.n, [0, 1, 2]) + + # last three + numbers = Numbers.objects.fields(slice__embedded__n=-3).get() + self.assertEqual(numbers.embedded.n, [-3, -2, -1]) + + # skip 2, limit 3 + numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get() + self.assertEqual(numbers.embedded.n, [2, 3, 4]) + + # skip to fifth from last, limit 4 + numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get() + self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2]) + + # skip to fifth from last, limit 10 + numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get() + self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) + + # skip to fifth from last, limit 10 dict method + numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get() + self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) + +if __name__ == '__main__': + unittest.main() diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index e3e0215..bad3d36 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -939,257 +939,6 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - def test_only(self): - """Ensure that QuerySet.only only returns the requested fields. 
- """ - person = self.Person(name='test', age=25) - person.save() - - obj = self.Person.objects.only('name').get() - self.assertEqual(obj.name, person.name) - self.assertEqual(obj.age, None) - - obj = self.Person.objects.only('age').get() - self.assertEqual(obj.name, None) - self.assertEqual(obj.age, person.age) - - obj = self.Person.objects.only('name', 'age').get() - self.assertEqual(obj.name, person.name) - self.assertEqual(obj.age, person.age) - - # Check polymorphism still works - class Employee(self.Person): - salary = IntField(db_field='wage') - - employee = Employee(name='test employee', age=40, salary=30000) - employee.save() - - obj = self.Person.objects(id=employee.id).only('age').get() - self.assertTrue(isinstance(obj, Employee)) - - # Check field names are looked up properly - obj = Employee.objects(id=employee.id).only('salary').get() - self.assertEqual(obj.salary, employee.salary) - self.assertEqual(obj.name, None) - - def test_only_with_subfields(self): - class User(EmbeddedDocument): - name = StringField() - email = StringField() - - class Comment(EmbeddedDocument): - title = StringField() - text = StringField() - - class BlogPost(Document): - content = StringField() - author = EmbeddedDocumentField(User) - comments = ListField(EmbeddedDocumentField(Comment)) - - BlogPost.drop_collection() - - post = BlogPost(content='Had a good coffee today...') - post.author = User(name='Test User') - post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] - post.save() - - obj = BlogPost.objects.only('author.name',).get() - self.assertEqual(obj.content, None) - self.assertEqual(obj.author.email, None) - self.assertEqual(obj.author.name, 'Test User') - self.assertEqual(obj.comments, []) - - obj = BlogPost.objects.only('content', 'comments.title',).get() - self.assertEqual(obj.content, 'Had a good coffee today...') - self.assertEqual(obj.author, None) - self.assertEqual(obj.comments[0].title, 'I aggree') - 
self.assertEqual(obj.comments[1].title, 'Coffee') - self.assertEqual(obj.comments[0].text, None) - self.assertEqual(obj.comments[1].text, None) - - obj = BlogPost.objects.only('comments',).get() - self.assertEqual(obj.content, None) - self.assertEqual(obj.author, None) - self.assertEqual(obj.comments[0].title, 'I aggree') - self.assertEqual(obj.comments[1].title, 'Coffee') - self.assertEqual(obj.comments[0].text, 'Great post!') - self.assertEqual(obj.comments[1].text, 'I hate coffee') - - BlogPost.drop_collection() - - def test_exclude(self): - class User(EmbeddedDocument): - name = StringField() - email = StringField() - - class Comment(EmbeddedDocument): - title = StringField() - text = StringField() - - class BlogPost(Document): - content = StringField() - author = EmbeddedDocumentField(User) - comments = ListField(EmbeddedDocumentField(Comment)) - - BlogPost.drop_collection() - - post = BlogPost(content='Had a good coffee today...') - post.author = User(name='Test User') - post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] - post.save() - - obj = BlogPost.objects.exclude('author', 'comments.text').get() - self.assertEqual(obj.author, None) - self.assertEqual(obj.content, 'Had a good coffee today...') - self.assertEqual(obj.comments[0].title, 'I aggree') - self.assertEqual(obj.comments[0].text, None) - - BlogPost.drop_collection() - - def test_exclude_only_combining(self): - class Attachment(EmbeddedDocument): - name = StringField() - content = StringField() - - class Email(Document): - sender = StringField() - to = StringField() - subject = StringField() - body = StringField() - content_type = StringField() - attachments = ListField(EmbeddedDocumentField(Attachment)) - - Email.drop_collection() - email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain') - email.attachments = [ - Attachment(name='file1.doc', content='ABC'), - 
Attachment(name='file2.doc', content='XYZ'), - ] - email.save() - - obj = Email.objects.exclude('content_type').exclude('body').get() - self.assertEqual(obj.sender, 'me') - self.assertEqual(obj.to, 'you') - self.assertEqual(obj.subject, 'From Russia with Love') - self.assertEqual(obj.body, None) - self.assertEqual(obj.content_type, None) - - obj = Email.objects.only('sender', 'to').exclude('body', 'sender').get() - self.assertEqual(obj.sender, None) - self.assertEqual(obj.to, 'you') - self.assertEqual(obj.subject, None) - self.assertEqual(obj.body, None) - self.assertEqual(obj.content_type, None) - - obj = Email.objects.exclude('attachments.content').exclude('body').only('to', 'attachments.name').get() - self.assertEqual(obj.attachments[0].name, 'file1.doc') - self.assertEqual(obj.attachments[0].content, None) - self.assertEqual(obj.sender, None) - self.assertEqual(obj.to, 'you') - self.assertEqual(obj.subject, None) - self.assertEqual(obj.body, None) - self.assertEqual(obj.content_type, None) - - Email.drop_collection() - - def test_all_fields(self): - - class Email(Document): - sender = StringField() - to = StringField() - subject = StringField() - body = StringField() - content_type = StringField() - - Email.drop_collection() - - email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain') - email.save() - - obj = Email.objects.exclude('content_type', 'body').only('to', 'body').all_fields().get() - self.assertEqual(obj.sender, 'me') - self.assertEqual(obj.to, 'you') - self.assertEqual(obj.subject, 'From Russia with Love') - self.assertEqual(obj.body, 'Hello!') - self.assertEqual(obj.content_type, 'text/plain') - - Email.drop_collection() - - def test_slicing_fields(self): - """Ensure that query slicing an array works. 
- """ - class Numbers(Document): - n = ListField(IntField()) - - Numbers.drop_collection() - - numbers = Numbers(n=[0,1,2,3,4,5,-5,-4,-3,-2,-1]) - numbers.save() - - # first three - numbers = Numbers.objects.fields(slice__n=3).get() - self.assertEqual(numbers.n, [0, 1, 2]) - - # last three - numbers = Numbers.objects.fields(slice__n=-3).get() - self.assertEqual(numbers.n, [-3, -2, -1]) - - # skip 2, limit 3 - numbers = Numbers.objects.fields(slice__n=[2, 3]).get() - self.assertEqual(numbers.n, [2, 3, 4]) - - # skip to fifth from last, limit 4 - numbers = Numbers.objects.fields(slice__n=[-5, 4]).get() - self.assertEqual(numbers.n, [-5, -4, -3, -2]) - - # skip to fifth from last, limit 10 - numbers = Numbers.objects.fields(slice__n=[-5, 10]).get() - self.assertEqual(numbers.n, [-5, -4, -3, -2, -1]) - - # skip to fifth from last, limit 10 dict method - numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get() - self.assertEqual(numbers.n, [-5, -4, -3, -2, -1]) - - def test_slicing_nested_fields(self): - """Ensure that query slicing an embedded array works. 
- """ - - class EmbeddedNumber(EmbeddedDocument): - n = ListField(IntField()) - - class Numbers(Document): - embedded = EmbeddedDocumentField(EmbeddedNumber) - - Numbers.drop_collection() - - numbers = Numbers() - numbers.embedded = EmbeddedNumber(n=[0,1,2,3,4,5,-5,-4,-3,-2,-1]) - numbers.save() - - # first three - numbers = Numbers.objects.fields(slice__embedded__n=3).get() - self.assertEqual(numbers.embedded.n, [0, 1, 2]) - - # last three - numbers = Numbers.objects.fields(slice__embedded__n=-3).get() - self.assertEqual(numbers.embedded.n, [-3, -2, -1]) - - # skip 2, limit 3 - numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get() - self.assertEqual(numbers.embedded.n, [2, 3, 4]) - - # skip to fifth from last, limit 4 - numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get() - self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2]) - - # skip to fifth from last, limit 10 - numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get() - self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) - - # skip to fifth from last, limit 10 dict method - numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get() - self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) - def test_find_embedded(self): """Ensure that an embedded document is properly returned from a query. 
""" diff --git a/tests/queryset/visitor.py b/tests/queryset/visitor.py index 71c3561..82a9913 100644 --- a/tests/queryset/visitor.py +++ b/tests/queryset/visitor.py @@ -307,4 +307,7 @@ class QTest(unittest.TestCase): self.assertEqual(1, User.objects.filter( Q(email='example@example.com') | Q(name='John Doe') - ).limit(2).filter(pk=pk).count()) \ No newline at end of file + ).limit(2).filter(pk=pk).count()) + +if __name__ == '__main__': + unittest.main() From 3074dad293f7c0e7753fd194247675c9b955a011 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 19 Dec 2012 13:58:22 +0000 Subject: [PATCH 073/464] Test mixing only, include and exclude #191 --- tests/queryset/field_list.py | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/tests/queryset/field_list.py b/tests/queryset/field_list.py index 6a9c6a9..9e71133 100644 --- a/tests/queryset/field_list.py +++ b/tests/queryset/field_list.py @@ -81,6 +81,40 @@ class OnlyExcludeAllTest(unittest.TestCase): Person.drop_collection() self.Person = Person + def test_mixing_only_exclude(self): + + class MyDoc(Document): + a = StringField() + b = StringField() + c = StringField() + d = StringField() + e = StringField() + f = StringField() + + include = ['a', 'b', 'c', 'd', 'e'] + exclude = ['d', 'e'] + only = ['b', 'c'] + + qs = MyDoc.objects.fields(**dict(((i, 1) for i in include))) + self.assertEqual(qs._loaded_fields.as_dict(), + {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1}) + qs = qs.only(*only) + self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) + qs = qs.exclude(*exclude) + self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) + + qs = MyDoc.objects.fields(**dict(((i, 1) for i in include))) + qs = qs.exclude(*exclude) + self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1}) + qs = qs.only(*only) + self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) + + qs = MyDoc.objects.exclude(*exclude) + qs = qs.fields(**dict(((i, 1) for i in include))) + 
self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1}) + qs = qs.only(*only) + self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) + def test_only(self): """Ensure that QuerySet.only only returns the requested fields. """ From 1c10f3020b079247ce1d0bca269fd03635c2ca70 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 19 Dec 2012 14:56:15 +0000 Subject: [PATCH 074/464] Added support for multiple slices Also made slicing chainable. (#170) (#190) (#191) --- docs/changelog.rst | 1 + mongoengine/queryset/field_list.py | 21 ++++++++++++++++++++- tests/queryset/field_list.py | 29 +++++++++++++++++++++++++++++ 3 files changed, 50 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 8fc279e..9e1cec8 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -21,6 +21,7 @@ Changes in 0.8 - Remove _types and just use _cls for inheritance (#148) - Only allow QNode instances to be passed as query objects (#199) - Dynamic fields are now validated on save (#153) (#154) +- Added support for multiple slices and made slicing chainable. 
(#170) (#190) (#191) Changes in 0.7.9 ================ diff --git a/mongoengine/queryset/field_list.py b/mongoengine/queryset/field_list.py index 1c825fa..7b2b0cb 100644 --- a/mongoengine/queryset/field_list.py +++ b/mongoengine/queryset/field_list.py @@ -12,20 +12,31 @@ class QueryFieldList(object): self.fields = set(fields) self.always_include = set(always_include) self._id = None + self.slice = {} def __add__(self, f): - if not self.fields: + if isinstance(f.value, dict): + for field in f.fields: + self.slice[field] = f.value + if not self.fields: + self.fields = f.fields + elif not self.fields: self.fields = f.fields self.value = f.value + self.slice = {} elif self.value is self.ONLY and f.value is self.ONLY: + self._clean_slice() self.fields = self.fields.intersection(f.fields) elif self.value is self.EXCLUDE and f.value is self.EXCLUDE: self.fields = self.fields.union(f.fields) + self._clean_slice() elif self.value is self.ONLY and f.value is self.EXCLUDE: self.fields -= f.fields + self._clean_slice() elif self.value is self.EXCLUDE and f.value is self.ONLY: self.value = self.ONLY self.fields = f.fields - self.fields + self._clean_slice() if '_id' in f.fields: self._id = f.value @@ -42,10 +53,18 @@ class QueryFieldList(object): def as_dict(self): field_list = dict((field, self.value) for field in self.fields) + if self.slice: + field_list.update(self.slice) if self._id is not None: field_list['_id'] = self._id return field_list def reset(self): self.fields = set([]) + self.slice = {} self.value = self.ONLY + + def _clean_slice(self): + if self.slice: + for field in set(self.slice.keys()) - self.fields: + del self.slice[field] diff --git a/tests/queryset/field_list.py b/tests/queryset/field_list.py index 9e71133..4a8a72b 100644 --- a/tests/queryset/field_list.py +++ b/tests/queryset/field_list.py @@ -115,6 +115,35 @@ class OnlyExcludeAllTest(unittest.TestCase): qs = qs.only(*only) self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) + def 
test_slicing(self): + + class MyDoc(Document): + a = ListField() + b = ListField() + c = ListField() + d = ListField() + e = ListField() + f = ListField() + + include = ['a', 'b', 'c', 'd', 'e'] + exclude = ['d', 'e'] + only = ['b', 'c'] + + qs = MyDoc.objects.fields(**dict(((i, 1) for i in include))) + qs = qs.exclude(*exclude) + qs = qs.only(*only) + qs = qs.fields(slice__b=5) + self.assertEqual(qs._loaded_fields.as_dict(), + {'b': {'$slice': 5}, 'c': 1}) + + qs = qs.fields(slice__c=[5, 1]) + self.assertEqual(qs._loaded_fields.as_dict(), + {'b': {'$slice': 5}, 'c': {'$slice': [5, 1]}}) + + qs = qs.exclude('c') + self.assertEqual(qs._loaded_fields.as_dict(), + {'b': {'$slice': 5}}) + def test_only(self): """Ensure that QuerySet.only only returns the requested fields. """ From f335591045e01ed12b12dca4964c3a8c2200a35f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 19 Dec 2012 16:55:14 +0000 Subject: [PATCH 075/464] Fix index build_spec #177 --- docs/changelog.rst | 4 +- mongoengine/base/document.py | 2 +- mongoengine/fields.py | 2 +- tests/document/indexes.py | 108 +++++++++++++++++++++++------------ 4 files changed, 76 insertions(+), 40 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 9e1cec8..279abc9 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -2,8 +2,8 @@ Changelog ========= -Changes in 0.8 -============== +Changes in 0.8.X +================ - Fixed db_alias and inherited Documents (#143) - Documentation update for document errors (#124) - Deprecated `get_or_create` (#35) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index affc20e..93bde8e 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -525,7 +525,7 @@ class BaseDocument(object): # Check to see if we need to include _cls allow_inheritance = cls._meta.get('allow_inheritance', - ALLOW_INHERITANCE) != False + ALLOW_INHERITANCE) include_cls = allow_inheritance and not spec.get('sparse', False) for key in 
spec['fields']: diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 73c0db4..3f9810f 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -749,7 +749,7 @@ class ReferenceField(BaseField): if dbref is None: msg = ("ReferenceFields will default to using ObjectId " - " strings in 0.8, set DBRef=True if this isn't desired") + "in 0.8, set DBRef=True if this isn't desired") warnings.warn(msg, FutureWarning) self.dbref = dbref if dbref is not None else True # To change in 0.8 diff --git a/tests/document/indexes.py b/tests/document/indexes.py index 8f83afc..9ebd9cb 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -1,31 +1,23 @@ # -*- coding: utf-8 -*- from __future__ import with_statement -import bson -import os -import pickle -import pymongo -import sys import unittest -import uuid -import warnings +import sys + +sys.path[0:0] = [""] + +import os +import pymongo from nose.plugins.skip import SkipTest from datetime import datetime -from tests.fixtures import Base, Mixin, PickleEmbedded, PickleTest - from mongoengine import * -from mongoengine.errors import (NotRegistered, InvalidDocumentError, - InvalidQueryError) -from mongoengine.queryset import NULLIFY, Q from mongoengine.connection import get_db, get_connection -TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') - -__all__ = ("InstanceTest", ) +__all__ = ("IndexesTest", ) -class InstanceTest(unittest.TestCase): +class IndexesTest(unittest.TestCase): def setUp(self): connect(db='mongoenginetest') @@ -47,20 +39,59 @@ class InstanceTest(unittest.TestCase): continue self.db.drop_collection(collection) - def test_indexes_document(self, ): + def ztest_indexes_document(self, ): """Ensure that indexes are used when meta[indexes] is specified for Documents """ - index_test(Document) + self.index_test(Document) def test_indexes_dynamic_document(self, ): """Ensure that indexes are used when meta[indexes] is specified for Dynamic Documents """ - 
index_test(DynamicDocument) + self.index_test(DynamicDocument) def index_test(self, InheritFrom): + class BlogPost(InheritFrom): + date = DateTimeField(db_field='addDate', default=datetime.now) + category = StringField() + tags = ListField(StringField()) + meta = { + 'indexes': [ + '-date', + 'tags', + ('category', '-date') + ] + } + + expected_specs = [{'fields': [('addDate', -1)]}, + {'fields': [('tags', 1)]}, + {'fields': [('category', 1), ('addDate', -1)]}] + self.assertEqual(expected_specs, BlogPost._meta['index_specs']) + + BlogPost.ensure_indexes() + info = BlogPost.objects._collection.index_information() + # _id, '-date', 'tags', ('cat', 'date') + self.assertEqual(len(info), 4) + info = [value['key'] for key, value in info.iteritems()] + for expected in expected_specs: + self.assertTrue(expected['fields'] in info) + + def test_indexes_document_inheritance(self): + """Ensure that indexes are used when meta[indexes] is specified for + Documents + """ + self.index_test_inheritance(Document) + + def test_indexes_dynamic_document_inheritance(self): + """Ensure that indexes are used when meta[indexes] is specified for + Dynamic Documents + """ + self.index_test_inheritance(DynamicDocument) + + def index_test_inheritance(self, InheritFrom): + class BlogPost(InheritFrom): date = DateTimeField(db_field='addDate', default=datetime.now) category = StringField() @@ -217,7 +248,7 @@ class InstanceTest(unittest.TestCase): info = BlogPost.objects._collection.index_information() # _id, '-date' - self.assertEqual(len(info), 3) + self.assertEqual(len(info), 2) # Indexes are lazy so use list() to perform query list(BlogPost.objects) @@ -265,7 +296,6 @@ class InstanceTest(unittest.TestCase): } user_guid = StringField(required=True) - User.drop_collection() u = User(user_guid='123') @@ -295,7 +325,7 @@ class InstanceTest(unittest.TestCase): BlogPost.drop_collection() info = BlogPost.objects._collection.index_information() - self.assertEqual(info.keys(), ['_cls_1_date.yr_-1', 
'_id_']) + self.assertEqual(info.keys(), ['date.yr_-1', '_id_']) BlogPost.drop_collection() def test_list_embedded_document_index(self): @@ -318,7 +348,7 @@ class InstanceTest(unittest.TestCase): info = BlogPost.objects._collection.index_information() # we don't use _cls in with list fields by default - self.assertEqual(info.keys(), ['_id_', '_cls_1_tags.tag_1']) + self.assertEqual(info.keys(), ['_id_', 'tags.tag_1']) post1 = BlogPost(title="Embedded Indexes tests in place", tags=[Tag(name="about"), Tag(name="time")] @@ -347,7 +377,7 @@ class InstanceTest(unittest.TestCase): class Parent(Document): name = StringField() - location = ReferenceField(Location) + location = ReferenceField(Location, dbref=False) Location.drop_collection() Parent.drop_collection() @@ -396,8 +426,7 @@ class InstanceTest(unittest.TestCase): meta = { 'indexes': [ ['categories', 'id'] - ], - 'allow_inheritance': False + ] } title = StringField(required=True) @@ -498,15 +527,18 @@ class InstanceTest(unittest.TestCase): BlogPost.drop_collection() - post1 = BlogPost(title='test1', sub=SubDocument(year=2009, slug="test")) + post1 = BlogPost(title='test1', + sub=SubDocument(year=2009, slug="test")) post1.save() # sub.slug is different so won't raise exception - post2 = BlogPost(title='test2', sub=SubDocument(year=2010, slug='another-slug')) + post2 = BlogPost(title='test2', + sub=SubDocument(year=2010, slug='another-slug')) post2.save() # Now there will be two docs with the same sub.slug - post3 = BlogPost(title='test3', sub=SubDocument(year=2010, slug='test')) + post3 = BlogPost(title='test3', + sub=SubDocument(year=2010, slug='test')) self.assertRaises(NotUniqueError, post3.save) BlogPost.drop_collection() @@ -525,19 +557,23 @@ class InstanceTest(unittest.TestCase): BlogPost.drop_collection() - post1 = BlogPost(title='test1', sub=SubDocument(year=2009, slug="test")) + post1 = BlogPost(title='test1', + sub=SubDocument(year=2009, slug="test")) post1.save() # sub.slug is different so won't raise 
exception - post2 = BlogPost(title='test2', sub=SubDocument(year=2010, slug='another-slug')) + post2 = BlogPost(title='test2', + sub=SubDocument(year=2010, slug='another-slug')) post2.save() # Now there will be two docs with the same sub.slug - post3 = BlogPost(title='test3', sub=SubDocument(year=2010, slug='test')) + post3 = BlogPost(title='test3', + sub=SubDocument(year=2010, slug='test')) self.assertRaises(NotUniqueError, post3.save) # Now there will be two docs with the same title and year - post3 = BlogPost(title='test1', sub=SubDocument(year=2009, slug='test-1')) + post3 = BlogPost(title='test1', + sub=SubDocument(year=2009, slug='test-1')) self.assertRaises(NotUniqueError, post3.save) BlogPost.drop_collection() @@ -566,7 +602,7 @@ class InstanceTest(unittest.TestCase): list(Log.objects) info = Log.objects._collection.index_information() self.assertEqual(3600, - info['_cls_1_created_1']['expireAfterSeconds']) + info['created_1']['expireAfterSeconds']) def test_unique_and_indexes(self): """Ensure that 'unique' constraints aren't overridden by @@ -586,7 +622,7 @@ class InstanceTest(unittest.TestCase): cust_dupe = Customer(cust_id=1) try: cust_dupe.save() - raise AssertionError, "We saved a dupe!" 
+ raise AssertionError("We saved a dupe!") except NotUniqueError: pass Customer.drop_collection() @@ -630,7 +666,7 @@ class InstanceTest(unittest.TestCase): info = BlogPost.objects._collection.index_information() info = [value['key'] for key, value in info.iteritems()] - index_item = [('_cls', 1), ('_id', 1), ('comments.comment_id', 1)] + index_item = [('_id', 1), ('comments.comment_id', 1)] self.assertTrue(index_item in info) if __name__ == '__main__': From 485b811bd00c4c96486b8537347bbc33f05b87d1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 19 Dec 2012 17:05:27 +0000 Subject: [PATCH 076/464] Test case for embedded docs and 2d indexes #183 --- tests/document/indexes.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/tests/document/indexes.py b/tests/document/indexes.py index 9ebd9cb..285d8c6 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -225,6 +225,30 @@ class IndexesTest(unittest.TestCase): info = [value['key'] for key, value in info.iteritems()] self.assertTrue([('location.point', '2d')] in info) + def test_explicit_geo2d_index_embedded(self): + """Ensure that geo2d indexes work when created via meta[indexes] + """ + class EmbeddedLocation(EmbeddedDocument): + location = DictField() + + class Place(Document): + current = DictField( + field=EmbeddedDocumentField('EmbeddedLocation')) + meta = { + 'allow_inheritance': True, + 'indexes': [ + '*current.location.point', + ] + } + + self.assertEqual([{'fields': [('current.location.point', '2d')]}], + Place._meta['index_specs']) + + Place.ensure_indexes() + info = Place._get_collection().index_information() + info = [value['key'] for key, value in info.iteritems()] + self.assertTrue([('current.location.point', '2d')] in info) + def test_dictionary_indexes(self): """Ensure that indexes are used when meta[indexes] contains dictionaries instead of lists. 
From c5b047d0cde0e40c844183a5e5f0c500e63ff8f7 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 21 Dec 2012 11:55:05 +0000 Subject: [PATCH 077/464] Fixed GridFSProxy __getattr__ behaviour (#196) --- AUTHORS | 3 +- docs/changelog.rst | 1 + docs/guide/gridfs.rst | 16 +- mongoengine/fields.py | 2 +- tests/document/instance.py | 3 +- tests/fields/__init__.py | 2 + tests/{test_fields.py => fields/fields.py} | 305 +---------------- tests/fields/file.py | 370 +++++++++++++++++++++ tests/{document => fields}/mongoengine.png | Bin 9 files changed, 384 insertions(+), 318 deletions(-) create mode 100644 tests/fields/__init__.py rename tests/{test_fields.py => fields/fields.py} (87%) create mode 100644 tests/fields/file.py rename tests/{document => fields}/mongoengine.png (100%) diff --git a/AUTHORS b/AUTHORS index 794f297..b49ddab 100644 --- a/AUTHORS +++ b/AUTHORS @@ -130,4 +130,5 @@ that much better: * Jakub Kot * Jorge Bastida * Stefan Wójcik - * Pete Campton \ No newline at end of file + * Pete Campton + * Martyn Smith \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 279abc9..352d0c7 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -22,6 +22,7 @@ Changes in 0.8.X - Only allow QNode instances to be passed as query objects (#199) - Dynamic fields are now validated on save (#153) (#154) - Added support for multiple slices and made slicing chainable. 
(#170) (#190) (#191) +- Fixed GridFSProxy __getattr__ behaviour (#196) Changes in 0.7.9 ================ diff --git a/docs/guide/gridfs.rst b/docs/guide/gridfs.rst index 9c80a99..1125947 100644 --- a/docs/guide/gridfs.rst +++ b/docs/guide/gridfs.rst @@ -18,20 +18,10 @@ a document is created to store details about animals, including a photo:: family = StringField() photo = FileField() - marmot = Animal('Marmota', 'Sciuridae') - - marmot_photo = open('marmot.jpg', 'r') # Retrieve a photo from disk - marmot.photo = marmot_photo # Store photo in the document - marmot.photo.content_type = 'image/jpeg' # Store metadata - - marmot.save() - -Another way of writing to a :class:`~mongoengine.FileField` is to use the -:func:`put` method. This allows for metadata to be stored in the same call as -the file:: - - marmot.photo.put(marmot_photo, content_type='image/jpeg') + marmot = Animal(genus='Marmota', family='Sciuridae') + marmot_photo = open('marmot.jpg', 'r') + marmot.photo.put(marmot_photo, content_type = 'image/jpeg') marmot.save() Retrieval diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 3f9810f..1e1a5ce 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -969,7 +969,7 @@ class GridFSProxy(object): if name in attrs: return self.__getattribute__(name) obj = self.get() - if name in dir(obj): + if hasattr(obj, name): return getattr(obj, name) raise AttributeError diff --git a/tests/document/instance.py b/tests/document/instance.py index 5e29dc3..0054480 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -19,7 +19,8 @@ from mongoengine.queryset import NULLIFY, Q from mongoengine.connection import get_db from mongoengine.base import get_document -TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') +TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), + '../fields/mongoengine.png') __all__ = ("InstanceTest",) diff --git a/tests/fields/__init__.py b/tests/fields/__init__.py new file mode 
100644 index 0000000..86dfa84 --- /dev/null +++ b/tests/fields/__init__.py @@ -0,0 +1,2 @@ +from fields import * +from file import * \ No newline at end of file diff --git a/tests/test_fields.py b/tests/fields/fields.py similarity index 87% rename from tests/test_fields.py rename to tests/fields/fields.py index 97a2d5f..a96ff0b 100644 --- a/tests/test_fields.py +++ b/tests/fields/fields.py @@ -4,24 +4,21 @@ import sys sys.path[0:0] = [""] import datetime -import os import unittest import uuid -import tempfile from decimal import Decimal from bson import Binary, DBRef, ObjectId -import gridfs -from nose.plugins.skip import SkipTest from mongoengine import * from mongoengine.connection import get_db from mongoengine.base import _document_registry from mongoengine.errors import NotRegistered -from mongoengine.python_support import PY3, b, StringIO, bin_type +from mongoengine.python_support import PY3, b, bin_type + +__all__ = ("FieldTest", ) -TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'document/mongoengine.png') class FieldTest(unittest.TestCase): @@ -1728,302 +1725,6 @@ class FieldTest(unittest.TestCase): Shirt.drop_collection() - def test_file_fields(self): - """Ensure that file fields can be written to and their data retrieved - """ - class PutFile(Document): - the_file = FileField() - - class StreamFile(Document): - the_file = FileField() - - class SetFile(Document): - the_file = FileField() - - text = b('Hello, World!') - more_text = b('Foo Bar') - content_type = 'text/plain' - - PutFile.drop_collection() - StreamFile.drop_collection() - SetFile.drop_collection() - - putfile = PutFile() - putfile.the_file.put(text, content_type=content_type) - putfile.save() - putfile.validate() - result = PutFile.objects.first() - self.assertTrue(putfile == result) - self.assertEqual(result.the_file.read(), text) - self.assertEqual(result.the_file.content_type, content_type) - result.the_file.delete() # Remove file from GridFS - PutFile.objects.delete() - - # 
Ensure file-like objects are stored - putfile = PutFile() - putstring = StringIO() - putstring.write(text) - putstring.seek(0) - putfile.the_file.put(putstring, content_type=content_type) - putfile.save() - putfile.validate() - result = PutFile.objects.first() - self.assertTrue(putfile == result) - self.assertEqual(result.the_file.read(), text) - self.assertEqual(result.the_file.content_type, content_type) - result.the_file.delete() - - streamfile = StreamFile() - streamfile.the_file.new_file(content_type=content_type) - streamfile.the_file.write(text) - streamfile.the_file.write(more_text) - streamfile.the_file.close() - streamfile.save() - streamfile.validate() - result = StreamFile.objects.first() - self.assertTrue(streamfile == result) - self.assertEqual(result.the_file.read(), text + more_text) - self.assertEqual(result.the_file.content_type, content_type) - result.the_file.seek(0) - self.assertEqual(result.the_file.tell(), 0) - self.assertEqual(result.the_file.read(len(text)), text) - self.assertEqual(result.the_file.tell(), len(text)) - self.assertEqual(result.the_file.read(len(more_text)), more_text) - self.assertEqual(result.the_file.tell(), len(text + more_text)) - result.the_file.delete() - - # Ensure deleted file returns None - self.assertTrue(result.the_file.read() == None) - - setfile = SetFile() - setfile.the_file = text - setfile.save() - setfile.validate() - result = SetFile.objects.first() - self.assertTrue(setfile == result) - self.assertEqual(result.the_file.read(), text) - - # Try replacing file with new one - result.the_file.replace(more_text) - result.save() - result.validate() - result = SetFile.objects.first() - self.assertTrue(setfile == result) - self.assertEqual(result.the_file.read(), more_text) - result.the_file.delete() - - PutFile.drop_collection() - StreamFile.drop_collection() - SetFile.drop_collection() - - # Make sure FileField is optional and not required - class DemoFile(Document): - the_file = FileField() - 
DemoFile.objects.create() - - def test_file_field_no_default(self): - - class GridDocument(Document): - the_file = FileField() - - GridDocument.drop_collection() - - with tempfile.TemporaryFile() as f: - f.write(b("Hello World!")) - f.flush() - - # Test without default - doc_a = GridDocument() - doc_a.save() - - doc_b = GridDocument.objects.with_id(doc_a.id) - doc_b.the_file.replace(f, filename='doc_b') - doc_b.save() - self.assertNotEqual(doc_b.the_file.grid_id, None) - - # Test it matches - doc_c = GridDocument.objects.with_id(doc_b.id) - self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id) - - # Test with default - doc_d = GridDocument(the_file=b('')) - doc_d.save() - - doc_e = GridDocument.objects.with_id(doc_d.id) - self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id) - - doc_e.the_file.replace(f, filename='doc_e') - doc_e.save() - - doc_f = GridDocument.objects.with_id(doc_e.id) - self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id) - - db = GridDocument._get_db() - grid_fs = gridfs.GridFS(db) - self.assertEqual(['doc_b', 'doc_e'], grid_fs.list()) - - def test_file_uniqueness(self): - """Ensure that each instance of a FileField is unique - """ - class TestFile(Document): - name = StringField() - the_file = FileField() - - # First instance - test_file = TestFile() - test_file.name = "Hello, World!" 
- test_file.the_file.put(b('Hello, World!')) - test_file.save() - - # Second instance - test_file_dupe = TestFile() - data = test_file_dupe.the_file.read() # Should be None - - self.assertTrue(test_file.name != test_file_dupe.name) - self.assertTrue(test_file.the_file.read() != data) - - TestFile.drop_collection() - - def test_file_boolean(self): - """Ensure that a boolean test of a FileField indicates its presence - """ - class TestFile(Document): - the_file = FileField() - - test_file = TestFile() - self.assertFalse(bool(test_file.the_file)) - test_file.the_file = b('Hello, World!') - test_file.the_file.content_type = 'text/plain' - test_file.save() - self.assertTrue(bool(test_file.the_file)) - - TestFile.drop_collection() - - def test_file_cmp(self): - """Test comparing against other types""" - class TestFile(Document): - the_file = FileField() - - test_file = TestFile() - self.assertFalse(test_file.the_file in [{"test": 1}]) - - def test_image_field(self): - if PY3: - raise SkipTest('PIL does not have Python 3 support') - - class TestImage(Document): - image = ImageField() - - TestImage.drop_collection() - - t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'r')) - t.save() - - t = TestImage.objects.first() - - self.assertEqual(t.image.format, 'PNG') - - w, h = t.image.size - self.assertEqual(w, 371) - self.assertEqual(h, 76) - - t.image.delete() - - def test_image_field_resize(self): - if PY3: - raise SkipTest('PIL does not have Python 3 support') - - class TestImage(Document): - image = ImageField(size=(185, 37)) - - TestImage.drop_collection() - - t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'r')) - t.save() - - t = TestImage.objects.first() - - self.assertEqual(t.image.format, 'PNG') - w, h = t.image.size - - self.assertEqual(w, 185) - self.assertEqual(h, 37) - - t.image.delete() - - def test_image_field_resize_force(self): - if PY3: - raise SkipTest('PIL does not have Python 3 support') - - class TestImage(Document): - image = 
ImageField(size=(185, 37, True)) - - TestImage.drop_collection() - - t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'r')) - t.save() - - t = TestImage.objects.first() - - self.assertEqual(t.image.format, 'PNG') - w, h = t.image.size - - self.assertEqual(w, 185) - self.assertEqual(h, 37) - - t.image.delete() - - def test_image_field_thumbnail(self): - if PY3: - raise SkipTest('PIL does not have Python 3 support') - - class TestImage(Document): - image = ImageField(thumbnail_size=(92, 18)) - - TestImage.drop_collection() - - t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'r')) - t.save() - - t = TestImage.objects.first() - - self.assertEqual(t.image.thumbnail.format, 'PNG') - self.assertEqual(t.image.thumbnail.width, 92) - self.assertEqual(t.image.thumbnail.height, 18) - - t.image.delete() - - def test_file_multidb(self): - register_connection('test_files', 'test_files') - class TestFile(Document): - name = StringField() - the_file = FileField(db_alias="test_files", - collection_name="macumba") - - TestFile.drop_collection() - - # delete old filesystem - get_db("test_files").macumba.files.drop() - get_db("test_files").macumba.chunks.drop() - - # First instance - test_file = TestFile() - test_file.name = "Hello, World!" - test_file.the_file.put(b('Hello, World!'), - name="hello.txt") - test_file.save() - - data = get_db("test_files").macumba.files.find_one() - self.assertEqual(data.get('name'), 'hello.txt') - - test_file = TestFile.objects.first() - self.assertEqual(test_file.the_file.read(), - b('Hello, World!')) - def test_geo_indexes(self): """Ensure that indexes are created automatically for GeoPointFields. 
""" diff --git a/tests/fields/file.py b/tests/fields/file.py new file mode 100644 index 0000000..17d9ec3 --- /dev/null +++ b/tests/fields/file.py @@ -0,0 +1,370 @@ +# -*- coding: utf-8 -*- +from __future__ import with_statement +import sys +sys.path[0:0] = [""] + +import datetime +import os +import unittest +import uuid +import tempfile + +from decimal import Decimal + +from bson import Binary, DBRef, ObjectId +import gridfs + +from nose.plugins.skip import SkipTest +from mongoengine import * +from mongoengine.connection import get_db +from mongoengine.base import _document_registry +from mongoengine.errors import NotRegistered +from mongoengine.python_support import PY3, b, StringIO, bin_type + +TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') + + +class FileTest(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + self.db = get_db() + + def tearDown(self): + self.db.drop_collection('fs.files') + self.db.drop_collection('fs.chunks') + + def test_file_field_optional(self): + # Make sure FileField is optional and not required + class DemoFile(Document): + the_file = FileField() + DemoFile.objects.create() + + def test_file_fields(self): + """Ensure that file fields can be written to and their data retrieved + """ + + class PutFile(Document): + the_file = FileField() + + PutFile.drop_collection() + + text = b('Hello, World!') + more_text = b('Foo Bar') + content_type = 'text/plain' + + putfile = PutFile() + putfile.the_file.put(text, content_type=content_type) + putfile.save() + putfile.validate() + result = PutFile.objects.first() + self.assertTrue(putfile == result) + self.assertEqual(result.the_file.read(), text) + self.assertEqual(result.the_file.content_type, content_type) + result.the_file.delete() # Remove file from GridFS + PutFile.objects.delete() + + # Ensure file-like objects are stored + PutFile.drop_collection() + + putfile = PutFile() + putstring = StringIO() + putstring.write(text) + putstring.seek(0) + 
putfile.the_file.put(putstring, content_type=content_type) + putfile.save() + putfile.validate() + result = PutFile.objects.first() + self.assertTrue(putfile == result) + self.assertEqual(result.the_file.read(), text) + self.assertEqual(result.the_file.content_type, content_type) + result.the_file.delete() + + def test_file_fields_stream(self): + """Ensure that file fields can be written to and their data retrieved + """ + class StreamFile(Document): + the_file = FileField() + + StreamFile.drop_collection() + + text = b('Hello, World!') + more_text = b('Foo Bar') + content_type = 'text/plain' + + streamfile = StreamFile() + streamfile.the_file.new_file(content_type=content_type) + streamfile.the_file.write(text) + streamfile.the_file.write(more_text) + streamfile.the_file.close() + streamfile.save() + streamfile.validate() + result = StreamFile.objects.first() + self.assertTrue(streamfile == result) + self.assertEqual(result.the_file.read(), text + more_text) + self.assertEqual(result.the_file.content_type, content_type) + result.the_file.seek(0) + self.assertEqual(result.the_file.tell(), 0) + self.assertEqual(result.the_file.read(len(text)), text) + self.assertEqual(result.the_file.tell(), len(text)) + self.assertEqual(result.the_file.read(len(more_text)), more_text) + self.assertEqual(result.the_file.tell(), len(text + more_text)) + result.the_file.delete() + + # Ensure deleted file returns None + self.assertTrue(result.the_file.read() == None) + + def test_file_fields_set(self): + + class SetFile(Document): + the_file = FileField() + + text = b('Hello, World!') + more_text = b('Foo Bar') + + SetFile.drop_collection() + + setfile = SetFile() + setfile.the_file = text + setfile.save() + + result = SetFile.objects.first() + self.assertTrue(setfile == result) + self.assertEqual(result.the_file.read(), text) + + # Try replacing file with new one + result.the_file.replace(more_text) + result.save() + result.validate() + result = SetFile.objects.first() + 
self.assertTrue(setfile == result) + self.assertEqual(result.the_file.read(), more_text) + result.the_file.delete() + + def test_file_field_no_default(self): + + class GridDocument(Document): + the_file = FileField() + + GridDocument.drop_collection() + + with tempfile.TemporaryFile() as f: + f.write(b("Hello World!")) + f.flush() + + # Test without default + doc_a = GridDocument() + doc_a.save() + + doc_b = GridDocument.objects.with_id(doc_a.id) + doc_b.the_file.replace(f, filename='doc_b') + doc_b.save() + self.assertNotEqual(doc_b.the_file.grid_id, None) + + # Test it matches + doc_c = GridDocument.objects.with_id(doc_b.id) + self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id) + + # Test with default + doc_d = GridDocument(the_file=b('')) + doc_d.save() + + doc_e = GridDocument.objects.with_id(doc_d.id) + self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id) + + doc_e.the_file.replace(f, filename='doc_e') + doc_e.save() + + doc_f = GridDocument.objects.with_id(doc_e.id) + self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id) + + db = GridDocument._get_db() + grid_fs = gridfs.GridFS(db) + self.assertEqual(['doc_b', 'doc_e'], grid_fs.list()) + + def test_file_uniqueness(self): + """Ensure that each instance of a FileField is unique + """ + class TestFile(Document): + name = StringField() + the_file = FileField() + + # First instance + test_file = TestFile() + test_file.name = "Hello, World!" 
+ test_file.the_file.put(b('Hello, World!')) + test_file.save() + + # Second instance + test_file_dupe = TestFile() + data = test_file_dupe.the_file.read() # Should be None + + self.assertTrue(test_file.name != test_file_dupe.name) + self.assertTrue(test_file.the_file.read() != data) + + TestFile.drop_collection() + + def test_file_saving(self): + """Ensure you can add meta data to file""" + + class Animal(Document): + genus = StringField() + family = StringField() + photo = FileField() + + Animal.drop_collection() + marmot = Animal(genus='Marmota', family='Sciuridae') + + marmot_photo = open(TEST_IMAGE_PATH, 'r') # Retrieve a photo from disk + marmot.photo.put(marmot_photo, content_type='image/jpeg', foo='bar') + marmot.photo.close() + marmot.save() + + marmot = Animal.objects.get() + self.assertEqual(marmot.photo.content_type, 'image/jpeg') + self.assertEqual(marmot.photo.foo, 'bar') + + def test_file_boolean(self): + """Ensure that a boolean test of a FileField indicates its presence + """ + class TestFile(Document): + the_file = FileField() + TestFile.drop_collection() + + test_file = TestFile() + self.assertFalse(bool(test_file.the_file)) + test_file.the_file.put(b('Hello, World!'), content_type='text/plain') + test_file.save() + self.assertTrue(bool(test_file.the_file)) + + test_file = TestFile.objects.first() + self.assertEqual(test_file.the_file.content_type, "text/plain") + + def test_file_cmp(self): + """Test comparing against other types""" + class TestFile(Document): + the_file = FileField() + + test_file = TestFile() + self.assertFalse(test_file.the_file in [{"test": 1}]) + + def test_image_field(self): + if PY3: + raise SkipTest('PIL does not have Python 3 support') + + class TestImage(Document): + image = ImageField() + + TestImage.drop_collection() + + t = TestImage() + t.image.put(open(TEST_IMAGE_PATH, 'r')) + t.save() + + t = TestImage.objects.first() + + self.assertEqual(t.image.format, 'PNG') + + w, h = t.image.size + self.assertEqual(w, 371) + 
self.assertEqual(h, 76) + + t.image.delete() + + def test_image_field_resize(self): + if PY3: + raise SkipTest('PIL does not have Python 3 support') + + class TestImage(Document): + image = ImageField(size=(185, 37)) + + TestImage.drop_collection() + + t = TestImage() + t.image.put(open(TEST_IMAGE_PATH, 'r')) + t.save() + + t = TestImage.objects.first() + + self.assertEqual(t.image.format, 'PNG') + w, h = t.image.size + + self.assertEqual(w, 185) + self.assertEqual(h, 37) + + t.image.delete() + + def test_image_field_resize_force(self): + if PY3: + raise SkipTest('PIL does not have Python 3 support') + + class TestImage(Document): + image = ImageField(size=(185, 37, True)) + + TestImage.drop_collection() + + t = TestImage() + t.image.put(open(TEST_IMAGE_PATH, 'r')) + t.save() + + t = TestImage.objects.first() + + self.assertEqual(t.image.format, 'PNG') + w, h = t.image.size + + self.assertEqual(w, 185) + self.assertEqual(h, 37) + + t.image.delete() + + def test_image_field_thumbnail(self): + if PY3: + raise SkipTest('PIL does not have Python 3 support') + + class TestImage(Document): + image = ImageField(thumbnail_size=(92, 18)) + + TestImage.drop_collection() + + t = TestImage() + t.image.put(open(TEST_IMAGE_PATH, 'r')) + t.save() + + t = TestImage.objects.first() + + self.assertEqual(t.image.thumbnail.format, 'PNG') + self.assertEqual(t.image.thumbnail.width, 92) + self.assertEqual(t.image.thumbnail.height, 18) + + t.image.delete() + + def test_file_multidb(self): + register_connection('test_files', 'test_files') + + class TestFile(Document): + name = StringField() + the_file = FileField(db_alias="test_files", + collection_name="macumba") + + TestFile.drop_collection() + + # delete old filesystem + get_db("test_files").macumba.files.drop() + get_db("test_files").macumba.chunks.drop() + + # First instance + test_file = TestFile() + test_file.name = "Hello, World!" 
+ test_file.the_file.put(b('Hello, World!'), + name="hello.txt") + test_file.save() + + data = get_db("test_files").macumba.files.find_one() + self.assertEqual(data.get('name'), 'hello.txt') + + test_file = TestFile.objects.first() + self.assertEqual(test_file.the_file.read(), + b('Hello, World!')) + +if __name__ == '__main__': + unittest.main() diff --git a/tests/document/mongoengine.png b/tests/fields/mongoengine.png similarity index 100% rename from tests/document/mongoengine.png rename to tests/fields/mongoengine.png From 286beca6c5798c0d5c25e7981a8a0c448743eb58 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 21 Dec 2012 12:07:15 +0000 Subject: [PATCH 078/464] Added Marcelo Anton to authors #152 --- AUTHORS | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index b49ddab..4b3a297 100644 --- a/AUTHORS +++ b/AUTHORS @@ -131,4 +131,5 @@ that much better: * Jorge Bastida * Stefan Wójcik * Pete Campton - * Martyn Smith \ No newline at end of file + * Martyn Smith + * Marcelo Anton \ No newline at end of file From 0c2fb6807e425638cc92a92a0ef058932a9b1c05 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 21 Dec 2012 12:14:32 +0000 Subject: [PATCH 079/464] Added Aleksey Porfirov to AUTHORS #151 --- AUTHORS | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 4b3a297..989fd68 100644 --- a/AUTHORS +++ b/AUTHORS @@ -132,4 +132,5 @@ that much better: * Stefan Wójcik * Pete Campton * Martyn Smith - * Marcelo Anton \ No newline at end of file + * Marcelo Anton + * Aleksey Porfirov \ No newline at end of file From bf74d7537cafc9f9baf15a87eb8c2d7e569dd5c9 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 21 Dec 2012 16:20:01 +0000 Subject: [PATCH 080/464] Fix Django timezone support - update field for callable #151 --- mongoengine/fields.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 1e1a5ce..f6c0311 100644 --- 
a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -300,6 +300,8 @@ class DateTimeField(BaseField): return value if isinstance(value, datetime.date): return datetime.datetime(value.year, value.month, value.day) + if callable(value): + return value() # Attempt to parse a datetime: # value = smart_str(value) From 3aff4610394e31c64ac2a3ed2f256fd1f3e90da6 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 21 Dec 2012 16:29:27 +0000 Subject: [PATCH 081/464] Fix test discovery --- tests/document/indexes.py | 37 +++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/tests/document/indexes.py b/tests/document/indexes.py index 285d8c6..445cfe2 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -39,19 +39,19 @@ class IndexesTest(unittest.TestCase): continue self.db.drop_collection(collection) - def ztest_indexes_document(self, ): + def test_indexes_document(self): """Ensure that indexes are used when meta[indexes] is specified for Documents """ - self.index_test(Document) + self._index_test(Document) - def test_indexes_dynamic_document(self, ): + def test_indexes_dynamic_document(self): """Ensure that indexes are used when meta[indexes] is specified for Dynamic Documents """ - self.index_test(DynamicDocument) + self._index_test(DynamicDocument) - def index_test(self, InheritFrom): + def _index_test(self, InheritFrom): class BlogPost(InheritFrom): date = DateTimeField(db_field='addDate', default=datetime.now) @@ -78,19 +78,7 @@ class IndexesTest(unittest.TestCase): for expected in expected_specs: self.assertTrue(expected['fields'] in info) - def test_indexes_document_inheritance(self): - """Ensure that indexes are used when meta[indexes] is specified for - Documents - """ - self.index_test_inheritance(Document) - - def test_indexes_dynamic_document_inheritance(self): - """Ensure that indexes are used when meta[indexes] is specified for - Dynamic Documents - """ - 
self.index_test_inheritance(DynamicDocument) - - def index_test_inheritance(self, InheritFrom): + def _index_test_inheritance(self, InheritFrom): class BlogPost(InheritFrom): date = DateTimeField(db_field='addDate', default=datetime.now) @@ -137,6 +125,18 @@ class IndexesTest(unittest.TestCase): for expected in expected_specs: self.assertTrue(expected['fields'] in info) + def test_indexes_document_inheritance(self): + """Ensure that indexes are used when meta[indexes] is specified for + Documents + """ + self._index_test_inheritance(Document) + + def test_indexes_dynamic_document_inheritance(self): + """Ensure that indexes are used when meta[indexes] is specified for + Dynamic Documents + """ + self._index_test_inheritance(DynamicDocument) + def test_inherited_index(self): """Ensure index specs are inhertited correctly""" @@ -301,6 +301,7 @@ class IndexesTest(unittest.TestCase): meta = { 'indexes': ['name'], } + Person.drop_collection() Person(name="test", user_guid='123').save() From 1cdf71b647f31a8c7cf05b0256a9795038c8808d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 21 Dec 2012 16:35:09 +0000 Subject: [PATCH 082/464] Simplified Q objects Removed QueryTreeTransformerVisitor (#98) (#171) --- docs/changelog.rst | 1 + mongoengine/queryset/visitor.py | 92 ++------------------------------- tests/queryset/visitor.py | 72 +++++++++++++++++--------- 3 files changed, 53 insertions(+), 112 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index f934f5e..b9ab42c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -24,6 +24,7 @@ Changes in 0.8.X - Added support for multiple slices and made slicing chainable. 
(#170) (#190) (#191) - Fixed GridFSProxy __getattr__ behaviour (#196) - Fix Django timezone support (#151) +- Simplified Q objects, removed QueryTreeTransformerVisitor (#98) (#171) Changes in 0.7.9 ================ diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index 94d6a5e..8932a54 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -55,57 +55,6 @@ class SimplificationVisitor(QNodeVisitor): return combined_query -class QueryTreeTransformerVisitor(QNodeVisitor): - """Transforms the query tree in to a form that may be used with MongoDB. - """ - - def visit_combination(self, combination): - if combination.operation == combination.AND: - # MongoDB doesn't allow us to have too many $or operations in our - # queries, so the aim is to move the ORs up the tree to one - # 'master' $or. Firstly, we must find all the necessary parts (part - # of an AND combination or just standard Q object), and store them - # separately from the OR parts. - or_groups = [] - and_parts = [] - for node in combination.children: - if isinstance(node, QCombination): - if node.operation == node.OR: - # Any of the children in an $or component may cause - # the query to succeed - or_groups.append(node.children) - elif node.operation == node.AND: - and_parts.append(node) - elif isinstance(node, Q): - and_parts.append(node) - - # Now we combine the parts into a usable query. AND together all of - # the necessary parts. Then for each $or part, create a new query - # that ANDs the necessary part with the $or part. - clauses = [] - for or_group in product(*or_groups): - q_object = reduce(lambda a, b: a & b, and_parts, Q()) - q_object = reduce(lambda a, b: a & b, or_group, q_object) - clauses.append(q_object) - # Finally, $or the generated clauses in to one query. Each of the - # clauses is sufficient for the query to succeed. 
- return reduce(lambda a, b: a | b, clauses, Q()) - - if combination.operation == combination.OR: - children = [] - # Crush any nested ORs in to this combination as MongoDB doesn't - # support nested $or operations - for node in combination.children: - if (isinstance(node, QCombination) and - node.operation == combination.OR): - children += node.children - else: - children.append(node) - combination.children = children - - return combination - - class QueryCompilerVisitor(QNodeVisitor): """Compiles the nodes in a query tree to a PyMongo-compatible query dictionary. @@ -115,45 +64,14 @@ class QueryCompilerVisitor(QNodeVisitor): self.document = document def visit_combination(self, combination): + operator = "$and" if combination.operation == combination.OR: - return {'$or': combination.children} - elif combination.operation == combination.AND: - return self._mongo_query_conjunction(combination.children) - return combination + operator = "$or" + return {operator: combination.children} def visit_query(self, query): return transform.query(self.document, **query.query) - def _mongo_query_conjunction(self, queries): - """Merges Mongo query dicts - effectively &ing them together. 
- """ - combined_query = {} - for query in queries: - for field, ops in query.items(): - if field not in combined_query: - combined_query[field] = ops - else: - # The field is already present in the query the only way - # we can merge is if both the existing value and the new - # value are operation dicts, reject anything else - if (not isinstance(combined_query[field], dict) or - not isinstance(ops, dict)): - message = 'Conflicting values for ' + field - raise InvalidQueryError(message) - - current_ops = set(combined_query[field].keys()) - new_ops = set(ops.keys()) - # Make sure that the same operation isn't applied more than - # once to a single field - intersection = current_ops.intersection(new_ops) - if intersection: - msg = 'Duplicate query conditions: ' - raise InvalidQueryError(msg + ', '.join(intersection)) - - # Right! We've got two non-overlapping dicts of operations! - combined_query[field].update(copy.deepcopy(ops)) - return combined_query - class QNode(object): """Base class for nodes in query trees. 
@@ -164,7 +82,6 @@ class QNode(object): def to_query(self, document): query = self.accept(SimplificationVisitor()) - query = query.accept(QueryTreeTransformerVisitor()) query = query.accept(QueryCompilerVisitor(document)) return query @@ -205,7 +122,8 @@ class QCombination(QNode): # If the child is a combination of the same type, we can merge its # children directly into this combinations children if isinstance(node, QCombination) and node.operation == operation: - self.children += node.children + # self.children += node.children + self.children.append(node) else: self.children.append(node) diff --git a/tests/queryset/visitor.py b/tests/queryset/visitor.py index 82a9913..4af39e8 100644 --- a/tests/queryset/visitor.py +++ b/tests/queryset/visitor.py @@ -115,29 +115,31 @@ class QTest(unittest.TestCase): x = IntField() y = BooleanField() + TestDoc.drop_collection() + query = (Q(x__gt=0) | Q(x__exists=False)) query &= Q(x__lt=100) - self.assertEqual(query.to_query(TestDoc), { - '$or': [ - {'x': {'$lt': 100, '$gt': 0}}, - {'x': {'$lt': 100, '$exists': False}}, - ] + self.assertEqual(query.to_query(TestDoc), {'$and': [ + {'$or': [{'x': {'$gt': 0}}, + {'x': {'$exists': False}}]}, + {'x': {'$lt': 100}}] }) q1 = (Q(x__gt=0) | Q(x__exists=False)) q2 = (Q(x__lt=100) | Q(y=True)) query = (q1 & q2).to_query(TestDoc) - self.assertEqual(['$or'], query.keys()) - conditions = [ - {'x': {'$lt': 100, '$gt': 0}}, - {'x': {'$lt': 100, '$exists': False}}, - {'x': {'$gt': 0}, 'y': True}, - {'x': {'$exists': False}, 'y': True}, - ] - self.assertEqual(len(conditions), len(query['$or'])) - for condition in conditions: - self.assertTrue(condition in query['$or']) + TestDoc(x=101).save() + TestDoc(x=10).save() + TestDoc(y=True).save() + + self.assertEqual(query, + {'$and': [ + {'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]}, + {'$or': [{'x': {'$lt': 100}}, {'y': True}]} + ]}) + + self.assertEqual(2, TestDoc.objects(q1 & q2).count()) def test_or_and_or_combination(self): """Ensure that 
Q-objects handle ORing ANDed ORed components. :) @@ -146,20 +148,40 @@ class QTest(unittest.TestCase): x = IntField() y = BooleanField() + TestDoc.drop_collection() + TestDoc(x=-1, y=True).save() + TestDoc(x=101, y=True).save() + TestDoc(x=99, y=False).save() + TestDoc(x=101, y=False).save() + q1 = (Q(x__gt=0) & (Q(y=True) | Q(y__exists=False))) q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False))) query = (q1 | q2).to_query(TestDoc) - self.assertEqual(['$or'], query.keys()) - conditions = [ - {'x': {'$gt': 0}, 'y': True}, - {'x': {'$gt': 0}, 'y': {'$exists': False}}, - {'x': {'$lt': 100}, 'y':False}, - {'x': {'$lt': 100}, 'y': {'$exists': False}}, - ] - self.assertEqual(len(conditions), len(query['$or'])) - for condition in conditions: - self.assertTrue(condition in query['$or']) + self.assertEqual(query, + {'$or': [ + {'$and': [{'x': {'$gt': 0}}, + {'$or': [{'y': True}, {'y': {'$exists': False}}]}]}, + {'$and': [{'x': {'$lt': 100}}, + {'$or': [{'y': False}, {'y': {'$exists': False}}]}]} + ]} + ) + + self.assertEqual(2, TestDoc.objects(q1 | q2).count()) + + def test_multiple_occurence_in_field(self): + class Test(Document): + name = StringField(max_length=40) + title = StringField(max_length=40) + + q1 = Q(name__contains='te') | Q(title__contains='te') + q2 = Q(name__contains='12') | Q(title__contains='12') + + q3 = q1 & q2 + + query = q3.to_query(Test) + self.assertEqual(query["$and"][0], q1.to_query(Test)) + self.assertEqual(query["$and"][1], q2.to_query(Test)) def test_q_clone(self): From b9e0f525262111433158db16307424086efb71e0 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 21 Dec 2012 17:09:10 +0000 Subject: [PATCH 083/464] FileFields now copyable (#198) --- docs/changelog.rst | 1 + mongoengine/fields.py | 14 +++++++++++--- tests/fields/file.py | 39 ++++++++++++++++++++++++++------------- 3 files changed, 38 insertions(+), 16 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b9ab42c..c649303 100644 --- a/docs/changelog.rst +++ 
b/docs/changelog.rst @@ -25,6 +25,7 @@ Changes in 0.8.X - Fixed GridFSProxy __getattr__ behaviour (#196) - Fix Django timezone support (#151) - Simplified Q objects, removed QueryTreeTransformerVisitor (#98) (#171) +- FileFields now copyable (#198) Changes in 0.7.9 ================ diff --git a/mongoengine/fields.py b/mongoengine/fields.py index f6c0311..5f11ae3 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -986,14 +986,22 @@ class GridFSProxy(object): self_dict['_fs'] = None return self_dict + def __copy__(self): + copied = GridFSProxy() + copied.__dict__.update(self.__getstate__()) + return copied + + def __deepcopy__(self, memo): + return self.__copy__() + def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self.grid_id) def __eq__(self, other): if isinstance(other, GridFSProxy): - return ((self.grid_id == other.grid_id) and - (self.collection_name == other.collection_name) and - (self.db_alias == other.db_alias)) + return ((self.grid_id == other.grid_id) and + (self.collection_name == other.collection_name) and + (self.db_alias == other.db_alias)) else: return False diff --git a/tests/fields/file.py b/tests/fields/file.py index 17d9ec3..a39dadb 100644 --- a/tests/fields/file.py +++ b/tests/fields/file.py @@ -3,23 +3,17 @@ from __future__ import with_statement import sys sys.path[0:0] = [""] -import datetime +import copy import os import unittest -import uuid import tempfile -from decimal import Decimal - -from bson import Binary, DBRef, ObjectId import gridfs from nose.plugins.skip import SkipTest from mongoengine import * from mongoengine.connection import get_db -from mongoengine.base import _document_registry -from mongoengine.errors import NotRegistered -from mongoengine.python_support import PY3, b, StringIO, bin_type +from mongoengine.python_support import PY3, b, StringIO TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') @@ -50,13 +44,12 @@ class FileTest(unittest.TestCase): 
PutFile.drop_collection() text = b('Hello, World!') - more_text = b('Foo Bar') content_type = 'text/plain' putfile = PutFile() putfile.the_file.put(text, content_type=content_type) putfile.save() - putfile.validate() + result = PutFile.objects.first() self.assertTrue(putfile == result) self.assertEqual(result.the_file.read(), text) @@ -73,7 +66,7 @@ class FileTest(unittest.TestCase): putstring.seek(0) putfile.the_file.put(putstring, content_type=content_type) putfile.save() - putfile.validate() + result = PutFile.objects.first() self.assertTrue(putfile == result) self.assertEqual(result.the_file.read(), text) @@ -98,7 +91,7 @@ class FileTest(unittest.TestCase): streamfile.the_file.write(more_text) streamfile.the_file.close() streamfile.save() - streamfile.validate() + result = StreamFile.objects.first() self.assertTrue(streamfile == result) self.assertEqual(result.the_file.read(), text + more_text) @@ -135,7 +128,7 @@ class FileTest(unittest.TestCase): # Try replacing file with new one result.the_file.replace(more_text) result.save() - result.validate() + result = SetFile.objects.first() self.assertTrue(setfile == result) self.assertEqual(result.the_file.read(), more_text) @@ -366,5 +359,25 @@ class FileTest(unittest.TestCase): self.assertEqual(test_file.the_file.read(), b('Hello, World!')) + def test_copyable(self): + class PutFile(Document): + the_file = FileField() + + PutFile.drop_collection() + + text = b('Hello, World!') + content_type = 'text/plain' + + putfile = PutFile() + putfile.the_file.put(text, content_type=content_type) + putfile.save() + + class TestFile(Document): + name = StringField() + + self.assertEqual(putfile, copy.copy(putfile)) + self.assertEqual(putfile, copy.deepcopy(putfile)) + + if __name__ == '__main__': unittest.main() From 09a5f5c8f33f036fa7c12caf614f799bdc4046c2 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 3 Jan 2013 12:56:42 +0000 Subject: [PATCH 084/464] Added note for django and sites config issues --- docs/django.rst | 
4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/django.rst b/docs/django.rst index 144baab..a4f0560 100644 --- a/docs/django.rst +++ b/docs/django.rst @@ -10,6 +10,10 @@ In your **settings.py** file, ignore the standard database settings (unless you also plan to use the ORM in your project), and instead call :func:`~mongoengine.connect` somewhere in the settings module. +.. note :: If getting an ``ImproperlyConfigured: settings.DATABASES is + improperly configured`` error you may need to remove + ``django.contrib.sites`` from ``INSTALLED_APPS`` in settings.py. + Authentication ============== MongoEngine includes a Django authentication backend, which uses MongoDB. The From 9bbd8dbe624c385c68404834884f20b304b5b64f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 4 Jan 2013 09:41:08 +0000 Subject: [PATCH 085/464] Querysets now return clones and are no longer edit in place Fixes #56 --- docs/changelog.rst | 1 + docs/upgrade.rst | 22 + mongoengine/connection.py | 6 +- mongoengine/queryset/queryset.py | 1182 ++++++++++++++++-------------- tests/queryset/queryset.py | 11 +- tests/queryset/visitor.py | 4 +- 6 files changed, 656 insertions(+), 570 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index c649303..4fd3e14 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -26,6 +26,7 @@ Changes in 0.8.X - Fix Django timezone support (#151) - Simplified Q objects, removed QueryTreeTransformerVisitor (#98) (#171) - FileFields now copyable (#198) +- Querysets now return clones and are no longer edit in place (#56) Changes in 0.7.9 ================ diff --git a/docs/upgrade.rst b/docs/upgrade.rst index bf48527..9c6c9a9 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -56,6 +56,28 @@ you will need to declare :attr:`allow_inheritance` in the meta data like so: :: meta = {'allow_inheritance': True} +Querysets +~~~~~~~~~ + +Querysets now return clones and should no longer be considered editable in +place. 
This brings us in line with how Django's querysets work and removes a +long running gotcha. If you edit your querysets inplace you will have to +update your code like so: :: + + # Old code: + mammals = Animal.objects(type="mammal") + mammals.filter(order="Carnivora") # Returns a cloned queryset that isn't assigned to anything - so this will break in 0.8 + [m for m in mammals] # This will return all mammals in 0.8 as the 2nd filter returned a new queryset + + # Update example a) assign queryset after a change: + mammals = Animal.objects(type="mammal") + carnivores = mammals.filter(order="Carnivora") # Reassign the new queryset so fitler can be applied + [m for m in carnivores] # This will return all carnivores + + # Update example b) chain the queryset: + mammals = Animal.objects(type="mammal").filter(order="Carnivora") # The final queryset is assgined to mammals + [m for m in mammals] # This will return all carnivores + Indexes ------- diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 1ccbbe3..87308ba 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -28,8 +28,10 @@ def register_connection(alias, name, host='localhost', port=27017, :param name: the name of the specific database to use :param host: the host name of the :program:`mongod` instance to connect to :param port: the port that the :program:`mongod` instance is running on - :param is_slave: whether the connection can act as a slave ** Depreciated pymongo 2.0.1+ - :param read_preference: The read preference for the collection ** Added pymongo 2.1 + :param is_slave: whether the connection can act as a slave + ** Depreciated pymongo 2.0.1+ + :param read_preference: The read preference for the collection + ** Added pymongo 2.1 :param slaves: a list of aliases of slave connections; each of these must be a registered connection that has :attr:`is_slave` set to ``True`` :param username: username to authenticate with diff --git a/mongoengine/queryset/queryset.py 
b/mongoengine/queryset/queryset.py index 3ea9f23..239975f 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -42,7 +42,6 @@ class QuerySet(object): providing :class:`~mongoengine.Document` objects as the results. """ __dereference = False - __none = False def __init__(self, document, collection): self._document = document @@ -60,6 +59,7 @@ class QuerySet(object): self._read_preference = None self._iter = False self._scalar = [] + self._none = False self._as_pymongo = False self._as_pymongo_coerce = False @@ -71,35 +71,9 @@ class QuerySet(object): self._cursor_obj = None self._limit = None self._skip = None + self._slice = None self._hint = -1 # Using -1 as None is a valid value for hint - def clone(self): - """Creates a copy of the current - :class:`~mongoengine.queryset.QuerySet` - - .. versionadded:: 0.5 - """ - c = self.__class__(self._document, self._collection_obj) - - copy_props = ('_initial_query', '_query_obj', '_where_clause', - '_loaded_fields', '_ordering', '_snapshot', - '_timeout', '_limit', '_skip', '_slave_okay', '_hint', - '_read_preference') - - for prop in copy_props: - val = getattr(self, prop) - setattr(c, prop, copy.deepcopy(val)) - - return c - - @property - def _query(self): - if self._mongo_query is None: - self._mongo_query = self._query_obj.to_query(self._document) - if self._class_check: - self._mongo_query.update(self._initial_query) - return self._mongo_query - def __call__(self, q_obj=None, class_check=True, slave_okay=False, read_preference=None, **query): """Filter the selected documents by calling the @@ -121,87 +95,94 @@ class QuerySet(object): if q_obj: # make sure proper query object is passed if not isinstance(q_obj, QNode): - raise InvalidQueryError('Not a query object: %s. Did you intend to use key=value?' % q_obj) + msg = ("Not a query object: %s. " + "Did you intend to use key=value?" 
% q_obj) + raise InvalidQueryError(msg) query &= q_obj - self._query_obj &= query - self._mongo_query = None - self._cursor_obj = None + + queryset = self.clone() + queryset._query_obj &= query + queryset._mongo_query = None + queryset._cursor_obj = None if read_preference is not None: - self.read_preference(read_preference) - self._class_check = class_check + queryset.read_preference(read_preference) + queryset._class_check = class_check + return queryset + + + def __iter__(self): + """Support iterator protocol""" + self.rewind() return self - def filter(self, *q_objs, **query): - """An alias of :meth:`~mongoengine.queryset.QuerySet.__call__` + def __len__(self): + return self.count() + + def __getitem__(self, key): + """Support skip and limit using getitem and slicing syntax. """ - return self.__call__(*q_objs, **query) + queryset = self.clone() + + # Slice provided + if isinstance(key, slice): + try: + queryset._cursor_obj = queryset._cursor[key] + queryset._slice = key + queryset._skip, queryset._limit = key.start, key.stop + except IndexError, err: + # PyMongo raises an error if key.start == key.stop, catch it, + # bin it, kill it. + start = key.start or 0 + if start >= 0 and key.stop >= 0 and key.step is None: + if start == key.stop: + queryset.limit(0) + queryset._skip = key.start + queryset._limit = key.stop - start + return queryset + raise err + # Allow further QuerySet modifications to be performed + return queryset + # Integer index provided + elif isinstance(key, int): + if queryset._scalar: + return queryset._get_scalar( + queryset._document._from_son(queryset._cursor[key])) + if queryset._as_pymongo: + return queryset._get_as_pymongo(queryset._cursor.next()) + return queryset._document._from_son(queryset._cursor[key]) + raise AttributeError + + def __repr__(self): + """Provides the string representation of the QuerySet + + .. versionchanged:: 0.6.13 Now doesnt modify the cursor + """ + + if self._iter: + return '.. queryset mid-iteration ..' 
+ + data = [] + for i in xrange(REPR_OUTPUT_SIZE + 1): + try: + data.append(self.next()) + except StopIteration: + break + if len(data) > REPR_OUTPUT_SIZE: + data[-1] = "...(remaining elements truncated)..." + + self.rewind() + return repr(data) + + # Core functions def all(self): """Returns all documents.""" return self.__call__() - def ensure_index(self, **kwargs): - """Deprecated use :func:`~Document.ensure_index`""" - msg = ("Doc.objects()._ensure_index() is deprecated. " - "Use Doc.ensure_index() instead.") - warnings.warn(msg, DeprecationWarning) - self._document.__class__.ensure_index(**kwargs) - return self - - def _ensure_indexes(self): - """Deprecated use :func:`~Document.ensure_indexes`""" - msg = ("Doc.objects()._ensure_indexes() is deprecated. " - "Use Doc.ensure_indexes() instead.") - warnings.warn(msg, DeprecationWarning) - self._document.__class__.ensure_indexes() - - @property - def _collection(self): - """Property that returns the collection object. This allows us to - perform operations only if the collection is accessed. 
+ def filter(self, *q_objs, **query): + """An alias of :meth:`~mongoengine.queryset.QuerySet.__call__` """ - return self._collection_obj - - @property - def _cursor_args(self): - cursor_args = { - 'snapshot': self._snapshot, - 'timeout': self._timeout, - 'slave_okay': self._slave_okay, - } - if self._read_preference is not None: - cursor_args['read_preference'] = self._read_preference - if self._loaded_fields: - cursor_args['fields'] = self._loaded_fields.as_dict() - return cursor_args - - @property - def _cursor(self): - if self._cursor_obj is None: - - self._cursor_obj = self._collection.find(self._query, - **self._cursor_args) - # Apply where clauses to cursor - if self._where_clause: - self._cursor_obj.where(self._where_clause) - - if self._ordering: - # Apply query ordering - self._cursor_obj.sort(self._ordering) - elif self._document._meta['ordering']: - # Otherwise, apply the ordering from the document model - self.order_by(*self._document._meta['ordering']) - self._cursor_obj.sort(self._ordering) - - if self._limit is not None: - self._cursor_obj.limit(self._limit - (self._skip or 0)) - - if self._skip is not None: - self._cursor_obj.skip(self._skip) - - if self._hint != -1: - self._cursor_obj.hint(self._hint) - return self._cursor_obj + return self.__call__(*q_objs, **query) def get(self, *q_objs, **query): """Retrieve the the matching object raising @@ -212,22 +193,29 @@ class QuerySet(object): .. versionadded:: 0.3 """ - self.limit(2) - self.__call__(*q_objs, **query) + queryset = self.__call__(*q_objs, **query) + queryset = queryset.limit(2) try: - result = self.next() + result = queryset.next() except StopIteration: msg = ("%s matching query does not exist." 
- % self._document._class_name) - raise self._document.DoesNotExist(msg) + % queryset._document._class_name) + raise queryset._document.DoesNotExist(msg) try: - self.next() + queryset.next() except StopIteration: return result - self.rewind() - message = u'%d items returned, instead of 1' % self.count() - raise self._document.MultipleObjectsReturned(message) + queryset.rewind() + message = u'%d items returned, instead of 1' % queryset.count() + raise queryset._document.MultipleObjectsReturned(message) + + def create(self, **kwargs): + """Create new object. Returns the saved object instance. + + .. versionadded:: 0.4 + """ + return self._document(**kwargs).save() def get_or_create(self, write_options=None, auto_save=True, *q_objs, **query): @@ -277,20 +265,12 @@ class QuerySet(object): doc.save(write_options=write_options) return doc, True - def create(self, **kwargs): - """Create new object. Returns the saved object instance. - - .. versionadded:: 0.4 - """ - doc = self._document(**kwargs) - doc.save() - return doc - def first(self): """Retrieve the first object matching the query. """ + queryset = self.clone() try: - result = self[0] + result = queryset[0] except IndexError: result = None return result @@ -367,6 +347,117 @@ class QuerySet(object): self._document, documents=results, loaded=True) return return_one and results[0] or results + def count(self): + """Count the selected elements in the query. + """ + if self._limit == 0: + return 0 + return self._cursor.count(with_limit_and_skip=True) + + def delete(self, safe=False): + """Delete the documents matched by the query. 
+ + :param safe: check if the operation succeeded before returning + """ + queryset = self.clone() + doc = queryset._document + + # Handle deletes where skips or limits have been applied + if queryset._skip or queryset._limit: + for doc in queryset: + doc.delete() + return + + delete_rules = doc._meta.get('delete_rules') or {} + # Check for DENY rules before actually deleting/nullifying any other + # references + for rule_entry in delete_rules: + document_cls, field_name = rule_entry + rule = doc._meta['delete_rules'][rule_entry] + if rule == DENY and document_cls.objects( + **{field_name + '__in': self}).count() > 0: + msg = ("Could not delete document (%s.%s refers to it)" + % (document_cls.__name__, field_name)) + raise OperationError(msg) + + for rule_entry in delete_rules: + document_cls, field_name = rule_entry + rule = doc._meta['delete_rules'][rule_entry] + if rule == CASCADE: + ref_q = document_cls.objects(**{field_name + '__in': self}) + ref_q_count = ref_q.count() + if (doc != document_cls and ref_q_count > 0 + or (doc == document_cls and ref_q_count > 0)): + ref_q.delete(safe=safe) + elif rule == NULLIFY: + document_cls.objects(**{field_name + '__in': self}).update( + safe_update=safe, + **{'unset__%s' % field_name: 1}) + elif rule == PULL: + document_cls.objects(**{field_name + '__in': self}).update( + safe_update=safe, + **{'pull_all__%s' % field_name: self}) + + queryset._collection.remove(queryset._query, safe=safe) + + def update(self, safe_update=True, upsert=False, multi=True, + write_options=None, **update): + """Perform an atomic update on the fields matched by the query. When + ``safe_update`` is used, the number of affected documents is returned. + + :param safe_update: check if the operation succeeded before returning + :param upsert: Any existing document with that "_id" is overwritten. + :param write_options: extra keyword arguments for + :meth:`~pymongo.collection.Collection.update` + + .. 
versionadded:: 0.2 + """ + if not update: + raise OperationError("No update parameters, would remove data") + + if not write_options: + write_options = {} + + queryset = self.clone() + query = queryset._query + update = transform.update(queryset._document, **update) + + # If doing an atomic upsert on an inheritable class + # then ensure we add _cls to the update operation + if upsert and '_cls' in query: + if '$set' in update: + update["$set"]["_cls"] = queryset._document._class_name + else: + update["$set"] = {"_cls": queryset._document._class_name} + + try: + ret = queryset._collection.update(query, update, multi=multi, + upsert=upsert, safe=safe_update, + **write_options) + if ret is not None and 'n' in ret: + return ret['n'] + except pymongo.errors.OperationFailure, err: + if unicode(err) == u'multi not coded yet': + message = u'update() method requires MongoDB 1.1.3+' + raise OperationError(message) + raise OperationError(u'Update failed (%s)' % unicode(err)) + + def update_one(self, safe_update=True, upsert=False, write_options=None, + **update): + """Perform an atomic update on first field matched by the query. When + ``safe_update`` is used, the number of affected documents is returned. + + :param safe_update: check if the operation succeeded before returning + :param upsert: Any existing document with that "_id" is overwritten. + :param write_options: extra keyword arguments for + :meth:`~pymongo.collection.Collection.update` + :param update: Django-style update keyword arguments + + .. versionadded:: 0.2 + """ + return self.update(safe_update=True, upsert=upsert, multi=False, + write_options=None, **update) + def with_id(self, object_id): """Retrieve the object matching the id provided. Uses `object_id` only and raises InvalidQueryError if a filter has been applied. @@ -375,10 +466,11 @@ class QuerySet(object): .. 
versionchanged:: 0.6 Raises InvalidQueryError if filter has been set """ - if not self._query_obj.empty: + queryset = self.clone() + if not queryset._query_obj.empty: msg = "Cannot use a filter whilst using `with_id`" raise InvalidQueryError(msg) - return self.filter(pk=object_id).first() + return queryset.filter(pk=object_id).first() def in_bulk(self, object_ids): """Retrieve a set of documents by their ids. @@ -406,139 +498,48 @@ class QuerySet(object): return doc_map - def next(self): - """Wrap the result in a :class:`~mongoengine.Document` object. - """ - self._iter = True - try: - if self._limit == 0 or self.__none: - raise StopIteration - if self._scalar: - return self._get_scalar(self._document._from_son( - self._cursor.next())) - if self._as_pymongo: - return self._get_as_pymongo(self._cursor.next()) - - return self._document._from_son(self._cursor.next()) - except StopIteration, e: - self.rewind() - raise e - - def rewind(self): - """Rewind the cursor to its unevaluated state. - - .. versionadded:: 0.3 - """ - self._iter = False - self._cursor.rewind() - def none(self): """Helper that just returns a list""" - self.__none = True - return self + queryset = self.clone() + queryset._none = True + return queryset - def count(self): - """Count the selected elements in the query. + def clone(self): + """Creates a copy of the current + :class:`~mongoengine.queryset.QuerySet` + + .. 
versionadded:: 0.5 """ - if self._limit == 0: - return 0 - return self._cursor.count(with_limit_and_skip=True) + c = self.__class__(self._document, self._collection_obj) - def __len__(self): - return self.count() + copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj', + '_where_clause', '_loaded_fields', '_ordering', '_snapshot', + '_timeout', '_class_check', '_slave_okay', '_read_preference', + '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce', + '_limit', '_skip', '_slice', '_hint') - def map_reduce(self, map_f, reduce_f, output, finalize_f=None, limit=None, - scope=None): - """Perform a map/reduce query using the current query spec - and ordering. While ``map_reduce`` respects ``QuerySet`` chaining, - it must be the last call made, as it does not return a maleable - ``QuerySet``. + for prop in copy_props: + val = getattr(self, prop) + setattr(c, prop, copy.copy(val)) - See the :meth:`~mongoengine.tests.QuerySetTest.test_map_reduce` - and :meth:`~mongoengine.tests.QuerySetTest.test_map_advanced` - tests in ``tests.queryset.QuerySetTest`` for usage examples. + if self._cursor_obj: + c._cursor_obj = self._cursor_obj.clone() - :param map_f: map function, as :class:`~bson.code.Code` or string - :param reduce_f: reduce function, as - :class:`~bson.code.Code` or string - :param output: output collection name, if set to 'inline' will try to - use :class:`~pymongo.collection.Collection.inline_map_reduce` - This can also be a dictionary containing output options - see: http://docs.mongodb.org/manual/reference/commands/#mapReduce - :param finalize_f: finalize function, an optional function that - performs any post-reduction processing. - :param scope: values to insert into map/reduce global scope. Optional. - :param limit: number of objects from current query to provide - to map/reduce method + if self._slice: + c._cursor_obj[self._slice] - Returns an iterator yielding - :class:`~mongoengine.document.MapReduceDocument`. + return c - .. 
note:: + def select_related(self, max_depth=1): + """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to + a maximum depth in order to cut down the number queries to mongodb. - Map/Reduce changed in server version **>= 1.7.4**. The PyMongo - :meth:`~pymongo.collection.Collection.map_reduce` helper requires - PyMongo version **>= 1.11**. - - .. versionchanged:: 0.5 - - removed ``keep_temp`` keyword argument, which was only relevant - for MongoDB server versions older than 1.7.4 - - .. versionadded:: 0.3 + .. versionadded:: 0.5 """ - MapReduceDocument = _import_class('MapReduceDocument') - - if not hasattr(self._collection, "map_reduce"): - raise NotImplementedError("Requires MongoDB >= 1.7.1") - - map_f_scope = {} - if isinstance(map_f, Code): - map_f_scope = map_f.scope - map_f = unicode(map_f) - map_f = Code(self._sub_js_fields(map_f), map_f_scope) - - reduce_f_scope = {} - if isinstance(reduce_f, Code): - reduce_f_scope = reduce_f.scope - reduce_f = unicode(reduce_f) - reduce_f_code = self._sub_js_fields(reduce_f) - reduce_f = Code(reduce_f_code, reduce_f_scope) - - mr_args = {'query': self._query} - - if finalize_f: - finalize_f_scope = {} - if isinstance(finalize_f, Code): - finalize_f_scope = finalize_f.scope - finalize_f = unicode(finalize_f) - finalize_f_code = self._sub_js_fields(finalize_f) - finalize_f = Code(finalize_f_code, finalize_f_scope) - mr_args['finalize'] = finalize_f - - if scope: - mr_args['scope'] = scope - - if limit: - mr_args['limit'] = limit - - if output == 'inline' and not self._ordering: - map_reduce_function = 'inline_map_reduce' - else: - map_reduce_function = 'map_reduce' - mr_args['out'] = output - - results = getattr(self._collection, map_reduce_function)( - map_f, reduce_f, **mr_args) - - if map_reduce_function == 'map_reduce': - results = results.find() - - if self._ordering: - results = results.sort(self._ordering) - - for doc in results: - yield MapReduceDocument(self._document, self._collection, - doc['_id'], 
doc['value']) + # Make select related work the same for querysets + max_depth += 1 + queryset = self.clone() + return queryset._dereference(queryset, max_depth=max_depth) def limit(self, n): """Limit the number of returned documents to `n`. This may also be @@ -546,14 +547,15 @@ class QuerySet(object): :param n: the maximum number of objects to return """ + queryset = self.clone() if n == 0: - self._cursor.limit(1) + queryset._cursor.limit(1) else: - self._cursor.limit(n) - self._limit = n + queryset._cursor.limit(n) + queryset._limit = n # Return self to allow chaining - return self + return queryset def skip(self, n): """Skip `n` documents before returning the results. This may also be @@ -561,9 +563,10 @@ class QuerySet(object): :param n: the number of objects to skip before returning results """ - self._cursor.skip(n) - self._skip = n - return self + queryset = self.clone() + queryset._cursor.skip(n) + queryset._skip = n + return queryset def hint(self, index=None): """Added 'hint' support, telling Mongo the proper index to use for the @@ -578,39 +581,10 @@ class QuerySet(object): .. versionadded:: 0.5 """ - self._cursor.hint(index) - self._hint = index - return self - - def __getitem__(self, key): - """Support skip and limit using getitem and slicing syntax. - """ - # Slice provided - if isinstance(key, slice): - try: - self._cursor_obj = self._cursor[key] - self._skip, self._limit = key.start, key.stop - except IndexError, err: - # PyMongo raises an error if key.start == key.stop, catch it, - # bin it, kill it. 
- start = key.start or 0 - if start >= 0 and key.stop >= 0 and key.step is None: - if start == key.stop: - self.limit(0) - self._skip, self._limit = key.start, key.stop - start - return self - raise err - # Allow further QuerySet modifications to be performed - return self - # Integer index provided - elif isinstance(key, int): - if self._scalar: - return self._get_scalar(self._document._from_son( - self._cursor[key])) - if self._as_pymongo: - return self._get_as_pymongo(self._cursor.next()) - return self._document._from_son(self._cursor[key]) - raise AttributeError + queryset = self.clone() + queryset._cursor.hint(index) + queryset._hint = index + return queryset def distinct(self, field): """Return a list of distinct values for a given field. @@ -621,8 +595,9 @@ class QuerySet(object): .. versionchanged:: 0.5 - Fixed handling references .. versionchanged:: 0.6 - Improved db_field refrence handling """ - return self._dereference(self._cursor.distinct(field), 1, - name=field, instance=self._document) + queryset = self.clone() + return queryset._dereference(queryset._cursor.distinct(field), 1, + name=field, instance=queryset._document) def only(self, *fields): """Load only a subset of this document's fields. :: @@ -679,11 +654,12 @@ class QuerySet(object): cleaned_fields.append((key, value)) fields = sorted(cleaned_fields, key=operator.itemgetter(1)) + queryset = self.clone() for value, group in itertools.groupby(fields, lambda x: x[1]): fields = [field for field, value in group] - fields = self._fields_to_dbfields(fields) - self._loaded_fields += QueryFieldList(fields, value=value) - return self + fields = queryset._fields_to_dbfields(fields) + queryset._loaded_fields += QueryFieldList(fields, value=value) + return queryset def all_fields(self): """Include all fields. Reset all previously calls of .only() or @@ -693,18 +669,10 @@ class QuerySet(object): .. 
versionadded:: 0.5 """ - self._loaded_fields = QueryFieldList( - always_include=self._loaded_fields.always_include) - return self - - def _fields_to_dbfields(self, fields): - """Translate fields paths to its db equivalents""" - ret = [] - for field in fields: - field = ".".join(f.db_field for f in - self._document._lookup_field(field.split('.'))) - ret.append(field) - return ret + queryset = self.clone() + queryset._loaded_fields = QueryFieldList( + always_include=queryset._loaded_fields.always_include) + return queryset def order_by(self, *keys): """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The @@ -714,25 +682,9 @@ class QuerySet(object): :param keys: fields to order the query results by; keys may be prefixed with **+** or **-** to determine the ordering direction """ - key_list = [] - for key in keys: - if not key: - continue - direction = pymongo.ASCENDING - if key[0] == '-': - direction = pymongo.DESCENDING - if key[0] in ('-', '+'): - key = key[1:] - key = key.replace('__', '.') - try: - key = self._document._translate_field_name(key) - except: - pass - key_list.append((key, direction)) - - self._ordering = key_list - - return self + queryset = self.clone() + queryset._ordering = self._get_order_by(keys) + return queryset def explain(self, format=False): """Return an explain plan record for the @@ -740,7 +692,6 @@ class QuerySet(object): :param format: format the plan before returning it """ - plan = self._cursor.explain() if format: plan = pprint.pformat(plan) @@ -753,8 +704,9 @@ class QuerySet(object): ..versionchanged:: 0.5 - made chainable """ - self._snapshot = enabled - return self + queryset = self.clone() + queryset._snapshot = enabled + return queryset def timeout(self, enabled): """Enable or disable the default mongod timeout when querying. 
@@ -763,16 +715,18 @@ class QuerySet(object): ..versionchanged:: 0.5 - made chainable """ - self._timeout = enabled - return self + queryset = self.clone() + queryset._timeout = enabled + return queryset def slave_okay(self, enabled): """Enable or disable the slave_okay when querying. :param enabled: whether or not the slave_okay is enabled """ - self._slave_okay = enabled - return self + queryset = self.clone() + queryset._slave_okay = enabled + return queryset def read_preference(self, read_preference): """Change the read_preference when querying. @@ -781,170 +735,9 @@ class QuerySet(object): preference. """ validate_read_preference('read_preference', read_preference) - self._read_preference = read_preference - return self - - def delete(self, safe=False): - """Delete the documents matched by the query. - - :param safe: check if the operation succeeded before returning - """ - doc = self._document - - # Handle deletes where skips or limits have been applied - if self._skip or self._limit: - for doc in self: - doc.delete() - return - - delete_rules = doc._meta.get('delete_rules') or {} - # Check for DENY rules before actually deleting/nullifying any other - # references - for rule_entry in delete_rules: - document_cls, field_name = rule_entry - rule = doc._meta['delete_rules'][rule_entry] - if rule == DENY and document_cls.objects( - **{field_name + '__in': self}).count() > 0: - msg = ("Could not delete document (%s.%s refers to it)" - % (document_cls.__name__, field_name)) - raise OperationError(msg) - - for rule_entry in delete_rules: - document_cls, field_name = rule_entry - rule = doc._meta['delete_rules'][rule_entry] - if rule == CASCADE: - ref_q = document_cls.objects(**{field_name + '__in': self}) - ref_q_count = ref_q.count() - if (doc != document_cls and ref_q_count > 0 - or (doc == document_cls and ref_q_count > 0)): - ref_q.delete(safe=safe) - elif rule == NULLIFY: - document_cls.objects(**{field_name + '__in': self}).update( - safe_update=safe, - 
**{'unset__%s' % field_name: 1}) - elif rule == PULL: - document_cls.objects(**{field_name + '__in': self}).update( - safe_update=safe, - **{'pull_all__%s' % field_name: self}) - - self._collection.remove(self._query, safe=safe) - - def update(self, safe_update=True, upsert=False, multi=True, - write_options=None, **update): - """Perform an atomic update on the fields matched by the query. When - ``safe_update`` is used, the number of affected documents is returned. - - :param safe_update: check if the operation succeeded before returning - :param upsert: Any existing document with that "_id" is overwritten. - :param write_options: extra keyword arguments for - :meth:`~pymongo.collection.Collection.update` - - .. versionadded:: 0.2 - """ - if not update: - raise OperationError("No update parameters, would remove data") - - if not write_options: - write_options = {} - - query = self._query - update = transform.update(self._document, **update) - - # If doing an atomic upsert on an inheritable class - # then ensure we add _cls to the update operation - if upsert and '_cls' in query: - if '$set' in update: - update["$set"]["_cls"] = self._document._class_name - else: - update["$set"] = {"_cls": self._document._class_name} - - try: - ret = self._collection.update(query, update, multi=multi, - upsert=upsert, safe=safe_update, - **write_options) - if ret is not None and 'n' in ret: - return ret['n'] - except pymongo.errors.OperationFailure, err: - if unicode(err) == u'multi not coded yet': - message = u'update() method requires MongoDB 1.1.3+' - raise OperationError(message) - raise OperationError(u'Update failed (%s)' % unicode(err)) - - def update_one(self, safe_update=True, upsert=False, write_options=None, - **update): - """Perform an atomic update on first field matched by the query. When - ``safe_update`` is used, the number of affected documents is returned. 
- - :param safe_update: check if the operation succeeded before returning - :param upsert: Any existing document with that "_id" is overwritten. - :param write_options: extra keyword arguments for - :meth:`~pymongo.collection.Collection.update` - :param update: Django-style update keyword arguments - - .. versionadded:: 0.2 - """ - return self.update(safe_update=True, upsert=upsert, multi=False, - write_options=None, **update) - - def __iter__(self): - self.rewind() - return self - - def _get_scalar(self, doc): - - def lookup(obj, name): - chunks = name.split('__') - for chunk in chunks: - obj = getattr(obj, chunk) - return obj - - data = [lookup(doc, n) for n in self._scalar] - if len(data) == 1: - return data[0] - - return tuple(data) - - def _get_as_pymongo(self, row): - # Extract which fields paths we should follow if .fields(...) was - # used. If not, handle all fields. - if not getattr(self, '__as_pymongo_fields', None): - self.__as_pymongo_fields = [] - for field in self._loaded_fields.fields - set(['_cls', '_id', '_types']): - self.__as_pymongo_fields.append(field) - while '.' in field: - field, _ = field.rsplit('.', 1) - self.__as_pymongo_fields.append(field) - - all_fields = not self.__as_pymongo_fields - - def clean(data, path=None): - path = path or '' - - if isinstance(data, dict): - new_data = {} - for key, value in data.iteritems(): - new_path = '%s.%s' % (path, key) if path else key - if all_fields or new_path in self.__as_pymongo_fields: - new_data[key] = clean(value, path=new_path) - data = new_data - elif isinstance(data, list): - data = [clean(d, path=path) for d in data] - else: - if self._as_pymongo_coerce: - # If we need to coerce types, we need to determine the - # type of this field and use the corresponding .to_python(...) 
- from mongoengine.fields import EmbeddedDocumentField - obj = self._document - for chunk in path.split('.'): - obj = getattr(obj, chunk, None) - if obj is None: - break - elif isinstance(obj, EmbeddedDocumentField): - obj = obj.document_type - if obj and data is not None: - data = obj.to_python(data) - return data - return clean(row) + queryset = self.clone() + queryset._read_preference = read_preference + return queryset def scalar(self, *fields): """Instead of returning Document instances, return either a specific @@ -955,14 +748,15 @@ class QuerySet(object): :param fields: One or more fields to return instead of a Document. """ - self._scalar = list(fields) + queryset = self.clone() + queryset._scalar = list(fields) if fields: - self.only(*fields) + queryset = queryset.only(*fields) else: - self.all_fields() + queryset = queryset.all_fields() - return self + return queryset def values_list(self, *fields): """An alias for scalar""" @@ -972,36 +766,122 @@ class QuerySet(object): """Instead of returning Document instances, return raw values from pymongo. - :param coerce_type: Field types (if applicable) would be use to coerce types. + :param coerce_type: Field types (if applicable) would be use to + coerce types. """ - self._as_pymongo = True - self._as_pymongo_coerce = coerce_types - return self + queryset = self.clone() + queryset._as_pymongo = True + queryset._as_pymongo_coerce = coerce_types + return queryset - def _sub_js_fields(self, code): - """When fields are specified with [~fieldname] syntax, where - *fieldname* is the Python name of a field, *fieldname* will be - substituted for the MongoDB name of the field (specified using the - :attr:`name` keyword argument in a field's constructor). 
+ # JSON Helpers + + def to_json(self): + """Converts a queryset to JSON""" + queryset = self.clone() + return json_util.dumps(queryset._collection_obj.find(queryset._query)) + + def from_json(self, json_data): + """Converts json data to unsaved objects""" + son_data = json_util.loads(json_data) + return [self._document._from_son(data) for data in son_data] + + # JS functionality + + def map_reduce(self, map_f, reduce_f, output, finalize_f=None, limit=None, + scope=None): + """Perform a map/reduce query using the current query spec + and ordering. While ``map_reduce`` respects ``QuerySet`` chaining, + it must be the last call made, as it does not return a maleable + ``QuerySet``. + + See the :meth:`~mongoengine.tests.QuerySetTest.test_map_reduce` + and :meth:`~mongoengine.tests.QuerySetTest.test_map_advanced` + tests in ``tests.queryset.QuerySetTest`` for usage examples. + + :param map_f: map function, as :class:`~bson.code.Code` or string + :param reduce_f: reduce function, as + :class:`~bson.code.Code` or string + :param output: output collection name, if set to 'inline' will try to + use :class:`~pymongo.collection.Collection.inline_map_reduce` + This can also be a dictionary containing output options + see: http://docs.mongodb.org/manual/reference/commands/#mapReduce + :param finalize_f: finalize function, an optional function that + performs any post-reduction processing. + :param scope: values to insert into map/reduce global scope. Optional. + :param limit: number of objects from current query to provide + to map/reduce method + + Returns an iterator yielding + :class:`~mongoengine.document.MapReduceDocument`. + + .. note:: + + Map/Reduce changed in server version **>= 1.7.4**. The PyMongo + :meth:`~pymongo.collection.Collection.map_reduce` helper requires + PyMongo version **>= 1.11**. + + .. versionchanged:: 0.5 + - removed ``keep_temp`` keyword argument, which was only relevant + for MongoDB server versions older than 1.7.4 + + .. 
versionadded:: 0.3 """ - def field_sub(match): - # Extract just the field name, and look up the field objects - field_name = match.group(1).split('.') - fields = self._document._lookup_field(field_name) - # Substitute the correct name for the field into the javascript - return u'["%s"]' % fields[-1].db_field + queryset = self.clone() - def field_path_sub(match): - # Extract just the field name, and look up the field objects - field_name = match.group(1).split('.') - fields = self._document._lookup_field(field_name) - # Substitute the correct name for the field into the javascript - return ".".join([f.db_field for f in fields]) + MapReduceDocument = _import_class('MapReduceDocument') - code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) - code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, - code) - return code + if not hasattr(self._collection, "map_reduce"): + raise NotImplementedError("Requires MongoDB >= 1.7.1") + + map_f_scope = {} + if isinstance(map_f, Code): + map_f_scope = map_f.scope + map_f = unicode(map_f) + map_f = Code(queryset._sub_js_fields(map_f), map_f_scope) + + reduce_f_scope = {} + if isinstance(reduce_f, Code): + reduce_f_scope = reduce_f.scope + reduce_f = unicode(reduce_f) + reduce_f_code = queryset._sub_js_fields(reduce_f) + reduce_f = Code(reduce_f_code, reduce_f_scope) + + mr_args = {'query': queryset._query} + + if finalize_f: + finalize_f_scope = {} + if isinstance(finalize_f, Code): + finalize_f_scope = finalize_f.scope + finalize_f = unicode(finalize_f) + finalize_f_code = queryset._sub_js_fields(finalize_f) + finalize_f = Code(finalize_f_code, finalize_f_scope) + mr_args['finalize'] = finalize_f + + if scope: + mr_args['scope'] = scope + + if limit: + mr_args['limit'] = limit + + if output == 'inline' and not queryset._ordering: + map_reduce_function = 'inline_map_reduce' + else: + map_reduce_function = 'map_reduce' + mr_args['out'] = output + + results = getattr(queryset._collection, 
map_reduce_function)( + map_f, reduce_f, **mr_args) + + if map_reduce_function == 'map_reduce': + results = results.find() + + if queryset._ordering: + results = results.sort(queryset._ordering) + + for doc in results: + yield MapReduceDocument(queryset._document, queryset._collection, + doc['_id'], doc['value']) def exec_js(self, code, *fields, **options): """Execute a Javascript function on the server. A list of fields may be @@ -1025,24 +905,26 @@ class QuerySet(object): :param options: options that you want available to the function (accessed in Javascript through the ``options`` object) """ - code = self._sub_js_fields(code) + queryset = self.clone() - fields = [self._document._translate_field_name(f) for f in fields] - collection = self._document._get_collection_name() + code = queryset._sub_js_fields(code) + + fields = [queryset._document._translate_field_name(f) for f in fields] + collection = queryset._document._get_collection_name() scope = { 'collection': collection, 'options': options or {}, } - query = self._query - if self._where_clause: - query['$where'] = self._where_clause + query = queryset._query + if queryset._where_clause: + query['$where'] = queryset._where_clause scope['query'] = query code = Code(code, scope=scope) - db = self._document._get_db() + db = queryset._document._get_db() return db.eval(code, *fields) def where(self, where_clause): @@ -1056,9 +938,10 @@ class QuerySet(object): .. versionadded:: 0.5 """ - where_clause = self._sub_js_fields(where_clause) - self._where_clause = where_clause - return self + queryset = self.clone() + where_clause = queryset._sub_js_fields(where_clause) + queryset._where_clause = where_clause + return queryset def sum(self, field): """Sum over the values of the specified field. 
@@ -1157,6 +1040,101 @@ class QuerySet(object): normalize=normalize) return self._item_frequencies_exec_js(field, normalize=normalize) + # Iterator helpers + + def next(self): + """Wrap the result in a :class:`~mongoengine.Document` object. + """ + self._iter = True + try: + if self._limit == 0 or self._none: + raise StopIteration + if self._scalar: + return self._get_scalar(self._document._from_son( + self._cursor.next())) + if self._as_pymongo: + return self._get_as_pymongo(self._cursor.next()) + + return self._document._from_son(self._cursor.next()) + except StopIteration, e: + self.rewind() + raise e + + def rewind(self): + """Rewind the cursor to its unevaluated state. + + .. versionadded:: 0.3 + """ + self._iter = False + self._cursor.rewind() + + # Properties + + @property + def _collection(self): + """Property that returns the collection object. This allows us to + perform operations only if the collection is accessed. + """ + return self._collection_obj + + @property + def _cursor_args(self): + cursor_args = { + 'snapshot': self._snapshot, + 'timeout': self._timeout, + 'slave_okay': self._slave_okay, + } + if self._read_preference is not None: + cursor_args['read_preference'] = self._read_preference + if self._loaded_fields: + cursor_args['fields'] = self._loaded_fields.as_dict() + return cursor_args + + @property + def _cursor(self): + if self._cursor_obj is None: + + self._cursor_obj = self._collection.find(self._query, + **self._cursor_args) + # Apply where clauses to cursor + if self._where_clause: + where_clause = self._sub_js_fields(self._where_clause) + self._cursor_obj.where(where_clause) + + if self._ordering: + # Apply query ordering + self._cursor_obj.sort(self._ordering) + elif self._document._meta['ordering']: + # Otherwise, apply the ordering from the document model + order = self._get_order_by(self._document._meta['ordering']) + self._cursor_obj.sort(order) + + if self._limit is not None: + self._cursor_obj.limit(self._limit - (self._skip or 
0)) + + if self._skip is not None: + self._cursor_obj.skip(self._skip) + + if self._hint != -1: + self._cursor_obj.hint(self._hint) + return self._cursor_obj + + @property + def _query(self): + if self._mongo_query is None: + self._mongo_query = self._query_obj.to_query(self._document) + if self._class_check: + self._mongo_query.update(self._initial_query) + return self._mongo_query + + @property + def _dereference(self): + if not self.__dereference: + self.__dereference = _import_class('DeReference')() + return self.__dereference + + # Helper Functions + def _item_frequencies_map_reduce(self, field, normalize=False): map_func = """ function() { @@ -1269,48 +1247,130 @@ class QuerySet(object): return frequencies - def __repr__(self): - """Provides the string representation of the QuerySet + def _fields_to_dbfields(self, fields): + """Translate fields paths to its db equivalents""" + ret = [] + for field in fields: + field = ".".join(f.db_field for f in + self._document._lookup_field(field.split('.'))) + ret.append(field) + return ret - .. versionchanged:: 0.6.13 Now doesnt modify the cursor + def _get_order_by(self, keys): + """Creates a list of order by fields """ - - if self._iter: - return '.. queryset mid-iteration ..' - - data = [] - for i in xrange(REPR_OUTPUT_SIZE + 1): + key_list = [] + for key in keys: + if not key: + continue + direction = pymongo.ASCENDING + if key[0] == '-': + direction = pymongo.DESCENDING + if key[0] in ('-', '+'): + key = key[1:] + key = key.replace('__', '.') try: - data.append(self.next()) - except StopIteration: - break - if len(data) > REPR_OUTPUT_SIZE: - data[-1] = "...(remaining elements truncated)..." 
+ key = self._document._translate_field_name(key) + except: + pass + key_list.append((key, direction)) + return key_list - self.rewind() - return repr(data) + def _get_scalar(self, doc): - def select_related(self, max_depth=1): - """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to - a maximum depth in order to cut down the number queries to mongodb. + def lookup(obj, name): + chunks = name.split('__') + for chunk in chunks: + obj = getattr(obj, chunk) + return obj - .. versionadded:: 0.5 + data = [lookup(doc, n) for n in self._scalar] + if len(data) == 1: + return data[0] + + return tuple(data) + + def _get_as_pymongo(self, row): + # Extract which fields paths we should follow if .fields(...) was + # used. If not, handle all fields. + if not getattr(self, '__as_pymongo_fields', None): + self.__as_pymongo_fields = [] + for field in self._loaded_fields.fields - set(['_cls', '_id']): + self.__as_pymongo_fields.append(field) + while '.' in field: + field, _ = field.rsplit('.', 1) + self.__as_pymongo_fields.append(field) + + all_fields = not self.__as_pymongo_fields + + def clean(data, path=None): + path = path or '' + + if isinstance(data, dict): + new_data = {} + for key, value in data.iteritems(): + new_path = '%s.%s' % (path, key) if path else key + if all_fields or new_path in self.__as_pymongo_fields: + new_data[key] = clean(value, path=new_path) + data = new_data + elif isinstance(data, list): + data = [clean(d, path=path) for d in data] + else: + if self._as_pymongo_coerce: + # If we need to coerce types, we need to determine the + # type of this field and use the corresponding + # .to_python(...) 
+ from mongoengine.fields import EmbeddedDocumentField + obj = self._document + for chunk in path.split('.'): + obj = getattr(obj, chunk, None) + if obj is None: + break + elif isinstance(obj, EmbeddedDocumentField): + obj = obj.document_type + if obj and data is not None: + data = obj.to_python(data) + return data + return clean(row) + + def _sub_js_fields(self, code): + """When fields are specified with [~fieldname] syntax, where + *fieldname* is the Python name of a field, *fieldname* will be + substituted for the MongoDB name of the field (specified using the + :attr:`name` keyword argument in a field's constructor). """ - # Make select related work the same for querysets - max_depth += 1 - return self._dereference(self, max_depth=max_depth) + def field_sub(match): + # Extract just the field name, and look up the field objects + field_name = match.group(1).split('.') + fields = self._document._lookup_field(field_name) + # Substitute the correct name for the field into the javascript + return u'["%s"]' % fields[-1].db_field - def to_json(self): - """Converts a queryset to JSON""" - return json_util.dumps(self._collection_obj.find(self._query)) + def field_path_sub(match): + # Extract just the field name, and look up the field objects + field_name = match.group(1).split('.') + fields = self._document._lookup_field(field_name) + # Substitute the correct name for the field into the javascript + return ".".join([f.db_field for f in fields]) - def from_json(self, json_data): - """Converts json data to unsaved objects""" - son_data = json_util.loads(json_data) - return [self._document._from_son(data) for data in son_data] + code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) + code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, + code) + return code - @property - def _dereference(self): - if not self.__dereference: - self.__dereference = _import_class('DeReference')() - return self.__dereference + # Deprecated + + def 
ensure_index(self, **kwargs): + """Deprecated use :func:`~Document.ensure_index`""" + msg = ("Doc.objects()._ensure_index() is deprecated. " + "Use Doc.ensure_index() instead.") + warnings.warn(msg, DeprecationWarning) + self._document.__class__.ensure_index(**kwargs) + return self + + def _ensure_indexes(self): + """Deprecated use :func:`~Document.ensure_indexes`""" + msg = ("Doc.objects()._ensure_indexes() is deprecated. " + "Use Doc.ensure_indexes() instead.") + warnings.warn(msg, DeprecationWarning) + self._document.__class__.ensure_indexes() diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index bad3d36..bf64a56 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -713,19 +713,19 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(p._cursor_args, {'snapshot': False, 'slave_okay': False, 'timeout': True}) - p.snapshot(False).slave_okay(False).timeout(False) + p = p.snapshot(False).slave_okay(False).timeout(False) self.assertEqual(p._cursor_args, {'snapshot': False, 'slave_okay': False, 'timeout': False}) - p.snapshot(True).slave_okay(False).timeout(False) + p = p.snapshot(True).slave_okay(False).timeout(False) self.assertEqual(p._cursor_args, {'snapshot': True, 'slave_okay': False, 'timeout': False}) - p.snapshot(True).slave_okay(True).timeout(False) + p = p.snapshot(True).slave_okay(True).timeout(False) self.assertEqual(p._cursor_args, {'snapshot': True, 'slave_okay': True, 'timeout': False}) - p.snapshot(True).slave_okay(True).timeout(True) + p = p.snapshot(True).slave_okay(True).timeout(True) self.assertEqual(p._cursor_args, {'snapshot': True, 'slave_okay': True, 'timeout': True}) @@ -773,7 +773,8 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(len(docs), 1000) # Limit and skip - self.assertEqual('[, , ]', "%s" % docs[1:4]) + docs = docs[1:4] + self.assertEqual('[, , ]', "%s" % docs) self.assertEqual(docs.count(), 3) self.assertEqual(len(docs), 3) diff --git a/tests/queryset/visitor.py 
b/tests/queryset/visitor.py index 4af39e8..98815db 100644 --- a/tests/queryset/visitor.py +++ b/tests/queryset/visitor.py @@ -202,8 +202,8 @@ class QTest(unittest.TestCase): self.assertEqual(test2.count(), 3) self.assertFalse(test2 == test) - test2.filter(x=6) - self.assertEqual(test2.count(), 1) + test3 = test2.filter(x=6) + self.assertEqual(test3.count(), 1) self.assertEqual(test.count(), 3) def test_q(self): From e537369d9885df724537974e9c44710110cfe9e7 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 4 Jan 2013 14:02:34 +0000 Subject: [PATCH 086/464] Trying to get travis to build the 0.8 branch --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index c7cc271..806d8b4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,4 +26,4 @@ notifications: branches: only: - master - - 0.8 \ No newline at end of file + - "0.8" From e6ac8cab53a3269d2dee91d529f646d59f1b98c8 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 4 Jan 2013 14:28:42 +0000 Subject: [PATCH 087/464] Fixing python 2.5 support --- mongoengine/queryset/queryset.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 239975f..e637370 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -516,12 +516,15 @@ class QuerySet(object): '_where_clause', '_loaded_fields', '_ordering', '_snapshot', '_timeout', '_class_check', '_slave_okay', '_read_preference', '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce', - '_limit', '_skip', '_slice', '_hint') + '_limit', '_skip', '_hint') for prop in copy_props: val = getattr(self, prop) setattr(c, prop, copy.copy(val)) + if self._slice: + c._slice = self._slice + if self._cursor_obj: c._cursor_obj = self._cursor_obj.clone() From d9ed33d1b191f075a5370a7f9f199205496ea953 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 4 Jan 2013 14:33:08 +0000 Subject: [PATCH 088/464] Added python 
3.3 support to travis --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 806d8b4..c5f3961 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,6 +7,7 @@ python: - 2.7 - 3.1 - 3.2 + - 3.3 env: - PYMONGO=dev - PYMONGO=2.4.1 From 85173d188ba8038cf6713e94d25c43e53e49db1d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 4 Jan 2013 15:57:08 +0000 Subject: [PATCH 089/464] Add simplejson to python 2.5 build --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index c5f3961..bf34bab 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,6 +17,7 @@ install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi + - if [[ $TRAVIS_PYTHON_VERSION == '2.5' ]]; then pip install simplejson --use-mirrors ; true; fi - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi - python setup.py install From 0f9e4ef352fa14b52ddc37b3bf9f17f03f8f0ab5 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 4 Jan 2013 16:27:58 +0000 Subject: [PATCH 090/464] Add mongoengine.png asset in the build --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 35ca579..4b42e71 100644 --- a/setup.py +++ b/setup.py @@ -56,7 +56,7 @@ if sys.version_info[0] == 3: extra_opts['packages'] = find_packages(exclude=('tests',)) if "test" in sys.argv or "nosetests" in sys.argv: extra_opts['packages'].append("tests") - extra_opts['package_data'] = {"tests": ["mongoengine.png"]} + extra_opts['package_data'] = {"tests": ["fields/mongoengine.png"]} else: extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 
'django>=1.3', 'PIL'] extra_opts['packages'] = find_packages(exclude=('tests',)) From 5c45eee817a680851f990e0ca0e1fc26c5cb6e9c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 4 Jan 2013 16:28:26 +0000 Subject: [PATCH 091/464] Whitespace --- tests/document/instance.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/document/instance.py b/tests/document/instance.py index 0054480..c8a1b11 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -654,8 +654,7 @@ class InstanceTest(unittest.TestCase): Foo.drop_collection() - a = Foo(name='hello') - a.save() + a = Foo(name='hello').save() a.bar = a with open(TEST_IMAGE_PATH, 'rb') as test_image: @@ -665,7 +664,7 @@ class InstanceTest(unittest.TestCase): # Confirm can save and it resets the changed fields without hitting # max recursion error b = Foo.objects.with_id(a.id) - b.name='world' + b.name = 'world' b.save() self.assertEqual(b.picture, b.bar.picture, b.bar.bar.picture) From 7bb9c7d47f18cdee5c68eceb7cbe6792a962dbae Mon Sep 17 00:00:00 2001 From: Nick Joyce Date: Mon, 7 Jan 2013 14:49:39 +0000 Subject: [PATCH 092/464] Ensure that the update actions are grouped rather than serial. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is a performance update. When multiple properties of the same entity have been deleted and modified, 2 calls to update the entity are made, one {"$set": … } and another {"$unset": … }. This is 2 network interface calls which is a performance killer (even at lan speeds). 
Fixes: #210 --- mongoengine/document.py | 11 ++++++++--- tests/test_document.py | 30 ++++++++++++++++++++++++++++++ 2 files changed, 38 insertions(+), 3 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 7b3afaf..2bd1d22 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -234,11 +234,16 @@ class Document(BaseDocument): select_dict[actual_key] = doc[actual_key] upsert = self._created + work = {} + if updates: - collection.update(select_dict, {"$set": updates}, - upsert=upsert, safe=safe, **write_options) + work["$set"] = updates + if removals: - collection.update(select_dict, {"$unset": removals}, + work["$unset"] = removals + + if work: + collection.update(select_dict, work, upsert=upsert, safe=safe, **write_options) warn_cascade = not cascade and 'cascade' not in self._meta diff --git a/tests/test_document.py b/tests/test_document.py index cd0ab8f..fa6eb28 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -3515,6 +3515,36 @@ class ValidatorErrorTest(unittest.TestCase): self.assertRaises(OperationError, change_shard_key) + def test_set_unset_one_operation(self): + """Ensure that $set and $unset actions are performed in the same + operation. + """ + class FooBar(Document): + meta = { + 'collection': 'foobar', + } + + foo = StringField(default=None) + bar = StringField(default=None) + + FooBar.drop_collection() + + # write an entity with a single prop + foo = FooBar(foo='foo') + foo.save() + + self.assertEqual(foo.foo, 'foo') + + # set foo to the default causing mongoengine to $unset foo. 
+ foo.foo = None + foo.bar = 'bar' + + foo.save() + foo.reload() + + self.assertIsNone(foo.foo) + self.assertEqual(foo.bar, 'bar') + if __name__ == '__main__': unittest.main() From 50905ab459e729d2136a47c0e7be0c9d10daef14 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 9 Jan 2013 08:41:03 +0000 Subject: [PATCH 093/464] Test update --- tests/test_dereference.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/test_dereference.py b/tests/test_dereference.py index 41f8aeb..8557ec5 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -198,6 +198,10 @@ class FieldTest(unittest.TestCase): raw_data = Group._get_collection().find_one() self.assertTrue(isinstance(raw_data['author'], DBRef)) self.assertTrue(isinstance(raw_data['members'][0], DBRef)) + group = Group.objects.first() + + self.assertEqual(group.author, user) + self.assertEqual(group.members, [user]) # Migrate the model definition class Group(Document): From 8e038dd5634d85a57ebb3560cd735aff0eaa44ed Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 9 Jan 2013 08:51:45 +0000 Subject: [PATCH 094/464] Updated travis python-3.1 no longer supported --- .travis.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 806d8b4..550cc6c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,11 +2,11 @@ language: python services: mongodb python: - - 2.5 - - 2.6 - - 2.7 - - 3.1 - - 3.2 + - "2.5" + - "2.6" + - "2.7" + - "3.2" + - "3.3" env: - PYMONGO=dev - PYMONGO=2.4.1 From 06681a453fe18e3bf91912630a979672e489821b Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 9 Jan 2013 13:15:21 +0000 Subject: [PATCH 095/464] python 3.3 test fixes --- tests/test_document.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/tests/test_document.py b/tests/test_document.py index cd0ab8f..9fc79f5 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -924,7 +924,7 @@ class DocumentTest(unittest.TestCase): 
self.assertEqual(1, Person.objects.count()) info = Person.objects._collection.index_information() - self.assertEqual(info.keys(), ['_types_1_user_guid_1', '_id_', '_types_1_name_1']) + self.assertEqual(sorted(info.keys()), ['_id_', '_types_1_name_1', '_types_1_user_guid_1']) Person.drop_collection() def test_disable_index_creation(self): @@ -968,7 +968,7 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() info = BlogPost.objects._collection.index_information() - self.assertEqual(info.keys(), ['_types_1_date.yr_-1', '_id_']) + self.assertEqual(sorted(info.keys()), [ '_id_', '_types_1_date.yr_-1']) BlogPost.drop_collection() def test_list_embedded_document_index(self): @@ -991,7 +991,8 @@ class DocumentTest(unittest.TestCase): info = BlogPost.objects._collection.index_information() # we don't use _types in with list fields by default - self.assertEqual(info.keys(), ['_id_', '_types_1', 'tags.tag_1']) + self.assertEqual(sorted(info.keys()), + ['_id_', '_types_1', 'tags.tag_1']) post1 = BlogPost(title="Embedded Indexes tests in place", tags=[Tag(name="about"), Tag(name="time")] @@ -1008,7 +1009,7 @@ class DocumentTest(unittest.TestCase): recursive_obj = EmbeddedDocumentField(RecursiveObject) info = RecursiveDocument.objects._collection.index_information() - self.assertEqual(info.keys(), ['_id_', '_types_1']) + self.assertEqual(sorted(info.keys()), ['_id_', '_types_1']) def test_geo_indexes_recursion(self): @@ -2719,7 +2720,7 @@ class DocumentTest(unittest.TestCase): Person.drop_collection() - self.assertEqual(Person._fields.keys(), ['name', 'id']) + self.assertEqual(sorted(Person._fields.keys()), ['id', 'name']) Person(name="Rozza").save() From 72dd9daa23ea4e9adf44df6a3c3ab2499e971ad6 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 9 Jan 2013 16:16:48 +0000 Subject: [PATCH 096/464] Fixing py3.3 tests --- mongoengine/queryset/transform.py | 2 +- tests/queryset/transform.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 8ee84ee..9fe8780 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -26,7 +26,7 @@ def query(_doc_cls=None, _field_operation=False, **query): """ mongo_query = {} merge_query = defaultdict(list) - for key, value in query.items(): + for key, value in sorted(query.items()): if key == "__raw__": mongo_query.update(value) continue diff --git a/tests/queryset/transform.py b/tests/queryset/transform.py index 666b345..d38cbfd 100644 --- a/tests/queryset/transform.py +++ b/tests/queryset/transform.py @@ -26,7 +26,7 @@ class TransformTest(unittest.TestCase): self.assertEqual(transform.query(age__gt=20, age__lt=50), {'age': {'$gt': 20, '$lt': 50}}) self.assertEqual(transform.query(age=20, age__gt=50), - {'age': 20}) + {'$and': [{'age': {'$gt': 50}}, {'age': 20}]}) self.assertEqual(transform.query(friend__age__gte=30), {'friend.age': {'$gte': 30}}) self.assertEqual(transform.query(name__exists=True), From 87c965edd347138fd071da0cd5fa82a2377f7205 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 10 Jan 2013 11:08:07 +0000 Subject: [PATCH 097/464] Fixing PY3.3 test cases --- mongoengine/queryset.py | 12 +++++++++--- tests/test_document.py | 17 +++++++++++------ tests/test_queryset.py | 2 +- 3 files changed, 21 insertions(+), 10 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 160201e..bff05fc 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -7,10 +7,11 @@ import operator from collections import defaultdict from functools import partial -from mongoengine.python_support import product, reduce +from mongoengine.python_support import product, reduce, PY3 import pymongo from bson.code import Code +from bson.son import SON from mongoengine import signals @@ -388,7 +389,12 @@ class QuerySet(object): if self._mongo_query is None: self._mongo_query = self._query_obj.to_query(self._document) if self._class_check: - 
self._mongo_query.update(self._initial_query) + if PY3: + query = SON(self._initial_query.items()) + query.update(self._mongo_query) + self._mongo_query = query + else: + self._mongo_query.update(self._initial_query) return self._mongo_query def ensure_index(self, key_or_list, drop_dups=False, background=False, @@ -704,7 +710,7 @@ class QuerySet(object): mongo_query = {} merge_query = defaultdict(list) - for key, value in query.items(): + for key, value in sorted(query.items()): if key == "__raw__": mongo_query.update(value) continue diff --git a/tests/test_document.py b/tests/test_document.py index 9fc79f5..3e8d813 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -8,6 +8,7 @@ import sys import unittest import uuid import warnings +import operator from nose.plugins.skip import SkipTest from datetime import datetime @@ -452,7 +453,8 @@ class DocumentTest(unittest.TestCase): info = collection.index_information() info = [value['key'] for key, value in info.iteritems()] - self.assertEqual([[(u'_id', 1)], [(u'_types', 1), (u'name', 1)]], info) + self.assertEqual([[('_id', 1)], [('_types', 1), ('name', 1)]], + sorted(info, key=operator.itemgetter(0))) # Turn off inheritance class Animal(Document): @@ -473,7 +475,8 @@ class DocumentTest(unittest.TestCase): info = collection.index_information() info = [value['key'] for key, value in info.iteritems()] - self.assertEqual([[(u'_id', 1)], [(u'_types', 1), (u'name', 1)]], info) + self.assertEqual([[(u'_id', 1)], [(u'_types', 1), (u'name', 1)]], + sorted(info, key=operator.itemgetter(0))) info = collection.index_information() indexes_to_drop = [key for key, value in info.iteritems() if '_types' in dict(value['key'])] @@ -482,14 +485,16 @@ class DocumentTest(unittest.TestCase): info = collection.index_information() info = [value['key'] for key, value in info.iteritems()] - self.assertEqual([[(u'_id', 1)]], info) + self.assertEqual([[(u'_id', 1)]], + sorted(info, key=operator.itemgetter(0))) # Recreate indexes 
dog = Animal.objects.first() dog.save() info = collection.index_information() info = [value['key'] for key, value in info.iteritems()] - self.assertEqual([[(u'_id', 1)], [(u'name', 1),]], info) + self.assertEqual([[(u'_id', 1)], [(u'name', 1),]], + sorted(info, key=operator.itemgetter(0))) Animal.drop_collection() @@ -3412,8 +3417,8 @@ class ValidatorErrorTest(unittest.TestCase): try: User().validate() except ValidationError, e: - expected_error_message = """ValidationError(Field is required: ['username', 'name'])""" - self.assertEqual(e.message, expected_error_message) + expected_error_message = """ValidationError(Field is required""" + self.assertTrue(expected_error_message in e.message) self.assertEqual(e.to_dict(), { 'username': 'Field is required', 'name': 'Field is required'}) diff --git a/tests/test_queryset.py b/tests/test_queryset.py index b09eafb..5234cea 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -47,7 +47,7 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(QuerySet._transform_query(age__gt=20, age__lt=50), {'age': {'$gt': 20, '$lt': 50}}) self.assertEqual(QuerySet._transform_query(age=20, age__gt=50), - {'age': 20}) + {'$and': [{'age': {'$gt': 50}}, {'age': 20}]}) self.assertEqual(QuerySet._transform_query(friend__age__gte=30), {'friend.age': {'$gte': 30}}) self.assertEqual(QuerySet._transform_query(name__exists=True), From e508625935ceee3c8becc387df7a5c396153dc1a Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 14 Jan 2013 16:37:40 +0000 Subject: [PATCH 098/464] Update docs/upgrade.rst --- docs/upgrade.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 82ac7ca..901c251 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -183,3 +183,9 @@ Alternatively, you can rename your collections eg :: else: print "Upgraded collection names" + +mongodb 1.8 > 2.0 + +=================== + +Its been reported that indexes may need to be recreated to the newer version of 
indexes. +To do this drop indexes and call ``ensure_indexes`` on each model. From f5d02e1b1015856d013e73d01c7035359f0e9edb Mon Sep 17 00:00:00 2001 From: Martin Alderete Date: Tue, 15 Jan 2013 02:40:15 -0300 Subject: [PATCH 099/464] Fixed issue with choices validation when they are simple list/tuple, after model.validate() did not get any error message. Added test to ensure that model.validate() set the correct error messages. --- AUTHORS | 1 + mongoengine/base.py | 2 +- tests/test_fields.py | 35 +++++++++++++++++++++++++++++++++++ 3 files changed, 37 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 82a1dfa..ac48d7b 100644 --- a/AUTHORS +++ b/AUTHORS @@ -128,3 +128,4 @@ that much better: * Peter Teichman * Jakub Kot * Jorge Bastida + * Martin Alderete https://github.com/malderete diff --git a/mongoengine/base.py b/mongoengine/base.py index 013afe7..f73af4c 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -252,7 +252,7 @@ class BaseField(object): elif value_to_check not in self.choices: msg = ('Value must be %s of %s' % (err_msg, unicode(self.choices))) - self.error() + self.error(msg) # check validation argument if self.validation is not None: diff --git a/tests/test_fields.py b/tests/test_fields.py index 28af1b2..fa7e396 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -1706,6 +1706,41 @@ class FieldTest(unittest.TestCase): Shirt.drop_collection() + + def test_simple_choices_validation_invalid_value(self): + """Ensure that error messages are correct. 
+ """ + SIZES = ('S', 'M', 'L', 'XL', 'XXL') + COLORS = (('R', 'Red'), ('B', 'Blue')) + SIZE_MESSAGE = u"Value must be one of ('S', 'M', 'L', 'XL', 'XXL')" + COLOR_MESSAGE = u"Value must be one of ['R', 'B']" + + class Shirt(Document): + size = StringField(max_length=3, choices=SIZES) + color = StringField(max_length=1, choices=COLORS) + + Shirt.drop_collection() + + shirt = Shirt() + shirt.validate() + + shirt.size = "S" + shirt.color = "R" + shirt.validate() + + shirt.size = "XS" + shirt.color = "G" + + try: + shirt.validate() + except ValidationError, error: + # get the validation rules + error_dict = error.to_dict() + self.assertEqual(error_dict['size'], SIZE_MESSAGE) + self.assertEqual(error_dict['color'], COLOR_MESSAGE) + + Shirt.drop_collection() + def test_file_fields(self): """Ensure that file fields can be written to and their data retrieved """ From 17eeeb75366ab4625ee5c1a11089aaf9c21fee18 Mon Sep 17 00:00:00 2001 From: lcya86 Date: Thu, 17 Jan 2013 15:31:27 +0800 Subject: [PATCH 100/464] Update mongoengine/django/sessions.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit added the "get_decoded" method to the MongoSession class --- mongoengine/django/sessions.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mongoengine/django/sessions.py b/mongoengine/django/sessions.py index 810b626..8a35e89 100644 --- a/mongoengine/django/sessions.py +++ b/mongoengine/django/sessions.py @@ -34,6 +34,9 @@ class MongoSession(Document): meta = {'collection': MONGOENGINE_SESSION_COLLECTION, 'db_alias': MONGOENGINE_SESSION_DB_ALIAS, 'allow_inheritance': False} + + def get_decoded(self): + return SessionStore().decode(self.session_data) class SessionStore(SessionBase): From 2c7b12c022130727c25c3c0686a53a3762639f8e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 22 Jan 2013 13:31:53 +0000 Subject: [PATCH 101/464] Added support for $maxDistance (#179) --- docs/changelog.rst | 1 + docs/guide/querying.rst | 2 ++ 
mongoengine/queryset/transform.py | 5 ++++- tests/queryset/queryset.py | 10 ++++++++++ 4 files changed, 17 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 4fd3e14..3e8f782 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -27,6 +27,7 @@ Changes in 0.8.X - Simplified Q objects, removed QueryTreeTransformerVisitor (#98) (#171) - FileFields now copyable (#198) - Querysets now return clones and are no longer edit in place (#56) +- Added support for $maxDistance (#179) Changes in 0.7.9 ================ diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index d582943..40e36e3 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -92,6 +92,8 @@ may used with :class:`~mongoengine.GeoPointField`\ s: * ``within_polygon`` -- filter documents to those within a given polygon (e.g. [(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]). .. note:: Requires Mongo Server 2.0 +* ``max_distance`` -- can be added to your location queries to set a maximum +distance. 
Querying lists diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 9fe8780..5707cec 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -9,7 +9,8 @@ __all__ = ('query', 'update') COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', 'all', 'size', 'exists', 'not') GEO_OPERATORS = ('within_distance', 'within_spherical_distance', - 'within_box', 'within_polygon', 'near', 'near_sphere') + 'within_box', 'within_polygon', 'near', 'near_sphere', + 'max_distance') STRING_OPERATORS = ('contains', 'icontains', 'startswith', 'istartswith', 'endswith', 'iendswith', 'exact', 'iexact') @@ -97,6 +98,8 @@ def query(_doc_cls=None, _field_operation=False, **query): value = {'$nearSphere': value} elif op == 'within_box': value = {'$within': {'$box': value}} + elif op == "max_distance": + value = {'$maxDistance': value} else: raise NotImplementedError("Geo method '%s' has not " "been implemented" % op) diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index bf64a56..b5b0b28 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -2238,6 +2238,12 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(events.count(), 3) self.assertEqual(list(events), [event3, event1, event2]) + # find events within 10 degrees of san francisco + point = [37.7566023, -122.415579] + events = Event.objects(location__near=point, location__max_distance=10) + self.assertEqual(events.count(), 1) + self.assertEqual(events[0], event2) + # find events within 10 degrees of san francisco point_and_distance = [[37.7566023, -122.415579], 10] events = Event.objects(location__within_distance=point_and_distance) @@ -2317,6 +2323,10 @@ class QuerySetTest(unittest.TestCase): ); self.assertEqual(points.count(), 2) + points = Point.objects(location__near_sphere=[-122, 37.5], + location__max_distance=60 / earth_radius); + self.assertEqual(points.count(), 2) + # Finds both points, but 
orders the north point first because it's # closer to the reference point to the north. points = Point.objects(location__near_sphere=[-122, 38.5]) From 3ba58ebaae51a708ccf02bd3bd7f6e75e6ce3258 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 22 Jan 2013 13:39:53 +0000 Subject: [PATCH 102/464] Added Nicolas Trippar to AUTHORS #179 --- AUTHORS | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 989fd68..7c2f8c8 100644 --- a/AUTHORS +++ b/AUTHORS @@ -133,4 +133,5 @@ that much better: * Pete Campton * Martyn Smith * Marcelo Anton - * Aleksey Porfirov \ No newline at end of file + * Aleksey Porfirov + * Nicolas Trippar \ No newline at end of file From 344dc64df85cbcbcfccaad4cbce7351465170c43 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 22 Jan 2013 14:05:06 +0000 Subject: [PATCH 103/464] Updated authors and changelog #163 --- AUTHORS | 3 ++- docs/changelog.rst | 1 + mongoengine/document.py | 7 +++---- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/AUTHORS b/AUTHORS index 7c2f8c8..aa7f833 100644 --- a/AUTHORS +++ b/AUTHORS @@ -134,4 +134,5 @@ that much better: * Martyn Smith * Marcelo Anton * Aleksey Porfirov - * Nicolas Trippar \ No newline at end of file + * Nicolas Trippar + * Manuel Hermann \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 3e8f782..1905a9d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -28,6 +28,7 @@ Changes in 0.8.X - FileFields now copyable (#198) - Querysets now return clones and are no longer edit in place (#56) - Added support for $maxDistance (#179) +- Uses getlasterror to test created on updated saves (#163) Changes in 0.7.9 ================ diff --git a/mongoengine/document.py b/mongoengine/document.py index 03a838b..69d4d40 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -219,11 +219,11 @@ class Document(BaseDocument): doc = self.to_mongo() - find_delta = ('_id' not in doc or self._created or 
force_insert) + created = ('_id' not in doc or self._created or force_insert) try: collection = self.__class__.objects._collection - if find_delta: + if created: if force_insert: object_id = collection.insert(doc, safe=safe, **write_options) @@ -289,8 +289,7 @@ class Document(BaseDocument): self._changed_fields = [] self._created = False - signals.post_save.send(self.__class__, document=self, - created=find_delta) + signals.post_save.send(self.__class__, document=self, created=created) return self def cascade_save(self, warn_cascade=None, *args, **kwargs): From 692f00864d982a8c54f08bcda3712b43d9708751 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 22 Jan 2013 15:16:58 +0000 Subject: [PATCH 104/464] Fixed inheritance and unique index creation (#140) --- docs/changelog.rst | 1 + mongoengine/base/document.py | 79 +++++++++++++++++++++++---------- mongoengine/base/metaclasses.py | 5 +-- mongoengine/document.py | 17 ++----- tests/document/indexes.py | 9 ++-- 5 files changed, 65 insertions(+), 46 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 1905a9d..cb0ac6c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -29,6 +29,7 @@ Changes in 0.8.X - Querysets now return clones and are no longer edit in place (#56) - Added support for $maxDistance (#179) - Uses getlasterror to test created on updated saves (#163) +- Fixed inheritance and unique index creation (#140) Changes in 0.7.9 ================ diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 93bde8e..9f40061 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -509,6 +509,34 @@ class BaseDocument(object): obj._created = False return obj + @classmethod + def _build_index_specs(cls, meta_indexes): + """Generate and merge the full index specs + """ + + geo_indices = cls._geo_indices() + unique_indices = cls._unique_with_indexes() + index_specs = [cls._build_index_spec(spec) + for spec in meta_indexes] + + def 
merge_index_specs(index_specs, indices): + if not indices: + return index_specs + + spec_fields = [v['fields'] + for k, v in enumerate(index_specs)] + # Merge unqiue_indexes with existing specs + for k, v in enumerate(indices): + if v['fields'] in spec_fields: + index_specs[spec_fields.index(v['fields'])].update(v) + else: + index_specs.append(v) + return index_specs + + index_specs = merge_index_specs(index_specs, geo_indices) + index_specs = merge_index_specs(index_specs, unique_indices) + return index_specs + @classmethod def _build_index_spec(cls, spec): """Build a PyMongo index spec from a MongoEngine index spec. @@ -576,6 +604,7 @@ class BaseDocument(object): """ unique_indexes = [] for field_name, field in cls._fields.items(): + sparse = False # Generate a list of indexes needed by uniqueness constraints if field.unique: field.required = True @@ -596,11 +625,14 @@ class BaseDocument(object): unique_with.append('.'.join(name_parts)) # Unique field should be required parts[-1].required = True + sparse = (not sparse and + parts[-1].name not in cls.__dict__) unique_fields += unique_with # Add the new index to the list - index = [("%s%s" % (namespace, f), pymongo.ASCENDING) + fields = [("%s%s" % (namespace, f), pymongo.ASCENDING) for f in unique_fields] + index = {'fields': fields, 'unique': True, 'sparse': sparse} unique_indexes.append(index) # Grab any embedded document field unique indexes @@ -612,6 +644,29 @@ class BaseDocument(object): return unique_indexes + @classmethod + def _geo_indices(cls, inspected=None): + inspected = inspected or [] + geo_indices = [] + inspected.append(cls) + + EmbeddedDocumentField = _import_class("EmbeddedDocumentField") + GeoPointField = _import_class("GeoPointField") + + for field in cls._fields.values(): + if not isinstance(field, (EmbeddedDocumentField, GeoPointField)): + continue + if hasattr(field, 'document_type'): + field_cls = field.document_type + if field_cls in inspected: + continue + if hasattr(field_cls, 
'_geo_indices'): + geo_indices += field_cls._geo_indices(inspected) + elif field._geo_index: + geo_indices.append({'fields': + [(field.db_field, pymongo.GEO2D)]}) + return geo_indices + @classmethod def _lookup_field(cls, parts): """Lookup a field based on its attribute and return a list containing @@ -671,28 +726,6 @@ class BaseDocument(object): parts = [f.db_field for f in cls._lookup_field(parts)] return '.'.join(parts) - @classmethod - def _geo_indices(cls, inspected=None): - inspected = inspected or [] - geo_indices = [] - inspected.append(cls) - - EmbeddedDocumentField = _import_class("EmbeddedDocumentField") - GeoPointField = _import_class("GeoPointField") - - for field in cls._fields.values(): - if not isinstance(field, (EmbeddedDocumentField, GeoPointField)): - continue - if hasattr(field, 'document_type'): - field_cls = field.document_type - if field_cls in inspected: - continue - if hasattr(field_cls, '_geo_indices'): - geo_indices += field_cls._geo_indices(inspected) - elif field._geo_index: - geo_indices.append(field) - return geo_indices - def __set_field_display(self): """Dynamically set the display value for a field with choices""" for attr_name, field in self._fields.items(): diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index af39e14..2b63bfa 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -329,10 +329,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): meta = new_class._meta # Set index specifications - meta['index_specs'] = [new_class._build_index_spec(spec) - for spec in meta['indexes']] - unique_indexes = new_class._unique_with_indexes() - new_class._meta['unique_indexes'] = unique_indexes + meta['index_specs'] = new_class._build_index_specs(meta['indexes']) # If collection is a callable - call it and set the value collection = meta.get('collection') diff --git a/mongoengine/document.py b/mongoengine/document.py index 69d4d40..fff7efa 100644 --- 
a/mongoengine/document.py +++ b/mongoengine/document.py @@ -105,7 +105,7 @@ class Document(BaseDocument): By default, _cls will be added to the start of every index (that doesn't contain a list) if allow_inheritance is True. This can be - disabled by either setting types to False on the specific index or + disabled by either setting cls to False on the specific index or by setting index_cls to False on the meta dictionary for the document. """ @@ -481,12 +481,6 @@ class Document(BaseDocument): first_field = fields[0][0] return first_field == '_cls' - # Ensure indexes created by uniqueness constraints - for index in cls._meta['unique_indexes']: - cls_indexed = cls_indexed or includes_cls(index) - collection.ensure_index(index, unique=True, background=background, - drop_dups=drop_dups, **index_opts) - # Ensure document-defined indexes are created if cls._meta['index_specs']: index_spec = cls._meta['index_specs'] @@ -496,7 +490,8 @@ class Document(BaseDocument): cls_indexed = cls_indexed or includes_cls(fields) opts = index_opts.copy() opts.update(spec) - collection.ensure_index(fields, background=background, **opts) + collection.ensure_index(fields, background=background, + drop_dups=drop_dups, **opts) # If _cls is being used (for polymorphism), it needs an index, # only if another index doesn't begin with _cls @@ -505,12 +500,6 @@ class Document(BaseDocument): collection.ensure_index('_cls', background=background, **index_opts) - # Add geo indicies - for field in cls._geo_indices(): - index_spec = [(field.db_field, pymongo.GEO2D)] - collection.ensure_index(index_spec, background=background, - **index_opts) - class DynamicDocument(Document): """A Dynamic Document class allowing flexible, expandable and uncontrolled diff --git a/tests/document/indexes.py b/tests/document/indexes.py index cf25f61..fb278aa 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -259,13 +259,12 @@ class IndexesTest(unittest.TestCase): tags = ListField(StringField()) 
meta = { 'indexes': [ - {'fields': ['-date'], 'unique': True, - 'sparse': True, 'types': False}, + {'fields': ['-date'], 'unique': True, 'sparse': True}, ], } self.assertEqual([{'fields': [('addDate', -1)], 'unique': True, - 'sparse': True, 'types': False}], + 'sparse': True}], BlogPost._meta['index_specs']) BlogPost.drop_collection() @@ -674,7 +673,7 @@ class IndexesTest(unittest.TestCase): User.drop_collection() - def test_types_index_with_pk(self): + def test_index_with_pk(self): """Ensure you can use `pk` as part of a query""" class Comment(EmbeddedDocument): @@ -687,7 +686,7 @@ class IndexesTest(unittest.TestCase): {'fields': ['pk', 'comments.comment_id'], 'unique': True}]} except UnboundLocalError: - self.fail('Unbound local error at types index + pk definition') + self.fail('Unbound local error at index + pk definition') info = BlogPost.objects._collection.index_information() info = [value['key'] for key, value in info.iteritems()] From 3364e040c83412444dd19c00f714d33c27a0e526 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 22 Jan 2013 16:05:44 +0000 Subject: [PATCH 105/464] Ensure $maxDistance is always the last part of the query (#179) --- mongoengine/queryset/transform.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 5707cec..71f12e3 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -1,5 +1,7 @@ from collections import defaultdict +from bson import SON + from mongoengine.common import _import_class from mongoengine.errors import InvalidQueryError, LookUpError @@ -123,6 +125,16 @@ def query(_doc_cls=None, _field_operation=False, **query): elif key in mongo_query: if key in mongo_query and isinstance(mongo_query[key], dict): mongo_query[key].update(value) + # $maxDistance needs to come last - convert to SON + if '$maxDistance' in mongo_query[key]: + value_dict = mongo_query[key] + value_son = SON() + for k, v in 
value_dict.iteritems(): + if k == '$maxDistance': + continue + value_son[k] = v + value_son['$maxDistance'] = value_dict['$maxDistance'] + mongo_query[key] = value_son else: # Store for manually merging later merge_query[key].append(value) From 445f9453c4d305c394bb833dcd16ddce4b3ab2b8 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 22 Jan 2013 16:38:07 +0000 Subject: [PATCH 106/464] Fixed reverse delete rule with inheritance (#197) --- mongoengine/document.py | 17 +++++++++++++---- tests/document/instance.py | 36 ++++++++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 4 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index fff7efa..f40f1c9 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -7,7 +7,7 @@ from bson.dbref import DBRef from mongoengine import signals, queryset from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, - BaseDict, BaseList, ALLOW_INHERITANCE) + BaseDict, BaseList, ALLOW_INHERITANCE, get_document) from queryset import OperationError, NotUniqueError from connection import get_db, DEFAULT_CONNECTION_NAME @@ -421,9 +421,18 @@ class Document(BaseDocument): """This method registers the delete rules to apply when removing this object. 
""" - delete_rules = cls._meta.get('delete_rules') or {} - delete_rules[(document_cls, field_name)] = rule - cls._meta['delete_rules'] = delete_rules + classes = [get_document(class_name) + for class_name in cls._subclasses + if class_name != cls.__name__] + [cls] + documents = [get_document(class_name) + for class_name in document_cls._subclasses + if class_name != document_cls.__name__] + [document_cls] + + for cls in classes: + for document_cls in documents: + delete_rules = cls._meta.get('delete_rules') or {} + delete_rules[(document_cls, field_name)] = rule + cls._meta['delete_rules'] = delete_rules @classmethod def drop_collection(cls): diff --git a/tests/document/instance.py b/tests/document/instance.py index c8a1b11..07c4f0e 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -1378,6 +1378,42 @@ class InstanceTest(unittest.TestCase): author.delete() self.assertEqual(len(BlogPost.objects), 0) + def test_reverse_delete_rule_with_document_inheritance(self): + """Ensure that a referenced document is also deleted upon deletion + of a child document. 
+ """ + + class Writer(self.Person): + pass + + class BlogPost(Document): + content = StringField() + author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) + reviewer = ReferenceField(self.Person, reverse_delete_rule=NULLIFY) + + self.Person.drop_collection() + BlogPost.drop_collection() + + author = Writer(name='Test User') + author.save() + + reviewer = Writer(name='Re Viewer') + reviewer.save() + + post = BlogPost(content='Watched some TV') + post.author = author + post.reviewer = reviewer + post.save() + + reviewer.delete() + self.assertEqual(len(BlogPost.objects), 1) + self.assertEqual(BlogPost.objects.get().reviewer, None) + + # Delete the Writer should lead to deletion of the BlogPost + author.delete() + self.assertEqual(len(BlogPost.objects), 0) + + def test_reverse_delete_rule_cascade_and_nullify_complex_field(self): """Ensure that a referenced document is also deleted upon deletion for complex fields. From c44b98a7e15d1c82f54e5c90ccd17e8d834cb4df Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 22 Jan 2013 17:54:35 +0000 Subject: [PATCH 107/464] Updated changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index cb0ac6c..c0757fb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -30,6 +30,7 @@ Changes in 0.8.X - Added support for $maxDistance (#179) - Uses getlasterror to test created on updated saves (#163) - Fixed inheritance and unique index creation (#140) +- Fixed reverse delete rule with inheritance (#197) Changes in 0.7.9 ================ From 6d68ad735cfcb6f37b964612867f31ff188b622f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 22 Jan 2013 17:56:15 +0000 Subject: [PATCH 108/464] Fixed validation for GenericReferences Where the references haven't been dereferenced --- docs/changelog.rst | 1 + mongoengine/fields.py | 8 ++- tests/test_dereference.py | 105 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 112 insertions(+), 2 deletions(-) diff 
--git a/docs/changelog.rst b/docs/changelog.rst index c0757fb..ba2c04c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -31,6 +31,7 @@ Changes in 0.8.X - Uses getlasterror to test created on updated saves (#163) - Fixed inheritance and unique index creation (#140) - Fixed reverse delete rule with inheritance (#197) +- Fixed validation for GenericReferences which havent been dereferenced Changes in 0.7.9 ================ diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 5f11ae3..f781774 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -865,11 +865,15 @@ class GenericReferenceField(BaseField): return super(GenericReferenceField, self).__get__(instance, owner) def validate(self, value): - if not isinstance(value, (Document, DBRef)): + if not isinstance(value, (Document, DBRef, dict, SON)): self.error('GenericReferences can only contain documents') + if isinstance(value, (dict, SON)): + if '_ref' not in value or '_cls' not in value: + self.error('GenericReferences can only contain documents') + # We need the id from the saved object to create the DBRef - if isinstance(value, Document) and value.id is None: + elif isinstance(value, Document) and value.id is None: self.error('You can only reference documents once they have been' ' saved to the database') diff --git a/tests/test_dereference.py b/tests/test_dereference.py index 8557ec5..f42482d 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -1,4 +1,7 @@ from __future__ import with_statement +import sys +sys.path[0:0] = [""] + import unittest from bson import DBRef, ObjectId @@ -1018,3 +1021,105 @@ class FieldTest(unittest.TestCase): msg = Message.objects.get(id=1) self.assertEqual(0, msg.comments[0].id) self.assertEqual(1, msg.comments[1].id) + + def test_list_item_dereference_dref_false_save_doesnt_cause_extra_queries(self): + """Ensure that DBRef items in ListFields are dereferenced. 
+ """ + class User(Document): + name = StringField() + + class Group(Document): + name = StringField() + members = ListField(ReferenceField(User, dbref=False)) + + User.drop_collection() + Group.drop_collection() + + for i in xrange(1, 51): + User(name='user %s' % i).save() + + Group(name="Test", members=User.objects).save() + + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first() + self.assertEqual(q, 1) + + group_obj.name = "new test" + group_obj.save() + + self.assertEqual(q, 2) + + def test_list_item_dereference_dref_true_save_doesnt_cause_extra_queries(self): + """Ensure that DBRef items in ListFields are dereferenced. + """ + class User(Document): + name = StringField() + + class Group(Document): + name = StringField() + members = ListField(ReferenceField(User, dbref=True)) + + User.drop_collection() + Group.drop_collection() + + for i in xrange(1, 51): + User(name='user %s' % i).save() + + Group(name="Test", members=User.objects).save() + + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first() + self.assertEqual(q, 1) + + group_obj.name = "new test" + group_obj.save() + + self.assertEqual(q, 2) + + def test_generic_reference_save_doesnt_cause_extra_queries(self): + + class UserA(Document): + name = StringField() + + class UserB(Document): + name = StringField() + + class UserC(Document): + name = StringField() + + class Group(Document): + name = StringField() + members = ListField(GenericReferenceField()) + + UserA.drop_collection() + UserB.drop_collection() + UserC.drop_collection() + Group.drop_collection() + + members = [] + for i in xrange(1, 51): + a = UserA(name='User A %s' % i).save() + b = UserB(name='User B %s' % i).save() + c = UserC(name='User C %s' % i).save() + + members += [a, b, c] + + Group(name="test", members=members).save() + + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first() + self.assertEqual(q, 1) + + group_obj.name = "new 
test" + group_obj.save() + + self.assertEqual(q, 2) + +if __name__ == '__main__': + unittest.main() From e5e88d792e457e17040d803ab8c0592b174989bb Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 23 Jan 2013 12:54:14 +0000 Subject: [PATCH 109/464] Added SwitchDB context manager (#106) --- docs/apireference.rst | 1 + docs/changelog.rst | 1 + docs/guide/connecting.rst | 18 ++++++++++++++++ mongoengine/connection.py | 43 ++++++++++++++++++++++++++++++++++++++- tests/test_connection.py | 23 +++++++++++++++++++++ 5 files changed, 85 insertions(+), 1 deletion(-) diff --git a/docs/apireference.rst b/docs/apireference.rst index 0f8901a..69b1db0 100644 --- a/docs/apireference.rst +++ b/docs/apireference.rst @@ -7,6 +7,7 @@ Connecting .. autofunction:: mongoengine.connect .. autofunction:: mongoengine.register_connection +.. autoclass:: mongoengine.SwitchDB Documents ========= diff --git a/docs/changelog.rst b/docs/changelog.rst index ba2c04c..354d471 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -32,6 +32,7 @@ Changes in 0.8.X - Fixed inheritance and unique index creation (#140) - Fixed reverse delete rule with inheritance (#197) - Fixed validation for GenericReferences which havent been dereferenced +- Added SwitchDB context manager (#106) Changes in 0.7.9 ================ diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index 657c46c..b39ccda 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -69,3 +69,21 @@ to point across databases and collections. Below is an example schema, using book = ReferenceField(Book) meta = {"db_alias": "users-books-db"} + + +Switch Database Context Manager +=============================== + +Sometimes you might want to switch the database to query against for a class. 
+The SwitchDB context manager allows you to change the database alias for a +class eg :: + + from mongoengine import SwitchDB + + class User(Document): + name = StringField() + + meta = {"db_alias": "user-db"} + + with SwitchDB(User, 'archive-user-db') as User: + User(name="Ross").save() # Saves the 'archive-user-db' diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 87308ba..b6c78e8 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -3,7 +3,7 @@ from pymongo import Connection, ReplicaSetConnection, uri_parser __all__ = ['ConnectionError', 'connect', 'register_connection', - 'DEFAULT_CONNECTION_NAME'] + 'DEFAULT_CONNECTION_NAME', 'SwitchDB'] DEFAULT_CONNECTION_NAME = 'default' @@ -163,6 +163,47 @@ def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs): return get_connection(alias) + +class SwitchDB(object): + """ SwitchDB alias contextmanager. + + Example :: + # Register connections + register_connection('default', 'mongoenginetest') + register_connection('testdb-1', 'mongoenginetest2') + + class Group(Document): + name = StringField() + + Group(name="test").save() # Saves in the default db + + with SwitchDB(Group, 'testdb-1') as Group: + Group(name="hello testdb!").save() # Saves in testdb-1 + + """ + + def __init__(self, cls, db_alias): + """ Construct the query_counter. 
+ + :param cls: the class to change the registered db + :param db_alias: the name of the specific database to use + """ + self.cls = cls + self.collection = cls._get_collection() + self.db_alias = db_alias + self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME) + + def __enter__(self): + """ change the db_alias and clear the cached collection """ + self.cls._meta["db_alias"] = self.db_alias + self.cls._collection = None + return self.cls + + def __exit__(self, t, value, traceback): + """ Reset the db_alias and collection """ + self.cls._meta["db_alias"] = self.ori_db_alias + self.cls._collection = self.collection + # Support old naming convention _get_connection = get_connection _get_db = get_db diff --git a/tests/test_connection.py b/tests/test_connection.py index cd03df0..4931dc9 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -93,6 +93,29 @@ class ConnectionTest(unittest.TestCase): date_doc = DateDoc.objects.first() self.assertEqual(d, date_doc.the_date) + def test_switch_db_context_manager(self): + register_connection('testdb-1', 'mongoenginetest2') + + class Group(Document): + name = StringField() + + Group.drop_collection() + + Group(name="hello - default").save() + self.assertEqual(1, Group.objects.count()) + + with SwitchDB(Group, 'testdb-1') as Group: + + self.assertEqual(0, Group.objects.count()) + + Group(name="hello").save() + + self.assertEqual(1, Group.objects.count()) + + Group.drop_collection() + self.assertEqual(0, Group.objects.count()) + + self.assertEqual(1, Group.objects.count()) if __name__ == '__main__': unittest.main() From ea46edf50a712021d12249fd9e784bffbb505520 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 23 Jan 2013 16:07:07 +0000 Subject: [PATCH 110/464] Added switch_db method to document instances (#106) --- docs/changelog.rst | 1 + mongoengine/connection.py | 1 + mongoengine/document.py | 46 +++++++++++++++++++++++++++++++---- tests/document/instance.py | 50 
++++++++++++++++++++++++++++++++++++++ tests/test_connection.py | 1 + 5 files changed, 94 insertions(+), 5 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 354d471..65e1103 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -33,6 +33,7 @@ Changes in 0.8.X - Fixed reverse delete rule with inheritance (#197) - Fixed validation for GenericReferences which havent been dereferenced - Added SwitchDB context manager (#106) +- Added switch_db method to document instances (#106) Changes in 0.7.9 ================ diff --git a/mongoengine/connection.py b/mongoengine/connection.py index b6c78e8..80791e5 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -168,6 +168,7 @@ class SwitchDB(object): """ SwitchDB alias contextmanager. Example :: + # Register connections register_connection('default', 'mongoenginetest') register_connection('testdb-1', 'mongoenginetest2') diff --git a/mongoengine/document.py b/mongoengine/document.py index f40f1c9..3bc4cae 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -9,7 +9,7 @@ from mongoengine import signals, queryset from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, BaseDict, BaseList, ALLOW_INHERITANCE, get_document) from queryset import OperationError, NotUniqueError -from connection import get_db, DEFAULT_CONNECTION_NAME +from connection import get_db, DEFAULT_CONNECTION_NAME, SwitchDB __all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument', 'DynamicEmbeddedDocument', 'OperationError', @@ -222,7 +222,7 @@ class Document(BaseDocument): created = ('_id' not in doc or self._created or force_insert) try: - collection = self.__class__.objects._collection + collection = self._get_collection() if created: if force_insert: object_id = collection.insert(doc, safe=safe, @@ -321,6 +321,16 @@ class Document(BaseDocument): ref.save(**kwargs) ref._changed_fields = [] + @property + def _qs(self): + """ + Returns the queryset to use for updating / 
reloading / deletions + """ + qs = self.__class__.objects + if hasattr(self, '_objects'): + qs = self._objects + return qs + @property def _object_key(self): """Dict to identify object in collection @@ -342,7 +352,7 @@ class Document(BaseDocument): raise OperationError('attempt to update a document not yet saved') # Need to add shard key to query, or you get an error - return self.__class__.objects(**self._object_key).update_one(**kwargs) + return self._qs.filter(**self._object_key).update_one(**kwargs) def delete(self, safe=False): """Delete the :class:`~mongoengine.Document` from the database. This @@ -353,13 +363,39 @@ class Document(BaseDocument): signals.pre_delete.send(self.__class__, document=self) try: - self.__class__.objects(**self._object_key).delete(safe=safe) + self._qs.filter(**self._object_key).delete(safe=safe) except pymongo.errors.OperationFailure, err: message = u'Could not delete document (%s)' % err.message raise OperationError(message) signals.post_delete.send(self.__class__, document=self) + def switch_db(self, db_alias): + """ + Temporarily switch the database for a document instance. + + Only really useful for archiving off data and calling `save()`:: + + user = User.objects.get(id=user_id) + user.switch_db('archive-db') + user.save() + + If you need to read from another database see + :class:`~mongoengine.SwitchDB` + + :param db_alias: The database alias to use for saving the document + """ + with SwitchDB(self.__class__, db_alias) as cls: + collection = cls._get_collection() + db = cls._get_db + self._get_collection = lambda: collection + self._get_db = lambda: db + self._collection = collection + self._created = True + self._objects = self.__class__.objects + self._objects._collection_obj = collection + return self + def select_related(self, max_depth=1): """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to a maximum depth in order to cut down the number queries to mongodb. 
@@ -377,7 +413,7 @@ class Document(BaseDocument): .. versionchanged:: 0.6 Now chainable """ id_field = self._meta['id_field'] - obj = self.__class__.objects( + obj = self._qs.filter( **{id_field: self[id_field]} ).limit(1).select_related(max_depth=max_depth) if obj: diff --git a/tests/document/instance.py b/tests/document/instance.py index 07c4f0e..3b5a4bd 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -2114,6 +2114,56 @@ class ValidatorErrorTest(unittest.TestCase): self.assertEqual(classic_doc, dict_doc) self.assertEqual(classic_doc._data, dict_doc._data) + def test_switch_db_instance(self): + register_connection('testdb-1', 'mongoenginetest2') + + class Group(Document): + name = StringField() + + Group.drop_collection() + with SwitchDB(Group, 'testdb-1') as Group: + Group.drop_collection() + + Group(name="hello - default").save() + self.assertEqual(1, Group.objects.count()) + + group = Group.objects.first() + group.switch_db('testdb-1') + group.name = "hello - testdb!" 
+ group.save() + + with SwitchDB(Group, 'testdb-1') as Group: + group = Group.objects.first() + self.assertEqual("hello - testdb!", group.name) + + group = Group.objects.first() + self.assertEqual("hello - default", group.name) + + # Slightly contrived now - perform an update + # Only works as they have the same object_id + group.switch_db('testdb-1') + group.update(set__name="hello - update") + + with SwitchDB(Group, 'testdb-1') as Group: + group = Group.objects.first() + self.assertEqual("hello - update", group.name) + Group.drop_collection() + self.assertEqual(0, Group.objects.count()) + + group = Group.objects.first() + self.assertEqual("hello - default", group.name) + + # Totally contrived now - perform a delete + # Only works as they have the same object_id + group.switch_db('testdb-1') + group.delete() + + with SwitchDB(Group, 'testdb-1') as Group: + self.assertEqual(0, Group.objects.count()) + + group = Group.objects.first() + self.assertEqual("hello - default", group.name) + if __name__ == '__main__': unittest.main() diff --git a/tests/test_connection.py b/tests/test_connection.py index 4931dc9..7ff18a3 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -94,6 +94,7 @@ class ConnectionTest(unittest.TestCase): self.assertEqual(d, date_doc.the_date) def test_switch_db_context_manager(self): + connect('mongoenginetest') register_connection('testdb-1', 'mongoenginetest2') class Group(Document): From 4f70c27b5695216839bd92d42eadd04518d95e58 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 23 Jan 2013 16:19:07 +0000 Subject: [PATCH 111/464] Updated doc string --- mongoengine/connection.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 80791e5..9f906a2 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -165,7 +165,7 @@ def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs): class SwitchDB(object): - """ SwitchDB alias 
contextmanager. + """ SwitchDB alias context manager. Example :: @@ -184,7 +184,7 @@ class SwitchDB(object): """ def __init__(self, cls, db_alias): - """ Construct the query_counter. + """ Construct the SwitchDB context manager :param cls: the class to change the registered db :param db_alias: the name of the specific database to use From 3a6dc77d3630ca87033374ae8c094f5ee8c9cda2 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 23 Jan 2013 19:05:44 +0000 Subject: [PATCH 112/464] Added no_dereference context manager (#82) Reorganised the context_managers as well --- docs/apireference.rst | 8 +- docs/changelog.rst | 3 +- docs/guide/connecting.rst | 8 +- docs/guide/querying.rst | 18 +++- mongoengine/base/fields.py | 12 ++- mongoengine/common.py | 2 +- mongoengine/connection.py | 43 +-------- mongoengine/context_managers.py | 159 +++++++++++++++++++++++++++++++ mongoengine/document.py | 18 ++-- mongoengine/fields.py | 2 +- mongoengine/queryset/manager.py | 4 +- mongoengine/queryset/queryset.py | 1 - mongoengine/tests.py | 59 ------------ tests/queryset/queryset.py | 2 +- tests/test_connection.py | 4 +- tests/test_dereference.py | 74 +++++++++++++- 16 files changed, 289 insertions(+), 128 deletions(-) create mode 100644 mongoengine/context_managers.py delete mode 100644 mongoengine/tests.py diff --git a/docs/apireference.rst b/docs/apireference.rst index 69b1db0..049cc30 100644 --- a/docs/apireference.rst +++ b/docs/apireference.rst @@ -7,7 +7,6 @@ Connecting .. autofunction:: mongoengine.connect .. autofunction:: mongoengine.register_connection -.. autoclass:: mongoengine.SwitchDB Documents ========= @@ -35,6 +34,13 @@ Documents .. autoclass:: mongoengine.ValidationError :members: +Context Managers +================ + +.. autoclass:: mongoengine.context_managers.switch_db +.. autoclass:: mongoengine.context_managers.no_dereference +.. 
autoclass:: mongoengine.context_managers.query_counter + Querying ======== diff --git a/docs/changelog.rst b/docs/changelog.rst index 65e1103..bead693 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -32,8 +32,9 @@ Changes in 0.8.X - Fixed inheritance and unique index creation (#140) - Fixed reverse delete rule with inheritance (#197) - Fixed validation for GenericReferences which havent been dereferenced -- Added SwitchDB context manager (#106) +- Added switch_db context manager (#106) - Added switch_db method to document instances (#106) +- Added no_dereference context manager (#82) Changes in 0.7.9 ================ diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index b39ccda..ebd61a9 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -75,15 +75,15 @@ Switch Database Context Manager =============================== Sometimes you might want to switch the database to query against for a class. -The SwitchDB context manager allows you to change the database alias for a -class eg :: +The :class:`~mongoengine.context_managers.switch_db` context manager allows +you to change the database alias for a class eg :: - from mongoengine import SwitchDB + from mongoengine.context_managers import switch_db class User(Document): name = StringField() meta = {"db_alias": "user-db"} - with SwitchDB(User, 'archive-user-db') as User: + with switch_db(User, 'archive-user-db') as User: User(name="Ross").save() # Saves the 'archive-user-db' diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 40e36e3..7ccf143 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -93,7 +93,7 @@ may used with :class:`~mongoengine.GeoPointField`\ s: [(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]). .. note:: Requires Mongo Server 2.0 * ``max_distance`` -- can be added to your location queries to set a maximum -distance. + distance. Querying lists @@ -369,6 +369,22 @@ references to the depth of 1 level. 
If you have more complicated documents and want to dereference more of the object at once then increasing the :attr:`max_depth` will dereference more levels of the document. +Turning off dereferencing +------------------------- + +Sometimes for performance reasons you don't want to automatically dereference +data . To turn off all dereferencing you can use the +:class:`~mongoengine.context_managers.no_dereference` context manager:: + + with no_dereference(Post) as Post: + post = Post.objects.first() + assert(isinstance(post.author, ObjectId)) + +.. note:: + + :class:`~mongoengine.context_managers.no_dereference` only works on the + Default QuerySet manager. + Advanced queries ================ Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index a892fbd..82981e2 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -23,6 +23,7 @@ class BaseField(object): name = None _geo_index = False _auto_gen = False # Call `generate` to generate a value + _auto_dereference = True # These track each time a Field instance is created. Used to retain order. 
# The auto_creation_counter is used for fields that MongoEngine implicitly @@ -163,9 +164,11 @@ class ComplexBaseField(BaseField): ReferenceField = _import_class('ReferenceField') GenericReferenceField = _import_class('GenericReferenceField') - dereference = self.field is None or isinstance(self.field, - (GenericReferenceField, ReferenceField)) - if not self._dereference and instance._initialised and dereference: + dereference = (self._auto_dereference and + (self.field is None or isinstance(self.field, + (GenericReferenceField, ReferenceField)))) + + if not self.__dereference and instance._initialised and dereference: instance._data[self.name] = self._dereference( instance._data.get(self.name), max_depth=1, instance=instance, name=self.name @@ -182,7 +185,8 @@ class ComplexBaseField(BaseField): value = BaseDict(value, instance, self.name) instance._data[self.name] = value - if (instance._initialised and isinstance(value, (BaseList, BaseDict)) + if (self._auto_dereference and instance._initialised and + isinstance(value, (BaseList, BaseDict)) and not value._dereferenced): value = self._dereference( value, max_depth=1, instance=instance, name=self.name diff --git a/mongoengine/common.py b/mongoengine/common.py index a8422c0..718ac0b 100644 --- a/mongoengine/common.py +++ b/mongoengine/common.py @@ -11,7 +11,7 @@ def _import_class(cls_name): field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField', 'FileField', 'GenericReferenceField', 'GenericEmbeddedDocumentField', 'GeoPointField', - 'ReferenceField', 'StringField') + 'ReferenceField', 'StringField', 'ComplexBaseField') queryset_classes = ('OperationError',) deref_classes = ('DeReference',) diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 9f906a2..a47be44 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -3,7 +3,7 @@ from pymongo import Connection, ReplicaSetConnection, uri_parser __all__ = ['ConnectionError', 'connect', 'register_connection', - 
'DEFAULT_CONNECTION_NAME', 'SwitchDB'] + 'DEFAULT_CONNECTION_NAME'] DEFAULT_CONNECTION_NAME = 'default' @@ -164,47 +164,6 @@ def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs): return get_connection(alias) -class SwitchDB(object): - """ SwitchDB alias context manager. - - Example :: - - # Register connections - register_connection('default', 'mongoenginetest') - register_connection('testdb-1', 'mongoenginetest2') - - class Group(Document): - name = StringField() - - Group(name="test").save() # Saves in the default db - - with SwitchDB(Group, 'testdb-1') as Group: - Group(name="hello testdb!").save() # Saves in testdb-1 - - """ - - def __init__(self, cls, db_alias): - """ Construct the SwitchDB context manager - - :param cls: the class to change the registered db - :param db_alias: the name of the specific database to use - """ - self.cls = cls - self.collection = cls._get_collection() - self.db_alias = db_alias - self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME) - - def __enter__(self): - """ change the db_alias and clear the cached collection """ - self.cls._meta["db_alias"] = self.db_alias - self.cls._collection = None - return self.cls - - def __exit__(self, t, value, traceback): - """ Reset the db_alias and collection """ - self.cls._meta["db_alias"] = self.ori_db_alias - self.cls._collection = self.collection - # Support old naming convention _get_connection = get_connection _get_db = get_db diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py new file mode 100644 index 0000000..7255d51 --- /dev/null +++ b/mongoengine/context_managers.py @@ -0,0 +1,159 @@ +from mongoengine.common import _import_class +from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db +from mongoengine.queryset import OperationError, QuerySet + +__all__ = ("switch_db", "no_dereference", "query_counter") + + +class switch_db(object): + """ switch_db alias context manager. 
+ + Example :: + + # Register connections + register_connection('default', 'mongoenginetest') + register_connection('testdb-1', 'mongoenginetest2') + + class Group(Document): + name = StringField() + + Group(name="test").save() # Saves in the default db + + with switch_db(Group, 'testdb-1') as Group: + Group(name="hello testdb!").save() # Saves in testdb-1 + + """ + + def __init__(self, cls, db_alias): + """ Construct the switch_db context manager + + :param cls: the class to change the registered db + :param db_alias: the name of the specific database to use + """ + self.cls = cls + self.collection = cls._get_collection() + self.db_alias = db_alias + self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME) + + def __enter__(self): + """ change the db_alias and clear the cached collection """ + self.cls._meta["db_alias"] = self.db_alias + self.cls._collection = None + return self.cls + + def __exit__(self, t, value, traceback): + """ Reset the db_alias and collection """ + self.cls._meta["db_alias"] = self.ori_db_alias + self.cls._collection = self.collection + + +class no_dereference(object): + """ no_dereference context manager. + + Turns off all dereferencing in Documents:: + + with no_dereference(Group) as Group: + Group.objects.find() + + """ + + def __init__(self, cls): + """ Construct the no_dereference context manager. 
+ + :param cls: the class to turn dereferencing off on + """ + self.cls = cls + + ReferenceField = _import_class('ReferenceField') + GenericReferenceField = _import_class('GenericReferenceField') + ComplexBaseField = _import_class('ComplexBaseField') + + self.deref_fields = [k for k, v in self.cls._fields.iteritems() + if isinstance(v, (ReferenceField, + GenericReferenceField, + ComplexBaseField))] + + def __enter__(self): + """ change the objects default and _auto_dereference values""" + if 'queryset_class' in self.cls._meta: + raise OperationError("no_dereference context manager only works on" + " default queryset classes") + objects = self.cls.__dict__['objects'] + objects.default = QuerySetNoDeRef + self.cls.objects = objects + for field in self.deref_fields: + self.cls._fields[field]._auto_dereference = False + return self.cls + + def __exit__(self, t, value, traceback): + """ Reset the default and _auto_dereference values""" + objects = self.cls.__dict__['objects'] + objects.default = QuerySet + self.cls.objects = objects + for field in self.deref_fields: + self.cls._fields[field]._auto_dereference = True + return self.cls + + +class QuerySetNoDeRef(QuerySet): + """Special no_dereference QuerySet""" + def __dereference(items, max_depth=1, instance=None, name=None): + return items + + +class query_counter(object): + """ Query_counter contextmanager to get the number of queries. """ + + def __init__(self): + """ Construct the query_counter. """ + self.counter = 0 + self.db = get_db() + + def __enter__(self): + """ On every with block we need to drop the profile collection. """ + self.db.set_profiling_level(0) + self.db.system.profile.drop() + self.db.set_profiling_level(2) + return self + + def __exit__(self, t, value, traceback): + """ Reset the profiling level. """ + self.db.set_profiling_level(0) + + def __eq__(self, value): + """ == Compare querycounter. """ + return value == self._get_count() + + def __ne__(self, value): + """ != Compare querycounter. 
""" + return not self.__eq__(value) + + def __lt__(self, value): + """ < Compare querycounter. """ + return self._get_count() < value + + def __le__(self, value): + """ <= Compare querycounter. """ + return self._get_count() <= value + + def __gt__(self, value): + """ > Compare querycounter. """ + return self._get_count() > value + + def __ge__(self, value): + """ >= Compare querycounter. """ + return self._get_count() >= value + + def __int__(self): + """ int representation. """ + return self._get_count() + + def __repr__(self): + """ repr query_counter as the number of queries. """ + return u"%s" % self._get_count() + + def _get_count(self): + """ Get the number of queries. """ + count = self.db.system.profile.find().count() - self.counter + self.counter += 1 + return count diff --git a/mongoengine/document.py b/mongoengine/document.py index 3bc4cae..9d4a1e6 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -1,15 +1,17 @@ +from __future__ import with_statement import warnings import pymongo import re from bson.dbref import DBRef -from mongoengine import signals, queryset - -from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, - BaseDict, BaseList, ALLOW_INHERITANCE, get_document) -from queryset import OperationError, NotUniqueError -from connection import get_db, DEFAULT_CONNECTION_NAME, SwitchDB +from mongoengine import signals +from mongoengine.base import (DocumentMetaclass, TopLevelDocumentMetaclass, + BaseDocument, BaseDict, BaseList, + ALLOW_INHERITANCE, get_document) +from mongoengine.queryset import OperationError, NotUniqueError +from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME +from mongoengine.context_managers import switch_db __all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument', 'DynamicEmbeddedDocument', 'OperationError', @@ -381,11 +383,11 @@ class Document(BaseDocument): user.save() If you need to read from another database see - :class:`~mongoengine.SwitchDB` + 
:class:`~mongoengine.context_managers.switch_db` :param db_alias: The database alias to use for saving the document """ - with SwitchDB(self.__class__, db_alias) as cls: + with switch_db(self.__class__, db_alias) as cls: collection = cls._get_collection() db = cls._get_db self._get_collection = lambda: collection diff --git a/mongoengine/fields.py b/mongoengine/fields.py index f781774..1ccdb65 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -779,7 +779,7 @@ class ReferenceField(BaseField): value = instance._data.get(self.name) # Dereference DBRefs - if isinstance(value, DBRef): + if self._auto_dereference and isinstance(value, DBRef): value = self.document_type._get_db().dereference(value) if value is not None: instance._data[self.name] = self.document_type._from_son(value) diff --git a/mongoengine/queryset/manager.py b/mongoengine/queryset/manager.py index d9f9992..47c2143 100644 --- a/mongoengine/queryset/manager.py +++ b/mongoengine/queryset/manager.py @@ -18,11 +18,11 @@ class QuerySetManager(object): """ get_queryset = None + default = QuerySet def __init__(self, queryset_func=None): if queryset_func: self.get_queryset = queryset_func - self._collections = {} def __get__(self, instance, owner): """Descriptor for instantiating a new QuerySet object when @@ -33,7 +33,7 @@ class QuerySetManager(object): return self # owner is the document that contains the QuerySetManager - queryset_class = owner._meta.get('queryset_class') or QuerySet + queryset_class = owner._meta.get('queryset_class', self.default) queryset = queryset_class(owner, owner._get_collection()) if self.get_queryset: arg_count = self.get_queryset.func_code.co_argcount diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index e637370..a9ff6e7 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -109,7 +109,6 @@ class QuerySet(object): queryset._class_check = class_check return queryset - def __iter__(self): """Support 
iterator protocol""" self.rewind() diff --git a/mongoengine/tests.py b/mongoengine/tests.py deleted file mode 100644 index 6866377..0000000 --- a/mongoengine/tests.py +++ /dev/null @@ -1,59 +0,0 @@ -from mongoengine.connection import get_db - - -class query_counter(object): - """ Query_counter contextmanager to get the number of queries. """ - - def __init__(self): - """ Construct the query_counter. """ - self.counter = 0 - self.db = get_db() - - def __enter__(self): - """ On every with block we need to drop the profile collection. """ - self.db.set_profiling_level(0) - self.db.system.profile.drop() - self.db.set_profiling_level(2) - return self - - def __exit__(self, t, value, traceback): - """ Reset the profiling level. """ - self.db.set_profiling_level(0) - - def __eq__(self, value): - """ == Compare querycounter. """ - return value == self._get_count() - - def __ne__(self, value): - """ != Compare querycounter. """ - return not self.__eq__(value) - - def __lt__(self, value): - """ < Compare querycounter. """ - return self._get_count() < value - - def __le__(self, value): - """ <= Compare querycounter. """ - return self._get_count() <= value - - def __gt__(self, value): - """ > Compare querycounter. """ - return self._get_count() > value - - def __ge__(self, value): - """ >= Compare querycounter. """ - return self._get_count() >= value - - def __int__(self): - """ int representation. """ - return self._get_count() - - def __repr__(self): - """ repr query_counter as the number of queries. """ - return u"%s" % self._get_count() - - def _get_count(self): - """ Get the number of queries. 
""" - count = self.db.system.profile.find().count() - self.counter - self.counter += 1 - return count diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index b5b0b28..3594044 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -17,7 +17,7 @@ from bson import ObjectId from mongoengine import * from mongoengine.connection import get_connection from mongoengine.python_support import PY3 -from mongoengine.tests import query_counter +from mongoengine.context_managers import query_counter from mongoengine.queryset import (QuerySet, QuerySetManager, MultipleObjectsReturned, DoesNotExist, QueryFieldList, queryset_manager) diff --git a/tests/test_connection.py b/tests/test_connection.py index 7ff18a3..2a216fe 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -1,3 +1,4 @@ +from __future__ import with_statement import datetime import pymongo import unittest @@ -8,6 +9,7 @@ from bson.tz_util import utc from mongoengine import * from mongoengine.connection import get_db, get_connection, ConnectionError +from mongoengine.context_managers import switch_db class ConnectionTest(unittest.TestCase): @@ -105,7 +107,7 @@ class ConnectionTest(unittest.TestCase): Group(name="hello - default").save() self.assertEqual(1, Group.objects.count()) - with SwitchDB(Group, 'testdb-1') as Group: + with switch_db(Group, 'testdb-1') as Group: self.assertEqual(0, Group.objects.count()) diff --git a/tests/test_dereference.py b/tests/test_dereference.py index f42482d..8e4ffdd 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -8,7 +8,7 @@ from bson import DBRef, ObjectId from mongoengine import * from mongoengine.connection import get_db -from mongoengine.tests import query_counter +from mongoengine.context_managers import query_counter, no_dereference class FieldTest(unittest.TestCase): @@ -1121,5 +1121,77 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 2) + def 
test_no_dereference_context_manager_object_id(self): + """Ensure that DBRef items in ListFields aren't dereferenced. + """ + class User(Document): + name = StringField() + + class Group(Document): + ref = ReferenceField(User, dbref=False) + generic = GenericReferenceField() + members = ListField(ReferenceField(User, dbref=False)) + + User.drop_collection() + Group.drop_collection() + + for i in xrange(1, 51): + User(name='user %s' % i).save() + + user = User.objects.first() + Group(ref=user, members=User.objects, generic=user).save() + + with no_dereference(Group) as NoDeRefGroup: + self.assertTrue(Group._fields['members']._auto_dereference) + self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference) + + with no_dereference(Group) as Group: + group = Group.objects.first() + self.assertTrue(all([not isinstance(m, User) + for m in group.members])) + self.assertFalse(isinstance(group.ref, User)) + self.assertFalse(isinstance(group.generic, User)) + + self.assertTrue(all([isinstance(m, User) + for m in group.members])) + self.assertTrue(isinstance(group.ref, User)) + self.assertTrue(isinstance(group.generic, User)) + + def test_no_dereference_context_manager_dbref(self): + """Ensure that DBRef items in ListFields aren't dereferenced. 
+ """ + class User(Document): + name = StringField() + + class Group(Document): + ref = ReferenceField(User, dbref=True) + generic = GenericReferenceField() + members = ListField(ReferenceField(User, dbref=True)) + + User.drop_collection() + Group.drop_collection() + + for i in xrange(1, 51): + User(name='user %s' % i).save() + + user = User.objects.first() + Group(ref=user, members=User.objects, generic=user).save() + + with no_dereference(Group) as NoDeRefGroup: + self.assertTrue(Group._fields['members']._auto_dereference) + self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference) + + with no_dereference(Group) as Group: + group = Group.objects.first() + self.assertTrue(all([not isinstance(m, User) + for m in group.members])) + self.assertFalse(isinstance(group.ref, User)) + self.assertFalse(isinstance(group.generic, User)) + + self.assertTrue(all([isinstance(m, User) + for m in group.members])) + self.assertTrue(isinstance(group.ref, User)) + self.assertTrue(isinstance(group.generic, User)) + if __name__ == '__main__': unittest.main() From c8b65317effab9631b786e78f4592382a32c3658 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 23 Jan 2013 20:15:05 +0000 Subject: [PATCH 113/464] Updated documentation instance tests --- tests/document/instance.py | 103 +++++++++++++++++++------------------ 1 file changed, 52 insertions(+), 51 deletions(-) diff --git a/tests/document/instance.py b/tests/document/instance.py index 3b5a4bd..4c67046 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -18,11 +18,12 @@ from mongoengine.errors import (NotRegistered, InvalidDocumentError, from mongoengine.queryset import NULLIFY, Q from mongoengine.connection import get_db from mongoengine.base import get_document +from mongoengine.context_managers import switch_db TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), '../fields/mongoengine.png') -__all__ = ("InstanceTest",) +__all__ = ("InstanceTest", "ValidatorErrorTest") class 
InstanceTest(unittest.TestCase): @@ -1926,6 +1927,56 @@ class InstanceTest(unittest.TestCase): } )]), "1,2") + def test_switch_db_instance(self): + register_connection('testdb-1', 'mongoenginetest2') + + class Group(Document): + name = StringField() + + Group.drop_collection() + with switch_db(Group, 'testdb-1') as Group: + Group.drop_collection() + + Group(name="hello - default").save() + self.assertEqual(1, Group.objects.count()) + + group = Group.objects.first() + group.switch_db('testdb-1') + group.name = "hello - testdb!" + group.save() + + with switch_db(Group, 'testdb-1') as Group: + group = Group.objects.first() + self.assertEqual("hello - testdb!", group.name) + + group = Group.objects.first() + self.assertEqual("hello - default", group.name) + + # Slightly contrived now - perform an update + # Only works as they have the same object_id + group.switch_db('testdb-1') + group.update(set__name="hello - update") + + with switch_db(Group, 'testdb-1') as Group: + group = Group.objects.first() + self.assertEqual("hello - update", group.name) + Group.drop_collection() + self.assertEqual(0, Group.objects.count()) + + group = Group.objects.first() + self.assertEqual("hello - default", group.name) + + # Totally contrived now - perform a delete + # Only works as they have the same object_id + group.switch_db('testdb-1') + group.delete() + + with switch_db(Group, 'testdb-1') as Group: + self.assertEqual(0, Group.objects.count()) + + group = Group.objects.first() + self.assertEqual("hello - default", group.name) + class ValidatorErrorTest(unittest.TestCase): @@ -2114,56 +2165,6 @@ class ValidatorErrorTest(unittest.TestCase): self.assertEqual(classic_doc, dict_doc) self.assertEqual(classic_doc._data, dict_doc._data) - def test_switch_db_instance(self): - register_connection('testdb-1', 'mongoenginetest2') - - class Group(Document): - name = StringField() - - Group.drop_collection() - with SwitchDB(Group, 'testdb-1') as Group: - Group.drop_collection() - - 
Group(name="hello - default").save() - self.assertEqual(1, Group.objects.count()) - - group = Group.objects.first() - group.switch_db('testdb-1') - group.name = "hello - testdb!" - group.save() - - with SwitchDB(Group, 'testdb-1') as Group: - group = Group.objects.first() - self.assertEqual("hello - testdb!", group.name) - - group = Group.objects.first() - self.assertEqual("hello - default", group.name) - - # Slightly contrived now - perform an update - # Only works as they have the same object_id - group.switch_db('testdb-1') - group.update(set__name="hello - update") - - with SwitchDB(Group, 'testdb-1') as Group: - group = Group.objects.first() - self.assertEqual("hello - update", group.name) - Group.drop_collection() - self.assertEqual(0, Group.objects.count()) - - group = Group.objects.first() - self.assertEqual("hello - default", group.name) - - # Totally contrived now - perform a delete - # Only works as they have the same object_id - group.switch_db('testdb-1') - group.delete() - - with SwitchDB(Group, 'testdb-1') as Group: - self.assertEqual(0, Group.objects.count()) - - group = Group.objects.first() - self.assertEqual("hello - default", group.name) - if __name__ == '__main__': unittest.main() From 9797d7a7fb9c21684f360d03b06800b99b8093c4 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 23 Jan 2013 21:19:21 +0000 Subject: [PATCH 114/464] Added switch_collection context manager and method (#220) --- mongoengine/context_managers.py | 45 +++++++++- mongoengine/document.py | 27 +++++- tests/test_connection.py | 24 ----- tests/test_context_managers.py | 153 ++++++++++++++++++++++++++++++++ tests/test_dereference.py | 71 --------------- 5 files changed, 223 insertions(+), 97 deletions(-) create mode 100644 tests/test_context_managers.py diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 7255d51..e73d4a2 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -2,7 +2,7 @@ from mongoengine.common 
import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db from mongoengine.queryset import OperationError, QuerySet -__all__ = ("switch_db", "no_dereference", "query_counter") +__all__ = ("switch_db", "switch_collection", "no_dereference", "query_counter") class switch_db(object): @@ -47,6 +47,49 @@ class switch_db(object): self.cls._collection = self.collection +class switch_collection(object): + """ switch_collection alias context manager. + + Example :: + + class Group(Document): + name = StringField() + + Group(name="test").save() # Saves in the default db + + with switch_collection(Group, 'group1') as Group: + Group(name="hello testdb!").save() # Saves in group1 collection + + """ + + def __init__(self, cls, collection_name): + """ Construct the switch_collection context manager + + :param cls: the class to change the registered db + :param collection_name: the name of the collection to use + """ + self.cls = cls + self.ori_collection = cls._get_collection() + self.ori_get_collection_name = cls._get_collection_name + self.collection_name = collection_name + + def __enter__(self): + """ change the _get_collection_name and clear the cached collection """ + + @classmethod + def _get_collection_name(cls): + return self.collection_name + + self.cls._get_collection_name = _get_collection_name + self.cls._collection = None + return self.cls + + def __exit__(self, t, value, traceback): + """ Reset the collection """ + self.cls._collection = self.ori_collection + self.cls._get_collection_name = self.ori_get_collection_name + + class no_dereference(object): """ no_dereference context manager. 
diff --git a/mongoengine/document.py b/mongoengine/document.py index 9d4a1e6..75873b4 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -11,7 +11,7 @@ from mongoengine.base import (DocumentMetaclass, TopLevelDocumentMetaclass, ALLOW_INHERITANCE, get_document) from mongoengine.queryset import OperationError, NotUniqueError from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME -from mongoengine.context_managers import switch_db +from mongoengine.context_managers import switch_db, switch_collection __all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument', 'DynamicEmbeddedDocument', 'OperationError', @@ -398,6 +398,31 @@ class Document(BaseDocument): self._objects._collection_obj = collection return self + def switch_collection(self, collection_name): + """ + Temporarily switch the collection for a document instance. + + Only really useful for archiving off data and calling `save()`:: + + user = User.objects.get(id=user_id) + user.switch_collection('old-users') + user.save() + + If you need to read from another database see + :class:`~mongoengine.context_managers.switch_collection` + + :param collection_name: The database alias to use for saving the + document + """ + with switch_collection(self.__class__, collection_name) as cls: + collection = cls._get_collection() + self._get_collection = lambda: collection + self._collection = collection + self._created = True + self._objects = self.__class__.objects + self._objects._collection_obj = collection + return self + def select_related(self, max_depth=1): """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to a maximum depth in order to cut down the number queries to mongodb. 
diff --git a/tests/test_connection.py b/tests/test_connection.py index 2a216fe..c32d231 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -95,30 +95,6 @@ class ConnectionTest(unittest.TestCase): date_doc = DateDoc.objects.first() self.assertEqual(d, date_doc.the_date) - def test_switch_db_context_manager(self): - connect('mongoenginetest') - register_connection('testdb-1', 'mongoenginetest2') - - class Group(Document): - name = StringField() - - Group.drop_collection() - - Group(name="hello - default").save() - self.assertEqual(1, Group.objects.count()) - - with switch_db(Group, 'testdb-1') as Group: - - self.assertEqual(0, Group.objects.count()) - - Group(name="hello").save() - - self.assertEqual(1, Group.objects.count()) - - Group.drop_collection() - self.assertEqual(0, Group.objects.count()) - - self.assertEqual(1, Group.objects.count()) if __name__ == '__main__': unittest.main() diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py new file mode 100644 index 0000000..10fe7b8 --- /dev/null +++ b/tests/test_context_managers.py @@ -0,0 +1,153 @@ +from __future__ import with_statement +import unittest + +from mongoengine import * +from mongoengine.connection import get_db +from mongoengine.context_managers import (switch_db, switch_collection, + no_dereference, query_counter) + + +class ContextManagersTest(unittest.TestCase): + + def test_switch_db_context_manager(self): + connect('mongoenginetest') + register_connection('testdb-1', 'mongoenginetest2') + + class Group(Document): + name = StringField() + + Group.drop_collection() + + Group(name="hello - default").save() + self.assertEqual(1, Group.objects.count()) + + with switch_db(Group, 'testdb-1') as Group: + + self.assertEqual(0, Group.objects.count()) + + Group(name="hello").save() + + self.assertEqual(1, Group.objects.count()) + + Group.drop_collection() + self.assertEqual(0, Group.objects.count()) + + self.assertEqual(1, Group.objects.count()) + + def 
test_switch_collection_context_manager(self): + connect('mongoenginetest') + register_connection('testdb-1', 'mongoenginetest2') + + class Group(Document): + name = StringField() + + Group.drop_collection() + with switch_collection(Group, 'group1') as Group: + Group.drop_collection() + + Group(name="hello - group").save() + self.assertEqual(1, Group.objects.count()) + + with switch_collection(Group, 'group1') as Group: + + self.assertEqual(0, Group.objects.count()) + + Group(name="hello - group1").save() + + self.assertEqual(1, Group.objects.count()) + + Group.drop_collection() + self.assertEqual(0, Group.objects.count()) + + self.assertEqual(1, Group.objects.count()) + + def test_no_dereference_context_manager_object_id(self): + """Ensure that DBRef items in ListFields aren't dereferenced. + """ + connect('mongoenginetest') + + class User(Document): + name = StringField() + + class Group(Document): + ref = ReferenceField(User, dbref=False) + generic = GenericReferenceField() + members = ListField(ReferenceField(User, dbref=False)) + + User.drop_collection() + Group.drop_collection() + + for i in xrange(1, 51): + User(name='user %s' % i).save() + + user = User.objects.first() + Group(ref=user, members=User.objects, generic=user).save() + + with no_dereference(Group) as NoDeRefGroup: + self.assertTrue(Group._fields['members']._auto_dereference) + self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference) + + with no_dereference(Group) as Group: + group = Group.objects.first() + self.assertTrue(all([not isinstance(m, User) + for m in group.members])) + self.assertFalse(isinstance(group.ref, User)) + self.assertFalse(isinstance(group.generic, User)) + + self.assertTrue(all([isinstance(m, User) + for m in group.members])) + self.assertTrue(isinstance(group.ref, User)) + self.assertTrue(isinstance(group.generic, User)) + + def test_no_dereference_context_manager_dbref(self): + """Ensure that DBRef items in ListFields aren't dereferenced. 
+ """ + connect('mongoenginetest') + + class User(Document): + name = StringField() + + class Group(Document): + ref = ReferenceField(User, dbref=True) + generic = GenericReferenceField() + members = ListField(ReferenceField(User, dbref=True)) + + User.drop_collection() + Group.drop_collection() + + for i in xrange(1, 51): + User(name='user %s' % i).save() + + user = User.objects.first() + Group(ref=user, members=User.objects, generic=user).save() + + with no_dereference(Group) as NoDeRefGroup: + self.assertTrue(Group._fields['members']._auto_dereference) + self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference) + + with no_dereference(Group) as Group: + group = Group.objects.first() + self.assertTrue(all([not isinstance(m, User) + for m in group.members])) + self.assertFalse(isinstance(group.ref, User)) + self.assertFalse(isinstance(group.generic, User)) + + self.assertTrue(all([isinstance(m, User) + for m in group.members])) + self.assertTrue(isinstance(group.ref, User)) + self.assertTrue(isinstance(group.generic, User)) + + def test_query_counter(self): + connect('mongoenginetest') + db = get_db() + + with query_counter() as q: + self.assertEqual(0, q) + + for i in xrange(1, 51): + db.test.find({}).count() + + self.assertEqual(50, q) + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_dereference.py b/tests/test_dereference.py index 8e4ffdd..adbc519 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -1121,77 +1121,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 2) - def test_no_dereference_context_manager_object_id(self): - """Ensure that DBRef items in ListFields aren't dereferenced. 
- """ - class User(Document): - name = StringField() - - class Group(Document): - ref = ReferenceField(User, dbref=False) - generic = GenericReferenceField() - members = ListField(ReferenceField(User, dbref=False)) - - User.drop_collection() - Group.drop_collection() - - for i in xrange(1, 51): - User(name='user %s' % i).save() - - user = User.objects.first() - Group(ref=user, members=User.objects, generic=user).save() - - with no_dereference(Group) as NoDeRefGroup: - self.assertTrue(Group._fields['members']._auto_dereference) - self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference) - - with no_dereference(Group) as Group: - group = Group.objects.first() - self.assertTrue(all([not isinstance(m, User) - for m in group.members])) - self.assertFalse(isinstance(group.ref, User)) - self.assertFalse(isinstance(group.generic, User)) - - self.assertTrue(all([isinstance(m, User) - for m in group.members])) - self.assertTrue(isinstance(group.ref, User)) - self.assertTrue(isinstance(group.generic, User)) - - def test_no_dereference_context_manager_dbref(self): - """Ensure that DBRef items in ListFields aren't dereferenced. 
- """ - class User(Document): - name = StringField() - - class Group(Document): - ref = ReferenceField(User, dbref=True) - generic = GenericReferenceField() - members = ListField(ReferenceField(User, dbref=True)) - - User.drop_collection() - Group.drop_collection() - - for i in xrange(1, 51): - User(name='user %s' % i).save() - - user = User.objects.first() - Group(ref=user, members=User.objects, generic=user).save() - - with no_dereference(Group) as NoDeRefGroup: - self.assertTrue(Group._fields['members']._auto_dereference) - self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference) - - with no_dereference(Group) as Group: - group = Group.objects.first() - self.assertTrue(all([not isinstance(m, User) - for m in group.members])) - self.assertFalse(isinstance(group.ref, User)) - self.assertFalse(isinstance(group.generic, User)) - - self.assertTrue(all([isinstance(m, User) - for m in group.members])) - self.assertTrue(isinstance(group.ref, User)) - self.assertTrue(isinstance(group.generic, User)) if __name__ == '__main__': unittest.main() From d58f594c173fa57bb0b16e77c18724ea63dbf536 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 23 Jan 2013 21:21:46 +0000 Subject: [PATCH 115/464] Updated changelog --- docs/changelog.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index bead693..53dbeb9 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -35,6 +35,8 @@ Changes in 0.8.X - Added switch_db context manager (#106) - Added switch_db method to document instances (#106) - Added no_dereference context manager (#82) +- Added switch_collection context manager (#220) +- Added switch_collection method to document instances (#220) Changes in 0.7.9 ================ From fff27f9b8744a747eb4abd85061fb60bbff5071e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 24 Jan 2013 10:37:54 +0000 Subject: [PATCH 116/464] Added support for compound primary keys (#149) --- AUTHORS | 3 ++- docs/changelog.rst | 1 + 
docs/django.rst | 4 ++-- docs/guide/defining-documents.rst | 22 ++++++++++++-------- tests/document/indexes.py | 34 +++++++++++++++++++++++++++++++ 5 files changed, 52 insertions(+), 12 deletions(-) diff --git a/AUTHORS b/AUTHORS index aa7f833..3f80dca 100644 --- a/AUTHORS +++ b/AUTHORS @@ -135,4 +135,5 @@ that much better: * Marcelo Anton * Aleksey Porfirov * Nicolas Trippar - * Manuel Hermann \ No newline at end of file + * Manuel Hermann + * Gustavo Gawryszewski \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 53dbeb9..7486ae9 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -37,6 +37,7 @@ Changes in 0.8.X - Added no_dereference context manager (#82) - Added switch_collection context manager (#220) - Added switch_collection method to document instances (#220) +- Added support for compound primary keys (#149) Changes in 0.7.9 ================ diff --git a/docs/django.rst b/docs/django.rst index a4f0560..ba93432 100644 --- a/docs/django.rst +++ b/docs/django.rst @@ -2,7 +2,7 @@ Using MongoEngine with Django ============================= -.. note :: Updated to support Django 1.4 +.. note:: Updated to support Django 1.4 Connecting ========== @@ -10,7 +10,7 @@ In your **settings.py** file, ignore the standard database settings (unless you also plan to use the ORM in your project), and instead call :func:`~mongoengine.connect` somewhere in the settings module. -.. note :: If getting an ``ImproperlyConfigured: settings.DATABASES is +.. note:: If getting an ``ImproperlyConfigured: settings.DATABASES is improperly configured`` error you may need to remove ``django.contrib.sites`` from ``INSTALLED_APPS`` in settings.py. 
diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 9abea9b..c698285 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -135,7 +135,8 @@ arguments can be set on all fields: field, will not have two documents in the collection with the same value. :attr:`primary_key` (Default: False) - When True, use this field as a primary key for the collection. + When True, use this field as a primary key for the collection. `DictField` + and `EmbeddedDocuments` both support being the primary key for a document. :attr:`choices` (Default: None) An iterable (e.g. a list or tuple) of choices to which the value of this @@ -441,6 +442,7 @@ The following example shows a :class:`Log` document that will be limited to Indexes ======= + You can specify indexes on collections to make querying faster. This is done by creating a list of index specifications called :attr:`indexes` in the :attr:`~mongoengine.Document.meta` dictionary, where an index specification may @@ -473,20 +475,22 @@ If a dictionary is passed then the following options are available: :attr:`unique` (Default: False) Whether the index should be unique. -.. note :: +.. note:: - To index embedded files / dictionary fields use 'dot' notation eg: - `rank.title` + Inheritance adds extra fields indices see: :ref:`document-inheritance`. -.. warning:: +Compound Indexes and Indexing sub documents +------------------------------------------- - Inheritance adds extra indices. - If don't need inheritance for a document turn inheritance off - - see :ref:`document-inheritance`. +Compound indexes can be created by adding the Embedded field or dictionary +field name to the index definition. 
+Sometimes it's more efficient to index parts of Embedded / dictionary fields, +in this case use 'dot' notation to identify the value to index eg: `rank.title` Geospatial indexes ---------------------------- +------------------ + Geospatial indexes will be automatically created for all :class:`~mongoengine.GeoPointField`\ s diff --git a/tests/document/indexes.py b/tests/document/indexes.py index fb278aa..c059590 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -693,5 +693,39 @@ class IndexesTest(unittest.TestCase): index_item = [('_id', 1), ('comments.comment_id', 1)] self.assertTrue(index_item in info) + def test_compound_key_embedded(self): + + class CompoundKey(EmbeddedDocument): + name = StringField(required=True) + term = StringField(required=True) + + class Report(Document): + key = EmbeddedDocumentField(CompoundKey, primary_key=True) + text = StringField() + + Report.drop_collection() + + my_key = CompoundKey(name="n", term="ok") + report = Report(text="OK", key=my_key).save() + + self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}}, + report.to_mongo()) + self.assertEqual(report, Report.objects.get(pk=my_key)) + + def test_compound_key_dictfield(self): + + class Report(Document): + key = DictField(primary_key=True) + text = StringField() + + Report.drop_collection() + + my_key = {"name": "n", "term": "ok"} + report = Report(text="OK", key=my_key).save() + + self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}}, + report.to_mongo()) + self.assertEqual(report, Report.objects.get(pk=my_key)) + if __name__ == '__main__': unittest.main() From e7ba5eb160e8f05385d8849eef031984ad916b04 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 24 Jan 2013 10:41:01 +0000 Subject: [PATCH 117/464] Added #121 to changelog --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 7486ae9..219e935 100644 --- a/docs/changelog.rst +++ 
b/docs/changelog.rst @@ -37,7 +37,7 @@ Changes in 0.8.X - Added no_dereference context manager (#82) - Added switch_collection context manager (#220) - Added switch_collection method to document instances (#220) -- Added support for compound primary keys (#149) +- Added support for compound primary keys (#149) (#121) Changes in 0.7.9 ================ From e38bf63be0c51c6243010f255bd45e9e2a8ddcec Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 24 Jan 2013 11:29:51 +0000 Subject: [PATCH 118/464] Fixed overriding objects with custom manager (#58) --- AUTHORS | 3 ++- docs/changelog.rst | 3 ++- mongoengine/document.py | 17 ++++++++--------- tests/queryset/queryset.py | 26 ++++++++++++++++++++++++++ 4 files changed, 38 insertions(+), 11 deletions(-) diff --git a/AUTHORS b/AUTHORS index 3f80dca..c32ab9f 100644 --- a/AUTHORS +++ b/AUTHORS @@ -136,4 +136,5 @@ that much better: * Aleksey Porfirov * Nicolas Trippar * Manuel Hermann - * Gustavo Gawryszewski \ No newline at end of file + * Gustavo Gawryszewski + * Max Countryman \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 219e935..0d164b5 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -34,10 +34,11 @@ Changes in 0.8.X - Fixed validation for GenericReferences which havent been dereferenced - Added switch_db context manager (#106) - Added switch_db method to document instances (#106) -- Added no_dereference context manager (#82) +- Added no_dereference context manager (#82) (#61) - Added switch_collection context manager (#220) - Added switch_collection method to document instances (#220) - Added support for compound primary keys (#149) (#121) +- Fixed overriding objects with custom manager (#58) Changes in 0.7.9 ================ diff --git a/mongoengine/document.py b/mongoengine/document.py index 75873b4..edc819c 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -9,7 +9,7 @@ from mongoengine import signals from mongoengine.base import 
(DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, BaseDict, BaseList, ALLOW_INHERITANCE, get_document) -from mongoengine.queryset import OperationError, NotUniqueError +from mongoengine.queryset import OperationError, NotUniqueError, QuerySet from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME from mongoengine.context_managers import switch_db, switch_collection @@ -328,10 +328,9 @@ class Document(BaseDocument): """ Returns the queryset to use for updating / reloading / deletions """ - qs = self.__class__.objects - if hasattr(self, '_objects'): - qs = self._objects - return qs + if not hasattr(self, '__objects'): + self.__objects = QuerySet(self, self._get_collection()) + return self.__objects @property def _object_key(self): @@ -394,8 +393,8 @@ class Document(BaseDocument): self._get_db = lambda: db self._collection = collection self._created = True - self._objects = self.__class__.objects - self._objects._collection_obj = collection + self.__objects = self._qs + self.__objects._collection_obj = collection return self def switch_collection(self, collection_name): @@ -419,8 +418,8 @@ class Document(BaseDocument): self._get_collection = lambda: collection self._collection = collection self._created = True - self._objects = self.__class__.objects - self._objects._collection_obj = collection + self.__objects = self._qs + self.__objects._collection_obj = collection return self def select_related(self, max_depth=1): diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 3594044..0ad3092 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -2074,6 +2074,32 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() + def test_custom_manager_overriding_objects_works(self): + + class Foo(Document): + bar = StringField(default='bar') + active = BooleanField(default=False) + + @queryset_manager + def objects(doc_cls, queryset): + return queryset(active=True) + + @queryset_manager + def 
with_inactive(doc_cls, queryset): + return queryset(active=False) + + Foo.drop_collection() + + Foo(active=True).save() + Foo(active=False).save() + + self.assertEqual(1, Foo.objects.count()) + self.assertEqual(1, Foo.with_inactive.count()) + + Foo.with_inactive.first().delete() + self.assertEqual(0, Foo.with_inactive.count()) + self.assertEqual(1, Foo.objects.count()) + def test_query_value_conversion(self): """Ensure that query values are properly converted when necessary. From eefbd3f5974e6fd8eb3810d01de168a2a280b0da Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 24 Jan 2013 12:52:16 +0000 Subject: [PATCH 119/464] Updated wobbly python 3.3 test --- tests/document/instance.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/document/instance.py b/tests/document/instance.py index 4c67046..247f627 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -2026,8 +2026,6 @@ class ValidatorErrorTest(unittest.TestCase): try: User().validate() except ValidationError, e: - expected_error_message = """ValidationError(Field is required: ['username', 'name'])""" - self.assertEqual(e.message, expected_error_message) self.assertEqual(e.to_dict(), { 'username': 'Field is required', 'name': 'Field is required'}) From ed2ea24b75ffe70258882d1cf53a86e90b6ec1e4 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 24 Jan 2013 13:10:51 +0000 Subject: [PATCH 120/464] More test edge case fixing --- tests/test_context_managers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index 10fe7b8..c9efe8b 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -140,6 +140,7 @@ class ContextManagersTest(unittest.TestCase): def test_query_counter(self): connect('mongoenginetest') db = get_db() + db.test.find({}) with query_counter() as q: self.assertEqual(0, q) From ba48dfb4bf283c5b3f20d5a9f47d69ec09e6f2f7 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 24 Jan 
2013 17:33:10 +0000 Subject: [PATCH 121/464] Added no_dereference method for querysets (#82) (#61) --- docs/changelog.rst | 1 + docs/guide/querying.rst | 13 +++++++++---- mongoengine/base/document.py | 13 +++++++++++-- mongoengine/base/fields.py | 1 + mongoengine/context_managers.py | 14 +++----------- mongoengine/fields.py | 5 +++-- mongoengine/queryset/queryset.py | 15 ++++++++++++--- tests/queryset/queryset.py | 21 +++++++++++++++++++++ 8 files changed, 61 insertions(+), 22 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 0d164b5..e24eaf4 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -39,6 +39,7 @@ Changes in 0.8.X - Added switch_collection method to document instances (#220) - Added support for compound primary keys (#149) (#121) - Fixed overriding objects with custom manager (#58) +- Added no_dereference method for querysets (#82) (#61) Changes in 0.7.9 ================ diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 7ccf143..3279853 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -373,17 +373,22 @@ Turning off dereferencing ------------------------- Sometimes for performance reasons you don't want to automatically dereference -data . To turn off all dereferencing you can use the +data. To turn off dereferencing of the results of a query use +:func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so:: + + post = Post.objects.no_dereference().first() + assert(isinstance(post.author, ObjectId)) + +You can also turn off all dereferencing for a fixed period by using the :class:`~mongoengine.context_managers.no_dereference` context manager:: with no_dereference(Post) as Post: post = Post.objects.first() assert(isinstance(post.author, ObjectId)) -.. note:: + # Outside the context manager dereferencing occurs. + assert(isinstance(post.author, User)) - :class:`~mongoengine.context_managers.no_dereference` only works on the - Default QuerySet manager. 
Advanced queries ================ diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 9f40061..7c1597e 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -1,3 +1,4 @@ +import copy import operator from functools import partial @@ -461,9 +462,10 @@ class BaseDocument(object): return cls._meta.get('collection', None) @classmethod - def _from_son(cls, son): + def _from_son(cls, son, _auto_dereference=True): """Create an instance of a Document (subclass) from a PyMongo SON. """ + # get the class name from the document, falling back to the given # class if unavailable class_name = son.get('_cls', cls._class_name) @@ -480,7 +482,12 @@ class BaseDocument(object): changed_fields = [] errors_dict = {} - for field_name, field in cls._fields.iteritems(): + fields = cls._fields + if not _auto_dereference: + fields = copy.copy(fields) + + for field_name, field in fields.iteritems(): + field._auto_dereference = _auto_dereference if field.db_field in data: value = data[field.db_field] try: @@ -507,6 +514,8 @@ class BaseDocument(object): obj = cls(__auto_convert=False, **data) obj._changed_fields = changed_fields obj._created = False + if not _auto_dereference: + obj._fields = fields return obj @classmethod diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index 82981e2..25f86af 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -168,6 +168,7 @@ class ComplexBaseField(BaseField): (self.field is None or isinstance(self.field, (GenericReferenceField, ReferenceField)))) + self._auto_dereference = instance._fields[self.name]._auto_dereference if not self.__dereference and instance._initialised and dereference: instance._data[self.name] = self._dereference( instance._data.get(self.name), max_depth=1, instance=instance, diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index e73d4a2..76d5fbf 100644 --- a/mongoengine/context_managers.py +++ 
b/mongoengine/context_managers.py @@ -93,7 +93,8 @@ class switch_collection(object): class no_dereference(object): """ no_dereference context manager. - Turns off all dereferencing in Documents:: + Turns off all dereferencing in Documents for the duration of the context + manager:: with no_dereference(Group) as Group: Group.objects.find() @@ -118,21 +119,12 @@ class no_dereference(object): def __enter__(self): """ change the objects default and _auto_dereference values""" - if 'queryset_class' in self.cls._meta: - raise OperationError("no_dereference context manager only works on" - " default queryset classes") - objects = self.cls.__dict__['objects'] - objects.default = QuerySetNoDeRef - self.cls.objects = objects for field in self.deref_fields: self.cls._fields[field]._auto_dereference = False return self.cls def __exit__(self, t, value, traceback): """ Reset the default and _auto_dereference values""" - objects = self.cls.__dict__['objects'] - objects.default = QuerySet - self.cls.objects = objects for field in self.deref_fields: self.cls._fields[field]._auto_dereference = True return self.cls @@ -145,7 +137,7 @@ class QuerySetNoDeRef(QuerySet): class query_counter(object): - """ Query_counter contextmanager to get the number of queries. """ + """ Query_counter context manager to get the number of queries. """ def __init__(self): """ Construct the query_counter. 
""" diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 1ccdb65..11e9d3f 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -777,7 +777,7 @@ class ReferenceField(BaseField): # Get value from document instance if available value = instance._data.get(self.name) - + self._auto_dereference = instance._fields[self.name]._auto_dereference # Dereference DBRefs if self._auto_dereference and isinstance(value, DBRef): value = self.document_type._get_db().dereference(value) @@ -859,7 +859,8 @@ class GenericReferenceField(BaseField): return self value = instance._data.get(self.name) - if isinstance(value, (dict, SON)): + self._auto_dereference = instance._fields[self.name]._auto_dereference + if self._auto_dereference and isinstance(value, (dict, SON)): instance._data[self.name] = self.dereference(value) return super(GenericReferenceField, self).__get__(instance, owner) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index a9ff6e7..f73b0e7 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -42,6 +42,7 @@ class QuerySet(object): providing :class:`~mongoengine.Document` objects as the results. 
""" __dereference = False + _auto_dereference = True def __init__(self, document, collection): self._document = document @@ -145,10 +146,12 @@ class QuerySet(object): elif isinstance(key, int): if queryset._scalar: return queryset._get_scalar( - queryset._document._from_son(queryset._cursor[key])) + queryset._document._from_son(queryset._cursor[key], + _auto_dereference=self._auto_dereference)) if queryset._as_pymongo: return queryset._get_as_pymongo(queryset._cursor.next()) - return queryset._document._from_son(queryset._cursor[key]) + return queryset._document._from_son(queryset._cursor[key], + _auto_dereference=self._auto_dereference) raise AttributeError def __repr__(self): @@ -515,7 +518,7 @@ class QuerySet(object): '_where_clause', '_loaded_fields', '_ordering', '_snapshot', '_timeout', '_class_check', '_slave_okay', '_read_preference', '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce', - '_limit', '_skip', '_hint') + '_limit', '_skip', '_hint', '_auto_dereference') for prop in copy_props: val = getattr(self, prop) @@ -1135,6 +1138,12 @@ class QuerySet(object): self.__dereference = _import_class('DeReference')() return self.__dereference + def no_dereference(self): + """Turn off any dereferencing.""" + queryset = self.clone() + queryset._auto_dereference = False + return queryset + # Helper Functions def _item_frequencies_map_reduce(self, field, normalize=False): diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 0ad3092..c6b7c0e 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3103,5 +3103,26 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(results[1]['name'], 'Barack Obama') self.assertEqual(results[1]['price'], Decimal('2.22')) + def test_no_dereference(self): + + class Organization(Document): + name = StringField() + + class User(Document): + name = StringField() + organization = ReferenceField(Organization) + + User.drop_collection() + Organization.drop_collection() + + whitehouse = 
Organization(name="White House").save() + User(name="Bob Dole", organization=whitehouse).save() + + qs = User.objects() + self.assertTrue(isinstance(qs.first().organization, Organization)) + self.assertFalse(isinstance(qs.no_dereference().first().organization, + Organization)) + self.assertTrue(isinstance(qs.first().organization, Organization)) + if __name__ == '__main__': unittest.main() From 9f551121fbbe852bd639ce4999ad0835db3fd8a6 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 24 Jan 2013 17:41:21 +0000 Subject: [PATCH 122/464] Added docs for no_dereference and scalar (#68) --- mongoengine/queryset/queryset.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index f73b0e7..b5e3351 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -748,8 +748,12 @@ class QuerySet(object): """Instead of returning Document instances, return either a specific value or a tuple of values in order. - This effects all results and can be unset by calling ``scalar`` - without arguments. Calls ``only`` automatically. + Can be used along with + :func:`~mongoengine.queryset.QuerySet.no_dereference` to turn off + dereferencing. + + .. note:: This effects all results and can be unset by calling + ``scalar`` without arguments. Calls ``only`` automatically. :param fields: One or more fields to return instead of a Document. """ @@ -1139,7 +1143,8 @@ class QuerySet(object): return self.__dereference def no_dereference(self): - """Turn off any dereferencing.""" + """Turn off any dereferencing for the results of this queryset. 
+ """ queryset = self.clone() queryset._auto_dereference = False return queryset From 83da08ef7dda6b40f0e288e98b6560107279e3c7 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 24 Jan 2013 17:43:57 +0000 Subject: [PATCH 123/464] Documentation fixes --- docs/guide/defining-documents.rst | 2 +- docs/tutorial.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index c698285..3fdb9a6 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -47,7 +47,7 @@ be saved :: >>> Page.objects(tags='mongoengine').count() >>> 1 -..note:: +.. note:: There is one caveat on Dynamic Documents: fields cannot start with `_` diff --git a/docs/tutorial.rst b/docs/tutorial.rst index c2fb5b9..c4b69c4 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -170,7 +170,7 @@ To delete all the posts if a user is deleted set the rule:: See :class:`~mongoengine.ReferenceField` for more information. -..note:: +.. 
note:: MapFields and DictFields currently don't support automatic handling of deleted references From 621b2b3f72e142356e06e06845faea8caa8e9279 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 25 Jan 2013 11:28:20 +0000 Subject: [PATCH 124/464] Undefined data should not override instance methods (#49) --- docs/changelog.rst | 1 + docs/guide/defining-documents.rst | 10 +- mongoengine/base/document.py | 16 ++- tests/document/__init__.py | 1 + tests/document/instance.py | 193 +++-------------------------- tests/document/validation.py | 195 ++++++++++++++++++++++++++++++ 6 files changed, 230 insertions(+), 186 deletions(-) create mode 100644 tests/document/validation.py diff --git a/docs/changelog.rst b/docs/changelog.rst index e24eaf4..5bdd0f8 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -40,6 +40,7 @@ Changes in 0.8.X - Added support for compound primary keys (#149) (#121) - Fixed overriding objects with custom manager (#58) - Added no_dereference method for querysets (#82) (#61) +- Undefined data should not override instance methods (#49) Changes in 0.7.9 ================ diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 3fdb9a6..350ba67 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -600,8 +600,7 @@ Working with existing data -------------------------- As MongoEngine no longer defaults to needing :attr:`_cls` you can quickly and easily get working with existing data. Just define the document to match -the expected schema in your database. If you have wildly varying schemas then -a :class:`~mongoengine.DynamicDocument` might be more appropriate. :: +the expected schema in your database :: # Will work with data in an existing collection named 'cmsPage' class Page(Document): @@ -609,3 +608,10 @@ a :class:`~mongoengine.DynamicDocument` might be more appropriate. 
:: meta = { 'collection': 'cmsPage' } + +If you have wildly varying schemas then using a +:class:`~mongoengine.DynamicDocument` might be more appropriate, instead of +defining all possible field types. + +If you use :class:`~mongoengine.Document` and the database contains data that +isn't defined then that data will be stored in the `document._data` dictionary. diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 7c1597e..a88a38b 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -60,13 +60,17 @@ class BaseDocument(object): else: FileField = _import_class('FileField') for key, value in values.iteritems(): + if key == '__auto_convert': + continue key = self._reverse_db_field_map.get(key, key) - if (value is not None and __auto_convert and - key in self._fields): - field = self._fields.get(key) - if not isinstance(field, FileField): - value = field.to_python(value) - setattr(self, key, value) + if key in self._fields or key in ('id', 'pk', '_cls'): + if __auto_convert and value is not None: + field = self._fields.get(key) + if field and not isinstance(field, FileField): + value = field.to_python(value) + setattr(self, key, value) + else: + self._data[key] = value # Set any get_fieldname_display methods self.__set_field_display() diff --git a/tests/document/__init__.py b/tests/document/__init__.py index 7774ee1..1acc9f4 100644 --- a/tests/document/__init__.py +++ b/tests/document/__init__.py @@ -9,6 +9,7 @@ from indexes import * from inheritance import * from instance import * from json_serialisation import * +from validation import * if __name__ == '__main__': unittest.main() diff --git a/tests/document/instance.py b/tests/document/instance.py index 247f627..99e4edb 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -23,7 +23,7 @@ from mongoengine.context_managers import switch_db TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), '../fields/mongoengine.png') -__all__ = 
("InstanceTest", "ValidatorErrorTest") +__all__ = ("InstanceTest",) class InstanceTest(unittest.TestCase): @@ -1977,192 +1977,29 @@ class InstanceTest(unittest.TestCase): group = Group.objects.first() self.assertEqual("hello - default", group.name) - -class ValidatorErrorTest(unittest.TestCase): - - def test_to_dict(self): - """Ensure a ValidationError handles error to_dict correctly. - """ - error = ValidationError('root') - self.assertEqual(error.to_dict(), {}) - - # 1st level error schema - error.errors = {'1st': ValidationError('bad 1st'), } - self.assertTrue('1st' in error.to_dict()) - self.assertEqual(error.to_dict()['1st'], 'bad 1st') - - # 2nd level error schema - error.errors = {'1st': ValidationError('bad 1st', errors={ - '2nd': ValidationError('bad 2nd'), - })} - self.assertTrue('1st' in error.to_dict()) - self.assertTrue(isinstance(error.to_dict()['1st'], dict)) - self.assertTrue('2nd' in error.to_dict()['1st']) - self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd') - - # moar levels - error.errors = {'1st': ValidationError('bad 1st', errors={ - '2nd': ValidationError('bad 2nd', errors={ - '3rd': ValidationError('bad 3rd', errors={ - '4th': ValidationError('Inception'), - }), - }), - })} - self.assertTrue('1st' in error.to_dict()) - self.assertTrue('2nd' in error.to_dict()['1st']) - self.assertTrue('3rd' in error.to_dict()['1st']['2nd']) - self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd']) - self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'], - 'Inception') - - self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])") - - def test_model_validation(self): + def test_no_overwritting_no_data_loss(self): class User(Document): username = StringField(primary_key=True) - name = StringField(required=True) - - try: - User().validate() - except ValidationError, e: - self.assertEqual(e.to_dict(), { - 'username': 'Field is required', - 'name': 'Field is required'}) - - def test_spaces_in_keys(self): - - class 
Embedded(DynamicEmbeddedDocument): - pass - - class Doc(DynamicDocument): - pass - - Doc.drop_collection() - doc = Doc() - setattr(doc, 'hello world', 1) - doc.save() - - one = Doc.objects.filter(**{'hello world': 1}).count() - self.assertEqual(1, one) - - def test_fields_rewrite(self): - class BasePerson(Document): - name = StringField() - age = IntField() - meta = {'abstract': True} - - class Person(BasePerson): - name = StringField(required=True) - - p = Person(age=15) - self.assertRaises(ValidationError, p.validate) - - def test_cascaded_save_wrong_reference(self): - - class ADocument(Document): - val = IntField() - - class BDocument(Document): - a = ReferenceField(ADocument) - - ADocument.drop_collection() - BDocument.drop_collection() - - a = ADocument() - a.val = 15 - a.save() - - b = BDocument() - b.a = a - b.save() - - a.delete() - - b = BDocument.objects.first() - b.save(cascade=True) - - def test_shard_key(self): - class LogEntry(Document): - machine = StringField() - log = StringField() - - meta = { - 'shard_key': ('machine',) - } - - LogEntry.drop_collection() - - log = LogEntry() - log.machine = "Localhost" - log.save() - - log.log = "Saving" - log.save() - - def change_shard_key(): - log.machine = "127.0.0.1" - - self.assertRaises(OperationError, change_shard_key) - - def test_shard_key_primary(self): - class LogEntry(Document): - machine = StringField(primary_key=True) - log = StringField() - - meta = { - 'shard_key': ('machine',) - } - - LogEntry.drop_collection() - - log = LogEntry() - log.machine = "Localhost" - log.save() - - log.log = "Saving" - log.save() - - def change_shard_key(): - log.machine = "127.0.0.1" - - self.assertRaises(OperationError, change_shard_key) - - def test_kwargs_simple(self): - - class Embedded(EmbeddedDocument): name = StringField() - class Doc(Document): - doc_name = StringField() - doc = EmbeddedDocumentField(Embedded) + @property + def foo(self): + return True - classic_doc = Doc(doc_name="my doc", 
doc=Embedded(name="embedded doc")) - dict_doc = Doc(**{"doc_name": "my doc", - "doc": {"name": "embedded doc"}}) + User.drop_collection() - self.assertEqual(classic_doc, dict_doc) - self.assertEqual(classic_doc._data, dict_doc._data) + user = User(username="Ross", foo="bar") + self.assertTrue(user.foo) - def test_kwargs_complex(self): - - class Embedded(EmbeddedDocument): - name = StringField() - - class Doc(Document): - doc_name = StringField() - docs = ListField(EmbeddedDocumentField(Embedded)) - - classic_doc = Doc(doc_name="my doc", docs=[ - Embedded(name="embedded doc1"), - Embedded(name="embedded doc2")]) - dict_doc = Doc(**{"doc_name": "my doc", - "docs": [{"name": "embedded doc1"}, - {"name": "embedded doc2"}]}) - - self.assertEqual(classic_doc, dict_doc) - self.assertEqual(classic_doc._data, dict_doc._data) + User._get_collection().save({"_id": "Ross", "foo": "Bar", + "data": [1, 2, 3]}) + user = User.objects.first() + self.assertEqual("Ross", user.username) + self.assertEqual(True, user.foo) + self.assertEqual("Bar", user._data["foo"]) + self.assertEqual([1, 2, 3], user._data["data"]) if __name__ == '__main__': unittest.main() diff --git a/tests/document/validation.py b/tests/document/validation.py new file mode 100644 index 0000000..dafb3a3 --- /dev/null +++ b/tests/document/validation.py @@ -0,0 +1,195 @@ +# -*- coding: utf-8 -*- +import sys +sys.path[0:0] = [""] + +import unittest + +from mongoengine import * + +__all__ = ("ValidatorErrorTest",) + + +class ValidatorErrorTest(unittest.TestCase): + + def test_to_dict(self): + """Ensure a ValidationError handles error to_dict correctly. 
+ """ + error = ValidationError('root') + self.assertEqual(error.to_dict(), {}) + + # 1st level error schema + error.errors = {'1st': ValidationError('bad 1st'), } + self.assertTrue('1st' in error.to_dict()) + self.assertEqual(error.to_dict()['1st'], 'bad 1st') + + # 2nd level error schema + error.errors = {'1st': ValidationError('bad 1st', errors={ + '2nd': ValidationError('bad 2nd'), + })} + self.assertTrue('1st' in error.to_dict()) + self.assertTrue(isinstance(error.to_dict()['1st'], dict)) + self.assertTrue('2nd' in error.to_dict()['1st']) + self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd') + + # moar levels + error.errors = {'1st': ValidationError('bad 1st', errors={ + '2nd': ValidationError('bad 2nd', errors={ + '3rd': ValidationError('bad 3rd', errors={ + '4th': ValidationError('Inception'), + }), + }), + })} + self.assertTrue('1st' in error.to_dict()) + self.assertTrue('2nd' in error.to_dict()['1st']) + self.assertTrue('3rd' in error.to_dict()['1st']['2nd']) + self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd']) + self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'], + 'Inception') + + self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])") + + def test_model_validation(self): + + class User(Document): + username = StringField(primary_key=True) + name = StringField(required=True) + + try: + User().validate() + except ValidationError, e: + self.assertEqual(e.to_dict(), { + 'username': 'Field is required', + 'name': 'Field is required'}) + + def test_spaces_in_keys(self): + + class Embedded(DynamicEmbeddedDocument): + pass + + class Doc(DynamicDocument): + pass + + Doc.drop_collection() + doc = Doc() + setattr(doc, 'hello world', 1) + doc.save() + + one = Doc.objects.filter(**{'hello world': 1}).count() + self.assertEqual(1, one) + + def test_fields_rewrite(self): + class BasePerson(Document): + name = StringField() + age = IntField() + meta = {'abstract': True} + + class Person(BasePerson): + name = 
StringField(required=True) + + p = Person(age=15) + self.assertRaises(ValidationError, p.validate) + + def test_cascaded_save_wrong_reference(self): + + class ADocument(Document): + val = IntField() + + class BDocument(Document): + a = ReferenceField(ADocument) + + ADocument.drop_collection() + BDocument.drop_collection() + + a = ADocument() + a.val = 15 + a.save() + + b = BDocument() + b.a = a + b.save() + + a.delete() + + b = BDocument.objects.first() + b.save(cascade=True) + + def test_shard_key(self): + class LogEntry(Document): + machine = StringField() + log = StringField() + + meta = { + 'shard_key': ('machine',) + } + + LogEntry.drop_collection() + + log = LogEntry() + log.machine = "Localhost" + log.save() + + log.log = "Saving" + log.save() + + def change_shard_key(): + log.machine = "127.0.0.1" + + self.assertRaises(OperationError, change_shard_key) + + def test_shard_key_primary(self): + class LogEntry(Document): + machine = StringField(primary_key=True) + log = StringField() + + meta = { + 'shard_key': ('machine',) + } + + LogEntry.drop_collection() + + log = LogEntry() + log.machine = "Localhost" + log.save() + + log.log = "Saving" + log.save() + + def change_shard_key(): + log.machine = "127.0.0.1" + + self.assertRaises(OperationError, change_shard_key) + + def test_kwargs_simple(self): + + class Embedded(EmbeddedDocument): + name = StringField() + + class Doc(Document): + doc_name = StringField() + doc = EmbeddedDocumentField(Embedded) + + classic_doc = Doc(doc_name="my doc", doc=Embedded(name="embedded doc")) + dict_doc = Doc(**{"doc_name": "my doc", + "doc": {"name": "embedded doc"}}) + + self.assertEqual(classic_doc, dict_doc) + self.assertEqual(classic_doc._data, dict_doc._data) + + def test_kwargs_complex(self): + + class Embedded(EmbeddedDocument): + name = StringField() + + class Doc(Document): + doc_name = StringField() + docs = ListField(EmbeddedDocumentField(Embedded)) + + classic_doc = Doc(doc_name="my doc", docs=[ + 
Embedded(name="embedded doc1"), + Embedded(name="embedded doc2")]) + dict_doc = Doc(**{"doc_name": "my doc", + "docs": [{"name": "embedded doc1"}, + {"name": "embedded doc2"}]}) + + self.assertEqual(classic_doc, dict_doc) + self.assertEqual(classic_doc._data, dict_doc._data) From eb1b6e34c71abe45d96cbf356f1253a3ca5c51a6 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 25 Jan 2013 11:51:58 +0000 Subject: [PATCH 125/464] Updated upgrade docs (#49) --- docs/upgrade.rst | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 9c6c9a9..d328248 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -56,6 +56,26 @@ you will need to declare :attr:`allow_inheritance` in the meta data like so: :: meta = {'allow_inheritance': True} +Previously, if you had data the database that wasn't defined in the Document +definition, it would set it as an attribute on the document. This is no longer +the case and the data is set only in the ``document._data`` dictionary: :: + + >>> from mongoengine import * + >>> class Animal(Document): + ... name = StringField() + ... 
+ >>> cat = Animal(name="kit", size="small") + + # 0.7 + >>> cat.size + u'small' + + # 0.8 + >>> cat.size + Traceback (most recent call last): + File "", line 1, in + AttributeError: 'Animal' object has no attribute 'size' + Querysets ~~~~~~~~~ From 0ea363c7fc760e210bee5b83b3ab83f657a6e0ae Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 25 Jan 2013 12:13:46 +0000 Subject: [PATCH 126/464] Updated authors and changelof (#142) --- AUTHORS | 3 ++- docs/changelog.rst | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index c32ab9f..ac811dd 100644 --- a/AUTHORS +++ b/AUTHORS @@ -137,4 +137,5 @@ that much better: * Nicolas Trippar * Manuel Hermann * Gustavo Gawryszewski - * Max Countryman \ No newline at end of file + * Max Countryman + * caitifbrito \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 5bdd0f8..19bc446 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -41,6 +41,7 @@ Changes in 0.8.X - Fixed overriding objects with custom manager (#58) - Added no_dereference method for querysets (#82) (#61) - Undefined data should not override instance methods (#49) +- Added Django Group and Permission (#142) Changes in 0.7.9 ================ From 9d9a4afee9aef2e11beb37207c914a8a76329370 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 28 Jan 2013 12:05:09 +0000 Subject: [PATCH 127/464] Added Doc class and pk to Validation messages (#69) --- docs/changelog.rst | 1 + mongoengine/base/document.py | 8 +- tests/document/instance.py | 137 +++++++++++++++++++++--------- tests/document/validation.py | 159 ++++++++++++----------------------- 4 files changed, 161 insertions(+), 144 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 19bc446..601c2db 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -42,6 +42,7 @@ Changes in 0.8.X - Added no_dereference method for querysets (#82) (#61) - Undefined data should not override instance methods (#49) - Added Django Group 
and Permission (#142) +- Added Doc class and pk to Validation messages (#69) Changes in 0.7.9 ================ diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index a88a38b..4f5a87e 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -274,7 +274,13 @@ class BaseDocument(object): field_name=field.name) if errors: - raise ValidationError('ValidationError', errors=errors) + pk = "None" + if hasattr(self, 'pk'): + pk = self.pk + elif self._instance: + pk = self._instance.pk + message = "ValidationError (%s:%s) " % (self._class_name, pk) + raise ValidationError(message, errors=errors) def to_json(self): """Converts a document to JSON""" diff --git a/tests/document/instance.py b/tests/document/instance.py index 99e4edb..3d4e8a9 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -466,45 +466,6 @@ class InstanceTest(unittest.TestCase): doc = Doc.objects.get() self.assertEqual(doc, doc.embedded_field[0]._instance) - def test_embedded_document_validation(self): - """Ensure that embedded documents may be validated. 
- """ - class Comment(EmbeddedDocument): - date = DateTimeField() - content = StringField(required=True) - - comment = Comment() - self.assertRaises(ValidationError, comment.validate) - - comment.content = 'test' - comment.validate() - - comment.date = 4 - self.assertRaises(ValidationError, comment.validate) - - comment.date = datetime.now() - comment.validate() - self.assertEqual(comment._instance, None) - - def test_embedded_db_field_validate(self): - - class SubDoc(EmbeddedDocument): - val = IntField() - - class Doc(Document): - e = EmbeddedDocumentField(SubDoc, db_field='eb') - - Doc.drop_collection() - - Doc(e=SubDoc(val=15)).save() - - doc = Doc.objects.first() - doc.validate() - keys = doc._data.keys() - self.assertEqual(2, len(keys)) - self.assertTrue('id' in keys) - self.assertTrue('e' in keys) - def test_document_clean(self): class TestDocument(Document): status = StringField() @@ -2001,5 +1962,103 @@ class InstanceTest(unittest.TestCase): self.assertEqual("Bar", user._data["foo"]) self.assertEqual([1, 2, 3], user._data["data"]) + + def test_spaces_in_keys(self): + + class Embedded(DynamicEmbeddedDocument): + pass + + class Doc(DynamicDocument): + pass + + Doc.drop_collection() + doc = Doc() + setattr(doc, 'hello world', 1) + doc.save() + + one = Doc.objects.filter(**{'hello world': 1}).count() + self.assertEqual(1, one) + + def test_shard_key(self): + class LogEntry(Document): + machine = StringField() + log = StringField() + + meta = { + 'shard_key': ('machine',) + } + + LogEntry.drop_collection() + + log = LogEntry() + log.machine = "Localhost" + log.save() + + log.log = "Saving" + log.save() + + def change_shard_key(): + log.machine = "127.0.0.1" + + self.assertRaises(OperationError, change_shard_key) + + def test_shard_key_primary(self): + class LogEntry(Document): + machine = StringField(primary_key=True) + log = StringField() + + meta = { + 'shard_key': ('machine',) + } + + LogEntry.drop_collection() + + log = LogEntry() + log.machine = "Localhost" 
+ log.save() + + log.log = "Saving" + log.save() + + def change_shard_key(): + log.machine = "127.0.0.1" + + self.assertRaises(OperationError, change_shard_key) + + def test_kwargs_simple(self): + + class Embedded(EmbeddedDocument): + name = StringField() + + class Doc(Document): + doc_name = StringField() + doc = EmbeddedDocumentField(Embedded) + + classic_doc = Doc(doc_name="my doc", doc=Embedded(name="embedded doc")) + dict_doc = Doc(**{"doc_name": "my doc", + "doc": {"name": "embedded doc"}}) + + self.assertEqual(classic_doc, dict_doc) + self.assertEqual(classic_doc._data, dict_doc._data) + + def test_kwargs_complex(self): + + class Embedded(EmbeddedDocument): + name = StringField() + + class Doc(Document): + doc_name = StringField() + docs = ListField(EmbeddedDocumentField(Embedded)) + + classic_doc = Doc(doc_name="my doc", docs=[ + Embedded(name="embedded doc1"), + Embedded(name="embedded doc2")]) + dict_doc = Doc(**{"doc_name": "my doc", + "docs": [{"name": "embedded doc1"}, + {"name": "embedded doc2"}]}) + + self.assertEqual(classic_doc, dict_doc) + self.assertEqual(classic_doc._data, dict_doc._data) + if __name__ == '__main__': unittest.main() diff --git a/tests/document/validation.py b/tests/document/validation.py index dafb3a3..aaf6b0c 100644 --- a/tests/document/validation.py +++ b/tests/document/validation.py @@ -3,6 +3,7 @@ import sys sys.path[0:0] = [""] import unittest +from datetime import datetime from mongoengine import * @@ -11,6 +12,9 @@ __all__ = ("ValidatorErrorTest",) class ValidatorErrorTest(unittest.TestCase): + def setUp(self): + connect(db='mongoenginetest') + def test_to_dict(self): """Ensure a ValidationError handles error to_dict correctly. 
""" @@ -57,25 +61,19 @@ class ValidatorErrorTest(unittest.TestCase): try: User().validate() except ValidationError, e: + self.assertTrue("User:None" in e.message) self.assertEqual(e.to_dict(), { 'username': 'Field is required', 'name': 'Field is required'}) - def test_spaces_in_keys(self): - - class Embedded(DynamicEmbeddedDocument): - pass - - class Doc(DynamicDocument): - pass - - Doc.drop_collection() - doc = Doc() - setattr(doc, 'hello world', 1) - doc.save() - - one = Doc.objects.filter(**{'hello world': 1}).count() - self.assertEqual(1, one) + user = User(username="RossC0", name="Ross").save() + user.name = None + try: + user.save() + except ValidationError, e: + self.assertTrue("User:RossC0" in e.message) + self.assertEqual(e.to_dict(), { + 'name': 'Field is required'}) def test_fields_rewrite(self): class BasePerson(Document): @@ -89,107 +87,60 @@ class ValidatorErrorTest(unittest.TestCase): p = Person(age=15) self.assertRaises(ValidationError, p.validate) - def test_cascaded_save_wrong_reference(self): + def test_embedded_document_validation(self): + """Ensure that embedded documents may be validated. 
+ """ + class Comment(EmbeddedDocument): + date = DateTimeField() + content = StringField(required=True) - class ADocument(Document): - val = IntField() + comment = Comment() + self.assertRaises(ValidationError, comment.validate) - class BDocument(Document): - a = ReferenceField(ADocument) + comment.content = 'test' + comment.validate() - ADocument.drop_collection() - BDocument.drop_collection() + comment.date = 4 + self.assertRaises(ValidationError, comment.validate) - a = ADocument() - a.val = 15 - a.save() + comment.date = datetime.now() + comment.validate() + self.assertEqual(comment._instance, None) - b = BDocument() - b.a = a - b.save() + def test_embedded_db_field_validate(self): - a.delete() - - b = BDocument.objects.first() - b.save(cascade=True) - - def test_shard_key(self): - class LogEntry(Document): - machine = StringField() - log = StringField() - - meta = { - 'shard_key': ('machine',) - } - - LogEntry.drop_collection() - - log = LogEntry() - log.machine = "Localhost" - log.save() - - log.log = "Saving" - log.save() - - def change_shard_key(): - log.machine = "127.0.0.1" - - self.assertRaises(OperationError, change_shard_key) - - def test_shard_key_primary(self): - class LogEntry(Document): - machine = StringField(primary_key=True) - log = StringField() - - meta = { - 'shard_key': ('machine',) - } - - LogEntry.drop_collection() - - log = LogEntry() - log.machine = "Localhost" - log.save() - - log.log = "Saving" - log.save() - - def change_shard_key(): - log.machine = "127.0.0.1" - - self.assertRaises(OperationError, change_shard_key) - - def test_kwargs_simple(self): - - class Embedded(EmbeddedDocument): - name = StringField() + class SubDoc(EmbeddedDocument): + val = IntField(required=True) class Doc(Document): - doc_name = StringField() - doc = EmbeddedDocumentField(Embedded) + id = StringField(primary_key=True) + e = EmbeddedDocumentField(SubDoc, db_field='eb') - classic_doc = Doc(doc_name="my doc", doc=Embedded(name="embedded doc")) - dict_doc = 
Doc(**{"doc_name": "my doc", - "doc": {"name": "embedded doc"}}) + try: + Doc(id="bad").validate() + except ValidationError, e: + self.assertTrue("SubDoc:None" in e.message) + self.assertEqual(e.to_dict(), { + 'e.val': 'Field is required'}) - self.assertEqual(classic_doc, dict_doc) - self.assertEqual(classic_doc._data, dict_doc._data) + Doc.drop_collection() - def test_kwargs_complex(self): + Doc(id="test", e=SubDoc(val=15)).save() - class Embedded(EmbeddedDocument): - name = StringField() + doc = Doc.objects.first() + keys = doc._data.keys() + self.assertEqual(2, len(keys)) + self.assertTrue('id' in keys) + self.assertTrue('e' in keys) - class Doc(Document): - doc_name = StringField() - docs = ListField(EmbeddedDocumentField(Embedded)) + doc.e.val = "OK" + try: + doc.save() + except ValidationError, e: + self.assertTrue("SubDoc:test" in e.message) + self.assertEqual(e.to_dict(), { + 'e.val': 'Field is required'}) - classic_doc = Doc(doc_name="my doc", docs=[ - Embedded(name="embedded doc1"), - Embedded(name="embedded doc2")]) - dict_doc = Doc(**{"doc_name": "my doc", - "docs": [{"name": "embedded doc1"}, - {"name": "embedded doc2"}]}) - self.assertEqual(classic_doc, dict_doc) - self.assertEqual(classic_doc._data, dict_doc._data) +if __name__ == '__main__': + unittest.main() From de2f774e8533ac18c161182cd7c37754e8d68844 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 28 Jan 2013 13:29:44 +0000 Subject: [PATCH 128/464] Fix validation test --- tests/document/validation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/document/validation.py b/tests/document/validation.py index aaf6b0c..0f67f50 100644 --- a/tests/document/validation.py +++ b/tests/document/validation.py @@ -137,7 +137,7 @@ class ValidatorErrorTest(unittest.TestCase): try: doc.save() except ValidationError, e: - self.assertTrue("SubDoc:test" in e.message) + self.assertTrue("Doc:test" in e.message) self.assertEqual(e.to_dict(), { 'e.val': 'Field is required'}) From 
f182daa85eae94ccfffaa40e04f6370c007d10c4 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 28 Jan 2013 13:32:21 +0000 Subject: [PATCH 129/464] Fixed Documents deleted via a queryset don't call any signals (#105) --- docs/changelog.rst | 1 + mongoengine/queryset/queryset.py | 11 ++++++--- tests/document/instance.py | 40 ++++++++++++++++++++++++++++++-- 3 files changed, 47 insertions(+), 5 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 601c2db..8b41d6e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -43,6 +43,7 @@ Changes in 0.8.X - Undefined data should not override instance methods (#49) - Added Django Group and Permission (#142) - Added Doc class and pk to Validation messages (#69) +- Fixed Documents deleted via a queryset don't call any signals (#105) Changes in 0.7.9 ================ diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index b5e3351..65703c3 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -364,10 +364,15 @@ class QuerySet(object): queryset = self.clone() doc = queryset._document - # Handle deletes where skips or limits have been applied - if queryset._skip or queryset._limit: + has_delete_signal = ( + signals.pre_delete.has_receivers_for(self._document) or + signals.post_delete.has_receivers_for(self._document)) + + # Handle deletes where skips or limits have been applied or has a + # delete signal + if queryset._skip or queryset._limit or has_delete_signal: for doc in queryset: - doc.delete() + doc.delete(safe=safe) return delete_rules = doc._meta.get('delete_rules') or {} diff --git a/tests/document/instance.py b/tests/document/instance.py index 3d4e8a9..172f0cc 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -19,6 +19,7 @@ from mongoengine.queryset import NULLIFY, Q from mongoengine.connection import get_db from mongoengine.base import get_document from mongoengine.context_managers import switch_db +from 
mongoengine import signals TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), '../fields/mongoengine.png') @@ -1375,7 +1376,6 @@ class InstanceTest(unittest.TestCase): author.delete() self.assertEqual(len(BlogPost.objects), 0) - def test_reverse_delete_rule_cascade_and_nullify_complex_field(self): """Ensure that a referenced document is also deleted upon deletion for complex fields. @@ -1410,6 +1410,43 @@ class InstanceTest(unittest.TestCase): author.delete() self.assertEqual(len(BlogPost.objects), 0) + + def test_reverse_delete_rule_cascade_triggers_pre_delete_signal(self): + ''' ensure the pre_delete signal is triggered upon a cascading deletion + setup a blog post with content, an author and editor + delete the author which triggers deletion of blogpost via cascade + blog post's pre_delete signal alters an editor attribute + ''' + class Editor(self.Person): + review_queue = IntField(default=0) + + class BlogPost(Document): + content = StringField() + author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) + editor = ReferenceField(Editor) + + @classmethod + def pre_delete(cls, sender, document, **kwargs): + # decrement the docs-to-review count + document.editor.update(dec__review_queue=1) + + signals.pre_delete.connect(BlogPost.pre_delete, sender=BlogPost) + + self.Person.drop_collection() + BlogPost.drop_collection() + Editor.drop_collection() + + author = self.Person(name='Will S.').save() + editor = Editor(name='Max P.', review_queue=1).save() + BlogPost(content='wrote some books', author=author, + editor=editor).save() + + # delete the author, the post is also deleted due to the CASCADE rule + author.delete() + # the pre-delete signal should have decremented the editor's queue + editor = Editor.objects(name='Max P.').get() + self.assertEqual(editor.review_queue, 0) + def test_two_way_reverse_delete_rule(self): """Ensure that Bi-Directional relationships work with reverse_delete_rule @@ -1426,7 +1463,6 @@ class 
InstanceTest(unittest.TestCase): Bar.register_delete_rule(Foo, 'bar', NULLIFY) Foo.register_delete_rule(Bar, 'foo', NULLIFY) - Bar.drop_collection() Foo.drop_collection() From 0cbd3663e47d677aa4921070888d32ae4823a9f1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 28 Jan 2013 13:40:28 +0000 Subject: [PATCH 130/464] Updated tests --- tests/all_warnings/__init__.py | 6 +++++- tests/document/class_methods.py | 2 ++ tests/document/dynamic.py | 1 - tests/document/indexes.py | 1 - tests/document/inheritance.py | 2 ++ tests/document/validation.py | 4 ++-- tests/test_connection.py | 9 +++++---- tests/test_context_managers.py | 2 ++ tests/test_dereference.py | 1 - tests/test_django.py | 5 +++++ tests/test_replicaset_connection.py | 2 ++ tests/test_signals.py | 15 ++++++++++----- 12 files changed, 35 insertions(+), 15 deletions(-) diff --git a/tests/all_warnings/__init__.py b/tests/all_warnings/__init__.py index 220b0bb..8cbe22d 100644 --- a/tests/all_warnings/__init__.py +++ b/tests/all_warnings/__init__.py @@ -3,7 +3,8 @@ This test has been put into a module. This is because it tests warnings that only get triggered on first hit. This way we can ensure its imported into the top level and called first by the test suite. 
""" - +import sys +sys.path[0:0] = [""] import unittest import warnings @@ -88,3 +89,6 @@ class AllWarnings(unittest.TestCase): self.assertEqual(SyntaxWarning, warning["category"]) self.assertEqual('non_abstract_base', InheritedDocumentFailTest._get_collection_name()) + +import sys +sys.path[0:0] = [""] \ No newline at end of file diff --git a/tests/document/class_methods.py b/tests/document/class_methods.py index 8e9a877..83e68ff 100644 --- a/tests/document/class_methods.py +++ b/tests/document/class_methods.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import with_statement +import sys +sys.path[0:0] = [""] import unittest from mongoengine import * diff --git a/tests/document/dynamic.py b/tests/document/dynamic.py index 4848b8f..5881cd0 100644 --- a/tests/document/dynamic.py +++ b/tests/document/dynamic.py @@ -1,6 +1,5 @@ import unittest import sys - sys.path[0:0] = [""] from mongoengine import * diff --git a/tests/document/indexes.py b/tests/document/indexes.py index c059590..ff08ef1 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -2,7 +2,6 @@ from __future__ import with_statement import unittest import sys - sys.path[0:0] = [""] import os diff --git a/tests/document/inheritance.py b/tests/document/inheritance.py index c5e1860..3b550f1 100644 --- a/tests/document/inheritance.py +++ b/tests/document/inheritance.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +import sys +sys.path[0:0] = [""] import unittest import warnings diff --git a/tests/document/validation.py b/tests/document/validation.py index 0f67f50..24ffed6 100644 --- a/tests/document/validation.py +++ b/tests/document/validation.py @@ -121,7 +121,7 @@ class ValidatorErrorTest(unittest.TestCase): except ValidationError, e: self.assertTrue("SubDoc:None" in e.message) self.assertEqual(e.to_dict(), { - 'e.val': 'Field is required'}) + "e": {'val': 'OK could not be converted to int'}}) Doc.drop_collection() @@ -139,7 +139,7 @@ class ValidatorErrorTest(unittest.TestCase): 
except ValidationError, e: self.assertTrue("Doc:test" in e.message) self.assertEqual(e.to_dict(), { - 'e.val': 'Field is required'}) + "e": {'val': 'OK could not be converted to int'}}) if __name__ == '__main__': diff --git a/tests/test_connection.py b/tests/test_connection.py index c32d231..5b9743d 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -1,13 +1,14 @@ from __future__ import with_statement -import datetime -import pymongo +import sys +sys.path[0:0] = [""] import unittest +import datetime -import mongoengine.connection - +import pymongo from bson.tz_util import utc from mongoengine import * +import mongoengine.connection from mongoengine.connection import get_db, get_connection, ConnectionError from mongoengine.context_managers import switch_db diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index c9efe8b..eef63be 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -1,4 +1,6 @@ from __future__ import with_statement +import sys +sys.path[0:0] = [""] import unittest from mongoengine import * diff --git a/tests/test_dereference.py b/tests/test_dereference.py index adbc519..4198f3c 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -1,7 +1,6 @@ from __future__ import with_statement import sys sys.path[0:0] = [""] - import unittest from bson import DBRef, ObjectId diff --git a/tests/test_django.py b/tests/test_django.py index 3b0b04f..563f407 100644 --- a/tests/test_django.py +++ b/tests/test_django.py @@ -1,4 +1,6 @@ from __future__ import with_statement +import sys +sys.path[0:0] = [""] import unittest from nose.plugins.skip import SkipTest from mongoengine.python_support import PY3 @@ -163,3 +165,6 @@ class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): key = session.session_key session = SessionStore(key) self.assertTrue('test_expire' in session, 'Session has expired before it is expected') + +if __name__ == '__main__': + unittest.main() 
diff --git a/tests/test_replicaset_connection.py b/tests/test_replicaset_connection.py index 3118c5a..d27960f 100644 --- a/tests/test_replicaset_connection.py +++ b/tests/test_replicaset_connection.py @@ -1,3 +1,5 @@ +import sys +sys.path[0:0] = [""] import unittest import pymongo diff --git a/tests/test_signals.py b/tests/test_signals.py index 2ca820d..fc638cf 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +import sys +sys.path[0:0] = [""] import unittest from mongoengine import * @@ -21,6 +23,7 @@ class SignalTests(unittest.TestCase): def setUp(self): connect(db='mongoenginetest') + class Author(Document): name = StringField() @@ -70,7 +73,6 @@ class SignalTests(unittest.TestCase): signal_output.append('Not loaded') self.Author = Author - class Another(Document): name = StringField() @@ -122,8 +124,8 @@ class SignalTests(unittest.TestCase): self.ExplicitId = ExplicitId self.ExplicitId.objects.delete() - # Save up the number of connected signals so that we can check at the end - # that all the signals we register get properly unregistered + # Save up the number of connected signals so that we can check at the + # end that all the signals we register get properly unregistered self.pre_signals = ( len(signals.pre_init.receivers), len(signals.post_init.receivers), @@ -192,7 +194,7 @@ class SignalTests(unittest.TestCase): """ Model saves should throw some signals. 
""" def create_author(): - a1 = self.Author(name='Bill Shakespeare') + self.Author(name='Bill Shakespeare') def bulk_create_author_with_load(): a1 = self.Author(name='Bill Shakespeare') @@ -216,7 +218,7 @@ class SignalTests(unittest.TestCase): ]) a1.reload() - a1.name='William Shakespeare' + a1.name = 'William Shakespeare' self.assertEqual(self.get_signal_output(a1.save), [ "pre_save signal, William Shakespeare", "post_save signal, William Shakespeare", @@ -257,3 +259,6 @@ class SignalTests(unittest.TestCase): self.assertEqual(self.get_signal_output(ei.save), ['Is created']) # second time, it must be an update self.assertEqual(self.get_signal_output(ei.save), ['Is updated']) + +if __name__ == '__main__': + unittest.main() From 8c1f8e54cdb97a6bb4bf8523c05ec1b783e7a283 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 28 Jan 2013 14:12:47 +0000 Subject: [PATCH 131/464] Added the "get_decoded" method to the MongoSession class (#216) --- AUTHORS | 3 ++- docs/changelog.rst | 1 + docs/upgrade.rst | 2 +- mongoengine/django/sessions.py | 3 ++- 4 files changed, 6 insertions(+), 3 deletions(-) diff --git a/AUTHORS b/AUTHORS index ac811dd..64f0b0a 100644 --- a/AUTHORS +++ b/AUTHORS @@ -138,4 +138,5 @@ that much better: * Manuel Hermann * Gustavo Gawryszewski * Max Countryman - * caitifbrito \ No newline at end of file + * caitifbrito + * lcya86 刘春洋 \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 8b41d6e..6556154 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -44,6 +44,7 @@ Changes in 0.8.X - Added Django Group and Permission (#142) - Added Doc class and pk to Validation messages (#69) - Fixed Documents deleted via a queryset don't call any signals (#105) +- Added the "get_decoded" method to the MongoSession class (#216) Changes in 0.7.9 ================ diff --git a/docs/upgrade.rst b/docs/upgrade.rst index fcd5f71..8724503 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -298,5 +298,5 @@ Alternatively, you can 
rename your collections eg :: mongodb 1.8 > 2.0 + =================== -Its been reported that indexes may need to be recreated to the newer version of indexes. +Its been reported that indexes may need to be recreated to the newer version of indexes. To do this drop indexes and call ``ensure_indexes`` on each model. diff --git a/mongoengine/django/sessions.py b/mongoengine/django/sessions.py index 17cae8a..1c9288e 100644 --- a/mongoengine/django/sessions.py +++ b/mongoengine/django/sessions.py @@ -25,6 +25,7 @@ MONGOENGINE_SESSION_DATA_ENCODE = getattr( settings, 'MONGOENGINE_SESSION_DATA_ENCODE', True) + class MongoSession(Document): session_key = fields.StringField(primary_key=True, max_length=40) session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE \ @@ -34,7 +35,7 @@ class MongoSession(Document): meta = {'collection': MONGOENGINE_SESSION_COLLECTION, 'db_alias': MONGOENGINE_SESSION_DB_ALIAS, 'allow_inheritance': False} - + def get_decoded(self): return SessionStore().decode(self.session_data) From 5b161b7445eb3d53b067ecab6b25a92a6001930f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 28 Jan 2013 14:17:54 +0000 Subject: [PATCH 132/464] ReadPreference that overrides slave_okay (#218) --- mongoengine/queryset/queryset.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 65703c3..4c66461 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1095,11 +1095,12 @@ class QuerySet(object): def _cursor_args(self): cursor_args = { 'snapshot': self._snapshot, - 'timeout': self._timeout, - 'slave_okay': self._slave_okay, + 'timeout': self._timeout } if self._read_preference is not None: cursor_args['read_preference'] = self._read_preference + else: + cursor_args['slave_okay'] = self._slave_okay if self._loaded_fields: cursor_args['fields'] = self._loaded_fields.as_dict() return cursor_args From 
3208a7f15dbe8d99253c3a397c354812ba883953 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 28 Jan 2013 14:28:40 +0000 Subject: [PATCH 133/464] Merge fix tests --- tests/fields/fields.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 8cf3bb5..7c4e785 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -1725,9 +1725,6 @@ class FieldTest(unittest.TestCase): Shirt.drop_collection() -<<<<<<< HEAD:tests/fields/fields.py -======= - def test_simple_choices_validation_invalid_value(self): """Ensure that error messages are correct. """ @@ -2060,7 +2057,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(test_file.the_file.read(), b('Hello, World!')) ->>>>>>> de5fbfde2ca96b93490e0bc96e04f3aa4affcfb5:tests/test_fields.py def test_geo_indexes(self): """Ensure that indexes are created automatically for GeoPointFields. """ From 1ca098c40291ff205e64080dbb1877e8083c3c67 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 28 Jan 2013 14:40:26 +0000 Subject: [PATCH 134/464] Fixed invalid choices error bubbling (#214) --- docs/changelog.rst | 1 + tests/fields/__init__.py | 2 +- tests/fields/{file.py => file_tests.py} | 0 3 files changed, 2 insertions(+), 1 deletion(-) rename tests/fields/{file.py => file_tests.py} (100%) diff --git a/docs/changelog.rst b/docs/changelog.rst index 6556154..f289b39 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -45,6 +45,7 @@ Changes in 0.8.X - Added Doc class and pk to Validation messages (#69) - Fixed Documents deleted via a queryset don't call any signals (#105) - Added the "get_decoded" method to the MongoSession class (#216) +- Fixed invalid choices error bubbling (#214) Changes in 0.7.9 ================ diff --git a/tests/fields/__init__.py b/tests/fields/__init__.py index 86dfa84..0731838 100644 --- a/tests/fields/__init__.py +++ b/tests/fields/__init__.py @@ -1,2 +1,2 @@ from fields import * -from file import * \ No newline at end of file +from 
file_tests import * \ No newline at end of file diff --git a/tests/fields/file.py b/tests/fields/file_tests.py similarity index 100% rename from tests/fields/file.py rename to tests/fields/file_tests.py From 4177fc6df2800908c0b868bd577b9bce1bc85cfa Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 28 Jan 2013 15:57:33 +0000 Subject: [PATCH 135/464] Can call del Doc.attr to delete field value --- mongoengine/document.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 66aa263..525d964 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -48,17 +48,6 @@ class EmbeddedDocument(BaseDocument): super(EmbeddedDocument, self).__init__(*args, **kwargs) self._changed_fields = [] - def __delattr__(self, *args, **kwargs): - """Handle deletions of fields""" - field_name = args[0] - if field_name in self._fields: - default = self._fields[field_name].default - if callable(default): - default = default() - setattr(self, field_name, default) - else: - super(EmbeddedDocument, self).__delattr__(*args, **kwargs) - def __eq__(self, other): if isinstance(other, self.__class__): return self._data == other._data From 9ca632d5184cac9ea5f7d660a63e79658881309e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 28 Jan 2013 16:00:38 +0000 Subject: [PATCH 136/464] Updated Save so it calls $set and $unset in a single operation (#211) --- AUTHORS | 1 + docs/changelog.rst | 1 + 2 files changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index ff58024..fb895d7 100644 --- a/AUTHORS +++ b/AUTHORS @@ -141,3 +141,4 @@ that much better: * caitifbrito * lcya86 刘春洋 * Martin Alderete (https://github.com/malderete) + * Nick Joyce diff --git a/docs/changelog.rst b/docs/changelog.rst index f289b39..213e6fb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -46,6 +46,7 @@ Changes in 0.8.X - Fixed Documents deleted via a queryset don't call any signals (#105) - Added the "get_decoded" method to the 
MongoSession class (#216) - Fixed invalid choices error bubbling (#214) +- Updated Save so it calls $set and $unset in a single operation (#211) Changes in 0.7.9 ================ From 39dac7d4dbe809e7eacac190d348326718ec0c1b Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 28 Jan 2013 16:26:01 +0000 Subject: [PATCH 137/464] Fix file open rules --- tests/fields/fields.py | 8 ++++---- tests/fields/file_tests.py | 10 +++++----- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 7c4e785..124c953 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -1950,7 +1950,7 @@ class FieldTest(unittest.TestCase): TestImage.drop_collection() t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'r')) + t.image.put(open(TEST_IMAGE_PATH, 'rb')) t.save() t = TestImage.objects.first() @@ -1973,7 +1973,7 @@ class FieldTest(unittest.TestCase): TestImage.drop_collection() t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'r')) + t.image.put(open(TEST_IMAGE_PATH, 'rb')) t.save() t = TestImage.objects.first() @@ -1996,7 +1996,7 @@ class FieldTest(unittest.TestCase): TestImage.drop_collection() t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'r')) + t.image.put(open(TEST_IMAGE_PATH, 'rb')) t.save() t = TestImage.objects.first() @@ -2019,7 +2019,7 @@ class FieldTest(unittest.TestCase): TestImage.drop_collection() t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'r')) + t.image.put(open(TEST_IMAGE_PATH, 'rb')) t.save() t = TestImage.objects.first() diff --git a/tests/fields/file_tests.py b/tests/fields/file_tests.py index a39dadb..44d2862 100644 --- a/tests/fields/file_tests.py +++ b/tests/fields/file_tests.py @@ -208,7 +208,7 @@ class FileTest(unittest.TestCase): Animal.drop_collection() marmot = Animal(genus='Marmota', family='Sciuridae') - marmot_photo = open(TEST_IMAGE_PATH, 'r') # Retrieve a photo from disk + marmot_photo = open(TEST_IMAGE_PATH, 'rb') # Retrieve a photo from disk 
marmot.photo.put(marmot_photo, content_type='image/jpeg', foo='bar') marmot.photo.close() marmot.save() @@ -251,7 +251,7 @@ class FileTest(unittest.TestCase): TestImage.drop_collection() t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'r')) + t.image.put(open(TEST_IMAGE_PATH, 'rb')) t.save() t = TestImage.objects.first() @@ -274,7 +274,7 @@ class FileTest(unittest.TestCase): TestImage.drop_collection() t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'r')) + t.image.put(open(TEST_IMAGE_PATH, 'rb')) t.save() t = TestImage.objects.first() @@ -297,7 +297,7 @@ class FileTest(unittest.TestCase): TestImage.drop_collection() t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'r')) + t.image.put(open(TEST_IMAGE_PATH, 'rb')) t.save() t = TestImage.objects.first() @@ -320,7 +320,7 @@ class FileTest(unittest.TestCase): TestImage.drop_collection() t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'r')) + t.image.put(open(TEST_IMAGE_PATH, 'rb')) t.save() t = TestImage.objects.first() From 156ca44a135a0bc832645b0292ca3ff0e02a8148 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 28 Jan 2013 16:49:34 +0000 Subject: [PATCH 138/464] Doc fix thanks to @jabapyth (#206) --- AUTHORS | 1 + mongoengine/queryset/queryset.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index fb895d7..7efd3ad 100644 --- a/AUTHORS +++ b/AUTHORS @@ -142,3 +142,4 @@ that much better: * lcya86 刘春洋 * Martin Alderete (https://github.com/malderete) * Nick Joyce + * Jared Forsyth diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 4c66461..d313740 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -467,7 +467,8 @@ class QuerySet(object): def with_id(self, object_id): """Retrieve the object matching the id provided. Uses `object_id` only - and raises InvalidQueryError if a filter has been applied. + and raises InvalidQueryError if a filter has been applied. 
Returns + `None` if no document exists with that id. :param object_id: the value for the id of the document to look up From 025e17701bd6b403ea5aec41f484918d3c718e4e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 29 Jan 2013 10:33:13 +0000 Subject: [PATCH 139/464] Fixed inner queryset looping (#204) --- AUTHORS | 1 + docs/changelog.rst | 1 + mongoengine/queryset/queryset.py | 10 ++++---- tests/queryset/queryset.py | 41 ++++++++++++++++++++++++++++++++ tests/test_django.py | 18 ++++++++++++++ 5 files changed, 67 insertions(+), 4 deletions(-) diff --git a/AUTHORS b/AUTHORS index 7efd3ad..903542a 100644 --- a/AUTHORS +++ b/AUTHORS @@ -143,3 +143,4 @@ that much better: * Martin Alderete (https://github.com/malderete) * Nick Joyce * Jared Forsyth + * Kenneth Falck diff --git a/docs/changelog.rst b/docs/changelog.rst index 213e6fb..e9783cd 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -47,6 +47,7 @@ Changes in 0.8.X - Added the "get_decoded" method to the MongoSession class (#216) - Fixed invalid choices error bubbling (#214) - Updated Save so it calls $set and $unset in a single operation (#211) +- Fixed inner queryset looping (#204) Changes in 0.7.9 ================ diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index d313740..ba6134f 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -112,8 +112,11 @@ class QuerySet(object): def __iter__(self): """Support iterator protocol""" - self.rewind() - return self + queryset = self + if queryset._iter: + queryset = self.clone() + queryset.rewind() + return queryset def __len__(self): return self.count() @@ -159,7 +162,6 @@ class QuerySet(object): .. versionchanged:: 0.6.13 Now doesnt modify the cursor """ - if self._iter: return '.. queryset mid-iteration ..' 
@@ -537,7 +539,7 @@ class QuerySet(object): c._cursor_obj = self._cursor_obj.clone() if self._slice: - c._cursor_obj[self._slice] + c._cursor[self._slice] return c diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 3d5c659..d5e80be 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3125,5 +3125,46 @@ class QuerySetTest(unittest.TestCase): Organization)) self.assertTrue(isinstance(qs.first().organization, Organization)) + def test_nested_queryset_iterator(self): + # Try iterating the same queryset twice, nested. + names = ['Alice', 'Bob', 'Chuck', 'David', 'Eric', 'Francis', 'George'] + + class User(Document): + name = StringField() + + def __unicode__(self): + return self.name + + User.drop_collection() + + for name in names: + User(name=name).save() + + users = User.objects.all().order_by('name') + + outer_count = 0 + inner_count = 0 + inner_total_count = 0 + + self.assertEqual(len(users), 7) + + for i, outer_user in enumerate(users): + self.assertEqual(outer_user.name, names[i]) + outer_count += 1 + inner_count = 0 + + # Calling len might disrupt the inner loop if there are bugs + self.assertEqual(len(users), 7) + + for j, inner_user in enumerate(users): + self.assertEqual(inner_user.name, names[j]) + inner_count += 1 + inner_total_count += 1 + + self.assertEqual(inner_count, 7) # inner loop should always be executed seven times + + self.assertEqual(outer_count, 7) # outer loop should be executed seven times total + self.assertEqual(inner_total_count, 7 * 7) # inner loop should be executed fourtynine times total + if __name__ == '__main__': unittest.main() diff --git a/tests/test_django.py b/tests/test_django.py index 563f407..dceeba2 100644 --- a/tests/test_django.py +++ b/tests/test_django.py @@ -136,7 +136,25 @@ class QuerySetTest(unittest.TestCase): start = end - 1 self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end)) + def test_nested_queryset_template_iterator(self): + # Try iterating the same 
queryset twice, nested, in a Django template. + names = ['A', 'B', 'C', 'D'] + class User(Document): + name = StringField() + + def __unicode__(self): + return self.name + + User.drop_collection() + + for name in names: + User(name=name).save() + + users = User.objects.all().order_by('name') + template = Template("{% for user in users %}{{ user.name }}{% ifequal forloop.counter 2 %} {% for inner_user in users %}{{ inner_user.name }}{% endfor %} {% endifequal %}{% endfor %}") + rendered = template.render(Context({'users': users})) + self.assertEqual(rendered, 'AB ABCD CD') class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): backend = SessionStore From 2e18199eb2cad168f07caefd68f4cdd5f3078135 Mon Sep 17 00:00:00 2001 From: hellysmile Date: Fri, 1 Feb 2013 04:17:16 +0200 Subject: [PATCH 140/464] Django sessions TTL support --- docs/django.rst | 3 +++ mongoengine/django/sessions.py | 14 +++++++++++--- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/docs/django.rst b/docs/django.rst index 144baab..58eadc6 100644 --- a/docs/django.rst +++ b/docs/django.rst @@ -45,6 +45,9 @@ into you settings module:: SESSION_ENGINE = 'mongoengine.django.sessions' +Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` seconds, but doesnt delete cookie at sessions backend, so ```'mongoengine.django.sessions'``` supports `mongodb TTL +`_. + .. 
versionadded:: 0.2.1 Storage diff --git a/mongoengine/django/sessions.py b/mongoengine/django/sessions.py index 810b626..20e6b62 100644 --- a/mongoengine/django/sessions.py +++ b/mongoengine/django/sessions.py @@ -31,9 +31,17 @@ class MongoSession(Document): else fields.DictField() expire_date = fields.DateTimeField() - meta = {'collection': MONGOENGINE_SESSION_COLLECTION, - 'db_alias': MONGOENGINE_SESSION_DB_ALIAS, - 'allow_inheritance': False} + meta = { + 'collection': MONGOENGINE_SESSION_COLLECTION, + 'db_alias': MONGOENGINE_SESSION_DB_ALIAS, + 'allow_inheritance': False, + 'indexes': [ + { + 'fields': ['expire_date'], + 'expireAfterSeconds': settings.SESSION_COOKIE_AGE + } + ] + } class SessionStore(SessionBase): From d6b4ca7a985c1ba4c25692d9731b8b6cd12e7fb0 Mon Sep 17 00:00:00 2001 From: hellysmile Date: Fri, 1 Feb 2013 04:19:55 +0200 Subject: [PATCH 141/464] Fix docs quotes --- docs/django.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/django.rst b/docs/django.rst index 58eadc6..b2acee7 100644 --- a/docs/django.rst +++ b/docs/django.rst @@ -45,7 +45,7 @@ into you settings module:: SESSION_ENGINE = 'mongoengine.django.sessions' -Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` seconds, but doesnt delete cookie at sessions backend, so ```'mongoengine.django.sessions'``` supports `mongodb TTL +Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` seconds, but doesnt delete cookie at sessions backend, so ``'mongoengine.django.sessions'`` supports `mongodb TTL `_. .. 
versionadded:: 0.2.1 From 8df9ff90cb90aebc21b82fd90d2dabd084097d6e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 14 Feb 2013 08:26:36 +0000 Subject: [PATCH 142/464] Update LICENSE --- LICENSE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LICENSE b/LICENSE index cef91cc..45f233c 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2009-2012 See AUTHORS +Copyright (c) 2009 See AUTHORS Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation From 3477b0107a227b88a1936fb23fa24ad7b26524a6 Mon Sep 17 00:00:00 2001 From: Loic Raucy Date: Tue, 26 Feb 2013 11:12:37 +0100 Subject: [PATCH 143/464] Added regression test for numerical string keys. --- tests/test_fields.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/tests/test_fields.py b/tests/test_fields.py index 28af1b2..4766900 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -965,6 +965,24 @@ class FieldTest(unittest.TestCase): doc = self.db.test.find_one() self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2) + def test_mapfield_numerical_index(self): + """Ensure that MapField accept numeric strings as indexes.""" + class Embedded(EmbeddedDocument): + name = StringField() + + class Test(Document): + my_map = MapField(EmbeddedDocumentField(Embedded)) + + Test.drop_collection() + + test = Test() + test.my_map['1'] = Embedded(name='test') + test.save() + test.my_map['1'].name = 'test updated' + test.save() + + Test.drop_collection() + def test_map_field_lookup(self): """Ensure MapField lookups succeed on Fields without a lookup method""" From d0245bb5ba3b0f4ca4ce654fd199c167ce8c5e96 Mon Sep 17 00:00:00 2001 From: Loic Raucy Date: Tue, 26 Feb 2013 11:14:47 +0100 Subject: [PATCH 144/464] Fixed #238: dictfields handle numerical strings indexes. 
--- mongoengine/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 013afe7..4f302a8 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -1193,7 +1193,7 @@ class BaseDocument(object): for p in parts: if isinstance(d, DBRef): break - elif p.isdigit(): + elif isinstance(d, list) and p.isdigit(): d = d[int(p)] elif hasattr(d, 'get'): d = d.get(p) @@ -1224,7 +1224,7 @@ class BaseDocument(object): parts = path.split('.') db_field_name = parts.pop() for p in parts: - if p.isdigit(): + if isinstance(d, list) and p.isdigit(): d = d[int(p)] elif (hasattr(d, '__getattribute__') and not isinstance(d, dict)): From 3c78757778551bfe28495e3630b3e6b991e2a86f Mon Sep 17 00:00:00 2001 From: Benoit Louy Date: Tue, 26 Feb 2013 09:55:29 -0500 Subject: [PATCH 145/464] fix travis build: builds were failing because libz.so location changed. --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 550cc6c..2dc8894 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,7 +13,7 @@ env: - PYMONGO=2.3 install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install zlib1g zlib1g-dev; fi - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/; fi + - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/x86_64-linux-gnu/libz.so /usr/lib/; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi From 0d2e84b16b286fd7724676794a68aa0610285afc Mon Sep 17 00:00:00 2001 From: benoitlouy Date: Thu, 28 Feb 2013 00:37:34 -0500 Subject: [PATCH 146/464] Fix for issue #237: clearing changed fields recursively in EmbeddedDocuments after saving a Document --- 
mongoengine/base.py | 16 ++++++++++++++++ mongoengine/document.py | 2 +- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 013afe7..521756c 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -1123,6 +1123,22 @@ class BaseDocument(object): key not in self._changed_fields): self._changed_fields.append(key) + def _clear_changed_fields(self): + self._changed_fields = [] + EmbeddedDocumentField = _import_class("EmbeddedDocumentField") + for field_name, field in self._fields.iteritems(): + if (isinstance(field, ComplexBaseField) and + isinstance(field.field, EmbeddedDocumentField)): + field_value = getattr(self, field_name, None) + if field_value: + for idx in (field_value if isinstance(field_value, dict) + else xrange(len(field_value))): + field_value[idx]._clear_changed_fields() + elif isinstance(field, EmbeddedDocumentField): + field_value = getattr(self, field_name, None) + if field_value: + field_value._clear_changed_fields() + def _get_changed_fields(self, key='', inspected=None): """Returns a list of all fields that have explicitly been changed. 
""" diff --git a/mongoengine/document.py b/mongoengine/document.py index 7b3afaf..0cf07a9 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -269,7 +269,7 @@ class Document(BaseDocument): if id_field not in self._meta.get('shard_key', []): self[id_field] = self._fields[id_field].to_python(object_id) - self._changed_fields = [] + self._clear_changed_fields() self._created = False signals.post_save.send(self.__class__, document=self, created=created) return self From 43327ea4e1ca2041de248b438d77b392bd59f6c3 Mon Sep 17 00:00:00 2001 From: benoitlouy Date: Fri, 1 Mar 2013 07:38:28 -0500 Subject: [PATCH 147/464] Add testcase for issue #237 --- tests/test_document.py | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/tests/test_document.py b/tests/test_document.py index 3e8d813..134ba34 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -3368,6 +3368,40 @@ class DocumentTest(unittest.TestCase): } ) ]), "1,2") + def test_complex_nesting_document_and_embeddeddocument(self): + class Macro(EmbeddedDocument): + value = DynamicField(default="UNDEFINED") + + class Parameter(EmbeddedDocument): + macros = MapField(EmbeddedDocumentField(Macro)) + + def expand(self): + self.macros["test"] = Macro() + + class Node(Document): + parameters = MapField(EmbeddedDocumentField(Parameter)) + + def expand(self): + self.flattened_parameter = {} + for parameter_name, parameter in self.parameters.iteritems(): + parameter.expand() + + class System(Document): + name = StringField(required=True) + nodes = MapField(ReferenceField(Node, dbref=False)) + + def save(self, *args, **kwargs): + for node_name, node in self.nodes.iteritems(): + node.expand() + node.save(*args, **kwargs) + super(System, self).save(*args, **kwargs) + + system = System(name="system") + system.save() + system.nodes["node"] = Node() + system.save() + system.nodes["node"].parameters["param"] = Parameter() + system.save() class 
ValidatorErrorTest(unittest.TestCase): From 3e4a900279758667ebcaaa1683c86324b06ab6ec Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 7 Mar 2013 15:25:18 +0000 Subject: [PATCH 148/464] Adding google analytics --- docs/_templates/layout.html | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 docs/_templates/layout.html diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html new file mode 100644 index 0000000..8172f21 --- /dev/null +++ b/docs/_templates/layout.html @@ -0,0 +1,16 @@ +{% extends "!layout.html" %} + +{% block footer %} +{{ super() }} + +{% endblock %} From eeb672feb9e35416d452b386c98313134f27cc2e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 7 Mar 2013 15:37:01 +0000 Subject: [PATCH 149/464] Removing custom layout --- docs/_templates/layout.html | 16 ---------------- 1 file changed, 16 deletions(-) delete mode 100644 docs/_templates/layout.html diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html deleted file mode 100644 index 8172f21..0000000 --- a/docs/_templates/layout.html +++ /dev/null @@ -1,16 +0,0 @@ -{% extends "!layout.html" %} - -{% block footer %} -{{ super() }} - -{% endblock %} From d36f6e7f24d5fc22076fc192d6f3174b0c1ee58c Mon Sep 17 00:00:00 2001 From: zmolodchenko Date: Sat, 9 Mar 2013 21:08:10 +0200 Subject: [PATCH 150/464] fix error reporting, where choices is list of flat values --- mongoengine/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 013afe7..f73af4c 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -252,7 +252,7 @@ class BaseField(object): elif value_to_check not in self.choices: msg = ('Value must be %s of %s' % (err_msg, unicode(self.choices))) - self.error() + self.error(msg) # check validation argument if self.validation is not None: From 41a698b442881b7f40cef5b3c6954097d0965a10 Mon Sep 17 00:00:00 2001 From: Russ Weeks Date: Tue, 12 Mar 2013 10:28:29 -0700 Subject: 
[PATCH 151/464] Changed dereference.py to keep tuples as tuples --- mongoengine/dereference.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index 386dbf4..fcb6d89 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -171,6 +171,7 @@ class DeReference(object): if not hasattr(items, 'items'): is_list = True + as_tuple = isinstance(items, tuple) iterator = enumerate(items) data = [] else: @@ -205,7 +206,7 @@ class DeReference(object): if instance and name: if is_list: - return BaseList(data, instance, name) + return tuple(data) if as_tuple else BaseList(data, instance, name) return BaseDict(data, instance, name) depth += 1 return data From f9cd8b1841631c693ebf40479d215bee4ed13d30 Mon Sep 17 00:00:00 2001 From: Russ Weeks Date: Tue, 12 Mar 2013 12:45:38 -0700 Subject: [PATCH 152/464] added unit test for dereference patch --- tests/test_dereference.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/tests/test_dereference.py b/tests/test_dereference.py index 0eb891c..d7438d2 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -997,3 +997,34 @@ class FieldTest(unittest.TestCase): msg = Message.objects.get(id=1) self.assertEqual(0, msg.comments[0].id) self.assertEqual(1, msg.comments[1].id) + + def test_tuples_as_tuples(self): + """ + Ensure that tuples remain tuples when they are + inside a ComplexBaseField + """ + from mongoengine.base import BaseField + class EnumField(BaseField): + def __init__(self, **kwargs): + super(EnumField,self).__init__(**kwargs) + + def to_mongo(self, value): + return value + + def to_python(self, value): + return tuple(value) + + class TestDoc(Document): + items = ListField(EnumField()) + + TestDoc.drop_collection() + tuples = [(100,'Testing')] + doc = TestDoc() + doc.items = tuples + doc.save() + x = TestDoc.objects().get() + self.assertTrue(x is not None) + self.assertTrue(len(x.items) 
== 1) + self.assertTrue(tuple(x.items[0]) in tuples) + self.assertTrue(x.items[0] in tuples) + From 2d6ae1691204b1036ee5b7595721303a8533a103 Mon Sep 17 00:00:00 2001 From: Jaepil Jeong Date: Thu, 14 Mar 2013 23:25:22 +0900 Subject: [PATCH 153/464] Added LongField to support 64-bit integer type. --- mongoengine/fields.py | 38 ++++++++++++++++++++++++++++++++++++-- 1 file changed, 36 insertions(+), 2 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index de484a1..c40491b 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -27,7 +27,7 @@ except ImportError: Image = None ImageOps = None -__all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', +__all__ = ['StringField', 'IntField', 'LongField', 'FloatField', 'BooleanField', 'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField', 'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField', 'DecimalField', 'ComplexDateTimeField', 'URLField', 'DynamicField', @@ -153,7 +153,7 @@ class EmailField(StringField): class IntField(BaseField): - """An integer field. + """An 32-bit integer field. """ def __init__(self, min_value=None, max_value=None, **kwargs): @@ -186,6 +186,40 @@ class IntField(BaseField): return int(value) +class LongField(BaseField): + """An 64-bit integer field. 
+ """ + + def __init__(self, min_value=None, max_value=None, **kwargs): + self.min_value, self.max_value = min_value, max_value + super(LongField, self).__init__(**kwargs) + + def to_python(self, value): + try: + value = long(value) + except ValueError: + pass + return value + + def validate(self, value): + try: + value = long(value) + except: + self.error('%s could not be converted to long' % value) + + if self.min_value is not None and value < self.min_value: + self.error('Long value is too small') + + if self.max_value is not None and value > self.max_value: + self.error('Long value is too large') + + def prepare_query_value(self, op, value): + if value is None: + return value + + return long(value) + + class FloatField(BaseField): """An floating point number field. """ From e9464e32db4bcd10b332fb4764b3f9189e096f36 Mon Sep 17 00:00:00 2001 From: Jaepil Jeong Date: Thu, 14 Mar 2013 23:59:50 +0900 Subject: [PATCH 154/464] Added test cases for LongField. --- tests/test_fields.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/tests/test_fields.py b/tests/test_fields.py index 28af1b2..3ceff8d 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -144,6 +144,17 @@ class FieldTest(unittest.TestCase): self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count()) self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count()) + def test_long_ne_operator(self): + class TestDocument(Document): + long_fld = LongField() + + TestDocument.drop_collection() + + TestDocument(long_fld=None).save() + TestDocument(long_fld=1).save() + + self.assertEqual(1, TestDocument.objects(long_fld__ne=None).count()) + def test_object_id_validation(self): """Ensure that invalid values cannot be assigned to string fields. 
""" @@ -217,6 +228,23 @@ class FieldTest(unittest.TestCase): person.age = 'ten' self.assertRaises(ValidationError, person.validate) + def test_long_validation(self): + """Ensure that invalid values cannot be assigned to long fields. + """ + class TestDocument(Document): + value = LongField(min_value=0, max_value=110) + + doc = TestDocument() + doc.value = 50 + doc.validate() + + doc.value = -1 + self.assertRaises(ValidationError, doc.validate) + doc.age = 120 + self.assertRaises(ValidationError, doc.validate) + doc.age = 'ten' + self.assertRaises(ValidationError, doc.validate) + def test_float_validation(self): """Ensure that invalid values cannot be assigned to float fields. """ From 67182713d96233d3d2feb0f67d39ddaf1789c692 Mon Sep 17 00:00:00 2001 From: Jaepil Jeong Date: Fri, 15 Mar 2013 00:12:48 +0900 Subject: [PATCH 155/464] Fixed potential overflow error. --- mongoengine/fields.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index c40491b..d7c7cf1 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -213,6 +213,9 @@ class LongField(BaseField): if self.max_value is not None and value > self.max_value: self.error('Long value is too large') + if value > 0x7FFFFFFFFFFFFFFF: + self.error('Long value is too large') + def prepare_query_value(self, op, value): if value is None: return value From a192029901c6c21b3cc949a6221a1090083ebf86 Mon Sep 17 00:00:00 2001 From: Aleksandr Sorokoumov Date: Sat, 16 Mar 2013 16:47:22 +0100 Subject: [PATCH 156/464] ReferenceField query chaining bug fixed. 
--- mongoengine/queryset.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index bff05fc..6c61ab9 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -367,6 +367,10 @@ class QuerySet(object): self._skip = None self._hint = -1 # Using -1 as None is a valid value for hint + def __deepcopy__(self, memo): + """Essential for chained queries with ReferenceFields involved""" + return self.clone() + def clone(self): """Creates a copy of the current :class:`~mongoengine.queryset.QuerySet` @@ -814,7 +818,6 @@ class QuerySet(object): mongo_query['$and'].append(value) else: mongo_query['$and'] = value - return mongo_query def get(self, *q_objs, **query): From a762a10decef552ccf6fbee413819d18bede05ea Mon Sep 17 00:00:00 2001 From: Jaepil Jeong Date: Mon, 18 Mar 2013 19:30:04 +0900 Subject: [PATCH 157/464] Revert "Fixed potential overflow error." This reverts commit 67182713d96233d3d2feb0f67d39ddaf1789c692. --- mongoengine/fields.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index d7c7cf1..c40491b 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -213,9 +213,6 @@ class LongField(BaseField): if self.max_value is not None and value > self.max_value: self.error('Long value is too large') - if value > 0x7FFFFFFFFFFFFFFF: - self.error('Long value is too large') - def prepare_query_value(self, op, value): if value is None: return value From f7515cfca80d870728ae5f03535dcedcd2a6a5b1 Mon Sep 17 00:00:00 2001 From: Aleksandr Sorokoumov Date: Mon, 18 Mar 2013 12:22:55 +0100 Subject: [PATCH 158/464] add myself to AUTHORS --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 82a1dfa..fe62026 100644 --- a/AUTHORS +++ b/AUTHORS @@ -128,3 +128,4 @@ that much better: * Peter Teichman * Jakub Kot * Jorge Bastida + * Aleksandr Sorokoumov \ No newline at end of file From 
165bea5bb97f24c4b06fedfe38ebb3d925052eaa Mon Sep 17 00:00:00 2001 From: Aleksandr Sorokoumov Date: Mon, 18 Mar 2013 12:32:49 +0100 Subject: [PATCH 159/464] QuerySet chaining test was supplemented with ReferenceField chaining test --- tests/test_queryset.py | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/tests/test_queryset.py b/tests/test_queryset.py index 5234cea..43bb70b 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -232,28 +232,33 @@ class QuerySetTest(unittest.TestCase): def test_chaining(self): class A(Document): - pass + s = StringField() class B(Document): - a = ReferenceField(A) + ref = ReferenceField(A) + boolfield = BooleanField(default=False) A.drop_collection() B.drop_collection() - a1 = A().save() - a2 = A().save() + a1 = A(s="test1").save() + a2 = A(s="test2").save() - B(a=a1).save() + B(ref=a1, boolfield=True).save() # Works - q1 = B.objects.filter(a__in=[a1, a2], a=a1)._query + q1 = B.objects.filter(ref__in=[a1, a2], ref=a1)._query # Doesn't work - q2 = B.objects.filter(a__in=[a1, a2]) - q2 = q2.filter(a=a1)._query - + q2 = B.objects.filter(ref__in=[a1, a2]) + q2 = q2.filter(ref=a1)._query self.assertEqual(q1, q2) + a_objects = A.objects(s='test1') + query = B.objects(ref__in=a_objects) + query = query.filter(boolfield=True) + self.assertEquals(query.count(), 1) + def test_update_write_options(self): """Test that passing write_options works""" From faf840f924c6d8432c88c70ae949c8b14f0d72ed Mon Sep 17 00:00:00 2001 From: Paul Swartz Date: Mon, 25 Mar 2013 10:59:31 -0400 Subject: [PATCH 160/464] only mark a field as changed if the value has changed Prevents spurious changes from being recorded. 
--- AUTHORS | 1 + mongoengine/base.py | 14 ++++++-------- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/AUTHORS b/AUTHORS index 82a1dfa..3d05cc4 100644 --- a/AUTHORS +++ b/AUTHORS @@ -128,3 +128,4 @@ that much better: * Peter Teichman * Jakub Kot * Jorge Bastida + * Paul Swartz \ No newline at end of file diff --git a/mongoengine/base.py b/mongoengine/base.py index f73af4c..0e46248 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -205,8 +205,12 @@ class BaseField(object): def __set__(self, instance, value): """Descriptor for assigning a value to a field in a document. """ - instance._data[self.name] = value - if instance._initialised: + changed = False + if (self.name not in instance._data or + instance._data[self.name] != value): + changed = True + instance._data[self.name] = value + if changed and instance._initialised: instance._mark_as_changed(self.name) def error(self, message="", errors=None, field_name=None): @@ -317,12 +321,6 @@ class ComplexBaseField(BaseField): return value - def __set__(self, instance, value): - """Descriptor for assigning a value to a field in a document. - """ - instance._data[self.name] = value - instance._mark_as_changed(self.name) - def to_python(self, value): """Convert a MongoDB-compatible type to a Python type. """ From 20cb0285f004def84e83b501af35672d544afe0f Mon Sep 17 00:00:00 2001 From: Paul Swartz Date: Wed, 27 Mar 2013 14:53:47 -0400 Subject: [PATCH 161/464] explicitly check for Document instances when dereferencing In particular, `collections.namedtuple` instances also have a `_fields` attribute which confuses the dereferencing. 
--- mongoengine/dereference.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index 386dbf4..b227ed3 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -33,7 +33,7 @@ class DeReference(object): self.max_depth = max_depth doc_type = None - if instance and instance._fields: + if instance and isinstance(instance, Document): doc_type = instance._fields.get(name) if hasattr(doc_type, 'field'): doc_type = doc_type.field @@ -84,7 +84,7 @@ class DeReference(object): # Recursively find dbreferences depth += 1 for k, item in iterator: - if hasattr(item, '_fields'): + if isinstance(item, Document): for field_name, field in item._fields.iteritems(): v = item._data.get(field_name, None) if isinstance(v, (DBRef)): @@ -187,7 +187,7 @@ class DeReference(object): if k in self.object_map and not is_list: data[k] = self.object_map[k] - elif hasattr(v, '_fields'): + elif isinstance(v, Document): for field_name, field in v._fields.iteritems(): v = data[k]._data.get(field_name, None) if isinstance(v, (DBRef)): From 74f3f4eb158ab5bd980578582af8454b0198577a Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Mon, 1 Apr 2013 16:17:17 -0700 Subject: [PATCH 162/464] more ordering unit tests --- tests/test_queryset.py | 91 +++++++++++++++++++++++++++++++++++++----- 1 file changed, 81 insertions(+), 10 deletions(-) diff --git a/tests/test_queryset.py b/tests/test_queryset.py index 5234cea..a87611c 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -952,6 +952,11 @@ class QuerySetTest(unittest.TestCase): {'attachments.views.extracted': 'no'}]} self.assertEqual(expected, raw_query) + def assertSequence(self, qs, expected): + self.assertEqual(len(qs), len(expected)) + for i in range(len(qs)): + self.assertEqual(qs[i], expected[i]) + def test_ordering(self): """Ensure default ordering is applied and can be overridden. 
""" @@ -965,10 +970,10 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - blog_post_1 = BlogPost(title="Blog Post #1", - published_date=datetime(2010, 1, 5, 0, 0 ,0)) blog_post_2 = BlogPost(title="Blog Post #2", published_date=datetime(2010, 1, 6, 0, 0 ,0)) + blog_post_1 = BlogPost(title="Blog Post #1", + published_date=datetime(2010, 1, 5, 0, 0 ,0)) blog_post_3 = BlogPost(title="Blog Post #3", published_date=datetime(2010, 1, 7, 0, 0 ,0)) @@ -978,14 +983,13 @@ class QuerySetTest(unittest.TestCase): # get the "first" BlogPost using default ordering # from BlogPost.meta.ordering - latest_post = BlogPost.objects.first() - self.assertEqual(latest_post.title, "Blog Post #3") + expected = [blog_post_3, blog_post_2, blog_post_1] + self.assertSequence(BlogPost.objects.all(), expected) # override default ordering, order BlogPosts by "published_date" - first_post = BlogPost.objects.order_by("+published_date").first() - self.assertEqual(first_post.title, "Blog Post #1") - - BlogPost.drop_collection() + qs = BlogPost.objects.order_by("+published_date") + expected = [blog_post_1, blog_post_2, blog_post_3] + self.assertSequence(qs, expected) def test_only(self): """Ensure that QuerySet.only only returns the requested fields. @@ -1921,8 +1925,8 @@ class QuerySetTest(unittest.TestCase): def test_order_by(self): """Ensure that QuerySets may be ordered. 
""" - self.Person(name="User A", age=20).save() self.Person(name="User B", age=40).save() + self.Person(name="User A", age=20).save() self.Person(name="User C", age=30).save() names = [p.name for p in self.Person.objects.order_by('-age')] @@ -1937,11 +1941,67 @@ class QuerySetTest(unittest.TestCase): ages = [p.age for p in self.Person.objects.order_by('-name')] self.assertEqual(ages, [30, 40, 20]) + def test_order_by_optional(self): + class BlogPost(Document): + title = StringField() + published_date = DateTimeField(required=False) + + BlogPost.drop_collection() + + blog_post_3 = BlogPost(title="Blog Post #3", + published_date=datetime(2010, 1, 6, 0, 0 ,0)) + blog_post_2 = BlogPost(title="Blog Post #2", + published_date=datetime(2010, 1, 5, 0, 0 ,0)) + blog_post_4 = BlogPost(title="Blog Post #4", + published_date=datetime(2010, 1, 7, 0, 0 ,0)) + blog_post_1 = BlogPost(title="Blog Post #1", published_date=None) + + blog_post_3.save() + blog_post_1.save() + blog_post_4.save() + blog_post_2.save() + + expected = [blog_post_1, blog_post_2, blog_post_3, blog_post_4] + self.assertSequence(BlogPost.objects.order_by('published_date'), + expected) + self.assertSequence(BlogPost.objects.order_by('+published_date'), + expected) + + expected.reverse() + self.assertSequence(BlogPost.objects.order_by('-published_date'), + expected) + + def test_order_by_list(self): + class BlogPost(Document): + title = StringField() + published_date = DateTimeField(required=False) + + BlogPost.drop_collection() + + blog_post_1 = BlogPost(title="A", + published_date=datetime(2010, 1, 6, 0, 0 ,0)) + blog_post_2 = BlogPost(title="B", + published_date=datetime(2010, 1, 6, 0, 0 ,0)) + blog_post_3 = BlogPost(title="C", + published_date=datetime(2010, 1, 7, 0, 0 ,0)) + + blog_post_2.save() + blog_post_3.save() + blog_post_1.save() + + qs = BlogPost.objects.order_by('published_date', 'title') + expected = [blog_post_1, blog_post_2, blog_post_3] + self.assertSequence(qs, expected) + + qs = 
BlogPost.objects.order_by('-published_date', '-title') + expected.reverse() + self.assertSequence(qs, expected) + def test_order_by_chaining(self): """Ensure that an order_by query chains properly and allows .only() """ - self.Person(name="User A", age=20).save() self.Person(name="User B", age=40).save() + self.Person(name="User A", age=20).save() self.Person(name="User C", age=30).save() only_age = self.Person.objects.order_by('-age').only('age') @@ -1953,6 +2013,17 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(names, [None, None, None]) self.assertEqual(ages, [40, 30, 20]) + qs = self.Person.objects.all().limit(10) + qs = qs.order_by('-age') + ages = [p.age for p in qs] + self.assertEqual(ages, [40, 30, 20]) + + qs = self.Person.objects.all().skip(0) + qs = qs.order_by('-age') + ages = [p.age for p in qs] + self.assertEqual(ages, [40, 30, 20]) + + def test_confirm_order_by_reference_wont_work(self): """Ordering by reference is not possible. Use map / reduce.. or denormalise""" From dfabfce01bd91f9104fde9cd1892f9ca58fbe41b Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Mon, 1 Apr 2013 17:17:01 -0700 Subject: [PATCH 163/464] show that order_by followed by limit works, but not the other way around --- tests/test_queryset.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/test_queryset.py b/tests/test_queryset.py index a87611c..56177ec 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -2013,6 +2013,11 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(names, [None, None, None]) self.assertEqual(ages, [40, 30, 20]) + qs = self.Person.objects.all().order_by('-age') + qs = qs.limit(10) + ages = [p.age for p in qs] + self.assertEqual(ages, [40, 30, 20]) + qs = self.Person.objects.all().limit(10) qs = qs.order_by('-age') ages = [p.age for p in qs] @@ -2023,7 +2028,6 @@ class QuerySetTest(unittest.TestCase): ages = [p.age for p in qs] self.assertEqual(ages, [40, 30, 20]) - def 
test_confirm_order_by_reference_wont_work(self): """Ordering by reference is not possible. Use map / reduce.. or denormalise""" From 32d5c0c946f68a9328e23c688ad37b4472033e1c Mon Sep 17 00:00:00 2001 From: Alice Bevan-McGregor Date: Wed, 3 Apr 2013 15:00:34 -0400 Subject: [PATCH 164/464] Store ordered list of field names, and return the ordered list when iterating a document instance. --- mongoengine/base.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index f73af4c..2f956cc 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -560,8 +560,11 @@ class DocumentMetaclass(type): # Set _fields and db_field maps attrs['_fields'] = doc_fields - attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k)) - for k, v in doc_fields.iteritems()]) + attrs['_fields_ordered'] = tuple(i[1] + for i in sorted((v.creation_counter, v.name) + for v in doc_fields.itervalues())) + attrs['_db_field_map'] = dict((k, getattr(v, 'db_field', k)) + for k, v in doc_fields.iteritems()) attrs['_reverse_db_field_map'] = dict( (v, k) for k, v in attrs['_db_field_map'].iteritems()) @@ -1302,7 +1305,10 @@ class BaseDocument(object): return value def __iter__(self): - return iter(self._fields) + if 'id' in self._fields and 'id' not in self._fields_ordered: + return iter(('id', ) + self._fields_ordered) + + return iter(self._fields_ordered) def __getitem__(self, name): """Dictionary-style field access, return a field's value if present. From fc1ce6d39bd5dac86111276eae26de407a194ce6 Mon Sep 17 00:00:00 2001 From: Alice Bevan-McGregor Date: Wed, 3 Apr 2013 15:00:51 -0400 Subject: [PATCH 165/464] Allow construction of document instances using positional arguments. 
--- mongoengine/base.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 2f956cc..36d7c29 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -907,7 +907,17 @@ class BaseDocument(object): _dynamic_lock = True _initialised = False - def __init__(self, **values): + def __init__(self, *args, **values): + if args: + # Combine positional arguments with named arguments. + # We only want named arguments. + field = iter(self._fields_ordered) + for value in args: + name = next(field) + if name in values: + raise TypeError("Multiple values for keyword argument '" + name + "'") + values[name] = value + signals.pre_init.send(self.__class__, document=self, values=values) self._data = {} From 07d3e52e6a461eeca1251911e5440486e0832c2a Mon Sep 17 00:00:00 2001 From: Alice Bevan-McGregor Date: Wed, 3 Apr 2013 15:03:33 -0400 Subject: [PATCH 166/464] Tests for construction using positional parameters. --- tests/test_document.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/tests/test_document.py b/tests/test_document.py index 3e8d813..00059fa 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -1386,6 +1386,28 @@ class DocumentTest(unittest.TestCase): person = self.Person(name="Test User", age=30) self.assertEqual(person.name, "Test User") self.assertEqual(person.age, 30) + + def test_positional_creation(self): + """Ensure that document may be created using positional arguments. + """ + person = self.Person("Test User", 42) + self.assertEqual(person.name, "Test User") + self.assertEqual(person.age, 42) + + def test_mixed_creation(self): + """Ensure that document may be created using mixed arguments. 
+ """ + person = self.Person("Test User", age=42) + self.assertEqual(person.name, "Test User") + self.assertEqual(person.age, 42) + + def test_bad_mixed_creation(self): + """Ensure that document gives correct error when duplicating arguments + """ + def construct_bad_instance(): + return self.Person("Test User", 42, name="Bad User") + + self.assertRaises(TypeError, construct_bad_instance) def test_to_dbref(self): """Ensure that you can get a dbref of a document""" From 782d48594a3fabaad8dfe4b5444f9d18839b309a Mon Sep 17 00:00:00 2001 From: "bool.dev" Date: Thu, 4 Apr 2013 08:24:35 +0530 Subject: [PATCH 167/464] Fixes resolving to db_field from class field name, in distinct() query. --- mongoengine/queryset.py | 2 ++ tests/test_queryset.py | 17 +++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index bff05fc..ff168c7 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1214,6 +1214,8 @@ class QuerySet(object): .. versionchanged:: 0.5 - Fixed handling references .. versionchanged:: 0.6 - Improved db_field refrence handling """ + field = [field] + field = self._fields_to_dbfields(field).pop() return self._dereference(self._cursor.distinct(field), 1, name=field, instance=self._document) diff --git a/tests/test_queryset.py b/tests/test_queryset.py index 5234cea..df58e21 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -2481,6 +2481,23 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(Foo.objects.distinct("bar"), [bar]) + def test_distinct_handles_db_field(self): + """Ensure that distinct resolves field name to db_field as expected. 
+ """ + class Product(Document): + product_id=IntField(db_field='pid') + + Product.drop_collection() + + product_one = Product(product_id=1).save() + product_two = Product(product_id=2).save() + product_one_dup = Product(product_id=1).save() + + self.assertEqual(set(Product.objects.distinct('product_id')), + set([1, 2])) + + Product.drop_collection() + def test_custom_manager(self): """Ensure that custom QuerySetManager instances work as expected. """ From dd006a502eede24bad93361c74d6239fc3be4998 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristinn=20O=CC=88rn=20Sigur=C3=B0sson?= Date: Thu, 4 Apr 2013 17:09:05 +0200 Subject: [PATCH 168/464] Don't run unset on IntField if the value is 0 (zero). --- mongoengine/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index f73af4c..c2d74f3 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -1207,7 +1207,7 @@ class BaseDocument(object): # Determine if any changed items were actually unset. for path, value in set_data.items(): - if value or isinstance(value, bool): + if value or isinstance(value, bool) or isinstance(value, int): continue # If we've set a value that ain't the default value dont unset it. From 47df8deb58add4a7b93dedcc9da61be0be722d0f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristinn=20O=CC=88rn=20Sigur=C3=B0sson?= Date: Thu, 4 Apr 2013 17:30:21 +0200 Subject: [PATCH 169/464] Fix the implementation. --- mongoengine/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index c2d74f3..4209156 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -1207,7 +1207,7 @@ class BaseDocument(object): # Determine if any changed items were actually unset. for path, value in set_data.items(): - if value or isinstance(value, bool) or isinstance(value, int): + if value or type(value) in [bool, int]: continue # If we've set a value that ain't the default value dont unset it. 
From 7e980a16d088d6b73f7e3e6b4bdb3e5b75bd759b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristinn=20O=CC=88rn=20Sigur=C3=B0sson?= Date: Fri, 5 Apr 2013 11:01:46 +0200 Subject: [PATCH 170/464] Don't run unset on IntField if the value is 0 (zero). The IntField in unset if the IntField value doesn't validate to "truthify" (therefore, is set as 0) and the default value of the IntField in question is 0. This is not a logical functionality in my opinion. Take this example. You have an IntField that is a counter which can be incremented and decremented. This counter has the default value of 0 and is a required field. Every time the counter reaches 0, the field is unset. --- mongoengine/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 4209156..fa6f825 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -1207,7 +1207,7 @@ class BaseDocument(object): # Determine if any changed items were actually unset. for path, value in set_data.items(): - if value or type(value) in [bool, int]: + if value or isinstance(value, (bool, int)): continue # If we've set a value that ain't the default value dont unset it. From d58341d7ae8e499f127dfc8ad0ce20be70b4c043 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 11 Apr 2013 13:15:17 +0000 Subject: [PATCH 171/464] Fix doc generation path (#230) Add Lukaszb to Authors --- AUTHORS | 1 + docs/conf.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/AUTHORS b/AUTHORS index 903542a..c64aaec 100644 --- a/AUTHORS +++ b/AUTHORS @@ -144,3 +144,4 @@ that much better: * Nick Joyce * Jared Forsyth * Kenneth Falck + * Lukasz Balcerzak diff --git a/docs/conf.py b/docs/conf.py index 62fa150..3cfcef5 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -16,7 +16,7 @@ import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. 
If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.append(os.path.abspath('..')) +sys.path.insert(0, os.path.abspath('..')) # -- General configuration ----------------------------------------------------- @@ -38,7 +38,7 @@ master_doc = 'index' # General information about the project. project = u'MongoEngine' -copyright = u'2009-2012, MongoEngine Authors' +copyright = u'2009, MongoEngine Authors' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the From e9ff655b0ea1941837d3b9b155f530a023be81d1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 11 Apr 2013 14:58:42 +0000 Subject: [PATCH 172/464] Trying to fix travis --- .travis.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2dc8894..a85aac4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,8 +14,7 @@ env: install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install zlib1g zlib1g-dev; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/x86_64-linux-gnu/libz.so /usr/lib/; fi - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi + - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pillow --use-mirrors ; true; fi - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi - python setup.py install From d9b8ee7895b4c0539a134e998dc6a756da502548 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 11 Apr 2013 15:47:53 +0000 Subject: [PATCH 173/464] next test --- .travis.yml | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/.travis.yml 
b/.travis.yml index a85aac4..5e20e65 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,9 +12,8 @@ env: - PYMONGO=2.4.1 - PYMONGO=2.3 install: - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install zlib1g zlib1g-dev; fi - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/x86_64-linux-gnu/libz.so /usr/lib/; fi - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pillow --use-mirrors ; true; fi + - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi + - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi - python setup.py install @@ -26,3 +25,8 @@ branches: only: - master - "0.8" +# # Get development headers for PIL +# before_install: +# - sudo apt-get update -qq +# - sudo apt-get build-dep -qq python-imaging +# - sudo apt-get install libjpeg-dev From b06f9dbf8d8da9b1f68007c9d9fd6e39de30b135 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 12 Apr 2013 10:02:55 +0000 Subject: [PATCH 174/464] Travis travis travis --- .travis.yml | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 5e20e65..3968166 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,7 +13,7 @@ env: - PYMONGO=2.3 install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi + - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pillow --use-mirrors ; true; fi - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi - python 
setup.py install diff --git a/setup.py b/setup.py index 6d9b51b..bcdb183 100644 --- a/setup.py +++ b/setup.py @@ -58,7 +58,7 @@ if sys.version_info[0] == 3: extra_opts['packages'].append("tests") extra_opts['package_data'] = {"tests": ["mongoengine.png"]} else: - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL'] + extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django=1.4.2', 'pillow'] extra_opts['packages'] = find_packages(exclude=('tests',)) setup(name='mongoengine', From cc5b60b004b19922ce2d59b33217059729134fe9 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 12 Apr 2013 10:30:52 +0000 Subject: [PATCH 175/464] Updated pymongo versions and pillow wont work --- .travis.yml | 11 +++-------- setup.py | 2 +- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/.travis.yml b/.travis.yml index 3968166..7fb55e3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,11 +9,11 @@ python: - "3.3" env: - PYMONGO=dev - - PYMONGO=2.4.1 - - PYMONGO=2.3 + - PYMONGO=2.5 + - PYMONGO=2.4.2 install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pillow --use-mirrors ; true; fi + - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi - python setup.py install @@ -25,8 +25,3 @@ branches: only: - master - "0.8" -# # Get development headers for PIL -# before_install: -# - sudo apt-get update -qq -# - sudo apt-get build-dep -qq python-imaging -# - sudo apt-get install libjpeg-dev diff --git a/setup.py b/setup.py index bcdb183..1863df5 100644 --- a/setup.py +++ b/setup.py @@ -58,7 +58,7 @@ if sys.version_info[0] == 3: extra_opts['packages'].append("tests") extra_opts['package_data'] = 
{"tests": ["mongoengine.png"]} else: - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django=1.4.2', 'pillow'] + extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django=1.4.2', 'PIL'] extra_opts['packages'] = find_packages(exclude=('tests',)) setup(name='mongoengine', From 19a7372ff9cbf8c192217bf6665794b6d210623b Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 12 Apr 2013 10:32:50 +0000 Subject: [PATCH 176/464] Fix test_require for Django --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 1863df5..54c2cdc 100644 --- a/setup.py +++ b/setup.py @@ -58,7 +58,7 @@ if sys.version_info[0] == 3: extra_opts['packages'].append("tests") extra_opts['package_data'] = {"tests": ["mongoengine.png"]} else: - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django=1.4.2', 'PIL'] + extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django==1.4.2', 'PIL'] extra_opts['packages'] = find_packages(exclude=('tests',)) setup(name='mongoengine', From 5f1d5ea056a16d1df76463869d694c2238d13459 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 12 Apr 2013 10:35:09 +0000 Subject: [PATCH 177/464] Try and fix wobbly test --- tests/test_all_warnings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_all_warnings.py b/tests/test_all_warnings.py index 7ef1f21..9b09034 100644 --- a/tests/test_all_warnings.py +++ b/tests/test_all_warnings.py @@ -76,7 +76,7 @@ class TestWarnings(unittest.TestCase): p2.parent.name = "Poppa Wilson" p2.save() - self.assertEqual(len(self.warning_list), 1) + self.assertTrue(len(self.warning_list) > 0) if len(self.warning_list) > 1: print self.warning_list warning = self.warning_list[0] From 49a7542b148686b2c4ab43ed1f87195138c651da Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 12 Apr 2013 12:55:03 +0000 Subject: [PATCH 178/464] Fixing cloning in python 3 --- docs/changelog.rst | 5 +++++ mongoengine/queryset.py | 13 
++++--------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index d93bf13..7957560 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -2,6 +2,11 @@ Changelog ========= +Changes in 0.7.10 +================= +- Int fields no longer unset in save when changed to 0 (#272) +- Fixed ReferenceField query chaining bug fixed (#254) + Changes in 0.7.9 ================ - Better fix handling for old style _types diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 6c61ab9..20b18b7 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -379,8 +379,8 @@ class QuerySet(object): c = self.__class__(self._document, self._collection_obj) copy_props = ('_initial_query', '_query_obj', '_where_clause', - '_loaded_fields', '_ordering', '_snapshot', - '_timeout', '_limit', '_skip', '_slave_okay', '_hint') + '_loaded_fields', '_ordering', '_snapshot', '_timeout', + '_limit', '_skip', '_slave_okay', '_hint') for prop in copy_props: val = getattr(self, prop) @@ -393,16 +393,11 @@ class QuerySet(object): if self._mongo_query is None: self._mongo_query = self._query_obj.to_query(self._document) if self._class_check: - if PY3: - query = SON(self._initial_query.items()) - query.update(self._mongo_query) - self._mongo_query = query - else: - self._mongo_query.update(self._initial_query) + self._mongo_query.update(self._initial_query) return self._mongo_query def ensure_index(self, key_or_list, drop_dups=False, background=False, - **kwargs): + **kwargs): """Ensure that the given indexes are in place. 
:param key_or_list: a single index key or a list of index keys (to From 836dc96f67a2773671da9844508843242a5d23d6 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 12 Apr 2013 12:56:15 +0000 Subject: [PATCH 179/464] Updated changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 7957560..ccf099f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.7.10 ================= +- Fixed cloning querysets in PY3 - Int fields no longer unset in save when changed to 0 (#272) - Fixed ReferenceField query chaining bug fixed (#254) From 37740dc01042ec7c8ce60ee365eae591de283032 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 12 Apr 2013 14:05:08 +0000 Subject: [PATCH 180/464] Added kwargs to doc.save to help interop with django (#223, #270) --- docs/changelog.rst | 1 + mongoengine/document.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index ccf099f..8619a63 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.7.10 ================= +- Added kwargs to doc.save to help interop with django (#223, #270) - Fixed cloning querysets in PY3 - Int fields no longer unset in save when changed to 0 (#272) - Fixed ReferenceField query chaining bug fixed (#254) diff --git a/mongoengine/document.py b/mongoengine/document.py index 7b3afaf..a251f58 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -164,7 +164,7 @@ class Document(BaseDocument): def save(self, safe=True, force_insert=False, validate=True, write_options=None, cascade=None, cascade_kwargs=None, - _refs=None): + _refs=None, **kwargs): """Save the :class:`~mongoengine.Document` to the database. If the document already exists, it will be updated, otherwise it will be created. 
From 63edd16a925b775251e04bbf362d3540748e138d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 12 Apr 2013 14:20:44 +0000 Subject: [PATCH 181/464] Resolve field name to db field name when using distinct(#260, #264, #269) --- AUTHORS | 4 +++- docs/changelog.rst | 1 + mongoengine/queryset.py | 7 +++++-- tests/test_queryset.py | 19 +++++++++++++++++++ 4 files changed, 28 insertions(+), 3 deletions(-) diff --git a/AUTHORS b/AUTHORS index fe62026..9d31c95 100644 --- a/AUTHORS +++ b/AUTHORS @@ -128,4 +128,6 @@ that much better: * Peter Teichman * Jakub Kot * Jorge Bastida - * Aleksandr Sorokoumov \ No newline at end of file + * Aleksandr Sorokoumov + * Yohan Graterol + * bool-dev \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 8619a63..667413e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.7.10 ================= +- Resolve field name to db field name when using distinct(#260, #264, #269) - Added kwargs to doc.save to help interop with django (#223, #270) - Fixed cloning querysets in PY3 - Int fields no longer unset in save when changed to 0 (#272) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 20b18b7..4aeff83 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1212,8 +1212,11 @@ class QuerySet(object): .. versionchanged:: 0.5 - Fixed handling references .. versionchanged:: 0.6 - Improved db_field refrence handling """ - return self._dereference(self._cursor.distinct(field), 1, - name=field, instance=self._document) + try: + field = self._fields_to_dbfields([field]).pop() + finally: + return self._dereference(self._cursor.distinct(field), 1, + name=field, instance=self._document) def only(self, *fields): """Load only a subset of this document's fields. 
:: diff --git a/tests/test_queryset.py b/tests/test_queryset.py index 43bb70b..88daa5f 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -2486,6 +2486,25 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(Foo.objects.distinct("bar"), [bar]) + def test_distinct_handles_db_field(self): + """Ensure that distinct resolves field name to db_field as expected. + """ + class Product(Document): + product_id = IntField(db_field='pid') + + Product.drop_collection() + + Product(product_id=1).save() + Product(product_id=2).save() + Product(product_id=1).save() + + self.assertEqual(set(Product.objects.distinct('product_id')), + set([1, 2])) + self.assertEqual(set(Product.objects.distinct('pid')), + set([1, 2])) + + Product.drop_collection() + def test_custom_manager(self): """Ensure that custom QuerySetManager instances work as expected. """ From 2f19b22bb227097beabbc37ef5abcd86bb621074 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 12 Apr 2013 14:25:43 +0000 Subject: [PATCH 182/464] Added dereference support for tuples (#250) --- AUTHORS | 3 ++- docs/changelog.rst | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 9d31c95..aeb672c 100644 --- a/AUTHORS +++ b/AUTHORS @@ -130,4 +130,5 @@ that much better: * Jorge Bastida * Aleksandr Sorokoumov * Yohan Graterol - * bool-dev \ No newline at end of file + * bool-dev + * Russ Weeks \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 667413e..5ecae62 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.7.10 ================= +- Added dereference support for tuples (#250) - Resolve field name to db field name when using distinct(#260, #264, #269) - Added kwargs to doc.save to help interop with django (#223, #270) - Fixed cloning querysets in PY3 From c9a5710554cca690f6ee2a574d03be05d93a3e78 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 12 Apr 2013 15:56:40 +0000 Subject: [PATCH 
183/464] Fixed order_by chaining issue (#265) --- docs/changelog.rst | 1 + mongoengine/queryset.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 5ecae62..65a5aaf 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.7.10 ================= +- Fixed order_by chaining issue (#265) - Added dereference support for tuples (#250) - Resolve field name to db field name when using distinct(#260, #264, #269) - Added kwargs to doc.save to help interop with django (#223, #270) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 4aeff83..727f56e 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1321,7 +1321,8 @@ class QuerySet(object): key_list.append((key, direction)) self._ordering = key_list - + if self._cursor_obj: + self._cursor_obj.sort(key_list) return self def explain(self, format=False): From d92f992c011cec92e983cf7a1da45dc31624cc92 Mon Sep 17 00:00:00 2001 From: "bool.dev" Date: Sun, 14 Apr 2013 13:48:11 +0530 Subject: [PATCH 184/464] Removed merge trackers in code, merged correctly now. --- mongoengine/queryset.py | 7 ------- tests/test_queryset.py | 13 ------------- 2 files changed, 20 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 8d18942..727f56e 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1212,18 +1212,11 @@ class QuerySet(object): .. versionchanged:: 0.5 - Fixed handling references .. 
versionchanged:: 0.6 - Improved db_field refrence handling """ -<<<<<<< HEAD - field = [field] - field = self._fields_to_dbfields(field).pop() - return self._dereference(self._cursor.distinct(field), 1, - name=field, instance=self._document) -======= try: field = self._fields_to_dbfields([field]).pop() finally: return self._dereference(self._cursor.distinct(field), 1, name=field, instance=self._document) ->>>>>>> upstream/master def only(self, *fields): """Load only a subset of this document's fields. :: diff --git a/tests/test_queryset.py b/tests/test_queryset.py index 7bb55b0..6b56926 100644 --- a/tests/test_queryset.py +++ b/tests/test_queryset.py @@ -2565,18 +2565,6 @@ class QuerySetTest(unittest.TestCase): """Ensure that distinct resolves field name to db_field as expected. """ class Product(Document): -<<<<<<< HEAD - product_id=IntField(db_field='pid') - - Product.drop_collection() - - product_one = Product(product_id=1).save() - product_two = Product(product_id=2).save() - product_one_dup = Product(product_id=1).save() - - self.assertEqual(set(Product.objects.distinct('product_id')), - set([1, 2])) -======= product_id = IntField(db_field='pid') Product.drop_collection() @@ -2589,7 +2577,6 @@ class QuerySetTest(unittest.TestCase): set([1, 2])) self.assertEqual(set(Product.objects.distinct('pid')), set([1, 2])) ->>>>>>> upstream/master Product.drop_collection() From b6977a88ea02fba91809c19276852bb691e1c6ca Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 15 Apr 2013 07:32:04 +0000 Subject: [PATCH 185/464] Explicitly check for Document instances when dereferencing (#261) --- AUTHORS | 3 ++- docs/changelog.rst | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index aeb672c..991b10a 100644 --- a/AUTHORS +++ b/AUTHORS @@ -131,4 +131,5 @@ that much better: * Aleksandr Sorokoumov * Yohan Graterol * bool-dev - * Russ Weeks \ No newline at end of file + * Russ Weeks + * Paul Swartz \ No newline at end of file diff --git 
a/docs/changelog.rst b/docs/changelog.rst index 65a5aaf..2ff2f6b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.7.10 ================= +- Explicitly check for Document instances when dereferencing (#261) - Fixed order_by chaining issue (#265) - Added dereference support for tuples (#250) - Resolve field name to db field name when using distinct(#260, #264, #269) From da7a8939dfce47d866641cdaa7ab29fc444db2c9 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 15 Apr 2013 07:41:04 +0000 Subject: [PATCH 186/464] Also check if a TopLevelMetaclass instance (#261) --- mongoengine/dereference.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index 4df1fe8..997b785 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -33,7 +33,7 @@ class DeReference(object): self.max_depth = max_depth doc_type = None - if instance and isinstance(instance, Document): + if instance and isinstance(instance, (Document, TopLevelDocumentMetaclass)): doc_type = instance._fields.get(name) if hasattr(doc_type, 'field'): doc_type = doc_type.field From 97a98f004530c078a3a0ce5e687808b3d250f362 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 15 Apr 2013 07:52:04 +0000 Subject: [PATCH 187/464] Only mark a field as changed if the value has changed (#258) --- tests/test_dereference.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/tests/test_dereference.py b/tests/test_dereference.py index d7438d2..0900154 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -185,8 +185,9 @@ class FieldTest(unittest.TestCase): # Migrate the data for g in Group.objects(): - g.author = g.author - g.members = g.members + # Explicitly mark as changed so resets + g._mark_as_changed('author') + g._mark_as_changed('members') g.save() group = Group.objects.first() @@ -997,7 +998,7 @@ class FieldTest(unittest.TestCase): msg 
= Message.objects.get(id=1) self.assertEqual(0, msg.comments[0].id) self.assertEqual(1, msg.comments[1].id) - + def test_tuples_as_tuples(self): """ Ensure that tuples remain tuples when they are @@ -1007,16 +1008,16 @@ class FieldTest(unittest.TestCase): class EnumField(BaseField): def __init__(self, **kwargs): super(EnumField,self).__init__(**kwargs) - + def to_mongo(self, value): return value - + def to_python(self, value): return tuple(value) - + class TestDoc(Document): items = ListField(EnumField()) - + TestDoc.drop_collection() tuples = [(100,'Testing')] doc = TestDoc() From 757ff31661282d0296e1927b694a7951bfee6e9a Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 15 Apr 2013 07:53:57 +0000 Subject: [PATCH 188/464] Updated changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 2ff2f6b..45b2e09 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.7.10 ================= +- Only mark a field as changed if the value has changed (#258) - Explicitly check for Document instances when dereferencing (#261) - Fixed order_by chaining issue (#265) - Added dereference support for tuples (#250) From b451cc567d1d3d461a1d15dace37fb79f88a5279 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 15 Apr 2013 07:59:24 +0000 Subject: [PATCH 189/464] Return '_id' as the key for document.id in _data dictionary * Re #146 Conflicts: mongoengine/base.py --- mongoengine/base.py | 6 +++--- tests/test_document.py | 20 ++++++++++++++++++-- 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 25c3bbd..7e6d0aa 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -192,7 +192,7 @@ class BaseField(object): return self # Get value from document instance if available, if not use default - value = instance._data.get(self.name) + value = instance._data.get(self.name or self.db_field) if value is None: value = 
self.default @@ -207,9 +207,9 @@ class BaseField(object): """ changed = False if (self.name not in instance._data or - instance._data[self.name] != value): + instance._data[self.name or self.db_field] != value): changed = True - instance._data[self.name] = value + instance._data[self.name or self.db_field] = value if changed and instance._initialised: instance._mark_as_changed(self.name) diff --git a/tests/test_document.py b/tests/test_document.py index 3e8d813..b5542c2 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -1529,8 +1529,10 @@ class DocumentTest(unittest.TestCase): doc.validate() keys = doc._data.keys() self.assertEqual(2, len(keys)) - self.assertTrue(None in keys) self.assertTrue('e' in keys) + # Ensure that the _id field has the right id + self.assertTrue('_id' in keys) + self.assertEqual(doc._data.get('_id'), doc.id) def test_save(self): """Ensure that a document may be saved in the database. @@ -3368,6 +3370,21 @@ class DocumentTest(unittest.TestCase): } ) ]), "1,2") + def test_data_contains_idfield(self): + """Ensure that asking for _data returns 'id' + """ + class Person(Document): + name = StringField() + + Person.drop_collection() + person = Person() + person.name = "Harry Potter" + person.save(cascade=False) + + person = Person.objects.first() + self.assertTrue('_id' in person._data.keys()) + self.assertEqual(person._data.get('_id'), person.id) + class ValidatorErrorTest(unittest.TestCase): @@ -3521,6 +3538,5 @@ class ValidatorErrorTest(unittest.TestCase): self.assertRaises(OperationError, change_shard_key) - if __name__ == '__main__': unittest.main() From 6186691259898c8e5d9da8983a74792d440622cb Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 15 Apr 2013 08:01:24 +0000 Subject: [PATCH 190/464] Updated changelog and AUTHORS (#255) --- AUTHORS | 3 ++- docs/changelog.rst | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 991b10a..aa223ed 100644 --- a/AUTHORS +++ b/AUTHORS @@ -132,4 
+132,5 @@ that much better: * Yohan Graterol * bool-dev * Russ Weeks - * Paul Swartz \ No newline at end of file + * Paul Swartz + * Sundar Raman \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 45b2e09..90d4d66 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.7.10 ================= +- Added "_id" to _data dictionary (#255) - Only mark a field as changed if the value has changed (#258) - Explicitly check for Document instances when dereferencing (#261) - Fixed order_by chaining issue (#265) From d80b1a774934ae1d79e8b43ded5fbff8e86c9d8a Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 15 Apr 2013 08:03:51 +0000 Subject: [PATCH 191/464] Test clean up (#255) --- tests/test_document.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/test_document.py b/tests/test_document.py index b5542c2..051dc2a 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -3377,9 +3377,7 @@ class DocumentTest(unittest.TestCase): name = StringField() Person.drop_collection() - person = Person() - person.name = "Harry Potter" - person.save(cascade=False) + Person(name="Harry Potter").save() person = Person.objects.first() self.assertTrue('_id' in person._data.keys()) From add0b463f5fc191161814a0eebf5d3e133ede6b0 Mon Sep 17 00:00:00 2001 From: Daniil Sharou Date: Tue, 16 Apr 2013 21:12:57 +0400 Subject: [PATCH 192/464] fix UnicodeEncodeError for dbref Fix "UnicodeEncodeError: 'ascii' codec can't encode character ..." 
error in case dbref contains non-ascii characters --- mongoengine/dereference.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index 997b785..e1b0a03 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -115,7 +115,7 @@ class DeReference(object): object_map = {} for col, dbrefs in self.reference_map.iteritems(): keys = object_map.keys() - refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys])) + refs = list(set([dbref for dbref in dbrefs if dbref.encode('utf-8') not in keys])) if hasattr(col, 'objects'): # We have a document class for the refs references = col.objects.in_bulk(refs) for key, doc in references.iteritems(): From cc0a2cbc6f20756c5edac3c0785ac132be68df45 Mon Sep 17 00:00:00 2001 From: Daniil Sharou Date: Tue, 16 Apr 2013 22:34:33 +0400 Subject: [PATCH 193/464] fix UnicodeEncodeError for dbref Fix "UnicodeEncodeError: 'ascii' codec can't encode character ..." error in case dbref contains non-ascii characters --- mongoengine/dereference.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index e1b0a03..ed75615 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -115,7 +115,7 @@ class DeReference(object): object_map = {} for col, dbrefs in self.reference_map.iteritems(): keys = object_map.keys() - refs = list(set([dbref for dbref in dbrefs if dbref.encode('utf-8') not in keys])) + refs = list(set([dbref for dbref in dbrefs if unicode(dbref).encode('utf-8') not in keys])) if hasattr(col, 'objects'): # We have a document class for the refs references = col.objects.in_bulk(refs) for key, doc in references.iteritems(): From a5257643596836f2f58fccce55157e83dc4b4f27 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 16 Apr 2013 20:12:01 +0000 Subject: [PATCH 194/464] Fixed clearing _changed_fields for complex nested embedded documents (#237, #239, 
#242) --- AUTHORS | 3 ++- docs/changelog.rst | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index aa223ed..1c72b31 100644 --- a/AUTHORS +++ b/AUTHORS @@ -133,4 +133,5 @@ that much better: * bool-dev * Russ Weeks * Paul Swartz - * Sundar Raman \ No newline at end of file + * Sundar Raman + * Benoit Louy \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 90d4d66..99aa49a 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.7.10 ================= +- Fixed clearing _changed_fields for complex nested embedded documents (#237, #239, #242) - Added "_id" to _data dictionary (#255) - Only mark a field as changed if the value has changed (#258) - Explicitly check for Document instances when dereferencing (#261) From 6fe074fb13606ad82bb392f523acf412f65a66df Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 16 Apr 2013 20:21:11 +0000 Subject: [PATCH 195/464] Fixed issue with numerical keys in MapField(EmbeddedDocumentField()) (#240) --- AUTHORS | 3 ++- docs/changelog.rst | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 1c72b31..01f6f22 100644 --- a/AUTHORS +++ b/AUTHORS @@ -134,4 +134,5 @@ that much better: * Russ Weeks * Paul Swartz * Sundar Raman - * Benoit Louy \ No newline at end of file + * Benoit Louy + * lraucy \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 99aa49a..269a422 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.7.10 ================= +- Fixed issue with numerical keys in MapField(EmbeddedDocumentField()) (#240) - Fixed clearing _changed_fields for complex nested embedded documents (#237, #239, #242) - Added "_id" to _data dictionary (#255) - Only mark a field as changed if the value has changed (#258) From d02de0798f8ee0b04bbb33e2ca0c71651f32af09 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 16 Apr 2013 
20:26:23 +0000 Subject: [PATCH 196/464] Documentation fix explaining adding a dummy backend for django (#172) --- docs/django.rst | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/django.rst b/docs/django.rst index 144baab..5859746 100644 --- a/docs/django.rst +++ b/docs/django.rst @@ -10,6 +10,16 @@ In your **settings.py** file, ignore the standard database settings (unless you also plan to use the ORM in your project), and instead call :func:`~mongoengine.connect` somewhere in the settings module. +.. note :: + If you are not using another Database backend make sure you add a dummy + backend, by adding the following to ``settings.py``:: + + DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.dummy' + } + } + Authentication ============== MongoEngine includes a Django authentication backend, which uses MongoDB. The From 1f9ec0c888f6a5ea5a6e83267d361b917e7df104 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 16 Apr 2013 20:30:40 +0000 Subject: [PATCH 197/464] Added Django sessions TTL support (#224) --- AUTHORS | 3 ++- docs/changelog.rst | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 01f6f22..a689ec6 100644 --- a/AUTHORS +++ b/AUTHORS @@ -135,4 +135,5 @@ that much better: * Paul Swartz * Sundar Raman * Benoit Louy - * lraucy \ No newline at end of file + * lraucy + * hellysmile \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 269a422..df15855 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.7.10 ================= +- Added Django sessions TTL support (#224) - Fixed issue with numerical keys in MapField(EmbeddedDocumentField()) (#240) - Fixed clearing _changed_fields for complex nested embedded documents (#237, #239, #242) - Added "_id" to _data dictionary (#255) From 3a85422e8f332484984a0b519f8cd614b7445148 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 16 Apr 2013 20:35:29 +0000 Subject: [PATCH 
198/464] Added 64-bit integer support (#251) --- AUTHORS | 3 ++- docs/changelog.rst | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index a689ec6..370f082 100644 --- a/AUTHORS +++ b/AUTHORS @@ -136,4 +136,5 @@ that much better: * Sundar Raman * Benoit Louy * lraucy - * hellysmile \ No newline at end of file + * hellysmile + * Jaepil Jeong \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index df15855..8ff95b5 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.7.10 ================= +- Added 64-bit integer support (#251) - Added Django sessions TTL support (#224) - Fixed issue with numerical keys in MapField(EmbeddedDocumentField()) (#240) - Fixed clearing _changed_fields for complex nested embedded documents (#237, #239, #242) From b562e209d17afda19413c2971e69e2633f1f09e5 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 16 Apr 2013 20:46:02 +0000 Subject: [PATCH 199/464] Updated EmailField length to support long domains (#243) --- docs/changelog.rst | 1 + mongoengine/fields.py | 2 +- tests/test_fields.py | 18 ++++++++++++++++-- 3 files changed, 18 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 8ff95b5..91d4da0 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.7.10 ================= +- Updated EmailField length to support long domains (#243) - Added 64-bit integer support (#251) - Added Django sessions TTL support (#224) - Fixed issue with numerical keys in MapField(EmbeddedDocumentField()) (#240) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index c40491b..2d1ee71 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -143,7 +143,7 @@ class EmailField(StringField): EMAIL_REGEX = re.compile( r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom 
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"' # quoted-string - r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE # domain + r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,253}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE # domain ) def validate(self, value): diff --git a/tests/test_fields.py b/tests/test_fields.py index 1e693e8..55ac6fb 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -2371,12 +2371,26 @@ class FieldTest(unittest.TestCase): self.assertTrue(1 in error_dict['comments']) self.assertTrue('content' in error_dict['comments'][1]) self.assertEqual(error_dict['comments'][1]['content'], - u'Field is required') - + u'Field is required') post.comments[1].content = 'here we go' post.validate() + def test_email_field(self): + class User(Document): + email = EmailField() + + user = User(email="ross@example.com") + self.assertTrue(user.validate() is None) + + user = User(email=("Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5S" + "ucictfqpdkK9iS1zeFw8sg7s7cwAF7suIfUfeyueLpfosjn3" + "aJIazqqWkm7.net")) + self.assertTrue(user.validate() is None) + + user = User(email='me@localhost') + self.assertRaises(ValidationError, user.validate) + def test_email_field_honors_regex(self): class User(Document): email = EmailField(regex=r'\w+@example.com') From b4d87d91282247be21d46107935f31891a1802d6 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 16 Apr 2013 20:50:34 +0000 Subject: [PATCH 200/464] Updated changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 91d4da0..0443f74 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.7.10 ================= +- Allow construction using positional parameters (#268) - Updated EmailField length to support long domains (#243) - Added 64-bit integer support (#251) - Added Django sessions TTL support (#224) From c2d77f51bba6e75d427f11845ad61ccca85b9803 Mon Sep 
17 00:00:00 2001 From: daniil Date: Wed, 17 Apr 2013 12:14:07 +0400 Subject: [PATCH 201/464] test for #278 issue --- tests/test_dereference.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/tests/test_dereference.py b/tests/test_dereference.py index 0900154..8caefd3 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import with_statement import unittest @@ -1029,3 +1030,26 @@ class FieldTest(unittest.TestCase): self.assertTrue(tuple(x.items[0]) in tuples) self.assertTrue(x.items[0] in tuples) + def test_non_ascii_pk(self): + """ + Ensure that dbref conversion to string does not fail when + non-ascii characters are used in primary key + """ + class Brand(Document): + title = StringField(max_length=255, primary_key=True) + + class BrandGroup(Document): + title = StringField(max_length=255, primary_key=True) + brands = SortedListField(ReferenceField("Brand", dbref=True)) + + Brand.drop_collection() + BrandGroup.drop_collection() + + brand1 = Brand(title="Moschino").save() + brand2 = Brand(title=u"Денис Симачёв").save() + + BrandGroup(title="top_brands", brands=[brand1, brand2]).save() + brand_groups = BrandGroup.objects().all() + + self.assertEqual(2, len([brand for bg in brand_groups for brand in bg.brands])) + From 420376d036708a1077b535ca7dfaa9471c168496 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 17 Apr 2013 14:27:33 +0000 Subject: [PATCH 202/464] Merge fixes --- mongoengine/base/document.py | 4 ++-- mongoengine/queryset/queryset.py | 8 ++++++++ tests/queryset/queryset.py | 20 ++++++++++++-------- 3 files changed, 22 insertions(+), 10 deletions(-) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index ebb3410..7ec672f 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -30,7 +30,7 @@ class BaseDocument(object): _dynamic_lock = True _initialised = False - def __init__(self, __auto_convert=True, 
*args, **values): + def __init__(self, *args, **values): """ Initialise a document or embedded document @@ -46,7 +46,7 @@ class BaseDocument(object): if name in values: raise TypeError("Multiple values for keyword argument '" + name + "'") values[name] = value - + __auto_convert = values.pop("__auto_convert", True) signals.pre_init.send(self.__class__, document=self, values=values) self._data = {} diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index c299190..28a9618 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1138,8 +1138,13 @@ class QuerySet(object): if self._hint != -1: self._cursor_obj.hint(self._hint) + return self._cursor_obj + def __deepcopy__(self, memo): + """Essential for chained queries with ReferenceFields involved""" + return self.clone() + @property def _query(self): if self._mongo_query is None: @@ -1302,6 +1307,9 @@ class QuerySet(object): except: pass key_list.append((key, direction)) + + if self._cursor_obj: + self._cursor_obj.sort(key_list) return key_list def _get_scalar(self, doc): diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index da0e89a..1dccdb1 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -249,6 +249,10 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(list(A.objects.none()), []) self.assertEqual(list(A.objects.none().all()), []) + def test_chaining(self): + class A(Document): + s = StringField() + class B(Document): ref = ReferenceField(A) boolfield = BooleanField(default=False) @@ -282,7 +286,7 @@ class QuerySetTest(unittest.TestCase): write_options = {"fsync": True} author, created = self.Person.objects.get_or_create( - name='Test User', write_options=write_options) + name='Test User', write_options=write_options) author.save(write_options=write_options) self.Person.objects.update(set__name='Ross', @@ -1475,7 +1479,6 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - def 
test_set_list_embedded_documents(self): class Author(EmbeddedDocument): @@ -1533,11 +1536,11 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() blog_post_3 = BlogPost(title="Blog Post #3", - published_date=datetime(2010, 1, 6, 0, 0 ,0)) + published_date=datetime(2010, 1, 6, 0, 0, 0)) blog_post_2 = BlogPost(title="Blog Post #2", - published_date=datetime(2010, 1, 5, 0, 0 ,0)) + published_date=datetime(2010, 1, 5, 0, 0, 0)) blog_post_4 = BlogPost(title="Blog Post #4", - published_date=datetime(2010, 1, 7, 0, 0 ,0)) + published_date=datetime(2010, 1, 7, 0, 0, 0)) blog_post_1 = BlogPost(title="Blog Post #1", published_date=None) blog_post_3.save() @@ -1563,11 +1566,11 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() blog_post_1 = BlogPost(title="A", - published_date=datetime(2010, 1, 6, 0, 0 ,0)) + published_date=datetime(2010, 1, 6, 0, 0, 0)) blog_post_2 = BlogPost(title="B", - published_date=datetime(2010, 1, 6, 0, 0 ,0)) + published_date=datetime(2010, 1, 6, 0, 0, 0)) blog_post_3 = BlogPost(title="C", - published_date=datetime(2010, 1, 7, 0, 0 ,0)) + published_date=datetime(2010, 1, 7, 0, 0, 0)) blog_post_2.save() blog_post_3.save() @@ -1604,6 +1607,7 @@ class QuerySetTest(unittest.TestCase): qs = self.Person.objects.all().limit(10) qs = qs.order_by('-age') + ages = [p.age for p in qs] self.assertEqual(ages, [40, 30, 20]) From ec639cd6e9acb10f43228487b634e74f11b54bcd Mon Sep 17 00:00:00 2001 From: Nicolas Cortot Date: Wed, 17 Apr 2013 16:19:53 +0200 Subject: [PATCH 203/464] Fix datetime call in UserManager --- mongoengine/django/auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/django/auth.py b/mongoengine/django/auth.py index 024ae9c..d22f086 100644 --- a/mongoengine/django/auth.py +++ b/mongoengine/django/auth.py @@ -142,7 +142,7 @@ class UserManager(models.Manager): """ Creates and saves a User with the given username, e-mail and password. 
""" - now = datetime.datetime.now() + now = datetime_now() # Normalize the address by lowercasing the domain part of the email # address. From dcf3c86dce243962e9edc232c0ac4bea63f81ec5 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 17 Apr 2013 15:07:57 +0000 Subject: [PATCH 204/464] Using "id" in data not "_id" as its a mapping of fieldnames (#255) --- docs/changelog.rst | 2 +- mongoengine/base.py | 13 +++++++------ tests/test_document.py | 16 ++++++++-------- 3 files changed, 16 insertions(+), 15 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 0443f74..1496495 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -10,7 +10,7 @@ Changes in 0.7.10 - Added Django sessions TTL support (#224) - Fixed issue with numerical keys in MapField(EmbeddedDocumentField()) (#240) - Fixed clearing _changed_fields for complex nested embedded documents (#237, #239, #242) -- Added "_id" to _data dictionary (#255) +- Added "id" back to _data dictionary (#255) - Only mark a field as changed if the value has changed (#258) - Explicitly check for Document instances when dereferencing (#261) - Fixed order_by chaining issue (#265) diff --git a/mongoengine/base.py b/mongoengine/base.py index 0f13643..a7eb17b 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -192,7 +192,7 @@ class BaseField(object): return self # Get value from document instance if available, if not use default - value = instance._data.get(self.name or self.db_field) + value = instance._data.get(self.name) if value is None: value = self.default @@ -207,9 +207,9 @@ class BaseField(object): """ changed = False if (self.name not in instance._data or - instance._data[self.name or self.db_field] != value): + instance._data[self.name] != value): changed = True - instance._data[self.name or self.db_field] = value + instance._data[self.name] = value if changed and instance._initialised: instance._mark_as_changed(self.name) @@ -825,6 +825,7 @@ class 
TopLevelDocumentMetaclass(DocumentMetaclass): if not new_class._meta.get('id_field'): new_class._meta['id_field'] = 'id' new_class._fields['id'] = ObjectIdField(db_field='_id') + new_class._fields['id'].name = 'id' new_class.id = new_class._fields['id'] # Merge in exceptions with parent hierarchy @@ -915,7 +916,7 @@ class BaseDocument(object): if name in values: raise TypeError("Multiple values for keyword argument '" + name + "'") values[name] = value - + signals.pre_init.send(self.__class__, document=self, values=values) self._data = {} @@ -1138,7 +1139,7 @@ class BaseDocument(object): self._changed_fields = [] EmbeddedDocumentField = _import_class("EmbeddedDocumentField") for field_name, field in self._fields.iteritems(): - if (isinstance(field, ComplexBaseField) and + if (isinstance(field, ComplexBaseField) and isinstance(field.field, EmbeddedDocumentField)): field_value = getattr(self, field_name, None) if field_value: @@ -1331,7 +1332,7 @@ class BaseDocument(object): def __iter__(self): if 'id' in self._fields and 'id' not in self._fields_ordered: return iter(('id', ) + self._fields_ordered) - + return iter(self._fields_ordered) def __getitem__(self, name): diff --git a/tests/test_document.py b/tests/test_document.py index ce00703..5b30a96 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -1386,27 +1386,27 @@ class DocumentTest(unittest.TestCase): person = self.Person(name="Test User", age=30) self.assertEqual(person.name, "Test User") self.assertEqual(person.age, 30) - + def test_positional_creation(self): """Ensure that document may be created using positional arguments. """ person = self.Person("Test User", 42) self.assertEqual(person.name, "Test User") self.assertEqual(person.age, 42) - + def test_mixed_creation(self): """Ensure that document may be created using mixed arguments. 
""" person = self.Person("Test User", age=42) self.assertEqual(person.name, "Test User") self.assertEqual(person.age, 42) - + def test_bad_mixed_creation(self): """Ensure that document gives correct error when duplicating arguments """ def construct_bad_instance(): return self.Person("Test User", 42, name="Bad User") - + self.assertRaises(TypeError, construct_bad_instance) def test_to_dbref(self): @@ -1553,8 +1553,8 @@ class DocumentTest(unittest.TestCase): self.assertEqual(2, len(keys)) self.assertTrue('e' in keys) # Ensure that the _id field has the right id - self.assertTrue('_id' in keys) - self.assertEqual(doc._data.get('_id'), doc.id) + self.assertTrue('id' in keys) + self.assertEqual(doc._data.get('id'), doc.id) def test_save(self): """Ensure that a document may be saved in the database. @@ -3402,8 +3402,8 @@ class DocumentTest(unittest.TestCase): Person(name="Harry Potter").save() person = Person.objects.first() - self.assertTrue('_id' in person._data.keys()) - self.assertEqual(person._data.get('_id'), person.id) + self.assertTrue('id' in person._data.keys()) + self.assertEqual(person._data.get('id'), person.id) def test_complex_nesting_document_and_embedded_document(self): From 03bfd018621233629467d131c8af52764c05e559 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 17 Apr 2013 15:54:32 +0000 Subject: [PATCH 205/464] Updated field iteration for py2.5 --- mongoengine/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index a7eb17b..1e1a021 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -912,7 +912,7 @@ class BaseDocument(object): # We only want named arguments. 
field = iter(self._fields_ordered) for value in args: - name = next(field) + name = field.next() if name in values: raise TypeError("Multiple values for keyword argument '" + name + "'") values[name] = value From 073091a06e823b95234679d8f7a349714bf67bae Mon Sep 17 00:00:00 2001 From: Nicolas Cortot Date: Wed, 17 Apr 2013 21:45:54 +0200 Subject: [PATCH 206/464] Do not fail on delete() when blinker is not available --- mongoengine/queryset/queryset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 28a9618..15c8e63 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -366,7 +366,7 @@ class QuerySet(object): queryset = self.clone() doc = queryset._document - has_delete_signal = ( + has_delete_signal = signals.signals_available and ( signals.pre_delete.has_receivers_for(self._document) or signals.post_delete.has_receivers_for(self._document)) From 3f4992329831d063448c18d7a3005204e289eb65 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 18 Apr 2013 13:21:36 +0000 Subject: [PATCH 207/464] Update AUTHORS and changelog (#278) --- AUTHORS | 3 ++- docs/changelog.rst | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 370f082..1262669 100644 --- a/AUTHORS +++ b/AUTHORS @@ -137,4 +137,5 @@ that much better: * Benoit Louy * lraucy * hellysmile - * Jaepil Jeong \ No newline at end of file + * Jaepil Jeong + * Daniil Sharou diff --git a/docs/changelog.rst b/docs/changelog.rst index 1496495..386daa5 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.7.10 ================= +- Fix UnicodeEncodeError for dbref (#278) - Allow construction using positional parameters (#268) - Updated EmailField length to support long domains (#243) - Added 64-bit integer support (#251) From 11085863033b71aa4026dcd3fe2c26adea54f8a4 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 18 Apr 
2013 13:26:35 +0000 Subject: [PATCH 208/464] Updated queryset --- tests/queryset/queryset.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 1dccdb1..37670b0 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -389,6 +389,8 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(post.comments[1].by, 'jane') self.assertEqual(post.comments[1].votes, 8) + def test_update_using_positional_operator_matches_first(self): + # Currently the $ operator only applies to the first matched item in # the query From 5de4812477841e6458752562b0a936126c0ced6c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 18 Apr 2013 13:38:36 +0000 Subject: [PATCH 209/464] Updating AUTHORS (#283) --- AUTHORS | 2 ++ 1 file changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index b00b069..e388a04 100644 --- a/AUTHORS +++ b/AUTHORS @@ -156,3 +156,5 @@ that much better: * Jared Forsyth * Kenneth Falck * Lukasz Balcerzak + * Nicolas Cortot + From 6dcd7006d05d7ce749786a9680496c5b221a85fe Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 19 Apr 2013 12:47:19 +0000 Subject: [PATCH 210/464] Fix test --- tests/test_dereference.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_dereference.py b/tests/test_dereference.py index 8caefd3..a3517d2 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -1020,7 +1020,7 @@ class FieldTest(unittest.TestCase): items = ListField(EnumField()) TestDoc.drop_collection() - tuples = [(100,'Testing')] + tuples = [(100, 'Testing')] doc = TestDoc() doc.items = tuples doc.save() @@ -1040,7 +1040,7 @@ class FieldTest(unittest.TestCase): class BrandGroup(Document): title = StringField(max_length=255, primary_key=True) - brands = SortedListField(ReferenceField("Brand", dbref=True)) + brands = ListField(ReferenceField("Brand", dbref=True)) Brand.drop_collection() BrandGroup.drop_collection() From 
e3600ef4de6ab13090f18a6155d5f515b7ae2e8d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 19 Apr 2013 12:53:46 +0000 Subject: [PATCH 211/464] Updated version --- mongoengine/__init__.py | 2 +- python-mongoengine.spec | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index b67512d..64adb92 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -12,7 +12,7 @@ from signals import * __all__ = (document.__all__ + fields.__all__ + connection.__all__ + queryset.__all__ + signals.__all__) -VERSION = (0, 7, 9) +VERSION = (0, 7, 10) def get_version(): diff --git a/python-mongoengine.spec b/python-mongoengine.spec index b1ec336..ed4a872 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.7.9 +Version: 0.7.10 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB From 6affbbe86570a47d8110dd9c967ea2babe856bec Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 19 Apr 2013 13:08:46 +0000 Subject: [PATCH 212/464] Update changelog location --- python-mongoengine.spec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python-mongoengine.spec b/python-mongoengine.spec index ed4a872..eaf478d 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -51,4 +51,4 @@ rm -rf $RPM_BUILD_ROOT # %{python_sitearch}/* %changelog -* See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html \ No newline at end of file +* See: http://docs.mongoengine.org/en/latest/changelog.html \ No newline at end of file From 485047f20b1f9b3a52ce0b11a426505ad03437eb Mon Sep 17 00:00:00 2001 From: Nicolas Cortot Date: Wed, 17 Apr 2013 21:38:11 +0200 Subject: [PATCH 213/464] Custom User Model for Django 1.5 --- docs/django.rst | 36 +++++++++ mongoengine/django/auth.py | 3 + mongoengine/django/mongo_auth/__init__.py | 0 mongoengine/django/mongo_auth/models.py 
| 90 +++++++++++++++++++++++ tests/test_django.py | 52 ++++++++++++- 5 files changed, 180 insertions(+), 1 deletion(-) create mode 100644 mongoengine/django/mongo_auth/__init__.py create mode 100644 mongoengine/django/mongo_auth/models.py diff --git a/docs/django.rst b/docs/django.rst index 6f27b90..2d58f95 100644 --- a/docs/django.rst +++ b/docs/django.rst @@ -42,6 +42,42 @@ The :mod:`~mongoengine.django.auth` module also contains a .. versionadded:: 0.1.3 +Custom User model +================= +Django 1.5 introduced `Custom user Models +` +which can be used as an alternative the Mongoengine authentication backend. + +The main advantage of this option is that other components relying on +:mod:`django.contrib.auth` and supporting the new swappable user model are more +likely to work. For example, you can use the ``createsuperuser`` management +command as usual. + +To enable the custom User model in Django, add ``'mongoengine.django.mongo_auth'`` +in your ``INSTALLED_APPS`` and set ``'mongo_auth.MongoUser'`` as the custom user +user model to use. In your **settings.py** file you will have:: + + INSTALLED_APPS = ( + ... + 'django.contrib.auth', + 'mongoengine.django.mongo_auth', + ... + ) + + AUTH_USER_MODEL = 'mongo_auth.MongoUser' + +An additional ``MONGOENGINE_USER_DOCUMENT`` setting enables you to replace the +:class:`~mongoengine.django.auth.User` class with another class of your choice:: + + MONGOENGINE_USER_DOCUMENT = 'mongoengine.django.auth.User' + +The custom :class:`User` must be a :class:`~mongoengine.Document` class, but +otherwise has the same requirements as a standard custom user model, +as specified in the `Django Documentation +`. +In particular, the custom class must define :attr:`USERNAME_FIELD` and +:attr:`REQUIRED_FIELDS` attributes. + Sessions ======== Django allows the use of different backend stores for its sessions. 
MongoEngine diff --git a/mongoengine/django/auth.py b/mongoengine/django/auth.py index d22f086..371f0e3 100644 --- a/mongoengine/django/auth.py +++ b/mongoengine/django/auth.py @@ -209,6 +209,9 @@ class User(Document): date_joined = DateTimeField(default=datetime_now, verbose_name=_('date joined')) + USERNAME_FIELD = 'username' + REQUIRED_FIELDS = ['email'] + meta = { 'allow_inheritance': True, 'indexes': [ diff --git a/mongoengine/django/mongo_auth/__init__.py b/mongoengine/django/mongo_auth/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/mongoengine/django/mongo_auth/models.py b/mongoengine/django/mongo_auth/models.py new file mode 100644 index 0000000..9629e64 --- /dev/null +++ b/mongoengine/django/mongo_auth/models.py @@ -0,0 +1,90 @@ +from importlib import import_module + +from django.conf import settings +from django.contrib.auth.models import UserManager +from django.core.exceptions import ImproperlyConfigured +from django.db import models +from django.utils.translation import ugettext_lazy as _ + + +MONGOENGINE_USER_DOCUMENT = getattr( + settings, 'MONGOENGINE_USER_DOCUMENT', 'mongoengine.django.auth.User') + + +class MongoUserManager(UserManager): + """A User manager wich allows the use of MongoEngine documents in Django. + + To use the manager, you must tell django.contrib.auth to use MongoUser as + the user model. In you settings.py, you need: + + INSTALLED_APPS = ( + ... + 'django.contrib.auth', + 'mongoengine.django.mongo_auth', + ... + ) + AUTH_USER_MODEL = 'mongo_auth.MongoUser' + + Django will use the model object to access the custom Manager, which will + replace the original queryset with MongoEngine querysets. + + By default, mongoengine.django.auth.User will be used to store users. You + can specify another document class in MONGOENGINE_USER_DOCUMENT in your + settings.py. 
+ + The User Document class has the same requirements as a standard custom user + model: https://docs.djangoproject.com/en/dev/topics/auth/customizing/ + + In particular, the User Document class must define USERNAME_FIELD and + REQUIRED_FIELDS. + + `AUTH_USER_MODEL` has been added in Django 1.5. + + """ + + def contribute_to_class(self, model, name): + super(MongoUserManager, self).contribute_to_class(model, name) + self.dj_model = self.model + self.model = self._get_user_document() + + self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD + username = models.CharField(_('username'), max_length=30, unique=True) + username.contribute_to_class(self.dj_model, self.dj_model.USERNAME_FIELD) + + self.dj_model.REQUIRED_FIELDS = self.model.REQUIRED_FIELDS + for name in self.dj_model.REQUIRED_FIELDS: + field = models.CharField(_(name), max_length=30) + field.contribute_to_class(self.dj_model, name) + + def _get_user_document(self): + try: + name = MONGOENGINE_USER_DOCUMENT + dot = name.rindex('.') + module = import_module(name[:dot]) + return getattr(module, name[dot + 1:]) + except ImportError: + raise ImproperlyConfigured("Error importing %s, please check " + "settings.MONGOENGINE_USER_DOCUMENT" + % name) + + def get(self, *args, **kwargs): + try: + return self.get_query_set().get(*args, **kwargs) + except self.model.DoesNotExist: + # ModelBackend expects this exception + raise self.dj_model.DoesNotExist + + @property + def db(self): + raise NotImplementedError + + def get_empty_query_set(self): + return self.model.objects.none() + + def get_query_set(self): + return self.model.objects + + +class MongoUser(models.Model): + objects = MongoUserManager() + diff --git a/tests/test_django.py b/tests/test_django.py index dceeba2..6f4b6ea 100644 --- a/tests/test_django.py +++ b/tests/test_django.py @@ -14,9 +14,16 @@ try: from django.conf import settings from django.core.paginator import Paginator - settings.configure(USE_TZ=True) + settings.configure( + USE_TZ=True, + 
INSTALLED_APPS=('django.contrib.auth', 'mongoengine.django.mongo_auth'), + AUTH_USER_MODEL=('mongo_auth.MongoUser'), + ) + from django.contrib.auth import authenticate, get_user_model from django.contrib.sessions.tests import SessionTestsMixin + from mongoengine.django.auth import User + from mongoengine.django.mongo_auth.models import MongoUser, MongoUserManager from mongoengine.django.sessions import SessionStore, MongoSession except Exception, err: if PY3: @@ -156,6 +163,7 @@ class QuerySetTest(unittest.TestCase): rendered = template.render(Context({'users': users})) self.assertEqual(rendered, 'AB ABCD CD') + class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): backend = SessionStore @@ -184,5 +192,47 @@ class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): session = SessionStore(key) self.assertTrue('test_expire' in session, 'Session has expired before it is expected') + +class MongoAuthTest(unittest.TestCase): + user_data = { + 'username': 'user', + 'email': 'user@example.com', + 'password': 'test', + } + + def setUp(self): + if PY3: + raise SkipTest('django does not have Python 3 support') + connect(db='mongoenginetest') + User.drop_collection() + super(MongoAuthTest, self).setUp() + + def test_user_model(self): + self.assertEqual(get_user_model(), MongoUser) + + def test_user_manager(self): + manager = get_user_model()._default_manager + self.assertIsInstance(manager, MongoUserManager) + + def test_user_manager_exception(self): + manager = get_user_model()._default_manager + self.assertRaises(MongoUser.DoesNotExist, manager.get, + username='not found') + + def test_create_user(self): + manager = get_user_model()._default_manager + user = manager.create_user(**self.user_data) + self.assertIsInstance(user, User) + db_user = User.objects.get(username='user') + self.assertEqual(user.id, db_user.id) + + def test_authenticate(self): + get_user_model()._default_manager.create_user(**self.user_data) + user = authenticate(username='user', 
password='fail') + self.assertIsNone(user) + user = authenticate(username='user', password='test') + db_user = User.objects.get(username='user') + self.assertEqual(user.id, db_user.id) + if __name__ == '__main__': unittest.main() From dff44ef74e8e03bc82b82fa86582af45848ceabf Mon Sep 17 00:00:00 2001 From: Nicolas Cortot Date: Fri, 19 Apr 2013 17:50:15 +0200 Subject: [PATCH 214/464] Fixing warning which prevented tests from succeeding Now that we're importing the auth classes in the tests, no warning can be raised or test_dbref_reference_field_future_warning will fail. --- mongoengine/django/auth.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/django/auth.py b/mongoengine/django/auth.py index 371f0e3..0839b14 100644 --- a/mongoengine/django/auth.py +++ b/mongoengine/django/auth.py @@ -96,7 +96,7 @@ class Permission(Document): Three basic permissions -- add, change and delete -- are automatically created for each Django model. """ name = StringField(max_length=50, verbose_name=_('username')) - content_type = ReferenceField(ContentType) + content_type = ReferenceField(ContentType, dbref=True) codename = StringField(max_length=100, verbose_name=_('codename')) # FIXME: don't access field of the other class # unique_with=['content_type__app_label', 'content_type__model']) @@ -128,7 +128,7 @@ class Group(Document): """ name = StringField(max_length=80, unique=True, verbose_name=_('name')) # permissions = models.ManyToManyField(Permission, verbose_name=_('permissions'), blank=True) - permissions = ListField(ReferenceField(Permission, verbose_name=_('permissions'), required=False)) + permissions = ListField(ReferenceField(Permission, verbose_name=_('permissions'), required=False, dbref=True)) class Meta: verbose_name = _('group') From d39d10b9fb09c0a69dc00eb226db0429048e6b42 Mon Sep 17 00:00:00 2001 From: Nicolas Cortot Date: Fri, 19 Apr 2013 18:28:45 +0200 Subject: [PATCH 215/464] Tests should not require Django 1.5 to run --- 
tests/test_django.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tests/test_django.py b/tests/test_django.py index 6f4b6ea..01a105a 100644 --- a/tests/test_django.py +++ b/tests/test_django.py @@ -20,10 +20,14 @@ try: AUTH_USER_MODEL=('mongo_auth.MongoUser'), ) - from django.contrib.auth import authenticate, get_user_model + try: + from django.contrib.auth import authenticate, get_user_model + from mongoengine.django.auth import User + from mongoengine.django.mongo_auth.models import MongoUser, MongoUserManager + DJ15 = True + except Exception: + DJ15 = False from django.contrib.sessions.tests import SessionTestsMixin - from mongoengine.django.auth import User - from mongoengine.django.mongo_auth.models import MongoUser, MongoUserManager from mongoengine.django.sessions import SessionStore, MongoSession except Exception, err: if PY3: @@ -203,6 +207,8 @@ class MongoAuthTest(unittest.TestCase): def setUp(self): if PY3: raise SkipTest('django does not have Python 3 support') + if not DJ15: + raise SkipTest('mongo_auth requires Django 1.5') connect(db='mongoenginetest') User.drop_collection() super(MongoAuthTest, self).setUp() From 681b74a41cd040e5e6b7e7a9cf1fb505964289f7 Mon Sep 17 00:00:00 2001 From: Nicolas Cortot Date: Fri, 19 Apr 2013 18:53:42 +0200 Subject: [PATCH 216/464] Travis: adding Django-1.5.1 to env --- .travis.yml | 15 ++++++++++++--- setup.py | 2 +- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1b9f5b7..ad2678f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,12 +7,21 @@ python: - "3.2" - "3.3" env: - - PYMONGO=dev - - PYMONGO=2.5 - - PYMONGO=2.4.2 + - PYMONGO=dev DJANGO=1.5.1 + - PYMONGO=dev DJANGO=1.4.2 + - PYMONGO=2.5 DJANGO=1.5.1 + - PYMONGO=2.5 DJANGO=1.4.2 + - PYMONGO=2.4.2 DJANGO=1.4.2 +matrix: + exclude: + - python: "2.6" + env: PYMONGO=dev DJANGO=1.5.1 + - python: "2.6" + env: PYMONGO=2.5 DJANGO=1.5.1 install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp 
/usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi + - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install django==$DJANGO --use-mirrors ; true; fi - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi - python setup.py install diff --git a/setup.py b/setup.py index ba538fa..c6270d9 100644 --- a/setup.py +++ b/setup.py @@ -58,7 +58,7 @@ if sys.version_info[0] == 3: extra_opts['packages'].append("tests") extra_opts['package_data'] = {"tests": ["fields/mongoengine.png"]} else: - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django==1.4.2', 'PIL'] + extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL'] extra_opts['packages'] = find_packages(exclude=('tests',)) setup(name='mongoengine', From 80db9e771671135e9260d088d4139e01b04b678c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 22 Apr 2013 13:06:29 +0000 Subject: [PATCH 217/464] Updated travis --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 1b9f5b7..0e47cb7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,4 +23,3 @@ notifications: branches: only: - master - - "0.8" From c16e6d74e6fe1271b628af476c4f8db2027aff66 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 22 Apr 2013 15:07:15 +0000 Subject: [PATCH 218/464] Updated connection to use MongoClient (#262, #274) --- docs/changelog.rst | 1 + docs/guide/connecting.rst | 2 +- docs/upgrade.rst | 79 ++++++++++++++++------ mongoengine/connection.py | 8 +-- mongoengine/django/sessions.py | 2 +- mongoengine/document.py | 47 ++++++------- mongoengine/queryset/queryset.py | 111 ++++++++++++++++--------------- tests/document/indexes.py | 14 +++- tests/queryset/queryset.py | 33 +++++---- tests/test_connection.py | 9 ++- 10 files changed, 
181 insertions(+), 125 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 4547000..d7d010c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- Updated connection to use MongoClient (#262, #274) - Fixed db_alias and inherited Documents (#143) - Documentation update for document errors (#124) - Deprecated `get_or_create` (#35) diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index ebd61a9..de6794c 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -29,7 +29,7 @@ name - just supply the uri as the :attr:`host` to ReplicaSets =========== -MongoEngine now supports :func:`~pymongo.replica_set_connection.ReplicaSetConnection` +MongoEngine supports :class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient` to use them please use a URI style connection and provide the `replicaSet` name in the connection kwargs. diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 8724503..356f510 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -1,15 +1,15 @@ -========= +######### Upgrading -========= +######### 0.7 to 0.8 -========== +********** Inheritance ------------ +=========== Data Model -~~~~~~~~~~ +---------- The inheritance model has changed, we no longer need to store an array of :attr:`types` with the model we can just use the classname in :attr:`_cls`. @@ -44,7 +44,7 @@ inherited classes like so: :: Document Definition -~~~~~~~~~~~~~~~~~~~ +------------------- The default for inheritance has changed - its now off by default and :attr:`_cls` will not be stored automatically with the class. So if you extend @@ -77,7 +77,7 @@ the case and the data is set only in the ``document._data`` dictionary: :: AttributeError: 'Animal' object has no attribute 'size' Querysets -~~~~~~~~~ +========= Querysets now return clones and should no longer be considered editable in place. 
This brings us in line with how Django's querysets work and removes a @@ -98,8 +98,47 @@ update your code like so: :: mammals = Animal.objects(type="mammal").filter(order="Carnivora") # The final queryset is assgined to mammals [m for m in mammals] # This will return all carnivores +Client +====== +PyMongo 2.4 came with a new connection client; MongoClient_ and started the +depreciation of the old :class:`~pymongo.connection.Connection`. MongoEngine +now uses the latest `MongoClient` for connections. By default operations were +`safe` but if you turned them off or used the connection directly this will +impact your queries. + +Querysets +--------- + +Safe +^^^^ + +`safe` has been depreciated in the new MongoClient connection. Please use +`write_concern` instead. As `safe` always defaulted as `True` normally no code +change is required. To disable confirmation of the write just pass `{"w": 0}` +eg: :: + + # Old + Animal(name="Dinasour").save(safe=False) + + # new code: + Animal(name="Dinasour").save(write_concern={"w": 0}) + +Write Concern +^^^^^^^^^^^^^ + +`write_options` has been replaced with `write_concern` to bring it inline with +pymongo. To upgrade simply rename any instances where you used the `write_option` +keyword to `write_concern` like so:: + + # Old code: + Animal(name="Dinasour").save(write_options={"w": 2}) + + # new code: + Animal(name="Dinasour").save(write_concern={"w": 2}) + + Indexes -------- +======= Index methods are no longer tied to querysets but rather to the document class. Although `QuerySet._ensure_indexes` and `QuerySet.ensure_index` still exist. @@ -107,17 +146,19 @@ They should be replaced with :func:`~mongoengine.Document.ensure_indexes` / :func:`~mongoengine.Document.ensure_index`. SequenceFields --------------- +============== :class:`~mongoengine.fields.SequenceField` now inherits from `BaseField` to allow flexible storage of the calculated value. As such MIN and MAX settings are no longer handled. +.. 
_MongoClient: http://blog.mongodb.org/post/36666163412/introducing-mongoclient + 0.6 to 0.7 -========== +********** Cascade saves -------------- +============= Saves will raise a `FutureWarning` if they cascade and cascade hasn't been set to True. This is because in 0.8 it will default to False. If you require @@ -135,7 +176,7 @@ via `save` eg :: Remember: cascading saves **do not** cascade through lists. ReferenceFields ---------------- +=============== ReferenceFields now can store references as ObjectId strings instead of DBRefs. This will become the default in 0.8 and if `dbref` is not set a `FutureWarning` @@ -164,7 +205,7 @@ migrate :: item_frequencies ----------------- +================ In the 0.6 series we added support for null / zero / false values in item_frequencies. A side effect was to return keys in the value they are @@ -173,14 +214,14 @@ updated to handle native types rather than strings keys for the results of item frequency queries. BinaryFields ------------- +============ Binary fields have been updated so that they are native binary types. If you previously were doing `str` comparisons with binary field values you will have to update and wrap the value in a `str`. 0.5 to 0.6 -========== +********** Embedded Documents - if you had a `pk` field you will have to rename it from `_id` to `pk` as pk is no longer a property of Embedded Documents. @@ -200,13 +241,13 @@ don't define :attr:`allow_inheritance` in their meta. You may need to update pyMongo to 2.0 for use with Sharding. 0.4 to 0.5 -=========== +********** There have been the following backwards incompatibilities from 0.4 to 0.5. The main areas of changed are: choices in fields, map_reduce and collection names. Choice options: ---------------- +=============== Are now expected to be an iterable of tuples, with the first element in each tuple being the actual value to be stored. The second element is the @@ -214,7 +255,7 @@ human-readable name for the option. 
PyMongo / MongoDB ------------------ +================= map reduce now requires pymongo 1.11+- The pymongo `merge_output` and `reduce_output` parameters, have been depreciated. @@ -228,7 +269,7 @@ such the following have been changed: Default collection naming -------------------------- +========================= Previously it was just lowercase, its now much more pythonic and readable as its lowercase and underscores, previously :: diff --git a/mongoengine/connection.py b/mongoengine/connection.py index a47be44..3c53ea3 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -1,5 +1,5 @@ import pymongo -from pymongo import Connection, ReplicaSetConnection, uri_parser +from pymongo import MongoClient, MongoReplicaSetClient, uri_parser __all__ = ['ConnectionError', 'connect', 'register_connection', @@ -112,15 +112,15 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): conn_settings['slaves'] = slaves conn_settings.pop('read_preference', None) - connection_class = Connection + connection_class = MongoClient if 'replicaSet' in conn_settings: conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) - # Discard port since it can't be used on ReplicaSetConnection + # Discard port since it can't be used on MongoReplicaSetClient conn_settings.pop('port', None) # Discard replicaSet if not base string if not isinstance(conn_settings['replicaSet'], basestring): conn_settings.pop('replicaSet', None) - connection_class = ReplicaSetConnection + connection_class = MongoReplicaSetClient try: _connections[alias] = connection_class(**conn_settings) diff --git a/mongoengine/django/sessions.py b/mongoengine/django/sessions.py index 0d199a6..29583f5 100644 --- a/mongoengine/django/sessions.py +++ b/mongoengine/django/sessions.py @@ -88,7 +88,7 @@ class SessionStore(SessionBase): s.session_data = self._get_session(no_load=must_create) s.expire_date = self.get_expiry_date() try: - s.save(force_insert=must_create, safe=True) + 
s.save(force_insert=must_create) except OperationError: if must_create: raise CreateError diff --git a/mongoengine/document.py b/mongoengine/document.py index 9057075..54b55df 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -142,7 +142,7 @@ class Document(BaseDocument): options.get('size') != max_size: msg = (('Cannot create collection "%s" as a capped ' 'collection as it already exists') - % cls._collection) + % cls._collection) raise InvalidCollectionError(msg) else: # Create the collection as a capped collection @@ -158,28 +158,24 @@ class Document(BaseDocument): cls.ensure_indexes() return cls._collection - def save(self, safe=True, force_insert=False, validate=True, clean=True, - write_options=None, cascade=None, cascade_kwargs=None, + def save(self, force_insert=False, validate=True, clean=True, + write_concern=None, cascade=None, cascade_kwargs=None, _refs=None, **kwargs): """Save the :class:`~mongoengine.Document` to the database. If the document already exists, it will be updated, otherwise it will be created. - If ``safe=True`` and the operation is unsuccessful, an - :class:`~mongoengine.OperationError` will be raised. - - :param safe: check if the operation succeeded before returning :param force_insert: only try to create a new document, don't allow updates of existing documents :param validate: validates the document; set to ``False`` to skip. :param clean: call the document clean method, requires `validate` to be True. - :param write_options: Extra keyword arguments are passed down to + :param write_concern: Extra keyword arguments are passed down to :meth:`~pymongo.collection.Collection.save` OR :meth:`~pymongo.collection.Collection.insert` which will be used as options for the resultant ``getLastError`` command. 
For example, - ``save(..., write_options={w: 2, fsync: True}, ...)`` will + ``save(..., write_concern={w: 2, fsync: True}, ...)`` will wait until at least two servers have recorded the write and will force an fsync on the primary server. :param cascade: Sets the flag for cascading saves. You can set a @@ -205,8 +201,8 @@ class Document(BaseDocument): if validate: self.validate(clean=clean) - if not write_options: - write_options = {} + if not write_concern: + write_concern = {} doc = self.to_mongo() @@ -216,11 +212,9 @@ class Document(BaseDocument): collection = self._get_collection() if created: if force_insert: - object_id = collection.insert(doc, safe=safe, - **write_options) + object_id = collection.insert(doc, **write_concern) else: - object_id = collection.save(doc, safe=safe, - **write_options) + object_id = collection.save(doc, **write_concern) else: object_id = doc['_id'] updates, removals = self._delta() @@ -247,7 +241,7 @@ class Document(BaseDocument): update_query["$unset"] = removals if updates or removals: last_error = collection.update(select_dict, update_query, - upsert=upsert, safe=safe, **write_options) + upsert=upsert, **write_concern) created = is_new_object(last_error) warn_cascade = not cascade and 'cascade' not in self._meta @@ -255,10 +249,9 @@ class Document(BaseDocument): if cascade is None else cascade) if cascade: kwargs = { - "safe": safe, "force_insert": force_insert, "validate": validate, - "write_options": write_options, + "write_concern": write_concern, "cascade": cascade } if cascade_kwargs: # Allow granular control over cascades @@ -305,7 +298,7 @@ class Document(BaseDocument): if ref and ref_id not in _refs: if warn_cascade: msg = ("Cascading saves will default to off in 0.8, " - "please explicitly set `.save(cascade=True)`") + "please explicitly set `.save(cascade=True)`") warnings.warn(msg, FutureWarning) _refs.append(ref_id) kwargs["_refs"] = _refs @@ -344,16 +337,21 @@ class Document(BaseDocument): # Need to add shard key to 
query, or you get an error return self._qs.filter(**self._object_key).update_one(**kwargs) - def delete(self, safe=False): + def delete(self, **write_concern): """Delete the :class:`~mongoengine.Document` from the database. This will only take effect if the document has been previously saved. - :param safe: check if the operation succeeded before returning + :param write_concern: Extra keyword arguments are passed down which + will be used as options for the resultant + ``getLastError`` command. For example, + ``save(..., write_concern={w: 2, fsync: True}, ...)`` will + wait until at least two servers have recorded the write and + will force an fsync on the primary server. """ signals.pre_delete.send(self.__class__, document=self) try: - self._qs.filter(**self._object_key).delete(safe=safe) + self._qs.filter(**self._object_key).delete(write_concern=write_concern) except pymongo.errors.OperationFailure, err: message = u'Could not delete document (%s)' % err.message raise OperationError(message) @@ -428,9 +426,8 @@ class Document(BaseDocument): .. versionchanged:: 0.6 Now chainable """ id_field = self._meta['id_field'] - obj = self._qs.filter( - **{id_field: self[id_field]} - ).limit(1).select_related(max_depth=max_depth) + obj = self._qs.filter(**{id_field: self[id_field]} + ).limit(1).select_related(max_depth=max_depth) if obj: obj = obj[0] else: diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 15c8e63..71332b9 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -221,7 +221,7 @@ class QuerySet(object): """ return self._document(**kwargs).save() - def get_or_create(self, write_options=None, auto_save=True, + def get_or_create(self, write_concern=None, auto_save=True, *q_objs, **query): """Retrieve unique object or create, if it doesn't exist. 
Returns a tuple of ``(object, created)``, where ``object`` is the retrieved or @@ -239,9 +239,9 @@ class QuerySet(object): don't accidently duplicate data when using this method. This is now scheduled to be removed before 1.0 - :param write_options: optional extra keyword arguments used if we + :param write_concern: optional extra keyword arguments used if we have to create a new document. - Passes any write_options onto :meth:`~mongoengine.Document.save` + Passes any write_concern onto :meth:`~mongoengine.Document.save` :param auto_save: if the object is to be saved automatically if not found. @@ -266,7 +266,7 @@ class QuerySet(object): doc = self._document(**query) if auto_save: - doc.save(write_options=write_options) + doc.save(write_concern=write_concern) return doc, True def first(self): @@ -279,18 +279,13 @@ class QuerySet(object): result = None return result - def insert(self, doc_or_docs, load_bulk=True, safe=False, - write_options=None): + def insert(self, doc_or_docs, load_bulk=True, write_concern=None): """bulk insert documents - If ``safe=True`` and the operation is unsuccessful, an - :class:`~mongoengine.OperationError` will be raised. - :param docs_or_doc: a document or list of documents to be inserted :param load_bulk (optional): If True returns the list of document instances - :param safe: check if the operation succeeded before returning - :param write_options: Extra keyword arguments are passed down to + :param write_concern: Extra keyword arguments are passed down to :meth:`~pymongo.collection.Collection.insert` which will be used as options for the resultant ``getLastError`` command. 
For example, @@ -305,9 +300,8 @@ class QuerySet(object): """ Document = _import_class('Document') - if not write_options: - write_options = {} - write_options.update({'safe': safe}) + if not write_concern: + write_concern = {} docs = doc_or_docs return_one = False @@ -319,7 +313,7 @@ class QuerySet(object): for doc in docs: if not isinstance(doc, self._document): msg = ("Some documents inserted aren't instances of %s" - % str(self._document)) + % str(self._document)) raise OperationError(msg) if doc.pk and not doc._created: msg = "Some documents have ObjectIds use doc.update() instead" @@ -328,7 +322,7 @@ class QuerySet(object): signals.pre_bulk_insert.send(self._document, documents=docs) try: - ids = self._collection.insert(raw, **write_options) + ids = self._collection.insert(raw, **write_concern) except pymongo.errors.OperationFailure, err: message = 'Could not save document (%s)' if re.match('^E1100[01] duplicate key', unicode(err)): @@ -340,7 +334,7 @@ class QuerySet(object): if not load_bulk: signals.post_bulk_insert.send( - self._document, documents=docs, loaded=False) + self._document, documents=docs, loaded=False) return return_one and ids[0] or ids documents = self.in_bulk(ids) @@ -348,7 +342,7 @@ class QuerySet(object): for obj_id in ids: results.append(documents.get(obj_id)) signals.post_bulk_insert.send( - self._document, documents=results, loaded=True) + self._document, documents=results, loaded=True) return return_one and results[0] or results def count(self): @@ -358,10 +352,15 @@ class QuerySet(object): return 0 return self._cursor.count(with_limit_and_skip=True) - def delete(self, safe=False): + def delete(self, write_concern=None): """Delete the documents matched by the query. - :param safe: check if the operation succeeded before returning + :param write_concern: Extra keyword arguments are passed down which + will be used as options for the resultant + ``getLastError`` command. 
For example, + ``save(..., write_concern={w: 2, fsync: True}, ...)`` will + wait until at least two servers have recorded the write and + will force an fsync on the primary server. """ queryset = self.clone() doc = queryset._document @@ -370,11 +369,14 @@ class QuerySet(object): signals.pre_delete.has_receivers_for(self._document) or signals.post_delete.has_receivers_for(self._document)) + if not write_concern: + write_concern = {} + # Handle deletes where skips or limits have been applied or has a # delete signal if queryset._skip or queryset._limit or has_delete_signal: for doc in queryset: - doc.delete(safe=safe) + doc.delete(write_concern=write_concern) return delete_rules = doc._meta.get('delete_rules') or {} @@ -386,7 +388,7 @@ class QuerySet(object): if rule == DENY and document_cls.objects( **{field_name + '__in': self}).count() > 0: msg = ("Could not delete document (%s.%s refers to it)" - % (document_cls.__name__, field_name)) + % (document_cls.__name__, field_name)) raise OperationError(msg) for rule_entry in delete_rules: @@ -396,36 +398,38 @@ class QuerySet(object): ref_q = document_cls.objects(**{field_name + '__in': self}) ref_q_count = ref_q.count() if (doc != document_cls and ref_q_count > 0 - or (doc == document_cls and ref_q_count > 0)): - ref_q.delete(safe=safe) + or (doc == document_cls and ref_q_count > 0)): + ref_q.delete(write_concern=write_concern) elif rule == NULLIFY: document_cls.objects(**{field_name + '__in': self}).update( - safe_update=safe, - **{'unset__%s' % field_name: 1}) + write_concern=write_concern, **{'unset__%s' % field_name: 1}) elif rule == PULL: document_cls.objects(**{field_name + '__in': self}).update( - safe_update=safe, - **{'pull_all__%s' % field_name: self}) + write_concern=write_concern, + **{'pull_all__%s' % field_name: self}) - queryset._collection.remove(queryset._query, safe=safe) + queryset._collection.remove(queryset._query, write_concern=write_concern) - def update(self, safe_update=True, upsert=False, 
multi=True, - write_options=None, **update): - """Perform an atomic update on the fields matched by the query. When - ``safe_update`` is used, the number of affected documents is returned. + def update(self, upsert=False, multi=True, write_concern=None, **update): + """Perform an atomic update on the fields matched by the query. - :param safe_update: check if the operation succeeded before returning :param upsert: Any existing document with that "_id" is overwritten. - :param write_options: extra keyword arguments for - :meth:`~pymongo.collection.Collection.update` + :param multi: Update multiple documents. + :param write_concern: Extra keyword arguments are passed down which + will be used as options for the resultant + ``getLastError`` command. For example, + ``save(..., write_concern={w: 2, fsync: True}, ...)`` will + wait until at least two servers have recorded the write and + will force an fsync on the primary server. + :param update: Django-style update keyword arguments .. versionadded:: 0.2 """ if not update: raise OperationError("No update parameters, would remove data") - if not write_options: - write_options = {} + if not write_concern: + write_concern = {} queryset = self.clone() query = queryset._query @@ -441,8 +445,7 @@ class QuerySet(object): try: ret = queryset._collection.update(query, update, multi=multi, - upsert=upsert, safe=safe_update, - **write_options) + upsert=upsert, **write_concern) if ret is not None and 'n' in ret: return ret['n'] except pymongo.errors.OperationFailure, err: @@ -451,21 +454,21 @@ class QuerySet(object): raise OperationError(message) raise OperationError(u'Update failed (%s)' % unicode(err)) - def update_one(self, safe_update=True, upsert=False, write_options=None, - **update): - """Perform an atomic update on first field matched by the query. When - ``safe_update`` is used, the number of affected documents is returned. 
+ def update_one(self, upsert=False, write_concern=None, **update): + """Perform an atomic update on first field matched by the query. - :param safe_update: check if the operation succeeded before returning :param upsert: Any existing document with that "_id" is overwritten. - :param write_options: extra keyword arguments for - :meth:`~pymongo.collection.Collection.update` + :param write_concern: Extra keyword arguments are passed down which + will be used as options for the resultant + ``getLastError`` command. For example, + ``save(..., write_concern={w: 2, fsync: True}, ...)`` will + wait until at least two servers have recorded the write and + will force an fsync on the primary server. :param update: Django-style update keyword arguments .. versionadded:: 0.2 """ - return self.update(safe_update=True, upsert=upsert, multi=False, - write_options=None, **update) + return self.update(upsert=upsert, multi=False, write_concern=None, **update) def with_id(self, object_id): """Retrieve the object matching the id provided. 
Uses `object_id` only @@ -498,7 +501,7 @@ class QuerySet(object): if self._scalar: for doc in docs: doc_map[doc['_id']] = self._get_scalar( - self._document._from_son(doc)) + self._document._from_son(doc)) elif self._as_pymongo: for doc in docs: doc_map[doc['_id']] = self._get_as_pymongo(doc) @@ -523,10 +526,10 @@ class QuerySet(object): c = self.__class__(self._document, self._collection_obj) copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj', - '_where_clause', '_loaded_fields', '_ordering', '_snapshot', - '_timeout', '_class_check', '_slave_okay', '_read_preference', - '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce', - '_limit', '_skip', '_hint', '_auto_dereference') + '_where_clause', '_loaded_fields', '_ordering', '_snapshot', + '_timeout', '_class_check', '_slave_okay', '_read_preference', + '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce', + '_limit', '_skip', '_hint', '_auto_dereference') for prop in copy_props: val = getattr(self, prop) diff --git a/tests/document/indexes.py b/tests/document/indexes.py index ff08ef1..fea63a5 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -314,19 +314,27 @@ class IndexesTest(unittest.TestCase): """ class User(Document): meta = { + 'allow_inheritance': True, 'indexes': ['user_guid'], 'auto_create_index': False } user_guid = StringField(required=True) + class MongoUser(User): + pass + User.drop_collection() - u = User(user_guid='123') - u.save() + User(user_guid='123').save() + MongoUser(user_guid='123').save() - self.assertEqual(1, User.objects.count()) + self.assertEqual(2, User.objects.count()) info = User.objects._collection.index_information() self.assertEqual(info.keys(), ['_id_']) + + User.ensure_indexes() + info = User.objects._collection.index_information() + self.assertEqual(info.keys(), ['_cls_1_user_guid_1', '_id_']) User.drop_collection() def test_embedded_document_index(self): diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 
37670b0..42e98ae 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -278,24 +278,24 @@ class QuerySetTest(unittest.TestCase): query = query.filter(boolfield=True) self.assertEquals(query.count(), 1) - def test_update_write_options(self): - """Test that passing write_options works""" + def test_update_write_concern(self): + """Test that passing write_concern works""" self.Person.drop_collection() - write_options = {"fsync": True} + write_concern = {"fsync": True} author, created = self.Person.objects.get_or_create( - name='Test User', write_options=write_options) - author.save(write_options=write_options) + name='Test User', write_concern=write_concern) + author.save(write_concern=write_concern) self.Person.objects.update(set__name='Ross', - write_options=write_options) + write_concern=write_concern) author = self.Person.objects.first() self.assertEqual(author.name, 'Ross') - self.Person.objects.update_one(set__name='Test User', write_options=write_options) + self.Person.objects.update_one(set__name='Test User', write_concern=write_concern) author = self.Person.objects.first() self.assertEqual(author.name, 'Test User') @@ -592,10 +592,17 @@ class QuerySetTest(unittest.TestCase): blogs.append(Blog(title="post %s" % i, posts=[post1, post2])) Blog.objects.insert(blogs, load_bulk=False) - self.assertEqual(q, 1) # 1 for the insert + self.assertEqual(q, 1) # 1 for the insert + + Blog.drop_collection() + with query_counter() as q: + self.assertEqual(q, 0) + + Blog.ensure_indexes() + self.assertEqual(q, 1) Blog.objects.insert(blogs) - self.assertEqual(q, 3) # 1 for insert, and 1 for in bulk fetch (3 in total) + self.assertEqual(q, 3) # 1 for insert, and 1 for in bulk fetch (3 in total) Blog.drop_collection() @@ -619,7 +626,7 @@ class QuerySetTest(unittest.TestCase): self.assertRaises(OperationError, throw_operation_error) # Test can insert new doc - new_post = Blog(title="code", id=ObjectId()) + new_post = Blog(title="code123", id=ObjectId()) 
Blog.objects.insert(new_post) # test handles other classes being inserted @@ -655,13 +662,13 @@ class QuerySetTest(unittest.TestCase): Blog.objects.insert([blog1, blog2]) def throw_operation_error_not_unique(): - Blog.objects.insert([blog2, blog3], safe=True) + Blog.objects.insert([blog2, blog3]) self.assertRaises(NotUniqueError, throw_operation_error_not_unique) self.assertEqual(Blog.objects.count(), 2) - Blog.objects.insert([blog2, blog3], write_options={ - 'continue_on_error': True}) + Blog.objects.insert([blog2, blog3], write_concern={"w": 0, + 'continue_on_error': True}) self.assertEqual(Blog.objects.count(), 3) def test_get_changed_fields_query_count(self): diff --git a/tests/test_connection.py b/tests/test_connection.py index 5b9743d..4b8a3d1 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -10,7 +10,6 @@ from bson.tz_util import utc from mongoengine import * import mongoengine.connection from mongoengine.connection import get_db, get_connection, ConnectionError -from mongoengine.context_managers import switch_db class ConnectionTest(unittest.TestCase): @@ -26,7 +25,7 @@ class ConnectionTest(unittest.TestCase): connect('mongoenginetest') conn = get_connection() - self.assertTrue(isinstance(conn, pymongo.connection.Connection)) + self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) db = get_db() self.assertTrue(isinstance(db, pymongo.database.Database)) @@ -34,7 +33,7 @@ class ConnectionTest(unittest.TestCase): connect('mongoenginetest2', alias='testdb') conn = get_connection('testdb') - self.assertTrue(isinstance(conn, pymongo.connection.Connection)) + self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) def test_connect_uri(self): """Ensure that the connect() method works properly with uri's @@ -52,7 +51,7 @@ class ConnectionTest(unittest.TestCase): connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest') conn = get_connection() - self.assertTrue(isinstance(conn, 
pymongo.connection.Connection)) + self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) db = get_db() self.assertTrue(isinstance(db, pymongo.database.Database)) @@ -65,7 +64,7 @@ class ConnectionTest(unittest.TestCase): self.assertRaises(ConnectionError, get_connection) conn = get_connection('testdb') - self.assertTrue(isinstance(conn, pymongo.connection.Connection)) + self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) db = get_db('testdb') self.assertTrue(isinstance(db, pymongo.database.Database)) From efad628a87e3bb3e4ec55f1ddcaab68853917e36 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 22 Apr 2013 15:32:11 +0000 Subject: [PATCH 219/464] Objects queryset manager now inherited (#256) --- docs/changelog.rst | 1 + mongoengine/base/metaclasses.py | 10 ++++----- tests/queryset/queryset.py | 36 +++++++++++++++++++++++++++++++++ 3 files changed, 42 insertions(+), 5 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index d7d010c..01f5a54 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- Objects manager now inherited (#256) - Updated connection to use MongoClient (#262, #274) - Fixed db_alias and inherited Documents (#143) - Documentation update for document errors (#124) diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index a53744d..2704011 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -315,8 +315,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): # may set allow_inheritance to False simple_class = all([b._meta.get('abstract') for b in flattened_bases if hasattr(b, '_meta')]) - if (not simple_class and meta['allow_inheritance'] == False and - not meta['abstract']): + if (not simple_class and meta['allow_inheritance'] is False and + not meta['abstract']): raise ValueError('Only direct subclasses of Document may set ' '"allow_inheritance" to False') @@ -339,9 +339,9 @@ class 
TopLevelDocumentMetaclass(DocumentMetaclass): if callable(collection): new_class._meta['collection'] = collection(new_class) - # Provide a default queryset unless one has been set - manager = attrs.get('objects', QuerySetManager()) - new_class.objects = manager + # Provide a default queryset unless exists or one has been set + if not hasattr(new_class, 'objects'): + new_class.objects = QuerySetManager() # Validate the fields and set primary key if needed for field_name, field in new_class._fields.iteritems(): diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 42e98ae..0d3ebf3 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -2233,6 +2233,42 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(0, Foo.with_inactive.count()) self.assertEqual(1, Foo.objects.count()) + def test_inherit_objects(self): + + class Foo(Document): + meta = {'allow_inheritance': True} + active = BooleanField(default=True) + + @queryset_manager + def objects(klass, queryset): + return queryset(active=True) + + class Bar(Foo): + pass + + Bar.drop_collection() + Bar.objects.create(active=False) + self.assertEqual(0, Bar.objects.count()) + + def test_inherit_objects_override(self): + + class Foo(Document): + meta = {'allow_inheritance': True} + active = BooleanField(default=True) + + @queryset_manager + def objects(klass, queryset): + return queryset(active=True) + + class Bar(Foo): + @queryset_manager + def objects(klass, queryset): + return queryset(active=False) + + Bar.drop_collection() + Bar.objects.create(active=False) + self.assertEqual(0, Foo.objects.count()) + self.assertEqual(1, Bar.objects.count()) def test_query_value_conversion(self): """Ensure that query values are properly converted when necessary. 
From 0d0befe23e7ec141e4c81d7f01a3ba7e57c43865 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 22 Apr 2013 16:19:55 +0000 Subject: [PATCH 220/464] Removed __len__ from queryset (#247) --- docs/changelog.rst | 3 +- docs/code/tumblelog.py | 2 +- docs/upgrade.rst | 16 +++++++ mongoengine/queryset/queryset.py | 9 ++-- tests/document/instance.py | 32 +++++++------- tests/queryset/queryset.py | 74 ++++++++++++++++---------------- tests/queryset/transform.py | 8 ++-- tests/queryset/visitor.py | 8 ++-- 8 files changed, 82 insertions(+), 70 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 01f5a54..da1424e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,7 +4,8 @@ Changelog Changes in 0.8.X ================ -- Objects manager now inherited (#256) +- Removed __len__ from queryset (#247) +- Objects queryset manager now inherited (#256) - Updated connection to use MongoClient (#262, #274) - Fixed db_alias and inherited Documents (#143) - Documentation update for document errors (#124) diff --git a/docs/code/tumblelog.py b/docs/code/tumblelog.py index 6ba1eee..0e40e89 100644 --- a/docs/code/tumblelog.py +++ b/docs/code/tumblelog.py @@ -45,7 +45,7 @@ print 'ALL POSTS' print for post in Post.objects: print post.title - print '=' * len(post.title) + print '=' * post.title.count() if isinstance(post, TextPost): print post.content diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 356f510..4f549d6 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -79,6 +79,9 @@ the case and the data is set only in the ``document._data`` dictionary: :: Querysets ========= +Attack of the clones +-------------------- + Querysets now return clones and should no longer be considered editable in place. This brings us in line with how Django's querysets work and removes a long running gotcha. 
If you edit your querysets inplace you will have to @@ -98,6 +101,19 @@ update your code like so: :: mammals = Animal.objects(type="mammal").filter(order="Carnivora") # The final queryset is assgined to mammals [m for m in mammals] # This will return all carnivores +No more len +----------- + +If you ever did len(queryset) it previously did a count() under the covers, this +caused some unusual issues - so now it has been removed in favour of the +explicit `queryset.count()` to update:: + + # Old code + len(Animal.objects(type="mammal")) + + # New code + Animal.objects(type="mammal").count()) + Client ====== PyMongo 2.4 came with a new connection client; MongoClient_ and started the diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 71332b9..cc0b70f 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -118,9 +118,6 @@ class QuerySet(object): queryset.rewind() return queryset - def __len__(self): - return self.count() - def __getitem__(self, key): """Support skip and limit using getitem and slicing syntax. 
""" @@ -149,12 +146,12 @@ class QuerySet(object): elif isinstance(key, int): if queryset._scalar: return queryset._get_scalar( - queryset._document._from_son(queryset._cursor[key], - _auto_dereference=self._auto_dereference)) + queryset._document._from_son(queryset._cursor[key], + _auto_dereference=self._auto_dereference)) if queryset._as_pymongo: return queryset._get_as_pymongo(queryset._cursor.next()) return queryset._document._from_son(queryset._cursor[key], - _auto_dereference=self._auto_dereference) + _auto_dereference=self._auto_dereference) raise AttributeError def __repr__(self): diff --git a/tests/document/instance.py b/tests/document/instance.py index 07991c1..1adc140 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -65,11 +65,11 @@ class InstanceTest(unittest.TestCase): for _ in range(10): Log().save() - self.assertEqual(len(Log.objects), 10) + self.assertEqual(Log.objects.count(), 10) # Check that extra documents don't increase the size Log().save() - self.assertEqual(len(Log.objects), 10) + self.assertEqual(Log.objects.count(), 10) options = Log.objects._collection.options() self.assertEqual(options['capped'], True) @@ -1040,9 +1040,9 @@ class InstanceTest(unittest.TestCase): """ person = self.Person(name="Test User", age=30) person.save() - self.assertEqual(len(self.Person.objects), 1) + self.assertEqual(self.Person.objects.count(), 1) person.delete() - self.assertEqual(len(self.Person.objects), 0) + self.assertEqual(self.Person.objects.count(), 0) def test_save_custom_id(self): """Ensure that a document may be saved with a custom _id. 
@@ -1356,12 +1356,12 @@ class InstanceTest(unittest.TestCase): post.save() reviewer.delete() - self.assertEqual(len(BlogPost.objects), 1) # No effect on the BlogPost + self.assertEqual(BlogPost.objects.count(), 1) # No effect on the BlogPost self.assertEqual(BlogPost.objects.get().reviewer, None) # Delete the Person, which should lead to deletion of the BlogPost, too author.delete() - self.assertEqual(len(BlogPost.objects), 0) + self.assertEqual(BlogPost.objects.count(), 0) def test_reverse_delete_rule_with_document_inheritance(self): """Ensure that a referenced document is also deleted upon deletion @@ -1391,12 +1391,12 @@ class InstanceTest(unittest.TestCase): post.save() reviewer.delete() - self.assertEqual(len(BlogPost.objects), 1) + self.assertEqual(BlogPost.objects.count(), 1) self.assertEqual(BlogPost.objects.get().reviewer, None) # Delete the Writer should lead to deletion of the BlogPost author.delete() - self.assertEqual(len(BlogPost.objects), 0) + self.assertEqual(BlogPost.objects.count(), 0) def test_reverse_delete_rule_cascade_and_nullify_complex_field(self): """Ensure that a referenced document is also deleted upon deletion for @@ -1425,12 +1425,12 @@ class InstanceTest(unittest.TestCase): # Deleting the reviewer should have no effect on the BlogPost reviewer.delete() - self.assertEqual(len(BlogPost.objects), 1) + self.assertEqual(BlogPost.objects.count(), 1) self.assertEqual(BlogPost.objects.get().reviewers, []) # Delete the Person, which should lead to deletion of the BlogPost, too author.delete() - self.assertEqual(len(BlogPost.objects), 0) + self.assertEqual(BlogPost.objects.count(), 0) def test_reverse_delete_rule_cascade_triggers_pre_delete_signal(self): ''' ensure the pre_delete signal is triggered upon a cascading deletion @@ -1498,7 +1498,7 @@ class InstanceTest(unittest.TestCase): f.delete() - self.assertEqual(len(Bar.objects), 1) # No effect on the BlogPost + self.assertEqual(Bar.objects.count(), 1) # No effect on the BlogPost 
self.assertEqual(Bar.objects.get().foo, None) def test_invalid_reverse_delete_rules_raise_errors(self): @@ -1549,7 +1549,7 @@ class InstanceTest(unittest.TestCase): # Delete the Person, which should lead to deletion of the BlogPost, and, # recursively to the Comment, too author.delete() - self.assertEqual(len(Comment.objects), 0) + self.assertEqual(Comment.objects.count(), 0) self.Person.drop_collection() BlogPost.drop_collection() @@ -1576,16 +1576,16 @@ class InstanceTest(unittest.TestCase): # Delete the Person should be denied self.assertRaises(OperationError, author.delete) # Should raise denied error - self.assertEqual(len(BlogPost.objects), 1) # No objects may have been deleted - self.assertEqual(len(self.Person.objects), 1) + self.assertEqual(BlogPost.objects.count(), 1) # No objects may have been deleted + self.assertEqual(self.Person.objects.count(), 1) # Other users, that don't have BlogPosts must be removable, like normal author = self.Person(name='Another User') author.save() - self.assertEqual(len(self.Person.objects), 2) + self.assertEqual(self.Person.objects.count(), 2) author.delete() - self.assertEqual(len(self.Person.objects), 1) + self.assertEqual(self.Person.objects.count(), 1) self.Person.drop_collection() BlogPost.drop_collection() diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 0d3ebf3..f5aec7e 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -72,7 +72,7 @@ class QuerySetTest(unittest.TestCase): # Find all people in the collection people = self.Person.objects - self.assertEqual(len(people), 2) + self.assertEqual(people.count(), 2) results = list(people) self.assertTrue(isinstance(results[0], self.Person)) self.assertTrue(isinstance(results[0].id, (ObjectId, str, unicode))) @@ -83,7 +83,7 @@ class QuerySetTest(unittest.TestCase): # Use a query to filter the people found to just person1 people = self.Person.objects(age=20) - self.assertEqual(len(people), 1) + self.assertEqual(people.count(), 
1) person = people.next() self.assertEqual(person.name, "User A") self.assertEqual(person.age, 20) @@ -130,7 +130,7 @@ class QuerySetTest(unittest.TestCase): for i in xrange(55): self.Person(name='A%s' % i, age=i).save() - self.assertEqual(len(self.Person.objects), 55) + self.assertEqual(self.Person.objects.count(), 55) self.assertEqual("Person object", "%s" % self.Person.objects[0]) self.assertEqual("[, ]", "%s" % self.Person.objects[1:3]) self.assertEqual("[, ]", "%s" % self.Person.objects[51:53]) @@ -211,10 +211,10 @@ class QuerySetTest(unittest.TestCase): Blog.drop_collection() Blog.objects.create(tags=['a', 'b']) - self.assertEqual(len(Blog.objects(tags__0='a')), 1) - self.assertEqual(len(Blog.objects(tags__0='b')), 0) - self.assertEqual(len(Blog.objects(tags__1='a')), 0) - self.assertEqual(len(Blog.objects(tags__1='b')), 1) + self.assertEqual(Blog.objects(tags__0='a').count(), 1) + self.assertEqual(Blog.objects(tags__0='b').count(), 0) + self.assertEqual(Blog.objects(tags__1='a').count(), 0) + self.assertEqual(Blog.objects(tags__1='b').count(), 1) Blog.drop_collection() @@ -229,13 +229,13 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(blog, blog1) query = Blog.objects(posts__1__comments__1__name='testb') - self.assertEqual(len(query), 2) + self.assertEqual(query.count(), 2) query = Blog.objects(posts__1__comments__1__name='testa') - self.assertEqual(len(query), 0) + self.assertEqual(query.count(), 0) query = Blog.objects(posts__0__comments__1__name='testa') - self.assertEqual(len(query), 0) + self.assertEqual(query.count(), 0) Blog.drop_collection() @@ -344,7 +344,7 @@ class QuerySetTest(unittest.TestCase): # Update all of the first comments of second posts of all blogs blog = Blog.objects().update(set__posts__1__comments__0__name="testc") testc_blogs = Blog.objects(posts__1__comments__0__name="testc") - self.assertEqual(len(testc_blogs), 2) + self.assertEqual(testc_blogs.count(), 2) Blog.drop_collection() @@ -355,7 +355,7 @@ class 
QuerySetTest(unittest.TestCase): blog = Blog.objects().update_one( set__posts__1__comments__1__name="testc") testc_blogs = Blog.objects(posts__1__comments__1__name="testc") - self.assertEqual(len(testc_blogs), 1) + self.assertEqual(testc_blogs.count(), 1) # Check that using this indexing syntax on a non-list fails def non_list_indexing(): @@ -793,7 +793,7 @@ class QuerySetTest(unittest.TestCase): number = IntField() def __repr__(self): - return "" % self.number + return "" % self.number Doc.drop_collection() @@ -803,20 +803,17 @@ class QuerySetTest(unittest.TestCase): docs = Doc.objects.order_by('number') self.assertEqual(docs.count(), 1000) - self.assertEqual(len(docs), 1000) docs_string = "%s" % docs self.assertTrue("Doc: 0" in docs_string) self.assertEqual(docs.count(), 1000) - self.assertEqual(len(docs), 1000) # Limit and skip docs = docs[1:4] self.assertEqual('[, , ]', "%s" % docs) self.assertEqual(docs.count(), 3) - self.assertEqual(len(docs), 3) for doc in docs: self.assertEqual('.. queryset mid-iteration ..', repr(docs)) @@ -945,8 +942,10 @@ class QuerySetTest(unittest.TestCase): Blog.drop_collection() def assertSequence(self, qs, expected): + qs = list(qs) + expected = list(expected) self.assertEqual(len(qs), len(expected)) - for i in range(len(qs)): + for i in xrange(len(qs)): self.assertEqual(qs[i], expected[i]) def test_ordering(self): @@ -1124,13 +1123,13 @@ class QuerySetTest(unittest.TestCase): self.Person(name="User B", age=30).save() self.Person(name="User C", age=40).save() - self.assertEqual(len(self.Person.objects), 3) + self.assertEqual(self.Person.objects.count(), 3) self.Person.objects(age__lt=30).delete() - self.assertEqual(len(self.Person.objects), 2) + self.assertEqual(self.Person.objects.count(), 2) self.Person.objects.delete() - self.assertEqual(len(self.Person.objects), 0) + self.assertEqual(self.Person.objects.count(), 0) def test_reverse_delete_rule_cascade(self): """Ensure cascading deletion of referring documents from the database. 
@@ -2332,8 +2331,8 @@ class QuerySetTest(unittest.TestCase): t = Test(testdict={'f': 'Value'}) t.save() - self.assertEqual(len(Test.objects(testdict__f__startswith='Val')), 1) - self.assertEqual(len(Test.objects(testdict__f='Value')), 1) + self.assertEqual(Test.objects(testdict__f__startswith='Val').count(), 1) + self.assertEqual(Test.objects(testdict__f='Value').count(), 1) Test.drop_collection() class Test(Document): @@ -2342,8 +2341,8 @@ class QuerySetTest(unittest.TestCase): t = Test(testdict={'f': 'Value'}) t.save() - self.assertEqual(len(Test.objects(testdict__f='Value')), 1) - self.assertEqual(len(Test.objects(testdict__f__startswith='Val')), 1) + self.assertEqual(Test.objects(testdict__f='Value').count(), 1) + self.assertEqual(Test.objects(testdict__f__startswith='Val').count(), 1) Test.drop_collection() def test_bulk(self): @@ -2539,8 +2538,7 @@ class QuerySetTest(unittest.TestCase): # Finds only one point because only the first point is within 60km of # the reference point to the south. 
points = Point.objects( - location__within_spherical_distance=[[-122, 36.5], 60/earth_radius] - ); + location__within_spherical_distance=[[-122, 36.5], 60/earth_radius]) self.assertEqual(points.count(), 1) self.assertEqual(points[0].id, south_point.id) @@ -2551,7 +2549,7 @@ class QuerySetTest(unittest.TestCase): """ class CustomQuerySet(QuerySet): def not_empty(self): - return len(self) > 0 + return self.count() > 0 class Post(Document): meta = {'queryset_class': CustomQuerySet} @@ -2572,7 +2570,7 @@ class QuerySetTest(unittest.TestCase): class CustomQuerySet(QuerySet): def not_empty(self): - return len(self) > 0 + return self.count() > 0 class CustomQuerySetManager(QuerySetManager): queryset_class = CustomQuerySet @@ -2619,7 +2617,7 @@ class QuerySetTest(unittest.TestCase): class CustomQuerySet(QuerySet): def not_empty(self): - return len(self) > 0 + return self.count() > 0 class Base(Document): meta = {'abstract': True, 'queryset_class': CustomQuerySet} @@ -2642,7 +2640,7 @@ class QuerySetTest(unittest.TestCase): class CustomQuerySet(QuerySet): def not_empty(self): - return len(self) > 0 + return self.count() > 0 class CustomQuerySetManager(QuerySetManager): queryset_class = CustomQuerySet @@ -3044,14 +3042,14 @@ class QuerySetTest(unittest.TestCase): # Find all people in the collection people = self.Person.objects.scalar('name') - self.assertEqual(len(people), 2) + self.assertEqual(people.count(), 2) results = list(people) self.assertEqual(results[0], "User A") self.assertEqual(results[1], "User B") # Use a query to filter the people found to just person1 people = self.Person.objects(age=20).scalar('name') - self.assertEqual(len(people), 1) + self.assertEqual(people.count(), 1) person = people.next() self.assertEqual(person, "User A") @@ -3097,7 +3095,7 @@ class QuerySetTest(unittest.TestCase): for i in xrange(55): self.Person(name='A%s' % i, age=i).save() - self.assertEqual(len(self.Person.objects.scalar('name')), 55) + 
self.assertEqual(self.Person.objects.scalar('name').count(), 55) self.assertEqual("A0", "%s" % self.Person.objects.order_by('name').scalar('name').first()) self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').order_by('name')[0]) if PY3: @@ -3314,7 +3312,7 @@ class QuerySetTest(unittest.TestCase): inner_count = 0 inner_total_count = 0 - self.assertEqual(len(users), 7) + self.assertEqual(users.count(), 7) for i, outer_user in enumerate(users): self.assertEqual(outer_user.name, names[i]) @@ -3322,17 +3320,17 @@ class QuerySetTest(unittest.TestCase): inner_count = 0 # Calling len might disrupt the inner loop if there are bugs - self.assertEqual(len(users), 7) + self.assertEqual(users.count(), 7) for j, inner_user in enumerate(users): self.assertEqual(inner_user.name, names[j]) inner_count += 1 inner_total_count += 1 - self.assertEqual(inner_count, 7) # inner loop should always be executed seven times + self.assertEqual(inner_count, 7) # inner loop should always be executed seven times - self.assertEqual(outer_count, 7) # outer loop should be executed seven times total - self.assertEqual(inner_total_count, 7 * 7) # inner loop should be executed fourtynine times total + self.assertEqual(outer_count, 7) # outer loop should be executed seven times total + self.assertEqual(inner_total_count, 7 * 7) # inner loop should be executed fourtynine times total if __name__ == '__main__': unittest.main() diff --git a/tests/queryset/transform.py b/tests/queryset/transform.py index d38cbfd..bde4b6f 100644 --- a/tests/queryset/transform.py +++ b/tests/queryset/transform.py @@ -53,14 +53,14 @@ class TransformTest(unittest.TestCase): BlogPost.objects(title=data['title'])._query) self.assertFalse('title' in BlogPost.objects(title=data['title'])._query) - self.assertEqual(len(BlogPost.objects(title=data['title'])), 1) + self.assertEqual(BlogPost.objects(title=data['title']).count(), 1) self.assertTrue('_id' in BlogPost.objects(pk=post.id)._query) - 
self.assertEqual(len(BlogPost.objects(pk=post.id)), 1) + self.assertEqual(BlogPost.objects(pk=post.id).count(), 1) self.assertTrue('postComments.commentContent' in BlogPost.objects(comments__content='test')._query) - self.assertEqual(len(BlogPost.objects(comments__content='test')), 1) + self.assertEqual(BlogPost.objects(comments__content='test').count(), 1) BlogPost.drop_collection() @@ -79,7 +79,7 @@ class TransformTest(unittest.TestCase): self.assertTrue('_id' in BlogPost.objects(pk=data['title'])._query) self.assertTrue('_id' in BlogPost.objects(title=data['title'])._query) - self.assertEqual(len(BlogPost.objects(pk=data['title'])), 1) + self.assertEqual(BlogPost.objects(pk=data['title']).count(), 1) BlogPost.drop_collection() diff --git a/tests/queryset/visitor.py b/tests/queryset/visitor.py index 98815db..bd81a65 100644 --- a/tests/queryset/visitor.py +++ b/tests/queryset/visitor.py @@ -268,8 +268,8 @@ class QTest(unittest.TestCase): self.Person(name='user3', age=30).save() self.Person(name='user4', age=40).save() - self.assertEqual(len(self.Person.objects(Q(age__in=[20]))), 2) - self.assertEqual(len(self.Person.objects(Q(age__in=[20, 30]))), 3) + self.assertEqual(self.Person.objects(Q(age__in=[20])).count(), 2) + self.assertEqual(self.Person.objects(Q(age__in=[20, 30])).count(), 3) # Test invalid query objs def wrong_query_objs(): @@ -311,8 +311,8 @@ class QTest(unittest.TestCase): BlogPost(tags=['python', 'mongo']).save() BlogPost(tags=['python']).save() - self.assertEqual(len(BlogPost.objects(Q(tags='mongo'))), 1) - self.assertEqual(len(BlogPost.objects(Q(tags='python'))), 2) + self.assertEqual(BlogPost.objects(Q(tags='mongo')).count(), 1) + self.assertEqual(BlogPost.objects(Q(tags='python')).count(), 2) BlogPost.drop_collection() From 14b6c471cf519df6df5c737faaff512a338ef918 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 22 Apr 2013 16:37:09 +0000 Subject: [PATCH 221/464] Fix PY3 hasattr connecting to the db at define time --- 
mongoengine/base/metaclasses.py | 2 +- tests/document/instance.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index 2704011..def8a05 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -340,7 +340,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): new_class._meta['collection'] = collection(new_class) # Provide a default queryset unless exists or one has been set - if not hasattr(new_class, 'objects'): + if 'objects' not in dir(new_class): new_class.objects = QuerySetManager() # Validate the fields and set primary key if needed diff --git a/tests/document/instance.py b/tests/document/instance.py index 1adc140..5513ed8 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -1857,6 +1857,8 @@ class InstanceTest(unittest.TestCase): def test_db_alias_propagates(self): """db_alias propagates? """ + register_connection('testdb-1', 'mongoenginetest2') + class A(Document): name = StringField() meta = {"db_alias": "testdb-1", "allow_inheritance": True} From e4f38b5665a3281eb26e365e1b5c8b050820efa1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 22 Apr 2013 16:46:59 +0000 Subject: [PATCH 222/464] Fragile test fix --- tests/document/indexes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/document/indexes.py b/tests/document/indexes.py index fea63a5..61e3c0e 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -334,7 +334,7 @@ class IndexesTest(unittest.TestCase): User.ensure_indexes() info = User.objects._collection.index_information() - self.assertEqual(info.keys(), ['_cls_1_user_guid_1', '_id_']) + self.assertEqual(sorted(info.keys()), ['_cls_1_user_guid_1', '_id_']) User.drop_collection() def test_embedded_document_index(self): From 81c7007f80a1147e8577ac2682f802f209e3338a Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 23 Apr 2013 10:38:32 +0000 Subject: [PATCH 
223/464] Added with_limit_and_skip support to count() (#235) --- docs/changelog.rst | 1 + mongoengine/queryset/queryset.py | 8 +++-- tests/queryset/queryset.py | 52 ++++++++++++++++++-------------- 3 files changed, 36 insertions(+), 25 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index da1424e..4c0da7f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- Added with_limit_and_skip support to count() (#235) - Removed __len__ from queryset (#247) - Objects queryset manager now inherited (#256) - Updated connection to use MongoClient (#262, #274) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index cc0b70f..37d07a8 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -342,12 +342,16 @@ class QuerySet(object): self._document, documents=results, loaded=True) return return_one and results[0] or results - def count(self): + def count(self, with_limit_and_skip=True): """Count the selected elements in the query. + + :param with_limit_and_skip (optional): take any :meth:`limit` or + :meth:`skip` that has been applied to this cursor into account when + getting the count """ if self._limit == 0: return 0 - return self._cursor.count(with_limit_and_skip=True) + return self._cursor.count(with_limit_and_skip=with_limit_and_skip) def delete(self, write_concern=None): """Delete the documents matched by the query. 
diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index f5aec7e..c7c4c7c 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -20,7 +20,7 @@ from mongoengine.python_support import PY3 from mongoengine.context_managers import query_counter from mongoengine.queryset import (QuerySet, QuerySetManager, MultipleObjectsReturned, DoesNotExist, - QueryFieldList, queryset_manager) + queryset_manager) from mongoengine.errors import InvalidQueryError __all__ = ("QuerySetTest",) @@ -65,10 +65,8 @@ class QuerySetTest(unittest.TestCase): def test_find(self): """Ensure that a query returns a valid set of results. """ - person1 = self.Person(name="User A", age=20) - person1.save() - person2 = self.Person(name="User B", age=30) - person2.save() + self.Person(name="User A", age=20).save() + self.Person(name="User B", age=30).save() # Find all people in the collection people = self.Person.objects @@ -338,21 +336,20 @@ class QuerySetTest(unittest.TestCase): comment2 = Comment(name='testb') post1 = Post(comments=[comment1, comment2]) post2 = Post(comments=[comment2, comment2]) - blog1 = Blog.objects.create(posts=[post1, post2]) - blog2 = Blog.objects.create(posts=[post2, post1]) + Blog.objects.create(posts=[post1, post2]) + Blog.objects.create(posts=[post2, post1]) # Update all of the first comments of second posts of all blogs - blog = Blog.objects().update(set__posts__1__comments__0__name="testc") + Blog.objects().update(set__posts__1__comments__0__name="testc") testc_blogs = Blog.objects(posts__1__comments__0__name="testc") self.assertEqual(testc_blogs.count(), 2) Blog.drop_collection() - - blog1 = Blog.objects.create(posts=[post1, post2]) - blog2 = Blog.objects.create(posts=[post2, post1]) + Blog.objects.create(posts=[post1, post2]) + Blog.objects.create(posts=[post2, post1]) # Update only the first blog returned by the query - blog = Blog.objects().update_one( + Blog.objects().update_one( set__posts__1__comments__1__name="testc") 
testc_blogs = Blog.objects(posts__1__comments__1__name="testc") self.assertEqual(testc_blogs.count(), 1) @@ -2661,6 +2658,19 @@ class QuerySetTest(unittest.TestCase): Post.drop_collection() + def test_count_limit_and_skip(self): + class Post(Document): + title = StringField() + + Post.drop_collection() + + for i in xrange(10): + Post(title="Post %s" % i).save() + + self.assertEqual(5, Post.objects.limit(5).skip(5).count()) + + self.assertEqual(10, Post.objects.limit(5).skip(5).count(with_limit_and_skip=False)) + def test_call_after_limits_set(self): """Ensure that re-filtering after slicing works """ @@ -2669,10 +2679,8 @@ class QuerySetTest(unittest.TestCase): Post.drop_collection() - post1 = Post(title="Post 1") - post1.save() - post2 = Post(title="Post 2") - post2.save() + Post(title="Post 1").save() + Post(title="Post 2").save() posts = Post.objects.all()[0:1] self.assertEqual(len(list(posts())), 1) @@ -3205,20 +3213,18 @@ class QuerySetTest(unittest.TestCase): float_field = FloatField(default=1.1) boolean_field = BooleanField(default=True) datetime_field = DateTimeField(default=datetime.now) - embedded_document_field = EmbeddedDocumentField(EmbeddedDoc, - default=lambda: EmbeddedDoc()) + embedded_document_field = EmbeddedDocumentField( + EmbeddedDoc, default=lambda: EmbeddedDoc()) list_field = ListField(default=lambda: [1, 2, 3]) dict_field = DictField(default=lambda: {"hello": "world"}) objectid_field = ObjectIdField(default=ObjectId) - reference_field = ReferenceField(Simple, default=lambda: - Simple().save()) + reference_field = ReferenceField(Simple, default=lambda: Simple().save()) map_field = MapField(IntField(), default=lambda: {"simple": 1}) decimal_field = DecimalField(default=1.0) complex_datetime_field = ComplexDateTimeField(default=datetime.now) url_field = URLField(default="http://mongoengine.org") dynamic_field = DynamicField(default=1) - generic_reference_field = GenericReferenceField( - default=lambda: Simple().save()) + generic_reference_field 
= GenericReferenceField(default=lambda: Simple().save()) sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3]) email_field = EmailField(default="ross@example.com") @@ -3226,7 +3232,7 @@ class QuerySetTest(unittest.TestCase): sequence_field = SequenceField() uuid_field = UUIDField(default=uuid.uuid4) generic_embedded_document_field = GenericEmbeddedDocumentField( - default=lambda: EmbeddedDoc()) + default=lambda: EmbeddedDoc()) Simple.drop_collection() Doc.drop_collection() From e2f3406e897fe0a033ae340665427c2b15ad9ce1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 23 Apr 2013 14:06:29 +0000 Subject: [PATCH 224/464] Updated .only() behaviour - now like exclude it is chainable (#202) --- docs/changelog.rst | 1 + docs/upgrade.rst | 15 +++++++++++++ mongoengine/queryset/field_list.py | 23 +++++++++++++++---- mongoengine/queryset/queryset.py | 23 ++++++++++++++++--- tests/queryset/field_list.py | 36 +++++++++++++++--------------- 5 files changed, 73 insertions(+), 25 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 4c0da7f..fb9c35d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- Updated .only() behaviour - now like exclude it is chainable (#202) - Added with_limit_and_skip support to count() (#235) - Removed __len__ from queryset (#247) - Objects queryset manager now inherited (#256) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 4f549d6..c4273f0 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -114,6 +114,21 @@ explicit `queryset.count()` to update:: # New code Animal.objects(type="mammal").count()) + +.only() now inline with .exclude() +---------------------------------- + +The behaviour of `.only()` was highly ambious, now it works in the mirror fashion +to `.exclude()`. 
Chaining `.only()` calls will increase the fields required:: + + # Old code + Animal.objects().only(['type', 'name']).only('name', 'order') # Would have returned just `name` + + # New code + Animal.objects().only('name') + Animal.objects().only(['name']).only('order') # Would return `name` and `order` + + Client ====== PyMongo 2.4 came with a new connection client; MongoClient_ and started the diff --git a/mongoengine/queryset/field_list.py b/mongoengine/queryset/field_list.py index 7b2b0cb..73d3cc2 100644 --- a/mongoengine/queryset/field_list.py +++ b/mongoengine/queryset/field_list.py @@ -7,11 +7,20 @@ class QueryFieldList(object): ONLY = 1 EXCLUDE = 0 - def __init__(self, fields=[], value=ONLY, always_include=[]): + def __init__(self, fields=None, value=ONLY, always_include=None, _only_called=False): + """The QueryFieldList builder + + :param fields: A list of fields used in `.only()` or `.exclude()` + :param value: How to handle the fields; either `ONLY` or `EXCLUDE` + :param always_include: Any fields to always_include eg `_cls` + :param _only_called: Has `.only()` been called? If so its a set of fields + otherwise it performs a union. 
+ """ self.value = value - self.fields = set(fields) - self.always_include = set(always_include) + self.fields = set(fields or []) + self.always_include = set(always_include or []) self._id = None + self._only_called = _only_called self.slice = {} def __add__(self, f): @@ -26,7 +35,10 @@ class QueryFieldList(object): self.slice = {} elif self.value is self.ONLY and f.value is self.ONLY: self._clean_slice() - self.fields = self.fields.intersection(f.fields) + if self._only_called: + self.fields = self.fields.union(f.fields) + else: + self.fields = f.fields elif self.value is self.EXCLUDE and f.value is self.EXCLUDE: self.fields = self.fields.union(f.fields) self._clean_slice() @@ -46,6 +58,9 @@ class QueryFieldList(object): self.fields = self.fields.union(self.always_include) else: self.fields -= self.always_include + + if getattr(f, '_only_called', False): + self._only_called = True return self def __nonzero__(self): diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 37d07a8..dcfb240 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -624,19 +624,35 @@ class QuerySet(object): post = BlogPost.objects(...).only("title", "author.name") + .. note :: `only()` is chainable and will perform a union :: + So with the following it will fetch both: `title` and `author.name`:: + + post = BlogPost.objects.only("title").only("author.name") + + :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any + field filters. + :param fields: fields to include .. versionadded:: 0.3 .. versionchanged:: 0.5 - Added subfield support """ fields = dict([(f, QueryFieldList.ONLY) for f in fields]) - return self.fields(**fields) + return self.fields(True, **fields) def exclude(self, *fields): """Opposite to .only(), exclude some document's fields. :: post = BlogPost.objects(...).exclude("comments") + .. 
note :: `exclude()` is chainable and will perform a union :: + So with the following it will exclude both: `title` and `author.name`:: + + post = BlogPost.objects.exclude("title").exclude("author.name") + + :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any + field filters. + :param fields: fields to exclude .. versionadded:: 0.5 @@ -644,7 +660,7 @@ class QuerySet(object): fields = dict([(f, QueryFieldList.EXCLUDE) for f in fields]) return self.fields(**fields) - def fields(self, **kwargs): + def fields(self, _only_called=False, **kwargs): """Manipulate how you load this document's fields. Used by `.only()` and `.exclude()` to manipulate which fields to retrieve. Fields also allows for a greater level of control for example: @@ -678,7 +694,8 @@ class QuerySet(object): for value, group in itertools.groupby(fields, lambda x: x[1]): fields = [field for field, value in group] fields = queryset._fields_to_dbfields(fields) - queryset._loaded_fields += QueryFieldList(fields, value=value) + queryset._loaded_fields += QueryFieldList(fields, value=value, _only_called=_only_called) + return queryset def all_fields(self): diff --git a/tests/queryset/field_list.py b/tests/queryset/field_list.py index 4a8a72b..2bdfce1 100644 --- a/tests/queryset/field_list.py +++ b/tests/queryset/field_list.py @@ -20,47 +20,47 @@ class QueryFieldListTest(unittest.TestCase): def test_include_include(self): q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'a': True, 'b': True}) + q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY, _only_called=True) + self.assertEqual(q.as_dict(), {'a': 1, 'b': 1}) q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'b': True}) + self.assertEqual(q.as_dict(), {'a': 1, 'b': 1, 'c': 1}) def test_include_exclude(self): q = QueryFieldList() q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY) - 
self.assertEqual(q.as_dict(), {'a': True, 'b': True}) + self.assertEqual(q.as_dict(), {'a': 1, 'b': 1}) q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {'a': True}) + self.assertEqual(q.as_dict(), {'a': 1}) def test_exclude_exclude(self): q = QueryFieldList() q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {'a': False, 'b': False}) + self.assertEqual(q.as_dict(), {'a': 0, 'b': 0}) q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {'a': False, 'b': False, 'c': False}) + self.assertEqual(q.as_dict(), {'a': 0, 'b': 0, 'c': 0}) def test_exclude_include(self): q = QueryFieldList() q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {'a': False, 'b': False}) + self.assertEqual(q.as_dict(), {'a': 0, 'b': 0}) q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'c': True}) + self.assertEqual(q.as_dict(), {'c': 1}) def test_always_include(self): q = QueryFieldList(always_include=['x', 'y']) q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'c': True}) + self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1}) def test_reset(self): q = QueryFieldList(always_include=['x', 'y']) q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'c': True}) + self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1}) q.reset() self.assertFalse(q) q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'b': True, 'c': True}) + self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'b': 1, 'c': 1}) def 
test_using_a_slice(self): q = QueryFieldList() @@ -97,7 +97,7 @@ class OnlyExcludeAllTest(unittest.TestCase): qs = MyDoc.objects.fields(**dict(((i, 1) for i in include))) self.assertEqual(qs._loaded_fields.as_dict(), - {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1}) + {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1}) qs = qs.only(*only) self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) qs = qs.exclude(*exclude) @@ -134,15 +134,15 @@ class OnlyExcludeAllTest(unittest.TestCase): qs = qs.only(*only) qs = qs.fields(slice__b=5) self.assertEqual(qs._loaded_fields.as_dict(), - {'b': {'$slice': 5}, 'c': 1}) + {'b': {'$slice': 5}, 'c': 1}) qs = qs.fields(slice__c=[5, 1]) self.assertEqual(qs._loaded_fields.as_dict(), - {'b': {'$slice': 5}, 'c': {'$slice': [5, 1]}}) + {'b': {'$slice': 5}, 'c': {'$slice': [5, 1]}}) qs = qs.exclude('c') self.assertEqual(qs._loaded_fields.as_dict(), - {'b': {'$slice': 5}}) + {'b': {'$slice': 5}}) def test_only(self): """Ensure that QuerySet.only only returns the requested fields. @@ -328,7 +328,7 @@ class OnlyExcludeAllTest(unittest.TestCase): Numbers.drop_collection() - numbers = Numbers(n=[0,1,2,3,4,5,-5,-4,-3,-2,-1]) + numbers = Numbers(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1]) numbers.save() # first three @@ -368,7 +368,7 @@ class OnlyExcludeAllTest(unittest.TestCase): Numbers.drop_collection() numbers = Numbers() - numbers.embedded = EmbeddedNumber(n=[0,1,2,3,4,5,-5,-4,-3,-2,-1]) + numbers.embedded = EmbeddedNumber(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1]) numbers.save() # first three From a692316293c1cdea8c8d10089e0651e7478dcc28 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 23 Apr 2013 14:09:41 +0000 Subject: [PATCH 225/464] Update to upgrade docs --- docs/upgrade.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index c4273f0..5490757 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -126,7 +126,9 @@ to `.exclude()`. 
Chaining `.only()` calls will increase the fields required:: # New code Animal.objects().only('name') - Animal.objects().only(['name']).only('order') # Would return `name` and `order` + + # Note: + Animal.objects().only(['name']).only('order') # Now returns `name` *and* `order` Client From 94d1e566c018bc26875b1cbc585ba2215d88f80d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 23 Apr 2013 14:44:17 +0000 Subject: [PATCH 226/464] Added SequenceField.set_next_value(value) helper (#159) --- docs/changelog.rst | 1 + mongoengine/fields.py | 19 +++++++++++++++---- tests/fields/fields.py | 40 +++++++++++++++++++++++++--------------- 3 files changed, 41 insertions(+), 19 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index fb9c35d..d22fc60 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- Added SequenceField.set_next_value(value) helper (#159) - Updated .only() behaviour - now like exclude it is chainable (#202) - Added with_limit_and_skip support to count() (#235) - Removed __len__ from queryset (#247) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 690e7ac..4fc65c7 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1408,13 +1408,13 @@ class SequenceField(BaseField): COLLECTION_NAME = 'mongoengine.counters' VALUE_DECORATOR = int - def __init__(self, collection_name=None, db_alias=None, - sequence_name=None, value_decorator=None, *args, **kwargs): + def __init__(self, collection_name=None, db_alias=None, sequence_name=None, + value_decorator=None, *args, **kwargs): self.collection_name = collection_name or self.COLLECTION_NAME self.db_alias = db_alias or DEFAULT_CONNECTION_NAME self.sequence_name = sequence_name self.value_decorator = (callable(value_decorator) and - value_decorator or self.VALUE_DECORATOR) + value_decorator or self.VALUE_DECORATOR) return super(SequenceField, self).__init__(*args, **kwargs) def generate(self): @@ -1430,6 +1430,17 
@@ class SequenceField(BaseField): upsert=True) return self.value_decorator(counter['next']) + def set_next_value(self, value): + """Helper method to set the next sequence value""" + sequence_name = self.get_sequence_name() + sequence_id = "%s.%s" % (sequence_name, self.name) + collection = get_db(alias=self.db_alias)[self.collection_name] + counter = collection.find_and_modify(query={"_id": sequence_id}, + update={"$set": {"next": value}}, + new=True, + upsert=True) + return self.value_decorator(counter['next']) + def get_sequence_name(self): if self.sequence_name: return self.sequence_name @@ -1438,7 +1449,7 @@ class SequenceField(BaseField): return owner._get_collection_name() else: return ''.join('_%s' % c if c.isupper() else c - for c in owner._class_name).strip('_').lower() + for c in owner._class_name).strip('_').lower() def __get__(self, instance, owner): value = super(SequenceField, self).__get__(instance, owner) diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 9a7b82f..ade44b8 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -2164,18 +2164,21 @@ class FieldTest(unittest.TestCase): Person.drop_collection() for x in xrange(10): - p = Person(name="Person %s" % x) - p.save() + Person(name="Person %s" % x).save() c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) self.assertEqual(c['next'], 10) ids = [i.id for i in Person.objects] - self.assertEqual(ids, range(1, 11)) + self.assertEqual(ids, xrange(1, 11)) c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) self.assertEqual(c['next'], 10) + Person.id.set_next_value(1000) + c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) + self.assertEqual(c['next'], 1000) + def test_sequence_field_sequence_name(self): class Person(Document): id = SequenceField(primary_key=True, sequence_name='jelly') @@ -2185,8 +2188,7 @@ class FieldTest(unittest.TestCase): Person.drop_collection() for x in xrange(10): - p = Person(name="Person %s" % x) - 
p.save() + Person(name="Person %s" % x).save() c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) self.assertEqual(c['next'], 10) @@ -2197,6 +2199,10 @@ class FieldTest(unittest.TestCase): c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) self.assertEqual(c['next'], 10) + Person.id.set_next_value(1000) + c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) + self.assertEqual(c['next'], 1000) + def test_multiple_sequence_fields(self): class Person(Document): id = SequenceField(primary_key=True) @@ -2207,21 +2213,28 @@ class FieldTest(unittest.TestCase): Person.drop_collection() for x in xrange(10): - p = Person(name="Person %s" % x) - p.save() + Person(name="Person %s" % x).save() c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) self.assertEqual(c['next'], 10) ids = [i.id for i in Person.objects] - self.assertEqual(ids, range(1, 11)) + self.assertEqual(ids, xrange(1, 11)) counters = [i.counter for i in Person.objects] - self.assertEqual(counters, range(1, 11)) + self.assertEqual(counters, xrange(1, 11)) c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) self.assertEqual(c['next'], 10) + Person.id.set_next_value(1000) + c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) + self.assertEqual(c['next'], 1000) + + Person.counter.set_next_value(999) + c = self.db['mongoengine.counters'].find_one({'_id': 'person.counter'}) + self.assertEqual(c['next'], 999) + def test_sequence_fields_reload(self): class Animal(Document): counter = SequenceField() @@ -2230,8 +2243,7 @@ class FieldTest(unittest.TestCase): self.db['mongoengine.counters'].drop() Animal.drop_collection() - a = Animal(name="Boi") - a.save() + a = Animal(name="Boi").save() self.assertEqual(a.counter, 1) a.reload() @@ -2261,10 +2273,8 @@ class FieldTest(unittest.TestCase): Person.drop_collection() for x in xrange(10): - a = Animal(name="Animal %s" % x) - a.save() - p = Person(name="Person %s" % x) - p.save() + 
Animal(name="Animal %s" % x).save() + Person(name="Person %s" % x).save() c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) self.assertEqual(c['next'], 10) From 3653981416146bdceb1ff6106c9c3d35d9c7db0e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 23 Apr 2013 15:12:57 +0000 Subject: [PATCH 227/464] Added ImageField support for inline replacements (#86) --- docs/changelog.rst | 1 + mongoengine/fields.py | 24 ++++++++++-------------- tests/fields/file_tests.py | 30 ++++++++++++++++++++++++++++++ tests/fields/mongodb_leaf.png | Bin 0 -> 4971 bytes 4 files changed, 41 insertions(+), 14 deletions(-) create mode 100644 tests/fields/mongodb_leaf.png diff --git a/docs/changelog.rst b/docs/changelog.rst index d22fc60..476753d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- ImageFields now support inline replacements (#86) - Added SequenceField.set_next_value(value) helper (#159) - Updated .only() behaviour - now like exclude it is chainable (#202) - Added with_limit_and_skip support to count() (#235) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 4fc65c7..4530429 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1165,13 +1165,11 @@ class FileField(BaseField): grid_file.delete() except: pass - # Create a new file with the new data - grid_file.put(value) - else: - # Create a new proxy object as we don't already have one - instance._data[key] = self.proxy_class(key=key, instance=instance, - collection_name=self.collection_name) - instance._data[key].put(value) + + # Create a new proxy object as we don't already have one + instance._data[key] = self.proxy_class(key=key, instance=instance, + collection_name=self.collection_name) + instance._data[key].put(value) else: instance._data[key] = value @@ -1208,6 +1206,8 @@ class ImageGridFsProxy(GridFSProxy): Insert a image in database applying field properties (size, thumbnail_size) """ + if not 
self.instance: + import ipdb; ipdb.set_trace(); field = self.instance._fields[self.key] try: @@ -1235,10 +1235,7 @@ class ImageGridFsProxy(GridFSProxy): size = field.thumbnail_size if size['force']: - thumbnail = ImageOps.fit(img, - (size['width'], - size['height']), - Image.ANTIALIAS) + thumbnail = ImageOps.fit(img, (size['width'], size['height']), Image.ANTIALIAS) else: thumbnail = img.copy() thumbnail.thumbnail((size['width'], @@ -1246,8 +1243,7 @@ class ImageGridFsProxy(GridFSProxy): Image.ANTIALIAS) if thumbnail: - thumb_id = self._put_thumbnail(thumbnail, - img_format) + thumb_id = self._put_thumbnail(thumbnail, img_format) else: thumb_id = None @@ -1350,7 +1346,7 @@ class ImageField(FileField): if isinstance(att, (tuple, list)): if PY3: value = dict(itertools.zip_longest(params_size, att, - fillvalue=None)) + fillvalue=None)) else: value = dict(map(None, params_size, att)) diff --git a/tests/fields/file_tests.py b/tests/fields/file_tests.py index 44d2862..c5842d8 100644 --- a/tests/fields/file_tests.py +++ b/tests/fields/file_tests.py @@ -16,6 +16,7 @@ from mongoengine.connection import get_db from mongoengine.python_support import PY3, b, StringIO TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') +TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png') class FileTest(unittest.TestCase): @@ -217,6 +218,19 @@ class FileTest(unittest.TestCase): self.assertEqual(marmot.photo.content_type, 'image/jpeg') self.assertEqual(marmot.photo.foo, 'bar') + def test_file_reassigning(self): + class TestFile(Document): + the_file = FileField() + TestFile.drop_collection() + + test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save() + self.assertEqual(test_file.the_file.get().length, 8313) + + test_file = TestFile.objects.first() + test_file.the_file = open(TEST_IMAGE2_PATH, 'rb') + test_file.save() + self.assertEqual(test_file.the_file.get().length, 4971) + def test_file_boolean(self): """Ensure that a boolean test 
of a FileField indicates its presence """ @@ -264,6 +278,22 @@ class FileTest(unittest.TestCase): t.image.delete() + def test_image_field_reassigning(self): + if PY3: + raise SkipTest('PIL does not have Python 3 support') + + class TestFile(Document): + the_file = ImageField() + TestFile.drop_collection() + + test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save() + self.assertEqual(test_file.the_file.size, (371, 76)) + + test_file = TestFile.objects.first() + test_file.the_file = open(TEST_IMAGE2_PATH, 'rb') + test_file.save() + self.assertEqual(test_file.the_file.size, (45, 101)) + def test_image_field_resize(self): if PY3: raise SkipTest('PIL does not have Python 3 support') diff --git a/tests/fields/mongodb_leaf.png b/tests/fields/mongodb_leaf.png new file mode 100644 index 0000000000000000000000000000000000000000..36661cefc824b1707a9a6063d133cefe3566e1a9 GIT binary patch literal 4971 zcmV-x6O`4Tx05}naRo`#hR1`jmZ&IWdKOk5~hl<6oRa0BJ8yc;~21%2p?MfD<>DVeH z9(p*dx19w`~g7O0}n_%Aq@s%d)fBDv`JHkDym6Hd+5XuAtvnwRpGmK zVkc9?T=n|PIo~X-eVh__(Z?q}P9Z-Dj?gOW6|D%o20XmjW-qs4UjrD(li^iv8@eK9k+ZFm zVRFymFOPAzG5-%Pn|1W;U4vNroTa&AxDScmEA~{ri9gr1^c?U@uwSpaNnw8l_>cP1 zd;)kMQS_;jeRSUEM_*s96y65j1$)tOrwdK{YIQMt92l|D^(E_=$Rjw{b!QT@q!)ni zR`|5oW9X5n$Wv+HVc@|^eX5yXnsHX8PF3UX~a6)MwxDE0HaPjyrlI!;jX{6Kvuh*8ej?;85ekN$?5uuCiS zBTvvVG+XTxAO{m@bvM#Jr)z6J><&E22D|vq?Y?Vkbo_DijopiF$2PET#mZ8eu=y$(ArYkv7@Ex`GL?QCc!_*KFrd&;n1r7 zqW-CFs9&fT)ZaU5gc&=gBz-DaCw(vdOp0__x+47~U6sC(E(JNe@4cTT*n6*E zVH4eoU1-&7pEV~_PRe`a7v+@vy!^5}8?Y3)UmlaER00009a7bBm000XU000XU0RWnu7ytkc?MXyIRCoc^ zTx*OTRT(~W`M$liv?9=Q-7ZBg?UscIL7{S~{J>Oftboy=iB=P-L@Ce^2(~VzNNU(d z)MEHiLIP?a0#-wiU<~39ei#f=%GRb7soO493zb{Tx8EF}=RN1lH#6US*PTt^Y-i5x zy*%eV&zv)t-72lMD*CwU@vA&UCcAY(6%|IIH?>+sr&yCzP1E$5~jDW6IrmiiO zN*^82*KGUJ@~YMsmV)rug&T)g&qTBfjr7XLSDqDV^+bg0q+AM=^8C9#)Sy>9wqm)` z>P1iU0*qAP1GG|e7T&dFOVPj!iS(6^T=`uU>E8ovNdvV4`oMRlz3Ya4G;%?az@6o; zZ7%=y@*gTs{|q=m2fzhC5NH*K5^1sPMf#Zqca98o?Ifc(Ezno|>WX#Ri`Fx;S^%R2 z@TiHKhyyUiS-z)VICpz$ z 
z8eMW9)jCaWgI@Z;#cN>vQobA<&|M0 zRQ}W-^ZFXKwFG_s{TEyh!@~|JZ-!)*=#(Oj5U^dVQ%~J6{L^M4$-btbhqqidOZjTE z0EVQllF@WxV~ng3MTn%`M9D~d*AH%3yto~mrl8B=yPIJ!q#+tk!f=r{ZKgL#5$=fD zJ34kcmH{(0OhxcQ~=6-gaOwUMd;0fav*DKcODdq>(hts zSn~b+-dbxz(3PsU$v8=HxGjo69w_&R|$9Xg> z**tMYzOD36L^jgYCc`V4R6*HkJq1FJ5nZQ&|f1 z8C%XhAHDPFj7dwxk&6d_l?gyEw3=-+GB814?8IjpZgk|V>$p?AF4j#HQlRmYWt9O2 z+hUwRQRspq1{`%0KogGJDtda};OL^uv$84Bi0s#l0BOM=`m}lM9EEl$lJP|K&J(6~ z%DXkI7lS_SzOz3GF#Q4&9-NbIDI&A&jKFIhhqFj*U02~77qC093N6cu#-OVebrrcv z;ub=Y9F&PM;F_oba{!s=jc&oZVXSDi6oW?73L32$HpDP(`4q9l&s{~##Rw)Pqn0e^ z45|hzThghg8v*T&UWWH?>MXfSBVYLgv~B>Th}6Xq`jB(klgF&Y?WzordMt=cZs4+r z>grZqf=-uskWRf;6VRu8?BFFv6XFdhBD(~ELIh_S5RLPQlb|&%Mk4NMSRkn6L4$U| zGQPx#R)gynU*n7jXsxQ3h!gS|J;p6X3_R0jfjR*KbXzv$k{eAP;3P1X&3+9JFq1sk zl}us_3J{AaVs7xYx=`jELzIgGS{m;UF)h+eBQZtnF!AaG{g4lkb2mdy@1~vdX3_+l zaY{3_B*<%3qhadQO(v;p{VCTb5LD!y$2sAMML9tcdS^hTvyF|$7=bNoO6H#BIB|(! zfA+_~7QI+DS@4c^uhV?Ko`U)ub{j`&iqN!i;#?s&O{UO*B2GFBO=c{KC0mYdC;b?3 z)yk2tQrti3s3nUf8W47oE^HOa0!r2s;A~ttPDx}A4^Fr%+x${A;rYa`o+V_h8ARC4_U?p*&m;n^m(UnPtQfM4v2R~4vU?@Q&wCg2S5w^Jyn5pH2^CFZnnm&w( zdX(ef_{LOjUnzrd85<(dQ$GO12d>v`;9q>qy~oTxB5o$aDA96yxzWunF>&6r@#(9QYWU?09GRG!hvJ_#n3?^x} z9-Of%!tp7V;*Cqo7Wlph)3c=)4ge;-`Jc^J?qzbLnEYXFG-4vREGZHP& z;$^Bj*9VB%hFEl0i-^<`tBA2^U~yokWCmg?XyMXf!BNIcN3G4+= zia02UBJN%!0(F8w_#^JxJsEIDTGsfON2ZNct{#Wwt$ku*If%Sfq$NcdR2UV@{e9K2DCd_5! 
zVrwEIJV%om4BX00V1=-45>S~L^dWb0)FVjIM^3Bma5w&DEj4DA^(%IW&Ro z$$wqMoR0AxM~BTs%3R8^Fo9Zt>`Mw*ibx|n+|da*@smE$#on>K?}vDU^onfbG|1M1 zR*|&3&T0a~s<51h|C?;~y|DVCL`e{{+olrIsR{P^ zlQ-;q)y%Ec65LdG@NM29MI5IckZdjv+=+m)0<6cwFrBzzc#W;RyqNyRaqwlM@RU~@ zlMTE9fIe{5e8Hle9gJNfr7mi`7lAHi#i>{_r(`&jAT!`!&Fq#kq-)+;`aeroZ;v~7Y-WMp(TIa=)` zi8CnVX@lBpdp2#KbRscmHsID~OH6@|C}Lh6$V{2h$Lw{3u6roA3-DYKGK!xX+x*}4gKfEJpCk6W7H|*FC-&hNqQ-si3Z8T=(m4lh!-x1tD*iA4@MUPaOr!S6g#C{Cgh*AfK_PHYND0;O3`?qsnj z-Vg${{Pa_2OZ3Ml z?%w&A6c$wKKvMv4@EhjSqe#`FN(Pd)IY=JyfY_K!_WnfPxlAekT3ubr1wDE5_#OoJ z7j~ZDfo`lrMx7p|38w}W5za%d6}Wg5w8z6LfE+LH8yip0IJ%i~L9?U(o%h$^A@4nw zCAC2jBVs5LZ{ZG1HqRLiMNC3TYy0)swbk;5p!clb!&eL6XtvR0R}q>O$GnK##!~au zvXmId;}iGnitjbDiVZk$ntJ1N^Ke+beK+m+GZv5^z`eBa zOu%xbOA}7CWH%aq2%TH)G4~&J;H=S12FUyOy}om0ra8CT7HDb$_?^J?JDE$=1-Q)x ztSj4mo*IA*e_ADxs)T;!68#8&H=%WE8#GN#p0wkO2;z1z+k_$v?tE`RXi^Jw?WvY~ zsl0@TF>V2Vqa?_;1_58jExUnp(SW*?s5(72y}Oyf*|m+5`jnY|H)Ck{Sp~s@E?W-cvD? z3~~I8`^I)|&UCe`b_|*p=!02UHkpW#+6@0v2(1&kOkgq@Fj_}TX_l)1( z5@C)&SDMTumMUrlv;RhN7102DbhS@zE6<*FJnrVwx2V*`$INw;WBn%L=BxhDtk)86ObH}b?Z0PyLw9hE*0ey= z#NIXA5B7S8&jF~X<-$Vf2l16pzPAAQwS%8V7dfO3dfNXC?kToT2QYQ9@-Rfjq``Yf z?wX@}Ze8`ki1K)E0J(@x=_X%x}jE%YdrR4EL0v#rQ9BcQ`l@OKtz=z1(O0vdo1;sR8qatYDy z>l$1m#Tfxz_QJRDe<1*LkLv6Dh=BIWex(Fzl(8fB^z`*MideVpGZN{Zp3 Date: Tue, 23 Apr 2013 15:32:10 +0000 Subject: [PATCH 228/464] Update build assets for PY3 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index ba538fa..e545ba2 100644 --- a/setup.py +++ b/setup.py @@ -56,7 +56,7 @@ if sys.version_info[0] == 3: extra_opts['packages'] = find_packages(exclude=('tests',)) if "test" in sys.argv or "nosetests" in sys.argv: extra_opts['packages'].append("tests") - extra_opts['package_data'] = {"tests": ["fields/mongoengine.png"]} + extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]} else: extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django==1.4.2', 'PIL'] 
extra_opts['packages'] = find_packages(exclude=('tests',)) From 1e1e48732a92cc5e355301e7178b2337f2e48626 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 23 Apr 2013 15:33:38 +0000 Subject: [PATCH 229/464] Update setup.py python versions / langauge in changelog --- docs/changelog.rst | 2 +- setup.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 476753d..49ee0ec 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,7 +4,7 @@ Changelog Changes in 0.8.X ================ -- ImageFields now support inline replacements (#86) +- Added ImageField support for inline replacements (#86) - Added SequenceField.set_next_value(value) helper (#159) - Updated .only() behaviour - now like exclude it is chainable (#202) - Added with_limit_and_skip support to count() (#235) diff --git a/setup.py b/setup.py index e545ba2..13c11a9 100644 --- a/setup.py +++ b/setup.py @@ -38,7 +38,6 @@ CLASSIFIERS = [ 'Operating System :: OS Independent', 'Programming Language :: Python', "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.5", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", From 88f96b08386fcf9607600a79f6d5768e81c65a67 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 23 Apr 2013 15:59:23 +0000 Subject: [PATCH 230/464] Ensure all field params are documented (#97) --- docs/apireference.rst | 49 +++++++++++++++++++++++------------------ docs/django.rst | 2 +- docs/guide/querying.rst | 4 ++-- docs/upgrade.rst | 2 +- mongoengine/__init__.py | 4 +++- mongoengine/errors.py | 4 +++- mongoengine/fields.py | 23 ++++++++++--------- 7 files changed, 50 insertions(+), 38 deletions(-) diff --git a/docs/apireference.rst b/docs/apireference.rst index 049cc30..0040f45 100644 --- a/docs/apireference.rst +++ b/docs/apireference.rst @@ -54,28 +54,33 @@ Querying Fields ====== -.. autoclass:: mongoengine.BinaryField -.. 
autoclass:: mongoengine.BooleanField -.. autoclass:: mongoengine.ComplexDateTimeField -.. autoclass:: mongoengine.DateTimeField -.. autoclass:: mongoengine.DecimalField -.. autoclass:: mongoengine.DictField -.. autoclass:: mongoengine.DynamicField -.. autoclass:: mongoengine.EmailField -.. autoclass:: mongoengine.EmbeddedDocumentField -.. autoclass:: mongoengine.FileField -.. autoclass:: mongoengine.FloatField -.. autoclass:: mongoengine.GenericEmbeddedDocumentField -.. autoclass:: mongoengine.GenericReferenceField -.. autoclass:: mongoengine.GeoPointField -.. autoclass:: mongoengine.ImageField -.. autoclass:: mongoengine.IntField -.. autoclass:: mongoengine.ListField -.. autoclass:: mongoengine.MapField -.. autoclass:: mongoengine.ObjectIdField -.. autoclass:: mongoengine.ReferenceField -.. autoclass:: mongoengine.SequenceField -.. autoclass:: mongoengine.SortedListField .. autoclass:: mongoengine.StringField .. autoclass:: mongoengine.URLField +.. autoclass:: mongoengine.EmailField +.. autoclass:: mongoengine.IntField +.. autoclass:: mongoengine.LongField +.. autoclass:: mongoengine.FloatField +.. autoclass:: mongoengine.DecimalField +.. autoclass:: mongoengine.BooleanField +.. autoclass:: mongoengine.DateTimeField +.. autoclass:: mongoengine.ComplexDateTimeField +.. autoclass:: mongoengine.EmbeddedDocumentField +.. autoclass:: mongoengine.GenericEmbeddedDocumentField +.. autoclass:: mongoengine.DynamicField +.. autoclass:: mongoengine.ListField +.. autoclass:: mongoengine.SortedListField +.. autoclass:: mongoengine.DictField +.. autoclass:: mongoengine.MapField +.. autoclass:: mongoengine.ReferenceField +.. autoclass:: mongoengine.GenericReferenceField +.. autoclass:: mongoengine.BinaryField +.. autoclass:: mongoengine.FileField +.. autoclass:: mongoengine.ImageField +.. autoclass:: mongoengine.GeoPointField +.. autoclass:: mongoengine.SequenceField +.. autoclass:: mongoengine.ObjectIdField .. autoclass:: mongoengine.UUIDField +.. 
autoclass:: mongoengine.GridFSError +.. autoclass:: mongoengine.GridFSProxy +.. autoclass:: mongoengine.ImageGridFsProxy +.. autoclass:: mongoengine.ImproperlyConfigured diff --git a/docs/django.rst b/docs/django.rst index 6f27b90..5c9e7bf 100644 --- a/docs/django.rst +++ b/docs/django.rst @@ -10,7 +10,7 @@ In your **settings.py** file, ignore the standard database settings (unless you also plan to use the ORM in your project), and instead call :func:`~mongoengine.connect` somewhere in the settings module. -.. note :: +.. note:: If you are not using another Database backend you may need to add a dummy database backend to ``settings.py`` eg:: diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 3279853..5e250ce 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -450,7 +450,7 @@ modifier comes before the field, not after it:: >>> post.tags ['database', 'nosql'] -.. note :: +.. note:: In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates on changed documents by tracking changes to that document. @@ -466,7 +466,7 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`:: >>> post.tags ['database', 'mongodb'] -.. note :: +.. note:: Currently only top level lists are handled, future versions of mongodb / pymongo plan to support nested positional operators. See `The $ positional operator `_. diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 5490757..564b7f6 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -205,7 +205,7 @@ via `save` eg :: # Or in code: my_document.save(cascade=True) -.. note :: +.. note:: Remember: cascading saves **do not** cascade through lists. 
ReferenceFields diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index da72e53..6fe6d08 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -8,10 +8,12 @@ import queryset from queryset import * import signals from signals import * +from errors import * +import errors import django __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + - list(queryset.__all__) + signals.__all__) + list(queryset.__all__) + signals.__all__ + list(errors.__all__)) VERSION = (0, 8, 0, '+') diff --git a/mongoengine/errors.py b/mongoengine/errors.py index 9cfcd1d..4b6b562 100644 --- a/mongoengine/errors.py +++ b/mongoengine/errors.py @@ -3,7 +3,9 @@ from collections import defaultdict from mongoengine.python_support import txt_type -__all__ = ('NotRegistered', 'InvalidDocumentError', 'ValidationError') +__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError', + 'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError', + 'OperationError', 'NotUniqueError', 'ValidationError') class NotRegistered(Exception): diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 4530429..e23b90a 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -4,7 +4,6 @@ import itertools import re import time import urllib2 -import urlparse import uuid import warnings from operator import itemgetter @@ -16,7 +15,7 @@ from mongoengine.errors import ValidationError from mongoengine.python_support import (PY3, bin_type, txt_type, str_types, StringIO) from base import (BaseField, ComplexBaseField, ObjectIdField, - get_document, BaseDocument, ALLOW_INHERITANCE) + get_document, BaseDocument) from queryset import DO_NOTHING, QuerySet from document import Document, EmbeddedDocument from connection import get_db, DEFAULT_CONNECTION_NAME @@ -27,13 +26,17 @@ except ImportError: Image = None ImageOps = None -__all__ = ['StringField', 'IntField', 'LongField', 'FloatField', 'BooleanField', - 'DateTimeField', 'EmbeddedDocumentField', 
'ListField', 'DictField', - 'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField', - 'DecimalField', 'ComplexDateTimeField', 'URLField', 'DynamicField', - 'GenericReferenceField', 'FileField', 'BinaryField', - 'SortedListField', 'EmailField', 'GeoPointField', 'ImageField', - 'SequenceField', 'UUIDField', 'GenericEmbeddedDocumentField'] +__all__ = ['StringField', 'URLField', 'EmailField', 'IntField', 'LongField', + 'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField', + 'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField', + 'GenericEmbeddedDocumentField', 'DynamicField', 'ListField', + 'SortedListField', 'DictField', 'MapField', 'ReferenceField', + 'GenericReferenceField', 'BinaryField', 'GridFSError', + 'GridFSProxy', 'FileField', 'ImageGridFsProxy', + 'ImproperlyConfigured', 'ImageField', 'GeoPointField', + 'SequenceField', 'UUIDField'] + + RECURSIVE_REFERENCE_CONSTANT = 'self' @@ -351,7 +354,7 @@ class DateTimeField(BaseField): kwargs = {'microsecond': usecs} try: # Seconds are optional, so try converting seconds first. return datetime.datetime(*time.strptime(value, - '%Y-%m-%d %H:%M:%S')[:6], **kwargs) + '%Y-%m-%d %H:%M:%S')[:6], **kwargs) except ValueError: try: # Try without seconds. return datetime.datetime(*time.strptime(value, From 8a7b619b77417bd2fa4e0dbc1fe14a25320018c0 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 23 Apr 2013 20:37:05 +0000 Subject: [PATCH 231/464] Tutorial updates --- docs/guide/connecting.rst | 11 +++++- docs/index.rst | 2 +- docs/tutorial.rst | 81 +++++++++++++++++++++++++-------------- 3 files changed, 63 insertions(+), 31 deletions(-) diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index de6794c..dea6a3d 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -74,9 +74,13 @@ to point across databases and collections. 
Below is an example schema, using Switch Database Context Manager =============================== -Sometimes you might want to switch the database to query against for a class. +Sometimes you may want to switch the database to query against for a class, +for example, you archive older data into a separate database for performance +reasons. + The :class:`~mongoengine.context_managers.switch_db` context manager allows -you to change the database alias for a class eg :: +you to change the database alias for a given class allowing quick and easy +access to the same User document across databases.eg :: from mongoengine.context_managers import switch_db @@ -87,3 +91,6 @@ you to change the database alias for a class eg :: with switch_db(User, 'archive-user-db') as User: User(name="Ross").save() # Saves the 'archive-user-db' + +.. note:: Make sure any aliases have been registered with + :func:`~mongoengine.register_connection` before using the context manager. diff --git a/docs/index.rst b/docs/index.rst index f6d44b5..4d0d211 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -7,7 +7,7 @@ MongoDB. To install it, simply run .. code-block:: console - # pip install -U mongoengine + $ pip install -U mongoengine :doc:`tutorial` Start here for a quick overview. diff --git a/docs/tutorial.rst b/docs/tutorial.rst index c4b69c4..423df9b 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -1,6 +1,7 @@ ======== Tutorial ======== + This tutorial introduces **MongoEngine** by means of example --- we will walk through how to create a simple **Tumblelog** application. A Tumblelog is a type of blog where posts are not constrained to being conventional text-based posts. @@ -12,23 +13,29 @@ interface. Getting started =============== + Before we start, make sure that a copy of MongoDB is running in an accessible location --- running it locally will be easier, but if that is not an option -then it may be run on a remote server. +then it may be run on a remote server. 
If you haven't installed mongoengine, +simply use pip to install it like so:: + + $ pip install mongoengine Before we can start using MongoEngine, we need to tell it how to connect to our instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect` -function. The only argument we need to provide is the name of the MongoDB -database to use:: +function. If running locally the only argument we need to provide is the name +of the MongoDB database to use:: from mongoengine import * connect('tumblelog') -For more information about connecting to MongoDB see :ref:`guide-connecting`. +There are lots of options for connecting to MongoDB, for more information about +them see the :ref:`guide-connecting` guide. Defining our documents ====================== + MongoDB is *schemaless*, which means that no schema is enforced by the database --- we may add and remove fields however we want and MongoDB won't complain. This makes life a lot easier in many regards, especially when there is a change @@ -39,17 +46,19 @@ define utility methods on our documents in the same way that traditional In our Tumblelog application we need to store several different types of information. We will need to have a collection of **users**, so that we may -link posts to an individual. We also need to store our different types -**posts** (text, image and link) in the database. To aid navigation of our +link posts to an individual. We also need to store our different types of +**posts** (eg: text, image and link) in the database. To aid navigation of our Tumblelog, posts may have **tags** associated with them, so that the list of posts shown to the user may be limited to posts that have been assigned a -specified tag. Finally, it would be nice if **comments** could be added to -posts. We'll start with **users**, as the others are slightly more involved. +specific tag. Finally, it would be nice if **comments** could be added to +posts. 
We'll start with **users**, as the other document models are slightly +more involved. Users ----- + Just as if we were using a relational database with an ORM, we need to define -which fields a :class:`User` may have, and what their types will be:: +which fields a :class:`User` may have, and what types of data they might store:: class User(Document): email = StringField(required=True) @@ -58,11 +67,13 @@ which fields a :class:`User` may have, and what their types will be:: This looks similar to how a the structure of a table would be defined in a regular ORM. The key difference is that this schema will never be passed on to -MongoDB --- this will only be enforced at the application level. Also, the User -documents will be stored in a MongoDB *collection* rather than a table. +MongoDB --- this will only be enforced at the application level, making future +changes easy to manage. Also, the User documents will be stored in a +MongoDB *collection* rather than a table. Posts, Comments and Tags ------------------------ + Now we'll think about how to store the rest of the information. If we were using a relational database, we would most likely have a table of **posts**, a table of **comments** and a table of **tags**. To associate the comments with @@ -75,16 +86,17 @@ of them stand out as particularly intuitive solutions. Posts ^^^^^ -But MongoDB *isn't* a relational database, so we're not going to do it that + +Happily mongoDB *isn't* a relational database, so we're not going to do it that way. As it turns out, we can use MongoDB's schemaless nature to provide us with -a much nicer solution. We will store all of the posts in *one collection* --- -each post type will just have the fields it needs. If we later want to add +a much nicer solution. We will store all of the posts in *one collection* and +each post type will only store the fields it needs. 
If we later want to add video posts, we don't have to modify the collection at all, we just *start using* the new fields we need to support video posts. This fits with the Object-Oriented principle of *inheritance* nicely. We can think of :class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and :class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports -this kind of modelling out of the box - all you need do is turn on inheritance +this kind of modelling out of the box --- all you need do is turn on inheritance by setting :attr:`allow_inheritance` to True in the :attr:`meta`:: class Post(Document): @@ -109,6 +121,7 @@ when they are saved, and dereferenced when they are loaded. Tags ^^^^ + Now that we have our Post models figured out, how will we attach tags to them? MongoDB allows us to store lists of items natively, so rather than having a link table, we can just store a list of tags in each post. So, for both @@ -126,11 +139,14 @@ size of our database. So let's take a look that the code our modified The :class:`~mongoengine.ListField` object that is used to define a Post's tags takes a field object as its first argument --- this means that you can have -lists of any type of field (including lists). Note that we don't need to -modify the specialised post types as they all inherit from :class:`Post`. +lists of any type of field (including lists). + +.. note:: We don't need to modify the specialised post types as they all + inherit from :class:`Post`. Comments ^^^^^^^^ + A comment is typically associated with *one* post. In a relational database, to display a post with its comments, we would have to retrieve the post from the database, then query the database again for the comments associated with the @@ -181,15 +197,15 @@ Now that we've defined how our documents will be structured, let's start adding some documents to the database. 
Firstly, we'll need to create a :class:`User` object:: - john = User(email='jdoe@example.com', first_name='John', last_name='Doe') - john.save() + ross = User(email='ross@example.com', first_name='Ross', last_name='Lawley').save() -Note that we could have also defined our user using attribute syntax:: +.. note:: + We could have also defined our user using attribute syntax:: - john = User(email='jdoe@example.com') - john.first_name = 'John' - john.last_name = 'Doe' - john.save() + ross = User(email='ross@example.com') + ross.first_name = 'Ross' + ross.last_name = 'Lawley' + ross.save() Now that we've got our user in the database, let's add a couple of posts:: @@ -198,16 +214,17 @@ Now that we've got our user in the database, let's add a couple of posts:: post1.tags = ['mongodb', 'mongoengine'] post1.save() - post2 = LinkPost(title='MongoEngine Documentation', author=john) - post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs' + post2 = LinkPost(title='MongoEngine Documentation', author=ross) + post2.link_url = 'http://docs.mongoengine.com/' post2.tags = ['mongoengine'] post2.save() -Note that if you change a field on a object that has already been saved, then -call :meth:`save` again, the document will be updated. +.. note:: If you change a field on a object that has already been saved, then + call :meth:`save` again, the document will be updated. Accessing our data ================== + So now we've got a couple of posts in our database, how do we display them? Each document class (i.e. any class that inherits either directly or indirectly from :class:`~mongoengine.Document`) has an :attr:`objects` attribute, which is @@ -219,6 +236,7 @@ class. So let's see how we can get our posts' titles:: Retrieving type-specific information ------------------------------------ + This will print the titles of our posts, one on each line. But What if we want to access the type-specific data (link_url, content, etc.)? 
One way is simply to use the :attr:`objects` attribute of a subclass of :class:`Post`:: @@ -257,6 +275,7 @@ text post, and "Link: " if it was a link post. Searching our posts by tag -------------------------- + The :attr:`objects` attribute of a :class:`~mongoengine.Document` is actually a :class:`~mongoengine.queryset.QuerySet` object. This lazily queries the database only when you need the data. It may also be filtered to narrow down @@ -275,3 +294,9 @@ used on :class:`~mongoengine.queryset.QuerySet` objects:: num_posts = Post.objects(tags='mongodb').count() print 'Found %d posts with tag "mongodb"' % num_posts +Learning more about mongoengine +------------------------------- + +If you got this far you've made a great start, so well done! The next step on +your mongoengine journey is the `full user guide `_, where you +can learn indepth about how to use mongoengine and mongodb. \ No newline at end of file From 5271f3b4a0a75753d6e984267b177b22f67c3b5e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 23 Apr 2013 20:49:22 +0000 Subject: [PATCH 232/464] More doc updates --- docs/guide/installing.rst | 4 ++-- docs/index.rst | 45 ++++++++++++++++++++++----------------- 2 files changed, 28 insertions(+), 21 deletions(-) diff --git a/docs/guide/installing.rst b/docs/guide/installing.rst index f15d3db..e93f048 100644 --- a/docs/guide/installing.rst +++ b/docs/guide/installing.rst @@ -22,10 +22,10 @@ Alternatively, if you don't have setuptools installed, `download it from PyPi $ python setup.py install To use the bleeding-edge version of MongoEngine, you can get the source from -`GitHub `_ and install it as above: +`GitHub `_ and install it as above: .. code-block:: console - $ git clone git://github.com/hmarr/mongoengine + $ git clone git://github.com/mongoengine/mongoengine $ cd mongoengine $ python setup.py install diff --git a/docs/index.rst b/docs/index.rst index 4d0d211..4aca82d 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -10,13 +10,15 @@ MongoDB. 
To install it, simply run $ pip install -U mongoengine :doc:`tutorial` - Start here for a quick overview. + A quick tutorial building a tumblelog to get you up and running with + MongoEngine. :doc:`guide/index` - The Full guide to MongoEngine + The Full guide to MongoEngine - from modeling documents to storing files, + from querying for data to firing signals and *everything* between. :doc:`apireference` - The complete API documentation. + The complete API documentation --- the innards of documents, querysets and fields. :doc:`upgrade` How to upgrade MongoEngine. @@ -28,35 +30,40 @@ Community --------- To get help with using MongoEngine, use the `MongoEngine Users mailing list -`_ or come chat on the -`#mongoengine IRC channel `_. +`_ or the ever popular +`stackoverflow `_. Contributing ------------ -The source is available on `GitHub `_ and -contributions are always encouraged. Contributions can be as simple as -minor tweaks to this documentation. To contribute, fork the project on +**Yes please!** We are always looking for contributions, additions and improvements. + +The source is available on `GitHub `_ +and contributions are always encouraged. Contributions can be as simple as +minor tweaks to this documentation, the website or the core. + +To contribute, fork the project on `GitHub `_ and send a pull request. -Also, you can join the developers' `mailing list -`_. - Changes ------- + See the :doc:`changelog` for a full list of changes to MongoEngine and :doc:`upgrade` for upgrade information. -.. toctree:: - :hidden: +.. note:: Always read and test the `upgrade `_ documentation before + putting updates live in production **;)** - tutorial - guide/index - apireference - django - changelog - upgrade +.. 
toctree:: + :hidden: + + tutorial + guide/index + apireference + django + changelog + upgrade Indices and tables ------------------ From 9bd8b3e9a536f07020bfeb36624ac844c253f6e5 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 23 Apr 2013 20:55:37 +0000 Subject: [PATCH 233/464] Connection doc updates --- docs/guide/connecting.rst | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index dea6a3d..8674b5e 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -6,20 +6,23 @@ Connecting to MongoDB To connect to a running instance of :program:`mongod`, use the :func:`~mongoengine.connect` function. The first argument is the name of the -database to connect to. If the database does not exist, it will be created. If -the database requires authentication, :attr:`username` and :attr:`password` -arguments may be provided:: +database to connect to:: from mongoengine import connect - connect('project1', username='webapp', password='pwd123') + connect('project1') By default, MongoEngine assumes that the :program:`mongod` instance is running -on **localhost** on port **27017**. If MongoDB is running elsewhere, you may -provide :attr:`host` and :attr:`port` arguments to +on **localhost** on port **27017**. If MongoDB is running elsewhere, you should +provide the :attr:`host` and :attr:`port` arguments to :func:`~mongoengine.connect`:: connect('project1', host='192.168.1.35', port=12345) +If the database requires authentication, :attr:`username` and :attr:`password` +arguments should be provided:: + + connect('project1', username='webapp', password='pwd123') + Uri style connections are also supported as long as you include the database name - just supply the uri as the :attr:`host` to :func:`~mongoengine.connect`:: @@ -74,8 +77,8 @@ to point across databases and collections. 
Below is an example schema, using Switch Database Context Manager =============================== -Sometimes you may want to switch the database to query against for a class, -for example, you archive older data into a separate database for performance +Sometimes you may want to switch the database to query against for a class +for example, archiving older data into a separate database for performance reasons. The :class:`~mongoengine.context_managers.switch_db` context manager allows From c59ea268451246a8693c4dd0987ec9bd39cdcd96 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 24 Apr 2013 11:17:21 +0000 Subject: [PATCH 234/464] Updated docs to clarify or != | (##288) --- docs/guide/querying.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 5e250ce..60702ec 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -392,6 +392,7 @@ You can also turn off all dereferencing for a fixed period by using the Advanced queries ================ + Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword arguments can't fully express the query you want to use -- for example if you need to combine a number of constraints using *and* and *or*. This is made @@ -410,6 +411,11 @@ calling it with keyword arguments:: # Get top posts Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000)) +.. warning:: You have to use bitwise operators. You cannot use ``or``, ``and`` + to combine queries as ``Q(a=a) or Q(b=b)`` is not the same as + ``Q(a=a) | Q(b=b)``. As ``Q(a=a)`` equates to true ``Q(a=a) or Q(b=b)`` is + the same as ``Q(a=a)``. + .. 
_guide-atomic-updates: Atomic updates From c60ea4082807a35412e936d5cd6049cedae9e164 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 24 Apr 2013 12:14:34 +0000 Subject: [PATCH 235/464] ReferenceField now store ObjectId's by default rather than DBRef (#290) --- docs/changelog.rst | 1 + docs/upgrade.rst | 29 +++++++++++ mongoengine/fields.py | 9 +--- tests/all_warnings/__init__.py | 22 -------- tests/migration/__init__.py | 4 +- ...py => convert_to_new_inheritance_model.py} | 2 +- .../refrencefield_dbref_to_object_id.py | 52 +++++++++++++++++++ tests/test_signals.py | 5 +- 8 files changed, 92 insertions(+), 32 deletions(-) rename tests/migration/{test_convert_to_new_inheritance_model.py => convert_to_new_inheritance_model.py} (97%) create mode 100644 tests/migration/refrencefield_dbref_to_object_id.py diff --git a/docs/changelog.rst b/docs/changelog.rst index 49ee0ec..7b51a79 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- ReferenceField now store ObjectId's by default rather than DBRef (#290) - Added ImageField support for inline replacements (#86) - Added SequenceField.set_next_value(value) helper (#159) - Updated .only() behaviour - now like exclude it is chainable (#202) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 564b7f6..eec9d62 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -76,6 +76,35 @@ the case and the data is set only in the ``document._data`` dictionary: :: File "", line 1, in AttributeError: 'Animal' object has no attribute 'size' +ReferenceField +-------------- + +ReferenceFields now store ObjectId's by default - this is more efficient than +DBRefs as we already know what Document types they reference. 
+ + # Old code + class Animal(Document): + name = ReferenceField('self') + + # New code to keep dbrefs + class Animal(Document): + name = ReferenceField('self', dbref=True) + +To migrate all the references you need to touch each object and mark it as dirty +eg:: + + # Doc definition + class Person(Document): + name = StringField() + parent = ReferenceField('self') + friends = ListField(ReferenceField('self')) + + # Mark all ReferenceFields as dirty and save + for p in Person.objects: + p._mark_as_dirty('parent') + p._mark_as_dirty('friends') + p.save() + Querysets ========= diff --git a/mongoengine/fields.py b/mongoengine/fields.py index e23b90a..979699c 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -772,7 +772,7 @@ class ReferenceField(BaseField): .. versionchanged:: 0.5 added `reverse_delete_rule` """ - def __init__(self, document_type, dbref=None, + def __init__(self, document_type, dbref=False, reverse_delete_rule=DO_NOTHING, **kwargs): """Initialises the Reference Field. 
@@ -786,12 +786,7 @@ class ReferenceField(BaseField): self.error('Argument to ReferenceField constructor must be a ' 'document class or a string') - if dbref is None: - msg = ("ReferenceFields will default to using ObjectId " - "in 0.8, set DBRef=True if this isn't desired") - warnings.warn(msg, FutureWarning) - - self.dbref = dbref if dbref is not None else True # To change in 0.8 + self.dbref = dbref self.document_type_obj = document_type self.reverse_delete_rule = reverse_delete_rule super(ReferenceField, self).__init__(**kwargs) diff --git a/tests/all_warnings/__init__.py b/tests/all_warnings/__init__.py index 74533de..d74d39e 100644 --- a/tests/all_warnings/__init__.py +++ b/tests/all_warnings/__init__.py @@ -30,28 +30,6 @@ class AllWarnings(unittest.TestCase): # restore default handling of warnings warnings.showwarning = self.showwarning_default - def test_dbref_reference_field_future_warning(self): - - class Person(Document): - name = StringField() - parent = ReferenceField('self') - - Person.drop_collection() - - p1 = Person() - p1.parent = None - p1.save() - - p2 = Person(name="Wilson Jr") - p2.parent = p1 - p2.save(cascade=False) - - self.assertTrue(len(self.warning_list) > 0) - warning = self.warning_list[0] - self.assertEqual(FutureWarning, warning["category"]) - self.assertTrue("ReferenceFields will default to using ObjectId" - in str(warning["message"])) - def test_document_save_cascade_future_warning(self): class Person(Document): diff --git a/tests/migration/__init__.py b/tests/migration/__init__.py index 882e737..f7ad674 100644 --- a/tests/migration/__init__.py +++ b/tests/migration/__init__.py @@ -1,4 +1,6 @@ +from convert_to_new_inheritance_model import * +from refrencefield_dbref_to_object_id import * from turn_off_inheritance import * if __name__ == '__main__': - unittest.main() \ No newline at end of file + unittest.main() diff --git a/tests/migration/test_convert_to_new_inheritance_model.py 
b/tests/migration/convert_to_new_inheritance_model.py similarity index 97% rename from tests/migration/test_convert_to_new_inheritance_model.py rename to tests/migration/convert_to_new_inheritance_model.py index d4337bf..89ee9e9 100644 --- a/tests/migration/test_convert_to_new_inheritance_model.py +++ b/tests/migration/convert_to_new_inheritance_model.py @@ -38,7 +38,7 @@ class ConvertToNewInheritanceModel(unittest.TestCase): # 3. Confirm extra data is removed count = collection.find({'_types': {"$exists": True}}).count() - assert count == 0 + self.assertEqual(0, count) # 4. Remove indexes info = collection.index_information() diff --git a/tests/migration/refrencefield_dbref_to_object_id.py b/tests/migration/refrencefield_dbref_to_object_id.py new file mode 100644 index 0000000..d3acbe9 --- /dev/null +++ b/tests/migration/refrencefield_dbref_to_object_id.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +import unittest + +from mongoengine import Document, connect +from mongoengine.connection import get_db +from mongoengine.fields import StringField, ReferenceField, ListField + +__all__ = ('ConvertToObjectIdsModel', ) + + +class ConvertToObjectIdsModel(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + self.db = get_db() + + def test_how_to_convert_to_object_id_reference_fields(self): + """Demonstrates migrating from 0.7 to 0.8 + """ + + # 1. Old definition - using dbrefs + class Person(Document): + name = StringField() + parent = ReferenceField('self', dbref=True) + friends = ListField(ReferenceField('self', dbref=True)) + + Person.drop_collection() + + p1 = Person(name="Wilson", parent=None).save() + f1 = Person(name="John", parent=None).save() + f2 = Person(name="Paul", parent=None).save() + f3 = Person(name="George", parent=None).save() + f4 = Person(name="Ringo", parent=None).save() + Person(name="Wilson Jr", parent=p1, friends=[f1, f2, f3, f4]).save() + + # 2. 
Start the migration by changing the schema + # Change ReferenceField as now dbref defaults to False + class Person(Document): + name = StringField() + parent = ReferenceField('self') + friends = ListField(ReferenceField('self')) + + # 3. Loop all the objects and mark parent as changed + for p in Person.objects: + p._mark_as_changed('parent') + p._mark_as_changed('friends') + p.save() + + # 4. Confirmation of the fix! + wilson = Person.objects(name="Wilson Jr").as_pymongo()[0] + self.assertEqual(p1.id, wilson['parent']) + self.assertEqual([f1.id, f2.id, f3.id, f4.id], wilson['friends']) diff --git a/tests/test_signals.py b/tests/test_signals.py index fc638cf..32517dd 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -72,6 +72,7 @@ class SignalTests(unittest.TestCase): else: signal_output.append('Not loaded') self.Author = Author + Author.drop_collection() class Another(Document): name = StringField() @@ -110,6 +111,7 @@ class SignalTests(unittest.TestCase): signal_output.append('post_delete Another signal, %s' % document) self.Another = Another + Another.drop_collection() class ExplicitId(Document): id = IntField(primary_key=True) @@ -123,7 +125,8 @@ class SignalTests(unittest.TestCase): signal_output.append('Is updated') self.ExplicitId = ExplicitId - self.ExplicitId.objects.delete() + ExplicitId.drop_collection() + # Save up the number of connected signals so that we can check at the # end that all the signals we register get properly unregistered self.pre_signals = ( From fe62c3aacb0f9a3e06f96f758bf1e3496f753d7d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 25 Apr 2013 10:24:33 +0000 Subject: [PATCH 236/464] Cascading saves now default to off (#291) --- docs/changelog.rst | 1 + docs/upgrade.rst | 31 ++++++++++++++++++++++++++++--- mongoengine/document.py | 9 ++------- tests/all_warnings/__init__.py | 30 +----------------------------- tests/document/instance.py | 27 ++++++++++++++++++++++++++- 5 files changed, 58 insertions(+), 40 
deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 7b51a79..6c19933 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- Cascading saves now default to off (#291) - ReferenceField now store ObjectId's by default rather than DBRef (#290) - Added ImageField support for inline replacements (#86) - Added SequenceField.set_next_value(value) helper (#159) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index eec9d62..86d9f9d 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -5,11 +5,21 @@ Upgrading 0.7 to 0.8 ********** -Inheritance -=========== +There have been numerous backwards breaking changes in 0.8. The reasons for +these are to ensure that MongoEngine has sane defaults going forward and +performs the best it can out of the box. Where possible there have been +FutureWarnings to help get you ready for the change, but that hasn't been +possible for the whole of the release. + +.. warning:: Breaking changes - test upgrading on a test system before putting +live. There maybe multiple manual steps in migrating and these are best honed +on a staging / test system. Data Model ----------- +========== + +Inheritance +----------- The inheritance model has changed, we no longer need to store an array of :attr:`types` with the model we can just use the classname in :attr:`_cls`. @@ -105,6 +115,21 @@ eg:: p._mark_as_dirty('friends') p.save() + +Cascading Saves +--------------- +To improve performance document saves will no longer automatically cascade. 
+Any changes to a Documents references will either have to be saved manually or +you will have to explicitly tell it to cascade on save:: + + # At the class level: + class Person(Document): + meta = {'cascade': True} + + # Or on save: + my_document.save(cascade=True) + + Querysets ========= diff --git a/mongoengine/document.py b/mongoengine/document.py index 54b55df..d0cafa3 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -244,7 +244,6 @@ class Document(BaseDocument): upsert=upsert, **write_concern) created = is_new_object(last_error) - warn_cascade = not cascade and 'cascade' not in self._meta cascade = (self._meta.get('cascade', True) if cascade is None else cascade) if cascade: @@ -257,7 +256,7 @@ class Document(BaseDocument): if cascade_kwargs: # Allow granular control over cascades kwargs.update(cascade_kwargs) kwargs['_refs'] = _refs - self.cascade_save(warn_cascade=warn_cascade, **kwargs) + self.cascade_save(**kwargs) except pymongo.errors.OperationFailure, err: message = 'Could not save document (%s)' @@ -276,7 +275,7 @@ class Document(BaseDocument): signals.post_save.send(self.__class__, document=self, created=created) return self - def cascade_save(self, warn_cascade=None, *args, **kwargs): + def cascade_save(self, *args, **kwargs): """Recursively saves any references / generic references on an objects""" import fields @@ -296,10 +295,6 @@ class Document(BaseDocument): ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) if ref and ref_id not in _refs: - if warn_cascade: - msg = ("Cascading saves will default to off in 0.8, " - "please explicitly set `.save(cascade=True)`") - warnings.warn(msg, FutureWarning) _refs.append(ref_id) kwargs["_refs"] = _refs ref.save(**kwargs) diff --git a/tests/all_warnings/__init__.py b/tests/all_warnings/__init__.py index d74d39e..53ce638 100644 --- a/tests/all_warnings/__init__.py +++ b/tests/all_warnings/__init__.py @@ -17,7 +17,7 @@ __all__ = ('AllWarnings', ) class 
AllWarnings(unittest.TestCase): def setUp(self): - conn = connect(db='mongoenginetest') + connect(db='mongoenginetest') self.warning_list = [] self.showwarning_default = warnings.showwarning warnings.showwarning = self.append_to_warning_list @@ -30,31 +30,6 @@ class AllWarnings(unittest.TestCase): # restore default handling of warnings warnings.showwarning = self.showwarning_default - def test_document_save_cascade_future_warning(self): - - class Person(Document): - name = StringField() - parent = ReferenceField('self') - - Person.drop_collection() - - p1 = Person(name="Wilson Snr") - p1.parent = None - p1.save() - - p2 = Person(name="Wilson Jr") - p2.parent = p1 - p2.parent.name = "Poppa Wilson" - p2.save() - - self.assertTrue(len(self.warning_list) > 0) - if len(self.warning_list) > 1: - print self.warning_list - warning = self.warning_list[0] - self.assertEqual(FutureWarning, warning["category"]) - self.assertTrue("Cascading saves will default to off in 0.8" - in str(warning["message"])) - def test_document_collection_syntax_warning(self): class NonAbstractBase(Document): @@ -67,6 +42,3 @@ class AllWarnings(unittest.TestCase): self.assertEqual(SyntaxWarning, warning["category"]) self.assertEqual('non_abstract_base', InheritedDocumentFailTest._get_collection_name()) - -import sys -sys.path[0:0] = [""] diff --git a/tests/document/instance.py b/tests/document/instance.py index 5513ed8..a75cc4d 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -678,7 +678,7 @@ class InstanceTest(unittest.TestCase): p1.reload() self.assertEqual(p1.name, p.parent.name) - def test_save_cascade_meta(self): + def test_save_cascade_meta_false(self): class Person(Document): name = StringField() @@ -707,6 +707,31 @@ class InstanceTest(unittest.TestCase): p1.reload() self.assertEqual(p1.name, p.parent.name) + def test_save_cascade_meta_true(self): + + class Person(Document): + name = StringField() + parent = ReferenceField('self') + + meta = {'cascade': False} + + 
Person.drop_collection() + + p1 = Person(name="Wilson Snr") + p1.parent = None + p1.save() + + p2 = Person(name="Wilson Jr") + p2.parent = p1 + p2.save(cascade=True) + + p = Person.objects(name="Wilson Jr").get() + p.parent.name = "Daddy Wilson" + p.save() + + p1.reload() + self.assertNotEqual(p1.name, p.parent.name) + def test_save_cascades_generically(self): class Person(Document): From cb9166aba41b045d6868bb381d75517b05db8758 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 25 Apr 2013 11:04:33 +0000 Subject: [PATCH 237/464] Auth cleanups - removed duplicates --- mongoengine/django/auth.py | 162 ++++++++++++++++--------------------- 1 file changed, 68 insertions(+), 94 deletions(-) diff --git a/mongoengine/django/auth.py b/mongoengine/django/auth.py index d22f086..6582244 100644 --- a/mongoengine/django/auth.py +++ b/mongoengine/django/auth.py @@ -1,8 +1,7 @@ from mongoengine import * from django.utils.encoding import smart_str -from django.contrib.auth.models import _user_get_all_permissions -from django.contrib.auth.models import _user_has_perm +from django.contrib.auth.models import _user_has_perm, _user_get_all_permissions, _user_has_module_perms from django.db import models from django.contrib.contenttypes.models import ContentTypeManager from django.contrib import auth @@ -38,11 +37,12 @@ from .utils import datetime_now REDIRECT_FIELD_NAME = 'next' + class ContentType(Document): name = StringField(max_length=100) app_label = StringField(max_length=100) model = StringField(max_length=100, verbose_name=_('python model class name'), - unique_with='app_label') + unique_with='app_label') objects = ContentTypeManager() class Meta: @@ -72,9 +72,11 @@ class ContentType(Document): def natural_key(self): return (self.app_label, self.model) + class SiteProfileNotAvailable(Exception): pass + class PermissionManager(models.Manager): def get_by_natural_key(self, codename, app_label, model): return self.get( @@ -82,18 +84,28 @@ class 
PermissionManager(models.Manager): content_type=ContentType.objects.get_by_natural_key(app_label, model) ) + class Permission(Document): - """The permissions system provides a way to assign permissions to specific users and groups of users. + """The permissions system provides a way to assign permissions to specific + users and groups of users. - The permission system is used by the Django admin site, but may also be useful in your own code. The Django admin site uses permissions as follows: + The permission system is used by the Django admin site, but may also be + useful in your own code. The Django admin site uses permissions as follows: - - The "add" permission limits the user's ability to view the "add" form and add an object. - - The "change" permission limits a user's ability to view the change list, view the "change" form and change an object. + - The "add" permission limits the user's ability to view the "add" + form and add an object. + - The "change" permission limits a user's ability to view the change + list, view the "change" form and change an object. - The "delete" permission limits the ability to delete an object. - Permissions are set globally per type of object, not per specific object instance. It is possible to say "Mary may change news stories," but it's not currently possible to say "Mary may change news stories, but only the ones she created herself" or "Mary may only change news stories that have a certain status or publication date." + Permissions are set globally per type of object, not per specific object + instance. It is possible to say "Mary may change news stories," but it's + not currently possible to say "Mary may change news stories, but only the + ones she created herself" or "Mary may only change news stories that have + a certain status or publication date." - Three basic permissions -- add, change and delete -- are automatically created for each Django model. 
+ Three basic permissions -- add, change and delete -- are automatically + created for each Django model. """ name = StringField(max_length=50, verbose_name=_('username')) content_type = ReferenceField(ContentType) @@ -119,12 +131,22 @@ class Permission(Document): return (self.codename,) + self.content_type.natural_key() natural_key.dependencies = ['contenttypes.contenttype'] + class Group(Document): - """Groups are a generic way of categorizing users to apply permissions, or some other label, to those users. A user can belong to any number of groups. + """Groups are a generic way of categorizing users to apply permissions, + or some other label, to those users. A user can belong to any number of + groups. - A user in a group automatically has all the permissions granted to that group. For example, if the group Site editors has the permission can_edit_home_page, any user in that group will have that permission. + A user in a group automatically has all the permissions granted to that + group. For example, if the group Site editors has the permission + can_edit_home_page, any user in that group will have that permission. - Beyond permissions, groups are a convenient way to categorize users to apply some label, or extended functionality, to them. For example, you could create a group 'Special users', and you could write code that would do special things to those users -- such as giving them access to a members-only portion of your site, or sending them members-only e-mail messages. + Beyond permissions, groups are a convenient way to categorize users to + apply some label, or extended functionality, to them. For example, you + could create a group 'Special users', and you could write code that would + do special things to those users -- such as giving them access to a + members-only portion of your site, or sending them members-only + e-mail messages. 
""" name = StringField(max_length=80, unique=True, verbose_name=_('name')) # permissions = models.ManyToManyField(Permission, verbose_name=_('permissions'), blank=True) @@ -137,6 +159,7 @@ class Group(Document): def __unicode__(self): return self.name + class UserManager(models.Manager): def create_user(self, username, email, password=None): """ @@ -154,8 +177,8 @@ class UserManager(models.Manager): email = '@'.join([email_name, domain_part.lower()]) user = self.model(username=username, email=email, is_staff=False, - is_active=True, is_superuser=False, last_login=now, - date_joined=now) + is_active=True, is_superuser=False, last_login=now, + date_joined=now) user.set_password(password) user.save(using=self._db) @@ -177,7 +200,6 @@ class UserManager(models.Manager): return ''.join([choice(allowed_chars) for i in range(length)]) - class User(Document): """A User document that aims to mirror most of the API specified by Django at http://docs.djangoproject.com/en/dev/topics/auth/#users @@ -248,25 +270,6 @@ class User(Document): """ return check_password(raw_password, self.password) - def get_all_permissions(self, obj=None): - return _user_get_all_permissions(self, obj) - - def has_perm(self, perm, obj=None): - """ - Returns True if the user has the specified permission. This method - queries all available auth backends, but returns immediately if any - backend returns True. Thus, a user who has permission from a single - auth backend is assumed to have permission in general. If an object is - provided, permissions for this specific object are checked. - """ - - # Active superusers have all permissions. - if self.is_active and self.is_superuser: - return True - - # Otherwise we need to check the backends. 
- return _user_has_perm(self, perm, obj) - @classmethod def create_user(cls, username, password, email=None): """Create (and save) a new user with the given username, password and @@ -289,68 +292,47 @@ class User(Document): user.save() return user - def get_all_permissions(self, obj=None): + def get_group_permissions(self, obj=None): + """ + Returns a list of permission strings that this user has through his/her + groups. This method queries all available auth backends. If an object + is passed in, only permissions matching this object are returned. + """ permissions = set() - anon = self.is_anonymous() for backend in auth.get_backends(): - if not anon or backend.supports_anonymous_user: - if hasattr(backend, "get_all_permissions"): - if obj is not None: - if backend.supports_object_permissions: - permissions.update( - backend.get_all_permissions(user, obj) - ) - else: - permissions.update(backend.get_all_permissions(self)) + if hasattr(backend, "get_group_permissions"): + permissions.update(backend.get_group_permissions(self, obj)) return permissions - def get_and_delete_messages(self): - return [] + def get_all_permissions(self, obj=None): + return _user_get_all_permissions(self, obj) def has_perm(self, perm, obj=None): - anon = self.is_anonymous() - active = self.is_active - for backend in auth.get_backends(): - if (not active and not anon and backend.supports_inactive_user) or \ - (not anon or backend.supports_anonymous_user): - if hasattr(backend, "has_perm"): - if obj is not None: - if (backend.supports_object_permissions and - backend.has_perm(self, perm, obj)): - return True - else: - if backend.has_perm(self, perm): - return True - return False + """ + Returns True if the user has the specified permission. This method + queries all available auth backends, but returns immediately if any + backend returns True. Thus, a user who has permission from a single + auth backend is assumed to have permission in general. 
If an object is + provided, permissions for this specific object are checked. + """ - def has_perms(self, perm_list, obj=None): - """ - Returns True if the user has each of the specified permissions. - If object is passed, it checks if the user has all required perms - for this object. - """ - for perm in perm_list: - if not self.has_perm(perm, obj): - return False - return True + # Active superusers have all permissions. + if self.is_active and self.is_superuser: + return True + + # Otherwise we need to check the backends. + return _user_has_perm(self, perm, obj) def has_module_perms(self, app_label): - anon = self.is_anonymous() - active = self.is_active - for backend in auth.get_backends(): - if (not active and not anon and backend.supports_inactive_user) or \ - (not anon or backend.supports_anonymous_user): - if hasattr(backend, "has_module_perms"): - if backend.has_module_perms(self, app_label): - return True - return False + """ + Returns True if the user has any permissions in the given app label. + Uses pretty much the same logic as has_perm, above. + """ + # Active superusers have all permissions. + if self.is_active and self.is_superuser: + return True - def get_and_delete_messages(self): - messages = [] - for m in self.message_set.all(): - messages.append(m.message) - m.delete() - return messages + return _user_has_module_perms(self, app_label) def email_user(self, subject, message, from_email=None): "Sends an e-mail to this User." @@ -386,14 +368,6 @@ class User(Document): raise SiteProfileNotAvailable return self._profile_cache - def _get_message_set(self): - import warnings - warnings.warn('The user messaging API is deprecated. Please update' - ' your code to use the new messages framework.', - category=DeprecationWarning) - return self._message_set - message_set = property(_get_message_set) - class MongoEngineBackend(object): """Authenticate using MongoEngine and mongoengine.django.auth.User. 
From df4dc3492cf9cf8613eb13f8fedf78224fa70a37 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 25 Apr 2013 11:41:01 +0000 Subject: [PATCH 238/464] Upgrade changelog, docs and django/auth.py --- AUTHORS | 1 - docs/changelog.rst | 1 + docs/upgrade.rst | 6 +++--- mongoengine/django/auth.py | 5 ++--- 4 files changed, 6 insertions(+), 7 deletions(-) diff --git a/AUTHORS b/AUTHORS index e388a04..44e19bf 100644 --- a/AUTHORS +++ b/AUTHORS @@ -157,4 +157,3 @@ that much better: * Kenneth Falck * Lukasz Balcerzak * Nicolas Cortot - diff --git a/docs/changelog.rst b/docs/changelog.rst index 6c19933..bd3821f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- Added Custom User Model for Django 1.5 (#285) - Cascading saves now default to off (#291) - ReferenceField now store ObjectId's by default rather than DBRef (#290) - Added ImageField support for inline replacements (#86) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 86d9f9d..738a949 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -12,8 +12,8 @@ FutureWarnings to help get you ready for the change, but that hasn't been possible for the whole of the release. .. warning:: Breaking changes - test upgrading on a test system before putting -live. There maybe multiple manual steps in migrating and these are best honed -on a staging / test system. + live. There maybe multiple manual steps in migrating and these are best honed + on a staging / test system. Data Model ========== @@ -90,7 +90,7 @@ ReferenceField -------------- ReferenceFields now store ObjectId's by default - this is more efficient than -DBRefs as we already know what Document types they reference. 
+DBRefs as we already know what Document types they reference:: # Old code class Animal(Document): diff --git a/mongoengine/django/auth.py b/mongoengine/django/auth.py index 8fcbca9..cff4b74 100644 --- a/mongoengine/django/auth.py +++ b/mongoengine/django/auth.py @@ -108,7 +108,7 @@ class Permission(Document): created for each Django model. """ name = StringField(max_length=50, verbose_name=_('username')) - content_type = ReferenceField(ContentType, dbref=True) + content_type = ReferenceField(ContentType) codename = StringField(max_length=100, verbose_name=_('codename')) # FIXME: don't access field of the other class # unique_with=['content_type__app_label', 'content_type__model']) @@ -149,8 +149,7 @@ class Group(Document): e-mail messages. """ name = StringField(max_length=80, unique=True, verbose_name=_('name')) - # permissions = models.ManyToManyField(Permission, verbose_name=_('permissions'), blank=True) - permissions = ListField(ReferenceField(Permission, verbose_name=_('permissions'), required=False, dbref=True)) + permissions = ListField(ReferenceField(Permission, verbose_name=_('permissions'), required=False)) class Meta: verbose_name = _('group') From 3fc5dc852335317ae024cac81ae448d985ef9764 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 25 Apr 2013 11:46:18 +0000 Subject: [PATCH 239/464] Testing if travis 2.6 is >= 2.6.6 --- .travis.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index c5fe62e..e78bda5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,12 +12,6 @@ env: - PYMONGO=2.5 DJANGO=1.5.1 - PYMONGO=2.5 DJANGO=1.4.2 - PYMONGO=2.4.2 DJANGO=1.4.2 -matrix: - exclude: - - python: "2.6" - env: PYMONGO=dev DJANGO=1.5.1 - - python: "2.6" - env: PYMONGO=2.5 DJANGO=1.5.1 install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi From bafdf0381adbfa2f9a626d5d0ad4720366af3a03 Mon Sep 17 
00:00:00 2001 From: Ross Lawley Date: Thu, 25 Apr 2013 11:59:56 +0000 Subject: [PATCH 240/464] Updates --- .travis.yml | 6 ++++++ docs/upgrade.rst | 5 +++++ 2 files changed, 11 insertions(+) diff --git a/.travis.yml b/.travis.yml index e78bda5..c5fe62e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,6 +12,12 @@ env: - PYMONGO=2.5 DJANGO=1.5.1 - PYMONGO=2.5 DJANGO=1.4.2 - PYMONGO=2.4.2 DJANGO=1.4.2 +matrix: + exclude: + - python: "2.6" + env: PYMONGO=dev DJANGO=1.5.1 + - python: "2.6" + env: PYMONGO=2.5 DJANGO=1.5.1 install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 738a949..6138bb4 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -15,6 +15,11 @@ possible for the whole of the release. live. There maybe multiple manual steps in migrating and these are best honed on a staging / test system. +Python +======= + +Support for python 2.5 has been dropped. 
+ Data Model ========== From f7bc58a767c80495ca46bd05a8b2f04aeaae462e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 25 Apr 2013 12:03:44 +0000 Subject: [PATCH 241/464] Added assertIn / assertNotIn for python 2.6 --- .travis.yml | 6 ------ tests/test_django.py | 8 +++++++- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index c5fe62e..e78bda5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,12 +12,6 @@ env: - PYMONGO=2.5 DJANGO=1.5.1 - PYMONGO=2.5 DJANGO=1.4.2 - PYMONGO=2.4.2 DJANGO=1.4.2 -matrix: - exclude: - - python: "2.6" - env: PYMONGO=dev DJANGO=1.5.1 - - python: "2.6" - env: PYMONGO=2.5 DJANGO=1.5.1 install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi diff --git a/tests/test_django.py b/tests/test_django.py index 01a105a..573c072 100644 --- a/tests/test_django.py +++ b/tests/test_django.py @@ -178,6 +178,12 @@ class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): MongoSession.drop_collection() super(MongoDBSessionTest, self).setUp() + def assertIn(self, first, second, msg=None): + self.assertTrue(first in second, msg) + + def assertNotIn(self, first, second, msg=None): + self.assertFalse(first in second, msg) + def test_first_save(self): session = SessionStore() session['test'] = True @@ -188,7 +194,7 @@ class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): activate_timezone(FixedOffset(60, 'UTC+1')) # create and save new session session = SessionStore() - session.set_expiry(600) # expire in 600 seconds + session.set_expiry(600) # expire in 600 seconds session['test_expire'] = True session.save() # reload session with key From d0d9c3ea26ba7f053969fd84518477a4e6544be9 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 25 Apr 2013 12:21:25 +0000 Subject: [PATCH 242/464] Test to ensure that pickled complex fields work with save() (#228) --- 
tests/document/instance.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/document/instance.py b/tests/document/instance.py index a75cc4d..b800d90 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -1694,11 +1694,19 @@ class InstanceTest(unittest.TestCase): self.assertEqual(resurrected, pickle_doc) + # Test pickling changed data + pickle_doc.lists.append("3") + pickled_doc = pickle.dumps(pickle_doc) + resurrected = pickle.loads(pickled_doc) + + self.assertEqual(resurrected, pickle_doc) resurrected.string = "Two" resurrected.save() - pickle_doc = pickle_doc.reload() + pickle_doc = PickleTest.objects.first() self.assertEqual(resurrected, pickle_doc) + self.assertEqual(pickle_doc.string, "Two") + self.assertEqual(pickle_doc.lists, ["1", "2", "3"]) def test_throw_invalid_document_error(self): From ac6e793bbe8e907b7f1469a18709e18d045306a2 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 25 Apr 2013 13:43:56 +0000 Subject: [PATCH 243/464] UUIDField now stores as a binary by default (#292) --- docs/changelog.rst | 1 + docs/upgrade.rst | 24 ++ mongoengine/fields.py | 12 +- tests/__init__.py | 4 +- tests/document/delta.py | 47 ++-- tests/fields/fields.py | 305 +------------------------ tests/migration/__init__.py | 1 + tests/migration/uuidfield_to_binary.py | 48 ++++ 8 files changed, 109 insertions(+), 333 deletions(-) create mode 100644 tests/migration/uuidfield_to_binary.py diff --git a/docs/changelog.rst b/docs/changelog.rst index bd3821f..9ea4ad5 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- UUIDField now stores as a binary by default (#292) - Added Custom User Model for Django 1.5 (#285) - Cascading saves now default to off (#291) - ReferenceField now store ObjectId's by default rather than DBRef (#290) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 6138bb4..bcdc110 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ 
-120,6 +120,30 @@ eg:: p._mark_as_dirty('friends') p.save() +UUIDField +--------- + +UUIDFields now default to storing binary values:: + + # Old code + class Animal(Document): + uuid = UUIDField() + + # New code + class Animal(Document): + uuid = UUIDField(binary=False) + +To migrate all the UUIDs you need to touch each object and mark it as dirty +eg:: + + # Doc definition + class Animal(Document): + uuid = UUIDField() + + # Mark all UUIDFields as dirty and save + for a in Animal.objects: + a._mark_as_dirty('uuid') + a.save() Cascading Saves --------------- diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 979699c..bea827c 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1474,19 +1474,15 @@ class UUIDField(BaseField): """ _binary = None - def __init__(self, binary=None, **kwargs): + def __init__(self, binary=True, **kwargs): """ Store UUID data in the database - :param binary: (optional) boolean store as binary. + :param binary: if False store as a string. + .. versionchanged:: 0.8.0 .. 
versionchanged:: 0.6.19 """ - if binary is None: - binary = False - msg = ("UUIDFields will soon default to store as binary, please " - "configure binary=False if you wish to store as a string") - warnings.warn(msg, FutureWarning) self._binary = binary super(UUIDField, self).__init__(**kwargs) @@ -1504,6 +1500,8 @@ class UUIDField(BaseField): def to_mongo(self, value): if not self._binary: return unicode(value) + elif isinstance(value, basestring): + return uuid.UUID(value) return value def prepare_query_value(self, op, value): diff --git a/tests/__init__.py b/tests/__init__.py index 152a8ce..b24df5d 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,3 +1,5 @@ from all_warnings import AllWarnings from document import * -from queryset import * \ No newline at end of file +from queryset import * +from fields import * +from migration import * diff --git a/tests/document/delta.py b/tests/document/delta.py index c6191d9..16ab609 100644 --- a/tests/document/delta.py +++ b/tests/document/delta.py @@ -129,14 +129,14 @@ class DeltaTest(unittest.TestCase): } self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) self.assertEqual(doc._delta(), - ({'embedded_field': embedded_delta}, {})) + ({'embedded_field': embedded_delta}, {})) doc.save() doc = doc.reload(10) doc.embedded_field.dict_field = {} self.assertEqual(doc._get_changed_fields(), - ['embedded_field.dict_field']) + ['embedded_field.dict_field']) self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1})) self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1})) doc.save() @@ -145,7 +145,7 @@ class DeltaTest(unittest.TestCase): doc.embedded_field.list_field = [] self.assertEqual(doc._get_changed_fields(), - ['embedded_field.list_field']) + ['embedded_field.list_field']) self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1})) self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1})) doc.save() @@ -160,7 +160,7 @@ class 
DeltaTest(unittest.TestCase): doc.embedded_field.list_field = ['1', 2, embedded_2] self.assertEqual(doc._get_changed_fields(), - ['embedded_field.list_field']) + ['embedded_field.list_field']) self.assertEqual(doc.embedded_field._delta(), ({ 'list_field': ['1', 2, { @@ -192,11 +192,11 @@ class DeltaTest(unittest.TestCase): doc.embedded_field.list_field[2].string_field = 'world' self.assertEqual(doc._get_changed_fields(), - ['embedded_field.list_field.2.string_field']) + ['embedded_field.list_field.2.string_field']) self.assertEqual(doc.embedded_field._delta(), - ({'list_field.2.string_field': 'world'}, {})) + ({'list_field.2.string_field': 'world'}, {})) self.assertEqual(doc._delta(), - ({'embedded_field.list_field.2.string_field': 'world'}, {})) + ({'embedded_field.list_field.2.string_field': 'world'}, {})) doc.save() doc = doc.reload(10) self.assertEqual(doc.embedded_field.list_field[2].string_field, @@ -206,7 +206,7 @@ class DeltaTest(unittest.TestCase): doc.embedded_field.list_field[2].string_field = 'hello world' doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] self.assertEqual(doc._get_changed_fields(), - ['embedded_field.list_field']) + ['embedded_field.list_field']) self.assertEqual(doc.embedded_field._delta(), ({ 'list_field': ['1', 2, { '_cls': 'Embedded', @@ -225,40 +225,40 @@ class DeltaTest(unittest.TestCase): doc.save() doc = doc.reload(10) self.assertEqual(doc.embedded_field.list_field[2].string_field, - 'hello world') + 'hello world') # Test list native methods doc.embedded_field.list_field[2].list_field.pop(0) self.assertEqual(doc._delta(), - ({'embedded_field.list_field.2.list_field': - [2, {'hello': 'world'}]}, {})) + ({'embedded_field.list_field.2.list_field': + [2, {'hello': 'world'}]}, {})) doc.save() doc = doc.reload(10) doc.embedded_field.list_field[2].list_field.append(1) self.assertEqual(doc._delta(), - ({'embedded_field.list_field.2.list_field': - [2, {'hello': 'world'}, 1]}, {})) + 
({'embedded_field.list_field.2.list_field': + [2, {'hello': 'world'}, 1]}, {})) doc.save() doc = doc.reload(10) self.assertEqual(doc.embedded_field.list_field[2].list_field, - [2, {'hello': 'world'}, 1]) + [2, {'hello': 'world'}, 1]) doc.embedded_field.list_field[2].list_field.sort(key=str) doc.save() doc = doc.reload(10) self.assertEqual(doc.embedded_field.list_field[2].list_field, - [1, 2, {'hello': 'world'}]) + [1, 2, {'hello': 'world'}]) del(doc.embedded_field.list_field[2].list_field[2]['hello']) self.assertEqual(doc._delta(), - ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {})) + ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {})) doc.save() doc = doc.reload(10) del(doc.embedded_field.list_field[2].list_field) self.assertEqual(doc._delta(), - ({}, {'embedded_field.list_field.2.list_field': 1})) + ({}, {'embedded_field.list_field.2.list_field': 1})) doc.save() doc = doc.reload(10) @@ -269,9 +269,9 @@ class DeltaTest(unittest.TestCase): doc.dict_field['Embedded'].string_field = 'Hello World' self.assertEqual(doc._get_changed_fields(), - ['dict_field.Embedded.string_field']) + ['dict_field.Embedded.string_field']) self.assertEqual(doc._delta(), - ({'dict_field.Embedded.string_field': 'Hello World'}, {})) + ({'dict_field.Embedded.string_field': 'Hello World'}, {})) def test_circular_reference_deltas(self): self.circular_reference_deltas(Document, Document) @@ -289,10 +289,11 @@ class DeltaTest(unittest.TestCase): name = StringField() owner = ReferenceField('Person') - person = Person(name="owner") - person.save() - organization = Organization(name="company") - organization.save() + Person.drop_collection() + Organization.drop_collection() + + person = Person(name="owner").save() + organization = Organization(name="company").save() person.owns.append(organization) organization.owner = person diff --git a/tests/fields/fields.py b/tests/fields/fields.py index ade44b8..7eae3f4 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ 
-354,7 +354,6 @@ class FieldTest(unittest.TestCase): person.api_key = api_key self.assertRaises(ValidationError, person.validate) - def test_datetime_validation(self): """Ensure that invalid values cannot be assigned to datetime fields. """ @@ -1805,304 +1804,6 @@ class FieldTest(unittest.TestCase): Shirt.drop_collection() - def test_file_fields(self): - """Ensure that file fields can be written to and their data retrieved - """ - class PutFile(Document): - the_file = FileField() - - class StreamFile(Document): - the_file = FileField() - - class SetFile(Document): - the_file = FileField() - - text = b('Hello, World!') - more_text = b('Foo Bar') - content_type = 'text/plain' - - PutFile.drop_collection() - StreamFile.drop_collection() - SetFile.drop_collection() - - putfile = PutFile() - putfile.the_file.put(text, content_type=content_type) - putfile.save() - putfile.validate() - result = PutFile.objects.first() - self.assertTrue(putfile == result) - self.assertEqual(result.the_file.read(), text) - self.assertEqual(result.the_file.content_type, content_type) - result.the_file.delete() # Remove file from GridFS - PutFile.objects.delete() - - # Ensure file-like objects are stored - putfile = PutFile() - putstring = StringIO() - putstring.write(text) - putstring.seek(0) - putfile.the_file.put(putstring, content_type=content_type) - putfile.save() - putfile.validate() - result = PutFile.objects.first() - self.assertTrue(putfile == result) - self.assertEqual(result.the_file.read(), text) - self.assertEqual(result.the_file.content_type, content_type) - result.the_file.delete() - - streamfile = StreamFile() - streamfile.the_file.new_file(content_type=content_type) - streamfile.the_file.write(text) - streamfile.the_file.write(more_text) - streamfile.the_file.close() - streamfile.save() - streamfile.validate() - result = StreamFile.objects.first() - self.assertTrue(streamfile == result) - self.assertEqual(result.the_file.read(), text + more_text) - 
self.assertEqual(result.the_file.content_type, content_type) - result.the_file.seek(0) - self.assertEqual(result.the_file.tell(), 0) - self.assertEqual(result.the_file.read(len(text)), text) - self.assertEqual(result.the_file.tell(), len(text)) - self.assertEqual(result.the_file.read(len(more_text)), more_text) - self.assertEqual(result.the_file.tell(), len(text + more_text)) - result.the_file.delete() - - # Ensure deleted file returns None - self.assertTrue(result.the_file.read() == None) - - setfile = SetFile() - setfile.the_file = text - setfile.save() - setfile.validate() - result = SetFile.objects.first() - self.assertTrue(setfile == result) - self.assertEqual(result.the_file.read(), text) - - # Try replacing file with new one - result.the_file.replace(more_text) - result.save() - result.validate() - result = SetFile.objects.first() - self.assertTrue(setfile == result) - self.assertEqual(result.the_file.read(), more_text) - result.the_file.delete() - - PutFile.drop_collection() - StreamFile.drop_collection() - SetFile.drop_collection() - - # Make sure FileField is optional and not required - class DemoFile(Document): - the_file = FileField() - DemoFile.objects.create() - - - def test_file_field_no_default(self): - - class GridDocument(Document): - the_file = FileField() - - GridDocument.drop_collection() - - with tempfile.TemporaryFile() as f: - f.write(b("Hello World!")) - f.flush() - - # Test without default - doc_a = GridDocument() - doc_a.save() - - - doc_b = GridDocument.objects.with_id(doc_a.id) - doc_b.the_file.replace(f, filename='doc_b') - doc_b.save() - self.assertNotEqual(doc_b.the_file.grid_id, None) - - # Test it matches - doc_c = GridDocument.objects.with_id(doc_b.id) - self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id) - - # Test with default - doc_d = GridDocument(the_file=b('')) - doc_d.save() - - doc_e = GridDocument.objects.with_id(doc_d.id) - self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id) - - 
doc_e.the_file.replace(f, filename='doc_e') - doc_e.save() - - doc_f = GridDocument.objects.with_id(doc_e.id) - self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id) - - db = GridDocument._get_db() - grid_fs = gridfs.GridFS(db) - self.assertEqual(['doc_b', 'doc_e'], grid_fs.list()) - - def test_file_uniqueness(self): - """Ensure that each instance of a FileField is unique - """ - class TestFile(Document): - name = StringField() - the_file = FileField() - - # First instance - test_file = TestFile() - test_file.name = "Hello, World!" - test_file.the_file.put(b('Hello, World!')) - test_file.save() - - # Second instance - test_file_dupe = TestFile() - data = test_file_dupe.the_file.read() # Should be None - - self.assertTrue(test_file.name != test_file_dupe.name) - self.assertTrue(test_file.the_file.read() != data) - - TestFile.drop_collection() - - def test_file_boolean(self): - """Ensure that a boolean test of a FileField indicates its presence - """ - class TestFile(Document): - the_file = FileField() - - test_file = TestFile() - self.assertFalse(bool(test_file.the_file)) - test_file.the_file = b('Hello, World!') - test_file.the_file.content_type = 'text/plain' - test_file.save() - self.assertTrue(bool(test_file.the_file)) - - TestFile.drop_collection() - - def test_file_cmp(self): - """Test comparing against other types""" - class TestFile(Document): - the_file = FileField() - - test_file = TestFile() - self.assertFalse(test_file.the_file in [{"test": 1}]) - - def test_image_field(self): - if PY3: - raise SkipTest('PIL does not have Python 3 support') - - class TestImage(Document): - image = ImageField() - - TestImage.drop_collection() - - t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'rb')) - t.save() - - t = TestImage.objects.first() - - self.assertEqual(t.image.format, 'PNG') - - w, h = t.image.size - self.assertEqual(w, 371) - self.assertEqual(h, 76) - - t.image.delete() - - def test_image_field_resize(self): - if PY3: - raise SkipTest('PIL 
does not have Python 3 support') - - class TestImage(Document): - image = ImageField(size=(185, 37)) - - TestImage.drop_collection() - - t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'rb')) - t.save() - - t = TestImage.objects.first() - - self.assertEqual(t.image.format, 'PNG') - w, h = t.image.size - - self.assertEqual(w, 185) - self.assertEqual(h, 37) - - t.image.delete() - - def test_image_field_resize_force(self): - if PY3: - raise SkipTest('PIL does not have Python 3 support') - - class TestImage(Document): - image = ImageField(size=(185, 37, True)) - - TestImage.drop_collection() - - t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'rb')) - t.save() - - t = TestImage.objects.first() - - self.assertEqual(t.image.format, 'PNG') - w, h = t.image.size - - self.assertEqual(w, 185) - self.assertEqual(h, 37) - - t.image.delete() - - def test_image_field_thumbnail(self): - if PY3: - raise SkipTest('PIL does not have Python 3 support') - - class TestImage(Document): - image = ImageField(thumbnail_size=(92, 18)) - - TestImage.drop_collection() - - t = TestImage() - t.image.put(open(TEST_IMAGE_PATH, 'rb')) - t.save() - - t = TestImage.objects.first() - - self.assertEqual(t.image.thumbnail.format, 'PNG') - self.assertEqual(t.image.thumbnail.width, 92) - self.assertEqual(t.image.thumbnail.height, 18) - - t.image.delete() - - def test_file_multidb(self): - register_connection('test_files', 'test_files') - class TestFile(Document): - name = StringField() - the_file = FileField(db_alias="test_files", - collection_name="macumba") - - TestFile.drop_collection() - - # delete old filesystem - get_db("test_files").macumba.files.drop() - get_db("test_files").macumba.chunks.drop() - - # First instance - test_file = TestFile() - test_file.name = "Hello, World!" 
- test_file.the_file.put(b('Hello, World!'), - name="hello.txt") - test_file.save() - - data = get_db("test_files").macumba.files.find_one() - self.assertEqual(data.get('name'), 'hello.txt') - - test_file = TestFile.objects.first() - self.assertEqual(test_file.the_file.read(), - b('Hello, World!')) - def test_geo_indexes(self): """Ensure that indexes are created automatically for GeoPointFields. """ @@ -2170,7 +1871,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(c['next'], 10) ids = [i.id for i in Person.objects] - self.assertEqual(ids, xrange(1, 11)) + self.assertEqual(ids, range(1, 11)) c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) self.assertEqual(c['next'], 10) @@ -2219,10 +1920,10 @@ class FieldTest(unittest.TestCase): self.assertEqual(c['next'], 10) ids = [i.id for i in Person.objects] - self.assertEqual(ids, xrange(1, 11)) + self.assertEqual(ids, range(1, 11)) counters = [i.counter for i in Person.objects] - self.assertEqual(counters, xrange(1, 11)) + self.assertEqual(counters, range(1, 11)) c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) self.assertEqual(c['next'], 10) diff --git a/tests/migration/__init__.py b/tests/migration/__init__.py index f7ad674..bff50c3 100644 --- a/tests/migration/__init__.py +++ b/tests/migration/__init__.py @@ -1,6 +1,7 @@ from convert_to_new_inheritance_model import * from refrencefield_dbref_to_object_id import * from turn_off_inheritance import * +from uuidfield_to_binary import * if __name__ == '__main__': unittest.main() diff --git a/tests/migration/uuidfield_to_binary.py b/tests/migration/uuidfield_to_binary.py new file mode 100644 index 0000000..a535e91 --- /dev/null +++ b/tests/migration/uuidfield_to_binary.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +import unittest +import uuid + +from mongoengine import Document, connect +from mongoengine.connection import get_db +from mongoengine.fields import StringField, UUIDField, ListField + +__all__ = ('ConvertToBinaryUUID', ) + 
+ +class ConvertToBinaryUUID(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + self.db = get_db() + + def test_how_to_convert_to_binary_uuid_fields(self): + """Demonstrates migrating from 0.7 to 0.8 + """ + + # 1. Old definition - using string UUIDs + class Person(Document): + name = StringField() + uuid = UUIDField(binary=False) + uuids = ListField(UUIDField(binary=False)) + + Person.drop_collection() + Person(name="Wilson Jr", uuid=uuid.uuid4(), + uuids=[uuid.uuid4(), uuid.uuid4()]).save() + + # 2. Start the migration by changing the schema + # Change UUIDField as now binary defaults to True + class Person(Document): + name = StringField() + uuid = UUIDField() + uuids = ListField(UUIDField()) + + # 3. Loop all the objects and mark parent as changed + for p in Person.objects: + p._mark_as_changed('uuid') + p._mark_as_changed('uuids') + p.save() + + # 4. Confirmation of the fix! + wilson = Person.objects(name="Wilson Jr").as_pymongo()[0] + self.assertTrue(isinstance(wilson['uuid'], uuid.UUID)) + self.assertTrue(all([isinstance(u, uuid.UUID) for u in wilson['uuids']])) From 5e94637adc2d0fcb89b5569cba5ffec13d511147 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 25 Apr 2013 15:39:57 +0000 Subject: [PATCH 244/464] DecimalField now stores as float not string (#289) --- docs/apireference.rst | 60 ++++++++++++------------ docs/changelog.rst | 1 + docs/conf.py | 7 ++- docs/upgrade.rst | 29 ++++++++++++ mongoengine/fields.py | 54 ++++++++++++++++----- mongoengine/queryset/transform.py | 6 +-- tests/fields/fields.py | 45 ++++++++++++++++-- tests/migration/__init__.py | 1 + tests/migration/decimalfield_as_float.py | 50 ++++++++++++++++++++ tests/queryset/queryset.py | 4 +- 10 files changed, 204 insertions(+), 53 deletions(-) create mode 100644 tests/migration/decimalfield_as_float.py diff --git a/docs/apireference.rst b/docs/apireference.rst index 0040f45..3a15629 100644 --- a/docs/apireference.rst +++ b/docs/apireference.rst @@ -54,33 +54,33 @@
Querying Fields ====== -.. autoclass:: mongoengine.StringField -.. autoclass:: mongoengine.URLField -.. autoclass:: mongoengine.EmailField -.. autoclass:: mongoengine.IntField -.. autoclass:: mongoengine.LongField -.. autoclass:: mongoengine.FloatField -.. autoclass:: mongoengine.DecimalField -.. autoclass:: mongoengine.BooleanField -.. autoclass:: mongoengine.DateTimeField -.. autoclass:: mongoengine.ComplexDateTimeField -.. autoclass:: mongoengine.EmbeddedDocumentField -.. autoclass:: mongoengine.GenericEmbeddedDocumentField -.. autoclass:: mongoengine.DynamicField -.. autoclass:: mongoengine.ListField -.. autoclass:: mongoengine.SortedListField -.. autoclass:: mongoengine.DictField -.. autoclass:: mongoengine.MapField -.. autoclass:: mongoengine.ReferenceField -.. autoclass:: mongoengine.GenericReferenceField -.. autoclass:: mongoengine.BinaryField -.. autoclass:: mongoengine.FileField -.. autoclass:: mongoengine.ImageField -.. autoclass:: mongoengine.GeoPointField -.. autoclass:: mongoengine.SequenceField -.. autoclass:: mongoengine.ObjectIdField -.. autoclass:: mongoengine.UUIDField -.. autoclass:: mongoengine.GridFSError -.. autoclass:: mongoengine.GridFSProxy -.. autoclass:: mongoengine.ImageGridFsProxy -.. autoclass:: mongoengine.ImproperlyConfigured +.. autoclass:: mongoengine.fields.StringField +.. autoclass:: mongoengine.fields.URLField +.. autoclass:: mongoengine.fields.EmailField +.. autoclass:: mongoengine.fields.IntField +.. autoclass:: mongoengine.fields.LongField +.. autoclass:: mongoengine.fields.FloatField +.. autoclass:: mongoengine.fields.DecimalField +.. autoclass:: mongoengine.fields.BooleanField +.. autoclass:: mongoengine.fields.DateTimeField +.. autoclass:: mongoengine.fields.ComplexDateTimeField +.. autoclass:: mongoengine.fields.EmbeddedDocumentField +.. autoclass:: mongoengine.fields.GenericEmbeddedDocumentField +.. autoclass:: mongoengine.fields.DynamicField +.. autoclass:: mongoengine.fields.ListField +.. 
autoclass:: mongoengine.fields.SortedListField +.. autoclass:: mongoengine.fields.DictField +.. autoclass:: mongoengine.fields.MapField +.. autoclass:: mongoengine.fields.ReferenceField +.. autoclass:: mongoengine.fields.GenericReferenceField +.. autoclass:: mongoengine.fields.BinaryField +.. autoclass:: mongoengine.fields.FileField +.. autoclass:: mongoengine.fields.ImageField +.. autoclass:: mongoengine.fields.GeoPointField +.. autoclass:: mongoengine.fields.SequenceField +.. autoclass:: mongoengine.fields.ObjectIdField +.. autoclass:: mongoengine.fields.UUIDField +.. autoclass:: mongoengine.fields.GridFSError +.. autoclass:: mongoengine.fields.GridFSProxy +.. autoclass:: mongoengine.fields.ImageGridFsProxy +.. autoclass:: mongoengine.fields.ImproperlyConfigured diff --git a/docs/changelog.rst b/docs/changelog.rst index 9ea4ad5..d0167c5 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- DecimalField now stores as float not string (#289) - UUIDField now stores as a binary by default (#292) - Added Custom User Model for Django 1.5 (#285) - Cascading saves now default to off (#291) diff --git a/docs/conf.py b/docs/conf.py index 3cfcef5..8bcb9ec 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -173,8 +173,8 @@ latex_paper_size = 'a4' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'MongoEngine.tex', u'MongoEngine Documentation', - u'Harry Marr', 'manual'), + ('index', 'MongoEngine.tex', 'MongoEngine Documentation', + 'Ross Lawley', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -193,3 +193,6 @@ latex_documents = [ # If false, no module index is generated. 
#latex_use_modindex = True + +autoclass_content = 'both' + diff --git a/docs/upgrade.rst b/docs/upgrade.rst index bcdc110..dddce91 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -145,6 +145,35 @@ eg:: a._mark_as_dirty('uuid') a.save() +DecimalField +------------ + +DecimalField now stores floats - previously it was storing strings and that +made it impossible to do comparisons when querying correctly.:: + + # Old code + class Person(Document): + balance = DecimalField() + + # New code + class Person(Document): + balance = DecimalField(force_string=True) + +To migrate all the decimals you need to touch each object and mark it as dirty +eg:: + + # Doc definition + class Person(Document): + balance = DecimalField() + + # Mark all DecimalFields as dirty and save + for p in Person.objects: + p._mark_as_dirty('balance') + p.save() + +.. note:: DecimalFields have also been improved with the addition of precision + and rounding. See :class:`~mongoengine.DecimalField` for more information. + Cascading Saves --------------- To improve performance document saves will no longer automatically cascade. diff --git a/mongoengine/fields.py b/mongoengine/fields.py index bea827c..2e14933 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -260,30 +260,57 @@ class FloatField(BaseField): class DecimalField(BaseField): """A fixed-point decimal number field. + .. versionchanged:: 0.8 .. versionadded:: 0.3 """ - def __init__(self, min_value=None, max_value=None, **kwargs): - self.min_value, self.max_value = min_value, max_value + def __init__(self, min_value=None, max_value=None, force_string=False, + precision=2, rounding=decimal.ROUND_HALF_UP, **kwargs): + """ + :param min_value: Validation rule for the minimum acceptable value. + :param max_value: Validation rule for the maximum acceptable value. + :param force_string: Store as a string. + :param precision: Number of decimal places to store.
+ :param rounding: The rounding rule from the python decimal library: + + - decimal.ROUND_CEILING (towards Infinity) + - decimal.ROUND_DOWN (towards zero) + - decimal.ROUND_FLOOR (towards -Infinity) + - decimal.ROUND_HALF_DOWN (to nearest with ties going towards zero) + - decimal.ROUND_HALF_EVEN (to nearest with ties going to nearest even integer) + - decimal.ROUND_HALF_UP (to nearest with ties going away from zero) + - decimal.ROUND_UP (away from zero) + - decimal.ROUND_05UP (away from zero if last digit after rounding towards zero would have been 0 or 5; otherwise towards zero) + + Defaults to: ``decimal.ROUND_HALF_UP`` + + """ + self.min_value = min_value + self.max_value = max_value + self.force_string = force_string + self.precision = decimal.Decimal(".%s" % ("0" * precision)) + self.rounding = rounding + super(DecimalField, self).__init__(**kwargs) def to_python(self, value): - original_value = value - if not isinstance(value, basestring): - value = unicode(value) - try: - value = decimal.Decimal(value) - except ValueError: - return original_value - return value + if value is None: + return value + + return decimal.Decimal(value).quantize(self.precision, + rounding=self.rounding) def to_mongo(self, value): - return unicode(value) + if value is None: + return value + if self.force_string: + return unicode(value) + return float(self.to_python(value)) def validate(self, value): if not isinstance(value, decimal.Decimal): if not isinstance(value, basestring): - value = str(value) + value = unicode(value) try: value = decimal.Decimal(value) except Exception, exc: @@ -295,6 +322,9 @@ class DecimalField(BaseField): if self.max_value is not None and value > self.max_value: self.error('Decimal value is too large') + def prepare_query_value(self, op, value): + return self.to_mongo(value) + class BooleanField(BaseField): """A boolean field type.
diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 71f12e3..3da2693 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -9,7 +9,7 @@ __all__ = ('query', 'update') COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', - 'all', 'size', 'exists', 'not') + 'all', 'size', 'exists', 'not') GEO_OPERATORS = ('within_distance', 'within_spherical_distance', 'within_box', 'within_polygon', 'near', 'near_sphere', 'max_distance') @@ -74,7 +74,7 @@ def query(_doc_cls=None, _field_operation=False, **query): if op in singular_ops: if isinstance(field, basestring): if (op in STRING_OPERATORS and - isinstance(value, basestring)): + isinstance(value, basestring)): StringField = _import_class('StringField') value = StringField.prepare_query_value(op, value) else: @@ -144,7 +144,7 @@ def query(_doc_cls=None, _field_operation=False, **query): merge_query[k].append(mongo_query[k]) del mongo_query[k] if isinstance(v, list): - value = [{k:val} for val in v] + value = [{k: val} for val in v] if '$and' in mongo_query.keys(): mongo_query['$and'].append(value) else: diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 7eae3f4..4fa6989 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -272,10 +272,8 @@ class FieldTest(unittest.TestCase): Person.drop_collection() - person = Person() - person.height = Decimal('1.89') - person.save() - person.reload() + Person(height=Decimal('1.89')).save() + person = Person.objects.first() self.assertEqual(person.height, Decimal('1.89')) person.height = '2.0' @@ -289,6 +287,45 @@ class FieldTest(unittest.TestCase): Person.drop_collection() + def test_decimal_comparison(self): + + class Person(Document): + money = DecimalField() + + Person.drop_collection() + + Person(money=6).save() + Person(money=8).save() + Person(money=10).save() + + self.assertEqual(2, Person.objects(money__gt=Decimal("7")).count()) + self.assertEqual(2, 
Person.objects(money__gt=7).count()) + self.assertEqual(2, Person.objects(money__gt="7").count()) + + def test_decimal_storage(self): + class Person(Document): + btc = DecimalField(precision=4) + + Person.drop_collection() + Person(btc=10).save() + Person(btc=10.1).save() + Person(btc=10.11).save() + Person(btc="10.111").save() + Person(btc=Decimal("10.1111")).save() + Person(btc=Decimal("10.11111")).save() + + # How its stored + expected = [{'btc': 10.0}, {'btc': 10.1}, {'btc': 10.11}, + {'btc': 10.111}, {'btc': 10.1111}, {'btc': 10.1111}] + actual = list(Person.objects.exclude('id').as_pymongo()) + self.assertEqual(expected, actual) + + # How it comes out locally + expected = [Decimal('10.0000'), Decimal('10.1000'), Decimal('10.1100'), + Decimal('10.1110'), Decimal('10.1111'), Decimal('10.1111')] + actual = list(Person.objects().scalar('btc')) + self.assertEqual(expected, actual) + def test_boolean_validation(self): """Ensure that invalid values cannot be assigned to boolean fields. """ diff --git a/tests/migration/__init__.py b/tests/migration/__init__.py index bff50c3..6fc83e0 100644 --- a/tests/migration/__init__.py +++ b/tests/migration/__init__.py @@ -1,4 +1,5 @@ from convert_to_new_inheritance_model import * +from decimalfield_as_float import * from refrencefield_dbref_to_object_id import * from turn_off_inheritance import * from uuidfield_to_binary import * diff --git a/tests/migration/decimalfield_as_float.py b/tests/migration/decimalfield_as_float.py new file mode 100644 index 0000000..3903c91 --- /dev/null +++ b/tests/migration/decimalfield_as_float.py @@ -0,0 +1,50 @@ + # -*- coding: utf-8 -*- +import unittest +import decimal +from decimal import Decimal + +from mongoengine import Document, connect +from mongoengine.connection import get_db +from mongoengine.fields import StringField, DecimalField, ListField + +__all__ = ('ConvertDecimalField', ) + + +class ConvertDecimalField(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + 
self.db = get_db() + + def test_how_to_convert_decimal_fields(self): + """Demonstrates migrating from 0.7 to 0.8 + """ + + # 1. Old definition - using dbrefs + class Person(Document): + name = StringField() + money = DecimalField(force_string=True) + monies = ListField(DecimalField(force_string=True)) + + Person.drop_collection() + Person(name="Wilson Jr", money=Decimal("2.50"), + monies=[Decimal("2.10"), Decimal("5.00")]).save() + + # 2. Start the migration by changing the schema + # Change DecimalField - add precision and rounding settings + class Person(Document): + name = StringField() + money = DecimalField(precision=2, rounding=decimal.ROUND_HALF_UP) + monies = ListField(DecimalField(precision=2, + rounding=decimal.ROUND_HALF_UP)) + + # 3. Loop all the objects and mark parent as changed + for p in Person.objects: + p._mark_as_changed('money') + p._mark_as_changed('monies') + p.save() + + # 4. Confirmation of the fix! + wilson = Person.objects(name="Wilson Jr").as_pymongo()[0] + self.assertTrue(isinstance(wilson['money'], float)) + self.assertTrue(all([isinstance(m, float) for m in wilson['monies']])) diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index c7c4c7c..5e403c4 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3262,9 +3262,9 @@ class QuerySetTest(unittest.TestCase): self.assertTrue(isinstance(results[0], dict)) self.assertTrue(isinstance(results[1], dict)) self.assertEqual(results[0]['name'], 'Bob Dole') - self.assertEqual(results[0]['price'], '1.11') + self.assertEqual(results[0]['price'], 1.11) self.assertEqual(results[1]['name'], 'Barack Obama') - self.assertEqual(results[1]['price'], '2.22') + self.assertEqual(results[1]['price'], 2.22) # Test coerce_types users = User.objects.only('name', 'price').as_pymongo(coerce_types=True) From 13d8dfdb5fc44ce92ed3373111d4e2e74cf2e66b Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 26 Apr 2013 08:43:38 +0000 Subject: [PATCH 245/464] Save py2.6 from 
Decimal Float fun --- mongoengine/fields.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 2e14933..a5dbf5d 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -297,8 +297,9 @@ class DecimalField(BaseField): if value is None: return value - return decimal.Decimal(value).quantize(self.precision, - rounding=self.rounding) + # Convert to string for python 2.6 before casting to Decimal + value = decimal.Decimal("%s" % value) + return value.quantize(self.precision, rounding=self.rounding) def to_mongo(self, value): if value is None: From 7765f272ac74f1b25e9e057d13b54408031d1594 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 26 Apr 2013 08:46:46 +0000 Subject: [PATCH 246/464] Documentation api and reference cleanups --- docs/django.rst | 2 +- docs/guide/defining-documents.rst | 82 +++++++++++++++---------------- docs/guide/document-instances.rst | 4 +- docs/guide/gridfs.rst | 6 +-- docs/guide/querying.rst | 16 +++--- docs/tutorial.rst | 8 +-- docs/upgrade.rst | 11 ++++- mongoengine/document.py | 2 +- mongoengine/fields.py | 2 +- mongoengine/queryset/queryset.py | 8 +-- 10 files changed, 75 insertions(+), 66 deletions(-) diff --git a/docs/django.rst b/docs/django.rst index e3a1c6b..d60e55d 100644 --- a/docs/django.rst +++ b/docs/django.rst @@ -98,7 +98,7 @@ Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` sec Storage ======= -With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`, +With MongoEngine's support for GridFS via the :class:`~mongoengine.fields.FileField`, it is useful to have a Django file storage backend that wraps this. The new storage module is called :class:`~mongoengine.django.storage.GridFSStorage`. 
Using it is very similar to using the default FileSystemStorage.:: diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 350ba67..d18606a 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -62,31 +62,31 @@ not provided. Default values may optionally be a callable, which will be called to retrieve the value (such as in the above example). The field types available are as follows: -* :class:`~mongoengine.BinaryField` -* :class:`~mongoengine.BooleanField` -* :class:`~mongoengine.ComplexDateTimeField` -* :class:`~mongoengine.DateTimeField` -* :class:`~mongoengine.DecimalField` -* :class:`~mongoengine.DictField` -* :class:`~mongoengine.DynamicField` -* :class:`~mongoengine.EmailField` -* :class:`~mongoengine.EmbeddedDocumentField` -* :class:`~mongoengine.FileField` -* :class:`~mongoengine.FloatField` -* :class:`~mongoengine.GenericEmbeddedDocumentField` -* :class:`~mongoengine.GenericReferenceField` -* :class:`~mongoengine.GeoPointField` -* :class:`~mongoengine.ImageField` -* :class:`~mongoengine.IntField` -* :class:`~mongoengine.ListField` -* :class:`~mongoengine.MapField` -* :class:`~mongoengine.ObjectIdField` -* :class:`~mongoengine.ReferenceField` -* :class:`~mongoengine.SequenceField` -* :class:`~mongoengine.SortedListField` -* :class:`~mongoengine.StringField` -* :class:`~mongoengine.URLField` -* :class:`~mongoengine.UUIDField` +* :class:`~mongoengine.fields.BinaryField` +* :class:`~mongoengine.fields.BooleanField` +* :class:`~mongoengine.fields.ComplexDateTimeField` +* :class:`~mongoengine.fields.DateTimeField` +* :class:`~mongoengine.fields.DecimalField` +* :class:`~mongoengine.fields.DictField` +* :class:`~mongoengine.fields.DynamicField` +* :class:`~mongoengine.fields.EmailField` +* :class:`~mongoengine.fields.EmbeddedDocumentField` +* :class:`~mongoengine.fields.FileField` +* :class:`~mongoengine.fields.FloatField` +* :class:`~mongoengine.fields.GenericEmbeddedDocumentField` +* 
:class:`~mongoengine.fields.GenericReferenceField` +* :class:`~mongoengine.fields.GeoPointField` +* :class:`~mongoengine.fields.ImageField` +* :class:`~mongoengine.fields.IntField` +* :class:`~mongoengine.fields.ListField` +* :class:`~mongoengine.fields.MapField` +* :class:`~mongoengine.fields.ObjectIdField` +* :class:`~mongoengine.fields.ReferenceField` +* :class:`~mongoengine.fields.SequenceField` +* :class:`~mongoengine.fields.SortedListField` +* :class:`~mongoengine.fields.StringField` +* :class:`~mongoengine.fields.URLField` +* :class:`~mongoengine.fields.UUIDField` Field arguments --------------- @@ -110,7 +110,7 @@ arguments can be set on all fields: The definion of default parameters follow `the general rules on Python `__, which means that some care should be taken when dealing with default mutable objects - (like in :class:`~mongoengine.ListField` or :class:`~mongoengine.DictField`):: + (like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`):: class ExampleFirst(Document): # Default an empty list @@ -172,8 +172,8 @@ arguments can be set on all fields: List fields ----------- MongoDB allows the storage of lists of items. To add a list of items to a -:class:`~mongoengine.Document`, use the :class:`~mongoengine.ListField` field -type. :class:`~mongoengine.ListField` takes another field object as its first +:class:`~mongoengine.Document`, use the :class:`~mongoengine.fields.ListField` field +type. 
:class:`~mongoengine.fields.ListField` takes another field object as its first argument, which specifies which type elements may be stored within the list:: class Page(Document): @@ -191,7 +191,7 @@ inherit from :class:`~mongoengine.EmbeddedDocument` rather than content = StringField() To embed the document within another document, use the -:class:`~mongoengine.EmbeddedDocumentField` field type, providing the embedded +:class:`~mongoengine.fields.EmbeddedDocumentField` field type, providing the embedded document class as the first argument:: class Page(Document): @@ -206,7 +206,7 @@ Dictionary Fields Often, an embedded document may be used instead of a dictionary -- generally this is recommended as dictionaries don't support validation or custom field types. However, sometimes you will not know the structure of what you want to -store; in this situation a :class:`~mongoengine.DictField` is appropriate:: +store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate:: class SurveyResponse(Document): date = DateTimeField() @@ -224,7 +224,7 @@ other objects, so are the most flexible field type available. Reference fields ---------------- References may be stored to other documents in the database using the -:class:`~mongoengine.ReferenceField`. Pass in another document class as the +:class:`~mongoengine.fields.ReferenceField`. Pass in another document class as the first argument to the constructor, then simply assign document objects to the field:: @@ -245,9 +245,9 @@ field:: The :class:`User` object is automatically turned into a reference behind the scenes, and dereferenced when the :class:`Page` object is retrieved. -To add a :class:`~mongoengine.ReferenceField` that references the document +To add a :class:`~mongoengine.fields.ReferenceField` that references the document being defined, use the string ``'self'`` in place of the document class as the -argument to :class:`~mongoengine.ReferenceField`'s constructor. 
To reference a +argument to :class:`~mongoengine.fields.ReferenceField`'s constructor. To reference a document that has not yet been defined, use the name of the undefined document as the constructor's argument:: @@ -325,7 +325,7 @@ Its value can take any of the following constants: :const:`mongoengine.PULL` Removes the reference to the object (using MongoDB's "pull" operation) from any object's fields of - :class:`~mongoengine.ListField` (:class:`~mongoengine.ReferenceField`). + :class:`~mongoengine.fields.ListField` (:class:`~mongoengine.fields.ReferenceField`). .. warning:: @@ -352,7 +352,7 @@ Its value can take any of the following constants: Generic reference fields '''''''''''''''''''''''' A second kind of reference field also exists, -:class:`~mongoengine.GenericReferenceField`. This allows you to reference any +:class:`~mongoengine.fields.GenericReferenceField`. This allows you to reference any kind of :class:`~mongoengine.Document`, and hence doesn't take a :class:`~mongoengine.Document` subclass as a constructor argument:: @@ -376,15 +376,15 @@ kind of :class:`~mongoengine.Document`, and hence doesn't take a .. note:: - Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less - efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if + Using :class:`~mongoengine.fields.GenericReferenceField`\ s is slightly less + efficient than the standard :class:`~mongoengine.fields.ReferenceField`\ s, so if you will only be referencing one document type, prefer the standard - :class:`~mongoengine.ReferenceField`. + :class:`~mongoengine.fields.ReferenceField`. Uniqueness constraints ---------------------- MongoEngine allows you to specify that a field should be unique across a -collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's +collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's constructor. 
If you try to save a document that has the same value for a unique field as a document that is already in the database, a :class:`~mongoengine.OperationError` will be raised. You may also specify @@ -492,11 +492,11 @@ Geospatial indexes ------------------ Geospatial indexes will be automatically created for all -:class:`~mongoengine.GeoPointField`\ s +:class:`~mongoengine.fields.GeoPointField`\ s It is also possible to explicitly define geospatial indexes. This is useful if you need to define a geospatial index on a subfield of a -:class:`~mongoengine.DictField` or a custom field that contains a +:class:`~mongoengine.fields.DictField` or a custom field that contains a point. To create a geospatial index you must prefix the field with the ***** sign. :: diff --git a/docs/guide/document-instances.rst b/docs/guide/document-instances.rst index e8e7d63..619f3e8 100644 --- a/docs/guide/document-instances.rst +++ b/docs/guide/document-instances.rst @@ -68,8 +68,8 @@ document values for example:: Cascading Saves --------------- -If your document contains :class:`~mongoengine.ReferenceField` or -:class:`~mongoengine.GenericReferenceField` objects, then by default the +If your document contains :class:`~mongoengine.fields.ReferenceField` or +:class:`~mongoengine.fields.GenericReferenceField` objects, then by default the :meth:`~mongoengine.Document.save` method will automatically save any changes to those objects as well. If this is not desired passing :attr:`cascade` as False to the save method turns this feature off. diff --git a/docs/guide/gridfs.rst b/docs/guide/gridfs.rst index 1125947..d81bb92 100644 --- a/docs/guide/gridfs.rst +++ b/docs/guide/gridfs.rst @@ -7,7 +7,7 @@ GridFS Writing ------- -GridFS support comes in the form of the :class:`~mongoengine.FileField` field +GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field object. 
This field acts as a file-like object and provides a couple of different ways of inserting and retrieving data. Arbitrary metadata such as content type can also be stored alongside the files. In the following example, @@ -27,7 +27,7 @@ a document is created to store details about animals, including a photo:: Retrieval --------- -So using the :class:`~mongoengine.FileField` is just like using any other +So using the :class:`~mongoengine.fields.FileField` is just like using any other field. The file can also be retrieved just as easily:: marmot = Animal.objects(genus='Marmota').first() @@ -37,7 +37,7 @@ field. The file can also be retrieved just as easily:: Streaming --------- -Streaming data into a :class:`~mongoengine.FileField` is achieved in a +Streaming data into a :class:`~mongoengine.fields.FileField` is achieved in a slightly different manner. First, a new file must be created by calling the :func:`new_file` method. Data can then be written using :func:`write`:: diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 60702ec..3a25c28 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -79,7 +79,7 @@ expressions: * ``match`` -- performs an $elemMatch so you can match an entire document within an array There are a few special operators for performing geographical queries, that -may used with :class:`~mongoengine.GeoPointField`\ s: +may used with :class:`~mongoengine.fields.GeoPointField`\ s: * ``within_distance`` -- provide a list containing a point and a maximum distance (e.g. 
[(41.342, -87.653), 5]) @@ -100,7 +100,7 @@ Querying lists -------------- On most fields, this syntax will look up documents where the field specified matches the given value exactly, but when the field refers to a -:class:`~mongoengine.ListField`, a single item may be provided, in which case +:class:`~mongoengine.fields.ListField`, a single item may be provided, in which case lists that contain that item will be matched:: class Page(Document): @@ -319,7 +319,7 @@ Retrieving a subset of fields Sometimes a subset of fields on a :class:`~mongoengine.Document` is required, and for efficiency only these should be retrieved from the database. This issue is especially important for MongoDB, as fields may often be extremely large -(e.g. a :class:`~mongoengine.ListField` of +(e.g. a :class:`~mongoengine.fields.ListField` of :class:`~mongoengine.EmbeddedDocument`\ s, which represent the comments on a blog post. To select only a subset of fields, use :meth:`~mongoengine.queryset.QuerySet.only`, specifying the fields you want to @@ -351,14 +351,14 @@ If you later need the missing fields, just call Getting related data -------------------- -When iterating the results of :class:`~mongoengine.ListField` or -:class:`~mongoengine.DictField` we automatically dereference any +When iterating the results of :class:`~mongoengine.fields.ListField` or +:class:`~mongoengine.fields.DictField` we automatically dereference any :class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the number the queries to mongo. There are times when that efficiency is not enough, documents that have -:class:`~mongoengine.ReferenceField` objects or -:class:`~mongoengine.GenericReferenceField` objects at the top level are +:class:`~mongoengine.fields.ReferenceField` objects or +:class:`~mongoengine.fields.GenericReferenceField` objects at the top level are expensive as the number of queries to MongoDB can quickly rise. To limit the number of queries use @@ -541,7 +541,7 @@ Javascript code. 
When accessing a field on a collection object, use square-bracket notation, and prefix the MongoEngine field name with a tilde. The field name that follows the tilde will be translated to the name used in the database. Note that when referring to fields on embedded documents, -the name of the :class:`~mongoengine.EmbeddedDocumentField`, followed by a dot, +the name of the :class:`~mongoengine.fields.EmbeddedDocumentField`, followed by a dot, should be used before the name of the field on the embedded document. The following example shows how the substitutions are made:: diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 423df9b..c2f481b 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -115,7 +115,7 @@ by setting :attr:`allow_inheritance` to True in the :attr:`meta`:: link_url = StringField() We are storing a reference to the author of the posts using a -:class:`~mongoengine.ReferenceField` object. These are similar to foreign key +:class:`~mongoengine.fields.ReferenceField` object. These are similar to foreign key fields in traditional ORMs, and are automatically translated into references when they are saved, and dereferenced when they are loaded. @@ -137,7 +137,7 @@ size of our database. So let's take a look that the code our modified author = ReferenceField(User) tags = ListField(StringField(max_length=30)) -The :class:`~mongoengine.ListField` object that is used to define a Post's tags +The :class:`~mongoengine.fields.ListField` object that is used to define a Post's tags takes a field object as its first argument --- this means that you can have lists of any type of field (including lists). 
@@ -174,7 +174,7 @@ We can then store a list of comment documents in our post document:: Handling deletions of references ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The :class:`~mongoengine.ReferenceField` object takes a keyword +The :class:`~mongoengine.fields.ReferenceField` object takes a keyword `reverse_delete_rule` for handling deletion rules if the reference is deleted. To delete all the posts if a user is deleted set the rule:: @@ -184,7 +184,7 @@ To delete all the posts if a user is deleted set the rule:: tags = ListField(StringField(max_length=30)) comments = ListField(EmbeddedDocumentField(Comment)) -See :class:`~mongoengine.ReferenceField` for more information. +See :class:`~mongoengine.fields.ReferenceField` for more information. .. note:: MapFields and DictFields currently don't support automatic handling of diff --git a/docs/upgrade.rst b/docs/upgrade.rst index dddce91..0ae65f3 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -120,6 +120,9 @@ eg:: p._mark_as_dirty('friends') p.save() +`An example test migration is available on github +`_. + UUIDField --------- @@ -145,6 +148,9 @@ eg:: a._mark_as_dirty('uuid') a.save() +`An example test migration is available on github +`_. + DecimalField ------------ @@ -172,7 +178,10 @@ eg:: p.save() .. note:: DecimalField's have also been improved with the addition of precision - and rounding. See :class:`~mongoengine.DecimalField` for more information. + and rounding. See :class:`~mongoengine.fields.DecimalField` for more information. + +`An example test migration is available on github +`_. Cascading Saves --------------- diff --git a/mongoengine/document.py b/mongoengine/document.py index d0cafa3..c4542a2 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -559,7 +559,7 @@ class DynamicDocument(Document): way as an ordinary document but has expando style properties. 
Any data passed or set against the :class:`~mongoengine.DynamicDocument` that is not a field is automatically converted into a - :class:`~mongoengine.DynamicField` and data can be attributed to that + :class:`~mongoengine.fields.DynamicField` and data can be attributed to that field. .. note:: diff --git a/mongoengine/fields.py b/mongoengine/fields.py index a5dbf5d..cf2c802 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -782,7 +782,7 @@ class ReferenceField(BaseField): * NULLIFY - Updates the reference to null. * CASCADE - Deletes the documents associated with the reference. * DENY - Prevent the deletion of the reference object. - * PULL - Pull the reference from a :class:`~mongoengine.ListField` + * PULL - Pull the reference from a :class:`~mongoengine.fields.ListField` of references Alternative syntax for registering delete rules (useful when implementing diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index dcfb240..769cf68 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1049,7 +1049,7 @@ class QuerySet(object): """) for result in self.map_reduce(map_func, reduce_func, - finalize_f=finalize_func, output='inline'): + finalize_f=finalize_func, output='inline'): return result.value else: return 0 @@ -1062,11 +1062,11 @@ class QuerySet(object): .. note:: Can only do direct simple mappings and cannot map across - :class:`~mongoengine.ReferenceField` or - :class:`~mongoengine.GenericReferenceField` for more complex + :class:`~mongoengine.fields.ReferenceField` or + :class:`~mongoengine.fields.GenericReferenceField` for more complex counting a manual map reduce call would is required. - If the field is a :class:`~mongoengine.ListField`, the items within + If the field is a :class:`~mongoengine.fields.ListField`, the items within each list will be counted individually. 
:param field: the field to use From 2447349383ca86dd57ff83403ce6bf9aebef90f6 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 26 Apr 2013 09:59:43 +0000 Subject: [PATCH 247/464] Added a note about distinct being a command --- mongoengine/queryset/queryset.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 769cf68..5c7c7c8 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -608,6 +608,9 @@ class QuerySet(object): :param field: the field to select distinct values from + .. note:: This is a command and won't take ordering or limit into + account. + .. versionadded:: 0.4 .. versionchanged:: 0.5 - Fixed handling references .. versionchanged:: 0.6 - Improved db_field refrence handling From 36993097b4668e20e809a9bbb9a575b45b004939 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 26 Apr 2013 11:38:45 +0000 Subject: [PATCH 248/464] Document serialization uses field order to ensure a strict order is set (#296) --- docs/changelog.rst | 1 + docs/guide/defining-documents.rst | 4 ++++ docs/guide/document-instances.rst | 14 ++++++++----- docs/upgrade.rst | 19 ++++++++++++++--- mongoengine/base/document.py | 34 ++++++++++++++++++++++--------- tests/document/dynamic.py | 3 ++- tests/document/inheritance.py | 27 ++++++++++++++++++++---- tests/document/instance.py | 15 ++++++++++++++ 8 files changed, 94 insertions(+), 23 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index d0167c5..f786c1d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- Document serialization uses field order to ensure a strict order is set (#296) - DecimalField now stores as float not string (#289) - UUIDField now stores as a binary by default (#292) - Added Custom User Model for Django 1.5 (#285) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index d18606a..36e0efe 
100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -24,6 +24,9 @@ objects** as class attributes to the document class:: title = StringField(max_length=200, required=True) date_modified = DateTimeField(default=datetime.datetime.now) +As BSON (the binary format for storing data in mongodb) is order dependent, +documents are serialized based on their field order. + Dynamic document schemas ======================== One of the benefits of MongoDb is dynamic schemas for a collection, whilst data @@ -51,6 +54,7 @@ be saved :: There is one caveat on Dynamic Documents: fields cannot start with `_` +Dynamic fields are stored in alphabetical order *after* any declared fields. Fields ====== diff --git a/docs/guide/document-instances.rst b/docs/guide/document-instances.rst index 619f3e8..f9a6610 100644 --- a/docs/guide/document-instances.rst +++ b/docs/guide/document-instances.rst @@ -30,11 +30,14 @@ already exist, then any changes will be updated atomically. For example:: .. note:: - Changes to documents are tracked and on the whole perform `set` operations. + Changes to documents are tracked and on the whole perform ``set`` operations. - * ``list_field.pop(0)`` - *sets* the resulting list + * ``list_field.push(0)`` - *sets* the resulting list * ``del(list_field)`` - *unsets* whole list + With lists its preferable to use ``Doc.update(push__list_field=0)`` as + this stops the whole list being updated - stopping any race conditions. + .. seealso:: :ref:`guide-atomic-updates` @@ -70,9 +73,10 @@ Cascading Saves --------------- If your document contains :class:`~mongoengine.fields.ReferenceField` or :class:`~mongoengine.fields.GenericReferenceField` objects, then by default the -:meth:`~mongoengine.Document.save` method will automatically save any changes to -those objects as well. If this is not desired passing :attr:`cascade` as False -to the save method turns this feature off. 
+:meth:`~mongoengine.Document.save` method will not save any changes to +those objects. If you want all references to also be saved also, noting each +save is a separate query, then passing :attr:`cascade` as True +to the save method will cascade any saves. Deleting documents ------------------ diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 0ae65f3..bb5705c 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -120,7 +120,7 @@ eg:: p._mark_as_dirty('friends') p.save() -`An example test migration is available on github +`An example test migration for ReferenceFields is available on github `_. UUIDField @@ -148,7 +148,7 @@ eg:: a._mark_as_dirty('uuid') a.save() -`An example test migration is available on github +`An example test migration for UUIDFields is available on github `_. DecimalField @@ -180,7 +180,7 @@ eg:: .. note:: DecimalField's have also been improved with the addition of precision and rounding. See :class:`~mongoengine.fields.DecimalField` for more information. -`An example test migration is available on github +`An example test migration for DecimalFields is available on github `_. Cascading Saves @@ -196,6 +196,19 @@ you will have to explicitly tell it to cascade on save:: # Or on save: my_document.save(cascade=True) +Storage +------- + +Document and Embedded Documents are now serialized based on declared field order. +Previously, the data was passed to mongodb as a dictionary and which meant that +order wasn't guaranteed - so things like ``$addToSet`` operations on +:class:`~mongoengine.EmbeddedDocument` could potentially fail in unexpected +ways. + +If this impacts you, you may want to rewrite the objects using the +``doc.mark_as_dirty('field')`` pattern described above. If you are using a +compound primary key then you will need to ensure the order is fixed and match +your EmbeddedDocument to that order. 
Querysets ========= diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 7ec672f..53686b2 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -6,6 +6,7 @@ from functools import partial import pymongo from bson import json_util from bson.dbref import DBRef +from bson.son import SON from mongoengine import signals from mongoengine.common import _import_class @@ -228,11 +229,16 @@ class BaseDocument(object): pass def to_mongo(self): - """Return data dictionary ready for use with MongoDB. + """Return as SON data ready for use with MongoDB. """ - data = {} - for field_name, field in self._fields.iteritems(): + data = SON() + data["_id"] = None + data['_cls'] = self._class_name + + for field_name in self: value = self._data.get(field_name, None) + field = self._fields.get(field_name) + if value is not None: value = field.to_mongo(value) @@ -244,19 +250,27 @@ class BaseDocument(object): if value is not None: data[field.db_field] = value - # Only add _cls if allow_inheritance is True - if (hasattr(self, '_meta') and - self._meta.get('allow_inheritance', ALLOW_INHERITANCE) == True): - data['_cls'] = self._class_name + # If "_id" has not been set, then try and set it + if data["_id"] is None: + data["_id"] = self._data.get("id", None) - if '_id' in data and data['_id'] is None: - del data['_id'] + if data['_id'] is None: + data.pop('_id') + + # Only add _cls if allow_inheritance is True + if (not hasattr(self, '_meta') or + not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)): + data.pop('_cls') if not self._dynamic: return data - for name, field in self._dynamic_fields.items(): + # Sort dynamic fields by key + dynamic_fields = sorted(self._dynamic_fields.iteritems(), + key=operator.itemgetter(0)) + for name, field in dynamic_fields: data[name] = field.to_mongo(self._data.get(name, None)) + return data def validate(self, clean=True): diff --git a/tests/document/dynamic.py b/tests/document/dynamic.py index 
5881cd0..6263e68 100644 --- a/tests/document/dynamic.py +++ b/tests/document/dynamic.py @@ -31,8 +31,9 @@ class DynamicTest(unittest.TestCase): self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James", "age": 34}) - + self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"]) p.save() + self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"]) self.assertEqual(self.Person.objects.first().age, 34) diff --git a/tests/document/inheritance.py b/tests/document/inheritance.py index 3b550f1..f011631 100644 --- a/tests/document/inheritance.py +++ b/tests/document/inheritance.py @@ -143,7 +143,7 @@ class InheritanceTest(unittest.TestCase): self.assertEqual(Animal._superclasses, ()) self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish', - 'Animal.Fish.Pike')) + 'Animal.Fish.Pike')) self.assertEqual(Fish._superclasses, ('Animal', )) self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike')) @@ -168,6 +168,26 @@ class InheritanceTest(unittest.TestCase): self.assertEqual(Employee._get_collection_name(), Person._get_collection_name()) + def test_inheritance_to_mongo_keys(self): + """Ensure that document may inherit fields from a superclass document. 
+ """ + class Person(Document): + name = StringField() + age = IntField() + + meta = {'allow_inheritance': True} + + class Employee(Person): + salary = IntField() + + self.assertEqual(['age', 'id', 'name', 'salary'], + sorted(Employee._fields.keys())) + self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(), + ['_cls', 'name', 'age']) + self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(), + ['_cls', 'name', 'age', 'salary']) + self.assertEqual(Employee._get_collection_name(), + Person._get_collection_name()) def test_polymorphic_queries(self): """Ensure that the correct subclasses are returned from a query @@ -197,7 +217,6 @@ class InheritanceTest(unittest.TestCase): classes = [obj.__class__ for obj in Human.objects] self.assertEqual(classes, [Human]) - def test_allow_inheritance(self): """Ensure that inheritance may be disabled on simple classes and that _cls and _subclasses will not be used. @@ -213,8 +232,8 @@ class InheritanceTest(unittest.TestCase): self.assertRaises(ValueError, create_dog_class) # Check that _cls etc aren't present on simple documents - dog = Animal(name='dog') - dog.save() + dog = Animal(name='dog').save() + self.assertEqual(dog.to_mongo().keys(), ['_id', 'name']) collection = self.db[Animal._get_collection_name()] obj = collection.find_one() diff --git a/tests/document/instance.py b/tests/document/instance.py index b800d90..06744ab 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -428,6 +428,21 @@ class InstanceTest(unittest.TestCase): self.assertFalse('age' in person) self.assertFalse('nationality' in person) + def test_embedded_document_to_mongo(self): + class Person(EmbeddedDocument): + name = StringField() + age = IntField() + + meta = {"allow_inheritance": True} + + class Employee(Person): + salary = IntField() + + self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(), + ['_cls', 'name', 'age']) + self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(), + 
['_cls', 'name', 'age', 'salary']) + def test_embedded_document(self): """Ensure that embedded documents are set up correctly. """ From 5e65d278324c86bc615747108658957b4e9dff03 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 26 Apr 2013 11:46:12 +0000 Subject: [PATCH 249/464] PEP8 x == True should be x is True --- mongoengine/base/fields.py | 2 +- mongoengine/document.py | 2 +- mongoengine/queryset/queryset.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index 6ebba36..3929a3a 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -295,7 +295,7 @@ class ComplexBaseField(BaseField): meta = getattr(v, '_meta', {}) allow_inheritance = ( meta.get('allow_inheritance', ALLOW_INHERITANCE) - == True) + is True) if not allow_inheritance and not self.field: value_dict[k] = GenericReferenceField().to_mongo(v) else: diff --git a/mongoengine/document.py b/mongoengine/document.py index c4542a2..bd6ce19 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -548,7 +548,7 @@ class Document(BaseDocument): # If _cls is being used (for polymorphism), it needs an index, # only if another index doesn't begin with _cls if (index_cls and not cls_indexed and - cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) == True): + cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True): collection.ensure_index('_cls', background=background, **index_opts) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 5c7c7c8..65d6553 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -66,7 +66,7 @@ class QuerySet(object): # If inheritance is allowed, only return instances and instances of # subclasses of the class being used - if document._meta.get('allow_inheritance') == True: + if document._meta.get('allow_inheritance') is True: self._initial_query = {"_cls": {"$in": self._document._subclasses}} 
self._loaded_fields = QueryFieldList(always_include=['_cls']) self._cursor_obj = None From 6e2d2f33deeaee2b69685a9c2389cacdfefefa38 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 26 Apr 2013 14:33:40 +0000 Subject: [PATCH 250/464] Updated benchmarks for #27 --- benchmark.py | 143 ++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 113 insertions(+), 30 deletions(-) diff --git a/benchmark.py b/benchmark.py index 0197e1d..16b2fd4 100644 --- a/benchmark.py +++ b/benchmark.py @@ -86,17 +86,43 @@ def main(): ---------------------------------------------------------------------------------------------------- Creating 10000 dictionaries - MongoEngine, force=True 8.36906409264 + 0.8.X + ---------------------------------------------------------------------------------------------------- + Creating 10000 dictionaries - Pymongo + 3.69964408875 + ---------------------------------------------------------------------------------------------------- + Creating 10000 dictionaries - Pymongo write_concern={"w": 0} + 3.5526599884 + ---------------------------------------------------------------------------------------------------- + Creating 10000 dictionaries - MongoEngine + 7.00959801674 + ---------------------------------------------------------------------------------------------------- + Creating 10000 dictionaries without continual assign - MongoEngine + 5.60943293571 + ---------------------------------------------------------------------------------------------------- + Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade=True + 6.715102911 + ---------------------------------------------------------------------------------------------------- + Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True + 5.50644683838 + ---------------------------------------------------------------------------------------------------- + Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, 
validate=False + 4.69851183891 + ---------------------------------------------------------------------------------------------------- + Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False + 4.68946313858 + ---------------------------------------------------------------------------------------------------- """ setup = """ -from pymongo import Connection -connection = Connection() +from pymongo import MongoClient +connection = MongoClient() connection.drop_database('timeit_test') """ stmt = """ -from pymongo import Connection -connection = Connection() +from pymongo import MongoClient +connection = MongoClient() db = connection.timeit_test noddy = db.noddy @@ -106,7 +132,7 @@ for i in xrange(10000): for j in range(20): example['fields']["key"+str(j)] = "value "+str(j) - noddy.insert(example) + noddy.save(example) myNoddys = noddy.find() [n for n in myNoddys] # iterate @@ -117,9 +143,32 @@ myNoddys = noddy.find() t = timeit.Timer(stmt=stmt, setup=setup) print t.timeit(1) + stmt = """ +from pymongo import MongoClient +connection = MongoClient() + +db = connection.timeit_test +noddy = db.noddy + +for i in xrange(10000): + example = {'fields': {}} + for j in range(20): + example['fields']["key"+str(j)] = "value "+str(j) + + noddy.save(example, write_concern={"w": 0}) + +myNoddys = noddy.find() +[n for n in myNoddys] # iterate +""" + + print "-" * 100 + print """Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""" + t = timeit.Timer(stmt=stmt, setup=setup) + print t.timeit(1) + setup = """ -from pymongo import Connection -connection = Connection() +from pymongo import MongoClient +connection = MongoClient() connection.drop_database('timeit_test') connection.disconnect() @@ -149,33 +198,18 @@ myNoddys = Noddy.objects() stmt = """ for i in xrange(10000): noddy = Noddy() + fields = {} for j in range(20): - noddy.fields["key"+str(j)] = "value "+str(j) - noddy.save(safe=False, validate=False) + fields["key"+str(j)] = 
"value "+str(j) + noddy.fields = fields + noddy.save() myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ print "-" * 100 - print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False""" - t = timeit.Timer(stmt=stmt, setup=setup) - print t.timeit(1) - - - stmt = """ -for i in xrange(10000): - noddy = Noddy() - for j in range(20): - noddy.fields["key"+str(j)] = "value "+str(j) - noddy.save(safe=False, validate=False, cascade=False) - -myNoddys = Noddy.objects() -[n for n in myNoddys] # iterate -""" - - print "-" * 100 - print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False""" + print """Creating 10000 dictionaries without continual assign - MongoEngine""" t = timeit.Timer(stmt=stmt, setup=setup) print t.timeit(1) @@ -184,16 +218,65 @@ for i in xrange(10000): noddy = Noddy() for j in range(20): noddy.fields["key"+str(j)] = "value "+str(j) - noddy.save(force_insert=True, safe=False, validate=False, cascade=False) + noddy.save(write_concern={"w": 0}, cascade=True) myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ print "-" * 100 - print """Creating 10000 dictionaries - MongoEngine, force=True""" + print """Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""" t = timeit.Timer(stmt=stmt, setup=setup) print t.timeit(1) + stmt = """ +for i in xrange(10000): + noddy = Noddy() + for j in range(20): + noddy.fields["key"+str(j)] = "value "+str(j) + noddy.save(write_concern={"w": 0}, validate=False, cascade=True) + +myNoddys = Noddy.objects() +[n for n in myNoddys] # iterate +""" + + print "-" * 100 + print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""" + t = timeit.Timer(stmt=stmt, setup=setup) + print t.timeit(1) + + stmt = """ +for i in xrange(10000): + noddy = Noddy() + for j in range(20): + noddy.fields["key"+str(j)] = "value "+str(j) + noddy.save(validate=False, write_concern={"w": 0}) + +myNoddys = 
Noddy.objects() +[n for n in myNoddys] # iterate +""" + + print "-" * 100 + print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""" + t = timeit.Timer(stmt=stmt, setup=setup) + print t.timeit(1) + + stmt = """ +for i in xrange(10000): + noddy = Noddy() + for j in range(20): + noddy.fields["key"+str(j)] = "value "+str(j) + noddy.save(force_insert=True, write_concern={"w": 0}, validate=False) + +myNoddys = Noddy.objects() +[n for n in myNoddys] # iterate +""" + + print "-" * 100 + print """Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""" + t = timeit.Timer(stmt=stmt, setup=setup) + print t.timeit(1) + + if __name__ == "__main__": - main() + main() \ No newline at end of file From b0c1ec04b5f39be0f08d2765e1070c7ed0652d60 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 29 Apr 2013 07:38:31 +0000 Subject: [PATCH 251/464] Improvements to indexing documentation (#130) --- docs/guide/defining-documents.rst | 20 ++++++++++++++++++++ mongoengine/fields.py | 6 +++--- tests/document/indexes.py | 11 +++++------ 3 files changed, 28 insertions(+), 9 deletions(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 36e0efe..c404101 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -479,6 +479,10 @@ If a dictionary is passed then the following options are available: :attr:`unique` (Default: False) Whether the index should be unique. +:attr:`expireAfterSeconds` (Optional) + Allows you to automatically expire data from a collection by setting the + time in seconds to expire the a field. + .. note:: Inheritance adds extra fields indices see: :ref:`document-inheritance`. @@ -512,6 +516,22 @@ point. 
To create a geospatial index you must prefix the field with the ], } +Time To Live indexes +-------------------- + +A special index type that allows you to automatically expire data from a +collection after a given period. See the official +`ttl `_ +documentation for more information. A common usecase might be session data:: + + class Session(Document): + created = DateTimeField(default=datetime.now) + meta = { + 'indexes': [ + {'fields': ['created'], 'expireAfterSeconds': 3600} + ] + } + Ordering ======== A default ordering can be specified for your diff --git a/mongoengine/fields.py b/mongoengine/fields.py index cf2c802..bb2539c 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -8,6 +8,7 @@ import uuid import warnings from operator import itemgetter +import pymongo import gridfs from bson import Binary, DBRef, SON, ObjectId @@ -37,7 +38,6 @@ __all__ = ['StringField', 'URLField', 'EmailField', 'IntField', 'LongField', 'SequenceField', 'UUIDField'] - RECURSIVE_REFERENCE_CONSTANT = 'self' @@ -1392,7 +1392,7 @@ class GeoPointField(BaseField): .. 
versionadded:: 0.4 """ - _geo_index = True + _geo_index = pymongo.GEO2D def validate(self, value): """Make sure that a geo-value is of type (x, y) @@ -1404,7 +1404,7 @@ class GeoPointField(BaseField): if not len(value) == 2: self.error('Value must be a two-dimensional point') if (not isinstance(value[0], (float, int)) and - not isinstance(value[1], (float, int))): + not isinstance(value[1], (float, int))): self.error('Both values in point must be float or int') diff --git a/tests/document/indexes.py b/tests/document/indexes.py index 61e3c0e..99aeca6 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -217,7 +217,7 @@ class IndexesTest(unittest.TestCase): } self.assertEqual([{'fields': [('location.point', '2d')]}], - Place._meta['index_specs']) + Place._meta['index_specs']) Place.ensure_indexes() info = Place._get_collection().index_information() @@ -231,8 +231,7 @@ class IndexesTest(unittest.TestCase): location = DictField() class Place(Document): - current = DictField( - field=EmbeddedDocumentField('EmbeddedLocation')) + current = DictField(field=EmbeddedDocumentField('EmbeddedLocation')) meta = { 'allow_inheritance': True, 'indexes': [ @@ -241,7 +240,7 @@ class IndexesTest(unittest.TestCase): } self.assertEqual([{'fields': [('current.location.point', '2d')]}], - Place._meta['index_specs']) + Place._meta['index_specs']) Place.ensure_indexes() info = Place._get_collection().index_information() @@ -264,7 +263,7 @@ class IndexesTest(unittest.TestCase): self.assertEqual([{'fields': [('addDate', -1)], 'unique': True, 'sparse': True}], - BlogPost._meta['index_specs']) + BlogPost._meta['index_specs']) BlogPost.drop_collection() @@ -633,7 +632,7 @@ class IndexesTest(unittest.TestCase): list(Log.objects) info = Log.objects._collection.index_information() self.assertEqual(3600, - info['created_1']['expireAfterSeconds']) + info['created_1']['expireAfterSeconds']) def test_unique_and_indexes(self): """Ensure that 'unique' constraints aren't overridden by 
From 5d7444c115c043ed0a262f03193fd2f99dd55f1d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 29 Apr 2013 09:38:21 +0000 Subject: [PATCH 252/464] Ensure as_pymongo() and to_json honour only() and exclude() (#293) --- docs/changelog.rst | 1 + mongoengine/queryset/queryset.py | 15 +++++++++++---- tests/queryset/queryset.py | 22 ++++++++++++++++++++++ 3 files changed, 34 insertions(+), 4 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index f786c1d..699c5a7 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- Ensure as_pymongo() and to_json honour only() and exclude() (#293) - Document serialization uses field order to ensure a strict order is set (#296) - DecimalField now stores as float not string (#289) - UUIDField now stores as a binary by default (#292) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 65d6553..5ae889c 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -822,8 +822,7 @@ class QuerySet(object): def to_json(self): """Converts a queryset to JSON""" - queryset = self.clone() - return json_util.dumps(queryset._collection_obj.find(queryset._query)) + return json_util.dumps(self.as_pymongo()) def from_json(self, json_data): """Converts json data to unsaved objects""" @@ -1095,7 +1094,7 @@ class QuerySet(object): raise StopIteration if self._scalar: return self._get_scalar(self._document._from_son( - self._cursor.next())) + self._cursor.next())) if self._as_pymongo: return self._get_as_pymongo(self._cursor.next()) @@ -1370,7 +1369,15 @@ class QuerySet(object): new_data = {} for key, value in data.iteritems(): new_path = '%s.%s' % (path, key) if path else key - if all_fields or new_path in self.__as_pymongo_fields: + + if all_fields: + include_field = True + elif self._loaded_fields.value == QueryFieldList.ONLY: + include_field = new_path in self.__as_pymongo_fields + else: + include_field = 
new_path not in self.__as_pymongo_fields + + if include_field: new_data[key] = clean(value, path=new_path) data = new_data elif isinstance(data, list): diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 5e403c4..5bf8183 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3276,6 +3276,28 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(results[1]['name'], 'Barack Obama') self.assertEqual(results[1]['price'], Decimal('2.22')) + def test_as_pymongo_json_limit_fields(self): + + class User(Document): + email = EmailField(unique=True, required=True) + password_hash = StringField(db_field='password_hash', required=True) + password_salt = StringField(db_field='password_salt', required=True) + + User.drop_collection() + User(email="ross@example.com", password_salt="SomeSalt", password_hash="SomeHash").save() + + serialized_user = User.objects.exclude('password_salt', 'password_hash').as_pymongo()[0] + self.assertEqual(set(['_id', 'email']), set(serialized_user.keys())) + + serialized_user = User.objects.exclude('id', 'password_salt', 'password_hash').to_json() + self.assertEqual('[{"email": "ross@example.com"}]', serialized_user) + + serialized_user = User.objects.exclude('password_salt').only('email').as_pymongo()[0] + self.assertEqual(set(['email']), set(serialized_user.keys())) + + serialized_user = User.objects.exclude('password_salt').only('email').to_json() + self.assertEqual('[{"email": "ross@example.com"}]', serialized_user) + def test_no_dereference(self): class Organization(Document): From 85b81fb12a3e6fd4a1129602c433ce381d45e925 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 29 Apr 2013 10:36:11 +0000 Subject: [PATCH 253/464] If values cant be compared mark as changed (#287) --- docs/changelog.rst | 1 + mongoengine/base/fields.py | 17 ++++++++++------- tests/fields/fields.py | 21 +++++++++++++++++++++ 3 files changed, 32 insertions(+), 7 deletions(-) diff --git a/docs/changelog.rst 
b/docs/changelog.rst index 699c5a7..ffe94d1 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- If values cant be compared mark as changed (#287) - Ensure as_pymongo() and to_json honour only() and exclude() (#293) - Document serialization uses field order to ensure a strict order is set (#296) - DecimalField now stores as float not string (#289) diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index 3929a3a..d9ed278 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -81,13 +81,16 @@ class BaseField(object): def __set__(self, instance, value): """Descriptor for assigning a value to a field in a document. """ - changed = False - if (self.name not in instance._data or - instance._data[self.name] != value): - changed = True - instance._data[self.name] = value - if changed and instance._initialised: - instance._mark_as_changed(self.name) + if instance._initialised: + try: + if (self.name not in instance._data or + instance._data[self.name] != value): + instance._mark_as_changed(self.name) + except: + # Values cant be compared eg: naive and tz datetimes + # So mark it as changed + instance._mark_as_changed(self.name) + instance._data[self.name] = value def error(self, message="", errors=None, field_name=None): """Raises a ValidationError. 
diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 4fa6989..5474aa6 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -409,6 +409,27 @@ class FieldTest(unittest.TestCase): log.time = '1pm' self.assertRaises(ValidationError, log.validate) + def test_datetime_tz_aware_mark_as_changed(self): + from mongoengine import connection + + # Reset the connections + connection._connection_settings = {} + connection._connections = {} + connection._dbs = {} + + connect(db='mongoenginetest', tz_aware=True) + + class LogEntry(Document): + time = DateTimeField() + + LogEntry.drop_collection() + + LogEntry(time=datetime.datetime(2013, 1, 1, 0, 0, 0)).save() + + log = LogEntry.objects.first() + log.time = datetime.datetime(2013, 1, 1, 0, 0, 0) + self.assertEqual(['time'], log._changed_fields) + def test_datetime(self): """Tests showing pymongo datetime fields handling of microseconds. Microseconds are rounded to the nearest millisecond and pre UTC From 9c1cd81adb4d240b9783ce80cef275858b96d5ca Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 30 Apr 2013 14:46:23 +0000 Subject: [PATCH 254/464] Add support for new geojson fields, indexes and queries (#299) --- docs/apireference.rst | 5 +- docs/changelog.rst | 1 + docs/conf.py | 6 +- docs/django.rst | 8 +- docs/guide/defining-documents.rst | 29 +++ docs/guide/querying.rst | 72 ++++- docs/index.rst | 4 +- mongoengine/base/document.py | 24 +- mongoengine/base/fields.py | 112 +++++++- mongoengine/common.py | 3 +- mongoengine/document.py | 1 - mongoengine/fields.py | 108 ++++++-- mongoengine/queryset/queryset.py | 7 +- mongoengine/queryset/transform.py | 98 +++++-- tests/document/indexes.py | 28 +- tests/fields/__init__.py | 3 +- tests/fields/fields.py | 39 --- tests/fields/geo.py | 274 ++++++++++++++++++++ tests/queryset/__init__.py | 4 +- tests/queryset/geo.py | 418 ++++++++++++++++++++++++++++++ tests/queryset/queryset.py | 161 ------------ 21 files changed, 1101 insertions(+), 304 deletions(-) 
create mode 100644 tests/fields/geo.py create mode 100644 tests/queryset/geo.py diff --git a/docs/apireference.rst b/docs/apireference.rst index 3a15629..37370e2 100644 --- a/docs/apireference.rst +++ b/docs/apireference.rst @@ -76,10 +76,13 @@ Fields .. autoclass:: mongoengine.fields.BinaryField .. autoclass:: mongoengine.fields.FileField .. autoclass:: mongoengine.fields.ImageField -.. autoclass:: mongoengine.fields.GeoPointField .. autoclass:: mongoengine.fields.SequenceField .. autoclass:: mongoengine.fields.ObjectIdField .. autoclass:: mongoengine.fields.UUIDField +.. autoclass:: mongoengine.fields.GeoPointField +.. autoclass:: mongoengine.fields.PointField +.. autoclass:: mongoengine.fields.LineStringField +.. autoclass:: mongoengine.fields.PolygonField .. autoclass:: mongoengine.fields.GridFSError .. autoclass:: mongoengine.fields.GridFSProxy .. autoclass:: mongoengine.fields.ImageGridFsProxy diff --git a/docs/changelog.rst b/docs/changelog.rst index ffe94d1..207f0dd 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- Add support for new geojson fields, indexes and queries (#299) - If values cant be compared mark as changed (#287) - Ensure as_pymongo() and to_json honour only() and exclude() (#293) - Document serialization uses field order to ensure a strict order is set (#296) diff --git a/docs/conf.py b/docs/conf.py index 8bcb9ec..40c1f43 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -132,7 +132,11 @@ html_theme_path = ['_themes'] html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +html_sidebars = { + 'index': ['globaltoc.html', 'searchbox.html'], + '**': ['localtoc.html', 'relations.html', 'searchbox.html'] +} + # Additional templates that should be rendered to pages, maps page names to # template names. 
diff --git a/docs/django.rst b/docs/django.rst index d60e55d..09c91e7 100644 --- a/docs/django.rst +++ b/docs/django.rst @@ -1,8 +1,8 @@ -============================= -Using MongoEngine with Django -============================= +============== +Django Support +============== -.. note:: Updated to support Django 1.4 +.. note:: Updated to support Django 1.5 Connecting ========== diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index c404101..2c744b7 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -499,6 +499,35 @@ in this case use 'dot' notation to identify the value to index eg: `rank.title` Geospatial indexes ------------------ + +The best geo index for mongodb is the new "2dsphere", which has an improved +spherical model and provides better performance and more options when querying. +The following fields will explicitly add a "2dsphere" index: + + - :class:`~mongoengine.fields.PointField` + - :class:`~mongoengine.fields.LineStringField` + - :class:`~mongoengine.fields.PolygonField` + +As "2dsphere" indexes can be part of a compound index, you may not want the +automatic index but would prefer a compound index. In this example we turn off +auto indexing and explicitly declare a compound index on ``location`` and ``datetime``:: + + class Log(Document): + location = PointField(auto_index=False) + datetime = DateTimeField() + + meta = { + 'indexes': [[("location", "2dsphere"), ("datetime", 1)]] + } + + +Pre MongoDB 2.4 Geo +''''''''''''''''''' + +.. note:: For MongoDB < 2.4 this is still current, however the new 2dsphere + index is a big improvement over the previous 2D model - so upgrading is + advised. 
+ Geospatial indexes will be automatically created for all :class:`~mongoengine.fields.GeoPointField`\ s diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 3a25c28..f1b6470 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -65,6 +65,9 @@ Available operators are as follows: * ``size`` -- the size of the array is * ``exists`` -- value for field exists +String queries +-------------- + The following operators are available as shortcuts to querying with regular expressions: @@ -78,8 +81,71 @@ expressions: * ``iendswith`` -- string field ends with value (case insensitive) * ``match`` -- performs an $elemMatch so you can match an entire document within an array -There are a few special operators for performing geographical queries, that -may used with :class:`~mongoengine.fields.GeoPointField`\ s: + +Geo queries +----------- + +There are a few special operators for performing geographical queries. The following +were added in 0.8 for: :class:`~mongoengine.fields.PointField`, +:class:`~mongoengine.fields.LineStringField` and +:class:`~mongoengine.fields.PolygonField`: + +* ``geo_within`` -- Check if a geometry is within a polygon. 
For ease of use + it accepts either a geojson geometry or just the polygon coordinates eg:: + + loc.objects(point__geo_with=[[[40, 5], [40, 6], [41, 6], [40, 5]]]) + loc.objects(point__geo_with={"type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) + +* ``geo_within_box`` - simplified geo_within searching with a box eg:: + + loc.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)]) + loc.objects(point__geo_within_box=[, ]) + +* ``geo_within_polygon`` -- simplified geo_within searching within a simple polygon eg:: + + loc.objects(point__geo_within_polygon=[[40, 5], [40, 6], [41, 6], [40, 5]]) + loc.objects(point__geo_within_polygon=[ [ , ] , + [ , ] , + [ , ] ]) + +* ``geo_within_center`` -- simplified geo_within the flat circle radius of a point eg:: + + loc.objects(point__geo_within_center=[(-125.0, 35.0), 1]) + loc.objects(point__geo_within_center=[ [ , ] , ]) + +* ``geo_within_sphere`` -- simplified geo_within the spherical circle radius of a point eg:: + + loc.objects(point__geo_within_sphere=[(-125.0, 35.0), 1]) + loc.objects(point__geo_within_sphere=[ [ , ] , ]) + +* ``geo_intersects`` -- selects all locations that intersect with a geometry eg:: + + # Inferred from provided points lists: + loc.objects(poly__geo_intersects=[40, 6]) + loc.objects(poly__geo_intersects=[[40, 5], [40, 6]]) + loc.objects(poly__geo_intersects=[[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]) + + # With geoJson style objects + loc.objects(poly__geo_intersects={"type": "Point", "coordinates": [40, 6]}) + loc.objects(poly__geo_intersects={"type": "LineString", + "coordinates": [[40, 5], [40, 6]]}) + loc.objects(poly__geo_intersects={"type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}) + +* ``near`` -- Find all the locations near a given point:: + + loc.objects(point__near=[40, 5]) + loc.objects(point__near={"type": "Point", "coordinates": [40, 5]}) + + + You can also set the maximum distance in meters as well:: + + 
loc.objects(point__near=[40, 5], point__max_distance=1000) + + +The older 2D indexes are still supported with the +:class:`~mongoengine.fields.GeoPointField`: * ``within_distance`` -- provide a list containing a point and a maximum distance (e.g. [(41.342, -87.653), 5]) @@ -91,7 +157,9 @@ may used with :class:`~mongoengine.fields.GeoPointField`\ s: [(35.0, -125.0), (40.0, -100.0)]) * ``within_polygon`` -- filter documents to those within a given polygon (e.g. [(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]). + .. note:: Requires Mongo Server 2.0 + * ``max_distance`` -- can be added to your location queries to set a maximum distance. diff --git a/docs/index.rst b/docs/index.rst index 4aca82d..6358a31 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -56,14 +56,16 @@ See the :doc:`changelog` for a full list of changes to MongoEngine and putting updates live in production **;)** .. toctree:: + :maxdepth: 1 + :numbered: :hidden: tutorial guide/index apireference - django changelog upgrade + django Indices and tables ------------------ diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 53686b2..c2ccc48 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -662,7 +662,8 @@ class BaseDocument(object): if include_cls and direction is not pymongo.GEO2D: index_list.insert(0, ('_cls', 1)) - spec['fields'] = index_list + if index_list: + spec['fields'] = index_list if spec.get('sparse', False) and len(spec['fields']) > 1: raise ValueError( 'Sparse indexes can only have one field in them. 
' @@ -704,13 +705,13 @@ class BaseDocument(object): # Add the new index to the list fields = [("%s%s" % (namespace, f), pymongo.ASCENDING) - for f in unique_fields] + for f in unique_fields] index = {'fields': fields, 'unique': True, 'sparse': sparse} unique_indexes.append(index) # Grab any embedded document field unique indexes if (field.__class__.__name__ == "EmbeddedDocumentField" and - field.document_type != cls): + field.document_type != cls): field_namespace = "%s." % field_name doc_cls = field.document_type unique_indexes += doc_cls._unique_with_indexes(field_namespace) @@ -718,26 +719,31 @@ class BaseDocument(object): return unique_indexes @classmethod - def _geo_indices(cls, inspected=None): + def _geo_indices(cls, inspected=None, parent_field=None): inspected = inspected or [] geo_indices = [] inspected.append(cls) - EmbeddedDocumentField = _import_class("EmbeddedDocumentField") - GeoPointField = _import_class("GeoPointField") + geo_field_type_names = ["EmbeddedDocumentField", "GeoPointField", + "PointField", "LineStringField", "PolygonField"] + + geo_field_types = tuple([_import_class(field) for field in geo_field_type_names]) for field in cls._fields.values(): - if not isinstance(field, (EmbeddedDocumentField, GeoPointField)): + if not isinstance(field, geo_field_types): continue if hasattr(field, 'document_type'): field_cls = field.document_type if field_cls in inspected: continue if hasattr(field_cls, '_geo_indices'): - geo_indices += field_cls._geo_indices(inspected) + geo_indices += field_cls._geo_indices(inspected, parent_field=field.db_field) elif field._geo_index: + field_name = field.db_field + if parent_field: + field_name = "%s.%s" % (parent_field, field_name) geo_indices.append({'fields': - [(field.db_field, pymongo.GEO2D)]}) + [(field_name, field._geo_index)]}) return geo_indices @classmethod diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index d9ed278..fa0b134 100644 --- a/mongoengine/base/fields.py +++ 
b/mongoengine/base/fields.py @@ -2,7 +2,8 @@ import operator import warnings import weakref -from bson import DBRef, ObjectId +from bson import DBRef, ObjectId, SON +import pymongo from mongoengine.common import _import_class from mongoengine.errors import ValidationError @@ -10,7 +11,7 @@ from mongoengine.errors import ValidationError from mongoengine.base.common import ALLOW_INHERITANCE from mongoengine.base.datastructures import BaseDict, BaseList -__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField") +__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField") class BaseField(object): @@ -186,7 +187,7 @@ class ComplexBaseField(BaseField): # Convert lists / values so we can watch for any changes on them if (isinstance(value, (list, tuple)) and - not isinstance(value, BaseList)): + not isinstance(value, BaseList)): value = BaseList(value, instance, self.name) instance._data[self.name] = value elif isinstance(value, dict) and not isinstance(value, BaseDict): @@ -194,8 +195,8 @@ class ComplexBaseField(BaseField): instance._data[self.name] = value if (self._auto_dereference and instance._initialised and - isinstance(value, (BaseList, BaseDict)) - and not value._dereferenced): + isinstance(value, (BaseList, BaseDict)) + and not value._dereferenced): value = self._dereference( value, max_depth=1, instance=instance, name=self.name ) @@ -231,7 +232,7 @@ class ComplexBaseField(BaseField): if self.field: value_dict = dict([(key, self.field.to_python(item)) - for key, item in value.items()]) + for key, item in value.items()]) else: value_dict = {} for k, v in value.items(): @@ -282,7 +283,7 @@ class ComplexBaseField(BaseField): if self.field: value_dict = dict([(key, self.field.to_mongo(item)) - for key, item in value.iteritems()]) + for key, item in value.iteritems()]) else: value_dict = {} for k, v in value.iteritems(): @@ -396,3 +397,100 @@ class ObjectIdField(BaseField): ObjectId(unicode(value)) except: self.error('Invalid Object ID') + + 
+class GeoJsonBaseField(BaseField): + """A geo json field storing a geojson style object. + .. versionadded:: 0.8 + """ + + _geo_index = pymongo.GEOSPHERE + _type = "GeoBase" + + def __init__(self, auto_index=True, *args, **kwargs): + """ + :param auto_index: Automatically create a "2dsphere" index. Defaults + to `True`. + """ + self._name = "%sField" % self._type + if not auto_index: + self._geo_index = False + super(GeoJsonBaseField, self).__init__(*args, **kwargs) + + def validate(self, value): + """Validate the GeoJson object based on its type + """ + if isinstance(value, dict): + if set(value.keys()) == set(['type', 'coordinates']): + if value['type'] != self._type: + self.error('%s type must be "%s"' % (self._name, self._type)) + return self.validate(value['coordinates']) + else: + self.error('%s can only accept a valid GeoJson dictionary' + ' or lists of (x, y)' % self._name) + return + elif not isinstance(value, (list, tuple)): + self.error('%s can only accept lists of [x, y]' % self._name) + return + + validate = getattr(self, "_validate_%s" % self._type.lower()) + error = validate(value) + if error: + self.error(error) + + def _validate_polygon(self, value): + if not isinstance(value, (list, tuple)): + return 'Polygons must contain list of linestrings' + + # Quick and dirty validator + try: + value[0][0][0] + except: + return "Invalid Polygon must contain at least one valid linestring" + + errors = [] + for val in value: + error = self._validate_linestring(val, False) + if not error and val[0] != val[-1]: + error = 'LineStrings must start and end at the same point' + if error and error not in errors: + errors.append(error) + if errors: + return "Invalid Polygon:\n%s" % ", ".join(set(errors)) + + def _validate_linestring(self, value, top_level=True): + """Validates a linestring""" + if not isinstance(value, (list, tuple)): + return 'LineStrings must contain list of coordinate pairs' + + # Quick and dirty validator + try: + value[0][0] + except: + return 
"Invalid LineString must contain at least one valid point" + + errors = [] + for val in value: + error = self._validate_point(val) + if error and error not in errors: + errors.append(error) + if errors: + if top_level: + return "Invalid LineString:\n%s" % ", ".join(errors) + else: + return "%s" % ", ".join(set(errors)) + + def _validate_point(self, value): + """Validate each set of coords""" + if not isinstance(value, (list, tuple)): + return 'Points must be a list of coordinate pairs' + elif not len(value) == 2: + return "Value (%s) must be a two-dimensional point" % repr(value) + elif (not isinstance(value[0], (float, int)) or + not isinstance(value[1], (float, int))): + return "Both values (%s) in point must be float or int" % repr(value) + + def to_mongo(self, value): + if isinstance(value, dict): + return value + return SON([("type", self._type), ("coordinates", value)]) diff --git a/mongoengine/common.py b/mongoengine/common.py index 718ac0b..bff55ac 100644 --- a/mongoengine/common.py +++ b/mongoengine/common.py @@ -11,6 +11,7 @@ def _import_class(cls_name): field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField', 'FileField', 'GenericReferenceField', 'GenericEmbeddedDocumentField', 'GeoPointField', + 'PointField', 'LineStringField', 'PolygonField', 'ReferenceField', 'StringField', 'ComplexBaseField') queryset_classes = ('OperationError',) deref_classes = ('DeReference',) @@ -33,4 +34,4 @@ def _import_class(cls_name): for cls in import_classes: _class_registry_cache[cls] = getattr(module, cls) - return _class_registry_cache.get(cls_name) \ No newline at end of file + return _class_registry_cache.get(cls_name) diff --git a/mongoengine/document.py b/mongoengine/document.py index bd6ce19..143802c 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -523,7 +523,6 @@ class Document(BaseDocument): # an extra index on _cls, as mongodb will use the existing # index to service queries against _cls cls_indexed = False - def 
includes_cls(fields): first_field = None if len(fields): diff --git a/mongoengine/fields.py b/mongoengine/fields.py index bb2539c..274ad3c 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -15,7 +15,7 @@ from bson import Binary, DBRef, SON, ObjectId from mongoengine.errors import ValidationError from mongoengine.python_support import (PY3, bin_type, txt_type, str_types, StringIO) -from base import (BaseField, ComplexBaseField, ObjectIdField, +from base import (BaseField, ComplexBaseField, ObjectIdField, GeoJsonBaseField, get_document, BaseDocument) from queryset import DO_NOTHING, QuerySet from document import Document, EmbeddedDocument @@ -34,8 +34,8 @@ __all__ = ['StringField', 'URLField', 'EmailField', 'IntField', 'LongField', 'SortedListField', 'DictField', 'MapField', 'ReferenceField', 'GenericReferenceField', 'BinaryField', 'GridFSError', 'GridFSProxy', 'FileField', 'ImageGridFsProxy', - 'ImproperlyConfigured', 'ImageField', 'GeoPointField', - 'SequenceField', 'UUIDField'] + 'ImproperlyConfigured', 'ImageField', 'GeoPointField', 'PointField', + 'LineStringField', 'PolygonField', 'SequenceField', 'UUIDField'] RECURSIVE_REFERENCE_CONSTANT = 'self' @@ -1386,28 +1386,6 @@ class ImageField(FileField): **kwargs) -class GeoPointField(BaseField): - """A list storing a latitude and longitude. - - .. 
versionadded:: 0.4 - """ - - _geo_index = pymongo.GEO2D - - def validate(self, value): - """Make sure that a geo-value is of type (x, y) - """ - if not isinstance(value, (list, tuple)): - self.error('GeoPointField can only accept tuples or lists ' - 'of (x, y)') - - if not len(value) == 2: - self.error('Value must be a two-dimensional point') - if (not isinstance(value[0], (float, int)) and - not isinstance(value[1], (float, int))): - self.error('Both values in point must be float or int') - - class SequenceField(BaseField): """Provides a sequental counter see: http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers @@ -1548,3 +1526,83 @@ class UUIDField(BaseField): value = uuid.UUID(value) except Exception, exc: self.error('Could not convert to UUID: %s' % exc) + + +class GeoPointField(BaseField): + """A list storing a latitude and longitude. + + .. versionadded:: 0.4 + """ + + _geo_index = pymongo.GEO2D + + def validate(self, value): + """Make sure that a geo-value is of type (x, y) + """ + if not isinstance(value, (list, tuple)): + self.error('GeoPointField can only accept tuples or lists ' + 'of (x, y)') + + if not len(value) == 2: + self.error("Value (%s) must be a two-dimensional point" % repr(value)) + elif (not isinstance(value[0], (float, int)) or + not isinstance(value[1], (float, int))): + self.error("Both values (%s) in point must be float or int" % repr(value)) + + +class PointField(GeoJsonBaseField): + """A geo json field storing a latitude and longitude. + + The data is represented as: + + .. code-block:: js + + { "type" : "Point" , + "coordinates" : [x, y]} + + You can either pass a dict with the full information or a list + to set the value. + + Requires mongodb >= 2.4 + .. versionadded:: 0.8 + """ + _type = "Point" + + +class LineStringField(GeoJsonBaseField): + """A geo json field storing a line of latitude and longitude coordinates. + + The data is represented as: + + .. 
code-block:: js + + { "type" : "LineString" , + "coordinates" : [[x1, y1], [x1, y1] ... [xn, yn]]} + + You can either pass a dict with the full information or a list of points. + + Requires mongodb >= 2.4 + .. versionadded:: 0.8 + """ + _type = "LineString" + + +class PolygonField(GeoJsonBaseField): + """A geo json field storing a polygon of latitude and longitude coordinates. + + The data is represented as: + + .. code-block:: js + + { "type" : "Polygon" , + "coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]], + [[x1, y1], [x1, y1] ... [xn, yn]]} + + You can either pass a dict with the full information or a list + of LineStrings. The first LineString being the outside and the rest being + holes. + + Requires mongodb >= 2.4 + .. versionadded:: 0.8 + """ + _type = "Polygon" diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 5ae889c..bfb5a48 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1422,15 +1422,14 @@ class QuerySet(object): code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, - code) + code) return code # Deprecated - def ensure_index(self, **kwargs): """Deprecated use :func:`~Document.ensure_index`""" msg = ("Doc.objects()._ensure_index() is deprecated. " - "Use Doc.ensure_index() instead.") + "Use Doc.ensure_index() instead.") warnings.warn(msg, DeprecationWarning) self._document.__class__.ensure_index(**kwargs) return self @@ -1438,6 +1437,6 @@ class QuerySet(object): def _ensure_indexes(self): """Deprecated use :func:`~Document.ensure_indexes`""" msg = ("Doc.objects()._ensure_indexes() is deprecated. 
" - "Use Doc.ensure_indexes() instead.") + "Use Doc.ensure_indexes() instead.") warnings.warn(msg, DeprecationWarning) self._document.__class__.ensure_indexes() diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 3da2693..96d9904 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -1,5 +1,6 @@ from collections import defaultdict +import pymongo from bson import SON from mongoengine.common import _import_class @@ -12,7 +13,9 @@ COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', 'all', 'size', 'exists', 'not') GEO_OPERATORS = ('within_distance', 'within_spherical_distance', 'within_box', 'within_polygon', 'near', 'near_sphere', - 'max_distance') + 'max_distance', 'geo_within', 'geo_within_box', + 'geo_within_polygon', 'geo_within_center', + 'geo_within_sphere', 'geo_intersects') STRING_OPERATORS = ('contains', 'icontains', 'startswith', 'istartswith', 'endswith', 'iendswith', 'exact', 'iexact') @@ -81,30 +84,14 @@ def query(_doc_cls=None, _field_operation=False, **query): value = field else: value = field.prepare_query_value(op, value) - elif op in ('in', 'nin', 'all', 'near'): + elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): # 'in', 'nin' and 'all' require a list of values value = [field.prepare_query_value(op, v) for v in value] # if op and op not in COMPARISON_OPERATORS: if op: if op in GEO_OPERATORS: - if op == "within_distance": - value = {'$within': {'$center': value}} - elif op == "within_spherical_distance": - value = {'$within': {'$centerSphere': value}} - elif op == "within_polygon": - value = {'$within': {'$polygon': value}} - elif op == "near": - value = {'$near': value} - elif op == "near_sphere": - value = {'$nearSphere': value} - elif op == 'within_box': - value = {'$within': {'$box': value}} - elif op == "max_distance": - value = {'$maxDistance': value} - else: - raise NotImplementedError("Geo method '%s' has not " - "been 
implemented" % op) + value = _geo_operator(field, op, value) elif op in CUSTOM_OPERATORS: if op == 'match': value = {"$elemMatch": value} @@ -250,3 +237,76 @@ def update(_doc_cls=None, **update): mongo_update[key].update(value) return mongo_update + + +def _geo_operator(field, op, value): + """Helper to return the query for a given geo query""" + if field._geo_index == pymongo.GEO2D: + if op == "within_distance": + value = {'$within': {'$center': value}} + elif op == "within_spherical_distance": + value = {'$within': {'$centerSphere': value}} + elif op == "within_polygon": + value = {'$within': {'$polygon': value}} + elif op == "near": + value = {'$near': value} + elif op == "near_sphere": + value = {'$nearSphere': value} + elif op == 'within_box': + value = {'$within': {'$box': value}} + elif op == "max_distance": + value = {'$maxDistance': value} + else: + raise NotImplementedError("Geo method '%s' has not " + "been implemented for a GeoPointField" % op) + else: + if op == "geo_within": + value = {"$geoWithin": _infer_geometry(value)} + elif op == "geo_within_box": + value = {"$geoWithin": {"$box": value}} + elif op == "geo_within_polygon": + value = {"$geoWithin": {"$polygon": value}} + elif op == "geo_within_center": + value = {"$geoWithin": {"$center": value}} + elif op == "geo_within_sphere": + value = {"$geoWithin": {"$centerSphere": value}} + elif op == "geo_intersects": + value = {"$geoIntersects": _infer_geometry(value)} + elif op == "near": + value = {'$near': _infer_geometry(value)} + elif op == "max_distance": + value = {'$maxDistance': value} + else: + raise NotImplementedError("Geo method '%s' has not " + "been implemented for a %s " % (op, field._name)) + return value + + +def _infer_geometry(value): + """Helper method that tries to infer the $geometry shape for a given value""" + if isinstance(value, dict): + if "$geometry" in value: + return value + elif 'coordinates' in value and 'type' in value: + return {"$geometry": value} + raise 
InvalidQueryError("Invalid $geometry dictionary should have " + "type and coordinates keys") + elif isinstance(value, (list, set)): + try: + value[0][0][0] + return {"$geometry": {"type": "Polygon", "coordinates": value}} + except: + pass + try: + value[0][0] + return {"$geometry": {"type": "LineString", "coordinates": value}} + except: + pass + try: + value[0] + return {"$geometry": {"type": "Point", "coordinates": value}} + except: + pass + + raise InvalidQueryError("Invalid $geometry data. Can be either a dictionary " + "or (nested) lists of coordinate(s)") diff --git a/tests/document/indexes.py b/tests/document/indexes.py index 99aeca6..ddc147b 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -381,8 +381,7 @@ class IndexesTest(unittest.TestCase): self.assertEqual(sorted(info.keys()), ['_id_', 'tags.tag_1']) post1 = BlogPost(title="Embedded Indexes tests in place", - tags=[Tag(name="about"), Tag(name="time")] - ) + tags=[Tag(name="about"), Tag(name="time")]) post1.save() BlogPost.drop_collection() @@ -399,29 +398,6 @@ class IndexesTest(unittest.TestCase): info = RecursiveDocument._get_collection().index_information() self.assertEqual(sorted(info.keys()), ['_cls_1', '_id_']) - def test_geo_indexes_recursion(self): - - class Location(Document): - name = StringField() - location = GeoPointField() - - class Parent(Document): - name = StringField() - location = ReferenceField(Location, dbref=False) - - Location.drop_collection() - Parent.drop_collection() - - list(Parent.objects) - - collection = Parent._get_collection() - info = collection.index_information() - - self.assertFalse('location_2d' in info) - - self.assertEqual(len(Parent._geo_indices()), 0) - self.assertEqual(len(Location._geo_indices()), 1) - def test_covered_index(self): """Ensure that covered indexes can be used """ @@ -432,7 +408,7 @@ class IndexesTest(unittest.TestCase): meta = { 'indexes': ['a'], 'allow_inheritance': False - } + } Test.drop_collection() diff --git 
a/tests/fields/__init__.py b/tests/fields/__init__.py index 0731838..be70aaa 100644 --- a/tests/fields/__init__.py +++ b/tests/fields/__init__.py @@ -1,2 +1,3 @@ from fields import * -from file_tests import * \ No newline at end of file +from file_tests import * +from geo import * \ No newline at end of file diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 5474aa6..f7ab63e 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -1862,45 +1862,6 @@ class FieldTest(unittest.TestCase): Shirt.drop_collection() - def test_geo_indexes(self): - """Ensure that indexes are created automatically for GeoPointFields. - """ - class Event(Document): - title = StringField() - location = GeoPointField() - - Event.drop_collection() - event = Event(title="Coltrane Motion @ Double Door", - location=[41.909889, -87.677137]) - event.save() - - info = Event.objects._collection.index_information() - self.assertTrue(u'location_2d' in info) - self.assertTrue(info[u'location_2d']['key'] == [(u'location', u'2d')]) - - Event.drop_collection() - - def test_geo_embedded_indexes(self): - """Ensure that indexes are created automatically for GeoPointFields on - embedded documents. 
- """ - class Venue(EmbeddedDocument): - location = GeoPointField() - name = StringField() - - class Event(Document): - title = StringField() - venue = EmbeddedDocumentField(Venue) - - Event.drop_collection() - venue = Venue(name="Double Door", location=[41.909889, -87.677137]) - event = Event(title="Coltrane Motion", venue=venue) - event.save() - - info = Event.objects._collection.index_information() - self.assertTrue(u'location_2d' in info) - self.assertTrue(info[u'location_2d']['key'] == [(u'location', u'2d')]) - def test_ensure_unique_default_instances(self): """Ensure that every field has it's own unique default instance.""" class D(Document): diff --git a/tests/fields/geo.py b/tests/fields/geo.py new file mode 100644 index 0000000..2936f72 --- /dev/null +++ b/tests/fields/geo.py @@ -0,0 +1,274 @@ +# -*- coding: utf-8 -*- +import sys +sys.path[0:0] = [""] + +import unittest + +from mongoengine import * +from mongoengine.connection import get_db + +__all__ = ("GeoFieldTest", ) + + +class GeoFieldTest(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + self.db = get_db() + + def _test_for_expected_error(self, Cls, loc, expected): + try: + Cls(loc=loc).validate() + self.fail() + except ValidationError, e: + self.assertEqual(expected, e.to_dict()['loc']) + + def test_geopoint_validation(self): + class Location(Document): + loc = GeoPointField() + + invalid_coords = [{"x": 1, "y": 2}, 5, "a"] + expected = 'GeoPointField can only accept tuples or lists of (x, y)' + + for coord in invalid_coords: + self._test_for_expected_error(Location, coord, expected) + + invalid_coords = [[], [1], [1, 2, 3]] + for coord in invalid_coords: + expected = "Value (%s) must be a two-dimensional point" % repr(coord) + self._test_for_expected_error(Location, coord, expected) + + invalid_coords = [[{}, {}], ("a", "b")] + for coord in invalid_coords: + expected = "Both values (%s) in point must be float or int" % repr(coord) + self._test_for_expected_error(Location, 
coord, expected) + + def test_point_validation(self): + class Location(Document): + loc = PointField() + + invalid_coords = {"x": 1, "y": 2} + expected = 'PointField can only accept a valid GeoJson dictionary or lists of (x, y)' + self._test_for_expected_error(Location, invalid_coords, expected) + + invalid_coords = {"type": "MadeUp", "coordinates": []} + expected = 'PointField type must be "Point"' + self._test_for_expected_error(Location, invalid_coords, expected) + + invalid_coords = {"type": "Point", "coordinates": [1, 2, 3]} + expected = "Value ([1, 2, 3]) must be a two-dimensional point" + self._test_for_expected_error(Location, invalid_coords, expected) + + invalid_coords = [5, "a"] + expected = "PointField can only accept lists of [x, y]" + for coord in invalid_coords: + self._test_for_expected_error(Location, coord, expected) + + invalid_coords = [[], [1], [1, 2, 3]] + for coord in invalid_coords: + expected = "Value (%s) must be a two-dimensional point" % repr(coord) + self._test_for_expected_error(Location, coord, expected) + + invalid_coords = [[{}, {}], ("a", "b")] + for coord in invalid_coords: + expected = "Both values (%s) in point must be float or int" % repr(coord) + self._test_for_expected_error(Location, coord, expected) + + Location(loc=[1, 2]).validate() + + def test_linestring_validation(self): + class Location(Document): + loc = LineStringField() + + invalid_coords = {"x": 1, "y": 2} + expected = 'LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)' + self._test_for_expected_error(Location, invalid_coords, expected) + + invalid_coords = {"type": "MadeUp", "coordinates": [[]]} + expected = 'LineStringField type must be "LineString"' + self._test_for_expected_error(Location, invalid_coords, expected) + + invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]} + expected = "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point" + self._test_for_expected_error(Location, invalid_coords, 
expected) + + invalid_coords = [5, "a"] + expected = "Invalid LineString must contain at least one valid point" + self._test_for_expected_error(Location, invalid_coords, expected) + + invalid_coords = [[1]] + expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0]) + self._test_for_expected_error(Location, invalid_coords, expected) + + invalid_coords = [[1, 2, 3]] + expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0]) + self._test_for_expected_error(Location, invalid_coords, expected) + + invalid_coords = [[[{}, {}]], [("a", "b")]] + for coord in invalid_coords: + expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0]) + self._test_for_expected_error(Location, coord, expected) + + Location(loc=[[1, 2], [3, 4], [5, 6], [1,2]]).validate() + + def test_polygon_validation(self): + class Location(Document): + loc = PolygonField() + + invalid_coords = {"x": 1, "y": 2} + expected = 'PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)' + self._test_for_expected_error(Location, invalid_coords, expected) + + invalid_coords = {"type": "MadeUp", "coordinates": [[]]} + expected = 'PolygonField type must be "Polygon"' + self._test_for_expected_error(Location, invalid_coords, expected) + + invalid_coords = {"type": "Polygon", "coordinates": [[[1, 2, 3]]]} + expected = "Invalid Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point" + self._test_for_expected_error(Location, invalid_coords, expected) + + invalid_coords = [[[5, "a"]]] + expected = "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int" + self._test_for_expected_error(Location, invalid_coords, expected) + + invalid_coords = [[[]]] + expected = "Invalid Polygon must contain at least one valid linestring" + self._test_for_expected_error(Location, invalid_coords, expected) + + invalid_coords = [[[1, 2, 3]]] + expected = "Invalid 
Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point" + self._test_for_expected_error(Location, invalid_coords, expected) + + invalid_coords = [[[{}, {}]], [("a", "b")]] + expected = "Invalid Polygon:\nBoth values ([{}, {}]) in point must be float or int, Both values (('a', 'b')) in point must be float or int" + self._test_for_expected_error(Location, invalid_coords, expected) + + invalid_coords = [[[1, 2], [3, 4]]] + expected = "Invalid Polygon:\nLineStrings must start and end at the same point" + self._test_for_expected_error(Location, invalid_coords, expected) + + Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate() + + def test_indexes_geopoint(self): + """Ensure that indexes are created automatically for GeoPointFields. + """ + class Event(Document): + title = StringField() + location = GeoPointField() + + geo_indicies = Event._geo_indices() + self.assertEqual(geo_indicies, [{'fields': [('location', '2d')]}]) + + def test_geopoint_embedded_indexes(self): + """Ensure that indexes are created automatically for GeoPointFields on + embedded documents. + """ + class Venue(EmbeddedDocument): + location = GeoPointField() + name = StringField() + + class Event(Document): + title = StringField() + venue = EmbeddedDocumentField(Venue) + + geo_indicies = Event._geo_indices() + self.assertEqual(geo_indicies, [{'fields': [('venue.location', '2d')]}]) + + def test_indexes_2dsphere(self): + """Ensure that indexes are created automatically for GeoPointFields. + """ + class Event(Document): + title = StringField() + point = PointField() + line = LineStringField() + polygon = PolygonField() + + geo_indicies = Event._geo_indices() + self.assertEqual(geo_indicies, [{'fields': [('line', '2dsphere')]}, + {'fields': [('polygon', '2dsphere')]}, + {'fields': [('point', '2dsphere')]}]) + + def test_indexes_2dsphere_embedded(self): + """Ensure that indexes are created automatically for GeoPointFields. 
+ """ + class Venue(EmbeddedDocument): + name = StringField() + point = PointField() + line = LineStringField() + polygon = PolygonField() + + class Event(Document): + title = StringField() + venue = EmbeddedDocumentField(Venue) + + geo_indicies = Event._geo_indices() + self.assertTrue({'fields': [('venue.line', '2dsphere')]} in geo_indicies) + self.assertTrue({'fields': [('venue.polygon', '2dsphere')]} in geo_indicies) + self.assertTrue({'fields': [('venue.point', '2dsphere')]} in geo_indicies) + + def test_geo_indexes_recursion(self): + + class Location(Document): + name = StringField() + location = GeoPointField() + + class Parent(Document): + name = StringField() + location = ReferenceField(Location) + + Location.drop_collection() + Parent.drop_collection() + + list(Parent.objects) + + collection = Parent._get_collection() + info = collection.index_information() + + self.assertFalse('location_2d' in info) + + self.assertEqual(len(Parent._geo_indices()), 0) + self.assertEqual(len(Location._geo_indices()), 1) + + def test_geo_indexes_auto_index(self): + + # Test just listing the fields + class Log(Document): + location = PointField(auto_index=False) + datetime = DateTimeField() + + meta = { + 'indexes': [[("location", "2dsphere"), ("datetime", 1)]] + } + + self.assertEqual([], Log._geo_indices()) + + Log.drop_collection() + Log.ensure_indexes() + + info = Log._get_collection().index_information() + self.assertEqual(info["location_2dsphere_datetime_1"]["key"], + [('location', '2dsphere'), ('datetime', 1)]) + + # Test listing explicitly + class Log(Document): + location = PointField(auto_index=False) + datetime = DateTimeField() + + meta = { + 'indexes': [ + {'fields': [("location", "2dsphere"), ("datetime", 1)]} + ] + } + + self.assertEqual([], Log._geo_indices()) + + Log.drop_collection() + Log.ensure_indexes() + + info = Log._get_collection().index_information() + self.assertEqual(info["location_2dsphere_datetime_1"]["key"], + [('location', '2dsphere'), 
('datetime', 1)]) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/queryset/__init__.py b/tests/queryset/__init__.py index 93cb8c2..8a93c19 100644 --- a/tests/queryset/__init__.py +++ b/tests/queryset/__init__.py @@ -1,5 +1,5 @@ - from transform import * from field_list import * from queryset import * -from visitor import * \ No newline at end of file +from visitor import * +from geo import * diff --git a/tests/queryset/geo.py b/tests/queryset/geo.py new file mode 100644 index 0000000..f564896 --- /dev/null +++ b/tests/queryset/geo.py @@ -0,0 +1,418 @@ +import sys +sys.path[0:0] = [""] + +import unittest +from datetime import datetime, timedelta +from mongoengine import * + +__all__ = ("GeoQueriesTest",) + + +class GeoQueriesTest(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + + def test_geospatial_operators(self): + """Ensure that geospatial queries are working. + """ + class Event(Document): + title = StringField() + date = DateTimeField() + location = GeoPointField() + + def __unicode__(self): + return self.title + + Event.drop_collection() + + event1 = Event(title="Coltrane Motion @ Double Door", + date=datetime.now() - timedelta(days=1), + location=[-87.677137, 41.909889]).save() + event2 = Event(title="Coltrane Motion @ Bottom of the Hill", + date=datetime.now() - timedelta(days=10), + location=[-122.4194155, 37.7749295]).save() + event3 = Event(title="Coltrane Motion @ Empty Bottle", + date=datetime.now(), + location=[-87.686638, 41.900474]).save() + + # find all events "near" pitchfork office, chicago. + # note that "near" will show the san francisco event, too, + # although it sorts to last. 
+ events = Event.objects(location__near=[-87.67892, 41.9120459]) + self.assertEqual(events.count(), 3) + self.assertEqual(list(events), [event1, event3, event2]) + + # find events within 5 degrees of pitchfork office, chicago + point_and_distance = [[-87.67892, 41.9120459], 5] + events = Event.objects(location__within_distance=point_and_distance) + self.assertEqual(events.count(), 2) + events = list(events) + self.assertTrue(event2 not in events) + self.assertTrue(event1 in events) + self.assertTrue(event3 in events) + + # ensure ordering is respected by "near" + events = Event.objects(location__near=[-87.67892, 41.9120459]) + events = events.order_by("-date") + self.assertEqual(events.count(), 3) + self.assertEqual(list(events), [event3, event1, event2]) + + # find events within 10 degrees of san francisco + point = [-122.415579, 37.7566023] + events = Event.objects(location__near=point, location__max_distance=10) + self.assertEqual(events.count(), 1) + self.assertEqual(events[0], event2) + + # find events within 10 degrees of san francisco + point_and_distance = [[-122.415579, 37.7566023], 10] + events = Event.objects(location__within_distance=point_and_distance) + self.assertEqual(events.count(), 1) + self.assertEqual(events[0], event2) + + # find events within 1 degree of greenpoint, broolyn, nyc, ny + point_and_distance = [[-73.9509714, 40.7237134], 1] + events = Event.objects(location__within_distance=point_and_distance) + self.assertEqual(events.count(), 0) + + # ensure ordering is respected by "within_distance" + point_and_distance = [[-87.67892, 41.9120459], 10] + events = Event.objects(location__within_distance=point_and_distance) + events = events.order_by("-date") + self.assertEqual(events.count(), 2) + self.assertEqual(events[0], event3) + + # check that within_box works + box = [(-125.0, 35.0), (-100.0, 40.0)] + events = Event.objects(location__within_box=box) + self.assertEqual(events.count(), 1) + self.assertEqual(events[0].id, event2.id) + + 
polygon = [ + (-87.694445, 41.912114), + (-87.69084, 41.919395), + (-87.681742, 41.927186), + (-87.654276, 41.911731), + (-87.656164, 41.898061), + ] + events = Event.objects(location__within_polygon=polygon) + self.assertEqual(events.count(), 1) + self.assertEqual(events[0].id, event1.id) + + polygon2 = [ + (-1.742249, 54.033586), + (-1.225891, 52.792797), + (-4.40094, 53.389881) + ] + events = Event.objects(location__within_polygon=polygon2) + self.assertEqual(events.count(), 0) + + def test_geo_spatial_embedded(self): + + class Venue(EmbeddedDocument): + location = GeoPointField() + name = StringField() + + class Event(Document): + title = StringField() + venue = EmbeddedDocumentField(Venue) + + Event.drop_collection() + + venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889]) + venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295]) + + event1 = Event(title="Coltrane Motion @ Double Door", + venue=venue1).save() + event2 = Event(title="Coltrane Motion @ Bottom of the Hill", + venue=venue2).save() + event3 = Event(title="Coltrane Motion @ Empty Bottle", + venue=venue1).save() + + # find all events "near" pitchfork office, chicago. + # note that "near" will show the san francisco event, too, + # although it sorts to last. + events = Event.objects(venue__location__near=[-87.67892, 41.9120459]) + self.assertEqual(events.count(), 3) + self.assertEqual(list(events), [event1, event3, event2]) + + def test_spherical_geospatial_operators(self): + """Ensure that spherical geospatial queries are working + """ + class Point(Document): + location = GeoPointField() + + Point.drop_collection() + + # These points are one degree apart, which (according to Google Maps) + # is about 110 km apart at this place on the Earth. 
+ north_point = Point(location=[-122, 38]).save() # Near Concord, CA + south_point = Point(location=[-122, 37]).save() # Near Santa Cruz, CA + + earth_radius = 6378.009 # in km (needs to be a float for dividing by) + + # Finds both points because they are within 60 km of the reference + # point equidistant between them. + points = Point.objects(location__near_sphere=[-122, 37.5]) + self.assertEqual(points.count(), 2) + + # Same behavior for _within_spherical_distance + points = Point.objects( + location__within_spherical_distance=[[-122, 37.5], 60/earth_radius] + ) + self.assertEqual(points.count(), 2) + + points = Point.objects(location__near_sphere=[-122, 37.5], + location__max_distance=60 / earth_radius) + self.assertEqual(points.count(), 2) + + # Finds both points, but orders the north point first because it's + # closer to the reference point to the north. + points = Point.objects(location__near_sphere=[-122, 38.5]) + self.assertEqual(points.count(), 2) + self.assertEqual(points[0].id, north_point.id) + self.assertEqual(points[1].id, south_point.id) + + # Finds both points, but orders the south point first because it's + # closer to the reference point to the south. + points = Point.objects(location__near_sphere=[-122, 36.5]) + self.assertEqual(points.count(), 2) + self.assertEqual(points[0].id, south_point.id) + self.assertEqual(points[1].id, north_point.id) + + # Finds only one point because only the first point is within 60km of + # the reference point to the south. 
+ points = Point.objects( + location__within_spherical_distance=[[-122, 36.5], 60/earth_radius]) + self.assertEqual(points.count(), 1) + self.assertEqual(points[0].id, south_point.id) + + def test_2dsphere_point(self): + + class Event(Document): + title = StringField() + date = DateTimeField() + location = PointField() + + def __unicode__(self): + return self.title + + Event.drop_collection() + + event1 = Event(title="Coltrane Motion @ Double Door", + date=datetime.now() - timedelta(days=1), + location=[-87.677137, 41.909889]) + event1.save() + event2 = Event(title="Coltrane Motion @ Bottom of the Hill", + date=datetime.now() - timedelta(days=10), + location=[-122.4194155, 37.7749295]).save() + event3 = Event(title="Coltrane Motion @ Empty Bottle", + date=datetime.now(), + location=[-87.686638, 41.900474]).save() + + # find all events "near" pitchfork office, chicago. + # note that "near" will show the san francisco event, too, + # although it sorts to last. + events = Event.objects(location__near=[-87.67892, 41.9120459]) + self.assertEqual(events.count(), 3) + self.assertEqual(list(events), [event1, event3, event2]) + + # find events within 5 degrees of pitchfork office, chicago + point_and_distance = [[-87.67892, 41.9120459], 2] + events = Event.objects(location__geo_within_center=point_and_distance) + self.assertEqual(events.count(), 2) + events = list(events) + self.assertTrue(event2 not in events) + self.assertTrue(event1 in events) + self.assertTrue(event3 in events) + + # ensure ordering is respected by "near" + events = Event.objects(location__near=[-87.67892, 41.9120459]) + events = events.order_by("-date") + self.assertEqual(events.count(), 3) + self.assertEqual(list(events), [event3, event1, event2]) + + # find events within 10km of san francisco + point = [-122.415579, 37.7566023] + events = Event.objects(location__near=point, location__max_distance=10000) + self.assertEqual(events.count(), 1) + self.assertEqual(events[0], event2) + + # find events 
within 1km of greenpoint, broolyn, nyc, ny + events = Event.objects(location__near=[-73.9509714, 40.7237134], location__max_distance=1000) + self.assertEqual(events.count(), 0) + + # ensure ordering is respected by "near" + events = Event.objects(location__near=[-87.67892, 41.9120459], + location__max_distance=10000).order_by("-date") + self.assertEqual(events.count(), 2) + self.assertEqual(events[0], event3) + + # check that within_box works + box = [(-125.0, 35.0), (-100.0, 40.0)] + events = Event.objects(location__geo_within_box=box) + self.assertEqual(events.count(), 1) + self.assertEqual(events[0].id, event2.id) + + polygon = [ + (-87.694445, 41.912114), + (-87.69084, 41.919395), + (-87.681742, 41.927186), + (-87.654276, 41.911731), + (-87.656164, 41.898061), + ] + events = Event.objects(location__geo_within_polygon=polygon) + self.assertEqual(events.count(), 1) + self.assertEqual(events[0].id, event1.id) + + polygon2 = [ + (-1.742249, 54.033586), + (-1.225891, 52.792797), + (-4.40094, 53.389881) + ] + events = Event.objects(location__geo_within_polygon=polygon2) + self.assertEqual(events.count(), 0) + + def test_2dsphere_point_embedded(self): + + class Venue(EmbeddedDocument): + location = GeoPointField() + name = StringField() + + class Event(Document): + title = StringField() + venue = EmbeddedDocumentField(Venue) + + Event.drop_collection() + + venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889]) + venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295]) + + event1 = Event(title="Coltrane Motion @ Double Door", + venue=venue1).save() + event2 = Event(title="Coltrane Motion @ Bottom of the Hill", + venue=venue2).save() + event3 = Event(title="Coltrane Motion @ Empty Bottle", + venue=venue1).save() + + # find all events "near" pitchfork office, chicago. + # note that "near" will show the san francisco event, too, + # although it sorts to last. 
+ events = Event.objects(venue__location__near=[-87.67892, 41.9120459]) + self.assertEqual(events.count(), 3) + self.assertEqual(list(events), [event1, event3, event2]) + + def test_linestring(self): + + class Road(Document): + name = StringField() + line = LineStringField() + + Road.drop_collection() + + Road(name="66", line=[[40, 5], [41, 6]]).save() + + # near + point = {"type": "Point", "coordinates": [40, 5]} + roads = Road.objects.filter(line__near=point["coordinates"]).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(line__near=point).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(line__near={"$geometry": point}).count() + self.assertEqual(1, roads) + + # Within + polygon = {"type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]} + roads = Road.objects.filter(line__geo_within=polygon["coordinates"]).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(line__geo_within=polygon).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(line__geo_within={"$geometry": polygon}).count() + self.assertEqual(1, roads) + + # Intersects + line = {"type": "LineString", + "coordinates": [[40, 5], [40, 6]]} + roads = Road.objects.filter(line__geo_intersects=line["coordinates"]).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(line__geo_intersects=line).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(line__geo_intersects={"$geometry": line}).count() + self.assertEqual(1, roads) + + polygon = {"type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]} + roads = Road.objects.filter(line__geo_intersects=polygon["coordinates"]).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(line__geo_intersects=polygon).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(line__geo_intersects={"$geometry": polygon}).count() + self.assertEqual(1, roads) + + def test_polygon(self): + + class 
Road(Document): + name = StringField() + poly = PolygonField() + + Road.drop_collection() + + Road(name="66", poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save() + + # near + point = {"type": "Point", "coordinates": [40, 5]} + roads = Road.objects.filter(poly__near=point["coordinates"]).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(poly__near=point).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(poly__near={"$geometry": point}).count() + self.assertEqual(1, roads) + + # Within + polygon = {"type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]} + roads = Road.objects.filter(poly__geo_within=polygon["coordinates"]).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(poly__geo_within=polygon).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(poly__geo_within={"$geometry": polygon}).count() + self.assertEqual(1, roads) + + # Intersects + line = {"type": "LineString", + "coordinates": [[40, 5], [41, 6]]} + roads = Road.objects.filter(poly__geo_intersects=line["coordinates"]).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(poly__geo_intersects=line).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(poly__geo_intersects={"$geometry": line}).count() + self.assertEqual(1, roads) + + polygon = {"type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]} + roads = Road.objects.filter(poly__geo_intersects=polygon["coordinates"]).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(poly__geo_intersects=polygon).count() + self.assertEqual(1, roads) + + roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count() + self.assertEqual(1, roads) + +if __name__ == '__main__': + unittest.main() diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 5bf8183..40aef7e 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -2380,167 +2380,6 
@@ class QuerySetTest(unittest.TestCase): def tearDown(self): self.Person.drop_collection() - def test_geospatial_operators(self): - """Ensure that geospatial queries are working. - """ - class Event(Document): - title = StringField() - date = DateTimeField() - location = GeoPointField() - - def __unicode__(self): - return self.title - - Event.drop_collection() - - event1 = Event(title="Coltrane Motion @ Double Door", - date=datetime.now() - timedelta(days=1), - location=[41.909889, -87.677137]) - event2 = Event(title="Coltrane Motion @ Bottom of the Hill", - date=datetime.now() - timedelta(days=10), - location=[37.7749295, -122.4194155]) - event3 = Event(title="Coltrane Motion @ Empty Bottle", - date=datetime.now(), - location=[41.900474, -87.686638]) - - event1.save() - event2.save() - event3.save() - - # find all events "near" pitchfork office, chicago. - # note that "near" will show the san francisco event, too, - # although it sorts to last. - events = Event.objects(location__near=[41.9120459, -87.67892]) - self.assertEqual(events.count(), 3) - self.assertEqual(list(events), [event1, event3, event2]) - - # find events within 5 degrees of pitchfork office, chicago - point_and_distance = [[41.9120459, -87.67892], 5] - events = Event.objects(location__within_distance=point_and_distance) - self.assertEqual(events.count(), 2) - events = list(events) - self.assertTrue(event2 not in events) - self.assertTrue(event1 in events) - self.assertTrue(event3 in events) - - # ensure ordering is respected by "near" - events = Event.objects(location__near=[41.9120459, -87.67892]) - events = events.order_by("-date") - self.assertEqual(events.count(), 3) - self.assertEqual(list(events), [event3, event1, event2]) - - # find events within 10 degrees of san francisco - point = [37.7566023, -122.415579] - events = Event.objects(location__near=point, location__max_distance=10) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0], event2) - - # find events within 10 
degrees of san francisco - point_and_distance = [[37.7566023, -122.415579], 10] - events = Event.objects(location__within_distance=point_and_distance) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0], event2) - - # find events within 1 degree of greenpoint, broolyn, nyc, ny - point_and_distance = [[40.7237134, -73.9509714], 1] - events = Event.objects(location__within_distance=point_and_distance) - self.assertEqual(events.count(), 0) - - # ensure ordering is respected by "within_distance" - point_and_distance = [[41.9120459, -87.67892], 10] - events = Event.objects(location__within_distance=point_and_distance) - events = events.order_by("-date") - self.assertEqual(events.count(), 2) - self.assertEqual(events[0], event3) - - # check that within_box works - box = [(35.0, -125.0), (40.0, -100.0)] - events = Event.objects(location__within_box=box) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0].id, event2.id) - - # check that polygon works for users who have a server >= 1.9 - server_version = tuple( - get_connection().server_info()['version'].split('.') - ) - required_version = tuple("1.9.0".split(".")) - if server_version >= required_version: - polygon = [ - (41.912114,-87.694445), - (41.919395,-87.69084), - (41.927186,-87.681742), - (41.911731,-87.654276), - (41.898061,-87.656164), - ] - events = Event.objects(location__within_polygon=polygon) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0].id, event1.id) - - polygon2 = [ - (54.033586,-1.742249), - (52.792797,-1.225891), - (53.389881,-4.40094) - ] - events = Event.objects(location__within_polygon=polygon2) - self.assertEqual(events.count(), 0) - - Event.drop_collection() - - def test_spherical_geospatial_operators(self): - """Ensure that spherical geospatial queries are working - """ - class Point(Document): - location = GeoPointField() - - Point.drop_collection() - - # These points are one degree apart, which (according to Google Maps) - # is about 110 km apart 
at this place on the Earth. - north_point = Point(location=[-122, 38]) # Near Concord, CA - south_point = Point(location=[-122, 37]) # Near Santa Cruz, CA - north_point.save() - south_point.save() - - earth_radius = 6378.009; # in km (needs to be a float for dividing by) - - # Finds both points because they are within 60 km of the reference - # point equidistant between them. - points = Point.objects(location__near_sphere=[-122, 37.5]) - self.assertEqual(points.count(), 2) - - # Same behavior for _within_spherical_distance - points = Point.objects( - location__within_spherical_distance=[[-122, 37.5], 60/earth_radius] - ); - self.assertEqual(points.count(), 2) - - points = Point.objects(location__near_sphere=[-122, 37.5], - location__max_distance=60 / earth_radius); - self.assertEqual(points.count(), 2) - - # Finds both points, but orders the north point first because it's - # closer to the reference point to the north. - points = Point.objects(location__near_sphere=[-122, 38.5]) - self.assertEqual(points.count(), 2) - self.assertEqual(points[0].id, north_point.id) - self.assertEqual(points[1].id, south_point.id) - - # Finds both points, but orders the south point first because it's - # closer to the reference point to the south. - points = Point.objects(location__near_sphere=[-122, 36.5]) - self.assertEqual(points.count(), 2) - self.assertEqual(points[0].id, south_point.id) - self.assertEqual(points[1].id, north_point.id) - - # Finds only one point because only the first point is within 60km of - # the reference point to the south. - points = Point.objects( - location__within_spherical_distance=[[-122, 36.5], 60/earth_radius]) - self.assertEqual(points.count(), 1) - self.assertEqual(points[0].id, south_point.id) - - Point.drop_collection() - def test_custom_querysets(self): """Ensure that custom QuerySet classes may be used. 
""" From 68f760b56375255173bb31b04af485f5987c96da Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 30 Apr 2013 15:05:41 +0000 Subject: [PATCH 255/464] get_db() only assigns the db after authentication (#257) --- mongoengine/connection.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 3c53ea3..abab269 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -137,11 +137,12 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): if alias not in _dbs: conn = get_connection(alias) conn_settings = _connection_settings[alias] - _dbs[alias] = conn[conn_settings['name']] + db = conn[conn_settings['name']] # Authenticate if necessary if conn_settings['username'] and conn_settings['password']: - _dbs[alias].authenticate(conn_settings['username'], - conn_settings['password']) + db.authenticate(conn_settings['username'], + conn_settings['password']) + _dbs[alias] = db return _dbs[alias] From 473d5ead7bf2b61e3b7dbab2845e496994ef5aed Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 30 Apr 2013 16:42:38 +0000 Subject: [PATCH 256/464] Geo errors fix and test update --- mongoengine/base/fields.py | 4 ++-- tests/fields/geo.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index fa0b134..72a9e8e 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -456,7 +456,7 @@ class GeoJsonBaseField(BaseField): if error and error not in errors: errors.append(error) if errors: - return "Invalid Polygon:\n%s" % ", ".join(set(errors)) + return "Invalid Polygon:\n%s" % ", ".join(errors) def _validate_linestring(self, value, top_level=True): """Validates a linestring""" @@ -478,7 +478,7 @@ class GeoJsonBaseField(BaseField): if top_level: return "Invalid LineString:\n%s" % ", ".join(errors) else: - return "%s" % ", ".join(set(errors)) + return "%s" % ", ".join(errors) def 
_validate_point(self, value): """Validate each set of coords""" diff --git a/tests/fields/geo.py b/tests/fields/geo.py index 2936f72..31ded26 100644 --- a/tests/fields/geo.py +++ b/tests/fields/geo.py @@ -184,9 +184,9 @@ class GeoFieldTest(unittest.TestCase): polygon = PolygonField() geo_indicies = Event._geo_indices() - self.assertEqual(geo_indicies, [{'fields': [('line', '2dsphere')]}, - {'fields': [('polygon', '2dsphere')]}, - {'fields': [('point', '2dsphere')]}]) + self.assertTrue({'fields': [('line', '2dsphere')]} in geo_indicies) + self.assertTrue({'fields': [('polygon', '2dsphere')]} in geo_indicies) + self.assertTrue({'fields': [('point', '2dsphere')]} in geo_indicies) def test_indexes_2dsphere_embedded(self): """Ensure that indexes are created automatically for GeoPointFields. From 7aa1f473785ed17cff280835285a69e401fd9b86 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 30 Apr 2013 16:46:08 +0000 Subject: [PATCH 257/464] Updated minimum requirements --- .travis.yml | 1 - docs/changelog.rst | 1 + docs/upgrade.rst | 6 +++--- setup.py | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index e78bda5..b7c56a0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -11,7 +11,6 @@ env: - PYMONGO=dev DJANGO=1.4.2 - PYMONGO=2.5 DJANGO=1.5.1 - PYMONGO=2.5 DJANGO=1.4.2 - - PYMONGO=2.4.2 DJANGO=1.4.2 install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi diff --git a/docs/changelog.rst b/docs/changelog.rst index 207f0dd..6aa6214 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- Updated minimum requirement for pymongo to 2.5 - Add support for new geojson fields, indexes and queries (#299) - If values cant be compared mark as changed (#287) - Ensure as_pymongo() and to_json honour only() and exclude() (#293) diff --git 
a/docs/upgrade.rst b/docs/upgrade.rst index bb5705c..c633c28 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -15,10 +15,10 @@ possible for the whole of the release. live. There maybe multiple manual steps in migrating and these are best honed on a staging / test system. -Python -======= +Python and PyMongo +================== -Support for python 2.5 has been dropped. +MongoEngine requires python 2.6 (or above) and pymongo 2.5 (or above) Data Model ========== diff --git a/setup.py b/setup.py index bdd0182..10a6dbc 100644 --- a/setup.py +++ b/setup.py @@ -74,7 +74,7 @@ setup(name='mongoengine', long_description=LONG_DESCRIPTION, platforms=['any'], classifiers=CLASSIFIERS, - install_requires=['pymongo'], + install_requires=['pymongo>=2.5'], test_suite='nose.collector', **extra_opts ) From 1c345edc49b9b5e382fdb8b64ab6bf5058d48288 Mon Sep 17 00:00:00 2001 From: Alex Kelly Date: Tue, 30 Apr 2013 21:36:43 +0100 Subject: [PATCH 258/464] Updated tests for passing write_concern to update and update_one to check return. 
--- tests/queryset/queryset.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 40aef7e..7ca0596 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -287,15 +287,19 @@ class QuerySetTest(unittest.TestCase): name='Test User', write_concern=write_concern) author.save(write_concern=write_concern) - self.Person.objects.update(set__name='Ross', - write_concern=write_concern) + result = self.Person.objects.update( + set__name='Ross',write_concern={"w": 1}) + self.assertEqual(result, 1) + result = self.Person.objects.update( + set__name='Ross',write_concern={"w": 0}) + self.assertEqual(result, None) - author = self.Person.objects.first() - self.assertEqual(author.name, 'Ross') - - self.Person.objects.update_one(set__name='Test User', write_concern=write_concern) - author = self.Person.objects.first() - self.assertEqual(author.name, 'Test User') + result = self.Person.objects.update_one( + set__name='Test User', write_concern={"w": 1}) + self.assertEqual(result, 1) + result = self.Person.objects.update_one( + set__name='Test User', write_concern={"w": 0}) + self.assertEqual(result, None) def test_update_update_has_a_value(self): """Test to ensure that update is passed a value to update to""" From 00a57f6cea8ba679281deba4fd4362ca23fb06c8 Mon Sep 17 00:00:00 2001 From: Alex Kelly Date: Tue, 30 Apr 2013 21:13:49 +0100 Subject: [PATCH 259/464] Pass write_concern parameter from update_one --- mongoengine/queryset/queryset.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index bfb5a48..1739f05 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -469,7 +469,8 @@ class QuerySet(object): .. 
versionadded:: 0.2 """ - return self.update(upsert=upsert, multi=False, write_concern=None, **update) + return self.update( + upsert=upsert, multi=False, write_concern=write_concern, **update) def with_id(self, object_id): """Retrieve the object matching the id provided. Uses `object_id` only From e58b3390aa8855659c006a8758fa23c075cdcb68 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 1 May 2013 08:48:14 +0000 Subject: [PATCH 260/464] Removed import with from future --- AUTHORS | 1 + docs/changelog.rst | 1 + mongoengine/document.py | 1 - tests/document/class_methods.py | 1 - tests/document/indexes.py | 1 - tests/document/instance.py | 1 - tests/fields/fields.py | 1 - tests/fields/file_tests.py | 1 - tests/queryset/queryset.py | 1 - tests/queryset/transform.py | 1 - tests/queryset/visitor.py | 1 - tests/test_connection.py | 1 - tests/test_context_managers.py | 1 - tests/test_dereference.py | 1 - tests/test_django.py | 1 - 15 files changed, 2 insertions(+), 13 deletions(-) diff --git a/AUTHORS b/AUTHORS index 44e19bf..181ad5a 100644 --- a/AUTHORS +++ b/AUTHORS @@ -157,3 +157,4 @@ that much better: * Kenneth Falck * Lukasz Balcerzak * Nicolas Cortot + * Alex (https://github.com/kelsta) diff --git a/docs/changelog.rst b/docs/changelog.rst index 6aa6214..314967e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.X ================ +- Fixed update_one write concern (#302) - Updated minimum requirement for pymongo to 2.5 - Add support for new geojson fields, indexes and queries (#299) - If values cant be compared mark as changed (#287) diff --git a/mongoengine/document.py b/mongoengine/document.py index 143802c..0e9be56 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -1,4 +1,3 @@ -from __future__ import with_statement import warnings import pymongo diff --git a/tests/document/class_methods.py b/tests/document/class_methods.py index 83e68ff..231dd8f 100644 --- a/tests/document/class_methods.py +++ 
b/tests/document/class_methods.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import with_statement import sys sys.path[0:0] = [""] import unittest diff --git a/tests/document/indexes.py b/tests/document/indexes.py index ddc147b..04d5632 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import with_statement import unittest import sys sys.path[0:0] = [""] diff --git a/tests/document/instance.py b/tests/document/instance.py index 06744ab..d8df0b2 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import with_statement import sys sys.path[0:0] = [""] diff --git a/tests/fields/fields.py b/tests/fields/fields.py index f7ab63e..3047156 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import with_statement import sys sys.path[0:0] = [""] diff --git a/tests/fields/file_tests.py b/tests/fields/file_tests.py index c5842d8..52bd88a 100644 --- a/tests/fields/file_tests.py +++ b/tests/fields/file_tests.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import with_statement import sys sys.path[0:0] = [""] diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 40aef7e..dfaae85 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -1,4 +1,3 @@ -from __future__ import with_statement import sys sys.path[0:0] = [""] diff --git a/tests/queryset/transform.py b/tests/queryset/transform.py index bde4b6f..7886965 100644 --- a/tests/queryset/transform.py +++ b/tests/queryset/transform.py @@ -1,4 +1,3 @@ -from __future__ import with_statement import sys sys.path[0:0] = [""] diff --git a/tests/queryset/visitor.py b/tests/queryset/visitor.py index bd81a65..2e9195e 100644 --- a/tests/queryset/visitor.py +++ b/tests/queryset/visitor.py @@ -1,4 +1,3 @@ -from __future__ import with_statement import sys 
sys.path[0:0] = [""] diff --git a/tests/test_connection.py b/tests/test_connection.py index 4b8a3d1..d792648 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -1,4 +1,3 @@ -from __future__ import with_statement import sys sys.path[0:0] = [""] import unittest diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index eef63be..f87d638 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -1,4 +1,3 @@ -from __future__ import with_statement import sys sys.path[0:0] = [""] import unittest diff --git a/tests/test_dereference.py b/tests/test_dereference.py index ef5a10d..e146963 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import with_statement import sys sys.path[0:0] = [""] import unittest diff --git a/tests/test_django.py b/tests/test_django.py index 573c072..e30fe3c 100644 --- a/tests/test_django.py +++ b/tests/test_django.py @@ -1,4 +1,3 @@ -from __future__ import with_statement import sys sys.path[0:0] = [""] import unittest From 9654fe0d8d4657ed24b98bb1396faea77813b290 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 1 May 2013 09:30:20 +0000 Subject: [PATCH 261/464] 0.8.0RC1 is a go! 
--- docs/changelog.rst | 2 +- mongoengine/__init__.py | 2 +- python-mongoengine.spec | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 314967e..6140925 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -2,7 +2,7 @@ Changelog ========= -Changes in 0.8.X +Changes in 0.8.0 ================ - Fixed update_one write concern (#302) - Updated minimum requirement for pymongo to 2.5 diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index 6fe6d08..0a3ca24 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -15,7 +15,7 @@ import django __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + list(queryset.__all__) + signals.__all__ + list(errors.__all__)) -VERSION = (0, 8, 0, '+') +VERSION = (0, 8, 0, 'RC1') def get_version(): diff --git a/python-mongoengine.spec b/python-mongoengine.spec index eaf478d..33ea48c 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.7.10 +Version: 0.8.0.RC1 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB From cd73654683a2c68d951e7a9c33310fc9fc1ab211 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 1 May 2013 09:48:58 +0000 Subject: [PATCH 262/464] Update readme --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 5eab502..ea4b505 100644 --- a/README.rst +++ b/README.rst @@ -26,7 +26,7 @@ setup.py install``. 
Dependencies ============ -- pymongo 2.1.1+ +- pymongo 2.5+ - sphinx (optional - for documentation generation) Examples From 8c9afbd278eac13afdb9e12e23ec0e324d56d539 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 1 May 2013 19:40:49 +0000 Subject: [PATCH 263/464] Fix cloning of sliced querysets (#303) --- mongoengine/queryset/queryset.py | 14 +++----- tests/test_django.py | 60 +++++++++++++++++++++++++++++--- 2 files changed, 60 insertions(+), 14 deletions(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 1739f05..c1c9378 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -72,7 +72,6 @@ class QuerySet(object): self._cursor_obj = None self._limit = None self._skip = None - self._slice = None self._hint = -1 # Using -1 as None is a valid value for hint def __call__(self, q_obj=None, class_check=True, slave_okay=False, @@ -127,8 +126,10 @@ class QuerySet(object): if isinstance(key, slice): try: queryset._cursor_obj = queryset._cursor[key] - queryset._slice = key queryset._skip, queryset._limit = key.start, key.stop + queryset._limit + if key.start and key.stop: + queryset._limit = key.stop - key.start except IndexError, err: # PyMongo raises an error if key.start == key.stop, catch it, # bin it, kill it. 
@@ -537,15 +538,9 @@ class QuerySet(object): val = getattr(self, prop) setattr(c, prop, copy.copy(val)) - if self._slice: - c._slice = self._slice - if self._cursor_obj: c._cursor_obj = self._cursor_obj.clone() - if self._slice: - c._cursor[self._slice] - return c def select_related(self, max_depth=1): @@ -571,7 +566,6 @@ class QuerySet(object): else: queryset._cursor.limit(n) queryset._limit = n - # Return self to allow chaining return queryset @@ -1155,7 +1149,7 @@ class QuerySet(object): self._cursor_obj.sort(order) if self._limit is not None: - self._cursor_obj.limit(self._limit - (self._skip or 0)) + self._cursor_obj.limit(self._limit) if self._skip is not None: self._cursor_obj.skip(self._skip) diff --git a/tests/test_django.py b/tests/test_django.py index e30fe3c..f81213c 100644 --- a/tests/test_django.py +++ b/tests/test_django.py @@ -150,22 +150,74 @@ class QuerySetTest(unittest.TestCase): # Try iterating the same queryset twice, nested, in a Django template. names = ['A', 'B', 'C', 'D'] - class User(Document): + class CustomUser(Document): name = StringField() def __unicode__(self): return self.name - User.drop_collection() + CustomUser.drop_collection() for name in names: - User(name=name).save() + CustomUser(name=name).save() - users = User.objects.all().order_by('name') + users = CustomUser.objects.all().order_by('name') template = Template("{% for user in users %}{{ user.name }}{% ifequal forloop.counter 2 %} {% for inner_user in users %}{{ inner_user.name }}{% endfor %} {% endifequal %}{% endfor %}") rendered = template.render(Context({'users': users})) self.assertEqual(rendered, 'AB ABCD CD') + def test_filter(self): + """Ensure that a queryset and filters work as expected + """ + + class Note(Document): + text = StringField() + + for i in xrange(1, 101): + Note(name="Note: %s" % i).save() + + # Check the count + self.assertEqual(Note.objects.count(), 100) + + # Get the first 10 and confirm + notes = Note.objects[:10] + 
self.assertEqual(notes.count(), 10) + + # Test djangos template filters + # self.assertEqual(length(notes), 10) + t = Template("{{ notes.count }}") + c = Context({"notes": notes}) + self.assertEqual(t.render(c), "10") + + # Test with skip + notes = Note.objects.skip(90) + self.assertEqual(notes.count(), 10) + + # Test djangos template filters + self.assertEqual(notes.count(), 10) + t = Template("{{ notes.count }}") + c = Context({"notes": notes}) + self.assertEqual(t.render(c), "10") + + # Test with limit + notes = Note.objects.skip(90) + self.assertEqual(notes.count(), 10) + + # Test djangos template filters + self.assertEqual(notes.count(), 10) + t = Template("{{ notes.count }}") + c = Context({"notes": notes}) + self.assertEqual(t.render(c), "10") + + # Test with skip and limit + notes = Note.objects.skip(10).limit(10) + + # Test djangos template filters + self.assertEqual(notes.count(), 10) + t = Template("{{ notes.count }}") + c = Context({"notes": notes}) + self.assertEqual(t.render(c), "10") + class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): backend = SessionStore From 1cdbade7619f887c017905d42cec374ee012d8ff Mon Sep 17 00:00:00 2001 From: Jin Date: Wed, 1 May 2013 16:54:48 -0700 Subject: [PATCH 264/464] fixed typo in defining-documents.rst --- docs/guide/defining-documents.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 2c744b7..0ee5ad3 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -493,7 +493,7 @@ Compound Indexes and Indexing sub documents Compound indexes can be created by adding the Embedded field or dictionary field name to the index definition. 
-Sometimes its more efficient to index parts of Embeedded / dictionary fields, +Sometimes its more efficient to index parts of Embedded / dictionary fields, in this case use 'dot' notation to identify the value to index eg: `rank.title` Geospatial indexes From 3002e79c9844973d545a1f9560b88bff0cee4b71 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 2 May 2013 07:35:33 +0000 Subject: [PATCH 265/464] Updated changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 6140925..edadbd7 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.0 ================ +- Fix cloning of sliced querysets (#303) - Fixed update_one write concern (#302) - Updated minimum requirement for pymongo to 2.5 - Add support for new geojson fields, indexes and queries (#299) From 268dd80cd09ddcc4be8df8547d0ccc6eae8a5618 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 2 May 2013 07:35:44 +0000 Subject: [PATCH 266/464] Added Jin Zhang to authors (#304) --- AUTHORS | 2 ++ 1 file changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index 181ad5a..0ff48e8 100644 --- a/AUTHORS +++ b/AUTHORS @@ -158,3 +158,5 @@ that much better: * Lukasz Balcerzak * Nicolas Cortot * Alex (https://github.com/kelsta) + * Jin Zhang + From 4a71c5b4249610a16752046ae9268207c3d272a9 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 2 May 2013 10:47:37 +0000 Subject: [PATCH 267/464] Updates to CONTRIBUTING.rst --- CONTRIBUTING.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 9688339..8754896 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -20,7 +20,7 @@ post to the `user group ` Supported Interpreters ---------------------- -PyMongo supports CPython 2.5 and newer. Language +MongoEngine supports CPython 2.6 and newer. Language features not supported by all interpreters can not be used. 
Please also ensure that your code is properly converted by `2to3 `_ for Python 3 support. @@ -46,7 +46,7 @@ General Guidelines - Write tests and make sure they pass (make sure you have a mongod running on the default port, then execute ``python setup.py test`` from the cmd line to run the test suite). -- Add yourself to AUTHORS.rst :) +- Add yourself to AUTHORS :) Documentation ------------- From a2c429a4a5029358ec9d38d64d53a19f532906ed Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 2 May 2013 10:48:09 +0000 Subject: [PATCH 268/464] Queryset cursor regeneration testcase --- tests/queryset/queryset.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 13039f2..bbb28bd 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -115,6 +115,15 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(len(people), 1) self.assertEqual(people[0].name, 'User B') + # Test slice limit and skip cursor reset + qs = self.Person.objects[1:2] + # fetch then delete the cursor + qs._cursor + qs._cursor_obj = None + people = list(qs) + self.assertEqual(len(people), 1) + self.assertEqual(people[0].name, 'User B') + people = list(self.Person.objects[1:1]) self.assertEqual(len(people), 0) From f2c16452c66c064b466301fa0da1df7cd10c3770 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 2 May 2013 10:48:30 +0000 Subject: [PATCH 269/464] Help with backwards compatibility --- mongoengine/base/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mongoengine/base/__init__.py b/mongoengine/base/__init__.py index ce119b3..e8d4b6a 100644 --- a/mongoengine/base/__init__.py +++ b/mongoengine/base/__init__.py @@ -3,3 +3,6 @@ from mongoengine.base.datastructures import * from mongoengine.base.document import * from mongoengine.base.fields import * from mongoengine.base.metaclasses import * + +# Help with backwards compatibility +from mongoengine.errors import * From 
0eda7a5a3c4a82f5160d50a0e4694f96c54c300d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 2 May 2013 10:51:04 +0000 Subject: [PATCH 270/464] 0.8.0RC2 is a go --- mongoengine/__init__.py | 2 +- python-mongoengine.spec | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index 0a3ca24..b6adcb4 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -15,7 +15,7 @@ import django __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + list(queryset.__all__) + signals.__all__ + list(errors.__all__)) -VERSION = (0, 8, 0, 'RC1') +VERSION = (0, 8, 0, 'RC2') def get_version(): diff --git a/python-mongoengine.spec b/python-mongoengine.spec index 33ea48c..62ec8f8 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.8.0.RC1 +Version: 0.8.0.RC2 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB From 3ccc495c758aaa2112405663fe9c9f6607dcc24d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 3 May 2013 12:56:53 +0000 Subject: [PATCH 271/464] Fixed register_delete_rule inheritance issue --- mongoengine/base/metaclasses.py | 49 +++++++++++++++++---------------- tests/document/class_methods.py | 21 +++++++++++++- 2 files changed, 45 insertions(+), 25 deletions(-) diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index def8a05..444d9a2 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -140,8 +140,31 @@ class DocumentMetaclass(type): base._subclasses += (_cls,) base._types = base._subclasses # TODO depreciate _types - # Handle delete rules Document, EmbeddedDocument, DictField = cls._import_classes() + + if issubclass(new_class, Document): + new_class._collection = None + + # Add class to the _document_registry + _document_registry[new_class._class_name] = new_class + + # In Python 2, 
User-defined methods objects have special read-only + # attributes 'im_func' and 'im_self' which contain the function obj + # and class instance object respectively. With Python 3 these special + # attributes have been replaced by __func__ and __self__. The Blinker + # module continues to use im_func and im_self, so the code below + # copies __func__ into im_func and __self__ into im_self for + # classmethod objects in Document derived classes. + if PY3: + for key, val in new_class.__dict__.items(): + if isinstance(val, classmethod): + f = val.__get__(new_class) + if hasattr(f, '__func__') and not hasattr(f, 'im_func'): + f.__dict__.update({'im_func': getattr(f, '__func__')}) + if hasattr(f, '__self__') and not hasattr(f, 'im_self'): + f.__dict__.update({'im_self': getattr(f, '__self__')}) + + # Handle delete rules for field in new_class._fields.itervalues(): f = field f.owner_document = new_class @@ -167,33 +190,11 @@ class DocumentMetaclass(type): field.name, delete_rule) if (field.name and hasattr(Document, field.name) and - EmbeddedDocument not in new_class.mro()): + EmbeddedDocument not in new_class.mro()): msg = ("%s is a document method and not a valid " "field name" % field.name) raise InvalidDocumentError(msg) - if issubclass(new_class, Document): - new_class._collection = None - - # Add class to the _document_registry - _document_registry[new_class._class_name] = new_class - - # In Python 2, User-defined methods objects have special read-only - # attributes 'im_func' and 'im_self' which contain the function obj - # and class instance object respectively. With Python 3 these special - # attributes have been replaced by __func__ and __self__. The Blinker - # module continues to use im_func and im_self, so the code below - # copies __func__ into im_func and __self__ into im_self for - # classmethod objects in Document derived classes. 
- if PY3: - for key, val in new_class.__dict__.items(): - if isinstance(val, classmethod): - f = val.__get__(new_class) - if hasattr(f, '__func__') and not hasattr(f, 'im_func'): - f.__dict__.update({'im_func': getattr(f, '__func__')}) - if hasattr(f, '__self__') and not hasattr(f, 'im_self'): - f.__dict__.update({'im_self': getattr(f, '__self__')}) - return new_class def add_to_class(self, name, value): diff --git a/tests/document/class_methods.py b/tests/document/class_methods.py index 231dd8f..b2c7283 100644 --- a/tests/document/class_methods.py +++ b/tests/document/class_methods.py @@ -5,7 +5,7 @@ import unittest from mongoengine import * -from mongoengine.queryset import NULLIFY +from mongoengine.queryset import NULLIFY, PULL from mongoengine.connection import get_db __all__ = ("ClassMethodsTest", ) @@ -85,6 +85,25 @@ class ClassMethodsTest(unittest.TestCase): self.assertEqual(self.Person._meta['delete_rules'], {(Job, 'employee'): NULLIFY}) + def test_register_delete_rule_inherited(self): + + class Vaccine(Document): + name = StringField(required=True) + + meta = {"indexes": ["name"]} + + class Animal(Document): + family = StringField(required=True) + vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL)) + + meta = {"allow_inheritance": True, "indexes": ["family"]} + + class Cat(Animal): + name = StringField(required=True) + + self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL) + self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL) + def test_collection_naming(self): """Ensure that a collection with a specified name may be used. 
""" From ebd15616827149f7c7d1d2a710056fbc652d4da5 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 3 May 2013 14:21:36 +0000 Subject: [PATCH 272/464] Updated changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index edadbd7..bfa809c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.0 ================ +- Fixed register_delete_rule inheritance issue - Fix cloning of sliced querysets (#303) - Fixed update_one write concern (#302) - Updated minimum requirement for pymongo to 2.5 From 2c119dea472a92e3ac9b3e5be35cc90b260ad6fe Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 7 May 2013 10:34:13 +0000 Subject: [PATCH 273/464] Upserting is the only way to ensure docs are saved correctly (#306) --- docs/changelog.rst | 1 + mongoengine/document.py | 3 +-- tests/document/instance.py | 8 ++++++++ 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index bfa809c..205df4e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.0 ================ +- Upserting is the only way to ensure docs are saved correctly (#306) - Fixed register_delete_rule inheritance issue - Fix cloning of sliced querysets (#303) - Fixed update_one write concern (#302) diff --git a/mongoengine/document.py b/mongoengine/document.py index 0e9be56..6c1045b 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -231,7 +231,6 @@ class Document(BaseDocument): return not updated return created - upsert = self._created update_query = {} if updates: @@ -240,7 +239,7 @@ class Document(BaseDocument): update_query["$unset"] = removals if updates or removals: last_error = collection.update(select_dict, update_query, - upsert=upsert, **write_concern) + upsert=True, **write_concern) created = is_new_object(last_error) cascade = (self._meta.get('cascade', True) diff --git 
a/tests/document/instance.py b/tests/document/instance.py index d8df0b2..d84d65c 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -852,6 +852,14 @@ class InstanceTest(unittest.TestCase): self.assertEqual(person.name, None) self.assertEqual(person.age, None) + def test_inserts_if_you_set_the_pk(self): + p1 = self.Person(name='p1', id=bson.ObjectId()).save() + p2 = self.Person(name='p2') + p2.id = bson.ObjectId() + p2.save() + + self.assertEqual(2, self.Person.objects.count()) + def test_can_save_if_not_included(self): class EmbeddedDoc(EmbeddedDocument): From ddd11c7ed21d1be4392c67fa9e6ef137caecff82 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 7 May 2013 10:57:52 +0000 Subject: [PATCH 274/464] Added offline docs links --- docs/index.rst | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/index.rst b/docs/index.rst index 6358a31..77f965c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -55,6 +55,14 @@ See the :doc:`changelog` for a full list of changes to MongoEngine and .. note:: Always read and test the `upgrade `_ documentation before putting updates live in production **;)** +Offline Reading +--------------- + +Download the docs in `pdf `_ +or `epub `_ +formats for offline reading. + + .. toctree:: :maxdepth: 1 :numbered: From 52c162a478ad4796d9a9621a7a1eb68529d992d7 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 7 May 2013 11:01:23 +0000 Subject: [PATCH 275/464] Pep8 --- mongoengine/fields.py | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 274ad3c..de9b44f 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -107,11 +107,11 @@ class URLField(StringField): """ _URL_REGEX = re.compile( - r'^(?:http|ftp)s?://' # http:// or https:// - r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain... - r'localhost|' #localhost... 
- r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip - r'(?::\d+)?' # optional port + r'^(?:http|ftp)s?://' # http:// or https:// + r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain... + r'localhost|' # localhost... + r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip + r'(?::\d+)?' # optional port r'(?:/?|[/?]\S+)$', re.IGNORECASE) def __init__(self, verify_exists=False, url_regex=None, **kwargs): @@ -128,8 +128,7 @@ class URLField(StringField): warnings.warn( "The URLField verify_exists argument has intractable security " "and performance issues. Accordingly, it has been deprecated.", - DeprecationWarning - ) + DeprecationWarning) try: request = urllib2.Request(value) urllib2.urlopen(request) @@ -469,7 +468,7 @@ class ComplexDateTimeField(StringField): def __get__(self, instance, owner): data = super(ComplexDateTimeField, self).__get__(instance, owner) - if data == None: + if data is None: return datetime.datetime.now() if isinstance(data, datetime.datetime): return data @@ -658,15 +657,15 @@ class ListField(ComplexBaseField): """Make sure that a list of valid fields is being used. 
""" if (not isinstance(value, (list, tuple, QuerySet)) or - isinstance(value, basestring)): + isinstance(value, basestring)): self.error('Only lists and tuples may be used in a list field') super(ListField, self).validate(value) def prepare_query_value(self, op, value): if self.field: if op in ('set', 'unset') and (not isinstance(value, basestring) - and not isinstance(value, BaseDocument) - and hasattr(value, '__iter__')): + and not isinstance(value, BaseDocument) + and hasattr(value, '__iter__')): return [self.field.prepare_query_value(op, v) for v in value] return self.field.prepare_query_value(op, value) return super(ListField, self).prepare_query_value(op, value) @@ -701,7 +700,7 @@ class SortedListField(ListField): value = super(SortedListField, self).to_mongo(value) if self._ordering is not None: return sorted(value, key=itemgetter(self._ordering), - reverse=self._order_reverse) + reverse=self._order_reverse) return sorted(value, reverse=self._order_reverse) @@ -1001,7 +1000,7 @@ class BinaryField(BaseField): if not isinstance(value, (bin_type, txt_type, Binary)): self.error("BinaryField only accepts instances of " "(%s, %s, Binary)" % ( - bin_type.__name__, txt_type.__name__)) + bin_type.__name__, txt_type.__name__)) if self.max_bytes is not None and len(value) > self.max_bytes: self.error('Binary value is too long') @@ -1235,8 +1234,6 @@ class ImageGridFsProxy(GridFSProxy): Insert a image in database applying field properties (size, thumbnail_size) """ - if not self.instance: - import ipdb; ipdb.set_trace(); field = self.instance._fields[self.key] try: @@ -1308,6 +1305,7 @@ class ImageGridFsProxy(GridFSProxy): height=h, format=format, **kwargs) + @property def size(self): """ From 870ff1d4d986077f9e306b7a9098d2b339b4c246 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 7 May 2013 11:11:55 +0000 Subject: [PATCH 276/464] Added $setOnInsert support for upserts (#308) Upserts now possible with just query parameters (#309) --- docs/changelog.rst | 2 ++ 
mongoengine/queryset/queryset.py | 2 +- mongoengine/queryset/transform.py | 7 +++++-- tests/queryset/queryset.py | 22 ++++++++++++++++++++-- 4 files changed, 28 insertions(+), 5 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 205df4e..ad5f615 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,8 @@ Changelog Changes in 0.8.0 ================ +- Added $setOnInsert support for upserts (#308) +- Upserts now possible with just query parameters (#309) - Upserting is the only way to ensure docs are saved correctly (#306) - Fixed register_delete_rule inheritance issue - Fix cloning of sliced querysets (#303) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index c1c9378..85b683d 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -427,7 +427,7 @@ class QuerySet(object): .. versionadded:: 0.2 """ - if not update: + if not update and not upsert: raise OperationError("No update parameters, would remove data") if not write_concern: diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 96d9904..4062fc1 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -24,7 +24,8 @@ MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS + STRING_OPERATORS + CUSTOM_OPERATORS) UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push', - 'push_all', 'pull', 'pull_all', 'add_to_set') + 'push_all', 'pull', 'pull_all', 'add_to_set', + 'set_on_insert') def query(_doc_cls=None, _field_operation=False, **query): @@ -163,7 +164,9 @@ def update(_doc_cls=None, **update): if value > 0: value = -value elif op == 'add_to_set': - op = op.replace('_to_set', 'ToSet') + op = 'addToSet' + elif op == 'set_on_insert': + op = "setOnInsert" match = None if parts[-1] in COMPARISON_OPERATORS: diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index bbb28bd..ffb5378 100644 --- a/tests/queryset/queryset.py +++ 
b/tests/queryset/queryset.py @@ -296,10 +296,10 @@ class QuerySetTest(unittest.TestCase): author.save(write_concern=write_concern) result = self.Person.objects.update( - set__name='Ross',write_concern={"w": 1}) + set__name='Ross', write_concern={"w": 1}) self.assertEqual(result, 1) result = self.Person.objects.update( - set__name='Ross',write_concern={"w": 0}) + set__name='Ross', write_concern={"w": 0}) self.assertEqual(result, None) result = self.Person.objects.update_one( @@ -536,6 +536,24 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(club.members['John']['gender'], "F") self.assertEqual(club.members['John']['age'], 14) + def test_upsert(self): + self.Person.drop_collection() + + self.Person.objects(pk=ObjectId(), name="Bob", age=30).update(upsert=True) + + bob = self.Person.objects.first() + self.assertEqual("Bob", bob.name) + self.assertEqual(30, bob.age) + + def test_set_on_insert(self): + self.Person.drop_collection() + + self.Person.objects(pk=ObjectId()).update(set__name='Bob', set_on_insert__age=30, upsert=True) + + bob = self.Person.objects.first() + self.assertEqual("Bob", bob.name) + self.assertEqual(30, bob.age) + def test_get_or_create(self): """Ensure that ``get_or_create`` returns one result or creates a new document. From 7cde97973696eb28e513b522e227d65786378739 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 7 May 2013 11:39:16 +0000 Subject: [PATCH 277/464] Updated fields --- mongoengine/fields.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index de9b44f..4995998 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -879,7 +879,7 @@ class ReferenceField(BaseField): """Convert a MongoDB-compatible type to a Python type. 
""" if (not self.dbref and - not isinstance(value, (DBRef, Document, EmbeddedDocument))): + not isinstance(value, (DBRef, Document, EmbeddedDocument))): collection = self.document_type._get_collection_name() value = DBRef(collection, self.document_type.id.to_python(value)) return value From 9dfee83e687a9aef625fbc38bf5dd10b16e463dc Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 7 May 2013 11:54:47 +0000 Subject: [PATCH 278/464] Fixed querying string versions of ObjectIds issue with ReferenceField (#307) --- docs/changelog.rst | 1 + mongoengine/fields.py | 2 +- mongoengine/queryset/queryset.py | 5 ++-- tests/queryset/queryset.py | 45 +++++++++++++++++++++++++++++++- 4 files changed, 49 insertions(+), 4 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index ad5f615..842bc7d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.0 ================ +- Fixed querying string versions of ObjectIds issue with ReferenceField (#307) - Added $setOnInsert support for upserts (#308) - Upserts now possible with just query parameters (#309) - Upserting is the only way to ensure docs are saved correctly (#306) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 4995998..573d9a0 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -854,7 +854,7 @@ class ReferenceField(BaseField): return document.id return document elif not self.dbref and isinstance(document, basestring): - return document + return ObjectId(document) id_field_name = self.document_type._meta['id_field'] id_field = self.document_type._fields[id_field_name] diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 85b683d..191afdd 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -544,8 +544,9 @@ class QuerySet(object): return c def select_related(self, max_depth=1): - """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to - a maximum depth in order to 
cut down the number queries to mongodb. + """Handles dereferencing of :class:`~bson.dbref.DBRef` objects or + :class:`~bson.object_id.ObjectId` a maximum depth in order to cut down + the number queries to mongodb. .. versionadded:: 0.5 """ diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index ffb5378..b9db297 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -1263,7 +1263,7 @@ class QuerySetTest(unittest.TestCase): class BlogPost(Document): content = StringField() authors = ListField(ReferenceField(self.Person, - reverse_delete_rule=PULL)) + reverse_delete_rule=PULL)) BlogPost.drop_collection() self.Person.drop_collection() @@ -1321,6 +1321,49 @@ class QuerySetTest(unittest.TestCase): self.Person.objects()[:1].delete() self.assertEqual(1, BlogPost.objects.count()) + + def test_reference_field_find(self): + """Ensure cascading deletion of referring documents from the database. + """ + class BlogPost(Document): + content = StringField() + author = ReferenceField(self.Person) + + BlogPost.drop_collection() + self.Person.drop_collection() + + me = self.Person(name='Test User').save() + BlogPost(content="test 123", author=me).save() + + self.assertEqual(1, BlogPost.objects(author=me).count()) + self.assertEqual(1, BlogPost.objects(author=me.pk).count()) + self.assertEqual(1, BlogPost.objects(author="%s" % me.pk).count()) + + self.assertEqual(1, BlogPost.objects(author__in=[me]).count()) + self.assertEqual(1, BlogPost.objects(author__in=[me.pk]).count()) + self.assertEqual(1, BlogPost.objects(author__in=["%s" % me.pk]).count()) + + def test_reference_field_find_dbref(self): + """Ensure cascading deletion of referring documents from the database. 
+ """ + class BlogPost(Document): + content = StringField() + author = ReferenceField(self.Person, dbref=True) + + BlogPost.drop_collection() + self.Person.drop_collection() + + me = self.Person(name='Test User').save() + BlogPost(content="test 123", author=me).save() + + self.assertEqual(1, BlogPost.objects(author=me).count()) + self.assertEqual(1, BlogPost.objects(author=me.pk).count()) + self.assertEqual(1, BlogPost.objects(author="%s" % me.pk).count()) + + self.assertEqual(1, BlogPost.objects(author__in=[me]).count()) + self.assertEqual(1, BlogPost.objects(author__in=[me.pk]).count()) + self.assertEqual(1, BlogPost.objects(author__in=["%s" % me.pk]).count()) + def test_update(self): """Ensure that atomic updates work properly. """ From 9e513e08aeafec19399677e9bd813fedd3d596ed Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 7 May 2013 11:55:56 +0000 Subject: [PATCH 279/464] Updated RC version --- mongoengine/__init__.py | 2 +- python-mongoengine.spec | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index b6adcb4..3a4d7c9 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -15,7 +15,7 @@ import django __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + list(queryset.__all__) + signals.__all__ + list(errors.__all__)) -VERSION = (0, 8, 0, 'RC2') +VERSION = (0, 8, 0, 'RC3') def get_version(): diff --git a/python-mongoengine.spec b/python-mongoengine.spec index 62ec8f8..68cb72c 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.8.0.RC2 +Version: 0.8.0.RC3 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB From 96a964a18332b13e5ba4ecd11ee246cf80a8a8da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Thu, 9 May 2013 13:18:58 -0300 Subject: [PATCH 280/464] added .disable_inheritance method for the simple 
fetch exclusives classes --- mongoengine/queryset/queryset.py | 9 +++++++++ tests/queryset/queryset.py | 20 ++++++++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 191afdd..407bf2f 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -520,6 +520,15 @@ class QuerySet(object): queryset._none = True return queryset + def disable_inheritance(self): + """ + Disable inheritance query, fetch only objects for the query class + """ + if self._document._meta.get('allow_inheritance') is True: + self._initial_query = {"_cls": self._document._class_name} + + return self + def clone(self): """Creates a copy of the current :class:`~mongoengine.queryset.QuerySet` diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index b9db297..49ed36c 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3272,5 +3272,25 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(outer_count, 7) # outer loop should be executed seven times total self.assertEqual(inner_total_count, 7 * 7) # inner loop should be executed fourtynine times total + def test_disable_inheritance_queryset(self): + class A(Document): + x = IntField() + y = IntField() + + meta = {'allow_inheritance': True} + + class B(A): + z = IntField() + + A.drop_collection() + + A(x=10, y=20).save() + A(x=15, y=30).save() + B(x=20, y=40).save() + B(x=30, y=50).save() + + for obj in A.objects.disable_inheritance(): + self.assertEqual(obj.__class__, A) + if __name__ == '__main__': unittest.main() From 9251ce312bc0545d3b86224e35a913029a86695e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 10 May 2013 13:57:32 +0000 Subject: [PATCH 281/464] Querysets now utilises a local cache Changed __len__ behavour in the queryset (#247, #311) --- docs/changelog.rst | 3 +- docs/upgrade.rst | 13 ++-- mongoengine/queryset/queryset.py | 111 ++++++++++++++++++++++--------- setup.py | 4 +- 
tests/queryset/queryset.py | 82 ++++++++++++++++++----- tests/test_jinja.py | 47 +++++++++++++ 6 files changed, 204 insertions(+), 56 deletions(-) create mode 100644 tests/test_jinja.py diff --git a/docs/changelog.rst b/docs/changelog.rst index 842bc7d..3b6813a 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,8 @@ Changelog Changes in 0.8.0 ================ +- Querysets now utilises a local cache +- Changed __len__ behavour in the queryset (#247, #311) - Fixed querying string versions of ObjectIds issue with ReferenceField (#307) - Added $setOnInsert support for upserts (#308) - Upserts now possible with just query parameters (#309) @@ -25,7 +27,6 @@ Changes in 0.8.0 - Added SequenceField.set_next_value(value) helper (#159) - Updated .only() behaviour - now like exclude it is chainable (#202) - Added with_limit_and_skip support to count() (#235) -- Removed __len__ from queryset (#247) - Objects queryset manager now inherited (#256) - Updated connection to use MongoClient (#262, #274) - Fixed db_alias and inherited Documents (#143) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index c633c28..fe9e4fa 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -235,12 +235,15 @@ update your code like so: :: mammals = Animal.objects(type="mammal").filter(order="Carnivora") # The final queryset is assgined to mammals [m for m in mammals] # This will return all carnivores -No more len ------------ +Len iterates the queryset +-------------------------- -If you ever did len(queryset) it previously did a count() under the covers, this -caused some unusual issues - so now it has been removed in favour of the -explicit `queryset.count()` to update:: +If you ever did `len(queryset)` it previously did a `count()` under the covers, +this caused some unusual issues. As `len(queryset)` is most often used by +`list(queryset)` we now cache the queryset results and use that for the length. 
+ +This isn't as performant as a `count()` and if you aren't iterating the +queryset you should upgrade to use count:: # Old code len(Animal.objects(type="mammal")) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 191afdd..2d63183 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -26,6 +26,7 @@ __all__ = ('QuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL') # The maximum number of items to display in a QuerySet.__repr__ REPR_OUTPUT_SIZE = 20 +ITER_CHUNK_SIZE = 100 # Delete rules DO_NOTHING = 0 @@ -63,6 +64,9 @@ class QuerySet(object): self._none = False self._as_pymongo = False self._as_pymongo_coerce = False + self._result_cache = [] + self._has_more = True + self._len = None # If inheritance is allowed, only return instances and instances of # subclasses of the class being used @@ -109,13 +113,60 @@ class QuerySet(object): queryset._class_check = class_check return queryset + def __len__(self): + """Since __len__ is called quite frequently (for example, as part of + list(qs) we populate the result cache and cache the length. + """ + if self._len is not None: + return self._len + if self._has_more: + # populate the cache + list(self._iter_results()) + + self._len = len(self._result_cache) + return self._len + def __iter__(self): - """Support iterator protocol""" - queryset = self - if queryset._iter: - queryset = self.clone() - queryset.rewind() - return queryset + """Iteration utilises a results cache which iterates the cursor + in batches of ``ITER_CHUNK_SIZE``. + + If ``self._has_more`` the cursor hasn't been exhausted so cache then + batch. Otherwise iterate the result_cache. + """ + self._iter = True + if self._has_more: + return self._iter_results() + + # iterating over the cache. + return iter(self._result_cache) + + def _iter_results(self): + """A generator for iterating over the result cache. + + Also populates the cache if there are more possible results to yield. 
+ Raises StopIteration when there are no more results""" + pos = 0 + while True: + upper = len(self._result_cache) + while pos < upper: + yield self._result_cache[pos] + pos = pos + 1 + if not self._has_more: + raise StopIteration + if len(self._result_cache) <= pos: + self._populate_cache() + + def _populate_cache(self): + """ + Populates the result cache with ``ITER_CHUNK_SIZE`` more entries + (until the cursor is exhausted). + """ + if self._has_more: + try: + for i in xrange(ITER_CHUNK_SIZE): + self._result_cache.append(self.next()) + except StopIteration: + self._has_more = False def __getitem__(self, key): """Support skip and limit using getitem and slicing syntax. @@ -157,22 +208,15 @@ class QuerySet(object): def __repr__(self): """Provides the string representation of the QuerySet - - .. versionchanged:: 0.6.13 Now doesnt modify the cursor """ + if self._iter: return '.. queryset mid-iteration ..' - data = [] - for i in xrange(REPR_OUTPUT_SIZE + 1): - try: - data.append(self.next()) - except StopIteration: - break + self._populate_cache() + data = self._result_cache[:REPR_OUTPUT_SIZE + 1] if len(data) > REPR_OUTPUT_SIZE: data[-1] = "...(remaining elements truncated)..." - - self.rewind() return repr(data) # Core functions @@ -201,7 +245,7 @@ class QuerySet(object): result = queryset.next() except StopIteration: msg = ("%s matching query does not exist." - % queryset._document._class_name) + % queryset._document._class_name) raise queryset._document.DoesNotExist(msg) try: queryset.next() @@ -352,7 +396,12 @@ class QuerySet(object): """ if self._limit == 0: return 0 - return self._cursor.count(with_limit_and_skip=with_limit_and_skip) + if with_limit_and_skip and self._len is not None: + return self._len + count = self._cursor.count(with_limit_and_skip=with_limit_and_skip) + if with_limit_and_skip: + self._len = count + return count def delete(self, write_concern=None): """Delete the documents matched by the query. 
@@ -910,7 +959,7 @@ class QuerySet(object): mr_args['out'] = output results = getattr(queryset._collection, map_reduce_function)( - map_f, reduce_f, **mr_args) + map_f, reduce_f, **mr_args) if map_reduce_function == 'map_reduce': results = results.find() @@ -1084,20 +1133,18 @@ class QuerySet(object): def next(self): """Wrap the result in a :class:`~mongoengine.Document` object. """ - self._iter = True - try: - if self._limit == 0 or self._none: - raise StopIteration - if self._scalar: - return self._get_scalar(self._document._from_son( - self._cursor.next())) - if self._as_pymongo: - return self._get_as_pymongo(self._cursor.next()) + if self._limit == 0 or self._none: + raise StopIteration - return self._document._from_son(self._cursor.next()) - except StopIteration, e: - self.rewind() - raise e + raw_doc = self._cursor.next() + if self._as_pymongo: + return self._get_as_pymongo(raw_doc) + + doc = self._document._from_son(raw_doc) + if self._scalar: + return self._get_scalar(doc) + + return doc def rewind(self): """Rewind the cursor to its unevaluated state. 
diff --git a/setup.py b/setup.py index 10a6dbc..594f7f8 100644 --- a/setup.py +++ b/setup.py @@ -51,13 +51,13 @@ CLASSIFIERS = [ extra_opts = {} if sys.version_info[0] == 3: extra_opts['use_2to3'] = True - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker'] + extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2'] extra_opts['packages'] = find_packages(exclude=('tests',)) if "test" in sys.argv or "nosetests" in sys.argv: extra_opts['packages'].append("tests") extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]} else: - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL'] + extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2'] extra_opts['packages'] = find_packages(exclude=('tests',)) setup(name='mongoengine', diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index b9db297..b9c1396 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -793,7 +793,7 @@ class QuerySetTest(unittest.TestCase): p = p.snapshot(True).slave_okay(True).timeout(True) self.assertEqual(p._cursor_args, - {'snapshot': True, 'slave_okay': True, 'timeout': True}) + {'snapshot': True, 'slave_okay': True, 'timeout': True}) def test_repeated_iteration(self): """Ensure that QuerySet rewinds itself one iteration finishes. 
@@ -835,6 +835,7 @@ class QuerySetTest(unittest.TestCase): self.assertTrue("Doc: 0" in docs_string) self.assertEqual(docs.count(), 1000) + self.assertTrue('(remaining elements truncated)' in "%s" % docs) # Limit and skip docs = docs[1:4] @@ -3231,6 +3232,51 @@ class QuerySetTest(unittest.TestCase): Organization)) self.assertTrue(isinstance(qs.first().organization, Organization)) + def test_cached_queryset(self): + class Person(Document): + name = StringField() + + Person.drop_collection() + for i in xrange(100): + Person(name="No: %s" % i).save() + + with query_counter() as q: + self.assertEqual(q, 0) + people = Person.objects + + [x for x in people] + self.assertEqual(100, len(people._result_cache)) + self.assertEqual(None, people._len) + self.assertEqual(q, 1) + + list(people) + self.assertEqual(100, people._len) # Caused by list calling len + self.assertEqual(q, 1) + + people.count() # count is cached + self.assertEqual(q, 1) + + def test_cache_not_cloned(self): + + class User(Document): + name = StringField() + + def __unicode__(self): + return self.name + + User.drop_collection() + + User(name="Alice").save() + User(name="Bob").save() + + users = User.objects.all().order_by('name') + self.assertEqual("%s" % users, "[, ]") + self.assertEqual(2, len(users._result_cache)) + + users = users.filter(name="Bob") + self.assertEqual("%s" % users, "[]") + self.assertEqual(1, len(users._result_cache)) + def test_nested_queryset_iterator(self): # Try iterating the same queryset twice, nested. 
names = ['Alice', 'Bob', 'Chuck', 'David', 'Eric', 'Francis', 'George'] @@ -3247,30 +3293,34 @@ class QuerySetTest(unittest.TestCase): User(name=name).save() users = User.objects.all().order_by('name') - outer_count = 0 inner_count = 0 inner_total_count = 0 - self.assertEqual(users.count(), 7) + with query_counter() as q: + self.assertEqual(q, 0) - for i, outer_user in enumerate(users): - self.assertEqual(outer_user.name, names[i]) - outer_count += 1 - inner_count = 0 - - # Calling len might disrupt the inner loop if there are bugs self.assertEqual(users.count(), 7) - for j, inner_user in enumerate(users): - self.assertEqual(inner_user.name, names[j]) - inner_count += 1 - inner_total_count += 1 + for i, outer_user in enumerate(users): + self.assertEqual(outer_user.name, names[i]) + outer_count += 1 + inner_count = 0 - self.assertEqual(inner_count, 7) # inner loop should always be executed seven times + # Calling len might disrupt the inner loop if there are bugs + self.assertEqual(users.count(), 7) - self.assertEqual(outer_count, 7) # outer loop should be executed seven times total - self.assertEqual(inner_total_count, 7 * 7) # inner loop should be executed fourtynine times total + for j, inner_user in enumerate(users): + self.assertEqual(inner_user.name, names[j]) + inner_count += 1 + inner_total_count += 1 + + self.assertEqual(inner_count, 7) # inner loop should always be executed seven times + + self.assertEqual(outer_count, 7) # outer loop should be executed seven times total + self.assertEqual(inner_total_count, 7 * 7) # inner loop should be executed fourtynine times total + + self.assertEqual(q, 2) if __name__ == '__main__': unittest.main() diff --git a/tests/test_jinja.py b/tests/test_jinja.py new file mode 100644 index 0000000..0449f86 --- /dev/null +++ b/tests/test_jinja.py @@ -0,0 +1,47 @@ +import sys +sys.path[0:0] = [""] + +import unittest + +from mongoengine import * + +import jinja2 + + +class TemplateFilterTest(unittest.TestCase): + + def 
setUp(self): + connect(db='mongoenginetest') + + def test_jinja2(self): + env = jinja2.Environment() + + class TestData(Document): + title = StringField() + description = StringField() + + TestData.drop_collection() + + examples = [('A', '1'), + ('B', '2'), + ('C', '3')] + + for title, description in examples: + TestData(title=title, description=description).save() + + tmpl = """ +{%- for record in content -%} + {%- if loop.first -%}{ {%- endif -%} + "{{ record.title }}": "{{ record.description }}" + {%- if loop.last -%} }{%- else -%},{% endif -%} +{%- endfor -%} +""" + ctx = {'content': TestData.objects} + template = env.from_string(tmpl) + rendered = template.render(**ctx) + + self.assertEqual('{"A": "1","B": "2","C": "3"}', rendered) + + +if __name__ == '__main__': + unittest.main() From 5b498bd8d6f161e81605a686fe927307b2e28078 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 10 May 2013 15:05:16 +0000 Subject: [PATCH 282/464] Added no_sub_classes context manager and queryset helper (#312) --- docs/changelog.rst | 1 + mongoengine/context_managers.py | 36 ++++++++++++++++++++-- mongoengine/queryset/queryset.py | 4 +-- tests/queryset/queryset.py | 23 ++++++++++++-- tests/test_context_managers.py | 51 +++++++++++++++++++++++++++++++- 5 files changed, 108 insertions(+), 7 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 3b6813a..c3e50e4 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.0 ================ +- Added no_sub_classes context manager and queryset helper (#312) - Querysets now utilises a local cache - Changed __len__ behavour in the queryset (#247, #311) - Fixed querying string versions of ObjectIds issue with ReferenceField (#307) diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 76d5fbf..1280e11 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -1,8 +1,10 @@ from mongoengine.common import _import_class from 
mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db -from mongoengine.queryset import OperationError, QuerySet +from mongoengine.queryset import QuerySet -__all__ = ("switch_db", "switch_collection", "no_dereference", "query_counter") + +__all__ = ("switch_db", "switch_collection", "no_dereference", + "no_sub_classes", "query_counter") class switch_db(object): @@ -130,6 +132,36 @@ class no_dereference(object): return self.cls +class no_sub_classes(object): + """ no_sub_classes context manager. + + Only returns instances of this class and no sub (inherited) classes:: + + with no_sub_classes(Group) as Group: + Group.objects.find() + + """ + + def __init__(self, cls): + """ Construct the no_sub_classes context manager. + + :param cls: the class to turn querying sub classes on + """ + self.cls = cls + + def __enter__(self): + """ change the objects default and _auto_dereference values""" + self.cls._all_subclasses = self.cls._subclasses + self.cls._subclasses = (self.cls,) + return self.cls + + def __exit__(self, t, value, traceback): + """ Reset the default and _auto_dereference values""" + self.cls._subclasses = self.cls._all_subclasses + delattr(self.cls, '_all_subclasses') + return self.cls + + class QuerySetNoDeRef(QuerySet): """Special no_dereference QuerySet""" def __dereference(items, max_depth=1, instance=None, name=None): diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 4cf86d1..5da6295 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -569,9 +569,9 @@ class QuerySet(object): queryset._none = True return queryset - def disable_inheritance(self): + def no_sub_classes(self): """ - Disable inheritance query, fetch only objects for the query class + Only return instances of this document and not any inherited documents """ if self._document._meta.get('allow_inheritance') is True: self._initial_query = {"_cls": self._document._class_name} diff --git a/tests/queryset/queryset.py 
b/tests/queryset/queryset.py index 27a418d..bf23761 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3322,7 +3322,7 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(q, 2) - def test_disable_inheritance_queryset(self): + def test_no_sub_classes(self): class A(Document): x = IntField() y = IntField() @@ -3332,15 +3332,34 @@ class QuerySetTest(unittest.TestCase): class B(A): z = IntField() + class C(B): + zz = IntField() + A.drop_collection() A(x=10, y=20).save() A(x=15, y=30).save() B(x=20, y=40).save() B(x=30, y=50).save() + C(x=40, y=60).save() - for obj in A.objects.disable_inheritance(): + self.assertEqual(A.objects.no_sub_classes().count(), 2) + self.assertEqual(A.objects.count(), 5) + + self.assertEqual(B.objects.no_sub_classes().count(), 2) + self.assertEqual(B.objects.count(), 3) + + self.assertEqual(C.objects.no_sub_classes().count(), 1) + self.assertEqual(C.objects.count(), 1) + + for obj in A.objects.no_sub_classes(): self.assertEqual(obj.__class__, A) + for obj in B.objects.no_sub_classes(): + self.assertEqual(obj.__class__, B) + + for obj in C.objects.no_sub_classes(): + self.assertEqual(obj.__class__, C) + if __name__ == '__main__': unittest.main() diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index f87d638..c201a5f 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -5,7 +5,8 @@ import unittest from mongoengine import * from mongoengine.connection import get_db from mongoengine.context_managers import (switch_db, switch_collection, - no_dereference, query_counter) + no_sub_classes, no_dereference, + query_counter) class ContextManagersTest(unittest.TestCase): @@ -138,6 +139,54 @@ class ContextManagersTest(unittest.TestCase): self.assertTrue(isinstance(group.ref, User)) self.assertTrue(isinstance(group.generic, User)) + def test_no_sub_classes(self): + class A(Document): + x = IntField() + y = IntField() + + meta = {'allow_inheritance': True} + + class 
B(A): + z = IntField() + + class C(B): + zz = IntField() + + A.drop_collection() + + A(x=10, y=20).save() + A(x=15, y=30).save() + B(x=20, y=40).save() + B(x=30, y=50).save() + C(x=40, y=60).save() + + self.assertEqual(A.objects.count(), 5) + self.assertEqual(B.objects.count(), 3) + self.assertEqual(C.objects.count(), 1) + + with no_sub_classes(A) as A: + self.assertEqual(A.objects.count(), 2) + + for obj in A.objects: + self.assertEqual(obj.__class__, A) + + with no_sub_classes(B) as B: + self.assertEqual(B.objects.count(), 2) + + for obj in B.objects: + self.assertEqual(obj.__class__, B) + + with no_sub_classes(C) as C: + self.assertEqual(C.objects.count(), 1) + + for obj in C.objects: + self.assertEqual(obj.__class__, C) + + # Confirm context manager exit correctly + self.assertEqual(A.objects.count(), 5) + self.assertEqual(B.objects.count(), 3) + self.assertEqual(C.objects.count(), 1) + def test_query_counter(self): connect('mongoenginetest') db = get_db() From f8350409ad57c95c1b89c47c0c331b58bee26be6 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 10 May 2013 15:08:01 +0000 Subject: [PATCH 283/464] assertEquals is bad --- tests/document/instance.py | 20 ++++++++++---------- tests/queryset/queryset.py | 2 +- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/document/instance.py b/tests/document/instance.py index d84d65c..dcb0de3 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -319,8 +319,8 @@ class InstanceTest(unittest.TestCase): Location.drop_collection() - self.assertEquals(Area, get_document("Area")) - self.assertEquals(Area, get_document("Location.Area")) + self.assertEqual(Area, get_document("Area")) + self.assertEqual(Area, get_document("Location.Area")) def test_creation(self): """Ensure that document may be created using keyword arguments. 
@@ -508,12 +508,12 @@ class InstanceTest(unittest.TestCase): t = TestDocument(status="published") t.save(clean=False) - self.assertEquals(t.pub_date, None) + self.assertEqual(t.pub_date, None) t = TestDocument(status="published") t.save(clean=True) - self.assertEquals(type(t.pub_date), datetime) + self.assertEqual(type(t.pub_date), datetime) def test_document_embedded_clean(self): class TestEmbeddedDocument(EmbeddedDocument): @@ -545,7 +545,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual(e.to_dict(), {'doc': {'__all__': expect_msg}}) t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25)).save() - self.assertEquals(t.doc.z, 35) + self.assertEqual(t.doc.z, 35) # Asserts not raises t = TestDocument(doc=TestEmbeddedDocument(x=15, y=35, z=5)) @@ -1903,11 +1903,11 @@ class InstanceTest(unittest.TestCase): A.objects.all() - self.assertEquals('testdb-2', B._meta.get('db_alias')) - self.assertEquals('mongoenginetest', - A._get_collection().database.name) - self.assertEquals('mongoenginetest2', - B._get_collection().database.name) + self.assertEqual('testdb-2', B._meta.get('db_alias')) + self.assertEqual('mongoenginetest', + A._get_collection().database.name) + self.assertEqual('mongoenginetest2', + B._get_collection().database.name) def test_db_alias_propagates(self): """db_alias propagates? 
diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index bf23761..9e1fda2 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -282,7 +282,7 @@ class QuerySetTest(unittest.TestCase): a_objects = A.objects(s='test1') query = B.objects(ref__in=a_objects) query = query.filter(boolfield=True) - self.assertEquals(query.count(), 1) + self.assertEqual(query.count(), 1) def test_update_write_concern(self): """Test that passing write_concern works""" From b16eabd2b6350fdc5a05036034d7d0175c33d6a7 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 10 May 2013 15:09:08 +0000 Subject: [PATCH 284/464] Updated version --- mongoengine/__init__.py | 2 +- python-mongoengine.spec | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index 3a4d7c9..0f8913a 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -15,7 +15,7 @@ import django __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + list(queryset.__all__) + signals.__all__ + list(errors.__all__)) -VERSION = (0, 8, 0, 'RC3') +VERSION = (0, 8, 0, 'RC4') def get_version(): diff --git a/python-mongoengine.spec b/python-mongoengine.spec index 68cb72c..be9c67b 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.8.0.RC3 +Version: 0.8.0.RC4 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB From 0efb90deb6daf1f47a324be2b295a59600226d02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C3=A9o=20S?= Date: Mon, 13 May 2013 13:14:15 +0200 Subject: [PATCH 285/464] Added a failing test when using pickle with signal hooks --- tests/document/instance.py | 8 +++++++- tests/fixtures.py | 20 ++++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/tests/document/instance.py b/tests/document/instance.py index dcb0de3..d972ae5 100644 --- 
a/tests/document/instance.py +++ b/tests/document/instance.py @@ -9,7 +9,7 @@ import unittest import uuid from datetime import datetime -from tests.fixtures import PickleEmbedded, PickleTest +from tests.fixtures import PickleEmbedded, PickleTest, PickleSignalsTest from mongoengine import * from mongoengine.errors import (NotRegistered, InvalidDocumentError, @@ -1730,6 +1730,12 @@ class InstanceTest(unittest.TestCase): self.assertEqual(pickle_doc.string, "Two") self.assertEqual(pickle_doc.lists, ["1", "2", "3"]) + def test_picklable_on_signals(self): + pickle_doc = PickleSignalsTest(number=1, string="One", lists=['1', '2']) + pickle_doc.embedded = PickleEmbedded() + pickle_doc.save() + pickle_doc.delete() + def test_throw_invalid_document_error(self): # test handles people trying to upsert diff --git a/tests/fixtures.py b/tests/fixtures.py index fd9062e..a35f144 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -1,6 +1,8 @@ +import pickle from datetime import datetime from mongoengine import * +from mongoengine import signals class PickleEmbedded(EmbeddedDocument): @@ -15,6 +17,24 @@ class PickleTest(Document): photo = FileField() +class PickleSignalsTest(Document): + number = IntField() + string = StringField(choices=(('One', '1'), ('Two', '2'))) + embedded = EmbeddedDocumentField(PickleEmbedded) + lists = ListField(StringField()) + + @classmethod + def post_save(self, sender, document, created, **kwargs): + pickled = pickle.dumps(document) + + @classmethod + def post_delete(self, sender, document, **kwargs): + pickled = pickle.dumps(document) + +signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest) +signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest) + + class Mixin(object): name = StringField() From f6d0b53ae57e37cc2dab7782b7077df1b9536b35 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Mon, 13 May 2013 21:42:20 -0700 Subject: [PATCH 286/464] test reference to a custom pk doc --- 
tests/queryset/queryset.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 9e1fda2..01c53d0 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3361,5 +3361,25 @@ class QuerySetTest(unittest.TestCase): for obj in C.objects.no_sub_classes(): self.assertEqual(obj.__class__, C) + def test_query_reference_to_custom_pk_doc(self): + + class A(Document): + id = StringField(unique=True, primary_key=True) + + class B(Document): + a = ReferenceField(A) + + A.drop_collection() + B.drop_collection() + + a = A.objects.create(id='custom_id') + + b = B.objects.create(a=a) + + self.assertEqual(B.objects.count(), 1) + self.assertEqual(B.objects.get(a=a).a, a) + self.assertEqual(B.objects.get(a=a.id).a, a) + + if __name__ == '__main__': unittest.main() From 731d8fc6bed91bb12598c70d29205985f2b9f7fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Thu, 16 May 2013 12:50:34 -0300 Subject: [PATCH 287/464] added get_next_value to SequenceField --- mongoengine/fields.py | 11 +++++++++++ tests/fields/fields.py | 17 +++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 2e14933..b2f5488 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1465,6 +1465,17 @@ class SequenceField(BaseField): upsert=True) return self.value_decorator(counter['next']) + def get_next_value(self): + sequence_name = self.get_sequence_name() + sequence_id = "%s.%s" % (sequence_name, self.name) + collection = get_db(alias=self.db_alias)[self.collection_name] + data = collection.find_one({"_id": sequence_id}) + + if data: + return data['next'] + + return 1 + def get_sequence_name(self): if self.sequence_name: return self.sequence_name diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 4fa6989..444b71a 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -1917,6 +1917,23 @@ class 
FieldTest(unittest.TestCase): c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) self.assertEqual(c['next'], 1000) + + def test_sequence_field_get_next_value(self): + class Person(Document): + id = SequenceField(primary_key=True) + name = StringField() + + self.db['mongoengine.counters'].drop() + Person.drop_collection() + + for x in xrange(10): + Person(name="Person %s" % x).save() + + self.assertEqual(Person.id.get_next_value(), 10) + self.db['mongoengine.counters'].drop() + + self.assertEqual(Person.id.get_next_value(), 1) + def test_sequence_field_sequence_name(self): class Person(Document): id = SequenceField(primary_key=True, sequence_name='jelly') From 0b1e11ba1fd351fd78d33f03090b5f20e21f5085 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Thu, 16 May 2013 12:55:16 -0300 Subject: [PATCH 288/464] added my github profile --- AUTHORS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 0ff48e8..fbe697a 100644 --- a/AUTHORS +++ b/AUTHORS @@ -25,7 +25,7 @@ that much better: * flosch * Deepak Thukral * Colin Howe - * Wilson Júnior + * Wilson Júnior (https://github.com/wpjunior) * Alistair Roche * Dan Crosta * Viktor Kerkez From f7e22d2b8bc8acb2e4ab6d33c8814d8f6d49d63c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Thu, 16 May 2013 13:05:07 -0300 Subject: [PATCH 289/464] fixes for get_next_value --- mongoengine/fields.py | 2 +- tests/fields/fields.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 0b6486a..a56bad8 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1449,7 +1449,7 @@ class SequenceField(BaseField): data = collection.find_one({"_id": sequence_id}) if data: - return data['next'] + return data['next']+1 return 1 diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 527baa9..a9fed3c 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -1910,7 
+1910,7 @@ class FieldTest(unittest.TestCase): for x in xrange(10): Person(name="Person %s" % x).save() - self.assertEqual(Person.id.get_next_value(), 10) + self.assertEqual(Person.id.get_next_value(), 11) self.db['mongoengine.counters'].drop() self.assertEqual(Person.id.get_next_value(), 1) From bc92f78afb3694f1eb78104f6dc09902516541b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Thu, 16 May 2013 13:12:49 -0300 Subject: [PATCH 290/464] fixes for value_decorator --- mongoengine/fields.py | 4 ++-- tests/fields/fields.py | 15 +++++++++++++++ 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index a56bad8..b192961 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1449,9 +1449,9 @@ class SequenceField(BaseField): data = collection.find_one({"_id": sequence_id}) if data: - return data['next']+1 + return self.value_decorator(data['next']+1) - return 1 + return self.value_decorator(1) def get_sequence_name(self): if self.sequence_name: diff --git a/tests/fields/fields.py b/tests/fields/fields.py index a9fed3c..e803af8 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -1914,6 +1914,21 @@ class FieldTest(unittest.TestCase): self.db['mongoengine.counters'].drop() self.assertEqual(Person.id.get_next_value(), 1) + + class Person(Document): + id = SequenceField(primary_key=True, value_decorator=str) + name = StringField() + + self.db['mongoengine.counters'].drop() + Person.drop_collection() + + for x in xrange(10): + Person(name="Person %s" % x).save() + + self.assertEqual(Person.id.get_next_value(), '11') + self.db['mongoengine.counters'].drop() + + self.assertEqual(Person.id.get_next_value(), '1') def test_sequence_field_sequence_name(self): class Person(Document): From 36a3770673b34e912b894043f4c3d7ce8771c594 Mon Sep 17 00:00:00 2001 From: Daniel Axtens Date: Mon, 20 May 2013 15:49:01 +1000 Subject: [PATCH 291/464] If you need to read from another database, 
use switch_db not switch_collection. --- mongoengine/document.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 6c1045b..89627dc 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -388,7 +388,7 @@ class Document(BaseDocument): user.save() If you need to read from another database see - :class:`~mongoengine.context_managers.switch_collection` + :class:`~mongoengine.context_managers.switch_db` :param collection_name: The database alias to use for saving the document From 89f1c21f20bdbe5ab635f67b3f9f41c19108b54d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 20 May 2013 08:08:52 +0000 Subject: [PATCH 292/464] Updated AUTHORS (#325) --- AUTHORS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 0ff48e8..b3756e8 100644 --- a/AUTHORS +++ b/AUTHORS @@ -159,4 +159,4 @@ that much better: * Nicolas Cortot * Alex (https://github.com/kelsta) * Jin Zhang - + * Daniel Axtens From 8165131419641205b4cba45110df1849ccb3009d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 20 May 2013 08:12:09 +0000 Subject: [PATCH 293/464] Doc updated --- mongoengine/fields.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index b192961..a2ba202 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1443,6 +1443,11 @@ class SequenceField(BaseField): return self.value_decorator(counter['next']) def get_next_value(self): + """Helper method to get the next value for previewing. + + .. warning:: There is no guarantee this will be the next value + as it is only fixed on set. 
+ """ sequence_name = self.get_sequence_name() sequence_id = "%s.%s" % (sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] From 367f49ce1c6831d202b2ef511ce03f131456490e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 20 May 2013 08:12:50 +0000 Subject: [PATCH 294/464] Updated changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index c3e50e4..07145b2 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.0 ================ +- Added `get_next_value` preview for SequenceFields (#319) - Added no_sub_classes context manager and queryset helper (#312) - Querysets now utilises a local cache - Changed __len__ behavour in the queryset (#247, #311) From 6299015039895cadf518fcee7941267161f9ef8f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 20 May 2013 10:04:17 +0000 Subject: [PATCH 295/464] Updated pickling (#316) --- mongoengine/base/document.py | 18 +++++++++--------- tests/fixtures.py | 4 ++-- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index c2ccc48..e3202b9 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -141,16 +141,16 @@ class BaseDocument(object): super(BaseDocument, self).__setattr__(name, value) def __getstate__(self): - removals = ("get_%s_display" % k - for k, v in self._fields.items() if v.choices) - for k in removals: - if hasattr(self, k): - delattr(self, k) - return self.__dict__ + data = {} + for k in ('_changed_fields', '_initialised', '_created'): + data[k] = getattr(self, k) + data['_data'] = self.to_mongo() + return data - def __setstate__(self, __dict__): - self.__dict__ = __dict__ - self.__set_field_display() + def __setstate__(self, data): + for k in ('_changed_fields', '_initialised', '_created'): + setattr(self, k, data[k]) + self._data = 
self.__class__._from_son(data["_data"])._data def __iter__(self): if 'id' in self._fields and 'id' not in self._fields_ordered: diff --git a/tests/fixtures.py b/tests/fixtures.py index a35f144..e207044 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -25,11 +25,11 @@ class PickleSignalsTest(Document): @classmethod def post_save(self, sender, document, created, **kwargs): - pickled = pickle.dumps(document) + pickled = pickle.dumps(document) @classmethod def post_delete(self, sender, document, **kwargs): - pickled = pickle.dumps(document) + pickled = pickle.dumps(document) signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest) signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest) From 56cd73823e7ed4b216bb740c612c21eea59fd1a7 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 20 May 2013 10:09:16 +0000 Subject: [PATCH 296/464] Add backwards compat for pickle --- mongoengine/base/document.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index e3202b9..018adbf 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -148,9 +148,10 @@ class BaseDocument(object): return data def __setstate__(self, data): - for k in ('_changed_fields', '_initialised', '_created'): + if isinstance(data["_data"], SON): + data["_data"] = self.__class__._from_son(data["_data"])._data + for k in ('_changed_fields', '_initialised', '_created', '_data'): setattr(self, k, data[k]) - self._data = self.__class__._from_son(data["_data"])._data def __iter__(self): if 'id' in self._fields and 'id' not in self._fields_ordered: From a6bc870815d78021adeb57119b68376a44864f82 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 20 May 2013 10:10:53 +0000 Subject: [PATCH 297/464] Fixed pickle issues with collections (#316) --- AUTHORS | 1 + docs/changelog.rst | 1 + 2 files changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index 
f014a9f..40ba450 100644 --- a/AUTHORS +++ b/AUTHORS @@ -160,3 +160,4 @@ that much better: * Alex (https://github.com/kelsta) * Jin Zhang * Daniel Axtens + * Leo-Naeka diff --git a/docs/changelog.rst b/docs/changelog.rst index 07145b2..46b641b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.0 ================ +- Fixed pickle issues with collections (#316) - Added `get_next_value` preview for SequenceFields (#319) - Added no_sub_classes context manager and queryset helper (#312) - Querysets now utilises a local cache From ebdd2d730cb2bafe2d62eb5935a77a86a6affc03 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 20 May 2013 10:20:43 +0000 Subject: [PATCH 298/464] Fixed querying ReferenceField custom_id (#317) --- docs/changelog.rst | 1 + mongoengine/fields.py | 2 -- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 46b641b..383f9af 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.0 ================ +- Fixed querying ReferenceField custom_id (#317) - Fixed pickle issues with collections (#316) - Added `get_next_value` preview for SequenceFields (#319) - Added no_sub_classes context manager and queryset helper (#312) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index a2ba202..df2c19e 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -853,8 +853,6 @@ class ReferenceField(BaseField): if not self.dbref: return document.id return document - elif not self.dbref and isinstance(document, basestring): - return ObjectId(document) id_field_name = self.document_type._meta['id_field'] id_field = self.document_type._fields[id_field_name] From 5ef56116820f60e1761e37685ade6c1623373f65 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 20 May 2013 12:34:47 +0000 Subject: [PATCH 299/464] 0.8.0 is a go --- mongoengine/__init__.py | 2 +- python-mongoengine.spec | 2 +- 2 files changed, 2 
insertions(+), 2 deletions(-) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index 0f8913a..7c8407b 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -15,7 +15,7 @@ import django __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + list(queryset.__all__) + signals.__all__ + list(errors.__all__)) -VERSION = (0, 8, 0, 'RC4') +VERSION = (0, 8, 0) def get_version(): diff --git a/python-mongoengine.spec b/python-mongoengine.spec index be9c67b..1a26f47 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.8.0.RC4 +Version: 0.8.0 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB From 306f9c5ffd046a5702b98de9aa9ed47be6d88622 Mon Sep 17 00:00:00 2001 From: Mitar Date: Mon, 20 May 2013 17:30:41 -0700 Subject: [PATCH 300/464] importlib does not exist on Python 2.6. Use Django version. --- mongoengine/django/mongo_auth/models.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/mongoengine/django/mongo_auth/models.py b/mongoengine/django/mongo_auth/models.py index 9629e64..3529d8e 100644 --- a/mongoengine/django/mongo_auth/models.py +++ b/mongoengine/django/mongo_auth/models.py @@ -1,9 +1,8 @@ -from importlib import import_module - from django.conf import settings from django.contrib.auth.models import UserManager from django.core.exceptions import ImproperlyConfigured from django.db import models +from django.utils.importlib import import_module from django.utils.translation import ugettext_lazy as _ From d060da094f5415288fa2c27d5f4c887a04905f8b Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Mon, 20 May 2013 17:40:56 -0700 Subject: [PATCH 301/464] update pickling test case to show the error --- tests/document/instance.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/document/instance.py b/tests/document/instance.py index d972ae5..cdc6fe0 100644 --- 
a/tests/document/instance.py +++ b/tests/document/instance.py @@ -1709,6 +1709,7 @@ class InstanceTest(unittest.TestCase): pickle_doc = PickleTest(number=1, string="One", lists=['1', '2']) pickle_doc.embedded = PickleEmbedded() + pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved pickle_doc.save() pickled_doc = pickle.dumps(pickle_doc) From 9aa77bb3c967f3ceb5e14047791a7b8cc4176503 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 21 May 2013 07:07:17 +0000 Subject: [PATCH 302/464] Fixed pickle unsaved document regression (#327) --- docs/changelog.rst | 4 ++++ mongoengine/base/document.py | 3 ++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 383f9af..6954cfd 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -2,6 +2,10 @@ Changelog ========= +Changes in 0.8.1 +================ +- Fixed pickle unsaved document regression (#327) + Changes in 0.8.0 ================ - Fixed querying ReferenceField custom_id (#317) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 018adbf..719d886 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -143,7 +143,8 @@ class BaseDocument(object): def __getstate__(self): data = {} for k in ('_changed_fields', '_initialised', '_created'): - data[k] = getattr(self, k) + if hasattr(self, k): + data[k] = getattr(self, k) data['_data'] = self.to_mongo() return data From 50f1ca91d478136d1d39969dbc1c132b5b84a21a Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 21 May 2013 09:05:55 +0000 Subject: [PATCH 303/464] Updated Changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 6954cfd..c016676 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.1 ================ +- Fixed Python 2.6 django auth importlib issue (#326) - Fixed pickle unsaved document regression 
(#327) Changes in 0.8.0 From a7470360d2cb33d3d5c82b4c065511133fd1ea12 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 21 May 2013 09:12:09 +0000 Subject: [PATCH 304/464] Version bump --- mongoengine/__init__.py | 2 +- python-mongoengine.spec | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index 7c8407b..8c167f0 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -15,7 +15,7 @@ import django __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + list(queryset.__all__) + signals.__all__ + list(errors.__all__)) -VERSION = (0, 8, 0) +VERSION = (0, 8, 1) def get_version(): diff --git a/python-mongoengine.spec b/python-mongoengine.spec index 1a26f47..7c87b1c 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.8.0 +Version: 0.8.1 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB From 3ffc9dffc22ab326d22db02a84c5d90e514dc321 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 21 May 2013 09:37:22 +0000 Subject: [PATCH 305/464] Updated requirements for test suite --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 594f7f8..365791f 100644 --- a/setup.py +++ b/setup.py @@ -51,13 +51,13 @@ CLASSIFIERS = [ extra_opts = {} if sys.version_info[0] == 3: extra_opts['use_2to3'] = True - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2'] + extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6'] extra_opts['packages'] = find_packages(exclude=('tests',)) if "test" in sys.argv or "nosetests" in sys.argv: extra_opts['packages'].append("tests") extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]} else: - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2'] + 
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2==2.6'] extra_opts['packages'] = find_packages(exclude=('tests',)) setup(name='mongoengine', From a84e1f17bb209e294cf437f3c51a08207eb9bc9b Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 21 May 2013 09:42:22 +0000 Subject: [PATCH 306/464] Fixing django tests for py 2.6 --- tests/test_django.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_django.py b/tests/test_django.py index f81213c..63e3245 100644 --- a/tests/test_django.py +++ b/tests/test_django.py @@ -275,7 +275,7 @@ class MongoAuthTest(unittest.TestCase): def test_user_manager(self): manager = get_user_model()._default_manager - self.assertIsInstance(manager, MongoUserManager) + self.assertTrue(isinstance(manager, MongoUserManager)) def test_user_manager_exception(self): manager = get_user_model()._default_manager @@ -285,14 +285,14 @@ class MongoAuthTest(unittest.TestCase): def test_create_user(self): manager = get_user_model()._default_manager user = manager.create_user(**self.user_data) - self.assertIsInstance(user, User) + self.assertTrue(isinstance(user, User)) db_user = User.objects.get(username='user') self.assertEqual(user.id, db_user.id) def test_authenticate(self): get_user_model()._default_manager.create_user(**self.user_data) user = authenticate(username='user', password='fail') - self.assertIsNone(user) + self.assertEqual(None, user) user = authenticate(username='user', password='test') db_user = User.objects.get(username='user') self.assertEqual(user.id, db_user.id) From 1eb643668244c696b1f9f2320503de39fbe0617b Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 22 May 2013 10:29:45 +0000 Subject: [PATCH 307/464] Added get image by grid_id example --- tests/fields/file_tests.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/tests/fields/file_tests.py b/tests/fields/file_tests.py index 52bd88a..fa76175 100644 --- 
a/tests/fields/file_tests.py +++ b/tests/fields/file_tests.py @@ -407,6 +407,25 @@ class FileTest(unittest.TestCase): self.assertEqual(putfile, copy.copy(putfile)) self.assertEqual(putfile, copy.deepcopy(putfile)) + def test_get_image_by_grid_id(self): + + class TestImage(Document): + + image1 = ImageField() + image2 = ImageField() + + TestImage.drop_collection() + + t = TestImage() + t.image1.put(open(TEST_IMAGE_PATH, 'rb')) + t.image2.put(open(TEST_IMAGE2_PATH, 'rb')) + t.save() + + test = TestImage.objects.first() + grid_id = test.image1.grid_id + + self.assertEqual(1, TestImage.objects(Q(image1=grid_id) + or Q(image2=grid_id)).count()) if __name__ == '__main__': unittest.main() From c96a1b00cf86b0be61cdade0df3e48440a35c287 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 23 May 2013 19:09:05 +0000 Subject: [PATCH 308/464] Documentation cleanup (#328) --- mongoengine/queryset/queryset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 5da6295..4222459 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1479,7 +1479,7 @@ class QuerySet(object): # Deprecated def ensure_index(self, **kwargs): - """Deprecated use :func:`~Document.ensure_index`""" + """Deprecated use :func:`Document.ensure_index`""" msg = ("Doc.objects()._ensure_index() is deprecated. 
" "Use Doc.ensure_index() instead.") warnings.warn(msg, DeprecationWarning) From 5f0d86f509ff02b3f9c14405bde1a15c8ecda9b1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 23 May 2013 19:12:13 +0000 Subject: [PATCH 309/464] Upgrade doc fix (#330) --- docs/upgrade.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index fe9e4fa..6d9f529 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -116,8 +116,8 @@ eg:: # Mark all ReferenceFields as dirty and save for p in Person.objects: - p._mark_as_dirty('parent') - p._mark_as_dirty('friends') + p._mark_as_changed('parent') + p._mark_as_changed('friends') p.save() `An example test migration for ReferenceFields is available on github @@ -145,7 +145,7 @@ eg:: # Mark all ReferenceFields as dirty and save for a in Animal.objects: - a._mark_as_dirty('uuid') + a._mark_as_changed('uuid') a.save() `An example test migration for UUIDFields is available on github @@ -174,7 +174,7 @@ eg:: # Mark all ReferenceFields as dirty and save for p in Person.objects: - p._mark_as_dirty('balance') + p._mark_as_changed('balance') p.save() .. note:: DecimalField's have also been improved with the addition of precision From b4a98a40001348a1dd657a80c2f6c33dcf59901d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 23 May 2013 19:30:57 +0000 Subject: [PATCH 310/464] More upgrade clarifications #331 --- docs/upgrade.rst | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 6d9f529..b5f3304 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -123,6 +123,10 @@ eg:: `An example test migration for ReferenceFields is available on github `_. +.. Note:: Internally mongoengine handles ReferenceFields the same, so they are + converted to DBRef on loading and ObjectIds or DBRefs depending on settings + on storage. 
+ UUIDField --------- @@ -143,7 +147,7 @@ eg:: class Animal(Document): uuid = UUIDField() - # Mark all ReferenceFields as dirty and save + # Mark all UUIDFields as dirty and save for a in Animal.objects: a._mark_as_changed('uuid') a.save() @@ -172,7 +176,7 @@ eg:: class Person(Document): balance = DecimalField() - # Mark all ReferenceFields as dirty and save + # Mark all DecimalField's as dirty and save for p in Person.objects: p._mark_as_changed('balance') p.save() From c5ce96c391bf6d45c0395392bec28051727e6db4 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 23 May 2013 19:44:05 +0000 Subject: [PATCH 311/464] Fix py3 test --- tests/fields/file_tests.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/fields/file_tests.py b/tests/fields/file_tests.py index fa76175..b3b6108 100644 --- a/tests/fields/file_tests.py +++ b/tests/fields/file_tests.py @@ -409,6 +409,9 @@ class FileTest(unittest.TestCase): def test_get_image_by_grid_id(self): + if PY3: + raise SkipTest('PIL does not have Python 3 support') + class TestImage(Document): image1 = ImageField() From 774895ec8c43616b9e6f1ba0788dfe47f6cec4e1 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Thu, 23 May 2013 17:49:28 -0700 Subject: [PATCH 312/464] dont simplify queries with duplicate conditions --- mongoengine/queryset/visitor.py | 11 ++++++++--- tests/queryset/visitor.py | 6 ++---- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index 95d11e8..024f454 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -23,6 +23,9 @@ class QNodeVisitor(object): return query +class DuplicateQueryConditionsError(InvalidQueryError): + pass + class SimplificationVisitor(QNodeVisitor): """Simplifies query trees by combinging unnecessary 'and' connection nodes into a single Q-object. 
@@ -33,7 +36,10 @@ class SimplificationVisitor(QNodeVisitor): # The simplification only applies to 'simple' queries if all(isinstance(node, Q) for node in combination.children): queries = [n.query for n in combination.children] - return Q(**self._query_conjunction(queries)) + try: + return Q(**self._query_conjunction(queries)) + except DuplicateQueryConditionsError: + pass return combination def _query_conjunction(self, queries): @@ -47,8 +53,7 @@ class SimplificationVisitor(QNodeVisitor): # to a single field intersection = ops.intersection(query_ops) if intersection: - msg = 'Duplicate query conditions: ' - raise InvalidQueryError(msg + ', '.join(intersection)) + raise DuplicateQueryConditionsError() query_ops.update(ops) combined_query.update(copy.deepcopy(query)) diff --git a/tests/queryset/visitor.py b/tests/queryset/visitor.py index 2e9195e..8443621 100644 --- a/tests/queryset/visitor.py +++ b/tests/queryset/visitor.py @@ -69,10 +69,8 @@ class QTest(unittest.TestCase): y = StringField() # Check than an error is raised when conflicting queries are anded - def invalid_combination(): - query = Q(x__lt=7) & Q(x__lt=3) - query.to_query(TestDoc) - self.assertRaises(InvalidQueryError, invalid_combination) + query = (Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc) + self.assertEqual(query, {'$and': [ {'x': {'$lt': 7}}, {'x': {'$lt': 3}} ]}) # Check normal cases work without an error query = Q(x__lt=7) & Q(x__gt=3) From ab4ff99105d3ed946fae2de0bb36ddcfa9cbc522 Mon Sep 17 00:00:00 2001 From: Ryan Witt Date: Fri, 24 May 2013 11:24:40 -0300 Subject: [PATCH 313/464] fix guide link --- docs/tutorial.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/tutorial.rst b/docs/tutorial.rst index c2f481b..0c592a0 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -298,5 +298,5 @@ Learning more about mongoengine ------------------------------- If you got this far you've made a great start, so well done! 
The next step on -your mongoengine journey is the `full user guide `_, where you -can learn indepth about how to use mongoengine and mongodb. \ No newline at end of file +your mongoengine journey is the `full user guide `_, where you +can learn indepth about how to use mongoengine and mongodb. From 2b6c42a56c3e5de144eceae663688bb4e69a7992 Mon Sep 17 00:00:00 2001 From: Ryan Witt Date: Fri, 24 May 2013 11:34:15 -0300 Subject: [PATCH 314/464] minor typos --- docs/guide/connecting.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index 8674b5e..854e2c3 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -36,7 +36,7 @@ MongoEngine supports :class:`~pymongo.mongo_replica_set_client.MongoReplicaSetCl to use them please use a URI style connection and provide the `replicaSet` name in the connection kwargs. -Read preferences are supported throught the connection or via individual +Read preferences are supported through the connection or via individual queries by passing the read_preference :: Bar.objects().read_preference(ReadPreference.PRIMARY) @@ -83,7 +83,7 @@ reasons. The :class:`~mongoengine.context_managers.switch_db` context manager allows you to change the database alias for a given class allowing quick and easy -access to the same User document across databases.eg :: +access to the same User document across databases:: from mongoengine.context_managers import switch_db From 7a760f5640b77e1a17a783ca3a606818e523a384 Mon Sep 17 00:00:00 2001 From: Jin Zhang Date: Sat, 25 May 2013 06:46:23 -0600 Subject: [PATCH 315/464] Update django.rst --- docs/django.rst | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/django.rst b/docs/django.rst index 09c91e7..da15188 100644 --- a/docs/django.rst +++ b/docs/django.rst @@ -27,9 +27,9 @@ MongoEngine includes a Django authentication backend, which uses MongoDB. 
The :class:`~mongoengine.Document`, but implements most of the methods and attributes that the standard Django :class:`User` model does - so the two are moderately compatible. Using this backend will allow you to store users in -MongoDB but still use many of the Django authentication infrastucture (such as +MongoDB but still use many of the Django authentication infrastructure (such as the :func:`login_required` decorator and the :func:`authenticate` function). To -enable the MongoEngine auth backend, add the following to you **settings.py** +enable the MongoEngine auth backend, add the following to your **settings.py** file:: AUTHENTICATION_BACKENDS = ( @@ -46,7 +46,7 @@ Custom User model ================= Django 1.5 introduced `Custom user Models ` -which can be used as an alternative the Mongoengine authentication backend. +which can be used as an alternative to the MongoEngine authentication backend. The main advantage of this option is that other components relying on :mod:`django.contrib.auth` and supporting the new swappable user model are more @@ -82,16 +82,16 @@ Sessions ======== Django allows the use of different backend stores for its sessions. MongoEngine provides a MongoDB-based session backend for Django, which allows you to use -sessions in you Django application with just MongoDB. To enable the MongoEngine +sessions in your Django application with just MongoDB. To enable the MongoEngine session backend, ensure that your settings module has ``'django.contrib.sessions.middleware.SessionMiddleware'`` in the ``MIDDLEWARE_CLASSES`` field and ``'django.contrib.sessions'`` in your ``INSTALLED_APPS``. 
From there, all you need to do is add the following line -into you settings module:: +into your settings module:: SESSION_ENGINE = 'mongoengine.django.sessions' -Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` seconds, but doesnt delete cookie at sessions backend, so ``'mongoengine.django.sessions'`` supports `mongodb TTL +Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` seconds, but doesn't delete cookie at sessions backend, so ``'mongoengine.django.sessions'`` supports `mongodb TTL `_. .. versionadded:: 0.2.1 From 159ef12ed78fcded1a6ccc1fee6dde1752dc870b Mon Sep 17 00:00:00 2001 From: ichuang Date: Mon, 27 May 2013 11:19:34 -0400 Subject: [PATCH 316/464] FileField should pass db_alias to GridFSProxy in __set__ call --- mongoengine/fields.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index df2c19e..b588eaa 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1194,6 +1194,7 @@ class FileField(BaseField): # Create a new proxy object as we don't already have one instance._data[key] = self.proxy_class(key=key, instance=instance, + db_alias=self.db_alias, collection_name=self.collection_name) instance._data[key].put(value) else: From 4670f09a6720f523938355376849f7e54f08b0d5 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Mon, 27 May 2013 13:48:02 -0700 Subject: [PATCH 317/464] fix __set_state__ --- mongoengine/base/document.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 719d886..2ffcbc5 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -152,7 +152,8 @@ class BaseDocument(object): if isinstance(data["_data"], SON): data["_data"] = self.__class__._from_son(data["_data"])._data for k in ('_changed_fields', '_initialised', '_created', '_data'): - setattr(self, k, data[k]) + if k in data: + setattr(self, k, data[k]) def 
__iter__(self): if 'id' in self._fields and 'id' not in self._fields_ordered: From 18d8008b895d0f0a1f94bf23b0e93dba666ef4e7 Mon Sep 17 00:00:00 2001 From: Paul Swartz Date: Tue, 28 May 2013 15:59:32 -0400 Subject: [PATCH 318/464] if `dateutil` is available, use it to parse datetimes In particular, this picks up the default `datetime.isoformat()` output, with a "T" as the separator. --- mongoengine/fields.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index df2c19e..8ea48c2 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -7,6 +7,12 @@ import urllib2 import uuid import warnings from operator import itemgetter +try: + import dateutil +except ImportError: + dateutil = None +else: + import dateutil.parser import pymongo import gridfs @@ -371,6 +377,8 @@ class DateTimeField(BaseField): return value() # Attempt to parse a datetime: + if dateutil: + return dateutil.parser.parse(value) # value = smart_str(value) # split usecs, because they are not recognized by strptime. if '.' 
in value: From 1302316eb0ebd2c40c20402f5013c1b977f78cbc Mon Sep 17 00:00:00 2001 From: Paul Swartz Date: Tue, 28 May 2013 16:08:33 -0400 Subject: [PATCH 319/464] add some tests --- tests/fields/fields.py | 64 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/tests/fields/fields.py b/tests/fields/fields.py index e803af8..6c3f49f 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -6,6 +6,11 @@ import datetime import unittest import uuid +try: + import dateutil +except ImportError: + dateutil = None + from decimal import Decimal from bson import Binary, DBRef, ObjectId @@ -482,6 +487,65 @@ class FieldTest(unittest.TestCase): LogEntry.drop_collection() + def test_datetime_usage(self): + """Tests for regular datetime fields""" + class LogEntry(Document): + date = DateTimeField() + + LogEntry.drop_collection() + + d1 = datetime.datetime(1970, 01, 01, 00, 00, 01) + log = LogEntry() + log.date = d1 + log.save() + + for query in (d1, d1.isoformat(' ')): + log1 = LogEntry.objects.get(date=query) + self.assertEqual(log, log1) + + if dateutil: + log1 = LogEntry.objects.get(date=d1.isoformat('T')) + self.assertEqual(log, log1) + + LogEntry.drop_collection() + + # create 60 log entries + for i in xrange(1950, 2010): + d = datetime.datetime(i, 01, 01, 00, 00, 01) + LogEntry(date=d).save() + + self.assertEqual(LogEntry.objects.count(), 60) + + # Test ordering + logs = LogEntry.objects.order_by("date") + count = logs.count() + i = 0 + while i == count - 1: + self.assertTrue(logs[i].date <= logs[i + 1].date) + i += 1 + + logs = LogEntry.objects.order_by("-date") + count = logs.count() + i = 0 + while i == count - 1: + self.assertTrue(logs[i].date >= logs[i + 1].date) + i += 1 + + # Test searching + logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1)) + self.assertEqual(logs.count(), 30) + + logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1)) + self.assertEqual(logs.count(), 30) + + logs = 
LogEntry.objects.filter( + date__lte=datetime.datetime(2011, 1, 1), + date__gte=datetime.datetime(2000, 1, 1), + ) + self.assertEqual(logs.count(), 10) + + LogEntry.drop_collection() + def test_complexdatetime_storage(self): """Tests for complex datetime fields - which can handle microseconds without rounding. From c0571beec82cedc5bd4f52463deb449b6226d89c Mon Sep 17 00:00:00 2001 From: Paul Swartz Date: Tue, 28 May 2013 17:18:54 -0400 Subject: [PATCH 320/464] fix change tracking for ComplexBaseFields --- mongoengine/base/fields.py | 6 ------ tests/fields/fields.py | 21 +++++++++++++++++++++ 2 files changed, 21 insertions(+), 6 deletions(-) diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index 72a9e8e..9f08c09 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -205,12 +205,6 @@ class ComplexBaseField(BaseField): return value - def __set__(self, instance, value): - """Descriptor for assigning a value to a field in a document. - """ - instance._data[self.name] = value - instance._mark_as_changed(self.name) - def to_python(self, value): """Convert a MongoDB-compatible type to a Python type. """ diff --git a/tests/fields/fields.py b/tests/fields/fields.py index e803af8..c9b3313 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -808,6 +808,27 @@ class FieldTest(unittest.TestCase): self.assertRaises(ValidationError, e.save) + def test_complex_field_same_value_not_changed(self): + """ + If a complex field is set to the same value, it should not be marked as + changed. 
+ """ + class Simple(Document): + mapping = ListField() + + Simple.drop_collection() + e = Simple().save() + e.mapping = [] + self.assertEqual([], e._changed_fields) + + class Simple(Document): + mapping = DictField() + + Simple.drop_collection() + e = Simple().save() + e.mapping = {} + self.assertEqual([], e._changed_fields) + def test_list_field_complex(self): """Ensure that the list fields can handle the complex types.""" From 04592c876b6ff6fb0c11338499ed4e0bf6934330 Mon Sep 17 00:00:00 2001 From: Alice Bevan-McGregor Date: Wed, 29 May 2013 12:04:53 -0400 Subject: [PATCH 321/464] Moved pre_save after validation and determination of creation state; added pre_save_validation where pre_save had been. --- mongoengine/document.py | 6 ++++-- mongoengine/signals.py | 1 + 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 89627dc..9946ffa 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -195,7 +195,7 @@ class Document(BaseDocument): the cascade save using cascade_kwargs which overwrites the existing kwargs with custom values """ - signals.pre_save.send(self.__class__, document=self) + signals.pre_save_validation.send(self.__class__, document=self) if validate: self.validate(clean=clean) @@ -206,7 +206,9 @@ class Document(BaseDocument): doc = self.to_mongo() created = ('_id' not in doc or self._created or force_insert) - + + signals.pre_save.send(self.__class__, document=self, created=created) + try: collection = self._get_collection() if created: diff --git a/mongoengine/signals.py b/mongoengine/signals.py index 52ef312..50f8e94 100644 --- a/mongoengine/signals.py +++ b/mongoengine/signals.py @@ -38,6 +38,7 @@ _signals = Namespace() pre_init = _signals.signal('pre_init') post_init = _signals.signal('post_init') +pre_save_validation = _signals.signal('pre_save_validation') pre_save = _signals.signal('pre_save') post_save = _signals.signal('post_save') pre_delete = 
_signals.signal('pre_delete') From 5d44e1d6ca8eb530ddc00c16c0d1cbf5452b90c8 Mon Sep 17 00:00:00 2001 From: Alice Bevan-McGregor Date: Wed, 29 May 2013 12:12:51 -0400 Subject: [PATCH 322/464] Added missing reference in __all__. --- mongoengine/signals.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/signals.py b/mongoengine/signals.py index 50f8e94..f12ab1b 100644 --- a/mongoengine/signals.py +++ b/mongoengine/signals.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -__all__ = ['pre_init', 'post_init', 'pre_save', 'post_save', - 'pre_delete', 'post_delete'] +__all__ = ['pre_init', 'post_init', 'pre_save_validation', 'pre_save', + 'post_save', 'pre_delete', 'post_delete'] signals_available = False try: From 12f6a3f5a3b0e791614bc5fa9f2ab63c0e8adf69 Mon Sep 17 00:00:00 2001 From: Alice Bevan-McGregor Date: Wed, 29 May 2013 12:22:15 -0400 Subject: [PATCH 323/464] Added tests for pre_save_validation and updated tests for pre_save to encompass created flag. --- tests/test_signals.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/tests/test_signals.py b/tests/test_signals.py index 32517dd..f1ce3c9 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -39,9 +39,18 @@ class SignalTests(unittest.TestCase): def post_init(cls, sender, document, **kwargs): signal_output.append('post_init signal, %s' % document) + @classmethod + def pre_save_validation(cls, sender, document, **kwargs): + signal_output.append('pre_save_validation signal, %s' % document) + @classmethod def pre_save(cls, sender, document, **kwargs): signal_output.append('pre_save signal, %s' % document) + if 'created' in kwargs: + if kwargs['created']: + signal_output.append('Is created') + else: + signal_output.append('Is updated') @classmethod def post_save(cls, sender, document, **kwargs): @@ -89,9 +98,18 @@ class SignalTests(unittest.TestCase): def post_init(cls, sender, document, **kwargs): signal_output.append('post_init Another signal, 
%s' % document) + @classmethod + def pre_save_validation(cls, sender, document, **kwargs): + signal_output.append('pre_save_validation Another signal, %s' % document) + @classmethod def pre_save(cls, sender, document, **kwargs): signal_output.append('pre_save Another signal, %s' % document) + if 'created' in kwargs: + if kwargs['created']: + signal_output.append('Is created') + else: + signal_output.append('Is updated') @classmethod def post_save(cls, sender, document, **kwargs): @@ -132,6 +150,7 @@ class SignalTests(unittest.TestCase): self.pre_signals = ( len(signals.pre_init.receivers), len(signals.post_init.receivers), + len(signals.pre_save_validation.receivers), len(signals.pre_save.receivers), len(signals.post_save.receivers), len(signals.pre_delete.receivers), @@ -142,6 +161,7 @@ class SignalTests(unittest.TestCase): signals.pre_init.connect(Author.pre_init, sender=Author) signals.post_init.connect(Author.post_init, sender=Author) + signals.pre_save_validation.connect(Author.pre_save_validation, sender=Author) signals.pre_save.connect(Author.pre_save, sender=Author) signals.post_save.connect(Author.post_save, sender=Author) signals.pre_delete.connect(Author.pre_delete, sender=Author) @@ -151,6 +171,7 @@ class SignalTests(unittest.TestCase): signals.pre_init.connect(Another.pre_init, sender=Another) signals.post_init.connect(Another.post_init, sender=Another) + signals.pre_save_validation.connect(Another.pre_save_validation, sender=Another) signals.pre_save.connect(Another.pre_save, sender=Another) signals.post_save.connect(Another.post_save, sender=Another) signals.pre_delete.connect(Another.pre_delete, sender=Another) @@ -165,6 +186,7 @@ class SignalTests(unittest.TestCase): signals.pre_delete.disconnect(self.Author.pre_delete) signals.post_save.disconnect(self.Author.post_save) signals.pre_save.disconnect(self.Author.pre_save) + signals.pre_save_validation.disconnect(self.Author.pre_save_validation) 
signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert) signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert) @@ -174,6 +196,7 @@ class SignalTests(unittest.TestCase): signals.pre_delete.disconnect(self.Another.pre_delete) signals.post_save.disconnect(self.Another.post_save) signals.pre_save.disconnect(self.Another.pre_save) + signals.pre_save_validation.disconnect(self.Another.pre_save_validation) signals.post_save.disconnect(self.ExplicitId.post_save) @@ -181,6 +204,7 @@ class SignalTests(unittest.TestCase): post_signals = ( len(signals.pre_init.receivers), len(signals.post_init.receivers), + len(signals.pre_save_validation.receivers), len(signals.pre_save.receivers), len(signals.post_save.receivers), len(signals.pre_delete.receivers), @@ -215,7 +239,9 @@ class SignalTests(unittest.TestCase): a1 = self.Author(name='Bill Shakespeare') self.assertEqual(self.get_signal_output(a1.save), [ + "pre_save_validation signal, Bill Shakespeare", "pre_save signal, Bill Shakespeare", + "Is created", "post_save signal, Bill Shakespeare", "Is created" ]) @@ -223,7 +249,9 @@ class SignalTests(unittest.TestCase): a1.reload() a1.name = 'William Shakespeare' self.assertEqual(self.get_signal_output(a1.save), [ + "pre_save_validation signal, William Shakespeare", "pre_save signal, William Shakespeare", + "Is updated", "post_save signal, William Shakespeare", "Is updated" ]) From 122d75f677724e66260561b34f5b86d9b32794c8 Mon Sep 17 00:00:00 2001 From: Alice Bevan-McGregor Date: Wed, 29 May 2013 12:23:32 -0400 Subject: [PATCH 324/464] Added pre_save_validation to signal list in documentation. 
--- docs/guide/signals.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/guide/signals.rst b/docs/guide/signals.rst index 75f81e2..bc31fbd 100644 --- a/docs/guide/signals.rst +++ b/docs/guide/signals.rst @@ -15,6 +15,7 @@ The following document signals exist in MongoEngine and are pretty self-explanat * `mongoengine.signals.pre_init` * `mongoengine.signals.post_init` + * `mongoengine.signals.pre_save_validation` * `mongoengine.signals.pre_save` * `mongoengine.signals.post_save` * `mongoengine.signals.pre_delete` From f28f336026c8ea20f607a4324a5f17ea6b581d5b Mon Sep 17 00:00:00 2001 From: Alice Bevan-McGregor Date: Wed, 29 May 2013 13:17:08 -0400 Subject: [PATCH 325/464] Improved signals documentation and some typo fixes. --- docs/guide/defining-documents.rst | 2 +- docs/guide/signals.rst | 131 +++++++++++++++++++++++++----- 2 files changed, 111 insertions(+), 22 deletions(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 0ee5ad3..b5ba2bf 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -403,7 +403,7 @@ either a single field name, or a list or tuple of field names:: Skipping Document validation on save ------------------------------------ You can also skip the whole document validation process by setting -``validate=False`` when caling the :meth:`~mongoengine.document.Document.save` +``validate=False`` when calling the :meth:`~mongoengine.document.Document.save` method:: class Recipient(Document): diff --git a/docs/guide/signals.rst b/docs/guide/signals.rst index 75f81e2..3fef757 100644 --- a/docs/guide/signals.rst +++ b/docs/guide/signals.rst @@ -1,5 +1,6 @@ .. _signals: +======= Signals ======= @@ -7,36 +8,96 @@ Signals .. note:: - Signal support is provided by the excellent `blinker`_ library and - will gracefully fall back if it is not available. + Signal support is provided by the excellent `blinker`_ library. 
If you wish + to enable signal support this library must be installed, though it is not + required for MongoEngine to function. + +Overview +-------- + +Signals are found within the :module:`~mongoengine.signals` module. Unless +specified signals receive no additional arguments beyond the `sender` class and +`document` instance. Post-signals are only called if there were no exceptions +raised during the processing of their related function. + +Available signals include: + +`pre_init` + Called during the creation of a new :class:`~mongoengine.Document` or + :class:`~mongoengine.EmbeddedDocument` instance, after the constructor + arguments have been collected but before any additional processing has been + done to them. (I.e. assignment of default values.) Handlers for this signal + are passed the dictionary of arguments using the `values` keyword argument + and may modify this dictionary prior to returning. + +`post_init` + Called after all processing of a new :class:`~mongoengine.Document` or + :class:`~mongoengine.EmbeddedDocument` instance has been completed. + +`pre_save` + Called within :meth:`~mongoengine.document.Document.save` prior to performing + any actions. + +`post_save` + Called within :meth:`~mongoengine.document.Document.save` after all actions + (validation, insert/update, cascades, clearing dirty flags) have completed + successfully. Passed the additional boolean keyword argument `created` to + indicate if the save was an insert or an update. + +`pre_delete` + Called within :meth:`~mongoengine.document.Document.delete` prior to + attempting the delete operation. + +`post_delete` + Called within :meth:`~mongoengine.document.Document.delete` upon successful + deletion of the record. + +`pre_bulk_insert` + Called after validation of the documents to insert, but prior to any data + being written. In this case, the `document` argument is replaced by a + `documents` argument representing the list of documents being inserted. 
+ +`post_bulk_insert` + Called after a successful bulk insert operation. As per `pre_bulk_insert`, + the `document` argument is omitted and replaced with a `documents` argument. + An additional boolean argument, `loaded`, identifies the contents of + `documents` as either :class:`~mongoengine.Document` instances when `True` or + simply a list of primary key values for the inserted records if `False`. -The following document signals exist in MongoEngine and are pretty self-explanatory: +Attaching Events +---------------- - * `mongoengine.signals.pre_init` - * `mongoengine.signals.post_init` - * `mongoengine.signals.pre_save` - * `mongoengine.signals.post_save` - * `mongoengine.signals.pre_delete` - * `mongoengine.signals.post_delete` - * `mongoengine.signals.pre_bulk_insert` - * `mongoengine.signals.post_bulk_insert` - -Example usage:: +After writing a handler function like the following:: + import logging + from datetime import datetime + from mongoengine import * from mongoengine import signals + + def update_modified(sender, document): + document.modified = datetime.utcnow() + +You attach the event handler to your :class:`~mongoengine.Document` or +:class:`~mongoengine.EmbeddedDocument` subclass:: + + class Record(Document): + modified = DateTimeField() + + signals.pre_save.connect(update_modified) + +While this is not the most elaborate document model, it does demonstrate the +concepts involved. 
As a more complete demonstration you can also define your +handlers within your subclass:: class Author(Document): name = StringField() - - def __unicode__(self): - return self.name - + @classmethod def pre_save(cls, sender, document, **kwargs): logging.debug("Pre Save: %s" % document.name) - + @classmethod def post_save(cls, sender, document, **kwargs): logging.debug("Post Save: %s" % document.name) @@ -45,16 +106,44 @@ Example usage:: logging.debug("Created") else: logging.debug("Updated") - + signals.pre_save.connect(Author.pre_save, sender=Author) signals.post_save.connect(Author.post_save, sender=Author) +Finally, you can also use this small decorator to quickly create a number of +signals and attach them to your :class:`~mongoengine.Document` or +:class:`~mongoengine.EmbeddedDocument` subclasses as class decorators:: -ReferenceFields and signals + def handler(event): + """Signal decorator to allow use of callback functions as class decorators.""" + + def decorator(fn): + def apply(cls): + event.connect(fn, sender=cls) + return cls + + fn.apply = apply + return fn + + return decorator + +Using the first example of updating a modification time the code is now much +cleaner looking while still allowing manual execution of the callback:: + + @handler(signals.pre_save) + def update_modified(sender, document): + document.modified = datetime.utcnow() + + @update_modified.apply + class Record(Document): + modified = DateTimeField() + + +ReferenceFields and Signals --------------------------- Currently `reverse_delete_rules` do not trigger signals on the other part of -the relationship. If this is required you must manually handled the +the relationship. If this is required you must manually handle the reverse deletion. .. 
_blinker: http://pypi.python.org/pypi/blinker From 35f084ba76d0a6aaf2eaf900530c885ac953da19 Mon Sep 17 00:00:00 2001 From: Alice Bevan-McGregor Date: Wed, 29 May 2013 13:23:18 -0400 Subject: [PATCH 326/464] Fixed :module: reference in docs and added myself to authors. --- AUTHORS | 1 + docs/guide/signals.rst | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 40ba450..ba69bc6 100644 --- a/AUTHORS +++ b/AUTHORS @@ -161,3 +161,4 @@ that much better: * Jin Zhang * Daniel Axtens * Leo-Naeka + * Alice Bevan-McGregor (https://github.com/amcgregor/) diff --git a/docs/guide/signals.rst b/docs/guide/signals.rst index 3fef757..16c1cd0 100644 --- a/docs/guide/signals.rst +++ b/docs/guide/signals.rst @@ -15,7 +15,7 @@ Signals Overview -------- -Signals are found within the :module:`~mongoengine.signals` module. Unless +Signals are found within the `mongoengine.signals` module. Unless specified signals receive no additional arguments beyond the `sender` class and `document` instance. Post-signals are only called if there were no exceptions raised during the processing of their related function. From 4c9e90732e711dfe3fac8f4f887330673147f51c Mon Sep 17 00:00:00 2001 From: Nigel McNie Date: Thu, 30 May 2013 16:37:40 +1200 Subject: [PATCH 327/464] Apply defaults to fields with None value at 'set' time. If a field has a default, and you explicitly set it to None, the behaviour before this patch was very confusing: class Person(Document): created = DateTimeField(default=datetime.datetime.utcnow) >>> p = Person(created=None) >>> p.created datetime.datetime(2013, 5, 30, 0, 18, 20, 242628) >>> p.created datetime.datetime(2013, 5, 30, 0, 18, 20, 995248) >>> p.created datetime.datetime(2013, 5, 30, 0, 18, 21, 370578) It would be stored as None, and then at 'get' time, the default would be applied. As you can see, if the default is a generator, this leads to some crazy behaviour. 
There's an argument that if I asked it to be set to None, why not respect that? But I don't think that's how the rest of mongoengine seems to work (for example, setting a field to None seems to mean it doesn't even get set in mongo - as opposed to being set but with a 'null' value). Besides, as the code shows above, you'd expect p.created to return None. So clearly, mongoengine is already expecting None to mean 'default' where a default is available. This bug also interacts nastily with required=True - if you're forcibly setting the field to None, then at validation time, the None will fail validation despite a perfectly valid default being available. With this patch, when the field is set, the default is immediately applied. This means any generation happens once, the getter always returns the same value, and 'required' validation always respects the default. Note: this breakage seems to be new since mongoengine 0.8. --- mongoengine/base/fields.py | 6 ++++++ tests/fields/fields.py | 13 +++++++++++++ 2 files changed, 19 insertions(+) diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index 72a9e8e..9454023 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -82,6 +82,12 @@ class BaseField(object): def __set__(self, instance, value): """Descriptor for assigning a value to a field in a document. 
""" + if value is None: + value = self.default + # Allow callable default values + if callable(value): + value = value() + if instance._initialised: try: if (self.name not in instance._data or diff --git a/tests/fields/fields.py b/tests/fields/fields.py index e803af8..32c33f7 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -44,6 +44,19 @@ class FieldTest(unittest.TestCase): self.assertEqual(person._fields['age'].help_text, "Your real age") self.assertEqual(person._fields['userid'].verbose_name, "User Identity") + class Person2(Document): + created = DateTimeField(default=datetime.datetime.utcnow) + + person = Person2() + date1 = person.created + date2 = person.created + self.assertEqual(date1, date2) + + person = Person2(created=None) + date1 = person.created + date2 = person.created + self.assertEqual(date1, date2) + def test_required_values(self): """Ensure that required field constraints are enforced. """ From 0493bbbc76457b12dfaaf2a5558a84bc36a1b62a Mon Sep 17 00:00:00 2001 From: Jiequan Date: Sun, 2 Jun 2013 20:46:51 +0800 Subject: [PATCH 328/464] Update upgrade.rst Added docs for the new function: clean() --- docs/upgrade.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index b5f3304..c3d3182 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -91,6 +91,13 @@ the case and the data is set only in the ``document._data`` dictionary: :: File "", line 1, in AttributeError: 'Animal' object has no attribute 'size' +The Document class has introduced a reserved function `clean()`, which will be +called before saving the document. If your document class happen to have a method +with the same name, please try rename it. 
+ + def clean(self): + pass + ReferenceField -------------- From 0fb976a80a3a9a90e7acea5eca07c8eec1c2941c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 3 Jun 2013 13:01:48 +0000 Subject: [PATCH 329/464] Added Ryan to AUTHORS #334 --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 40ba450..39621e9 100644 --- a/AUTHORS +++ b/AUTHORS @@ -161,3 +161,4 @@ that much better: * Jin Zhang * Daniel Axtens * Leo-Naeka + * Ryan Witt (https://github.com/ryanwitt) From 2fe1c20475443cadcf8a38a826485bffdb0b617a Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 3 Jun 2013 13:03:07 +0000 Subject: [PATCH 330/464] Added Jiequan to AUTHORS #354 --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 39621e9..c3d6ff9 100644 --- a/AUTHORS +++ b/AUTHORS @@ -162,3 +162,4 @@ that much better: * Daniel Axtens * Leo-Naeka * Ryan Witt (https://github.com/ryanwitt) + * Jiequan (https://github.com/Jiequan) \ No newline at end of file From b2f78fadd92823120ea73b3ed27ebac7585e1fcb Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 3 Jun 2013 13:05:52 +0000 Subject: [PATCH 331/464] Added test for upsert & update_one #336 --- tests/queryset/queryset.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 01c53d0..5425741 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -545,6 +545,15 @@ class QuerySetTest(unittest.TestCase): self.assertEqual("Bob", bob.name) self.assertEqual(30, bob.age) + def test_upsert_one(self): + self.Person.drop_collection() + + self.Person.objects(name="Bob", age=30).update_one(upsert=True) + + bob = self.Person.objects.first() + self.assertEqual("Bob", bob.name) + self.assertEqual(30, bob.age) + def test_set_on_insert(self): self.Person.drop_collection() From 8d2e7b43726c44eb1bff5ffb3e012d9aa6ec2d6a Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 3 Jun 2013 13:31:35 +0000 Subject: [PATCH 332/464] 
Django session ttl index expiry fixed (#329) --- mongoengine/django/sessions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/django/sessions.py b/mongoengine/django/sessions.py index 29583f5..c90807e 100644 --- a/mongoengine/django/sessions.py +++ b/mongoengine/django/sessions.py @@ -39,7 +39,7 @@ class MongoSession(Document): 'indexes': [ { 'fields': ['expire_date'], - 'expireAfterSeconds': settings.SESSION_COOKIE_AGE + 'expireAfterSeconds': 0 } ] } From fbc46a52af132dd82a207350e43072ba956d5b21 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 3 Jun 2013 13:31:42 +0000 Subject: [PATCH 333/464] Updated changelog --- docs/changelog.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index c016676..02fb824 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -2,6 +2,13 @@ Changelog ========= + +Changes in 0.8.2 +================ +- Django session ttl index expiry fixed (#329) +- Fixed pickle.loads (#342) +- Documentation fixes + Changes in 0.8.1 ================ - Fixed Python 2.6 django auth importlib issue (#326) From 7e6b035ca21282cb57762311fe876ec912be31e1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 3 Jun 2013 13:32:30 +0000 Subject: [PATCH 334/464] Added hensom to AUTHORS #329 --- AUTHORS | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index c3d6ff9..11f2fa7 100644 --- a/AUTHORS +++ b/AUTHORS @@ -162,4 +162,5 @@ that much better: * Daniel Axtens * Leo-Naeka * Ryan Witt (https://github.com/ryanwitt) - * Jiequan (https://github.com/Jiequan) \ No newline at end of file + * Jiequan (https://github.com/Jiequan) + * hensom (https://github.com/hensom) From ceece5a7e214b4bfae9902e03dcd6130c7783b22 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 3 Jun 2013 13:38:58 +0000 Subject: [PATCH 335/464] Improved PIL detection for tests --- tests/fields/file_tests.py | 30 ++++++++++++++++++------------ 1 file changed, 18 insertions(+), 
12 deletions(-) diff --git a/tests/fields/file_tests.py b/tests/fields/file_tests.py index b3b6108..d9dec6f 100644 --- a/tests/fields/file_tests.py +++ b/tests/fields/file_tests.py @@ -14,6 +14,12 @@ from mongoengine import * from mongoengine.connection import get_db from mongoengine.python_support import PY3, b, StringIO +try: + from PIL import Image + HAS_PIL = True +except ImportError: + HAS_PIL = False + TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png') @@ -255,8 +261,8 @@ class FileTest(unittest.TestCase): self.assertFalse(test_file.the_file in [{"test": 1}]) def test_image_field(self): - if PY3: - raise SkipTest('PIL does not have Python 3 support') + if not HAS_PIL: + raise SkipTest('PIL not installed') class TestImage(Document): image = ImageField() @@ -278,8 +284,8 @@ class FileTest(unittest.TestCase): t.image.delete() def test_image_field_reassigning(self): - if PY3: - raise SkipTest('PIL does not have Python 3 support') + if not HAS_PIL: + raise SkipTest('PIL not installed') class TestFile(Document): the_file = ImageField() @@ -294,8 +300,8 @@ class FileTest(unittest.TestCase): self.assertEqual(test_file.the_file.size, (45, 101)) def test_image_field_resize(self): - if PY3: - raise SkipTest('PIL does not have Python 3 support') + if not HAS_PIL: + raise SkipTest('PIL not installed') class TestImage(Document): image = ImageField(size=(185, 37)) @@ -317,8 +323,8 @@ class FileTest(unittest.TestCase): t.image.delete() def test_image_field_resize_force(self): - if PY3: - raise SkipTest('PIL does not have Python 3 support') + if not HAS_PIL: + raise SkipTest('PIL not installed') class TestImage(Document): image = ImageField(size=(185, 37, True)) @@ -340,8 +346,8 @@ class FileTest(unittest.TestCase): t.image.delete() def test_image_field_thumbnail(self): - if PY3: - raise SkipTest('PIL does not have Python 3 support') + if not HAS_PIL: + raise SkipTest('PIL 
not installed') class TestImage(Document): image = ImageField(thumbnail_size=(92, 18)) @@ -409,8 +415,8 @@ class FileTest(unittest.TestCase): def test_get_image_by_grid_id(self): - if PY3: - raise SkipTest('PIL does not have Python 3 support') + if not HAS_PIL: + raise SkipTest('PIL not installed') class TestImage(Document): From 4c8dfc3fc25e03c8e58209d48cb524194934b3ff Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 3 Jun 2013 15:40:54 +0000 Subject: [PATCH 336/464] Fixed Doc.objects(read_preference=X) not setting read preference (#352) --- docs/changelog.rst | 1 + mongoengine/queryset/queryset.py | 10 +++++++--- tests/queryset/queryset.py | 5 ++++- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 02fb824..6a4dab6 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.8.2 ================ +- Fixed Doc.objects(read_preference=X) not setting read preference (#352) - Django session ttl index expiry fixed (#329) - Fixed pickle.loads (#342) - Documentation fixes diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 4222459..00a0abc 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -104,13 +104,17 @@ class QuerySet(object): raise InvalidQueryError(msg) query &= q_obj - queryset = self.clone() + if read_preference is None: + queryset = self.clone() + else: + # Use the clone provided when setting read_preference + queryset = self.read_preference(read_preference) + queryset._query_obj &= query queryset._mongo_query = None queryset._cursor_obj = None - if read_preference is not None: - queryset.read_preference(read_preference) queryset._class_check = class_check + return queryset def __len__(self): diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 5425741..507408d 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3098,7 +3098,10 @@ class 
QuerySetTest(unittest.TestCase): self.assertEqual([], bars) self.assertRaises(ConfigurationError, Bar.objects, - read_preference='Primary') + read_preference='Primary') + + bars = Bar.objects(read_preference=ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) def test_json_simple(self): From 5447c6e947fb6cf16c3995cd24fe7618e0707855 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 4 Jun 2013 09:08:13 +0000 Subject: [PATCH 337/464] DateTimeField now auto converts valid datetime isostrings into dates (#343) --- docs/changelog.rst | 2 ++ mongoengine/fields.py | 25 +++++++++++++++++++------ setup.py | 2 +- tests/fields/fields.py | 12 ++++++++++-- 4 files changed, 32 insertions(+), 9 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 6a4dab6..6b666aa 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,8 @@ Changelog Changes in 0.8.2 ================ +- DateTimeField now auto converts valid datetime isostrings into dates (#343) +- DateTimeField now uses dateutil for parsing if available (#343) - Fixed Doc.objects(read_preference=X) not setting read preference (#352) - Django session ttl index expiry fixed (#329) - Fixed pickle.loads (#342) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 8ea48c2..2b0e395 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -7,6 +7,7 @@ import urllib2 import uuid import warnings from operator import itemgetter + try: import dateutil except ImportError: @@ -353,6 +354,11 @@ class BooleanField(BaseField): class DateTimeField(BaseField): """A datetime field. + Uses the python-dateutil library if available alternatively use time.strptime + to parse the dates. Note: python-dateutil's parser is fully featured and when + installed you can utilise it to convert varing types of date formats into valid + python datetime objects. + Note: Microseconds are rounded to the nearest millisecond. 
Pre UTC microsecond support is effecively broken. Use :class:`~mongoengine.fields.ComplexDateTimeField` if you @@ -360,13 +366,11 @@ class DateTimeField(BaseField): """ def validate(self, value): - if not isinstance(value, (datetime.datetime, datetime.date)): + new_value = self.to_mongo(value) + if not isinstance(new_value, (datetime.datetime, datetime.date)): self.error(u'cannot parse date "%s"' % value) def to_mongo(self, value): - return self.prepare_query_value(None, value) - - def prepare_query_value(self, op, value): if value is None: return value if isinstance(value, datetime.datetime): @@ -376,10 +380,16 @@ class DateTimeField(BaseField): if callable(value): return value() + if not isinstance(value, basestring): + return None + # Attempt to parse a datetime: if dateutil: - return dateutil.parser.parse(value) - # value = smart_str(value) + try: + return dateutil.parser.parse(value) + except ValueError: + return None + # split usecs, because they are not recognized by strptime. if '.' 
in value: try: @@ -404,6 +414,9 @@ class DateTimeField(BaseField): except ValueError: return None + def prepare_query_value(self, op, value): + return self.to_mongo(value) + class ComplexDateTimeField(StringField): """ diff --git a/setup.py b/setup.py index 365791f..1888828 100644 --- a/setup.py +++ b/setup.py @@ -57,7 +57,7 @@ if sys.version_info[0] == 3: extra_opts['packages'].append("tests") extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]} else: - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2==2.6'] + extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2==2.6', 'python-dateutil==1.5'] extra_opts['packages'] = find_packages(exclude=('tests',)) setup(name='mongoengine', diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 6c3f49f..00a4bd7 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -408,9 +408,16 @@ class FieldTest(unittest.TestCase): log.time = datetime.date.today() log.validate() + log.time = datetime.datetime.now().isoformat(' ') + log.validate() + + if dateutil: + log.time = datetime.datetime.now().isoformat('T') + log.validate() + log.time = -1 self.assertRaises(ValidationError, log.validate) - log.time = '1pm' + log.time = 'ABC' self.assertRaises(ValidationError, log.validate) def test_datetime_tz_aware_mark_as_changed(self): @@ -497,6 +504,7 @@ class FieldTest(unittest.TestCase): d1 = datetime.datetime(1970, 01, 01, 00, 00, 01) log = LogEntry() log.date = d1 + log.validate() log.save() for query in (d1, d1.isoformat(' ')): @@ -1993,7 +2001,7 @@ class FieldTest(unittest.TestCase): self.db['mongoengine.counters'].drop() self.assertEqual(Person.id.get_next_value(), '1') - + def test_sequence_field_sequence_name(self): class Person(Document): id = SequenceField(primary_key=True, sequence_name='jelly') From 4244e7569b7b7d1c131fb4aa842810d976e3d655 Mon Sep 17 00:00:00 2001 From: Ross Lawley 
Date: Tue, 4 Jun 2013 09:35:44 +0000 Subject: [PATCH 338/464] Added pre_save_post_validation signal (#345) --- docs/changelog.rst | 1 + mongoengine/document.py | 8 ++++---- mongoengine/signals.py | 4 ++-- setup.py | 2 +- tests/test_signals.py | 36 ++++++++++++++++++------------------ 5 files changed, 26 insertions(+), 25 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 6b666aa..006dfc7 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.8.2 ================ +- Added pre_save_post_validation signal (#345) - DateTimeField now auto converts valid datetime isostrings into dates (#343) - DateTimeField now uses dateutil for parsing if available (#343) - Fixed Doc.objects(read_preference=X) not setting read preference (#352) diff --git a/mongoengine/document.py b/mongoengine/document.py index 9946ffa..92d0631 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -195,7 +195,7 @@ class Document(BaseDocument): the cascade save using cascade_kwargs which overwrites the existing kwargs with custom values """ - signals.pre_save_validation.send(self.__class__, document=self) + signals.pre_save.send(self.__class__, document=self) if validate: self.validate(clean=clean) @@ -206,9 +206,9 @@ class Document(BaseDocument): doc = self.to_mongo() created = ('_id' not in doc or self._created or force_insert) - - signals.pre_save.send(self.__class__, document=self, created=created) - + + signals.pre_save_post_validation.send(self.__class__, document=self, created=created) + try: collection = self._get_collection() if created: diff --git a/mongoengine/signals.py b/mongoengine/signals.py index f12ab1b..06fb8b4 100644 --- a/mongoengine/signals.py +++ b/mongoengine/signals.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -__all__ = ['pre_init', 'post_init', 'pre_save_validation', 'pre_save', +__all__ = ['pre_init', 'post_init', 'pre_save', 'pre_save_post_validation', 'post_save', 'pre_delete', 'post_delete'] 
signals_available = False @@ -38,8 +38,8 @@ _signals = Namespace() pre_init = _signals.signal('pre_init') post_init = _signals.signal('post_init') -pre_save_validation = _signals.signal('pre_save_validation') pre_save = _signals.signal('pre_save') +pre_save_post_validation = _signals.signal('pre_save_post_validation') post_save = _signals.signal('post_save') pre_delete = _signals.signal('pre_delete') post_delete = _signals.signal('post_delete') diff --git a/setup.py b/setup.py index 1888828..effb6f1 100644 --- a/setup.py +++ b/setup.py @@ -57,7 +57,7 @@ if sys.version_info[0] == 3: extra_opts['packages'].append("tests") extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]} else: - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2==2.6', 'python-dateutil==1.5'] + extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2==2.6', 'python-dateutil'] extra_opts['packages'] = find_packages(exclude=('tests',)) setup(name='mongoengine', diff --git a/tests/test_signals.py b/tests/test_signals.py index f1ce3c9..65289c2 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -40,12 +40,12 @@ class SignalTests(unittest.TestCase): signal_output.append('post_init signal, %s' % document) @classmethod - def pre_save_validation(cls, sender, document, **kwargs): - signal_output.append('pre_save_validation signal, %s' % document) + def pre_save(cls, sender, document, **kwargs): + signal_output.append('pre_save signal,, %s' % document) @classmethod - def pre_save(cls, sender, document, **kwargs): - signal_output.append('pre_save signal, %s' % document) + def pre_save_post_validation(cls, sender, document, **kwargs): + signal_output.append('pre_save_post_validation signal, %s' % document) if 'created' in kwargs: if kwargs['created']: signal_output.append('Is created') @@ -98,13 +98,13 @@ class SignalTests(unittest.TestCase): def post_init(cls, sender, 
document, **kwargs): signal_output.append('post_init Another signal, %s' % document) - @classmethod - def pre_save_validation(cls, sender, document, **kwargs): - signal_output.append('pre_save_validation Another signal, %s' % document) - @classmethod def pre_save(cls, sender, document, **kwargs): signal_output.append('pre_save Another signal, %s' % document) + + @classmethod + def pre_save_post_validation(cls, sender, document, **kwargs): + signal_output.append('pre_save_post_validation Another signal, %s' % document) if 'created' in kwargs: if kwargs['created']: signal_output.append('Is created') @@ -150,8 +150,8 @@ class SignalTests(unittest.TestCase): self.pre_signals = ( len(signals.pre_init.receivers), len(signals.post_init.receivers), - len(signals.pre_save_validation.receivers), len(signals.pre_save.receivers), + len(signals.pre_save_post_validation.receivers), len(signals.post_save.receivers), len(signals.pre_delete.receivers), len(signals.post_delete.receivers), @@ -161,8 +161,8 @@ class SignalTests(unittest.TestCase): signals.pre_init.connect(Author.pre_init, sender=Author) signals.post_init.connect(Author.post_init, sender=Author) - signals.pre_save_validation.connect(Author.pre_save_validation, sender=Author) signals.pre_save.connect(Author.pre_save, sender=Author) + signals.pre_save_post_validation.connect(Author.pre_save_post_validation, sender=Author) signals.post_save.connect(Author.post_save, sender=Author) signals.pre_delete.connect(Author.pre_delete, sender=Author) signals.post_delete.connect(Author.post_delete, sender=Author) @@ -171,8 +171,8 @@ class SignalTests(unittest.TestCase): signals.pre_init.connect(Another.pre_init, sender=Another) signals.post_init.connect(Another.post_init, sender=Another) - signals.pre_save_validation.connect(Another.pre_save_validation, sender=Another) signals.pre_save.connect(Another.pre_save, sender=Another) + signals.pre_save_post_validation.connect(Another.pre_save_post_validation, sender=Another) 
signals.post_save.connect(Another.post_save, sender=Another) signals.pre_delete.connect(Another.pre_delete, sender=Another) signals.post_delete.connect(Another.post_delete, sender=Another) @@ -185,8 +185,8 @@ class SignalTests(unittest.TestCase): signals.post_delete.disconnect(self.Author.post_delete) signals.pre_delete.disconnect(self.Author.pre_delete) signals.post_save.disconnect(self.Author.post_save) + signals.pre_save_post_validation.disconnect(self.Author.pre_save_post_validation) signals.pre_save.disconnect(self.Author.pre_save) - signals.pre_save_validation.disconnect(self.Author.pre_save_validation) signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert) signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert) @@ -195,8 +195,8 @@ class SignalTests(unittest.TestCase): signals.post_delete.disconnect(self.Another.post_delete) signals.pre_delete.disconnect(self.Another.pre_delete) signals.post_save.disconnect(self.Another.post_save) + signals.pre_save_post_validation.disconnect(self.Another.pre_save_post_validation) signals.pre_save.disconnect(self.Another.pre_save) - signals.pre_save_validation.disconnect(self.Another.pre_save_validation) signals.post_save.disconnect(self.ExplicitId.post_save) @@ -204,8 +204,8 @@ class SignalTests(unittest.TestCase): post_signals = ( len(signals.pre_init.receivers), len(signals.post_init.receivers), - len(signals.pre_save_validation.receivers), len(signals.pre_save.receivers), + len(signals.pre_save_post_validation.receivers), len(signals.post_save.receivers), len(signals.pre_delete.receivers), len(signals.post_delete.receivers), @@ -239,8 +239,8 @@ class SignalTests(unittest.TestCase): a1 = self.Author(name='Bill Shakespeare') self.assertEqual(self.get_signal_output(a1.save), [ - "pre_save_validation signal, Bill Shakespeare", - "pre_save signal, Bill Shakespeare", + "pre_save signal,, Bill Shakespeare", + "pre_save_post_validation signal, Bill Shakespeare", "Is created", "post_save signal, Bill 
Shakespeare", "Is created" @@ -249,8 +249,8 @@ class SignalTests(unittest.TestCase): a1.reload() a1.name = 'William Shakespeare' self.assertEqual(self.get_signal_output(a1.save), [ - "pre_save_validation signal, William Shakespeare", - "pre_save signal, William Shakespeare", + "pre_save signal,, William Shakespeare", + "pre_save_post_validation signal, William Shakespeare", "Is updated", "post_save signal, William Shakespeare", "Is updated" From 626a3369b522de7fc0f1af268d833e0207290237 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 4 Jun 2013 09:51:58 +0000 Subject: [PATCH 339/464] Removed unused var in _get_changed_fields (#347) --- docs/changelog.rst | 1 + mongoengine/base/document.py | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 006dfc7..bc6f283 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.8.2 ================ +- Removed unused var in _get_changed_fields (#347) - Added pre_save_post_validation signal (#345) - DateTimeField now auto converts valid datetime isostrings into dates (#343) - DateTimeField now uses dateutil for parsing if available (#343) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 2ffcbc5..e2944fb 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -392,7 +392,7 @@ class BaseDocument(object): if field_value: field_value._clear_changed_fields() - def _get_changed_fields(self, key='', inspected=None): + def _get_changed_fields(self, inspected=None): """Returns a list of all fields that have explicitly been changed. 
""" EmbeddedDocument = _import_class("EmbeddedDocument") @@ -423,7 +423,7 @@ class BaseDocument(object): if (isinstance(field, (EmbeddedDocument, DynamicEmbeddedDocument)) and db_field_name not in _changed_fields): # Find all embedded fields that have been changed - changed = field._get_changed_fields(key, inspected) + changed = field._get_changed_fields(inspected) _changed_fields += ["%s%s" % (key, k) for k in changed if k] elif (isinstance(field, (list, tuple, dict)) and db_field_name not in _changed_fields): @@ -437,7 +437,7 @@ class BaseDocument(object): if not hasattr(value, '_get_changed_fields'): continue list_key = "%s%s." % (key, index) - changed = value._get_changed_fields(list_key, inspected) + changed = value._get_changed_fields(inspected) _changed_fields += ["%s%s" % (list_key, k) for k in changed if k] return _changed_fields From f27a53653b0eac351af283757f01ba9bf94323b4 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 4 Jun 2013 09:56:38 +0000 Subject: [PATCH 340/464] Updated changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index bc6f283..640ac39 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.8.2 ================ +- Removed customised __set__ change tracking in ComplexBaseField (#344) - Removed unused var in _get_changed_fields (#347) - Added pre_save_post_validation signal (#345) - DateTimeField now auto converts valid datetime isostrings into dates (#343) From d94a191656b0761bf554ea15ec49e683e3085ef0 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 4 Jun 2013 10:20:24 +0000 Subject: [PATCH 341/464] Updated Changelog added test for #341 --- docs/changelog.rst | 1 + tests/fields/file_tests.py | 8 ++++++++ 2 files changed, 9 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 640ac39..0113a72 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.8.2 
================ +- FileField now honouring db_alias (#341) - Removed customised __set__ change tracking in ComplexBaseField (#344) - Removed unused var in _get_changed_fields (#347) - Added pre_save_post_validation signal (#345) diff --git a/tests/fields/file_tests.py b/tests/fields/file_tests.py index d9dec6f..5bcc3a2 100644 --- a/tests/fields/file_tests.py +++ b/tests/fields/file_tests.py @@ -394,6 +394,14 @@ class FileTest(unittest.TestCase): self.assertEqual(test_file.the_file.read(), b('Hello, World!')) + test_file = TestFile.objects.first() + test_file.the_file = b('HELLO, WORLD!') + test_file.save() + + test_file = TestFile.objects.first() + self.assertEqual(test_file.the_file.read(), + b('HELLO, WORLD!')) + def test_copyable(self): class PutFile(Document): the_file = FileField() From 0d35e3a3e91b98ede77921d93b3c5a76132ff15f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 4 Jun 2013 10:20:49 +0000 Subject: [PATCH 342/464] Added debugging for query counter --- mongoengine/context_managers.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 1280e11..a5e2524 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -189,7 +189,10 @@ class query_counter(object): def __eq__(self, value): """ == Compare querycounter. """ - return value == self._get_count() + counter = self._get_count() + if value != counter: + print [x for x in self.db.system.profile.find()] + return value == counter def __ne__(self, value): """ != Compare querycounter. 
""" From ee725354db066ea11f25dd01387f8d3dcb721c6c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 4 Jun 2013 10:46:38 +0000 Subject: [PATCH 343/464] Querysets are now lest restrictive when querying duplicate fields (#332, #333) --- docs/changelog.rst | 1 + mongoengine/queryset/visitor.py | 5 +++-- tests/queryset/visitor.py | 30 ++++++++++++++++++++++++------ 3 files changed, 28 insertions(+), 8 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 0113a72..3e86988 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.8.2 ================ +- Querysets are now lest restrictive when querying duplicate fields (#332, #333) - FileField now honouring db_alias (#341) - Removed customised __set__ change tracking in ComplexBaseField (#344) - Removed unused var in _get_changed_fields (#347) diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index 024f454..41f4ebf 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -26,6 +26,7 @@ class QNodeVisitor(object): class DuplicateQueryConditionsError(InvalidQueryError): pass + class SimplificationVisitor(QNodeVisitor): """Simplifies query trees by combinging unnecessary 'and' connection nodes into a single Q-object. 
@@ -39,6 +40,7 @@ class SimplificationVisitor(QNodeVisitor): try: return Q(**self._query_conjunction(queries)) except DuplicateQueryConditionsError: + # Cannot be simplified pass return combination @@ -127,8 +129,7 @@ class QCombination(QNode): # If the child is a combination of the same type, we can merge its # children directly into this combinations children if isinstance(node, QCombination) and node.operation == operation: - # self.children += node.children - self.children.append(node) + self.children += node.children else: self.children.append(node) diff --git a/tests/queryset/visitor.py b/tests/queryset/visitor.py index 8443621..0bb6f69 100644 --- a/tests/queryset/visitor.py +++ b/tests/queryset/visitor.py @@ -68,9 +68,11 @@ class QTest(unittest.TestCase): x = IntField() y = StringField() - # Check than an error is raised when conflicting queries are anded query = (Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc) - self.assertEqual(query, {'$and': [ {'x': {'$lt': 7}}, {'x': {'$lt': 3}} ]}) + self.assertEqual(query, {'$and': [{'x': {'$lt': 7}}, {'x': {'$lt': 3}}]}) + + query = (Q(y="a") & Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc) + self.assertEqual(query, {'$and': [{'y': "a"}, {'x': {'$lt': 7}}, {'x': {'$lt': 3}}]}) # Check normal cases work without an error query = Q(x__lt=7) & Q(x__gt=3) @@ -323,10 +325,26 @@ class QTest(unittest.TestCase): pk = ObjectId() User(email='example@example.com', pk=pk).save() - self.assertEqual(1, User.objects.filter( - Q(email='example@example.com') | - Q(name='John Doe') - ).limit(2).filter(pk=pk).count()) + self.assertEqual(1, User.objects.filter(Q(email='example@example.com') | + Q(name='John Doe')).limit(2).filter(pk=pk).count()) + + def test_chained_q_or_filtering(self): + + class Post(EmbeddedDocument): + name = StringField(required=True) + + class Item(Document): + postables = ListField(EmbeddedDocumentField(Post)) + + Item.drop_collection() + + Item(postables=[Post(name="a"), Post(name="b")]).save() + 
Item(postables=[Post(name="a"), Post(name="c")]).save() + Item(postables=[Post(name="a"), Post(name="b"), Post(name="c")]).save() + + self.assertEqual(Item.objects(Q(postables__name="a") & Q(postables__name="b")).count(), 2) + self.assertEqual(Item.objects.filter(postables__name="a").filter(postables__name="b").count(), 2) + if __name__ == '__main__': unittest.main() From d47134bbf13a73e0a4b2168f709305a4bca93430 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 4 Jun 2013 11:03:50 +0000 Subject: [PATCH 344/464] Reload forces read preference to be PRIMARY (#355) --- docs/changelog.rst | 1 + mongoengine/document.py | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 3e86988..20e2046 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.8.2 ================ +- Reload forces read preference to be PRIMARY (#355) - Querysets are now lest restrictive when querying duplicate fields (#332, #333) - FileField now honouring db_alias (#341) - Removed customised __set__ change tracking in ComplexBaseField (#344) diff --git a/mongoengine/document.py b/mongoengine/document.py index 92d0631..e04e2bc 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -3,6 +3,7 @@ import warnings import pymongo import re +from pymongo.read_preferences import ReadPreference from bson.dbref import DBRef from mongoengine import signals from mongoengine.base import (DocumentMetaclass, TopLevelDocumentMetaclass, @@ -421,8 +422,9 @@ class Document(BaseDocument): .. 
versionchanged:: 0.6 Now chainable """ id_field = self._meta['id_field'] - obj = self._qs.filter(**{id_field: self[id_field]} - ).limit(1).select_related(max_depth=max_depth) + obj = self._qs.read_preference(ReadPreference.PRIMARY).filter( + **{id_field: self[id_field]}).limit(1).select_related(max_depth=max_depth) + if obj: obj = obj[0] else: From eeb5a83e98c598ad18b6183495a89cd0c7d1cf32 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 4 Jun 2013 16:35:25 +0000 Subject: [PATCH 345/464] Added lock when calling doc.Delete() for when signals have no sender (#350) --- docs/changelog.rst | 1 + mongoengine/document.py | 3 +- mongoengine/queryset/queryset.py | 17 +++++--- tests/test_signals.py | 70 ++------------------------------ 4 files changed, 16 insertions(+), 75 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 20e2046..df24a5f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.8.2 ================ +- Added lock when calling doc.Delete() for when signals have no sender (#350) - Reload forces read preference to be PRIMARY (#355) - Querysets are now lest restrictive when querying duplicate fields (#332, #333) - FileField now honouring db_alias (#341) diff --git a/mongoengine/document.py b/mongoengine/document.py index e04e2bc..5edfc81 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -347,11 +347,10 @@ class Document(BaseDocument): signals.pre_delete.send(self.__class__, document=self) try: - self._qs.filter(**self._object_key).delete(write_concern=write_concern) + self._qs.filter(**self._object_key).delete(write_concern=write_concern, _from_doc_delete=True) except pymongo.errors.OperationFailure, err: message = u'Could not delete document (%s)' % err.message raise OperationError(message) - signals.post_delete.send(self.__class__, document=self) def switch_db(self, db_alias): diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 
00a0abc..5077f89 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -407,7 +407,7 @@ class QuerySet(object): self._len = count return count - def delete(self, write_concern=None): + def delete(self, write_concern=None, _from_doc_delete=False): """Delete the documents matched by the query. :param write_concern: Extra keyword arguments are passed down which @@ -416,20 +416,25 @@ class QuerySet(object): ``save(..., write_concern={w: 2, fsync: True}, ...)`` will wait until at least two servers have recorded the write and will force an fsync on the primary server. + :param _from_doc_delete: True when called from document delete therefore + signals will have been triggered so don't loop. """ queryset = self.clone() doc = queryset._document + if not write_concern: + write_concern = {} + + # Handle deletes where skips or limits have been applied or + # there is an untriggered delete signal has_delete_signal = signals.signals_available and ( signals.pre_delete.has_receivers_for(self._document) or signals.post_delete.has_receivers_for(self._document)) - if not write_concern: - write_concern = {} + call_document_delete = (queryset._skip or queryset._limit or + has_delete_signal) and not _from_doc_delete - # Handle deletes where skips or limits have been applied or has a - # delete signal - if queryset._skip or queryset._limit or has_delete_signal: + if call_document_delete: for doc in queryset: doc.delete(write_concern=write_concern) return diff --git a/tests/test_signals.py b/tests/test_signals.py index 65289c2..27614bd 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -41,7 +41,7 @@ class SignalTests(unittest.TestCase): @classmethod def pre_save(cls, sender, document, **kwargs): - signal_output.append('pre_save signal,, %s' % document) + signal_output.append('pre_save signal, %s' % document) @classmethod def pre_save_post_validation(cls, sender, document, **kwargs): @@ -83,54 +83,6 @@ class SignalTests(unittest.TestCase): 
self.Author = Author Author.drop_collection() - class Another(Document): - name = StringField() - - def __unicode__(self): - return self.name - - @classmethod - def pre_init(cls, sender, document, **kwargs): - signal_output.append('pre_init Another signal, %s' % cls.__name__) - signal_output.append(str(kwargs['values'])) - - @classmethod - def post_init(cls, sender, document, **kwargs): - signal_output.append('post_init Another signal, %s' % document) - - @classmethod - def pre_save(cls, sender, document, **kwargs): - signal_output.append('pre_save Another signal, %s' % document) - - @classmethod - def pre_save_post_validation(cls, sender, document, **kwargs): - signal_output.append('pre_save_post_validation Another signal, %s' % document) - if 'created' in kwargs: - if kwargs['created']: - signal_output.append('Is created') - else: - signal_output.append('Is updated') - - @classmethod - def post_save(cls, sender, document, **kwargs): - signal_output.append('post_save Another signal, %s' % document) - if 'created' in kwargs: - if kwargs['created']: - signal_output.append('Is created') - else: - signal_output.append('Is updated') - - @classmethod - def pre_delete(cls, sender, document, **kwargs): - signal_output.append('pre_delete Another signal, %s' % document) - - @classmethod - def post_delete(cls, sender, document, **kwargs): - signal_output.append('post_delete Another signal, %s' % document) - - self.Another = Another - Another.drop_collection() - class ExplicitId(Document): id = IntField(primary_key=True) @@ -169,14 +121,6 @@ class SignalTests(unittest.TestCase): signals.pre_bulk_insert.connect(Author.pre_bulk_insert, sender=Author) signals.post_bulk_insert.connect(Author.post_bulk_insert, sender=Author) - signals.pre_init.connect(Another.pre_init, sender=Another) - signals.post_init.connect(Another.post_init, sender=Another) - signals.pre_save.connect(Another.pre_save, sender=Another) - 
signals.pre_save_post_validation.connect(Another.pre_save_post_validation, sender=Another) - signals.post_save.connect(Another.post_save, sender=Another) - signals.pre_delete.connect(Another.pre_delete, sender=Another) - signals.post_delete.connect(Another.post_delete, sender=Another) - signals.post_save.connect(ExplicitId.post_save, sender=ExplicitId) def tearDown(self): @@ -190,14 +134,6 @@ class SignalTests(unittest.TestCase): signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert) signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert) - signals.pre_init.disconnect(self.Another.pre_init) - signals.post_init.disconnect(self.Another.post_init) - signals.post_delete.disconnect(self.Another.post_delete) - signals.pre_delete.disconnect(self.Another.pre_delete) - signals.post_save.disconnect(self.Another.post_save) - signals.pre_save_post_validation.disconnect(self.Another.pre_save_post_validation) - signals.pre_save.disconnect(self.Another.pre_save) - signals.post_save.disconnect(self.ExplicitId.post_save) # Check that all our signals got disconnected properly. 
@@ -239,7 +175,7 @@ class SignalTests(unittest.TestCase): a1 = self.Author(name='Bill Shakespeare') self.assertEqual(self.get_signal_output(a1.save), [ - "pre_save signal,, Bill Shakespeare", + "pre_save signal, Bill Shakespeare", "pre_save_post_validation signal, Bill Shakespeare", "Is created", "post_save signal, Bill Shakespeare", @@ -249,7 +185,7 @@ class SignalTests(unittest.TestCase): a1.reload() a1.name = 'William Shakespeare' self.assertEqual(self.get_signal_output(a1.save), [ - "pre_save signal,, William Shakespeare", + "pre_save signal, William Shakespeare", "pre_save_post_validation signal, William Shakespeare", "Is updated", "post_save signal, William Shakespeare", From 74a3fd7596c66da36aa3e7fd77ed05665d0712de Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 4 Jun 2013 16:59:25 +0000 Subject: [PATCH 346/464] Added queryset delete tests for signals --- tests/test_signals.py | 33 ++++++++++++++++++++++++++++++++- 1 file changed, 32 insertions(+), 1 deletion(-) diff --git a/tests/test_signals.py b/tests/test_signals.py index 27614bd..50e5e6b 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -83,6 +83,24 @@ class SignalTests(unittest.TestCase): self.Author = Author Author.drop_collection() + class Another(Document): + + name = StringField() + + def __unicode__(self): + return self.name + + @classmethod + def pre_delete(cls, sender, document, **kwargs): + signal_output.append('pre_delete signal, %s' % document) + + @classmethod + def post_delete(cls, sender, document, **kwargs): + signal_output.append('post_delete signal, %s' % document) + + self.Another = Another + Another.drop_collection() + class ExplicitId(Document): id = IntField(primary_key=True) @@ -121,6 +139,9 @@ class SignalTests(unittest.TestCase): signals.pre_bulk_insert.connect(Author.pre_bulk_insert, sender=Author) signals.post_bulk_insert.connect(Author.post_bulk_insert, sender=Author) + signals.pre_delete.connect(Another.pre_delete, sender=Another) + 
signals.post_delete.connect(Another.post_delete, sender=Another) + signals.post_save.connect(ExplicitId.post_save, sender=ExplicitId) def tearDown(self): @@ -134,6 +155,9 @@ class SignalTests(unittest.TestCase): signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert) signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert) + signals.post_delete.disconnect(self.Another.post_delete) + signals.pre_delete.disconnect(self.Another.pre_delete) + signals.post_save.disconnect(self.ExplicitId.post_save) # Check that all our signals got disconnected properly. @@ -216,7 +240,14 @@ class SignalTests(unittest.TestCase): "Not loaded", ]) - self.Author.objects.delete() + def test_queryset_delete_signals(self): + """ Queryset delete should throw some signals. """ + + self.Another(name='Bill Shakespeare').save() + self.assertEqual(self.get_signal_output(self.Another.objects.delete), [ + 'pre_delete signal, Bill Shakespeare', + 'post_delete signal, Bill Shakespeare', + ]) def test_signals_with_explicit_doc_ids(self): """ Model saves must have a created flag the first time.""" From eba81e368b8ca2377b85f2ac2ca0f6bd51a5e4a9 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Tue, 4 Jun 2013 15:32:23 -0700 Subject: [PATCH 347/464] dont use $in for _cls queries with a single subclass --- mongoengine/queryset/queryset.py | 5 ++++- tests/queryset/queryset.py | 28 ++++++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 5077f89..dc5fab4 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -71,7 +71,10 @@ class QuerySet(object): # If inheritance is allowed, only return instances and instances of # subclasses of the class being used if document._meta.get('allow_inheritance') is True: - self._initial_query = {"_cls": {"$in": self._document._subclasses}} + if len(self._document._subclasses) == 1: + self._initial_query = {"_cls": 
self._document._subclasses[0]} + else: + self._initial_query = {"_cls": {"$in": self._document._subclasses}} self._loaded_fields = QueryFieldList(always_include=['_cls']) self._cursor_obj = None self._limit = None diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 507408d..07ddf2d 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3392,6 +3392,34 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(B.objects.get(a=a).a, a) self.assertEqual(B.objects.get(a=a.id).a, a) + def test_cls_query_in_subclassed_docs(self): + + class Animal(Document): + name = StringField() + + meta = { + 'allow_inheritance': True + } + + class Dog(Animal): + pass + + class Cat(Animal): + pass + + self.assertEqual(Animal.objects(name='Charlie')._query, { + 'name': 'Charlie', + '_cls': { '$in': ('Animal', 'Animal.Dog', 'Animal.Cat') } + }) + self.assertEqual(Dog.objects(name='Charlie')._query, { + 'name': 'Charlie', + '_cls': 'Animal.Dog' + }) + self.assertEqual(Cat.objects(name='Charlie')._query, { + 'name': 'Charlie', + '_cls': 'Animal.Cat' + }) + if __name__ == '__main__': unittest.main() From 27e8aa9c6815d1e514e003a7f7b32e98425d0d03 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 5 Jun 2013 09:30:01 +0000 Subject: [PATCH 348/464] Added comment about why temp debugging exists --- mongoengine/context_managers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index a5e2524..db0830d 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -190,6 +190,7 @@ class query_counter(object): def __eq__(self, value): """ == Compare querycounter. 
""" counter = self._get_count() + # Temp debugging to try and understand intermittent travis-ci failures if value != counter: print [x for x in self.db.system.profile.find()] return value == counter From 940dfff625774aeb443434e23628279f5fbbc52e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 5 Jun 2013 09:49:26 +0000 Subject: [PATCH 349/464] Code cleanup --- mongoengine/queryset/queryset.py | 1 - 1 file changed, 1 deletion(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index dc5fab4..7adfa65 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -185,7 +185,6 @@ class QuerySet(object): try: queryset._cursor_obj = queryset._cursor[key] queryset._skip, queryset._limit = key.start, key.stop - queryset._limit if key.start and key.stop: queryset._limit = key.stop - key.start except IndexError, err: From 1a54dad643b562539cc539f84801802831b5634b Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 5 Jun 2013 10:42:41 +0000 Subject: [PATCH 350/464] Filter out index scan for pymongo cache --- mongoengine/context_managers.py | 6 ++---- tests/queryset/queryset.py | 7 +++---- 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index db0830d..13ed100 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -190,9 +190,6 @@ class query_counter(object): def __eq__(self, value): """ == Compare querycounter. """ counter = self._get_count() - # Temp debugging to try and understand intermittent travis-ci failures - if value != counter: - print [x for x in self.db.system.profile.find()] return value == counter def __ne__(self, value): @@ -225,6 +222,7 @@ class query_counter(object): def _get_count(self): """ Get the number of queries. 
""" - count = self.db.system.profile.find().count() - self.counter + ignore_query = {"ns": {"$ne": "%s.system.indexes" % self.db.name}} + count = self.db.system.profile.find(ignore_query).count() - self.counter self.counter += 1 return count diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 07ddf2d..21df22c 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -631,14 +631,13 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(q, 1) # 1 for the insert Blog.drop_collection() + Blog.ensure_indexes() + with query_counter() as q: self.assertEqual(q, 0) - Blog.ensure_indexes() - self.assertEqual(q, 1) - Blog.objects.insert(blogs) - self.assertEqual(q, 3) # 1 for insert, and 1 for in bulk fetch (3 in total) + self.assertEqual(q, 2) # 1 for insert, and 1 for in bulk fetch Blog.drop_collection() From ce44843e27605a096d22ad6fd086ba616e7aab5f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 5 Jun 2013 11:11:02 +0000 Subject: [PATCH 351/464] Doc fix for #340 --- docs/guide/querying.rst | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index f1b6470..1350130 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -15,11 +15,8 @@ fetch documents from the database:: .. note:: - Once the iteration finishes (when :class:`StopIteration` is raised), - :meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the - :class:`~mongoengine.queryset.QuerySet` may be iterated over again. The - results of the first iteration are *not* cached, so the database will be hit - each time the :class:`~mongoengine.queryset.QuerySet` is iterated over. + As of MongoEngine 0.8 the querysets utilise a local cache. So iterating + it multiple times will only cause a single query. 
Filtering queries ================= From a246154961b4625d140ea4b9cc619302abfaee6c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 5 Jun 2013 11:31:13 +0000 Subject: [PATCH 352/464] Fixed hashing of EmbeddedDocuments (#348) --- docs/changelog.rst | 1 + mongoengine/base/document.py | 2 +- tests/document/instance.py | 8 ++++++++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index df24a5f..b61a06d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.8.2 ================ +- Fixed hashing of EmbeddedDocuments (#348) - Added lock when calling doc.Delete() for when signals have no sender (#350) - Reload forces read preference to be PRIMARY (#355) - Querysets are now lest restrictive when querying duplicate fields (#332, #333) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index e2944fb..ca154a2 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -215,7 +215,7 @@ class BaseDocument(object): return not self.__eq__(other) def __hash__(self): - if self.pk is None: + if getattr(self, 'pk', None) is None: # For new object return super(BaseDocument, self).__hash__() else: diff --git a/tests/document/instance.py b/tests/document/instance.py index cdc6fe0..f29cec2 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -1705,6 +1705,14 @@ class InstanceTest(unittest.TestCase): self.assertTrue(u1 in all_user_set) + def test_embedded_document_hash(self): + """Test embedded document can be hashed + """ + class User(EmbeddedDocument): + pass + + hash(User()) + def test_picklable(self): pickle_doc = PickleTest(number=1, string="One", lists=['1', '2']) From e5648a4af96d0681e3fe6b0a7657b86d3ab8ee34 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 5 Jun 2013 11:45:08 +0000 Subject: [PATCH 353/464] ImageFields now include PIL error messages if invalid error (#353) --- docs/changelog.rst | 1 + 
mongoengine/fields.py | 4 ++-- tests/fields/file_tests.py | 11 +++++++++++ 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b61a06d..55cae98 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.8.2 ================ +- ImageFields now include PIL error messages if invalid error (#353) - Fixed hashing of EmbeddedDocuments (#348) - Added lock when calling doc.Delete() for when signals have no sender (#350) - Reload forces read preference to be PRIMARY (#355) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 9bc18e0..451f7ac 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1259,8 +1259,8 @@ class ImageGridFsProxy(GridFSProxy): try: img = Image.open(file_obj) img_format = img.format - except: - raise ValidationError('Invalid image') + except Exception, e: + raise ValidationError('Invalid image: %s' % e) if (field.size and (img.size[0] > field.size['width'] or img.size[1] > field.size['height'])): diff --git a/tests/fields/file_tests.py b/tests/fields/file_tests.py index 5bcc3a2..dfef9ee 100644 --- a/tests/fields/file_tests.py +++ b/tests/fields/file_tests.py @@ -269,6 +269,17 @@ class FileTest(unittest.TestCase): TestImage.drop_collection() + with tempfile.TemporaryFile() as f: + f.write(b("Hello World!")) + f.flush() + + t = TestImage() + try: + t.image.put(f) + self.fail("Should have raised an invalidation error") + except ValidationError, e: + self.assertEquals("%s" % e, "Invalid image: cannot identify image file") + t = TestImage() t.image.put(open(TEST_IMAGE_PATH, 'rb')) t.save() From eb1df23e68ae88c10b44b6af45c77d38e61bee44 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 5 Jun 2013 11:50:26 +0000 Subject: [PATCH 354/464] Updated AUTHORS (#340, #348, #353) --- AUTHORS | 3 +++ 1 file changed, 3 insertions(+) diff --git a/AUTHORS b/AUTHORS index 3176238..4977f73 100644 --- a/AUTHORS +++ b/AUTHORS @@ -164,3 +164,6 @@ that much 
better: * Ryan Witt (https://github.com/ryanwitt) * Jiequan (https://github.com/Jiequan) * hensom (https://github.com/hensom) + * zhy0216 (https://github.com/zhy0216) + * istinspring (https://github.com/istinspring) + * Massimo Santini (https://github.com/mapio) From f8904a5504c74eb20b9b54f55eae8db569ae3e34 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 5 Jun 2013 12:14:22 +0000 Subject: [PATCH 355/464] Explicitly set w:1 if None in save --- mongoengine/document.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 5edfc81..563f57a 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -202,7 +202,7 @@ class Document(BaseDocument): self.validate(clean=clean) if not write_concern: - write_concern = {} + write_concern = {"w": 1} doc = self.to_mongo() From 5cb281223149a34d9446dedfa0d051ce1c7eb1a7 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 5 Jun 2013 13:03:15 +0000 Subject: [PATCH 356/464] Reverting Fixed hashing of EmbeddedDocuments (#348) --- docs/changelog.rst | 1 - mongoengine/document.py | 5 +++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 55cae98..8ccd395 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,7 +6,6 @@ Changelog Changes in 0.8.2 ================ - ImageFields now include PIL error messages if invalid error (#353) -- Fixed hashing of EmbeddedDocuments (#348) - Added lock when calling doc.Delete() for when signals have no sender (#350) - Reload forces read preference to be PRIMARY (#355) - Querysets are now lest restrictive when querying duplicate fields (#332, #333) diff --git a/mongoengine/document.py b/mongoengine/document.py index 563f57a..585fcf7 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -1,9 +1,11 @@ import warnings +import hashlib import pymongo import re from pymongo.read_preferences import ReadPreference +from bson import ObjectId from 
bson.dbref import DBRef from mongoengine import signals from mongoengine.base import (DocumentMetaclass, TopLevelDocumentMetaclass, @@ -53,6 +55,9 @@ class EmbeddedDocument(BaseDocument): return self._data == other._data return False + def __ne__(self, other): + return not self.__eq__(other) + class Document(BaseDocument): """The base class used for defining the structure and properties of From c3a065dd3322d0642b325ae812dda79325e37f29 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 5 Jun 2013 13:44:21 +0000 Subject: [PATCH 357/464] Removing old test re: #348 --- tests/document/instance.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/tests/document/instance.py b/tests/document/instance.py index f29cec2..cdc6fe0 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -1705,14 +1705,6 @@ class InstanceTest(unittest.TestCase): self.assertTrue(u1 in all_user_set) - def test_embedded_document_hash(self): - """Test embedded document can be hashed - """ - class User(EmbeddedDocument): - pass - - hash(User()) - def test_picklable(self): pickle_doc = PickleTest(number=1, string="One", lists=['1', '2']) From ad15781d8f1b06562ab4fdcf255b60805952387e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 6 Jun 2013 13:31:52 +0000 Subject: [PATCH 358/464] Fixed amibiguity and differing behaviour regarding field defaults (#349) Now field defaults are king, unsetting or setting to None on a field with a default means the default is reapplied. 
--- AUTHORS | 1 + docs/apireference.rst | 1 + docs/changelog.rst | 1 + mongoengine/base/fields.py | 38 ++++++-- mongoengine/document.py | 2 +- mongoengine/queryset/queryset.py | 6 +- tests/fields/fields.py | 144 ++++++++++++++++++++++++++----- 7 files changed, 160 insertions(+), 33 deletions(-) diff --git a/AUTHORS b/AUTHORS index 4977f73..4caed40 100644 --- a/AUTHORS +++ b/AUTHORS @@ -167,3 +167,4 @@ that much better: * zhy0216 (https://github.com/zhy0216) * istinspring (https://github.com/istinspring) * Massimo Santini (https://github.com/mapio) + * Nigel McNie (https://github.com/nigelmcnie) diff --git a/docs/apireference.rst b/docs/apireference.rst index 37370e2..0fa410e 100644 --- a/docs/apireference.rst +++ b/docs/apireference.rst @@ -54,6 +54,7 @@ Querying Fields ====== +.. autoclass:: mongoengine.base.fields.BaseField .. autoclass:: mongoengine.fields.StringField .. autoclass:: mongoengine.fields.URLField .. autoclass:: mongoengine.fields.EmailField diff --git a/docs/changelog.rst b/docs/changelog.rst index 8ccd395..bce26c5 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.8.2 ================ +- Fixed amibiguity and differing behaviour regarding field defaults (#349) - ImageFields now include PIL error messages if invalid error (#353) - Added lock when calling doc.Delete() for when signals have no sender (#350) - Reload forces read preference to be PRIMARY (#355) diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index 35075ec..e4c88a7 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -36,6 +36,29 @@ class BaseField(object): unique=False, unique_with=None, primary_key=False, validation=None, choices=None, verbose_name=None, help_text=None): + """ + :param db_field: The database field to store this field in + (defaults to the name of the field) + :param name: Depreciated - use db_field + :param required: If the field is required. 
Whether it has to have a + value or not. Defaults to False. + :param default: (optional) The default value for this field if no value + has been set (or if the value has been unset). It Can be a + callable. + :param unique: Is the field value unique or not. Defaults to False. + :param unique_with: (optional) The other field this field should be + unique with. + :param primary_key: Mark this field as the primary key. Defaults to False. + :param validation: (optional) A callable to validate the value of the + field. Generally this is deprecated in favour of the + `FIELD.validate` method + :param choices: (optional) The valid choices + :param verbose_name: (optional) The verbose name for the field. + Designed to be human readable and is often used when generating + model forms from the document model. + :param help_text: (optional) The help text for this field and is often + used when generating model forms from the document model. + """ self.db_field = (db_field or name) if not primary_key else '_id' if name: msg = "Fields' 'name' attribute deprecated in favour of 'db_field'" @@ -65,14 +88,9 @@ class BaseField(object): if instance is None: # Document class being used rather than a document object return self - # Get value from document instance if available, if not use default - value = instance._data.get(self.name) - if value is None: - value = self.default - # Allow callable default values - if callable(value): - value = value() + # Get value from document instance if available + value = instance._data.get(self.name) EmbeddedDocument = _import_class('EmbeddedDocument') if isinstance(value, EmbeddedDocument) and value._instance is None: @@ -82,9 +100,11 @@ class BaseField(object): def __set__(self, instance, value): """Descriptor for assigning a value to a field in a document. 
""" - if value is None: + + # If setting to None and theres a default + # Then set the value to the default value + if value is None and self.default is not None: value = self.default - # Allow callable default values if callable(value): value = value() diff --git a/mongoengine/document.py b/mongoengine/document.py index 585fcf7..8e7ccc2 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -206,7 +206,7 @@ class Document(BaseDocument): if validate: self.validate(clean=clean) - if not write_concern: + if write_concern is None: write_concern = {"w": 1} doc = self.to_mongo() diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 7adfa65..d58a13b 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -348,7 +348,7 @@ class QuerySet(object): """ Document = _import_class('Document') - if not write_concern: + if write_concern is None: write_concern = {} docs = doc_or_docs @@ -424,7 +424,7 @@ class QuerySet(object): queryset = self.clone() doc = queryset._document - if not write_concern: + if write_concern is None: write_concern = {} # Handle deletes where skips or limits have been applied or @@ -490,7 +490,7 @@ class QuerySet(object): if not update and not upsert: raise OperationError("No update parameters, would remove data") - if not write_concern: + if write_concern is None: write_concern = {} queryset = self.clone() diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 5118437..3e48a21 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -34,33 +34,137 @@ class FieldTest(unittest.TestCase): self.db.drop_collection('fs.files') self.db.drop_collection('fs.chunks') - def test_default_values(self): + def test_default_values_nothing_set(self): """Ensure that default field values are used when creating a document. 
""" class Person(Document): name = StringField() - age = IntField(default=30, help_text="Your real age") - userid = StringField(default=lambda: 'test', verbose_name="User Identity") - - person = Person(name='Test Person') - self.assertEqual(person._data['age'], 30) - self.assertEqual(person._data['userid'], 'test') - self.assertEqual(person._fields['name'].help_text, None) - self.assertEqual(person._fields['age'].help_text, "Your real age") - self.assertEqual(person._fields['userid'].verbose_name, "User Identity") - - class Person2(Document): + age = IntField(default=30, required=False) + userid = StringField(default=lambda: 'test', required=True) created = DateTimeField(default=datetime.datetime.utcnow) - person = Person2() - date1 = person.created - date2 = person.created - self.assertEqual(date1, date2) + person = Person(name="Ross") - person = Person2(created=None) - date1 = person.created - date2 = person.created - self.assertEqual(date1, date2) + # Confirm saving now would store values + data_to_be_saved = sorted(person.to_mongo().keys()) + self.assertEqual(data_to_be_saved, ['age', 'created', 'name', 'userid']) + + self.assertTrue(person.validate() is None) + + self.assertEqual(person.name, person.name) + self.assertEqual(person.age, person.age) + self.assertEqual(person.userid, person.userid) + self.assertEqual(person.created, person.created) + + self.assertEqual(person._data['name'], person.name) + self.assertEqual(person._data['age'], person.age) + self.assertEqual(person._data['userid'], person.userid) + self.assertEqual(person._data['created'], person.created) + + # Confirm introspection changes nothing + data_to_be_saved = sorted(person.to_mongo().keys()) + self.assertEqual(data_to_be_saved, ['age', 'created', 'name', 'userid']) + + def test_default_values_set_to_None(self): + """Ensure that default field values are used when creating a document. 
+ """ + class Person(Document): + name = StringField() + age = IntField(default=30, required=False) + userid = StringField(default=lambda: 'test', required=True) + created = DateTimeField(default=datetime.datetime.utcnow) + + # Trying setting values to None + person = Person(name=None, age=None, userid=None, created=None) + + # Confirm saving now would store values + data_to_be_saved = sorted(person.to_mongo().keys()) + self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + + self.assertTrue(person.validate() is None) + + self.assertEqual(person.name, person.name) + self.assertEqual(person.age, person.age) + self.assertEqual(person.userid, person.userid) + self.assertEqual(person.created, person.created) + + self.assertEqual(person._data['name'], person.name) + self.assertEqual(person._data['age'], person.age) + self.assertEqual(person._data['userid'], person.userid) + self.assertEqual(person._data['created'], person.created) + + # Confirm introspection changes nothing + data_to_be_saved = sorted(person.to_mongo().keys()) + self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + + def test_default_values_when_setting_to_None(self): + """Ensure that default field values are used when creating a document. 
+ """ + class Person(Document): + name = StringField() + age = IntField(default=30, required=False) + userid = StringField(default=lambda: 'test', required=True) + created = DateTimeField(default=datetime.datetime.utcnow) + + person = Person() + person.name = None + person.age = None + person.userid = None + person.created = None + + # Confirm saving now would store values + data_to_be_saved = sorted(person.to_mongo().keys()) + self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + + self.assertTrue(person.validate() is None) + + self.assertEqual(person.name, person.name) + self.assertEqual(person.age, person.age) + self.assertEqual(person.userid, person.userid) + self.assertEqual(person.created, person.created) + + self.assertEqual(person._data['name'], person.name) + self.assertEqual(person._data['age'], person.age) + self.assertEqual(person._data['userid'], person.userid) + self.assertEqual(person._data['created'], person.created) + + # Confirm introspection changes nothing + data_to_be_saved = sorted(person.to_mongo().keys()) + self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + + def test_default_values_when_deleting_value(self): + """Ensure that default field values are used when creating a document. 
+ """ + class Person(Document): + name = StringField() + age = IntField(default=30, required=False) + userid = StringField(default=lambda: 'test', required=True) + created = DateTimeField(default=datetime.datetime.utcnow) + + person = Person(name="Ross") + del person.name + del person.age + del person.userid + del person.created + + data_to_be_saved = sorted(person.to_mongo().keys()) + self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + + self.assertTrue(person.validate() is None) + + self.assertEqual(person.name, person.name) + self.assertEqual(person.age, person.age) + self.assertEqual(person.userid, person.userid) + self.assertEqual(person.created, person.created) + + self.assertEqual(person._data['name'], person.name) + self.assertEqual(person._data['age'], person.age) + self.assertEqual(person._data['userid'], person.userid) + self.assertEqual(person._data['created'], person.created) + + # Confirm introspection changes nothing + data_to_be_saved = sorted(person.to_mongo().keys()) + self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) def test_required_values(self): """Ensure that required field constraints are enforced. 
From dc3b09c21879332b35bfc0d809226fe966b99016 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 6 Jun 2013 16:36:17 +0000 Subject: [PATCH 359/464] Improved cascading saves write performance (#361) --- docs/changelog.rst | 1 + mongoengine/base/metaclasses.py | 8 +++ mongoengine/document.py | 11 ++-- tests/document/instance.py | 106 ++++++++++++++++++++++++++++++++ 4 files changed, 122 insertions(+), 4 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index bce26c5..c9cdda5 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.8.2 ================ +- Improved cascading saves write performance (#361) - Fixed amibiguity and differing behaviour regarding field defaults (#349) - ImageFields now include PIL error messages if invalid error (#353) - Added lock when calling doc.Delete() for when signals have no sender (#350) diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index 444d9a2..651228d 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -97,6 +97,14 @@ class DocumentMetaclass(type): attrs['_reverse_db_field_map'] = dict( (v, k) for k, v in attrs['_db_field_map'].iteritems()) + # Set cascade flag if not set + if 'cascade' not in attrs['_meta']: + ReferenceField = _import_class('ReferenceField') + GenericReferenceField = _import_class('GenericReferenceField') + cascade = any([isinstance(x, (ReferenceField, GenericReferenceField)) + for x in doc_fields.values()]) + attrs['_meta']['cascade'] = cascade + # # Set document hierarchy # diff --git a/mongoengine/document.py b/mongoengine/document.py index 8e7ccc2..2bdecb7 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -8,6 +8,7 @@ from pymongo.read_preferences import ReadPreference from bson import ObjectId from bson.dbref import DBRef from mongoengine import signals +from mongoengine.common import _import_class from mongoengine.base import (DocumentMetaclass, 
TopLevelDocumentMetaclass, BaseDocument, BaseDict, BaseList, ALLOW_INHERITANCE, get_document) @@ -284,15 +285,17 @@ class Document(BaseDocument): def cascade_save(self, *args, **kwargs): """Recursively saves any references / generic references on an objects""" - import fields _refs = kwargs.get('_refs', []) or [] + ReferenceField = _import_class('ReferenceField') + GenericReferenceField = _import_class('GenericReferenceField') + for name, cls in self._fields.items(): - if not isinstance(cls, (fields.ReferenceField, - fields.GenericReferenceField)): + if not isinstance(cls, (ReferenceField, + GenericReferenceField)): continue - ref = getattr(self, name) + ref = self._data.get(name) if not ref or isinstance(ref, DBRef): continue diff --git a/tests/document/instance.py b/tests/document/instance.py index cdc6fe0..f4abd02 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -646,6 +646,22 @@ class InstanceTest(unittest.TestCase): self.assertEqual(b.picture, b.bar.picture, b.bar.bar.picture) + def test_setting_cascade(self): + + class ForcedCascade(Document): + meta = {'cascade': True} + + class Feed(Document): + name = StringField() + + class Subscription(Document): + name = StringField() + feed = ReferenceField(Feed) + + self.assertTrue(ForcedCascade._meta['cascade']) + self.assertTrue(Subscription._meta['cascade']) + self.assertFalse(Feed._meta['cascade']) + def test_save_cascades(self): class Person(Document): @@ -1018,6 +1034,96 @@ class InstanceTest(unittest.TestCase): self.assertEqual(person.age, 21) self.assertEqual(person.active, False) + def test_query_count_when_saving(self): + """Ensure references don't cause extra fetches when saving""" + class Organization(Document): + name = StringField() + + class User(Document): + name = StringField() + orgs = ListField(ReferenceField('Organization')) + + class Feed(Document): + name = StringField() + + class UserSubscription(Document): + name = StringField() + user = ReferenceField(User) + feed = 
ReferenceField(Feed) + + Organization.drop_collection() + User.drop_collection() + Feed.drop_collection() + UserSubscription.drop_collection() + + self.assertTrue(UserSubscription._meta['cascade']) + + o1 = Organization(name="o1").save() + o2 = Organization(name="o2").save() + + u1 = User(name="Ross", orgs=[o1, o2]).save() + f1 = Feed(name="MongoEngine").save() + + sub = UserSubscription(user=u1, feed=f1).save() + + user = User.objects.first() + # Even if stored as ObjectId's internally mongoengine uses DBRefs + # As ObjectId's aren't automatically derefenced + self.assertTrue(isinstance(user._data['orgs'][0], DBRef)) + self.assertTrue(isinstance(user.orgs[0], Organization)) + self.assertTrue(isinstance(user._data['orgs'][0], Organization)) + + # Changing a value + with query_counter() as q: + self.assertEqual(q, 0) + sub = UserSubscription.objects.first() + self.assertEqual(q, 1) + sub.name = "Test Sub" + sub.save() + self.assertEqual(q, 2) + + # Changing a value that will cascade + with query_counter() as q: + self.assertEqual(q, 0) + sub = UserSubscription.objects.first() + self.assertEqual(q, 1) + sub.user.name = "Test" + self.assertEqual(q, 2) + sub.save() + self.assertEqual(q, 3) + + # Changing a value and one that will cascade + with query_counter() as q: + self.assertEqual(q, 0) + sub = UserSubscription.objects.first() + sub.name = "Test Sub 2" + self.assertEqual(q, 1) + sub.user.name = "Test 2" + self.assertEqual(q, 2) + sub.save() + self.assertEqual(q, 4) # One for the UserSub and one for the User + + # Saving with just the refs + with query_counter() as q: + self.assertEqual(q, 0) + sub = UserSubscription(user=u1.pk, feed=f1.pk) + sub.validate() + self.assertEqual(q, 0) # Check no change + sub.save() + self.assertEqual(q, 1) + + # Saving new objects + with query_counter() as q: + self.assertEqual(q, 0) + user = User.objects.first() + self.assertEqual(q, 1) + feed = Feed.objects.first() + self.assertEqual(q, 2) + sub = UserSubscription(user=user, 
feed=feed) + self.assertEqual(q, 2) # Check no change + sub.save() + self.assertEqual(q, 3) + def test_set_unset_one_operation(self): """Ensure that $set and $unset actions are performed in the same operation. From 06f5dc6ad74c7ffc708c2da73df6267b451eba0f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 6 Jun 2013 16:44:43 +0000 Subject: [PATCH 360/464] Docs update --- docs/guide/defining-documents.rst | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index b5ba2bf..ed9c142 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -100,9 +100,6 @@ arguments can be set on all fields: :attr:`db_field` (Default: None) The MongoDB field name. -:attr:`name` (Default: None) - The mongoengine field name. - :attr:`required` (Default: False) If set to True and the field is not set on the document instance, a :class:`~mongoengine.ValidationError` will be raised when the document is @@ -129,6 +126,7 @@ arguments can be set on all fields: # instead to just an object values = ListField(IntField(), default=[1,2,3]) + .. note:: Unsetting a field with a default value will revert back to the default. 
:attr:`unique` (Default: False) When True, no documents in the collection will have the same value for this From 9f3394dc6d65c0eab8179dc78350f3f48d004b35 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 6 Jun 2013 17:19:19 +0000 Subject: [PATCH 361/464] Added testcase for ListFields with just pks (#361) --- tests/document/instance.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/document/instance.py b/tests/document/instance.py index f4abd02..35338ab 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -9,6 +9,7 @@ import unittest import uuid from datetime import datetime +from bson import DBRef from tests.fixtures import PickleEmbedded, PickleTest, PickleSignalsTest from mongoengine import * @@ -1107,11 +1108,16 @@ class InstanceTest(unittest.TestCase): with query_counter() as q: self.assertEqual(q, 0) sub = UserSubscription(user=u1.pk, feed=f1.pk) - sub.validate() - self.assertEqual(q, 0) # Check no change + self.assertEqual(q, 0) sub.save() self.assertEqual(q, 1) + # Saving with just the refs on a ListField + with query_counter() as q: + self.assertEqual(q, 0) + User(name="Bob", orgs=[o1.pk, o2.pk]).save() + self.assertEqual(q, 1) + # Saving new objects with query_counter() as q: self.assertEqual(q, 0) From 542049f2526d393605d58a688698f04d2d70a46c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 6 Jun 2013 17:31:50 +0000 Subject: [PATCH 362/464] Trying to fix annoying python-dateutil bug --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index b7c56a0..f6870a7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,6 +15,7 @@ install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install django==$DJANGO --use-mirrors ; true; fi + - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; 
then sudo apt-get install $TRAVIS_PYTHON_VERSION-dateutil ; true; fi - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi - python setup.py install From 8aae4f0ed085c3c4d90d8df90c8040aa9d33fefb Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 6 Jun 2013 17:34:34 +0000 Subject: [PATCH 363/464] Trying to stabalise the build --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index f6870a7..173f739 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,7 +15,7 @@ install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install django==$DJANGO --use-mirrors ; true; fi - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install $TRAVIS_PYTHON_VERSION-dateutil ; true; fi + - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install python-dateutil ; true; fi - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi - python setup.py install From a7631223a38879f4e59fd4050acf446dda0a4916 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 6 Jun 2013 17:58:10 +0000 Subject: [PATCH 364/464] Fixed Datastructures so instances are a Document or EmbeddedDocument (#363) --- docs/changelog.rst | 1 + mongoengine/base/datastructures.py | 12 ++++++++++-- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index c9cdda5..c35cd9c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.8.2 ================ +- Fixed Datastructures so 
instances are a Document or EmbeddedDocument (#363) - Improved cascading saves write performance (#361) - Fixed amibiguity and differing behaviour regarding field defaults (#349) - ImageFields now include PIL error messages if invalid error (#353) diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index c750b5b..adcd8d0 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -13,7 +13,11 @@ class BaseDict(dict): _name = None def __init__(self, dict_items, instance, name): - self._instance = weakref.proxy(instance) + Document = _import_class('Document') + EmbeddedDocument = _import_class('EmbeddedDocument') + + if isinstance(instance, (Document, EmbeddedDocument)): + self._instance = weakref.proxy(instance) self._name = name return super(BaseDict, self).__init__(dict_items) @@ -80,7 +84,11 @@ class BaseList(list): _name = None def __init__(self, list_items, instance, name): - self._instance = weakref.proxy(instance) + Document = _import_class('Document') + EmbeddedDocument = _import_class('EmbeddedDocument') + + if isinstance(instance, (Document, EmbeddedDocument)): + self._instance = weakref.proxy(instance) self._name = name return super(BaseList, self).__init__(list_items) From f3af76e38cb760566cdaf488defe55876a0b8507 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 6 Jun 2013 17:59:07 +0000 Subject: [PATCH 365/464] Added ygbourhis to AUTHORS (#363) --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 4caed40..72b1124 100644 --- a/AUTHORS +++ b/AUTHORS @@ -168,3 +168,4 @@ that much better: * istinspring (https://github.com/istinspring) * Massimo Santini (https://github.com/mapio) * Nigel McNie (https://github.com/nigelmcnie) + * ygbourhis (https://github.com/ygbourhis) \ No newline at end of file From d935b5764a6c93e78750bbefee56f115e287591e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 6 Jun 2013 18:02:06 +0000 Subject: [PATCH 366/464] apt only had 
an ancient version of python-dateutil *sigh* --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 173f739..b7c56a0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,7 +15,6 @@ install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install django==$DJANGO --use-mirrors ; true; fi - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install python-dateutil ; true; fi - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi - python setup.py install From 7451244cd27f83a82c4a3e65a767679f15c4af5e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 6 Jun 2013 21:04:54 +0000 Subject: [PATCH 367/464] Fixed cascading saves which weren't turned off as planned (#291) --- docs/changelog.rst | 1 + mongoengine/base/metaclasses.py | 8 -------- mongoengine/document.py | 18 ++++++++++------- tests/document/instance.py | 36 +++++++++------------------------ 4 files changed, 22 insertions(+), 41 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index c35cd9c..a046847 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.8.2 ================ +- Fixed cascading saves which weren't turned off as planned (#291) - Fixed Datastructures so instances are a Document or EmbeddedDocument (#363) - Improved cascading saves write performance (#361) - Fixed amibiguity and differing behaviour regarding field defaults (#349) diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index 651228d..444d9a2 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -97,14 +97,6 @@ class 
DocumentMetaclass(type): attrs['_reverse_db_field_map'] = dict( (v, k) for k, v in attrs['_db_field_map'].iteritems()) - # Set cascade flag if not set - if 'cascade' not in attrs['_meta']: - ReferenceField = _import_class('ReferenceField') - GenericReferenceField = _import_class('GenericReferenceField') - cascade = any([isinstance(x, (ReferenceField, GenericReferenceField)) - for x in doc_fields.values()]) - attrs['_meta']['cascade'] = cascade - # # Set document hierarchy # diff --git a/mongoengine/document.py b/mongoengine/document.py index 2bdecb7..8b152d5 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -186,8 +186,8 @@ class Document(BaseDocument): will force an fsync on the primary server. :param cascade: Sets the flag for cascading saves. You can set a default by setting "cascade" in the document __meta__ - :param cascade_kwargs: optional kwargs dictionary to be passed throw - to cascading saves + :param cascade_kwargs: (optional) kwargs dictionary to be passed throw + to cascading saves. Implies ``cascade=True``. :param _refs: A list of processed references used in cascading saves .. versionchanged:: 0.5 @@ -196,11 +196,13 @@ class Document(BaseDocument): :class:`~bson.dbref.DBRef` objects that have changes are saved as well. .. versionchanged:: 0.6 - Cascade saves are optional = defaults to True, if you want + Added cascading saves + .. versionchanged:: 0.8 + Cascade saves are optional and default to False. If you want fine grain control then you can turn off using document - meta['cascade'] = False Also you can pass different kwargs to + meta['cascade'] = True. Also you can pass different kwargs to the cascade save using cascade_kwargs which overwrites the - existing kwargs with custom values + existing kwargs with custom values. 
""" signals.pre_save.send(self.__class__, document=self) @@ -251,8 +253,10 @@ class Document(BaseDocument): upsert=True, **write_concern) created = is_new_object(last_error) - cascade = (self._meta.get('cascade', True) - if cascade is None else cascade) + + if cascade is None: + cascade = self._meta.get('cascade', False) or cascade_kwargs is not None + if cascade: kwargs = { "force_insert": force_insert, diff --git a/tests/document/instance.py b/tests/document/instance.py index 35338ab..81734aa 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -647,22 +647,6 @@ class InstanceTest(unittest.TestCase): self.assertEqual(b.picture, b.bar.picture, b.bar.bar.picture) - def test_setting_cascade(self): - - class ForcedCascade(Document): - meta = {'cascade': True} - - class Feed(Document): - name = StringField() - - class Subscription(Document): - name = StringField() - feed = ReferenceField(Feed) - - self.assertTrue(ForcedCascade._meta['cascade']) - self.assertTrue(Subscription._meta['cascade']) - self.assertFalse(Feed._meta['cascade']) - def test_save_cascades(self): class Person(Document): @@ -681,7 +665,7 @@ class InstanceTest(unittest.TestCase): p = Person.objects(name="Wilson Jr").get() p.parent.name = "Daddy Wilson" - p.save() + p.save(cascade=True) p1.reload() self.assertEqual(p1.name, p.parent.name) @@ -700,14 +684,12 @@ class InstanceTest(unittest.TestCase): p2 = Person(name="Wilson Jr") p2.parent = p1 + p1.name = "Daddy Wilson" p2.save(force_insert=True, cascade_kwargs={"force_insert": False}) - p = Person.objects(name="Wilson Jr").get() - p.parent.name = "Daddy Wilson" - p.save() - p1.reload() - self.assertEqual(p1.name, p.parent.name) + p2.reload() + self.assertEqual(p1.name, p2.parent.name) def test_save_cascade_meta_false(self): @@ -782,6 +764,10 @@ class InstanceTest(unittest.TestCase): p.parent.name = "Daddy Wilson" p.save() + p1.reload() + self.assertNotEqual(p1.name, p.parent.name) + + p.save(cascade=True) p1.reload() 
self.assertEqual(p1.name, p.parent.name) @@ -1057,8 +1043,6 @@ class InstanceTest(unittest.TestCase): Feed.drop_collection() UserSubscription.drop_collection() - self.assertTrue(UserSubscription._meta['cascade']) - o1 = Organization(name="o1").save() o2 = Organization(name="o2").save() @@ -1090,7 +1074,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual(q, 1) sub.user.name = "Test" self.assertEqual(q, 2) - sub.save() + sub.save(cascade=True) self.assertEqual(q, 3) # Changing a value and one that will cascade @@ -1101,7 +1085,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual(q, 1) sub.user.name = "Test 2" self.assertEqual(q, 2) - sub.save() + sub.save(cascade=True) self.assertEqual(q, 4) # One for the UserSub and one for the User # Saving with just the refs From c2928d8a57460e60d4b26d5f25f965d40eb4e1a6 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Thu, 6 Jun 2013 17:16:03 -0700 Subject: [PATCH 368/464] list_indexes and compare_indexes class methods + unit tests --- mongoengine/document.py | 88 ++++++++++++++++++++++--- tests/document/class_methods.py | 111 ++++++++++++++++++++++++++++++++ 2 files changed, 191 insertions(+), 8 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 585fcf7..83f60ee 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -20,6 +20,19 @@ __all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument', 'InvalidCollectionError', 'NotUniqueError', 'MapReduceDocument') +def includes_cls(fields): + """ Helper function used for ensuring and comparing indexes + """ + + first_field = None + if len(fields): + if isinstance(fields[0], basestring): + first_field = fields[0] + elif isinstance(fields[0], (list, tuple)) and len(fields[0]): + first_field = fields[0][0] + return first_field == '_cls' + + class InvalidCollectionError(Exception): pass @@ -529,14 +542,6 @@ class Document(BaseDocument): # an extra index on _cls, as mongodb will use the existing # index to service queries 
against _cls cls_indexed = False - def includes_cls(fields): - first_field = None - if len(fields): - if isinstance(fields[0], basestring): - first_field = fields[0] - elif isinstance(fields[0], (list, tuple)) and len(fields[0]): - first_field = fields[0][0] - return first_field == '_cls' # Ensure document-defined indexes are created if cls._meta['index_specs']: @@ -557,6 +562,73 @@ class Document(BaseDocument): collection.ensure_index('_cls', background=background, **index_opts) + @classmethod + def list_indexes(cls, go_up=True, go_down=True): + """ Lists all of the indexes that should be created for given + collection. It includes all the indexes from super- and sub-classes. + """ + + if cls._meta.get('abstract'): + return [] + + indexes = [] + index_cls = cls._meta.get('index_cls', True) + + # Ensure document-defined indexes are created + if cls._meta['index_specs']: + index_spec = cls._meta['index_specs'] + for spec in index_spec: + spec = spec.copy() + fields = spec.pop('fields') + indexes.append(fields) + + # add all of the indexes from the base classes + if go_up: + for base_cls in cls.__bases__: + for index in base_cls.list_indexes(go_up=True, go_down=False): + if index not in indexes: + indexes.append(index) + + # add all of the indexes from subclasses + if go_down: + for subclass in cls.__subclasses__(): + for index in subclass.list_indexes(go_up=False, go_down=True): + if index not in indexes: + indexes.append(index) + + # finish up by appending _id, if needed + if go_up and go_down: + if [(u'_id', 1)] not in indexes: + indexes.append([(u'_id', 1)]) + if (index_cls and + cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True): + indexes.append([(u'_cls', 1)]) + + return indexes + + @classmethod + def compare_indexes(cls): + """ Compares the indexes defined in MongoEngine with the ones existing + in the database. Returns any missing/extra indexes. 
+ """ + + required = cls.list_indexes() + existing = [info['key'] for info in cls._get_collection().index_information().values()] + missing = [index for index in required if index not in existing] + extra = [index for index in existing if index not in required] + + # if { _cls: 1 } is missing, make sure it's *really* necessary + if [(u'_cls', 1)] in missing: + cls_obsolete = False + for index in existing: + if includes_cls(index) and index not in extra: + cls_obsolete = True + break + if cls_obsolete: + missing.remove([(u'_cls', 1)]) + + return {'missing': missing, 'extra': extra} + class DynamicDocument(Document): """A Dynamic Document class allowing flexible, expandable and uncontrolled diff --git a/tests/document/class_methods.py b/tests/document/class_methods.py index b2c7283..6bd2e3c 100644 --- a/tests/document/class_methods.py +++ b/tests/document/class_methods.py @@ -85,6 +85,117 @@ class ClassMethodsTest(unittest.TestCase): self.assertEqual(self.Person._meta['delete_rules'], {(Job, 'employee'): NULLIFY}) + def test_compare_indexes(self): + """ Ensure that the indexes are properly created and that + compare_indexes identifies the missing/extra indexes + """ + + class BlogPost(Document): + author = StringField() + title = StringField() + description = StringField() + tags = StringField() + + meta = { + 'indexes': [('author', 'title')] + } + + BlogPost.drop_collection() + + BlogPost.ensure_indexes() + self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) + + BlogPost.ensure_index(['author', 'description']) + self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('author', 1), ('description', 1)]] }) + + BlogPost._get_collection().drop_index('author_1_description_1') + self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) + + BlogPost._get_collection().drop_index('author_1_title_1') + self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('author', 1), ('title', 1)]], 'extra': [] }) + + def 
test_compare_indexes_inheritance(self): + """ Ensure that the indexes are properly created and that + compare_indexes identifies the missing/extra indexes for subclassed + documents (_cls included) + """ + + class BlogPost(Document): + author = StringField() + title = StringField() + description = StringField() + + meta = { + 'allow_inheritance': True + } + + class BlogPostWithTags(BlogPost): + tags = StringField() + tag_list = ListField(StringField()) + + meta = { + 'indexes': [('author', 'tags')] + } + + BlogPost.drop_collection() + + BlogPost.ensure_indexes() + BlogPostWithTags.ensure_indexes() + self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) + + BlogPostWithTags.ensure_index(['author', 'tag_list']) + self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]] }) + + BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1') + self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) + + BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1') + self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': [] }) + + def test_list_indexes_inheritance(self): + """ ensure that all of the indexes are listed regardless of the super- + or sub-class that we call it from + """ + + class BlogPost(Document): + author = StringField() + title = StringField() + description = StringField() + + meta = { + 'allow_inheritance': True + } + + class BlogPostWithTags(BlogPost): + tags = StringField() + + meta = { + 'indexes': [('author', 'tags')] + } + + class BlogPostWithTagsAndExtraText(BlogPostWithTags): + extra_text = StringField() + + meta = { + 'indexes': [('author', 'tags', 'extra_text')] + } + + BlogPost.drop_collection() + + BlogPost.ensure_indexes() + BlogPostWithTags.ensure_indexes() + BlogPostWithTagsAndExtraText.ensure_indexes() + + self.assertEqual(BlogPost.list_indexes(), + 
BlogPostWithTags.list_indexes()) + self.assertEqual(BlogPost.list_indexes(), + BlogPostWithTagsAndExtraText.list_indexes()) + print BlogPost.list_indexes() + self.assertEqual(BlogPost.list_indexes(), + [[('_cls', 1), ('author', 1), ('tags', 1)], + [('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)], + [(u'_id', 1)], [('_cls', 1)]]) + def test_register_delete_rule_inherited(self): class Vaccine(Document): From 305540f0fd1731e9dd232cb995ab457f9a0fc6f4 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Thu, 6 Jun 2013 17:21:27 -0700 Subject: [PATCH 369/464] better comment --- mongoengine/document.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 83f60ee..95ad0dc 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -596,7 +596,7 @@ class Document(BaseDocument): if index not in indexes: indexes.append(index) - # finish up by appending _id, if needed + # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed if go_up and go_down: if [(u'_id', 1)] not in indexes: indexes.append([(u'_id', 1)]) From a2457df45e0fcc0077dde4f7fe09891e44ad0635 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Thu, 6 Jun 2013 19:14:21 -0700 Subject: [PATCH 370/464] make sure to only search for indexes in base classes inheriting from TopLevelDocumentMetaclass --- mongoengine/document.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 95ad0dc..3b6df4f 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -585,9 +585,10 @@ class Document(BaseDocument): # add all of the indexes from the base classes if go_up: for base_cls in cls.__bases__: - for index in base_cls.list_indexes(go_up=True, go_down=False): - if index not in indexes: - indexes.append(index) + if isinstance(base_cls, TopLevelDocumentMetaclass): + for index in base_cls.list_indexes(go_up=True, go_down=False): + if index not in 
indexes: + indexes.append(index) # add all of the indexes from subclasses if go_down: From ba7101ff92588185c4fa3355351d28c57ede4f78 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Thu, 6 Jun 2013 22:22:43 -0700 Subject: [PATCH 371/464] list_indexes support for multiple inheritance --- mongoengine/document.py | 70 ++++++++++++++++++++------------- tests/document/class_methods.py | 38 +++++++++++++++++- tests/document/inheritance.py | 35 +++++++++++++++++ 3 files changed, 115 insertions(+), 28 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 3b6df4f..6345e6d 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -571,39 +571,55 @@ class Document(BaseDocument): if cls._meta.get('abstract'): return [] - indexes = [] - index_cls = cls._meta.get('index_cls', True) + # get all the base classes, subclasses and sieblings + classes = [] + def get_classes(cls): - # Ensure document-defined indexes are created - if cls._meta['index_specs']: - index_spec = cls._meta['index_specs'] - for spec in index_spec: - spec = spec.copy() - fields = spec.pop('fields') - indexes.append(fields) + if (cls not in classes and + isinstance(cls, TopLevelDocumentMetaclass)): + classes.append(cls) - # add all of the indexes from the base classes - if go_up: for base_cls in cls.__bases__: - if isinstance(base_cls, TopLevelDocumentMetaclass): - for index in base_cls.list_indexes(go_up=True, go_down=False): - if index not in indexes: - indexes.append(index) - - # add all of the indexes from subclasses - if go_down: + if (isinstance(base_cls, TopLevelDocumentMetaclass) and + base_cls != Document and + not base_cls._meta.get('abstract') and + base_cls._get_collection().full_name == cls._get_collection().full_name and + base_cls not in classes): + classes.append(base_cls) + get_classes(base_cls) for subclass in cls.__subclasses__(): - for index in subclass.list_indexes(go_up=False, go_down=True): - if index not in indexes: - indexes.append(index) + if 
(isinstance(base_cls, TopLevelDocumentMetaclass) and + subclass._get_collection().full_name == cls._get_collection().full_name and + subclass not in classes): + classes.append(subclass) + get_classes(subclass) + + get_classes(cls) + + # get the indexes spec for all of the gathered classes + def get_indexes_spec(cls): + indexes = [] + + if cls._meta['index_specs']: + index_spec = cls._meta['index_specs'] + for spec in index_spec: + spec = spec.copy() + fields = spec.pop('fields') + indexes.append(fields) + return indexes + + indexes = [] + for cls in classes: + for index in get_indexes_spec(cls): + if index not in indexes: + indexes.append(index) # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed - if go_up and go_down: - if [(u'_id', 1)] not in indexes: - indexes.append([(u'_id', 1)]) - if (index_cls and - cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True): - indexes.append([(u'_cls', 1)]) + if [(u'_id', 1)] not in indexes: + indexes.append([(u'_id', 1)]) + if (cls._meta.get('index_cls', True) and + cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True): + indexes.append([(u'_cls', 1)]) return indexes diff --git a/tests/document/class_methods.py b/tests/document/class_methods.py index 6bd2e3c..52e3794 100644 --- a/tests/document/class_methods.py +++ b/tests/document/class_methods.py @@ -152,6 +152,43 @@ class ClassMethodsTest(unittest.TestCase): BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1') self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': [] }) + def test_compare_indexes_multiple_subclasses(self): + """ Ensure that compare_indexes behaves correctly if called from a + class, which base class has multiple subclasses + """ + + class BlogPost(Document): + author = StringField() + title = StringField() + description = StringField() + + meta = { + 'allow_inheritance': True + } + + class BlogPostWithTags(BlogPost): + tags = StringField() + tag_list 
= ListField(StringField()) + + meta = { + 'indexes': [('author', 'tags')] + } + + class BlogPostWithCustomField(BlogPost): + custom = DictField() + + meta = { + 'indexes': [('author', 'custom')] + } + + BlogPost.ensure_indexes() + BlogPostWithTags.ensure_indexes() + BlogPostWithCustomField.ensure_indexes() + + self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) + self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] }) + self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] }) + def test_list_indexes_inheritance(self): """ ensure that all of the indexes are listed regardless of the super- or sub-class that we call it from @@ -190,7 +227,6 @@ class ClassMethodsTest(unittest.TestCase): BlogPostWithTags.list_indexes()) self.assertEqual(BlogPost.list_indexes(), BlogPostWithTagsAndExtraText.list_indexes()) - print BlogPost.list_indexes() self.assertEqual(BlogPost.list_indexes(), [[('_cls', 1), ('author', 1), ('tags', 1)], [('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)], diff --git a/tests/document/inheritance.py b/tests/document/inheritance.py index f011631..28490c9 100644 --- a/tests/document/inheritance.py +++ b/tests/document/inheritance.py @@ -189,6 +189,41 @@ class InheritanceTest(unittest.TestCase): self.assertEqual(Employee._get_collection_name(), Person._get_collection_name()) + def test_indexes_and_multiple_inheritance(self): + """ Ensure that all of the indexes are created for a document with + multiple inheritance. 
+ """ + + class A(Document): + a = StringField() + + meta = { + 'allow_inheritance': True, + 'indexes': ['a'] + } + + class B(Document): + b = StringField() + + meta = { + 'allow_inheritance': True, + 'indexes': ['b'] + } + + class C(A, B): + pass + + A.drop_collection() + B.drop_collection() + C.drop_collection() + + C.ensure_indexes() + + self.assertEqual( + [idx['key'] for idx in C._get_collection().index_information().values()], + [[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]] + ) + def test_polymorphic_queries(self): """Ensure that the correct subclasses are returned from a query """ From f0d4e76418ff3f02b3401ee41467356a939c39a0 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 7 Jun 2013 08:21:15 +0000 Subject: [PATCH 372/464] Documentation updates --- docs/apireference.rst | 5 +++++ mongoengine/base/fields.py | 3 +-- mongoengine/common.py | 14 +++++++++++++- 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/docs/apireference.rst b/docs/apireference.rst index 0fa410e..d062727 100644 --- a/docs/apireference.rst +++ b/docs/apireference.rst @@ -88,3 +88,8 @@ Fields .. autoclass:: mongoengine.fields.GridFSProxy .. autoclass:: mongoengine.fields.ImageGridFsProxy .. autoclass:: mongoengine.fields.ImproperlyConfigured + +Misc +==== + +.. autofunction:: mongoengine.common._import_class diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index e4c88a7..eda9b3c 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -82,8 +82,7 @@ class BaseField(object): BaseField.creation_counter += 1 def __get__(self, instance, owner): - """Descriptor for retrieving a value from a field in a document. Do - any necessary conversion between Python and MongoDB types. + """Descriptor for retrieving a value from a field in a document. 
""" if instance is None: # Document class being used rather than a document object diff --git a/mongoengine/common.py b/mongoengine/common.py index bff55ac..20d5138 100644 --- a/mongoengine/common.py +++ b/mongoengine/common.py @@ -2,7 +2,19 @@ _class_registry_cache = {} def _import_class(cls_name): - """Cached mechanism for imports""" + """Cache mechanism for imports. + + Due to complications of circular imports mongoengine needs to do lots of + inline imports in functions. This is inefficient as classes are + imported repeated throughout the mongoengine code. This is + compounded by some recursive functions requiring inline imports. + + :mod:`mongoengine.common` provides a single point to import all these + classes. Circular imports aren't an issue as it dynamically imports the + class when first needed. Subsequent calls to the + :func:`~mongoengine.common._import_class` can then directly retrieve the + class from the :data:`mongoengine.common._class_registry_cache`. + """ if cls_name in _class_registry_cache: return _class_registry_cache.get(cls_name) From 000eff73ccdad3f2b5a8dbb283ccc035707c46b8 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 7 Jun 2013 08:33:34 +0000 Subject: [PATCH 373/464] Make test_indexes_and_multiple_inheritance place nice with py3.3 (#364) --- tests/document/inheritance.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/document/inheritance.py b/tests/document/inheritance.py index 28490c9..5a48f75 100644 --- a/tests/document/inheritance.py +++ b/tests/document/inheritance.py @@ -220,8 +220,8 @@ class InheritanceTest(unittest.TestCase): C.ensure_indexes() self.assertEqual( - [idx['key'] for idx in C._get_collection().index_information().values()], - [[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]] + sorted([idx['key'] for idx in C._get_collection().index_information().values()]), + sorted([[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]]) ) def 
test_polymorphic_queries(self): From 025c16c95d920865e1f60c3f38aa70f07fae0f3a Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 7 Jun 2013 08:34:57 +0000 Subject: [PATCH 374/464] Add BobDickinson to authors (#361) --- AUTHORS | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 72b1124..7788139 100644 --- a/AUTHORS +++ b/AUTHORS @@ -168,4 +168,5 @@ that much better: * istinspring (https://github.com/istinspring) * Massimo Santini (https://github.com/mapio) * Nigel McNie (https://github.com/nigelmcnie) - * ygbourhis (https://github.com/ygbourhis) \ No newline at end of file + * ygbourhis (https://github.com/ygbourhis) + * Bob Dickinson (https://github.com/BobDickinson) \ No newline at end of file From e2b32b4bb378bedf6fa962b02ebe893e7b8e1e4b Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 7 Jun 2013 08:43:05 +0000 Subject: [PATCH 375/464] Added more docs about compare_indexes (#364) --- docs/guide/defining-documents.rst | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index ed9c142..b3b1e59 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -497,7 +497,6 @@ in this case use 'dot' notation to identify the value to index eg: `rank.title` Geospatial indexes ------------------ - The best geo index for mongodb is the new "2dsphere", which has an improved spherical model and provides better performance and more options when querying. The following fields will explicitly add a "2dsphere" index: @@ -559,6 +558,14 @@ documentation for more information. A common usecase might be session data:: ] } +Comparing Indexes +----------------- + +Use :func:`mongoengine.Document.compare_indexes` to compare actual indexes in +the database to those that your document definitions define. This is useful +for maintenance purposes and ensuring you have the correct indexes for your +schema. 
+ Ordering ======== A default ordering can be specified for your From a3d43b77ca59facb5a8cde618bfd33c9cb14eb07 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 7 Jun 2013 08:44:33 +0000 Subject: [PATCH 376/464] Updated changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index a046847..0f86a62 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in 0.8.2 ================ +- Added compare_indexes helper (#361) - Fixed cascading saves which weren't turned off as planned (#291) - Fixed Datastructures so instances are a Document or EmbeddedDocument (#363) - Improved cascading saves write performance (#361) From ede9fcfb0021f8ff924bad894af77c6ee6a8ed35 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 7 Jun 2013 08:45:40 +0000 Subject: [PATCH 377/464] Version bump 0.8.2 --- mongoengine/__init__.py | 2 +- python-mongoengine.spec | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index 8c167f0..5bd1201 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -15,7 +15,7 @@ import django __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + list(queryset.__all__) + signals.__all__ + list(errors.__all__)) -VERSION = (0, 8, 1) +VERSION = (0, 8, 2) def get_version(): diff --git a/python-mongoengine.spec b/python-mongoengine.spec index 7c87b1c..4eaba4d 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.8.1 +Version: 0.8.2 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB From 44a2a164c0fa46bb3839a08d9b240243b9ab71fe Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 13 Jun 2013 10:54:39 +0000 Subject: [PATCH 378/464] Doc updates --- docs/changelog.rst | 2 +- docs/guide/defining-documents.rst | 4 ++-- 2 files changed, 3 
insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 0f86a62..3a39752 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -9,7 +9,7 @@ Changes in 0.8.2 - Fixed cascading saves which weren't turned off as planned (#291) - Fixed Datastructures so instances are a Document or EmbeddedDocument (#363) - Improved cascading saves write performance (#361) -- Fixed amibiguity and differing behaviour regarding field defaults (#349) +- Fixed ambiguity and differing behaviour regarding field defaults (#349) - ImageFields now include PIL error messages if invalid error (#353) - Added lock when calling doc.Delete() for when signals have no sender (#350) - Reload forces read preference to be PRIMARY (#355) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index b3b1e59..a61d8fe 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -450,8 +450,8 @@ by creating a list of index specifications called :attr:`indexes` in the :attr:`~mongoengine.Document.meta` dictionary, where an index specification may either be a single field name, a tuple containing multiple field names, or a dictionary containing a full index definition. A direction may be specified on -fields by prefixing the field name with a **+** or a **-** sign. Note that -direction only matters on multi-field indexes. :: +fields by prefixing the field name with a **+** (for ascending) or a **-** sign +(for descending). Note that direction only matters on multi-field indexes. 
:: class Page(Document): title = StringField() From c31d6a68985e9807795aaf60bc796ac86a227e84 Mon Sep 17 00:00:00 2001 From: kelvinhammond Date: Wed, 19 Jun 2013 10:34:33 -0400 Subject: [PATCH 379/464] Fixed sum and average mapreduce function for issue #375 --- AUTHORS | 3 ++- mongoengine/queryset/queryset.py | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/AUTHORS b/AUTHORS index 7788139..780c9e6 100644 --- a/AUTHORS +++ b/AUTHORS @@ -16,6 +16,7 @@ Dervived from the git logs, inevitably incomplete but all of whom and others have submitted patches, reported bugs and generally helped make MongoEngine that much better: + * Kelvin Hammond (https://github.com/kelvinhammond) * Harry Marr * Ross Lawley * blackbrrr @@ -169,4 +170,4 @@ that much better: * Massimo Santini (https://github.com/mapio) * Nigel McNie (https://github.com/nigelmcnie) * ygbourhis (https://github.com/ygbourhis) - * Bob Dickinson (https://github.com/BobDickinson) \ No newline at end of file + * Bob Dickinson (https://github.com/BobDickinson) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index d58a13b..e2ff43f 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1062,7 +1062,7 @@ class QuerySet(object): """ map_func = Code(""" function() { - emit(1, this[field] || 0); + emit(1, eval("this." + field) || 0); } """, scope={'field': field}) @@ -1093,7 +1093,7 @@ class QuerySet(object): map_func = Code(""" function() { if (this.hasOwnProperty(field)) - emit(1, {t: this[field] || 0, c: 1}); + emit(1, {t: eval("this." 
+ field) || 0, c: 1}); } """, scope={'field': field}) From 574f3c23d3ce633d830f55918a009c30305e3dbd Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 21 Jun 2013 09:35:22 +0000 Subject: [PATCH 380/464] get should clone before calling --- mongoengine/queryset/queryset.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index d58a13b..9b53df2 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -245,8 +245,10 @@ class QuerySet(object): .. versionadded:: 0.3 """ - queryset = self.__call__(*q_objs, **query) + queryset = self.clone() queryset = queryset.limit(2) + queryset = queryset.filter(*q_objs, **query) + try: result = queryset.next() except StopIteration: From f1a1aa54d8a31336d2a76a21f2e4b166d776f526 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 21 Jun 2013 10:19:40 +0000 Subject: [PATCH 381/464] Added full_result kwarg to update (#380) --- docs/changelog.rst | 3 +++ mongoengine/document.py | 8 +++++++- mongoengine/queryset/queryset.py | 16 ++++++++++------ tests/queryset/queryset.py | 17 +++++++++++++++++ 4 files changed, 37 insertions(+), 7 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 3a39752..8fa5af0 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -2,6 +2,9 @@ Changelog ========= +Changes in 0.8.3 +================ +- Added full_result kwarg to update (#380) Changes in 0.8.2 ================ diff --git a/mongoengine/document.py b/mongoengine/document.py index a61ed07..a1bac19 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -353,7 +353,13 @@ class Document(BaseDocument): been saved. 
""" if not self.pk: - raise OperationError('attempt to update a document not yet saved') + if kwargs.get('upsert', False): + query = self.to_mongo() + if "_cls" in query: + del(query["_cls"]) + return self._qs.filter(**query).update_one(**kwargs) + else: + raise OperationError('attempt to update a document not yet saved') # Need to add shard key to query, or you get an error return self._qs.filter(**self._object_key).update_one(**kwargs) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 9b53df2..4b32ab1 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -474,7 +474,8 @@ class QuerySet(object): queryset._collection.remove(queryset._query, write_concern=write_concern) - def update(self, upsert=False, multi=True, write_concern=None, **update): + def update(self, upsert=False, multi=True, write_concern=None, + full_result=False, **update): """Perform an atomic update on the fields matched by the query. :param upsert: Any existing document with that "_id" is overwritten. @@ -485,6 +486,8 @@ class QuerySet(object): ``save(..., write_concern={w: 2, fsync: True}, ...)`` will wait until at least two servers have recorded the write and will force an fsync on the primary server. + :param full_result: Return the full result rather than just the number + updated. :param update: Django-style update keyword arguments .. 
versionadded:: 0.2 @@ -506,12 +509,13 @@ class QuerySet(object): update["$set"]["_cls"] = queryset._document._class_name else: update["$set"] = {"_cls": queryset._document._class_name} - try: - ret = queryset._collection.update(query, update, multi=multi, - upsert=upsert, **write_concern) - if ret is not None and 'n' in ret: - return ret['n'] + result = queryset._collection.update(query, update, multi=multi, + upsert=upsert, **write_concern) + if full_result: + return result + elif result: + return result['n'] except pymongo.errors.OperationFailure, err: if unicode(err) == u'multi not coded yet': message = u'update() method requires MongoDB 1.1.3+' diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 21df22c..bd231e3 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -536,6 +536,23 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(club.members['John']['gender'], "F") self.assertEqual(club.members['John']['age'], 14) + def test_update_results(self): + self.Person.drop_collection() + + result = self.Person(name="Bob", age=25).update(upsert=True, full_result=True) + self.assertIsInstance(result, dict) + self.assertTrue("upserted" in result) + self.assertFalse(result["updatedExisting"]) + + bob = self.Person.objects.first() + result = bob.update(set__age=30, full_result=True) + self.assertIsInstance(result, dict) + self.assertTrue(result["updatedExisting"]) + + self.Person(name="Bob", age=20).save() + result = self.Person.objects(name="Bob").update(set__name="bobby", multi=True) + self.assertEqual(result, 2) + def test_upsert(self): self.Person.drop_collection() From e116bb92272246528df242c587f1b8e5e6fb6f48 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 21 Jun 2013 10:39:10 +0000 Subject: [PATCH 382/464] Fixed queryset.get() respecting no_dereference (#373) --- docs/changelog.rst | 1 + mongoengine/queryset/queryset.py | 4 ++-- tests/queryset/queryset.py | 2 ++ 3 files changed, 5 insertions(+), 2 
deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 8fa5af0..b04bba5 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.3 ================ +- Fixed queryset.get() respecting no_dereference (#373) - Added full_result kwarg to update (#380) Changes in 0.8.2 diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 4b32ab1..ded8d5e 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1165,8 +1165,8 @@ class QuerySet(object): raw_doc = self._cursor.next() if self._as_pymongo: return self._get_as_pymongo(raw_doc) - - doc = self._document._from_son(raw_doc) + doc = self._document._from_son(raw_doc, + _auto_dereference=self._auto_dereference) if self._scalar: return self._get_scalar(doc) diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index bd231e3..7c47360 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3258,6 +3258,8 @@ class QuerySetTest(unittest.TestCase): self.assertTrue(isinstance(qs.first().organization, Organization)) self.assertFalse(isinstance(qs.no_dereference().first().organization, Organization)) + self.assertFalse(isinstance(qs.no_dereference().get().organization, + Organization)) self.assertTrue(isinstance(qs.first().organization, Organization)) def test_cached_queryset(self): From e6374ab425d45c3d4007a1773b364bd29a40bd24 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 21 Jun 2013 10:40:15 +0000 Subject: [PATCH 383/464] Added Michael Bartnett to Authors (#373) --- AUTHORS | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 7788139..a50eb57 100644 --- a/AUTHORS +++ b/AUTHORS @@ -169,4 +169,5 @@ that much better: * Massimo Santini (https://github.com/mapio) * Nigel McNie (https://github.com/nigelmcnie) * ygbourhis (https://github.com/ygbourhis) - * Bob Dickinson (https://github.com/BobDickinson) \ No newline at end of file + * Bob 
Dickinson (https://github.com/BobDickinson) + * Michael Bartnett (https://github.com/michaelbartnett) \ No newline at end of file From 9867e918fa58f99cceca8a8917c9673251cc309d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 21 Jun 2013 11:04:29 +0000 Subject: [PATCH 384/464] Fixed weakref being valid after reload (#374) --- docs/changelog.rst | 1 + mongoengine/document.py | 1 + tests/queryset/queryset.py | 26 ++++++++++++++++++++++++++ 3 files changed, 28 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index b04bba5..265ad13 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.3 ================ +- Fixed weakref being valid after reload (#374) - Fixed queryset.get() respecting no_dereference (#373) - Added full_result kwarg to update (#380) diff --git a/mongoengine/document.py b/mongoengine/document.py index a1bac19..ab8fa2a 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -480,6 +480,7 @@ class Document(BaseDocument): value = [self._reload(key, v) for v in value] value = BaseList(value, self, key) elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)): + value._instance = None value._changed_fields = [] return value diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 7c47360..6dcbd9f 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -1613,6 +1613,32 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(message.authors[1].name, "Ross") self.assertEqual(message.authors[2].name, "Adam") + def test_reload_embedded_docs_instance(self): + + class SubDoc(EmbeddedDocument): + val = IntField() + + class Doc(Document): + embedded = EmbeddedDocumentField(SubDoc) + + doc = Doc(embedded=SubDoc(val=0)).save() + doc.reload() + + self.assertEqual(doc.pk, doc.embedded._instance.pk) + + def test_reload_list_embedded_docs_instance(self): + + class SubDoc(EmbeddedDocument): + val = IntField() + + class Doc(Document): + embedded 
= ListField(EmbeddedDocumentField(SubDoc)) + + doc = Doc(embedded=[SubDoc(val=0)]).save() + doc.reload() + + self.assertEqual(doc.pk, doc.embedded[0]._instance.pk) + def test_order_by(self): """Ensure that QuerySets may be ordered. """ From d6edef98c6b08383cff7384f0548cc8991adc7bb Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 21 Jun 2013 11:29:23 +0000 Subject: [PATCH 385/464] Added match ($elemMatch) support for EmbeddedDocuments (#379) --- docs/changelog.rst | 1 + mongoengine/queryset/transform.py | 1 + tests/queryset/queryset.py | 13 ++++++++----- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 265ad13..1927bee 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.3 ================ +- Added match ($elemMatch) support for EmbeddedDocuments (#379) - Fixed weakref being valid after reload (#374) - Fixed queryset.get() respecting no_dereference (#373) - Added full_result kwarg to update (#380) diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 4062fc1..352774f 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -95,6 +95,7 @@ def query(_doc_cls=None, _field_operation=False, **query): value = _geo_operator(field, op, value) elif op in CUSTOM_OPERATORS: if op == 'match': + value = field.prepare_query_value(op, value) value = {"$elemMatch": value} else: NotImplementedError("Custom method '%s' has not " diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 6dcbd9f..eabb3c5 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3091,7 +3091,7 @@ class QuerySetTest(unittest.TestCase): class Foo(EmbeddedDocument): shape = StringField() color = StringField() - trick = BooleanField() + thick = BooleanField() meta = {'allow_inheritance': False} class Bar(Document): @@ -3100,17 +3100,20 @@ class QuerySetTest(unittest.TestCase): 
Bar.drop_collection() - b1 = Bar(foo=[Foo(shape= "square", color ="purple", thick = False), - Foo(shape= "circle", color ="red", thick = True)]) + b1 = Bar(foo=[Foo(shape="square", color="purple", thick=False), + Foo(shape="circle", color="red", thick=True)]) b1.save() - b2 = Bar(foo=[Foo(shape= "square", color ="red", thick = True), - Foo(shape= "circle", color ="purple", thick = False)]) + b2 = Bar(foo=[Foo(shape="square", color="red", thick=True), + Foo(shape="circle", color="purple", thick=False)]) b2.save() ak = list(Bar.objects(foo__match={'shape': "square", "color": "purple"})) self.assertEqual([b1], ak) + ak = list(Bar.objects(foo__match=Foo(shape="square", color="purple"))) + self.assertEqual([b1], ak) + def test_upsert_includes_cls(self): """Upserts should include _cls information for inheritable classes """ From caff44c663295322ba62c1dc26ff488b1d13b651 Mon Sep 17 00:00:00 2001 From: kelvinhammond Date: Fri, 21 Jun 2013 09:39:11 -0400 Subject: [PATCH 386/464] Fixed sum and average queryset function * Fixed sum and average map reduce functions for sum and average so that it works with mongo dot notation. * Added unittest cases / updated them for the new changes --- mongoengine/queryset/queryset.py | 37 +++++++++++++++++++++++++++++--- tests/queryset/queryset.py | 26 ++++++++++++++++++++++ 2 files changed, 60 insertions(+), 3 deletions(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index bf80c69..e5026fd 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1068,7 +1068,22 @@ class QuerySet(object): """ map_func = Code(""" function() { - emit(1, eval("this." 
+ field) || 0); + function deepFind(obj, path) { + var paths = path.split('.') + , current = obj + , i; + + for (i = 0; i < paths.length; ++i) { + if (current[paths[i]] == undefined) { + return undefined; + } else { + current = current[paths[i]]; + } + } + return current; + } + + emit(1, deepFind(this, field) || 0); } """, scope={'field': field}) @@ -1098,8 +1113,24 @@ class QuerySet(object): """ map_func = Code(""" function() { - if (this.hasOwnProperty(field)) - emit(1, {t: eval("this." + field) || 0, c: 1}); + function deepFind(obj, path) { + var paths = path.split('.') + , current = obj + , i; + + for (i = 0; i < paths.length; ++i) { + if (current[paths[i]] == undefined) { + return undefined; + } else { + current = current[paths[i]]; + } + } + return current; + } + + val = deepFind(this, field) + if (val !== undefined) + emit(1, {t: val || 0, c: 1}); } """, scope={'field': field}) diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 6dcbd9f..de66ddc 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -30,12 +30,17 @@ class QuerySetTest(unittest.TestCase): def setUp(self): connect(db='mongoenginetest') + class PersonMeta(EmbeddedDocument): + weight = IntField() + class Person(Document): name = StringField() age = IntField() + person_meta = EmbeddedDocumentField(PersonMeta) meta = {'allow_inheritance': True} Person.drop_collection() + self.PersonMeta = PersonMeta self.Person = Person def test_initialisation(self): @@ -2208,6 +2213,19 @@ class QuerySetTest(unittest.TestCase): self.Person(name='ageless person').save() self.assertEqual(int(self.Person.objects.average('age')), avg) + # dot notation + self.Person(name='person meta', person_meta=self.PersonMeta(weight=0)).save() + self.assertAlmostEqual(int(self.Person.objects.average('person_meta.weight')), 0) + + for i, weight in enumerate(ages): + self.Person(name='test meta%i', person_meta=self.PersonMeta(weight=weight)).save() + + 
self.assertAlmostEqual(int(self.Person.objects.average('person_meta.weight')), avg) + + self.Person(name='test meta none').save() + self.assertEqual(int(self.Person.objects.average('person_meta.weight')), avg) + + def test_sum(self): """Ensure that field can be summed over correctly. """ @@ -2220,6 +2238,14 @@ class QuerySetTest(unittest.TestCase): self.Person(name='ageless person').save() self.assertEqual(int(self.Person.objects.sum('age')), sum(ages)) + for i, age in enumerate(ages): + self.Person(name='test meta%s' % i, person_meta=self.PersonMeta(weight=age)).save() + + self.assertEqual(int(self.Person.objects.sum('person_meta.weight')), sum(ages)) + + self.Person(name='weightless person').save() + self.assertEqual(int(self.Person.objects.sum('age')), sum(ages)) + def test_distinct(self): """Ensure that the QuerySet.distinct method works. """ From fbe5df84c0f5d49396093f0549b9b3fab28c454d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 25 Jun 2013 09:30:28 +0000 Subject: [PATCH 387/464] Remove users post uri test --- tests/test_connection.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/test_connection.py b/tests/test_connection.py index d792648..d27a66d 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -56,6 +56,9 @@ class ConnectionTest(unittest.TestCase): self.assertTrue(isinstance(db, pymongo.database.Database)) self.assertEqual(db.name, 'mongoenginetest') + c.admin.system.users.remove({}) + c.mongoenginetest.system.users.remove({}) + def test_register_connection(self): """Ensure that connections with different aliases may be registered. 
""" From 8d21e5f3c10a0341db2687aef56385ec245f3bae Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 2 Jul 2013 09:47:54 +0000 Subject: [PATCH 388/464] Fix tests for py2.6 --- tests/queryset/queryset.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index eabb3c5..4d91b55 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -540,13 +540,13 @@ class QuerySetTest(unittest.TestCase): self.Person.drop_collection() result = self.Person(name="Bob", age=25).update(upsert=True, full_result=True) - self.assertIsInstance(result, dict) + self.assertTrue(isinstance(result, dict)) self.assertTrue("upserted" in result) self.assertFalse(result["updatedExisting"]) bob = self.Person.objects.first() result = bob.update(set__age=30, full_result=True) - self.assertIsInstance(result, dict) + self.assertTrue(isinstance(result, dict)) self.assertTrue(result["updatedExisting"]) self.Person(name="Bob", age=20).save() From 43d6e64cfa959e31ff6e983c74534c06ac5b3108 Mon Sep 17 00:00:00 2001 From: Jan Schrewe Date: Tue, 2 Jul 2013 17:04:15 +0200 Subject: [PATCH 389/464] Added a get_proxy_obj method to FileField and handle FileFields in container fields properly in ImageGridFsProxy. 
--- mongoengine/fields.py | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 451f7ac..727803f 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1190,9 +1190,7 @@ class FileField(BaseField): # Check if a file already exists for this model grid_file = instance._data.get(self.name) if not isinstance(grid_file, self.proxy_class): - grid_file = self.proxy_class(key=self.name, instance=instance, - db_alias=self.db_alias, - collection_name=self.collection_name) + grid_file = self.get_proxy_obj(key=key, instance=instance) instance._data[self.name] = grid_file if not grid_file.key: @@ -1214,14 +1212,22 @@ class FileField(BaseField): pass # Create a new proxy object as we don't already have one - instance._data[key] = self.proxy_class(key=key, instance=instance, - db_alias=self.db_alias, - collection_name=self.collection_name) + instance._data[key] = self.get_proxy_obj(key=key, instance=instance) instance._data[key].put(value) else: instance._data[key] = value instance._mark_as_changed(key) + + def get_proxy_obj(self, key, instance, db_alias=None, collection_name=None): + if db_alias is None: + db_alias = self.db_alias + if collection_name is None: + collection_name = self.collection_name + + return self.proxy_class(key=key, instance=instance, + db_alias=db_alias, + collection_name=collection_name) def to_mongo(self, value): # Store the GridFS file id in MongoDB @@ -1255,6 +1261,11 @@ class ImageGridFsProxy(GridFSProxy): applying field properties (size, thumbnail_size) """ field = self.instance._fields[self.key] + # if the field from the instance has an attribute field + # we use that one and hope for the best. Usually only container + # fields have a field attribute. 
+ if hasattr(field, 'field'): + field = field.field try: img = Image.open(file_obj) From 5021b1053599a53a9b50d02236fa701230065a71 Mon Sep 17 00:00:00 2001 From: Serge Matveenko Date: Wed, 3 Jul 2013 01:17:40 +0400 Subject: [PATCH 390/464] Fix crash on Python 3.x and Django >= 1.5 --- mongoengine/django/sessions.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mongoengine/django/sessions.py b/mongoengine/django/sessions.py index c90807e..7e4e182 100644 --- a/mongoengine/django/sessions.py +++ b/mongoengine/django/sessions.py @@ -1,7 +1,10 @@ from django.conf import settings from django.contrib.sessions.backends.base import SessionBase, CreateError from django.core.exceptions import SuspiciousOperation -from django.utils.encoding import force_unicode +try: + from django.utils.encoding import force_unicode +except ImportError: + from django.utils.encoding import force_text as force_unicode from mongoengine.document import Document from mongoengine import fields From 592c654916c38eb3b01bf98db7160f4c871ab936 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Fri, 5 Jul 2013 10:36:11 -0300 Subject: [PATCH 391/464] extending support for queryset.sum and queryset.average methods --- mongoengine/queryset/queryset.py | 51 ++++++++--- tests/queryset/queryset.py | 140 +++++++++++++++++++++++++++++++ 2 files changed, 181 insertions(+), 10 deletions(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index ded8d5e..86a14b5 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1066,11 +1066,27 @@ class QuerySet(object): .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work with sharding. 
""" - map_func = Code(""" + map_func = """ function() { - emit(1, this[field] || 0); + var path = '{{~%(field)s}}'.split('.'), + field = this; + + for (p in path) { + if (typeof field != 'undefined') + field = field[path[p]]; + else + break; + } + + if (field && field.constructor == Array) { + field.forEach(function(item) { + emit(1, item||0); + }); + } else if (typeof field != 'undefined') { + emit(1, field||0); + } } - """, scope={'field': field}) + """ % dict(field=field) reduce_func = Code(""" function(key, values) { @@ -1096,13 +1112,28 @@ class QuerySet(object): .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work with sharding. """ - map_func = Code(""" + map_func = """ function() { - if (this.hasOwnProperty(field)) - emit(1, {t: this[field] || 0, c: 1}); - } - """, scope={'field': field}) + var path = '{{~%(field)s}}'.split('.'), + field = this; + for (p in path) { + if (typeof field != 'undefined') + field = field[path[p]]; + else + break; + } + + if (field && field.constructor == Array) { + field.forEach(function(item) { + emit(1, {t: item||0, c: 1}); + }); + } else if (typeof field != 'undefined') { + emit(1, {t: field||0, c: 1}); + } + } + """ % dict(field=field) + reduce_func = Code(""" function(key, values) { var out = {t: 0, c: 0}; @@ -1263,8 +1294,8 @@ class QuerySet(object): def _item_frequencies_map_reduce(self, field, normalize=False): map_func = """ function() { - var path = '{{~%(field)s}}'.split('.'); - var field = this; + var path = '{{~%(field)s}}'.split('.'), + field = this; for (p in path) { if (typeof field != 'undefined') diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 4d91b55..3f9bd23 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -2208,6 +2208,75 @@ class QuerySetTest(unittest.TestCase): self.Person(name='ageless person').save() self.assertEqual(int(self.Person.objects.average('age')), avg) + def test_embedded_average(self): + class Pay(EmbeddedDocument): + value 
= DecimalField() + + class Doc(Document): + name = StringField() + pay = EmbeddedDocumentField( + Pay) + + Doc.drop_collection() + + Doc(name=u"Wilson Junior", + pay=Pay(value=150)).save() + + Doc(name=u"Isabella Luanna", + pay=Pay(value=530)).save() + + Doc(name=u"Tayza mariana", + pay=Pay(value=165)).save() + + Doc(name=u"Eliana Costa", + pay=Pay(value=115)).save() + + self.assertEqual( + Doc.objects.average('pay.value'), + 240) + + def test_embedded_array_average(self): + class Pay(EmbeddedDocument): + values = ListField(DecimalField()) + + class Doc(Document): + name = StringField() + pay = EmbeddedDocumentField( + Pay) + + Doc.drop_collection() + + Doc(name=u"Wilson Junior", + pay=Pay(values=[150, 100])).save() + + Doc(name=u"Isabella Luanna", + pay=Pay(values=[530, 100])).save() + + Doc(name=u"Tayza mariana", + pay=Pay(values=[165, 100])).save() + + Doc(name=u"Eliana Costa", + pay=Pay(values=[115, 100])).save() + + self.assertEqual( + Doc.objects.average('pay.values'), + 170) + + def test_array_average(self): + class Doc(Document): + values = ListField(DecimalField()) + + Doc.drop_collection() + + Doc(values=[150, 100]).save() + Doc(values=[530, 100]).save() + Doc(values=[165, 100]).save() + Doc(values=[115, 100]).save() + + self.assertEqual( + Doc.objects.average('values'), + 170) + def test_sum(self): """Ensure that field can be summed over correctly. 
""" @@ -2220,6 +2289,77 @@ class QuerySetTest(unittest.TestCase): self.Person(name='ageless person').save() self.assertEqual(int(self.Person.objects.sum('age')), sum(ages)) + def test_embedded_sum(self): + class Pay(EmbeddedDocument): + value = DecimalField() + + class Doc(Document): + name = StringField() + pay = EmbeddedDocumentField( + Pay) + + Doc.drop_collection() + + Doc(name=u"Wilson Junior", + pay=Pay(value=150)).save() + + Doc(name=u"Isabella Luanna", + pay=Pay(value=530)).save() + + Doc(name=u"Tayza mariana", + pay=Pay(value=165)).save() + + Doc(name=u"Eliana Costa", + pay=Pay(value=115)).save() + + self.assertEqual( + Doc.objects.sum('pay.value'), + 960) + + + def test_embedded_array_sum(self): + class Pay(EmbeddedDocument): + values = ListField(DecimalField()) + + class Doc(Document): + name = StringField() + pay = EmbeddedDocumentField( + Pay) + + Doc.drop_collection() + + Doc(name=u"Wilson Junior", + pay=Pay(values=[150, 100])).save() + + Doc(name=u"Isabella Luanna", + pay=Pay(values=[530, 100])).save() + + Doc(name=u"Tayza mariana", + pay=Pay(values=[165, 100])).save() + + Doc(name=u"Eliana Costa", + pay=Pay(values=[115, 100])).save() + + self.assertEqual( + Doc.objects.sum('pay.values'), + 1360) + + def test_array_sum(self): + class Doc(Document): + values = ListField(DecimalField()) + + Doc.drop_collection() + + Doc(values=[150, 100]).save() + Doc(values=[530, 100]).save() + Doc(values=[165, 100]).save() + Doc(values=[115, 100]).save() + + self.assertEqual( + Doc.objects.sum('values'), + 1360) + + def test_distinct(self): """Ensure that the QuerySet.distinct method works. 
""" From a1d142d3a4bcfb6dd2d9df5d3adf5eec2c51edb5 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 10 Jul 2013 08:38:13 +0000 Subject: [PATCH 392/464] Prep for django and py3 support --- .travis.yml | 2 ++ setup.py | 2 +- tests/test_django.py | 60 ++++++++++++++++++-------------------------- 3 files changed, 27 insertions(+), 37 deletions(-) diff --git a/.travis.yml b/.travis.yml index b7c56a0..2bb5863 100644 --- a/.travis.yml +++ b/.travis.yml @@ -11,6 +11,8 @@ env: - PYMONGO=dev DJANGO=1.4.2 - PYMONGO=2.5 DJANGO=1.5.1 - PYMONGO=2.5 DJANGO=1.4.2 + - PYMONGO=3.2 DJANGO=1.5.1 + - PYMONGO=3.3 DJANGO=1.5.1 install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi diff --git a/setup.py b/setup.py index effb6f1..f6b3c1b 100644 --- a/setup.py +++ b/setup.py @@ -51,7 +51,7 @@ CLASSIFIERS = [ extra_opts = {} if sys.version_info[0] == 3: extra_opts['use_2to3'] = True - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6'] + extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'django>=1.5.1'] extra_opts['packages'] = find_packages(exclude=('tests',)) if "test" in sys.argv or "nosetests" in sys.argv: extra_opts['packages'].append("tests") diff --git a/tests/test_django.py b/tests/test_django.py index 63e3245..d67b126 100644 --- a/tests/test_django.py +++ b/tests/test_django.py @@ -2,48 +2,42 @@ import sys sys.path[0:0] = [""] import unittest from nose.plugins.skip import SkipTest -from mongoengine.python_support import PY3 from mongoengine import * + +from mongoengine.django.shortcuts import get_document_or_404 + +from django.http import Http404 +from django.template import Context, Template +from django.conf import settings +from django.core.paginator import Paginator + +settings.configure( + USE_TZ=True, + INSTALLED_APPS=('django.contrib.auth', 'mongoengine.django.mongo_auth'), + 
AUTH_USER_MODEL=('mongo_auth.MongoUser'), +) + try: - from mongoengine.django.shortcuts import get_document_or_404 - - from django.http import Http404 - from django.template import Context, Template - from django.conf import settings - from django.core.paginator import Paginator - - settings.configure( - USE_TZ=True, - INSTALLED_APPS=('django.contrib.auth', 'mongoengine.django.mongo_auth'), - AUTH_USER_MODEL=('mongo_auth.MongoUser'), - ) - - try: - from django.contrib.auth import authenticate, get_user_model - from mongoengine.django.auth import User - from mongoengine.django.mongo_auth.models import MongoUser, MongoUserManager - DJ15 = True - except Exception: - DJ15 = False - from django.contrib.sessions.tests import SessionTestsMixin - from mongoengine.django.sessions import SessionStore, MongoSession -except Exception, err: - if PY3: - SessionTestsMixin = type # dummy value so no error - SessionStore = None # dummy value so no error - else: - raise err + from django.contrib.auth import authenticate, get_user_model + from mongoengine.django.auth import User + from mongoengine.django.mongo_auth.models import MongoUser, MongoUserManager + DJ15 = True +except Exception: + DJ15 = False +from django.contrib.sessions.tests import SessionTestsMixin +from mongoengine.django.sessions import SessionStore, MongoSession from datetime import tzinfo, timedelta ZERO = timedelta(0) + class FixedOffset(tzinfo): """Fixed offset in minutes east from UTC.""" def __init__(self, offset, name): - self.__offset = timedelta(minutes = offset) + self.__offset = timedelta(minutes=offset) self.__name = name def utcoffset(self, dt): @@ -70,8 +64,6 @@ def activate_timezone(tz): class QuerySetTest(unittest.TestCase): def setUp(self): - if PY3: - raise SkipTest('django does not have Python 3 support') connect(db='mongoenginetest') class Person(Document): @@ -223,8 +215,6 @@ class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): backend = SessionStore def setUp(self): - if PY3: - raise 
SkipTest('django does not have Python 3 support') connect(db='mongoenginetest') MongoSession.drop_collection() super(MongoDBSessionTest, self).setUp() @@ -262,8 +252,6 @@ class MongoAuthTest(unittest.TestCase): } def setUp(self): - if PY3: - raise SkipTest('django does not have Python 3 support') if not DJ15: raise SkipTest('mongo_auth requires Django 1.5') connect(db='mongoenginetest') From 0cb40703641d94f6334b92ae23148a9b7331bfa4 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 10 Jul 2013 08:53:56 +0000 Subject: [PATCH 393/464] Added Django 1.5 PY3 support (#392) --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 1927bee..f433f21 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.3 ================ +- Added Django 1.5 PY3 support (#392) - Added match ($elemMatch) support for EmbeddedDocuments (#379) - Fixed weakref being valid after reload (#374) - Fixed queryset.get() respecting no_dereference (#373) From 7cb46d0761e6b058d55f8ad93f584fa6bb6ade15 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 10 Jul 2013 09:11:50 +0000 Subject: [PATCH 394/464] Fixed ListField setslice and delslice dirty tracking (#390) --- AUTHORS | 3 ++- docs/changelog.rst | 1 + mongoengine/base/datastructures.py | 8 ++++++++ tests/fields/fields.py | 26 ++++++++++++++++++++++++++ 4 files changed, 37 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index a50eb57..e88de8c 100644 --- a/AUTHORS +++ b/AUTHORS @@ -170,4 +170,5 @@ that much better: * Nigel McNie (https://github.com/nigelmcnie) * ygbourhis (https://github.com/ygbourhis) * Bob Dickinson (https://github.com/BobDickinson) - * Michael Bartnett (https://github.com/michaelbartnett) \ No newline at end of file + * Michael Bartnett (https://github.com/michaelbartnett) + * Alon Horev (https://github.com/alonho) \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 
f433f21..27d51a4 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.3 ================ +- Fixed ListField setslice and delslice dirty tracking (#390) - Added Django 1.5 PY3 support (#392) - Added match ($elemMatch) support for EmbeddedDocuments (#379) - Fixed weakref being valid after reload (#374) diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index adcd8d0..4652fb5 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -108,6 +108,14 @@ class BaseList(list): self._mark_as_changed() return super(BaseList, self).__delitem__(*args, **kwargs) + def __setslice__(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseList, self).__setslice__(*args, **kwargs) + + def __delslice__(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseList, self).__delslice__(*args, **kwargs) + def __getstate__(self): self.instance = None self._dereferenced = False diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 3e48a21..8f02499 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -1018,6 +1018,32 @@ class FieldTest(unittest.TestCase): e.mapping = {} self.assertEqual([], e._changed_fields) + def test_slice_marks_field_as_changed(self): + + class Simple(Document): + widgets = ListField() + + simple = Simple(widgets=[1, 2, 3, 4]).save() + simple.widgets[:3] = [] + self.assertEqual(['widgets'], simple._changed_fields) + simple.save() + + simple = simple.reload() + self.assertEqual(simple.widgets, [4]) + + def test_del_slice_marks_field_as_changed(self): + + class Simple(Document): + widgets = ListField() + + simple = Simple(widgets=[1, 2, 3, 4]).save() + del simple.widgets[:3] + self.assertEqual(['widgets'], simple._changed_fields) + simple.save() + + simple = simple.reload() + self.assertEqual(simple.widgets, [4]) + def test_list_field_complex(self): """Ensure that the list fields can handle the complex 
types.""" From af86aee9700504458ae945914a2b9412c5da8ea6 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 10 Jul 2013 10:57:24 +0000 Subject: [PATCH 395/464] _dynamic field updates - fixed pickling and creation order Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) Fixed pickling dynamic documents `_dynamic_fields` (#387) --- docs/changelog.rst | 2 ++ docs/guide/defining-documents.rst | 2 +- docs/upgrade.rst | 10 +++++++ mongoengine/base/document.py | 44 ++++++++++++------------------- mongoengine/base/metaclasses.py | 11 ++++++-- mongoengine/document.py | 5 +--- tests/document/delta.py | 13 ++++----- tests/document/instance.py | 36 ++++++++++++++++++++++++- tests/fixtures.py | 8 ++++++ 9 files changed, 90 insertions(+), 41 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 27d51a4..78deafb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,8 @@ Changelog Changes in 0.8.3 ================ +- Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) +- Fixed pickling dynamic documents `_dynamic_fields` (#387) - Fixed ListField setslice and delslice dirty tracking (#390) - Added Django 1.5 PY3 support (#392) - Added match ($elemMatch) support for EmbeddedDocuments (#379) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index a61d8fe..a50450e 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -54,7 +54,7 @@ be saved :: There is one caveat on Dynamic Documents: fields cannot start with `_` -Dynamic fields are stored in alphabetical order *after* any declared fields. +Dynamic fields are stored in creation order *after* any declared fields. 
Fields ====== diff --git a/docs/upgrade.rst b/docs/upgrade.rst index c3d3182..b8864b0 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -2,6 +2,16 @@ Upgrading ######### + +0.8.2 to 0.8.2 +************** + +Minor change that may impact users: + +DynamicDocument fields are now stored in creation order after any declared +fields. Previously they were stored alphabetically. + + 0.7 to 0.8 ********** diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index ca154a2..04b0c05 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -42,6 +42,9 @@ class BaseDocument(object): # Combine positional arguments with named arguments. # We only want named arguments. field = iter(self._fields_ordered) + # If its an automatic id field then skip to the first defined field + if self._auto_id_field: + next(field) for value in args: name = next(field) if name in values: @@ -51,6 +54,7 @@ class BaseDocument(object): signals.pre_init.send(self.__class__, document=self, values=values) self._data = {} + self._dynamic_fields = SON() # Assign default values to instance for key, field in self._fields.iteritems(): @@ -61,7 +65,6 @@ class BaseDocument(object): # Set passed values after initialisation if self._dynamic: - self._dynamic_fields = {} dynamic_data = {} for key, value in values.iteritems(): if key in self._fields or key == '_id': @@ -116,6 +119,7 @@ class BaseDocument(object): field = DynamicField(db_field=name) field.name = name self._dynamic_fields[name] = field + self._fields_ordered += (name,) if not name.startswith('_'): value = self.__expand_dynamic_values(name, value) @@ -142,7 +146,8 @@ class BaseDocument(object): def __getstate__(self): data = {} - for k in ('_changed_fields', '_initialised', '_created'): + for k in ('_changed_fields', '_initialised', '_created', + '_dynamic_fields', '_fields_ordered'): if hasattr(self, k): data[k] = getattr(self, k) data['_data'] = self.to_mongo() @@ -151,21 +156,21 @@ class 
BaseDocument(object): def __setstate__(self, data): if isinstance(data["_data"], SON): data["_data"] = self.__class__._from_son(data["_data"])._data - for k in ('_changed_fields', '_initialised', '_created', '_data'): + for k in ('_changed_fields', '_initialised', '_created', '_data', + '_fields_ordered', '_dynamic_fields'): if k in data: setattr(self, k, data[k]) + for k in data.get('_dynamic_fields').keys(): + setattr(self, k, data["_data"].get(k)) def __iter__(self): - if 'id' in self._fields and 'id' not in self._fields_ordered: - return iter(('id', ) + self._fields_ordered) - return iter(self._fields_ordered) def __getitem__(self, name): """Dictionary-style field access, return a field's value if present. """ try: - if name in self._fields: + if name in self._fields_ordered: return getattr(self, name) except AttributeError: pass @@ -241,6 +246,8 @@ class BaseDocument(object): for field_name in self: value = self._data.get(field_name, None) field = self._fields.get(field_name) + if field is None and self._dynamic: + field = self._dynamic_fields.get(field_name) if value is not None: value = field.to_mongo(value) @@ -265,15 +272,6 @@ class BaseDocument(object): not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)): data.pop('_cls') - if not self._dynamic: - return data - - # Sort dynamic fields by key - dynamic_fields = sorted(self._dynamic_fields.iteritems(), - key=operator.itemgetter(0)) - for name, field in dynamic_fields: - data[name] = field.to_mongo(self._data.get(name, None)) - return data def validate(self, clean=True): @@ -289,11 +287,8 @@ class BaseDocument(object): errors[NON_FIELD_ERRORS] = error # Get a list of tuples of field names and their current values - fields = [(field, self._data.get(name)) - for name, field in self._fields.items()] - if self._dynamic: - fields += [(field, self._data.get(name)) - for name, field in self._dynamic_fields.items()] + fields = [(self._fields.get(name, self._dynamic_fields.get(name)), + self._data.get(name)) 
for name in self._fields_ordered] EmbeddedDocumentField = _import_class("EmbeddedDocumentField") GenericEmbeddedDocumentField = _import_class("GenericEmbeddedDocumentField") @@ -406,11 +401,7 @@ class BaseDocument(object): return _changed_fields inspected.add(self.id) - field_list = self._fields.copy() - if self._dynamic: - field_list.update(self._dynamic_fields) - - for field_name in field_list: + for field_name in self._fields_ordered: db_field_name = self._db_field_map.get(field_name, field_name) key = '%s.' % db_field_name @@ -450,7 +441,6 @@ class BaseDocument(object): doc = self.to_mongo() set_fields = self._get_changed_fields() - set_data = {} unset_data = {} parts = [] if hasattr(self, '_changed_fields'): diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index 444d9a2..ff5afdd 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -91,11 +91,12 @@ class DocumentMetaclass(type): attrs['_fields'] = doc_fields attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k)) for k, v in doc_fields.iteritems()]) + attrs['_reverse_db_field_map'] = dict( + (v, k) for k, v in attrs['_db_field_map'].iteritems()) + attrs['_fields_ordered'] = tuple(i[1] for i in sorted( (v.creation_counter, v.name) for v in doc_fields.itervalues())) - attrs['_reverse_db_field_map'] = dict( - (v, k) for k, v in attrs['_db_field_map'].iteritems()) # # Set document hierarchy @@ -358,12 +359,18 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): new_class.id = field # Set primary key if not defined by the document + new_class._auto_id_field = False if not new_class._meta.get('id_field'): + new_class._auto_id_field = True new_class._meta['id_field'] = 'id' new_class._fields['id'] = ObjectIdField(db_field='_id') new_class._fields['id'].name = 'id' new_class.id = new_class._fields['id'] + # Prepend id field to _fields_ordered + if 'id' in new_class._fields and 'id' not in new_class._fields_ordered: + new_class._fields_ordered = 
('id', ) + new_class._fields_ordered + # Merge in exceptions with parent hierarchy exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) module = attrs.get('__module__') diff --git a/mongoengine/document.py b/mongoengine/document.py index ab8fa2a..d0c9e61 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -460,11 +460,8 @@ class Document(BaseDocument): else: msg = "Reloaded document has been deleted" raise OperationError(msg) - for field in self._fields: + for field in self._fields_ordered: setattr(self, field, self._reload(field, obj[field])) - if self._dynamic: - for name in self._dynamic_fields.keys(): - setattr(self, name, self._reload(name, obj._data[name])) self._changed_fields = obj._changed_fields self._created = False return obj diff --git a/tests/document/delta.py b/tests/document/delta.py index 16ab609..3656d9e 100644 --- a/tests/document/delta.py +++ b/tests/document/delta.py @@ -3,6 +3,7 @@ import sys sys.path[0:0] = [""] import unittest +from bson import SON from mongoengine import * from mongoengine.connection import get_db @@ -613,13 +614,13 @@ class DeltaTest(unittest.TestCase): Person.drop_collection() p = Person(name="James", age=34) - self.assertEqual(p._delta(), ({'age': 34, 'name': 'James', - '_cls': 'Person'}, {})) + self.assertEqual(p._delta(), ( + SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {})) p.doc = 123 del(p.doc) - self.assertEqual(p._delta(), ({'age': 34, 'name': 'James', - '_cls': 'Person'}, {'doc': 1})) + self.assertEqual(p._delta(), ( + SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {})) p = Person() p.name = "Dean" @@ -631,14 +632,14 @@ class DeltaTest(unittest.TestCase): self.assertEqual(p._get_changed_fields(), ['age']) self.assertEqual(p._delta(), ({'age': 24}, {})) - p = self.Person.objects(age=22).get() + p = Person.objects(age=22).get() p.age = 24 self.assertEqual(p.age, 24) self.assertEqual(p._get_changed_fields(), ['age']) self.assertEqual(p._delta(), ({'age': 24}, {})) 
p.save() - self.assertEqual(1, self.Person.objects(age=24).count()) + self.assertEqual(1, Person.objects(age=24).count()) def test_dynamic_delta(self): diff --git a/tests/document/instance.py b/tests/document/instance.py index 81734aa..e85c9d8 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -10,7 +10,8 @@ import uuid from datetime import datetime from bson import DBRef -from tests.fixtures import PickleEmbedded, PickleTest, PickleSignalsTest +from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest, + PickleDyanmicEmbedded, PickleDynamicTest) from mongoengine import * from mongoengine.errors import (NotRegistered, InvalidDocumentError, @@ -1827,6 +1828,29 @@ class InstanceTest(unittest.TestCase): self.assertEqual(pickle_doc.string, "Two") self.assertEqual(pickle_doc.lists, ["1", "2", "3"]) + def test_dynamic_document_pickle(self): + + pickle_doc = PickleDynamicTest(name="test", number=1, string="One", lists=['1', '2']) + pickle_doc.embedded = PickleDyanmicEmbedded(foo="Bar") + pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved + + pickle_doc.save() + + pickled_doc = pickle.dumps(pickle_doc) + resurrected = pickle.loads(pickled_doc) + + self.assertEqual(resurrected, pickle_doc) + self.assertEqual(resurrected._fields_ordered, + pickle_doc._fields_ordered) + self.assertEqual(resurrected._dynamic_fields.keys(), + pickle_doc._dynamic_fields.keys()) + + self.assertEqual(resurrected.embedded, pickle_doc.embedded) + self.assertEqual(resurrected.embedded._fields_ordered, + pickle_doc.embedded._fields_ordered) + self.assertEqual(resurrected.embedded._dynamic_fields.keys(), + pickle_doc.embedded._dynamic_fields.keys()) + def test_picklable_on_signals(self): pickle_doc = PickleSignalsTest(number=1, string="One", lists=['1', '2']) pickle_doc.embedded = PickleEmbedded() @@ -2289,6 +2313,16 @@ class InstanceTest(unittest.TestCase): self.assertEqual(person.name, "Test User") 
self.assertEqual(person.age, 42) + def test_mixed_creation_dynamic(self): + """Ensure that document may be created using mixed arguments. + """ + class Person(DynamicDocument): + name = StringField() + + person = Person("Test User", age=42) + self.assertEqual(person.name, "Test User") + self.assertEqual(person.age, 42) + def test_bad_mixed_creation(self): """Ensure that document gives correct error when duplicating arguments """ diff --git a/tests/fixtures.py b/tests/fixtures.py index e207044..f1344d7 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -17,6 +17,14 @@ class PickleTest(Document): photo = FileField() +class PickleDyanmicEmbedded(DynamicEmbeddedDocument): + date = DateTimeField(default=datetime.now) + + +class PickleDynamicTest(DynamicDocument): + number = IntField() + + class PickleSignalsTest(Document): number = IntField() string = StringField(choices=(('One', '1'), ('Two', '2'))) From fa83fba6374d0da5774157012b20b74310c37984 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 10 Jul 2013 11:18:49 +0000 Subject: [PATCH 396/464] Reload uses shard_key if applicable (#384) --- docs/changelog.rst | 1 + mongoengine/document.py | 4 +--- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 78deafb..42fd9bb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.3 ================ +- Reload uses shard_key if applicable (#384) - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) - Fixed pickling dynamic documents `_dynamic_fields` (#387) - Fixed ListField setslice and delslice dirty tracking (#390) diff --git a/mongoengine/document.py b/mongoengine/document.py index d0c9e61..c9901a2 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -266,7 +266,6 @@ class Document(BaseDocument): upsert=True, **write_concern) created = is_new_object(last_error) - if cascade is None: cascade = self._meta.get('cascade', 
False) or cascade_kwargs is not None @@ -451,9 +450,8 @@ class Document(BaseDocument): .. versionadded:: 0.1.2 .. versionchanged:: 0.6 Now chainable """ - id_field = self._meta['id_field'] obj = self._qs.read_preference(ReadPreference.PRIMARY).filter( - **{id_field: self[id_field]}).limit(1).select_related(max_depth=max_depth) + **self._object_key).limit(1).select_related(max_depth=max_depth) if obj: obj = obj[0] From 4209d61b1368717047927cee40a9d64768def93a Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 10 Jul 2013 12:49:19 +0000 Subject: [PATCH 397/464] Document.select_related() now respects `db_alias` (#377) --- docs/changelog.rst | 1 + mongoengine/document.py | 4 ++-- tests/fields/fields.py | 31 +++++++++++++++++++++++++++ tests/test_dereference.py | 45 +++++++++++++++++---------------------- 4 files changed, 54 insertions(+), 27 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 42fd9bb..926c6cb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.3 ================ +- Document.select_related() now respects `db_alias` (#377) - Reload uses shard_key if applicable (#384) - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) - Fixed pickling dynamic documents `_dynamic_fields` (#387) diff --git a/mongoengine/document.py b/mongoengine/document.py index c9901a2..e331aa1 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -440,8 +440,8 @@ class Document(BaseDocument): .. 
versionadded:: 0.5 """ - import dereference - self._data = dereference.DeReference()(self._data, max_depth) + DeReference = _import_class('DeReference') + DeReference()([self], max_depth + 1) return self def reload(self, max_depth=1): diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 8f02499..b3d8d52 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -2474,6 +2474,37 @@ class FieldTest(unittest.TestCase): user = User(email='me@example.com') self.assertTrue(user.validate() is None) + def test_tuples_as_tuples(self): + """ + Ensure that tuples remain tuples when they are + inside a ComplexBaseField + """ + from mongoengine.base import BaseField + + class EnumField(BaseField): + + def __init__(self, **kwargs): + super(EnumField, self).__init__(**kwargs) + + def to_mongo(self, value): + return value + + def to_python(self, value): + return tuple(value) + + class TestDoc(Document): + items = ListField(EnumField()) + + TestDoc.drop_collection() + tuples = [(100, 'Testing')] + doc = TestDoc() + doc.items = tuples + doc.save() + x = TestDoc.objects().get() + self.assertTrue(x is not None) + self.assertTrue(len(x.items) == 1) + self.assertTrue(tuple(x.items[0]) in tuples) + self.assertTrue(x.items[0] in tuples) if __name__ == '__main__': unittest.main() diff --git a/tests/test_dereference.py b/tests/test_dereference.py index e146963..db9868a 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -1121,37 +1121,32 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 2) - def test_tuples_as_tuples(self): - """ - Ensure that tuples remain tuples when they are - inside a ComplexBaseField - """ - from mongoengine.base import BaseField + def test_objectid_reference_across_databases(self): + # mongoenginetest - Is default connection alias from setUp() + # Register Aliases + register_connection('testdb-1', 'mongoenginetest2') - class EnumField(BaseField): + class User(Document): + name = StringField() + meta = {"db_alias": 
"testdb-1"} - def __init__(self, **kwargs): - super(EnumField, self).__init__(**kwargs) + class Book(Document): + name = StringField() + author = ReferenceField(User) - def to_mongo(self, value): - return value + # Drops + User.drop_collection() + Book.drop_collection() - def to_python(self, value): - return tuple(value) + user = User(name="Ross").save() + Book(name="MongoEngine for pros", author=user).save() - class TestDoc(Document): - items = ListField(EnumField()) + # Can't use query_counter across databases - so test the _data object + book = Book.objects.first() + self.assertFalse(isinstance(book._data['author'], User)) - TestDoc.drop_collection() - tuples = [(100, 'Testing')] - doc = TestDoc() - doc.items = tuples - doc.save() - x = TestDoc.objects().get() - self.assertTrue(x is not None) - self.assertTrue(len(x.items) == 1) - self.assertTrue(tuple(x.items[0]) in tuples) - self.assertTrue(x.items[0] in tuples) + book.select_related() + self.assertTrue(isinstance(book._data['author'], User)) def test_non_ascii_pk(self): """ From f34e8a0ff6ca84d1afd8de0bf78f7696c0c7aed2 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 10 Jul 2013 13:38:53 +0000 Subject: [PATCH 398/464] Fixed as_pymongo to return the id (#386) --- docs/changelog.rst | 1 + mongoengine/queryset/queryset.py | 3 ++- tests/queryset/queryset.py | 3 +++ 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 926c6cb..cbc2c94 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.3 ================ +- Fixed as_pymongo to return the id (#386) - Document.select_related() now respects `db_alias` (#377) - Reload uses shard_key if applicable (#384) - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index ded8d5e..c040e39 100644 --- a/mongoengine/queryset/queryset.py +++ 
b/mongoengine/queryset/queryset.py @@ -1423,7 +1423,8 @@ class QuerySet(object): # used. If not, handle all fields. if not getattr(self, '__as_pymongo_fields', None): self.__as_pymongo_fields = [] - for field in self._loaded_fields.fields - set(['_cls', '_id']): + + for field in self._loaded_fields.fields - set(['_cls']): self.__as_pymongo_fields.append(field) while '.' in field: field, _ = field.rsplit('.', 1) diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 4d91b55..566c14e 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3227,6 +3227,9 @@ class QuerySetTest(unittest.TestCase): User(name="Bob Dole", age=89, price=Decimal('1.11')).save() User(name="Barack Obama", age=51, price=Decimal('2.22')).save() + results = User.objects.only('id', 'name').as_pymongo() + self.assertEqual(results[0].keys(), ['_id', 'name']) + users = User.objects.only('name', 'price').as_pymongo() results = list(users) self.assertTrue(isinstance(results[0], dict)) From 8131f0a752d3e1b48713afe90301d77f224b9ace Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 10 Jul 2013 13:53:18 +0000 Subject: [PATCH 399/464] Fixed sum and average mapreduce dot notation support (#375, #376) --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index cbc2c94..8bbc4b4 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.3 ================ +- Fixed sum and average mapreduce dot notation support (#375, #376) - Fixed as_pymongo to return the id (#386) - Document.select_related() now respects `db_alias` (#377) - Reload uses shard_key if applicable (#384) From daeecef59e47a5d23f9774f4dab70472b35465f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Wed, 10 Jul 2013 10:59:41 -0300 Subject: [PATCH 400/464] Update fields.py Typo in documentation for DecimalField --- mongoengine/fields.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 
8 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 451f7ac..7f24be2 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -279,14 +279,14 @@ class DecimalField(BaseField): :param precision: Number of decimal places to store. :param rounding: The rounding rule from the python decimal libary: - - decimial.ROUND_CEILING (towards Infinity) - - decimial.ROUND_DOWN (towards zero) - - decimial.ROUND_FLOOR (towards -Infinity) - - decimial.ROUND_HALF_DOWN (to nearest with ties going towards zero) - - decimial.ROUND_HALF_EVEN (to nearest with ties going to nearest even integer) - - decimial.ROUND_HALF_UP (to nearest with ties going away from zero) - - decimial.ROUND_UP (away from zero) - - decimial.ROUND_05UP (away from zero if last digit after rounding towards zero would have been 0 or 5; otherwise towards zero) + - decimal.ROUND_CEILING (towards Infinity) + - decimal.ROUND_DOWN (towards zero) + - decimal.ROUND_FLOOR (towards -Infinity) + - decimal.ROUND_HALF_DOWN (to nearest with ties going towards zero) + - decimal.ROUND_HALF_EVEN (to nearest with ties going to nearest even integer) + - decimal.ROUND_HALF_UP (to nearest with ties going away from zero) + - decimal.ROUND_UP (away from zero) + - decimal.ROUND_05UP (away from zero if last digit after rounding towards zero would have been 0 or 5; otherwise towards zero) Defaults to: ``decimal.ROUND_HALF_UP`` From 634b874c469e9a91f199d74c4f71464ed1d20da1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 10 Jul 2013 16:16:50 +0000 Subject: [PATCH 401/464] Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365) --- docs/apireference.rst | 5 + docs/changelog.rst | 1 + docs/guide/querying.rst | 4 +- mongoengine/queryset/base.py | 1479 ++++++++++++++++++++++++++++ mongoengine/queryset/queryset.py | 1545 ++---------------------------- tests/queryset/queryset.py | 30 +- 6 files changed, 1586 insertions(+), 1478 deletions(-) create mode 100644 
mongoengine/queryset/base.py diff --git a/docs/apireference.rst b/docs/apireference.rst index d062727..774d3b8 100644 --- a/docs/apireference.rst +++ b/docs/apireference.rst @@ -49,6 +49,11 @@ Querying .. automethod:: mongoengine.queryset.QuerySet.__call__ +.. autoclass:: mongoengine.queryset.QuerySetNoCache + :members: + + .. automethod:: mongoengine.queryset.QuerySetNoCache.__call__ + .. autofunction:: mongoengine.queryset.queryset_manager Fields diff --git a/docs/changelog.rst b/docs/changelog.rst index 8bbc4b4..d875040 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.3 ================ +- Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365) - Fixed sum and average mapreduce dot notation support (#375, #376) - Fixed as_pymongo to return the id (#386) - Document.select_related() now respects `db_alias` (#377) diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 1350130..5fd0360 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -16,7 +16,9 @@ fetch documents from the database:: .. note:: As of MongoEngine 0.8 the querysets utilise a local cache. So iterating - it multiple times will only cause a single query. + it multiple times will only cause a single query. If this is not the + desired behavour you can call :class:`~mongoengine.QuerySet.no_cache` to + return a non-caching queryset. 
Filtering queries ================= diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py new file mode 100644 index 0000000..0b2898f --- /dev/null +++ b/mongoengine/queryset/base.py @@ -0,0 +1,1479 @@ +from __future__ import absolute_import + +import copy +import itertools +import operator +import pprint +import re +import warnings + +from bson.code import Code +from bson import json_util +import pymongo +from pymongo.common import validate_read_preference + +from mongoengine import signals +from mongoengine.common import _import_class +from mongoengine.errors import (OperationError, NotUniqueError, + InvalidQueryError) + +from mongoengine.queryset import transform +from mongoengine.queryset.field_list import QueryFieldList +from mongoengine.queryset.visitor import Q, QNode + + +__all__ = ('BaseQuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL') + +# Delete rules +DO_NOTHING = 0 +NULLIFY = 1 +CASCADE = 2 +DENY = 3 +PULL = 4 + +RE_TYPE = type(re.compile('')) + + +class BaseQuerySet(object): + """A set of results returned from a query. Wraps a MongoDB cursor, + providing :class:`~mongoengine.Document` objects as the results. 
+ """ + __dereference = False + _auto_dereference = True + + def __init__(self, document, collection): + self._document = document + self._collection_obj = collection + self._mongo_query = None + self._query_obj = Q() + self._initial_query = {} + self._where_clause = None + self._loaded_fields = QueryFieldList() + self._ordering = [] + self._snapshot = False + self._timeout = True + self._class_check = True + self._slave_okay = False + self._read_preference = None + self._iter = False + self._scalar = [] + self._none = False + self._as_pymongo = False + self._as_pymongo_coerce = False + self._len = None + + # If inheritance is allowed, only return instances and instances of + # subclasses of the class being used + if document._meta.get('allow_inheritance') is True: + if len(self._document._subclasses) == 1: + self._initial_query = {"_cls": self._document._subclasses[0]} + else: + self._initial_query = {"_cls": {"$in": self._document._subclasses}} + self._loaded_fields = QueryFieldList(always_include=['_cls']) + self._cursor_obj = None + self._limit = None + self._skip = None + self._hint = -1 # Using -1 as None is a valid value for hint + + def __call__(self, q_obj=None, class_check=True, slave_okay=False, + read_preference=None, **query): + """Filter the selected documents by calling the + :class:`~mongoengine.queryset.QuerySet` with a query. + + :param q_obj: a :class:`~mongoengine.queryset.Q` object to be used in + the query; the :class:`~mongoengine.queryset.QuerySet` is filtered + multiple times with different :class:`~mongoengine.queryset.Q` + objects, only the last one will be used + :param class_check: If set to False bypass class name check when + querying collection + :param slave_okay: if True, allows this query to be run against a + replica secondary. + :params read_preference: if set, overrides connection-level + read_preference from `ReplicaSetConnection`. 
+ :param query: Django-style query keyword arguments + """ + query = Q(**query) + if q_obj: + # make sure proper query object is passed + if not isinstance(q_obj, QNode): + msg = ("Not a query object: %s. " + "Did you intend to use key=value?" % q_obj) + raise InvalidQueryError(msg) + query &= q_obj + + if read_preference is None: + queryset = self.clone() + else: + # Use the clone provided when setting read_preference + queryset = self.read_preference(read_preference) + + queryset._query_obj &= query + queryset._mongo_query = None + queryset._cursor_obj = None + queryset._class_check = class_check + + return queryset + + def __getitem__(self, key): + """Support skip and limit using getitem and slicing syntax. + """ + queryset = self.clone() + + # Slice provided + if isinstance(key, slice): + try: + queryset._cursor_obj = queryset._cursor[key] + queryset._skip, queryset._limit = key.start, key.stop + if key.start and key.stop: + queryset._limit = key.stop - key.start + except IndexError, err: + # PyMongo raises an error if key.start == key.stop, catch it, + # bin it, kill it. 
+ start = key.start or 0 + if start >= 0 and key.stop >= 0 and key.step is None: + if start == key.stop: + queryset.limit(0) + queryset._skip = key.start + queryset._limit = key.stop - start + return queryset + raise err + # Allow further QuerySet modifications to be performed + return queryset + # Integer index provided + elif isinstance(key, int): + if queryset._scalar: + return queryset._get_scalar( + queryset._document._from_son(queryset._cursor[key], + _auto_dereference=self._auto_dereference)) + if queryset._as_pymongo: + return queryset._get_as_pymongo(queryset._cursor.next()) + return queryset._document._from_son(queryset._cursor[key], + _auto_dereference=self._auto_dereference) + raise AttributeError + + def __iter__(self): + raise NotImplementedError + + # Core functions + + def all(self): + """Returns all documents.""" + return self.__call__() + + def filter(self, *q_objs, **query): + """An alias of :meth:`~mongoengine.queryset.QuerySet.__call__` + """ + return self.__call__(*q_objs, **query) + + def get(self, *q_objs, **query): + """Retrieve the the matching object raising + :class:`~mongoengine.queryset.MultipleObjectsReturned` or + `DocumentName.MultipleObjectsReturned` exception if multiple results + and :class:`~mongoengine.queryset.DoesNotExist` or + `DocumentName.DoesNotExist` if no results are found. + + .. versionadded:: 0.3 + """ + queryset = self.clone() + queryset = queryset.limit(2) + queryset = queryset.filter(*q_objs, **query) + + try: + result = queryset.next() + except StopIteration: + msg = ("%s matching query does not exist." + % queryset._document._class_name) + raise queryset._document.DoesNotExist(msg) + try: + queryset.next() + except StopIteration: + return result + + queryset.rewind() + message = u'%d items returned, instead of 1' % queryset.count() + raise queryset._document.MultipleObjectsReturned(message) + + def create(self, **kwargs): + """Create new object. Returns the saved object instance. + + .. 
versionadded:: 0.4 + """ + return self._document(**kwargs).save() + + def get_or_create(self, write_concern=None, auto_save=True, + *q_objs, **query): + """Retrieve unique object or create, if it doesn't exist. Returns a + tuple of ``(object, created)``, where ``object`` is the retrieved or + created object and ``created`` is a boolean specifying whether a new + object was created. Raises + :class:`~mongoengine.queryset.MultipleObjectsReturned` or + `DocumentName.MultipleObjectsReturned` if multiple results are found. + A new document will be created if the document doesn't exists; a + dictionary of default values for the new document may be provided as a + keyword argument called :attr:`defaults`. + + .. note:: This requires two separate operations and therefore a + race condition exists. Because there are no transactions in + mongoDB other approaches should be investigated, to ensure you + don't accidently duplicate data when using this method. This is + now scheduled to be removed before 1.0 + + :param write_concern: optional extra keyword arguments used if we + have to create a new document. + Passes any write_concern onto :meth:`~mongoengine.Document.save` + + :param auto_save: if the object is to be saved automatically if + not found. + + .. deprecated:: 0.8 + .. versionchanged:: 0.6 - added `auto_save` + .. versionadded:: 0.3 + """ + msg = ("get_or_create is scheduled to be deprecated. The approach is " + "flawed without transactions. Upserts should be preferred.") + warnings.warn(msg, DeprecationWarning) + + defaults = query.get('defaults', {}) + if 'defaults' in query: + del query['defaults'] + + try: + doc = self.get(*q_objs, **query) + return doc, False + except self._document.DoesNotExist: + query.update(defaults) + doc = self._document(**query) + + if auto_save: + doc.save(write_concern=write_concern) + return doc, True + + def first(self): + """Retrieve the first object matching the query. 
+ """ + queryset = self.clone() + try: + result = queryset[0] + except IndexError: + result = None + return result + + def insert(self, doc_or_docs, load_bulk=True, write_concern=None): + """bulk insert documents + + :param docs_or_doc: a document or list of documents to be inserted + :param load_bulk (optional): If True returns the list of document + instances + :param write_concern: Extra keyword arguments are passed down to + :meth:`~pymongo.collection.Collection.insert` + which will be used as options for the resultant + ``getLastError`` command. For example, + ``insert(..., {w: 2, fsync: True})`` will wait until at least + two servers have recorded the write and will force an fsync on + each server being written to. + + By default returns document instances, set ``load_bulk`` to False to + return just ``ObjectIds`` + + .. versionadded:: 0.5 + """ + Document = _import_class('Document') + + if write_concern is None: + write_concern = {} + + docs = doc_or_docs + return_one = False + if isinstance(docs, Document) or issubclass(docs.__class__, Document): + return_one = True + docs = [docs] + + raw = [] + for doc in docs: + if not isinstance(doc, self._document): + msg = ("Some documents inserted aren't instances of %s" + % str(self._document)) + raise OperationError(msg) + if doc.pk and not doc._created: + msg = "Some documents have ObjectIds use doc.update() instead" + raise OperationError(msg) + raw.append(doc.to_mongo()) + + signals.pre_bulk_insert.send(self._document, documents=docs) + try: + ids = self._collection.insert(raw, **write_concern) + except pymongo.errors.OperationFailure, err: + message = 'Could not save document (%s)' + if re.match('^E1100[01] duplicate key', unicode(err)): + # E11000 - duplicate key error index + # E11001 - duplicate key on update + message = u'Tried to save duplicate unique keys (%s)' + raise NotUniqueError(message % unicode(err)) + raise OperationError(message % unicode(err)) + + if not load_bulk: + 
signals.post_bulk_insert.send( + self._document, documents=docs, loaded=False) + return return_one and ids[0] or ids + + documents = self.in_bulk(ids) + results = [] + for obj_id in ids: + results.append(documents.get(obj_id)) + signals.post_bulk_insert.send( + self._document, documents=results, loaded=True) + return return_one and results[0] or results + + def count(self, with_limit_and_skip=True): + """Count the selected elements in the query. + + :param with_limit_and_skip (optional): take any :meth:`limit` or + :meth:`skip` that has been applied to this cursor into account when + getting the count + """ + if self._limit == 0: + return 0 + if with_limit_and_skip and self._len is not None: + return self._len + count = self._cursor.count(with_limit_and_skip=with_limit_and_skip) + if with_limit_and_skip: + self._len = count + return count + + def delete(self, write_concern=None, _from_doc_delete=False): + """Delete the documents matched by the query. + + :param write_concern: Extra keyword arguments are passed down which + will be used as options for the resultant + ``getLastError`` command. For example, + ``save(..., write_concern={w: 2, fsync: True}, ...)`` will + wait until at least two servers have recorded the write and + will force an fsync on the primary server. + :param _from_doc_delete: True when called from document delete therefore + signals will have been triggered so don't loop. 
+ """ + queryset = self.clone() + doc = queryset._document + + if write_concern is None: + write_concern = {} + + # Handle deletes where skips or limits have been applied or + # there is an untriggered delete signal + has_delete_signal = signals.signals_available and ( + signals.pre_delete.has_receivers_for(self._document) or + signals.post_delete.has_receivers_for(self._document)) + + call_document_delete = (queryset._skip or queryset._limit or + has_delete_signal) and not _from_doc_delete + + if call_document_delete: + for doc in queryset: + doc.delete(write_concern=write_concern) + return + + delete_rules = doc._meta.get('delete_rules') or {} + # Check for DENY rules before actually deleting/nullifying any other + # references + for rule_entry in delete_rules: + document_cls, field_name = rule_entry + rule = doc._meta['delete_rules'][rule_entry] + if rule == DENY and document_cls.objects( + **{field_name + '__in': self}).count() > 0: + msg = ("Could not delete document (%s.%s refers to it)" + % (document_cls.__name__, field_name)) + raise OperationError(msg) + + for rule_entry in delete_rules: + document_cls, field_name = rule_entry + rule = doc._meta['delete_rules'][rule_entry] + if rule == CASCADE: + ref_q = document_cls.objects(**{field_name + '__in': self}) + ref_q_count = ref_q.count() + if (doc != document_cls and ref_q_count > 0 + or (doc == document_cls and ref_q_count > 0)): + ref_q.delete(write_concern=write_concern) + elif rule == NULLIFY: + document_cls.objects(**{field_name + '__in': self}).update( + write_concern=write_concern, **{'unset__%s' % field_name: 1}) + elif rule == PULL: + document_cls.objects(**{field_name + '__in': self}).update( + write_concern=write_concern, + **{'pull_all__%s' % field_name: self}) + + queryset._collection.remove(queryset._query, write_concern=write_concern) + + def update(self, upsert=False, multi=True, write_concern=None, + full_result=False, **update): + """Perform an atomic update on the fields matched by the 
query. + + :param upsert: Any existing document with that "_id" is overwritten. + :param multi: Update multiple documents. + :param write_concern: Extra keyword arguments are passed down which + will be used as options for the resultant + ``getLastError`` command. For example, + ``save(..., write_concern={w: 2, fsync: True}, ...)`` will + wait until at least two servers have recorded the write and + will force an fsync on the primary server. + :param full_result: Return the full result rather than just the number + updated. + :param update: Django-style update keyword arguments + + .. versionadded:: 0.2 + """ + if not update and not upsert: + raise OperationError("No update parameters, would remove data") + + if write_concern is None: + write_concern = {} + + queryset = self.clone() + query = queryset._query + update = transform.update(queryset._document, **update) + + # If doing an atomic upsert on an inheritable class + # then ensure we add _cls to the update operation + if upsert and '_cls' in query: + if '$set' in update: + update["$set"]["_cls"] = queryset._document._class_name + else: + update["$set"] = {"_cls": queryset._document._class_name} + try: + result = queryset._collection.update(query, update, multi=multi, + upsert=upsert, **write_concern) + if full_result: + return result + elif result: + return result['n'] + except pymongo.errors.OperationFailure, err: + if unicode(err) == u'multi not coded yet': + message = u'update() method requires MongoDB 1.1.3+' + raise OperationError(message) + raise OperationError(u'Update failed (%s)' % unicode(err)) + + def update_one(self, upsert=False, write_concern=None, **update): + """Perform an atomic update on first field matched by the query. + + :param upsert: Any existing document with that "_id" is overwritten. + :param write_concern: Extra keyword arguments are passed down which + will be used as options for the resultant + ``getLastError`` command. 
For example, + ``save(..., write_concern={w: 2, fsync: True}, ...)`` will + wait until at least two servers have recorded the write and + will force an fsync on the primary server. + :param update: Django-style update keyword arguments + + .. versionadded:: 0.2 + """ + return self.update( + upsert=upsert, multi=False, write_concern=write_concern, **update) + + def with_id(self, object_id): + """Retrieve the object matching the id provided. Uses `object_id` only + and raises InvalidQueryError if a filter has been applied. Returns + `None` if no document exists with that id. + + :param object_id: the value for the id of the document to look up + + .. versionchanged:: 0.6 Raises InvalidQueryError if filter has been set + """ + queryset = self.clone() + if not queryset._query_obj.empty: + msg = "Cannot use a filter whilst using `with_id`" + raise InvalidQueryError(msg) + return queryset.filter(pk=object_id).first() + + def in_bulk(self, object_ids): + """Retrieve a set of documents by their ids. + + :param object_ids: a list or tuple of ``ObjectId``\ s + :rtype: dict of ObjectIds as keys and collection-specific + Document subclasses as values. + + .. 
versionadded:: 0.3 + """ + doc_map = {} + + docs = self._collection.find({'_id': {'$in': object_ids}}, + **self._cursor_args) + if self._scalar: + for doc in docs: + doc_map[doc['_id']] = self._get_scalar( + self._document._from_son(doc)) + elif self._as_pymongo: + for doc in docs: + doc_map[doc['_id']] = self._get_as_pymongo(doc) + else: + for doc in docs: + doc_map[doc['_id']] = self._document._from_son(doc) + + return doc_map + + def none(self): + """Helper that just returns a list""" + queryset = self.clone() + queryset._none = True + return queryset + + def no_sub_classes(self): + """ + Only return instances of this document and not any inherited documents + """ + if self._document._meta.get('allow_inheritance') is True: + self._initial_query = {"_cls": self._document._class_name} + + return self + + def clone(self): + """Creates a copy of the current + :class:`~mongoengine.queryset.QuerySet` + + .. versionadded:: 0.5 + """ + return self.clone_into(self.__class__(self._document, self._collection_obj)) + + def clone_into(self, cls): + """Creates a copy of the current + :class:`~mongoengine.queryset.base.BaseQuerySet` into another child class + """ + if not isinstance(cls, BaseQuerySet): + raise OperationError('%s is not a subclass of BaseQuerySet' % cls.__name__) + + copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj', + '_where_clause', '_loaded_fields', '_ordering', '_snapshot', + '_timeout', '_class_check', '_slave_okay', '_read_preference', + '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce', + '_limit', '_skip', '_hint', '_auto_dereference') + + for prop in copy_props: + val = getattr(self, prop) + setattr(cls, prop, copy.copy(val)) + + if self._cursor_obj: + cls._cursor_obj = self._cursor_obj.clone() + + return cls + + def select_related(self, max_depth=1): + """Handles dereferencing of :class:`~bson.dbref.DBRef` objects or + :class:`~bson.object_id.ObjectId` a maximum depth in order to cut down + the number queries to mongodb. 
+ + .. versionadded:: 0.5 + """ + # Make select related work the same for querysets + max_depth += 1 + queryset = self.clone() + return queryset._dereference(queryset, max_depth=max_depth) + + def limit(self, n): + """Limit the number of returned documents to `n`. This may also be + achieved using array-slicing syntax (e.g. ``User.objects[:5]``). + + :param n: the maximum number of objects to return + """ + queryset = self.clone() + if n == 0: + queryset._cursor.limit(1) + else: + queryset._cursor.limit(n) + queryset._limit = n + # Return self to allow chaining + return queryset + + def skip(self, n): + """Skip `n` documents before returning the results. This may also be + achieved using array-slicing syntax (e.g. ``User.objects[5:]``). + + :param n: the number of objects to skip before returning results + """ + queryset = self.clone() + queryset._cursor.skip(n) + queryset._skip = n + return queryset + + def hint(self, index=None): + """Added 'hint' support, telling Mongo the proper index to use for the + query. + + Judicious use of hints can greatly improve query performance. When + doing a query on multiple fields (at least one of which is indexed) + pass the indexed field as a hint to the query. + + Hinting will not do anything if the corresponding index does not exist. + The last hint applied to this cursor takes precedence over all others. + + .. versionadded:: 0.5 + """ + queryset = self.clone() + queryset._cursor.hint(index) + queryset._hint = index + return queryset + + def distinct(self, field): + """Return a list of distinct values for a given field. + + :param field: the field to select distinct values from + + .. note:: This is a command and won't take ordering or limit into + account. + + .. versionadded:: 0.4 + .. versionchanged:: 0.5 - Fixed handling references + .. 
versionchanged:: 0.6 - Improved db_field refrence handling + """ + queryset = self.clone() + try: + field = self._fields_to_dbfields([field]).pop() + finally: + return self._dereference(queryset._cursor.distinct(field), 1, + name=field, instance=self._document) + + def only(self, *fields): + """Load only a subset of this document's fields. :: + + post = BlogPost.objects(...).only("title", "author.name") + + .. note :: `only()` is chainable and will perform a union :: + So with the following it will fetch both: `title` and `author.name`:: + + post = BlogPost.objects.only("title").only("author.name") + + :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any + field filters. + + :param fields: fields to include + + .. versionadded:: 0.3 + .. versionchanged:: 0.5 - Added subfield support + """ + fields = dict([(f, QueryFieldList.ONLY) for f in fields]) + return self.fields(True, **fields) + + def exclude(self, *fields): + """Opposite to .only(), exclude some document's fields. :: + + post = BlogPost.objects(...).exclude("comments") + + .. note :: `exclude()` is chainable and will perform a union :: + So with the following it will exclude both: `title` and `author.name`:: + + post = BlogPost.objects.exclude("title").exclude("author.name") + + :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any + field filters. + + :param fields: fields to exclude + + .. versionadded:: 0.5 + """ + fields = dict([(f, QueryFieldList.EXCLUDE) for f in fields]) + return self.fields(**fields) + + def fields(self, _only_called=False, **kwargs): + """Manipulate how you load this document's fields. Used by `.only()` + and `.exclude()` to manipulate which fields to retrieve. Fields also + allows for a greater level of control for example: + + Retrieving a Subrange of Array Elements: + + You can use the $slice operator to retrieve a subrange of elements in + an array. 
For example to get the first 5 comments:: + + post = BlogPost.objects(...).fields(slice__comments=5) + + :param kwargs: A dictionary identifying what to include + + .. versionadded:: 0.5 + """ + + # Check for an operator and transform to mongo-style if there is + operators = ["slice"] + cleaned_fields = [] + for key, value in kwargs.items(): + parts = key.split('__') + op = None + if parts[0] in operators: + op = parts.pop(0) + value = {'$' + op: value} + key = '.'.join(parts) + cleaned_fields.append((key, value)) + + fields = sorted(cleaned_fields, key=operator.itemgetter(1)) + queryset = self.clone() + for value, group in itertools.groupby(fields, lambda x: x[1]): + fields = [field for field, value in group] + fields = queryset._fields_to_dbfields(fields) + queryset._loaded_fields += QueryFieldList(fields, value=value, _only_called=_only_called) + + return queryset + + def all_fields(self): + """Include all fields. Reset all previously calls of .only() or + .exclude(). :: + + post = BlogPost.objects.exclude("comments").all_fields() + + .. versionadded:: 0.5 + """ + queryset = self.clone() + queryset._loaded_fields = QueryFieldList( + always_include=queryset._loaded_fields.always_include) + return queryset + + def order_by(self, *keys): + """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The + order may be specified by prepending each of the keys by a + or a -. + Ascending order is assumed. + + :param keys: fields to order the query results by; keys may be + prefixed with **+** or **-** to determine the ordering direction + """ + queryset = self.clone() + queryset._ordering = queryset._get_order_by(keys) + return queryset + + def explain(self, format=False): + """Return an explain plan record for the + :class:`~mongoengine.queryset.QuerySet`\ 's cursor. 
+ + :param format: format the plan before returning it + """ + plan = self._cursor.explain() + if format: + plan = pprint.pformat(plan) + return plan + + def snapshot(self, enabled): + """Enable or disable snapshot mode when querying. + + :param enabled: whether or not snapshot mode is enabled + + ..versionchanged:: 0.5 - made chainable + """ + queryset = self.clone() + queryset._snapshot = enabled + return queryset + + def timeout(self, enabled): + """Enable or disable the default mongod timeout when querying. + + :param enabled: whether or not the timeout is used + + ..versionchanged:: 0.5 - made chainable + """ + queryset = self.clone() + queryset._timeout = enabled + return queryset + + def slave_okay(self, enabled): + """Enable or disable the slave_okay when querying. + + :param enabled: whether or not the slave_okay is enabled + """ + queryset = self.clone() + queryset._slave_okay = enabled + return queryset + + def read_preference(self, read_preference): + """Change the read_preference when querying. + + :param read_preference: override ReplicaSetConnection-level + preference. + """ + validate_read_preference('read_preference', read_preference) + queryset = self.clone() + queryset._read_preference = read_preference + return queryset + + def scalar(self, *fields): + """Instead of returning Document instances, return either a specific + value or a tuple of values in order. + + Can be used along with + :func:`~mongoengine.queryset.QuerySet.no_dereference` to turn off + dereferencing. + + .. note:: This effects all results and can be unset by calling + ``scalar`` without arguments. Calls ``only`` automatically. + + :param fields: One or more fields to return instead of a Document. 
+ """ + queryset = self.clone() + queryset._scalar = list(fields) + + if fields: + queryset = queryset.only(*fields) + else: + queryset = queryset.all_fields() + + return queryset + + def values_list(self, *fields): + """An alias for scalar""" + return self.scalar(*fields) + + def as_pymongo(self, coerce_types=False): + """Instead of returning Document instances, return raw values from + pymongo. + + :param coerce_type: Field types (if applicable) would be use to + coerce types. + """ + queryset = self.clone() + queryset._as_pymongo = True + queryset._as_pymongo_coerce = coerce_types + return queryset + + # JSON Helpers + + def to_json(self): + """Converts a queryset to JSON""" + return json_util.dumps(self.as_pymongo()) + + def from_json(self, json_data): + """Converts json data to unsaved objects""" + son_data = json_util.loads(json_data) + return [self._document._from_son(data) for data in son_data] + + # JS functionality + + def map_reduce(self, map_f, reduce_f, output, finalize_f=None, limit=None, + scope=None): + """Perform a map/reduce query using the current query spec + and ordering. While ``map_reduce`` respects ``QuerySet`` chaining, + it must be the last call made, as it does not return a maleable + ``QuerySet``. + + See the :meth:`~mongoengine.tests.QuerySetTest.test_map_reduce` + and :meth:`~mongoengine.tests.QuerySetTest.test_map_advanced` + tests in ``tests.queryset.QuerySetTest`` for usage examples. + + :param map_f: map function, as :class:`~bson.code.Code` or string + :param reduce_f: reduce function, as + :class:`~bson.code.Code` or string + :param output: output collection name, if set to 'inline' will try to + use :class:`~pymongo.collection.Collection.inline_map_reduce` + This can also be a dictionary containing output options + see: http://docs.mongodb.org/manual/reference/commands/#mapReduce + :param finalize_f: finalize function, an optional function that + performs any post-reduction processing. 
+ :param scope: values to insert into map/reduce global scope. Optional. + :param limit: number of objects from current query to provide + to map/reduce method + + Returns an iterator yielding + :class:`~mongoengine.document.MapReduceDocument`. + + .. note:: + + Map/Reduce changed in server version **>= 1.7.4**. The PyMongo + :meth:`~pymongo.collection.Collection.map_reduce` helper requires + PyMongo version **>= 1.11**. + + .. versionchanged:: 0.5 + - removed ``keep_temp`` keyword argument, which was only relevant + for MongoDB server versions older than 1.7.4 + + .. versionadded:: 0.3 + """ + queryset = self.clone() + + MapReduceDocument = _import_class('MapReduceDocument') + + if not hasattr(self._collection, "map_reduce"): + raise NotImplementedError("Requires MongoDB >= 1.7.1") + + map_f_scope = {} + if isinstance(map_f, Code): + map_f_scope = map_f.scope + map_f = unicode(map_f) + map_f = Code(queryset._sub_js_fields(map_f), map_f_scope) + + reduce_f_scope = {} + if isinstance(reduce_f, Code): + reduce_f_scope = reduce_f.scope + reduce_f = unicode(reduce_f) + reduce_f_code = queryset._sub_js_fields(reduce_f) + reduce_f = Code(reduce_f_code, reduce_f_scope) + + mr_args = {'query': queryset._query} + + if finalize_f: + finalize_f_scope = {} + if isinstance(finalize_f, Code): + finalize_f_scope = finalize_f.scope + finalize_f = unicode(finalize_f) + finalize_f_code = queryset._sub_js_fields(finalize_f) + finalize_f = Code(finalize_f_code, finalize_f_scope) + mr_args['finalize'] = finalize_f + + if scope: + mr_args['scope'] = scope + + if limit: + mr_args['limit'] = limit + + if output == 'inline' and not queryset._ordering: + map_reduce_function = 'inline_map_reduce' + else: + map_reduce_function = 'map_reduce' + mr_args['out'] = output + + results = getattr(queryset._collection, map_reduce_function)( + map_f, reduce_f, **mr_args) + + if map_reduce_function == 'map_reduce': + results = results.find() + + if queryset._ordering: + results = 
results.sort(queryset._ordering) + + for doc in results: + yield MapReduceDocument(queryset._document, queryset._collection, + doc['_id'], doc['value']) + + def exec_js(self, code, *fields, **options): + """Execute a Javascript function on the server. A list of fields may be + provided, which will be translated to their correct names and supplied + as the arguments to the function. A few extra variables are added to + the function's scope: ``collection``, which is the name of the + collection in use; ``query``, which is an object representing the + current query; and ``options``, which is an object containing any + options specified as keyword arguments. + + As fields in MongoEngine may use different names in the database (set + using the :attr:`db_field` keyword argument to a :class:`Field` + constructor), a mechanism exists for replacing MongoEngine field names + with the database field names in Javascript code. When accessing a + field, use square-bracket notation, and prefix the MongoEngine field + name with a tilde (~). + + :param code: a string of Javascript code to execute + :param fields: fields that you will be using in your function, which + will be passed in to your function as arguments + :param options: options that you want available to the function + (accessed in Javascript through the ``options`` object) + """ + queryset = self.clone() + + code = queryset._sub_js_fields(code) + + fields = [queryset._document._translate_field_name(f) for f in fields] + collection = queryset._document._get_collection_name() + + scope = { + 'collection': collection, + 'options': options or {}, + } + + query = queryset._query + if queryset._where_clause: + query['$where'] = queryset._where_clause + + scope['query'] = query + code = Code(code, scope=scope) + + db = queryset._document._get_db() + return db.eval(code, *fields) + + def where(self, where_clause): + """Filter ``QuerySet`` results with a ``$where`` clause (a Javascript + expression). 
Performs automatic field name substitution like + :meth:`mongoengine.queryset.Queryset.exec_js`. + + .. note:: When using this mode of query, the database will call your + function, or evaluate your predicate clause, for each object + in the collection. + + .. versionadded:: 0.5 + """ + queryset = self.clone() + where_clause = queryset._sub_js_fields(where_clause) + queryset._where_clause = where_clause + return queryset + + def sum(self, field): + """Sum over the values of the specified field. + + :param field: the field to sum over; use dot-notation to refer to + embedded document fields + + .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work + with sharding. + """ + map_func = Code(""" + function() { + function deepFind(obj, path) { + var paths = path.split('.') + , current = obj + , i; + + for (i = 0; i < paths.length; ++i) { + if (current[paths[i]] == undefined) { + return undefined; + } else { + current = current[paths[i]]; + } + } + return current; + } + + emit(1, deepFind(this, field) || 0); + } + """, scope={'field': field}) + + reduce_func = Code(""" + function(key, values) { + var sum = 0; + for (var i in values) { + sum += values[i]; + } + return sum; + } + """) + + for result in self.map_reduce(map_func, reduce_func, output='inline'): + return result.value + else: + return 0 + + def average(self, field): + """Average over the values of the specified field. + + :param field: the field to average over; use dot-notation to refer to + embedded document fields + + .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work + with sharding. 
+ """ + map_func = Code(""" + function() { + function deepFind(obj, path) { + var paths = path.split('.') + , current = obj + , i; + + for (i = 0; i < paths.length; ++i) { + if (current[paths[i]] == undefined) { + return undefined; + } else { + current = current[paths[i]]; + } + } + return current; + } + + val = deepFind(this, field) + if (val !== undefined) + emit(1, {t: val || 0, c: 1}); + } + """, scope={'field': field}) + + reduce_func = Code(""" + function(key, values) { + var out = {t: 0, c: 0}; + for (var i in values) { + var value = values[i]; + out.t += value.t; + out.c += value.c; + } + return out; + } + """) + + finalize_func = Code(""" + function(key, value) { + return value.t / value.c; + } + """) + + for result in self.map_reduce(map_func, reduce_func, + finalize_f=finalize_func, output='inline'): + return result.value + else: + return 0 + + def item_frequencies(self, field, normalize=False, map_reduce=True): + """Returns a dictionary of all items present in a field across + the whole queried set of documents, and their corresponding frequency. + This is useful for generating tag clouds, or searching documents. + + .. note:: + + Can only do direct simple mappings and cannot map across + :class:`~mongoengine.fields.ReferenceField` or + :class:`~mongoengine.fields.GenericReferenceField` for more complex + counting a manual map reduce call would is required. + + If the field is a :class:`~mongoengine.fields.ListField`, the items within + each list will be counted individually. + + :param field: the field to use + :param normalize: normalize the results so they add to 1.0 + :param map_reduce: Use map_reduce over exec_js + + .. 
versionchanged:: 0.5 defaults to map_reduce and can handle embedded + document lookups + """ + if map_reduce: + return self._item_frequencies_map_reduce(field, + normalize=normalize) + return self._item_frequencies_exec_js(field, normalize=normalize) + + # Iterator helpers + + def next(self): + """Wrap the result in a :class:`~mongoengine.Document` object. + """ + if self._limit == 0 or self._none: + raise StopIteration + + raw_doc = self._cursor.next() + if self._as_pymongo: + return self._get_as_pymongo(raw_doc) + doc = self._document._from_son(raw_doc, + _auto_dereference=self._auto_dereference) + if self._scalar: + return self._get_scalar(doc) + + return doc + + def rewind(self): + """Rewind the cursor to its unevaluated state. + + .. versionadded:: 0.3 + """ + self._iter = False + self._cursor.rewind() + + # Properties + + @property + def _collection(self): + """Property that returns the collection object. This allows us to + perform operations only if the collection is accessed. + """ + return self._collection_obj + + @property + def _cursor_args(self): + cursor_args = { + 'snapshot': self._snapshot, + 'timeout': self._timeout + } + if self._read_preference is not None: + cursor_args['read_preference'] = self._read_preference + else: + cursor_args['slave_okay'] = self._slave_okay + if self._loaded_fields: + cursor_args['fields'] = self._loaded_fields.as_dict() + return cursor_args + + @property + def _cursor(self): + if self._cursor_obj is None: + + self._cursor_obj = self._collection.find(self._query, + **self._cursor_args) + # Apply where clauses to cursor + if self._where_clause: + where_clause = self._sub_js_fields(self._where_clause) + self._cursor_obj.where(where_clause) + + if self._ordering: + # Apply query ordering + self._cursor_obj.sort(self._ordering) + elif self._document._meta['ordering']: + # Otherwise, apply the ordering from the document model + order = self._get_order_by(self._document._meta['ordering']) + self._cursor_obj.sort(order) + + if 
self._limit is not None: + self._cursor_obj.limit(self._limit) + + if self._skip is not None: + self._cursor_obj.skip(self._skip) + + if self._hint != -1: + self._cursor_obj.hint(self._hint) + + return self._cursor_obj + + def __deepcopy__(self, memo): + """Essential for chained queries with ReferenceFields involved""" + return self.clone() + + @property + def _query(self): + if self._mongo_query is None: + self._mongo_query = self._query_obj.to_query(self._document) + if self._class_check: + self._mongo_query.update(self._initial_query) + return self._mongo_query + + @property + def _dereference(self): + if not self.__dereference: + self.__dereference = _import_class('DeReference')() + return self.__dereference + + def no_dereference(self): + """Turn off any dereferencing for the results of this queryset. + """ + queryset = self.clone() + queryset._auto_dereference = False + return queryset + + # Helper Functions + + def _item_frequencies_map_reduce(self, field, normalize=False): + map_func = """ + function() { + var path = '{{~%(field)s}}'.split('.'); + var field = this; + + for (p in path) { + if (typeof field != 'undefined') + field = field[path[p]]; + else + break; + } + if (field && field.constructor == Array) { + field.forEach(function(item) { + emit(item, 1); + }); + } else if (typeof field != 'undefined') { + emit(field, 1); + } else { + emit(null, 1); + } + } + """ % dict(field=field) + reduce_func = """ + function(key, values) { + var total = 0; + var valuesSize = values.length; + for (var i=0; i < valuesSize; i++) { + total += parseInt(values[i], 10); + } + return total; + } + """ + values = self.map_reduce(map_func, reduce_func, 'inline') + frequencies = {} + for f in values: + key = f.key + if isinstance(key, float): + if int(key) == key: + key = int(key) + frequencies[key] = int(f.value) + + if normalize: + count = sum(frequencies.values()) + frequencies = dict([(k, float(v) / count) + for k, v in frequencies.items()]) + + return frequencies + + def 
_item_frequencies_exec_js(self, field, normalize=False): + """Uses exec_js to execute""" + freq_func = """ + function(path) { + var path = path.split('.'); + + var total = 0.0; + db[collection].find(query).forEach(function(doc) { + var field = doc; + for (p in path) { + if (field) + field = field[path[p]]; + else + break; + } + if (field && field.constructor == Array) { + total += field.length; + } else { + total++; + } + }); + + var frequencies = {}; + var types = {}; + var inc = 1.0; + + db[collection].find(query).forEach(function(doc) { + field = doc; + for (p in path) { + if (field) + field = field[path[p]]; + else + break; + } + if (field && field.constructor == Array) { + field.forEach(function(item) { + frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); + }); + } else { + var item = field; + types[item] = item; + frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); + } + }); + return [total, frequencies, types]; + } + """ + total, data, types = self.exec_js(freq_func, field) + values = dict([(types.get(k), int(v)) for k, v in data.iteritems()]) + + if normalize: + values = dict([(k, float(v) / total) for k, v in values.items()]) + + frequencies = {} + for k, v in values.iteritems(): + if isinstance(k, float): + if int(k) == k: + k = int(k) + + frequencies[k] = v + + return frequencies + + def _fields_to_dbfields(self, fields): + """Translate fields paths to its db equivalents""" + ret = [] + for field in fields: + field = ".".join(f.db_field for f in + self._document._lookup_field(field.split('.'))) + ret.append(field) + return ret + + def _get_order_by(self, keys): + """Creates a list of order by fields + """ + key_list = [] + for key in keys: + if not key: + continue + direction = pymongo.ASCENDING + if key[0] == '-': + direction = pymongo.DESCENDING + if key[0] in ('-', '+'): + key = key[1:] + key = key.replace('__', '.') + try: + key = self._document._translate_field_name(key) + except: + pass + 
key_list.append((key, direction)) + + if self._cursor_obj: + self._cursor_obj.sort(key_list) + return key_list + + def _get_scalar(self, doc): + + def lookup(obj, name): + chunks = name.split('__') + for chunk in chunks: + obj = getattr(obj, chunk) + return obj + + data = [lookup(doc, n) for n in self._scalar] + if len(data) == 1: + return data[0] + + return tuple(data) + + def _get_as_pymongo(self, row): + # Extract which fields paths we should follow if .fields(...) was + # used. If not, handle all fields. + if not getattr(self, '__as_pymongo_fields', None): + self.__as_pymongo_fields = [] + + for field in self._loaded_fields.fields - set(['_cls']): + self.__as_pymongo_fields.append(field) + while '.' in field: + field, _ = field.rsplit('.', 1) + self.__as_pymongo_fields.append(field) + + all_fields = not self.__as_pymongo_fields + + def clean(data, path=None): + path = path or '' + + if isinstance(data, dict): + new_data = {} + for key, value in data.iteritems(): + new_path = '%s.%s' % (path, key) if path else key + + if all_fields: + include_field = True + elif self._loaded_fields.value == QueryFieldList.ONLY: + include_field = new_path in self.__as_pymongo_fields + else: + include_field = new_path not in self.__as_pymongo_fields + + if include_field: + new_data[key] = clean(value, path=new_path) + data = new_data + elif isinstance(data, list): + data = [clean(d, path=path) for d in data] + else: + if self._as_pymongo_coerce: + # If we need to coerce types, we need to determine the + # type of this field and use the corresponding + # .to_python(...) 
+ from mongoengine.fields import EmbeddedDocumentField + obj = self._document + for chunk in path.split('.'): + obj = getattr(obj, chunk, None) + if obj is None: + break + elif isinstance(obj, EmbeddedDocumentField): + obj = obj.document_type + if obj and data is not None: + data = obj.to_python(data) + return data + return clean(row) + + def _sub_js_fields(self, code): + """When fields are specified with [~fieldname] syntax, where + *fieldname* is the Python name of a field, *fieldname* will be + substituted for the MongoDB name of the field (specified using the + :attr:`name` keyword argument in a field's constructor). + """ + def field_sub(match): + # Extract just the field name, and look up the field objects + field_name = match.group(1).split('.') + fields = self._document._lookup_field(field_name) + # Substitute the correct name for the field into the javascript + return u'["%s"]' % fields[-1].db_field + + def field_path_sub(match): + # Extract just the field name, and look up the field objects + field_name = match.group(1).split('.') + fields = self._document._lookup_field(field_name) + # Substitute the correct name for the field into the javascript + return ".".join([f.db_field for f in fields]) + + code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) + code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, + code) + return code + + # Deprecated + def ensure_index(self, **kwargs): + """Deprecated use :func:`Document.ensure_index`""" + msg = ("Doc.objects()._ensure_index() is deprecated. " + "Use Doc.ensure_index() instead.") + warnings.warn(msg, DeprecationWarning) + self._document.__class__.ensure_index(**kwargs) + return self + + def _ensure_indexes(self): + """Deprecated use :func:`~Document.ensure_indexes`""" + msg = ("Doc.objects()._ensure_indexes() is deprecated. 
" + "Use Doc.ensure_indexes() instead.") + warnings.warn(msg, DeprecationWarning) + self._document.__class__.ensure_indexes() \ No newline at end of file diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 690e3f0..9db98a7 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1,137 +1,26 @@ -from __future__ import absolute_import +from mongoengine.errors import OperationError +from mongoengine.queryset.base import (BaseQuerySet, DO_NOTHING, NULLIFY, + CASCADE, DENY, PULL) -import copy -import itertools -import operator -import pprint -import re -import warnings - -from bson.code import Code -from bson import json_util -import pymongo -from pymongo.common import validate_read_preference - -from mongoengine import signals -from mongoengine.common import _import_class -from mongoengine.errors import (OperationError, NotUniqueError, - InvalidQueryError) - -from mongoengine.queryset import transform -from mongoengine.queryset.field_list import QueryFieldList -from mongoengine.queryset.visitor import Q, QNode - - -__all__ = ('QuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL') +__all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE', + 'DENY', 'PULL') # The maximum number of items to display in a QuerySet.__repr__ REPR_OUTPUT_SIZE = 20 ITER_CHUNK_SIZE = 100 -# Delete rules -DO_NOTHING = 0 -NULLIFY = 1 -CASCADE = 2 -DENY = 3 -PULL = 4 -RE_TYPE = type(re.compile('')) +class QuerySet(BaseQuerySet): + """The default queryset, that builds queries and handles a set of results + returned from a query. - -class QuerySet(object): - """A set of results returned from a query. Wraps a MongoDB cursor, - providing :class:`~mongoengine.Document` objects as the results. + Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as + the results. 
""" - __dereference = False - _auto_dereference = True - def __init__(self, document, collection): - self._document = document - self._collection_obj = collection - self._mongo_query = None - self._query_obj = Q() - self._initial_query = {} - self._where_clause = None - self._loaded_fields = QueryFieldList() - self._ordering = [] - self._snapshot = False - self._timeout = True - self._class_check = True - self._slave_okay = False - self._read_preference = None - self._iter = False - self._scalar = [] - self._none = False - self._as_pymongo = False - self._as_pymongo_coerce = False - self._result_cache = [] - self._has_more = True - self._len = None - - # If inheritance is allowed, only return instances and instances of - # subclasses of the class being used - if document._meta.get('allow_inheritance') is True: - if len(self._document._subclasses) == 1: - self._initial_query = {"_cls": self._document._subclasses[0]} - else: - self._initial_query = {"_cls": {"$in": self._document._subclasses}} - self._loaded_fields = QueryFieldList(always_include=['_cls']) - self._cursor_obj = None - self._limit = None - self._skip = None - self._hint = -1 # Using -1 as None is a valid value for hint - - def __call__(self, q_obj=None, class_check=True, slave_okay=False, - read_preference=None, **query): - """Filter the selected documents by calling the - :class:`~mongoengine.queryset.QuerySet` with a query. - - :param q_obj: a :class:`~mongoengine.queryset.Q` object to be used in - the query; the :class:`~mongoengine.queryset.QuerySet` is filtered - multiple times with different :class:`~mongoengine.queryset.Q` - objects, only the last one will be used - :param class_check: If set to False bypass class name check when - querying collection - :param slave_okay: if True, allows this query to be run against a - replica secondary. - :params read_preference: if set, overrides connection-level - read_preference from `ReplicaSetConnection`. 
- :param query: Django-style query keyword arguments - """ - query = Q(**query) - if q_obj: - # make sure proper query object is passed - if not isinstance(q_obj, QNode): - msg = ("Not a query object: %s. " - "Did you intend to use key=value?" % q_obj) - raise InvalidQueryError(msg) - query &= q_obj - - if read_preference is None: - queryset = self.clone() - else: - # Use the clone provided when setting read_preference - queryset = self.read_preference(read_preference) - - queryset._query_obj &= query - queryset._mongo_query = None - queryset._cursor_obj = None - queryset._class_check = class_check - - return queryset - - def __len__(self): - """Since __len__ is called quite frequently (for example, as part of - list(qs) we populate the result cache and cache the length. - """ - if self._len is not None: - return self._len - if self._has_more: - # populate the cache - list(self._iter_results()) - - self._len = len(self._result_cache) - return self._len + _has_more = True + _len = None + _result_cache = None def __iter__(self): """Iteration utilises a results cache which iterates the cursor @@ -147,11 +36,39 @@ class QuerySet(object): # iterating over the cache. return iter(self._result_cache) + def __len__(self): + """Since __len__ is called quite frequently (for example, as part of + list(qs) we populate the result cache and cache the length. + """ + if self._len is not None: + return self._len + if self._has_more: + # populate the cache + list(self._iter_results()) + + self._len = len(self._result_cache) + return self._len + + def __repr__(self): + """Provides the string representation of the QuerySet + """ + if self._iter: + return '.. queryset mid-iteration ..' + + self._populate_cache() + data = self._result_cache[:REPR_OUTPUT_SIZE + 1] + if len(data) > REPR_OUTPUT_SIZE: + data[-1] = "...(remaining elements truncated)..." + return repr(data) + + def _iter_results(self): """A generator for iterating over the result cache. 
Also populates the cache if there are more possible results to yield. Raises StopIteration when there are no more results""" + if self._result_cache is None: + self._result_cache = [] pos = 0 while True: upper = len(self._result_cache) @@ -168,6 +85,8 @@ class QuerySet(object): Populates the result cache with ``ITER_CHUNK_SIZE`` more entries (until the cursor is exhausted). """ + if self._result_cache is None: + self._result_cache = [] if self._has_more: try: for i in xrange(ITER_CHUNK_SIZE): @@ -175,1369 +94,43 @@ class QuerySet(object): except StopIteration: self._has_more = False - def __getitem__(self, key): - """Support skip and limit using getitem and slicing syntax. - """ - queryset = self.clone() + def no_cache(self): + """Convert to a non_caching queryset""" + if self._result_cache is not None: + raise OperationError("QuerySet already cached") + return self.clone_into(QuerySetNoCache(self._document, self._collection)) - # Slice provided - if isinstance(key, slice): - try: - queryset._cursor_obj = queryset._cursor[key] - queryset._skip, queryset._limit = key.start, key.stop - if key.start and key.stop: - queryset._limit = key.stop - key.start - except IndexError, err: - # PyMongo raises an error if key.start == key.stop, catch it, - # bin it, kill it. 
- start = key.start or 0 - if start >= 0 and key.stop >= 0 and key.step is None: - if start == key.stop: - queryset.limit(0) - queryset._skip = key.start - queryset._limit = key.stop - start - return queryset - raise err - # Allow further QuerySet modifications to be performed - return queryset - # Integer index provided - elif isinstance(key, int): - if queryset._scalar: - return queryset._get_scalar( - queryset._document._from_son(queryset._cursor[key], - _auto_dereference=self._auto_dereference)) - if queryset._as_pymongo: - return queryset._get_as_pymongo(queryset._cursor.next()) - return queryset._document._from_son(queryset._cursor[key], - _auto_dereference=self._auto_dereference) - raise AttributeError + +class QuerySetNoCache(BaseQuerySet): + """A non caching QuerySet""" + + def cache(self): + """Convert to a caching queryset""" + return self.clone_into(QuerySet(self._document, self._collection)) def __repr__(self): """Provides the string representation of the QuerySet - """ + .. versionchanged:: 0.6.13 Now doesnt modify the cursor + """ if self._iter: return '.. queryset mid-iteration ..' - self._populate_cache() - data = self._result_cache[:REPR_OUTPUT_SIZE + 1] + data = [] + for i in xrange(REPR_OUTPUT_SIZE + 1): + try: + data.append(self.next()) + except StopIteration: + break if len(data) > REPR_OUTPUT_SIZE: data[-1] = "...(remaining elements truncated)..." + + self.rewind() return repr(data) - # Core functions - - def all(self): - """Returns all documents.""" - return self.__call__() - - def filter(self, *q_objs, **query): - """An alias of :meth:`~mongoengine.queryset.QuerySet.__call__` - """ - return self.__call__(*q_objs, **query) - - def get(self, *q_objs, **query): - """Retrieve the the matching object raising - :class:`~mongoengine.queryset.MultipleObjectsReturned` or - `DocumentName.MultipleObjectsReturned` exception if multiple results - and :class:`~mongoengine.queryset.DoesNotExist` or - `DocumentName.DoesNotExist` if no results are found. 
- - .. versionadded:: 0.3 - """ - queryset = self.clone() - queryset = queryset.limit(2) - queryset = queryset.filter(*q_objs, **query) - - try: - result = queryset.next() - except StopIteration: - msg = ("%s matching query does not exist." - % queryset._document._class_name) - raise queryset._document.DoesNotExist(msg) - try: - queryset.next() - except StopIteration: - return result - + def __iter__(self): + queryset = self + if queryset._iter: + queryset = self.clone() queryset.rewind() - message = u'%d items returned, instead of 1' % queryset.count() - raise queryset._document.MultipleObjectsReturned(message) - - def create(self, **kwargs): - """Create new object. Returns the saved object instance. - - .. versionadded:: 0.4 - """ - return self._document(**kwargs).save() - - def get_or_create(self, write_concern=None, auto_save=True, - *q_objs, **query): - """Retrieve unique object or create, if it doesn't exist. Returns a - tuple of ``(object, created)``, where ``object`` is the retrieved or - created object and ``created`` is a boolean specifying whether a new - object was created. Raises - :class:`~mongoengine.queryset.MultipleObjectsReturned` or - `DocumentName.MultipleObjectsReturned` if multiple results are found. - A new document will be created if the document doesn't exists; a - dictionary of default values for the new document may be provided as a - keyword argument called :attr:`defaults`. - - .. note:: This requires two separate operations and therefore a - race condition exists. Because there are no transactions in - mongoDB other approaches should be investigated, to ensure you - don't accidently duplicate data when using this method. This is - now scheduled to be removed before 1.0 - - :param write_concern: optional extra keyword arguments used if we - have to create a new document. - Passes any write_concern onto :meth:`~mongoengine.Document.save` - - :param auto_save: if the object is to be saved automatically if - not found. - - .. 
deprecated:: 0.8 - .. versionchanged:: 0.6 - added `auto_save` - .. versionadded:: 0.3 - """ - msg = ("get_or_create is scheduled to be deprecated. The approach is " - "flawed without transactions. Upserts should be preferred.") - warnings.warn(msg, DeprecationWarning) - - defaults = query.get('defaults', {}) - if 'defaults' in query: - del query['defaults'] - - try: - doc = self.get(*q_objs, **query) - return doc, False - except self._document.DoesNotExist: - query.update(defaults) - doc = self._document(**query) - - if auto_save: - doc.save(write_concern=write_concern) - return doc, True - - def first(self): - """Retrieve the first object matching the query. - """ - queryset = self.clone() - try: - result = queryset[0] - except IndexError: - result = None - return result - - def insert(self, doc_or_docs, load_bulk=True, write_concern=None): - """bulk insert documents - - :param docs_or_doc: a document or list of documents to be inserted - :param load_bulk (optional): If True returns the list of document - instances - :param write_concern: Extra keyword arguments are passed down to - :meth:`~pymongo.collection.Collection.insert` - which will be used as options for the resultant - ``getLastError`` command. For example, - ``insert(..., {w: 2, fsync: True})`` will wait until at least - two servers have recorded the write and will force an fsync on - each server being written to. - - By default returns document instances, set ``load_bulk`` to False to - return just ``ObjectIds`` - - .. 
versionadded:: 0.5 - """ - Document = _import_class('Document') - - if write_concern is None: - write_concern = {} - - docs = doc_or_docs - return_one = False - if isinstance(docs, Document) or issubclass(docs.__class__, Document): - return_one = True - docs = [docs] - - raw = [] - for doc in docs: - if not isinstance(doc, self._document): - msg = ("Some documents inserted aren't instances of %s" - % str(self._document)) - raise OperationError(msg) - if doc.pk and not doc._created: - msg = "Some documents have ObjectIds use doc.update() instead" - raise OperationError(msg) - raw.append(doc.to_mongo()) - - signals.pre_bulk_insert.send(self._document, documents=docs) - try: - ids = self._collection.insert(raw, **write_concern) - except pymongo.errors.OperationFailure, err: - message = 'Could not save document (%s)' - if re.match('^E1100[01] duplicate key', unicode(err)): - # E11000 - duplicate key error index - # E11001 - duplicate key on update - message = u'Tried to save duplicate unique keys (%s)' - raise NotUniqueError(message % unicode(err)) - raise OperationError(message % unicode(err)) - - if not load_bulk: - signals.post_bulk_insert.send( - self._document, documents=docs, loaded=False) - return return_one and ids[0] or ids - - documents = self.in_bulk(ids) - results = [] - for obj_id in ids: - results.append(documents.get(obj_id)) - signals.post_bulk_insert.send( - self._document, documents=results, loaded=True) - return return_one and results[0] or results - - def count(self, with_limit_and_skip=True): - """Count the selected elements in the query. 
- - :param with_limit_and_skip (optional): take any :meth:`limit` or - :meth:`skip` that has been applied to this cursor into account when - getting the count - """ - if self._limit == 0: - return 0 - if with_limit_and_skip and self._len is not None: - return self._len - count = self._cursor.count(with_limit_and_skip=with_limit_and_skip) - if with_limit_and_skip: - self._len = count - return count - - def delete(self, write_concern=None, _from_doc_delete=False): - """Delete the documents matched by the query. - - :param write_concern: Extra keyword arguments are passed down which - will be used as options for the resultant - ``getLastError`` command. For example, - ``save(..., write_concern={w: 2, fsync: True}, ...)`` will - wait until at least two servers have recorded the write and - will force an fsync on the primary server. - :param _from_doc_delete: True when called from document delete therefore - signals will have been triggered so don't loop. - """ - queryset = self.clone() - doc = queryset._document - - if write_concern is None: - write_concern = {} - - # Handle deletes where skips or limits have been applied or - # there is an untriggered delete signal - has_delete_signal = signals.signals_available and ( - signals.pre_delete.has_receivers_for(self._document) or - signals.post_delete.has_receivers_for(self._document)) - - call_document_delete = (queryset._skip or queryset._limit or - has_delete_signal) and not _from_doc_delete - - if call_document_delete: - for doc in queryset: - doc.delete(write_concern=write_concern) - return - - delete_rules = doc._meta.get('delete_rules') or {} - # Check for DENY rules before actually deleting/nullifying any other - # references - for rule_entry in delete_rules: - document_cls, field_name = rule_entry - rule = doc._meta['delete_rules'][rule_entry] - if rule == DENY and document_cls.objects( - **{field_name + '__in': self}).count() > 0: - msg = ("Could not delete document (%s.%s refers to it)" - % 
(document_cls.__name__, field_name)) - raise OperationError(msg) - - for rule_entry in delete_rules: - document_cls, field_name = rule_entry - rule = doc._meta['delete_rules'][rule_entry] - if rule == CASCADE: - ref_q = document_cls.objects(**{field_name + '__in': self}) - ref_q_count = ref_q.count() - if (doc != document_cls and ref_q_count > 0 - or (doc == document_cls and ref_q_count > 0)): - ref_q.delete(write_concern=write_concern) - elif rule == NULLIFY: - document_cls.objects(**{field_name + '__in': self}).update( - write_concern=write_concern, **{'unset__%s' % field_name: 1}) - elif rule == PULL: - document_cls.objects(**{field_name + '__in': self}).update( - write_concern=write_concern, - **{'pull_all__%s' % field_name: self}) - - queryset._collection.remove(queryset._query, write_concern=write_concern) - - def update(self, upsert=False, multi=True, write_concern=None, - full_result=False, **update): - """Perform an atomic update on the fields matched by the query. - - :param upsert: Any existing document with that "_id" is overwritten. - :param multi: Update multiple documents. - :param write_concern: Extra keyword arguments are passed down which - will be used as options for the resultant - ``getLastError`` command. For example, - ``save(..., write_concern={w: 2, fsync: True}, ...)`` will - wait until at least two servers have recorded the write and - will force an fsync on the primary server. - :param full_result: Return the full result rather than just the number - updated. - :param update: Django-style update keyword arguments - - .. 
versionadded:: 0.2 - """ - if not update and not upsert: - raise OperationError("No update parameters, would remove data") - - if write_concern is None: - write_concern = {} - - queryset = self.clone() - query = queryset._query - update = transform.update(queryset._document, **update) - - # If doing an atomic upsert on an inheritable class - # then ensure we add _cls to the update operation - if upsert and '_cls' in query: - if '$set' in update: - update["$set"]["_cls"] = queryset._document._class_name - else: - update["$set"] = {"_cls": queryset._document._class_name} - try: - result = queryset._collection.update(query, update, multi=multi, - upsert=upsert, **write_concern) - if full_result: - return result - elif result: - return result['n'] - except pymongo.errors.OperationFailure, err: - if unicode(err) == u'multi not coded yet': - message = u'update() method requires MongoDB 1.1.3+' - raise OperationError(message) - raise OperationError(u'Update failed (%s)' % unicode(err)) - - def update_one(self, upsert=False, write_concern=None, **update): - """Perform an atomic update on first field matched by the query. - - :param upsert: Any existing document with that "_id" is overwritten. - :param write_concern: Extra keyword arguments are passed down which - will be used as options for the resultant - ``getLastError`` command. For example, - ``save(..., write_concern={w: 2, fsync: True}, ...)`` will - wait until at least two servers have recorded the write and - will force an fsync on the primary server. - :param update: Django-style update keyword arguments - - .. versionadded:: 0.2 - """ - return self.update( - upsert=upsert, multi=False, write_concern=write_concern, **update) - - def with_id(self, object_id): - """Retrieve the object matching the id provided. Uses `object_id` only - and raises InvalidQueryError if a filter has been applied. Returns - `None` if no document exists with that id. 
- - :param object_id: the value for the id of the document to look up - - .. versionchanged:: 0.6 Raises InvalidQueryError if filter has been set - """ - queryset = self.clone() - if not queryset._query_obj.empty: - msg = "Cannot use a filter whilst using `with_id`" - raise InvalidQueryError(msg) - return queryset.filter(pk=object_id).first() - - def in_bulk(self, object_ids): - """Retrieve a set of documents by their ids. - - :param object_ids: a list or tuple of ``ObjectId``\ s - :rtype: dict of ObjectIds as keys and collection-specific - Document subclasses as values. - - .. versionadded:: 0.3 - """ - doc_map = {} - - docs = self._collection.find({'_id': {'$in': object_ids}}, - **self._cursor_args) - if self._scalar: - for doc in docs: - doc_map[doc['_id']] = self._get_scalar( - self._document._from_son(doc)) - elif self._as_pymongo: - for doc in docs: - doc_map[doc['_id']] = self._get_as_pymongo(doc) - else: - for doc in docs: - doc_map[doc['_id']] = self._document._from_son(doc) - - return doc_map - - def none(self): - """Helper that just returns a list""" - queryset = self.clone() - queryset._none = True return queryset - - def no_sub_classes(self): - """ - Only return instances of this document and not any inherited documents - """ - if self._document._meta.get('allow_inheritance') is True: - self._initial_query = {"_cls": self._document._class_name} - - return self - - def clone(self): - """Creates a copy of the current - :class:`~mongoengine.queryset.QuerySet` - - .. 
versionadded:: 0.5 - """ - c = self.__class__(self._document, self._collection_obj) - - copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj', - '_where_clause', '_loaded_fields', '_ordering', '_snapshot', - '_timeout', '_class_check', '_slave_okay', '_read_preference', - '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce', - '_limit', '_skip', '_hint', '_auto_dereference') - - for prop in copy_props: - val = getattr(self, prop) - setattr(c, prop, copy.copy(val)) - - if self._cursor_obj: - c._cursor_obj = self._cursor_obj.clone() - - return c - - def select_related(self, max_depth=1): - """Handles dereferencing of :class:`~bson.dbref.DBRef` objects or - :class:`~bson.object_id.ObjectId` a maximum depth in order to cut down - the number queries to mongodb. - - .. versionadded:: 0.5 - """ - # Make select related work the same for querysets - max_depth += 1 - queryset = self.clone() - return queryset._dereference(queryset, max_depth=max_depth) - - def limit(self, n): - """Limit the number of returned documents to `n`. This may also be - achieved using array-slicing syntax (e.g. ``User.objects[:5]``). - - :param n: the maximum number of objects to return - """ - queryset = self.clone() - if n == 0: - queryset._cursor.limit(1) - else: - queryset._cursor.limit(n) - queryset._limit = n - # Return self to allow chaining - return queryset - - def skip(self, n): - """Skip `n` documents before returning the results. This may also be - achieved using array-slicing syntax (e.g. ``User.objects[5:]``). - - :param n: the number of objects to skip before returning results - """ - queryset = self.clone() - queryset._cursor.skip(n) - queryset._skip = n - return queryset - - def hint(self, index=None): - """Added 'hint' support, telling Mongo the proper index to use for the - query. - - Judicious use of hints can greatly improve query performance. 
When - doing a query on multiple fields (at least one of which is indexed) - pass the indexed field as a hint to the query. - - Hinting will not do anything if the corresponding index does not exist. - The last hint applied to this cursor takes precedence over all others. - - .. versionadded:: 0.5 - """ - queryset = self.clone() - queryset._cursor.hint(index) - queryset._hint = index - return queryset - - def distinct(self, field): - """Return a list of distinct values for a given field. - - :param field: the field to select distinct values from - - .. note:: This is a command and won't take ordering or limit into - account. - - .. versionadded:: 0.4 - .. versionchanged:: 0.5 - Fixed handling references - .. versionchanged:: 0.6 - Improved db_field refrence handling - """ - queryset = self.clone() - try: - field = self._fields_to_dbfields([field]).pop() - finally: - return self._dereference(queryset._cursor.distinct(field), 1, - name=field, instance=self._document) - - def only(self, *fields): - """Load only a subset of this document's fields. :: - - post = BlogPost.objects(...).only("title", "author.name") - - .. note :: `only()` is chainable and will perform a union :: - So with the following it will fetch both: `title` and `author.name`:: - - post = BlogPost.objects.only("title").only("author.name") - - :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any - field filters. - - :param fields: fields to include - - .. versionadded:: 0.3 - .. versionchanged:: 0.5 - Added subfield support - """ - fields = dict([(f, QueryFieldList.ONLY) for f in fields]) - return self.fields(True, **fields) - - def exclude(self, *fields): - """Opposite to .only(), exclude some document's fields. :: - - post = BlogPost.objects(...).exclude("comments") - - .. 
note :: `exclude()` is chainable and will perform a union :: - So with the following it will exclude both: `title` and `author.name`:: - - post = BlogPost.objects.exclude("title").exclude("author.name") - - :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any - field filters. - - :param fields: fields to exclude - - .. versionadded:: 0.5 - """ - fields = dict([(f, QueryFieldList.EXCLUDE) for f in fields]) - return self.fields(**fields) - - def fields(self, _only_called=False, **kwargs): - """Manipulate how you load this document's fields. Used by `.only()` - and `.exclude()` to manipulate which fields to retrieve. Fields also - allows for a greater level of control for example: - - Retrieving a Subrange of Array Elements: - - You can use the $slice operator to retrieve a subrange of elements in - an array. For example to get the first 5 comments:: - - post = BlogPost.objects(...).fields(slice__comments=5) - - :param kwargs: A dictionary identifying what to include - - .. versionadded:: 0.5 - """ - - # Check for an operator and transform to mongo-style if there is - operators = ["slice"] - cleaned_fields = [] - for key, value in kwargs.items(): - parts = key.split('__') - op = None - if parts[0] in operators: - op = parts.pop(0) - value = {'$' + op: value} - key = '.'.join(parts) - cleaned_fields.append((key, value)) - - fields = sorted(cleaned_fields, key=operator.itemgetter(1)) - queryset = self.clone() - for value, group in itertools.groupby(fields, lambda x: x[1]): - fields = [field for field, value in group] - fields = queryset._fields_to_dbfields(fields) - queryset._loaded_fields += QueryFieldList(fields, value=value, _only_called=_only_called) - - return queryset - - def all_fields(self): - """Include all fields. Reset all previously calls of .only() or - .exclude(). :: - - post = BlogPost.objects.exclude("comments").all_fields() - - .. 
versionadded:: 0.5 - """ - queryset = self.clone() - queryset._loaded_fields = QueryFieldList( - always_include=queryset._loaded_fields.always_include) - return queryset - - def order_by(self, *keys): - """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The - order may be specified by prepending each of the keys by a + or a -. - Ascending order is assumed. - - :param keys: fields to order the query results by; keys may be - prefixed with **+** or **-** to determine the ordering direction - """ - queryset = self.clone() - queryset._ordering = queryset._get_order_by(keys) - return queryset - - def explain(self, format=False): - """Return an explain plan record for the - :class:`~mongoengine.queryset.QuerySet`\ 's cursor. - - :param format: format the plan before returning it - """ - plan = self._cursor.explain() - if format: - plan = pprint.pformat(plan) - return plan - - def snapshot(self, enabled): - """Enable or disable snapshot mode when querying. - - :param enabled: whether or not snapshot mode is enabled - - ..versionchanged:: 0.5 - made chainable - """ - queryset = self.clone() - queryset._snapshot = enabled - return queryset - - def timeout(self, enabled): - """Enable or disable the default mongod timeout when querying. - - :param enabled: whether or not the timeout is used - - ..versionchanged:: 0.5 - made chainable - """ - queryset = self.clone() - queryset._timeout = enabled - return queryset - - def slave_okay(self, enabled): - """Enable or disable the slave_okay when querying. - - :param enabled: whether or not the slave_okay is enabled - """ - queryset = self.clone() - queryset._slave_okay = enabled - return queryset - - def read_preference(self, read_preference): - """Change the read_preference when querying. - - :param read_preference: override ReplicaSetConnection-level - preference. 
- """ - validate_read_preference('read_preference', read_preference) - queryset = self.clone() - queryset._read_preference = read_preference - return queryset - - def scalar(self, *fields): - """Instead of returning Document instances, return either a specific - value or a tuple of values in order. - - Can be used along with - :func:`~mongoengine.queryset.QuerySet.no_dereference` to turn off - dereferencing. - - .. note:: This effects all results and can be unset by calling - ``scalar`` without arguments. Calls ``only`` automatically. - - :param fields: One or more fields to return instead of a Document. - """ - queryset = self.clone() - queryset._scalar = list(fields) - - if fields: - queryset = queryset.only(*fields) - else: - queryset = queryset.all_fields() - - return queryset - - def values_list(self, *fields): - """An alias for scalar""" - return self.scalar(*fields) - - def as_pymongo(self, coerce_types=False): - """Instead of returning Document instances, return raw values from - pymongo. - - :param coerce_type: Field types (if applicable) would be use to - coerce types. - """ - queryset = self.clone() - queryset._as_pymongo = True - queryset._as_pymongo_coerce = coerce_types - return queryset - - # JSON Helpers - - def to_json(self): - """Converts a queryset to JSON""" - return json_util.dumps(self.as_pymongo()) - - def from_json(self, json_data): - """Converts json data to unsaved objects""" - son_data = json_util.loads(json_data) - return [self._document._from_son(data) for data in son_data] - - # JS functionality - - def map_reduce(self, map_f, reduce_f, output, finalize_f=None, limit=None, - scope=None): - """Perform a map/reduce query using the current query spec - and ordering. While ``map_reduce`` respects ``QuerySet`` chaining, - it must be the last call made, as it does not return a maleable - ``QuerySet``. 
- - See the :meth:`~mongoengine.tests.QuerySetTest.test_map_reduce` - and :meth:`~mongoengine.tests.QuerySetTest.test_map_advanced` - tests in ``tests.queryset.QuerySetTest`` for usage examples. - - :param map_f: map function, as :class:`~bson.code.Code` or string - :param reduce_f: reduce function, as - :class:`~bson.code.Code` or string - :param output: output collection name, if set to 'inline' will try to - use :class:`~pymongo.collection.Collection.inline_map_reduce` - This can also be a dictionary containing output options - see: http://docs.mongodb.org/manual/reference/commands/#mapReduce - :param finalize_f: finalize function, an optional function that - performs any post-reduction processing. - :param scope: values to insert into map/reduce global scope. Optional. - :param limit: number of objects from current query to provide - to map/reduce method - - Returns an iterator yielding - :class:`~mongoengine.document.MapReduceDocument`. - - .. note:: - - Map/Reduce changed in server version **>= 1.7.4**. The PyMongo - :meth:`~pymongo.collection.Collection.map_reduce` helper requires - PyMongo version **>= 1.11**. - - .. versionchanged:: 0.5 - - removed ``keep_temp`` keyword argument, which was only relevant - for MongoDB server versions older than 1.7.4 - - .. 
versionadded:: 0.3 - """ - queryset = self.clone() - - MapReduceDocument = _import_class('MapReduceDocument') - - if not hasattr(self._collection, "map_reduce"): - raise NotImplementedError("Requires MongoDB >= 1.7.1") - - map_f_scope = {} - if isinstance(map_f, Code): - map_f_scope = map_f.scope - map_f = unicode(map_f) - map_f = Code(queryset._sub_js_fields(map_f), map_f_scope) - - reduce_f_scope = {} - if isinstance(reduce_f, Code): - reduce_f_scope = reduce_f.scope - reduce_f = unicode(reduce_f) - reduce_f_code = queryset._sub_js_fields(reduce_f) - reduce_f = Code(reduce_f_code, reduce_f_scope) - - mr_args = {'query': queryset._query} - - if finalize_f: - finalize_f_scope = {} - if isinstance(finalize_f, Code): - finalize_f_scope = finalize_f.scope - finalize_f = unicode(finalize_f) - finalize_f_code = queryset._sub_js_fields(finalize_f) - finalize_f = Code(finalize_f_code, finalize_f_scope) - mr_args['finalize'] = finalize_f - - if scope: - mr_args['scope'] = scope - - if limit: - mr_args['limit'] = limit - - if output == 'inline' and not queryset._ordering: - map_reduce_function = 'inline_map_reduce' - else: - map_reduce_function = 'map_reduce' - mr_args['out'] = output - - results = getattr(queryset._collection, map_reduce_function)( - map_f, reduce_f, **mr_args) - - if map_reduce_function == 'map_reduce': - results = results.find() - - if queryset._ordering: - results = results.sort(queryset._ordering) - - for doc in results: - yield MapReduceDocument(queryset._document, queryset._collection, - doc['_id'], doc['value']) - - def exec_js(self, code, *fields, **options): - """Execute a Javascript function on the server. A list of fields may be - provided, which will be translated to their correct names and supplied - as the arguments to the function. 
A few extra variables are added to - the function's scope: ``collection``, which is the name of the - collection in use; ``query``, which is an object representing the - current query; and ``options``, which is an object containing any - options specified as keyword arguments. - - As fields in MongoEngine may use different names in the database (set - using the :attr:`db_field` keyword argument to a :class:`Field` - constructor), a mechanism exists for replacing MongoEngine field names - with the database field names in Javascript code. When accessing a - field, use square-bracket notation, and prefix the MongoEngine field - name with a tilde (~). - - :param code: a string of Javascript code to execute - :param fields: fields that you will be using in your function, which - will be passed in to your function as arguments - :param options: options that you want available to the function - (accessed in Javascript through the ``options`` object) - """ - queryset = self.clone() - - code = queryset._sub_js_fields(code) - - fields = [queryset._document._translate_field_name(f) for f in fields] - collection = queryset._document._get_collection_name() - - scope = { - 'collection': collection, - 'options': options or {}, - } - - query = queryset._query - if queryset._where_clause: - query['$where'] = queryset._where_clause - - scope['query'] = query - code = Code(code, scope=scope) - - db = queryset._document._get_db() - return db.eval(code, *fields) - - def where(self, where_clause): - """Filter ``QuerySet`` results with a ``$where`` clause (a Javascript - expression). Performs automatic field name substitution like - :meth:`mongoengine.queryset.Queryset.exec_js`. - - .. note:: When using this mode of query, the database will call your - function, or evaluate your predicate clause, for each object - in the collection. - - .. 
versionadded:: 0.5 - """ - queryset = self.clone() - where_clause = queryset._sub_js_fields(where_clause) - queryset._where_clause = where_clause - return queryset - - def sum(self, field): - """Sum over the values of the specified field. - - :param field: the field to sum over; use dot-notation to refer to - embedded document fields - - .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work - with sharding. - """ - map_func = Code(""" - function() { - function deepFind(obj, path) { - var paths = path.split('.') - , current = obj - , i; - - for (i = 0; i < paths.length; ++i) { - if (current[paths[i]] == undefined) { - return undefined; - } else { - current = current[paths[i]]; - } - } - return current; - } - - emit(1, deepFind(this, field) || 0); - } - """, scope={'field': field}) - - reduce_func = Code(""" - function(key, values) { - var sum = 0; - for (var i in values) { - sum += values[i]; - } - return sum; - } - """) - - for result in self.map_reduce(map_func, reduce_func, output='inline'): - return result.value - else: - return 0 - - def average(self, field): - """Average over the values of the specified field. - - :param field: the field to average over; use dot-notation to refer to - embedded document fields - - .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work - with sharding. 
- """ - map_func = Code(""" - function() { - function deepFind(obj, path) { - var paths = path.split('.') - , current = obj - , i; - - for (i = 0; i < paths.length; ++i) { - if (current[paths[i]] == undefined) { - return undefined; - } else { - current = current[paths[i]]; - } - } - return current; - } - - val = deepFind(this, field) - if (val !== undefined) - emit(1, {t: val || 0, c: 1}); - } - """, scope={'field': field}) - - reduce_func = Code(""" - function(key, values) { - var out = {t: 0, c: 0}; - for (var i in values) { - var value = values[i]; - out.t += value.t; - out.c += value.c; - } - return out; - } - """) - - finalize_func = Code(""" - function(key, value) { - return value.t / value.c; - } - """) - - for result in self.map_reduce(map_func, reduce_func, - finalize_f=finalize_func, output='inline'): - return result.value - else: - return 0 - - def item_frequencies(self, field, normalize=False, map_reduce=True): - """Returns a dictionary of all items present in a field across - the whole queried set of documents, and their corresponding frequency. - This is useful for generating tag clouds, or searching documents. - - .. note:: - - Can only do direct simple mappings and cannot map across - :class:`~mongoengine.fields.ReferenceField` or - :class:`~mongoengine.fields.GenericReferenceField` for more complex - counting a manual map reduce call would is required. - - If the field is a :class:`~mongoengine.fields.ListField`, the items within - each list will be counted individually. - - :param field: the field to use - :param normalize: normalize the results so they add to 1.0 - :param map_reduce: Use map_reduce over exec_js - - .. 
versionchanged:: 0.5 defaults to map_reduce and can handle embedded - document lookups - """ - if map_reduce: - return self._item_frequencies_map_reduce(field, - normalize=normalize) - return self._item_frequencies_exec_js(field, normalize=normalize) - - # Iterator helpers - - def next(self): - """Wrap the result in a :class:`~mongoengine.Document` object. - """ - if self._limit == 0 or self._none: - raise StopIteration - - raw_doc = self._cursor.next() - if self._as_pymongo: - return self._get_as_pymongo(raw_doc) - doc = self._document._from_son(raw_doc, - _auto_dereference=self._auto_dereference) - if self._scalar: - return self._get_scalar(doc) - - return doc - - def rewind(self): - """Rewind the cursor to its unevaluated state. - - .. versionadded:: 0.3 - """ - self._iter = False - self._cursor.rewind() - - # Properties - - @property - def _collection(self): - """Property that returns the collection object. This allows us to - perform operations only if the collection is accessed. - """ - return self._collection_obj - - @property - def _cursor_args(self): - cursor_args = { - 'snapshot': self._snapshot, - 'timeout': self._timeout - } - if self._read_preference is not None: - cursor_args['read_preference'] = self._read_preference - else: - cursor_args['slave_okay'] = self._slave_okay - if self._loaded_fields: - cursor_args['fields'] = self._loaded_fields.as_dict() - return cursor_args - - @property - def _cursor(self): - if self._cursor_obj is None: - - self._cursor_obj = self._collection.find(self._query, - **self._cursor_args) - # Apply where clauses to cursor - if self._where_clause: - where_clause = self._sub_js_fields(self._where_clause) - self._cursor_obj.where(where_clause) - - if self._ordering: - # Apply query ordering - self._cursor_obj.sort(self._ordering) - elif self._document._meta['ordering']: - # Otherwise, apply the ordering from the document model - order = self._get_order_by(self._document._meta['ordering']) - self._cursor_obj.sort(order) - - if 
self._limit is not None: - self._cursor_obj.limit(self._limit) - - if self._skip is not None: - self._cursor_obj.skip(self._skip) - - if self._hint != -1: - self._cursor_obj.hint(self._hint) - - return self._cursor_obj - - def __deepcopy__(self, memo): - """Essential for chained queries with ReferenceFields involved""" - return self.clone() - - @property - def _query(self): - if self._mongo_query is None: - self._mongo_query = self._query_obj.to_query(self._document) - if self._class_check: - self._mongo_query.update(self._initial_query) - return self._mongo_query - - @property - def _dereference(self): - if not self.__dereference: - self.__dereference = _import_class('DeReference')() - return self.__dereference - - def no_dereference(self): - """Turn off any dereferencing for the results of this queryset. - """ - queryset = self.clone() - queryset._auto_dereference = False - return queryset - - # Helper Functions - - def _item_frequencies_map_reduce(self, field, normalize=False): - map_func = """ - function() { - var path = '{{~%(field)s}}'.split('.'); - var field = this; - - for (p in path) { - if (typeof field != 'undefined') - field = field[path[p]]; - else - break; - } - if (field && field.constructor == Array) { - field.forEach(function(item) { - emit(item, 1); - }); - } else if (typeof field != 'undefined') { - emit(field, 1); - } else { - emit(null, 1); - } - } - """ % dict(field=field) - reduce_func = """ - function(key, values) { - var total = 0; - var valuesSize = values.length; - for (var i=0; i < valuesSize; i++) { - total += parseInt(values[i], 10); - } - return total; - } - """ - values = self.map_reduce(map_func, reduce_func, 'inline') - frequencies = {} - for f in values: - key = f.key - if isinstance(key, float): - if int(key) == key: - key = int(key) - frequencies[key] = int(f.value) - - if normalize: - count = sum(frequencies.values()) - frequencies = dict([(k, float(v) / count) - for k, v in frequencies.items()]) - - return frequencies - - def 
_item_frequencies_exec_js(self, field, normalize=False): - """Uses exec_js to execute""" - freq_func = """ - function(path) { - var path = path.split('.'); - - var total = 0.0; - db[collection].find(query).forEach(function(doc) { - var field = doc; - for (p in path) { - if (field) - field = field[path[p]]; - else - break; - } - if (field && field.constructor == Array) { - total += field.length; - } else { - total++; - } - }); - - var frequencies = {}; - var types = {}; - var inc = 1.0; - - db[collection].find(query).forEach(function(doc) { - field = doc; - for (p in path) { - if (field) - field = field[path[p]]; - else - break; - } - if (field && field.constructor == Array) { - field.forEach(function(item) { - frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); - }); - } else { - var item = field; - types[item] = item; - frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); - } - }); - return [total, frequencies, types]; - } - """ - total, data, types = self.exec_js(freq_func, field) - values = dict([(types.get(k), int(v)) for k, v in data.iteritems()]) - - if normalize: - values = dict([(k, float(v) / total) for k, v in values.items()]) - - frequencies = {} - for k, v in values.iteritems(): - if isinstance(k, float): - if int(k) == k: - k = int(k) - - frequencies[k] = v - - return frequencies - - def _fields_to_dbfields(self, fields): - """Translate fields paths to its db equivalents""" - ret = [] - for field in fields: - field = ".".join(f.db_field for f in - self._document._lookup_field(field.split('.'))) - ret.append(field) - return ret - - def _get_order_by(self, keys): - """Creates a list of order by fields - """ - key_list = [] - for key in keys: - if not key: - continue - direction = pymongo.ASCENDING - if key[0] == '-': - direction = pymongo.DESCENDING - if key[0] in ('-', '+'): - key = key[1:] - key = key.replace('__', '.') - try: - key = self._document._translate_field_name(key) - except: - pass - 
key_list.append((key, direction)) - - if self._cursor_obj: - self._cursor_obj.sort(key_list) - return key_list - - def _get_scalar(self, doc): - - def lookup(obj, name): - chunks = name.split('__') - for chunk in chunks: - obj = getattr(obj, chunk) - return obj - - data = [lookup(doc, n) for n in self._scalar] - if len(data) == 1: - return data[0] - - return tuple(data) - - def _get_as_pymongo(self, row): - # Extract which fields paths we should follow if .fields(...) was - # used. If not, handle all fields. - if not getattr(self, '__as_pymongo_fields', None): - self.__as_pymongo_fields = [] - - for field in self._loaded_fields.fields - set(['_cls']): - self.__as_pymongo_fields.append(field) - while '.' in field: - field, _ = field.rsplit('.', 1) - self.__as_pymongo_fields.append(field) - - all_fields = not self.__as_pymongo_fields - - def clean(data, path=None): - path = path or '' - - if isinstance(data, dict): - new_data = {} - for key, value in data.iteritems(): - new_path = '%s.%s' % (path, key) if path else key - - if all_fields: - include_field = True - elif self._loaded_fields.value == QueryFieldList.ONLY: - include_field = new_path in self.__as_pymongo_fields - else: - include_field = new_path not in self.__as_pymongo_fields - - if include_field: - new_data[key] = clean(value, path=new_path) - data = new_data - elif isinstance(data, list): - data = [clean(d, path=path) for d in data] - else: - if self._as_pymongo_coerce: - # If we need to coerce types, we need to determine the - # type of this field and use the corresponding - # .to_python(...) 
- from mongoengine.fields import EmbeddedDocumentField - obj = self._document - for chunk in path.split('.'): - obj = getattr(obj, chunk, None) - if obj is None: - break - elif isinstance(obj, EmbeddedDocumentField): - obj = obj.document_type - if obj and data is not None: - data = obj.to_python(data) - return data - return clean(row) - - def _sub_js_fields(self, code): - """When fields are specified with [~fieldname] syntax, where - *fieldname* is the Python name of a field, *fieldname* will be - substituted for the MongoDB name of the field (specified using the - :attr:`name` keyword argument in a field's constructor). - """ - def field_sub(match): - # Extract just the field name, and look up the field objects - field_name = match.group(1).split('.') - fields = self._document._lookup_field(field_name) - # Substitute the correct name for the field into the javascript - return u'["%s"]' % fields[-1].db_field - - def field_path_sub(match): - # Extract just the field name, and look up the field objects - field_name = match.group(1).split('.') - fields = self._document._lookup_field(field_name) - # Substitute the correct name for the field into the javascript - return ".".join([f.db_field for f in fields]) - - code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) - code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, - code) - return code - - # Deprecated - def ensure_index(self, **kwargs): - """Deprecated use :func:`Document.ensure_index`""" - msg = ("Doc.objects()._ensure_index() is deprecated. " - "Use Doc.ensure_index() instead.") - warnings.warn(msg, DeprecationWarning) - self._document.__class__.ensure_index(**kwargs) - return self - - def _ensure_indexes(self): - """Deprecated use :func:`~Document.ensure_indexes`""" - msg = ("Doc.objects()._ensure_indexes() is deprecated. 
" - "Use Doc.ensure_indexes() instead.") - warnings.warn(msg, DeprecationWarning) - self._document.__class__.ensure_indexes() diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 9495a25..6e3eb9b 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3254,7 +3254,7 @@ class QuerySetTest(unittest.TestCase): User(name="Barack Obama", age=51, price=Decimal('2.22')).save() results = User.objects.only('id', 'name').as_pymongo() - self.assertEqual(results[0].keys(), ['_id', 'name']) + self.assertEqual(sorted(results[0].keys()), sorted(['_id', 'name'])) users = User.objects.only('name', 'price').as_pymongo() results = list(users) @@ -3365,6 +3365,34 @@ class QuerySetTest(unittest.TestCase): self.assertEqual("%s" % users, "[]") self.assertEqual(1, len(users._result_cache)) + def test_no_cache(self): + """Ensure you can add meta data to file""" + + class Noddy(Document): + fields = DictField() + + Noddy.drop_collection() + for i in xrange(100): + noddy = Noddy() + for j in range(20): + noddy.fields["key"+str(j)] = "value "+str(j) + noddy.save() + + docs = Noddy.objects.no_cache() + + counter = len([1 for i in docs]) + self.assertEquals(counter, 100) + + self.assertEquals(len(list(docs)), 100) + self.assertRaises(TypeError, lambda: len(docs)) + + with query_counter() as q: + self.assertEqual(q, 0) + list(docs) + self.assertEqual(q, 1) + list(docs) + self.assertEqual(q, 2) + def test_nested_queryset_iterator(self): # Try iterating the same queryset twice, nested. 
names = ['Alice', 'Bob', 'Chuck', 'David', 'Eric', 'Francis', 'George'] From fb0dd2c1ca6eb4a1a8fef29cce2a39497231d9e3 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 10 Jul 2013 19:54:30 +0000 Subject: [PATCH 402/464] Updated changelog --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index d875040..76df230 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,7 +5,7 @@ Changelog Changes in 0.8.3 ================ - Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365) -- Fixed sum and average mapreduce dot notation support (#375, #376) +- Fixed sum and average mapreduce dot notation support (#375, #376, #393) - Fixed as_pymongo to return the id (#386) - Document.select_related() now respects `db_alias` (#377) - Reload uses shard_key if applicable (#384) From e155e1fa8621c158b619bcbb1c5b9601f5364634 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 10 Jul 2013 20:10:01 +0000 Subject: [PATCH 403/464] Add a default for previously pickled versions --- mongoengine/base/document.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 04b0c05..0eb63d5 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -160,7 +160,7 @@ class BaseDocument(object): '_fields_ordered', '_dynamic_fields'): if k in data: setattr(self, k, data[k]) - for k in data.get('_dynamic_fields').keys(): + for k in data.get('_dynamic_fields', SON()).keys(): setattr(self, k, data["_data"].get(k)) def __iter__(self): From d9f538170b73f53f43f3f97ae3f6b66d7c40bb90 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 10 Jul 2013 21:19:11 +0000 Subject: [PATCH 404/464] Added get_proxy_object helper to filefields (#391) --- docs/changelog.rst | 1 + mongoengine/fields.py | 18 ++++++++---------- tests/fields/file_tests.py | 26 ++++++++++++++++++++++++++ 3 files changed, 35 
insertions(+), 10 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 76df230..e0f47fe 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.3 ================ +- Added get_proxy_object helper to filefields (#391) - Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365) - Fixed sum and average mapreduce dot notation support (#375, #376, #393) - Fixed as_pymongo to return the id (#386) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 9d3a668..47554e0 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1190,7 +1190,7 @@ class FileField(BaseField): # Check if a file already exists for this model grid_file = instance._data.get(self.name) if not isinstance(grid_file, self.proxy_class): - grid_file = self.get_proxy_obj(key=key, instance=instance) + grid_file = self.get_proxy_obj(key=self.name, instance=instance) instance._data[self.name] = grid_file if not grid_file.key: @@ -1218,16 +1218,16 @@ class FileField(BaseField): instance._data[key] = value instance._mark_as_changed(key) - + def get_proxy_obj(self, key, instance, db_alias=None, collection_name=None): if db_alias is None: db_alias = self.db_alias if collection_name is None: collection_name = self.collection_name - - return self.proxy_class(key=key, instance=instance, - db_alias=db_alias, - collection_name=collection_name) + + return self.proxy_class(key=key, instance=instance, + db_alias=db_alias, + collection_name=collection_name) def to_mongo(self, value): # Store the GridFS file id in MongoDB @@ -1261,10 +1261,8 @@ class ImageGridFsProxy(GridFSProxy): applying field properties (size, thumbnail_size) """ field = self.instance._fields[self.key] - # if the field from the instance has an attribute field - # we use that one and hope for the best. Usually only container - # fields have a field attribute. 
- if hasattr(field, 'field'): + # Handle nested fields + if hasattr(field, 'field') and isinstance(field.field, FileField): field = field.field try: diff --git a/tests/fields/file_tests.py b/tests/fields/file_tests.py index dfef9ee..d044500 100644 --- a/tests/fields/file_tests.py +++ b/tests/fields/file_tests.py @@ -455,5 +455,31 @@ class FileTest(unittest.TestCase): self.assertEqual(1, TestImage.objects(Q(image1=grid_id) or Q(image2=grid_id)).count()) + def test_complex_field_filefield(self): + """Ensure you can add meta data to file""" + + class Animal(Document): + genus = StringField() + family = StringField() + photos = ListField(FileField()) + + Animal.drop_collection() + marmot = Animal(genus='Marmota', family='Sciuridae') + + marmot_photo = open(TEST_IMAGE_PATH, 'rb') # Retrieve a photo from disk + + photos_field = marmot._fields['photos'].field + new_proxy = photos_field.get_proxy_obj('photos', marmot) + new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar') + marmot_photo.close() + + marmot.photos.append(new_proxy) + marmot.save() + + marmot = Animal.objects.get() + self.assertEqual(marmot.photos[0].content_type, 'image/jpeg') + self.assertEqual(marmot.photos[0].foo, 'bar') + self.assertEqual(marmot.photos[0].get().length, 8313) + if __name__ == '__main__': unittest.main() From f48a0b7b7d8cee37c7519bab162123a5493e17f1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 10 Jul 2013 21:30:29 +0000 Subject: [PATCH 405/464] Trying to fix travis --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index 2bb5863..8c4d5e1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,6 +17,8 @@ install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install django==$DJANGO --use-mirrors ; true; fi + - if [[ $TRAVIS_PYTHON_VERSION == 
'2.'* ]]; then pip install python-dateutils==1.5 --no-allow-external ; true; fi + - if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then pip install python-dateutils --no-allow-external ; true; fi - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi - python setup.py install From 6c599ef50678bbef18a49bedddcfe3ea87705c86 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 11 Jul 2013 07:15:34 +0000 Subject: [PATCH 406/464] Fix edge case where _dynamic_keys stored as None (#387, #401) --- mongoengine/base/document.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 0eb63d5..cbce4ff 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -160,7 +160,8 @@ class BaseDocument(object): '_fields_ordered', '_dynamic_fields'): if k in data: setattr(self, k, data[k]) - for k in data.get('_dynamic_fields', SON()).keys(): + dynamic_fields = data.get('_dynamic_fields') or SON() + for k in dynamic_fields.keys(): setattr(self, k, data["_data"].get(k)) def __iter__(self): From d593f7e04b6bf26b576b597b2a68dfd72b400269 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 11 Jul 2013 08:11:00 +0000 Subject: [PATCH 407/464] Fixed EmbeddedDocuments with `id` also storing `_id` (#402) --- mongoengine/base/document.py | 6 ++++-- tests/document/instance.py | 7 +++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index cbce4ff..536fc2f 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -262,8 +262,10 @@ class BaseDocument(object): data[field.db_field] = value # If "_id" has not been set, then try and set it - if data["_id"] is None: - data["_id"] = self._data.get("id", None) + Document = _import_class("Document") + if 
isinstance(self, Document): + if data["_id"] is None: + data["_id"] = self._data.get("id", None) if data['_id'] is None: data.pop('_id') diff --git a/tests/document/instance.py b/tests/document/instance.py index e85c9d8..a61c439 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -444,6 +444,13 @@ class InstanceTest(unittest.TestCase): self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(), ['_cls', 'name', 'age', 'salary']) + def test_embedded_document_to_mongo_id(self): + class SubDoc(EmbeddedDocument): + id = StringField(required=True) + + sub_doc = SubDoc(id="abc") + self.assertEqual(sub_doc.to_mongo().keys(), ['id']) + def test_embedded_document(self): """Ensure that embedded documents are set up correctly. """ From 6c2c33cac8010d8658810073ba3c21843f24cb9a Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 11 Jul 2013 08:12:27 +0000 Subject: [PATCH 408/464] Add Jatin- to Authors, changelog update --- AUTHORS | 1 + docs/changelog.rst | 1 + 2 files changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index 70720f0..b8143a0 100644 --- a/AUTHORS +++ b/AUTHORS @@ -171,3 +171,4 @@ that much better: * Michael Bartnett (https://github.com/michaelbartnett) * Alon Horev (https://github.com/alonho) * Kelvin Hammond (https://github.com/kelvinhammond) + * Jatin- (https://github.com/jatin-) diff --git a/docs/changelog.rst b/docs/changelog.rst index e0f47fe..6ca52b2 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.3 ================ +- Fixed EmbeddedDocuments with `id` also storing `_id` (#402) - Added get_proxy_object helper to filefields (#391) - Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365) - Fixed sum and average mapreduce dot notation support (#375, #376, #393) From 73026047e9e8be9581a61f561b8cb14cb9613fdf Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 11 Jul 2013 09:29:06 +0000 Subject: [PATCH 409/464] Trying to fix dateutil --- 
.travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 8c4d5e1..092b985 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,11 +14,11 @@ env: - PYMONGO=3.2 DJANGO=1.5.1 - PYMONGO=3.3 DJANGO=1.5.1 install: + - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install python-dateutil; true; fi + - if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then sudo apt-get install python3-dateutil; true; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install django==$DJANGO --use-mirrors ; true; fi - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install python-dateutils==1.5 --no-allow-external ; true; fi - - if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then pip install python-dateutils --no-allow-external ; true; fi - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi - python setup.py install From 1aa2b86df31822ec527460db16422e2177e81eee Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 11 Jul 2013 09:38:59 +0000 Subject: [PATCH 410/464] travis install python-dateutil direct --- .travis.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 092b985..c7e8ea3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,13 +14,12 @@ env: - PYMONGO=3.2 DJANGO=1.5.1 - PYMONGO=3.3 DJANGO=1.5.1 install: - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install python-dateutil; true; fi - - if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then sudo apt-get install python3-dateutil; true; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then 
pip install pil --use-mirrors ; true; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install django==$DJANGO --use-mirrors ; true; fi - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi + - pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b - python setup.py install script: - python setup.py test From 48ef176e281bba8bd973a1caa530774540d5ce61 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 12 Jul 2013 08:41:56 +0000 Subject: [PATCH 411/464] 0.8.3 is a go --- mongoengine/__init__.py | 2 +- python-mongoengine.spec | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index 5bd1201..bfa35fb 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -15,7 +15,7 @@ import django __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + list(queryset.__all__) + signals.__all__ + list(errors.__all__)) -VERSION = (0, 8, 2) +VERSION = (0, 8, 3) def get_version(): diff --git a/python-mongoengine.spec b/python-mongoengine.spec index 4eaba4d..512c621 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.8.2 +Version: 0.8.3 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB From dc5512e4039f81b7773f177eb6824e63af2d513f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 12 Jul 2013 09:01:11 +0000 Subject: [PATCH 412/464] Upgrade warning for 0.8.3 --- docs/upgrade.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index b8864b0..0051a62 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -3,7 +3,7 @@ Upgrading ######### -0.8.2 to 0.8.2 +0.8.2 to 0.8.3 
************** Minor change that may impact users: From 35f2781518e7d8d642b54010b9ec6f94ba3e44c9 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 12 Jul 2013 09:11:27 +0000 Subject: [PATCH 413/464] Update changelog --- docs/_themes/nature/static/nature.css_t | 72 +++++++++++++------------ docs/changelog.rst | 5 ++ 2 files changed, 43 insertions(+), 34 deletions(-) diff --git a/docs/_themes/nature/static/nature.css_t b/docs/_themes/nature/static/nature.css_t index 03b0379..337760b 100644 --- a/docs/_themes/nature/static/nature.css_t +++ b/docs/_themes/nature/static/nature.css_t @@ -2,11 +2,15 @@ * Sphinx stylesheet -- default theme * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ - + @import url("basic.css"); - + +#changelog p.first {margin-bottom: 0 !important;} +#changelog p {margin-top: 0 !important; + margin-bottom: 0 !important;} + /* -- page layout ----------------------------------------------------------- */ - + body { font-family: Arial, sans-serif; font-size: 100%; @@ -28,18 +32,18 @@ div.bodywrapper { hr{ border: 1px solid #B1B4B6; } - + div.document { background-color: #eee; } - + div.body { background-color: #ffffff; color: #3E4349; padding: 0 30px 30px 30px; font-size: 0.8em; } - + div.footer { color: #555; width: 100%; @@ -47,12 +51,12 @@ div.footer { text-align: center; font-size: 75%; } - + div.footer a { color: #444; text-decoration: underline; } - + div.related { background-color: #6BA81E; line-height: 32px; @@ -60,11 +64,11 @@ div.related { text-shadow: 0px 1px 0 #444; font-size: 0.80em; } - + div.related a { color: #E2F3CC; } - + div.sphinxsidebar { font-size: 0.75em; line-height: 1.5em; @@ -73,7 +77,7 @@ div.sphinxsidebar { div.sphinxsidebarwrapper{ padding: 20px 0; } - + div.sphinxsidebar h3, div.sphinxsidebar h4 { font-family: Arial, sans-serif; @@ -89,30 +93,30 @@ div.sphinxsidebar h4 { div.sphinxsidebar h4{ font-size: 1.1em; } - + div.sphinxsidebar h3 a { color: #444; } - - + + div.sphinxsidebar p { color: #888; padding: 5px 20px; } - + 
div.sphinxsidebar p.topless { } - + div.sphinxsidebar ul { margin: 10px 20px; padding: 0; color: #000; } - + div.sphinxsidebar a { color: #444; } - + div.sphinxsidebar input { border: 1px solid #ccc; font-family: sans-serif; @@ -122,19 +126,19 @@ div.sphinxsidebar input { div.sphinxsidebar input[type=text]{ margin-left: 20px; } - + /* -- body styles ----------------------------------------------------------- */ - + a { color: #005B81; text-decoration: none; } - + a:hover { color: #E32E00; text-decoration: underline; } - + div.body h1, div.body h2, div.body h3, @@ -149,30 +153,30 @@ div.body h6 { padding: 5px 0 5px 10px; text-shadow: 0px 1px 0 white } - + div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; } div.body h2 { font-size: 150%; background-color: #C8D5E3; } div.body h3 { font-size: 120%; background-color: #D8DEE3; } div.body h4 { font-size: 110%; background-color: #D8DEE3; } div.body h5 { font-size: 100%; background-color: #D8DEE3; } div.body h6 { font-size: 100%; background-color: #D8DEE3; } - + a.headerlink { color: #c60f0f; font-size: 0.8em; padding: 0 4px 0 4px; text-decoration: none; } - + a.headerlink:hover { background-color: #c60f0f; color: white; } - + div.body p, div.body dd, div.body li { line-height: 1.5em; } - + div.admonition p.admonition-title + p { display: inline; } @@ -185,29 +189,29 @@ div.note { background-color: #eee; border: 1px solid #ccc; } - + div.seealso { background-color: #ffc; border: 1px solid #ff6; } - + div.topic { background-color: #eee; } - + div.warning { background-color: #ffe4e4; border: 1px solid #f66; } - + p.admonition-title { display: inline; } - + p.admonition-title:after { content: ":"; } - + pre { padding: 10px; background-color: White; @@ -219,7 +223,7 @@ pre { -webkit-box-shadow: 1px 1px 1px #d8d8d8; -moz-box-shadow: 1px 1px 1px #d8d8d8; } - + tt { background-color: #ecf0f3; color: #222; diff --git a/docs/changelog.rst b/docs/changelog.rst index 6ca52b2..ee92d47 100644 --- 
a/docs/changelog.rst +++ b/docs/changelog.rst @@ -12,6 +12,9 @@ Changes in 0.8.3 - Document.select_related() now respects `db_alias` (#377) - Reload uses shard_key if applicable (#384) - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) + + **Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3 + - Fixed pickling dynamic documents `_dynamic_fields` (#387) - Fixed ListField setslice and delslice dirty tracking (#390) - Added Django 1.5 PY3 support (#392) @@ -20,6 +23,8 @@ Changes in 0.8.3 - Fixed queryset.get() respecting no_dereference (#373) - Added full_result kwarg to update (#380) + + Changes in 0.8.2 ================ - Added compare_indexes helper (#361) From bcf83ec76190524a5508f17d462b9b360584c07e Mon Sep 17 00:00:00 2001 From: bool-dev Date: Thu, 18 Jul 2013 09:17:28 +0530 Subject: [PATCH 414/464] Corrected spelling mistakes, some grammar, and UUID/DecimalField error in upgrade.rst --- docs/upgrade.rst | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 0051a62..a1fccea 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -16,8 +16,8 @@ fields. Previously they were stored alphabetically. ********** There have been numerous backwards breaking changes in 0.8. The reasons for -these are ensure that MongoEngine has sane defaults going forward and -performs the best it can out the box. Where possible there have been +these are to ensure that MongoEngine has sane defaults going forward and that it +performs the best it can out of the box. Where possible there have been FutureWarnings to help get you ready for the change, but that hasn't been possible for the whole of the release. 
@@ -71,7 +71,7 @@ inherited classes like so: :: Document Definition ------------------- -The default for inheritance has changed - its now off by default and +The default for inheritance has changed - it is now off by default and :attr:`_cls` will not be stored automatically with the class. So if you extend your :class:`~mongoengine.Document` or :class:`~mongoengine.EmbeddedDocuments` you will need to declare :attr:`allow_inheritance` in the meta data like so: :: @@ -81,7 +81,7 @@ you will need to declare :attr:`allow_inheritance` in the meta data like so: :: meta = {'allow_inheritance': True} -Previously, if you had data the database that wasn't defined in the Document +Previously, if you had data in the database that wasn't defined in the Document definition, it would set it as an attribute on the document. This is no longer the case and the data is set only in the ``document._data`` dictionary: :: @@ -102,8 +102,8 @@ the case and the data is set only in the ``document._data`` dictionary: :: AttributeError: 'Animal' object has no attribute 'size' The Document class has introduced a reserved function `clean()`, which will be -called before saving the document. If your document class happen to have a method -with the same name, please try rename it. +called before saving the document. If your document class happens to have a method +with the same name, please try to rename it. def clean(self): pass @@ -111,7 +111,7 @@ with the same name, please try rename it. 
ReferenceField -------------- -ReferenceFields now store ObjectId's by default - this is more efficient than +ReferenceFields now store ObjectIds by default - this is more efficient than DBRefs as we already know what Document types they reference:: # Old code @@ -157,7 +157,7 @@ UUIDFields now default to storing binary values:: class Animal(Document): uuid = UUIDField(binary=False) -To migrate all the uuid's you need to touch each object and mark it as dirty +To migrate all the uuids you need to touch each object and mark it as dirty eg:: # Doc definition @@ -175,7 +175,7 @@ eg:: DecimalField ------------ -DecimalField now store floats - previous it was storing strings and that +DecimalFields now store floats - previously it was storing strings and that made it impossible to do comparisons when querying correctly.:: # Old code @@ -186,7 +186,7 @@ made it impossible to do comparisons when querying correctly.:: class Person(Document): balance = DecimalField(force_string=True) -To migrate all the uuid's you need to touch each object and mark it as dirty +To migrate all the DecimalFields you need to touch each object and mark it as dirty eg:: # Doc definition @@ -198,7 +198,7 @@ eg:: p._mark_as_changed('balance') p.save() -.. note:: DecimalField's have also been improved with the addition of precision +.. note:: DecimalFields have also been improved with the addition of precision and rounding. See :class:`~mongoengine.fields.DecimalField` for more information. `An example test migration for DecimalFields is available on github @@ -207,7 +207,7 @@ eg:: Cascading Saves --------------- To improve performance document saves will no longer automatically cascade. 
-Any changes to a Documents references will either have to be saved manually or +Any changes to a Document's references will either have to be saved manually or you will have to explicitly tell it to cascade on save:: # At the class level: @@ -249,7 +249,7 @@ update your code like so: :: # Update example a) assign queryset after a change: mammals = Animal.objects(type="mammal") - carnivores = mammals.filter(order="Carnivora") # Reassign the new queryset so fitler can be applied + carnivores = mammals.filter(order="Carnivora") # Reassign the new queryset so filter can be applied [m for m in carnivores] # This will return all carnivores # Update example b) chain the queryset: @@ -276,7 +276,7 @@ queryset you should upgrade to use count:: .only() now inline with .exclude() ---------------------------------- -The behaviour of `.only()` was highly ambious, now it works in the mirror fashion +The behaviour of `.only()` was highly ambiguous, now it works in mirror fashion to `.exclude()`. Chaining `.only()` calls will increase the fields required:: # Old code @@ -440,7 +440,7 @@ main areas of changed are: choices in fields, map_reduce and collection names. Choice options: =============== -Are now expected to be an iterable of tuples, with the first element in each +Are now expected to be an iterable of tuples, with the first element in each tuple being the actual value to be stored. The second element is the human-readable name for the option. 
@@ -462,8 +462,8 @@ such the following have been changed: Default collection naming ========================= -Previously it was just lowercase, its now much more pythonic and readable as -its lowercase and underscores, previously :: +Previously it was just lowercase, it's now much more pythonic and readable as +it's lowercase and underscores, previously :: class MyAceDocument(Document): pass @@ -530,5 +530,5 @@ Alternatively, you can rename your collections eg :: mongodb 1.8 > 2.0 + =================== -Its been reported that indexes may need to be recreated to the newer version of indexes. +It's been reported that indexes may need to be recreated to the newer version of indexes. To do this drop indexes and call ``ensure_indexes`` on each model. From 80b3df89537a50cc9c6f91163306185a0816c386 Mon Sep 17 00:00:00 2001 From: Thom Knowles Date: Mon, 22 Jul 2013 20:07:57 -0400 Subject: [PATCH 415/464] dereference instance not thread-safe --- AUTHORS | 1 + mongoengine/base/fields.py | 16 +++++----------- 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/AUTHORS b/AUTHORS index b8143a0..69a87a2 100644 --- a/AUTHORS +++ b/AUTHORS @@ -172,3 +172,4 @@ that much better: * Alon Horev (https://github.com/alonho) * Kelvin Hammond (https://github.com/kelvinhammond) * Jatin- (https://github.com/jatin-) + * Thom Knowles (https://github.com/fleat) diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index eda9b3c..c6abd02 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -186,7 +186,6 @@ class ComplexBaseField(BaseField): """ field = None - __dereference = False def __get__(self, instance, owner): """Descriptor to automatically dereference references. 
@@ -201,9 +200,11 @@ class ComplexBaseField(BaseField): (self.field is None or isinstance(self.field, (GenericReferenceField, ReferenceField)))) + _dereference = _import_class("DeReference")() + self._auto_dereference = instance._fields[self.name]._auto_dereference - if not self.__dereference and instance._initialised and dereference: - instance._data[self.name] = self._dereference( + if instance._initialised and dereference: + instance._data[self.name] = _dereference( instance._data.get(self.name), max_depth=1, instance=instance, name=self.name ) @@ -222,7 +223,7 @@ class ComplexBaseField(BaseField): if (self._auto_dereference and instance._initialised and isinstance(value, (BaseList, BaseDict)) and not value._dereferenced): - value = self._dereference( + value = _dereference( value, max_depth=1, instance=instance, name=self.name ) value._dereferenced = True @@ -382,13 +383,6 @@ class ComplexBaseField(BaseField): owner_document = property(_get_owner_document, _set_owner_document) - @property - def _dereference(self,): - if not self.__dereference: - DeReference = _import_class("DeReference") - self.__dereference = DeReference() # Cached - return self.__dereference - class ObjectIdField(BaseField): """A field wrapper around MongoDB's ObjectIds. From d92ed04538e7dcf0e94f87e9c3272c0f8a3c1ee0 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 23 Jul 2013 08:13:52 +0000 Subject: [PATCH 416/464] Docs update #406 --- mongoengine/queryset/queryset.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 9db98a7..9cfb1b6 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -95,7 +95,10 @@ class QuerySet(BaseQuerySet): self._has_more = False def no_cache(self): - """Convert to a non_caching queryset""" + """Convert to a non_caching queryset + + .. 
versionadded:: 0.8.3 Convert to non caching queryset + """ if self._result_cache is not None: raise OperationError("QuerySet already cached") return self.clone_into(QuerySetNoCache(self._document, self._collection)) @@ -105,7 +108,10 @@ class QuerySetNoCache(BaseQuerySet): """A non caching QuerySet""" def cache(self): - """Convert to a caching queryset""" + """Convert to a caching queryset + + .. versionadded:: 0.8.3 Convert to caching queryset + """ return self.clone_into(QuerySet(self._document, self._collection)) def __repr__(self): From a458d5a176afc645e35ef02f3103c597e728e849 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 23 Jul 2013 08:16:06 +0000 Subject: [PATCH 417/464] Docs update #406 --- docs/guide/querying.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 5fd0360..127e479 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -17,8 +17,8 @@ fetch documents from the database:: As of MongoEngine 0.8 the querysets utilise a local cache. So iterating it multiple times will only cause a single query. If this is not the - desired behavour you can call :class:`~mongoengine.QuerySet.no_cache` to - return a non-caching queryset. + desired behavour you can call :class:`~mongoengine.QuerySet.no_cache` + (version **0.8.3+**) to return a non-caching queryset. 
Filtering queries ================= From dae9e662a57ed6c4e73a1aaa517b6d17a7bb9fcc Mon Sep 17 00:00:00 2001 From: Paul Uithol Date: Thu, 25 Jul 2013 14:30:20 +0200 Subject: [PATCH 418/464] Create test case for failing saves (wrong delta) with dbref=False --- tests/document/delta.py | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/tests/document/delta.py b/tests/document/delta.py index 3656d9e..a5302f1 100644 --- a/tests/document/delta.py +++ b/tests/document/delta.py @@ -313,17 +313,17 @@ class DeltaTest(unittest.TestCase): self.circular_reference_deltas_2(DynamicDocument, Document) self.circular_reference_deltas_2(DynamicDocument, DynamicDocument) - def circular_reference_deltas_2(self, DocClass1, DocClass2): + def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True): class Person(DocClass1): name = StringField() - owns = ListField(ReferenceField('Organization')) - employer = ReferenceField('Organization') + owns = ListField(ReferenceField('Organization', dbref=dbref)) + employer = ReferenceField('Organization', dbref=dbref) class Organization(DocClass2): name = StringField() - owner = ReferenceField('Person') - employees = ListField(ReferenceField('Person')) + owner = ReferenceField('Person', dbref=dbref) + employees = ListField(ReferenceField('Person', dbref=dbref)) Person.drop_collection() Organization.drop_collection() @@ -355,6 +355,8 @@ class DeltaTest(unittest.TestCase): self.assertEqual(o.owner, p) self.assertEqual(e.employer, o) + return person, organization, employee + def test_delta_db_field(self): self.delta_db_field(Document) self.delta_db_field(DynamicDocument) @@ -686,6 +688,19 @@ class DeltaTest(unittest.TestCase): self.assertEqual(doc._get_changed_fields(), ['list_field']) self.assertEqual(doc._delta(), ({}, {'list_field': 1})) + def test_delta_with_dbref_true(self): + person, organization, employee = self.circular_reference_deltas_2(Document, Document, True) + employee.name = 'test' + 
changed = organization._get_changed_fields() + delta = organization._delta() + organization.save() + + def test_delta_with_dbref_false(self): + person, organization, employee = self.circular_reference_deltas_2(Document, Document, False) + employee.name = 'test' + changed = organization._get_changed_fields() + delta = organization._delta() + organization.save() if __name__ == '__main__': unittest.main() From 2ad5ffbda215f71f91261fa1cac3ebfaa978f904 Mon Sep 17 00:00:00 2001 From: Paul Uithol Date: Thu, 25 Jul 2013 14:51:09 +0200 Subject: [PATCH 419/464] Add asserts to `test_delta_with_dbref_*`, instead of relying on exceptions --- tests/document/delta.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/tests/document/delta.py b/tests/document/delta.py index a5302f1..fae65c9 100644 --- a/tests/document/delta.py +++ b/tests/document/delta.py @@ -691,15 +691,25 @@ class DeltaTest(unittest.TestCase): def test_delta_with_dbref_true(self): person, organization, employee = self.circular_reference_deltas_2(Document, Document, True) employee.name = 'test' - changed = organization._get_changed_fields() - delta = organization._delta() + + self.assertEqual(organization._get_changed_fields(), ['employees.0.name']) + + updates, removals = organization._delta() + self.assertEqual({}, removals) + self.assertIn('employees.0', updates) + organization.save() def test_delta_with_dbref_false(self): person, organization, employee = self.circular_reference_deltas_2(Document, Document, False) employee.name = 'test' - changed = organization._get_changed_fields() - delta = organization._delta() + + self.assertEqual(organization._get_changed_fields(), ['employees.0.name']) + + updates, removals = organization._delta() + self.assertEqual({}, removals) + self.assertIn('employees.0', updates) + organization.save() if __name__ == '__main__': From e27439be6adf4326177e7ff1530047c22a8e2831 Mon Sep 17 00:00:00 2001 From: Paul Uithol Date: Thu, 25 Jul 2013 14:52:03 
+0200 Subject: [PATCH 420/464] Fix `BaseDocument._delta` when working with plain ObjectIds instead of DBRefs --- mongoengine/base/document.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 536fc2f..258b3f2 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -4,7 +4,7 @@ import numbers from functools import partial import pymongo -from bson import json_util +from bson import json_util, ObjectId from bson.dbref import DBRef from bson.son import SON @@ -454,7 +454,7 @@ class BaseDocument(object): d = doc new_path = [] for p in parts: - if isinstance(d, DBRef): + if isinstance(d, (ObjectId, DBRef)): break elif isinstance(d, list) and p.isdigit(): d = d[int(p)] From d143e50238c304c6aca9ce9142c59707c24e4572 Mon Sep 17 00:00:00 2001 From: Paul Uithol Date: Thu, 25 Jul 2013 15:34:58 +0200 Subject: [PATCH 421/464] Replace `assertIn` with an `assertTrue`; apparently missing in Python 2.6 --- tests/document/delta.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/document/delta.py b/tests/document/delta.py index fae65c9..c6efc02 100644 --- a/tests/document/delta.py +++ b/tests/document/delta.py @@ -696,7 +696,7 @@ class DeltaTest(unittest.TestCase): updates, removals = organization._delta() self.assertEqual({}, removals) - self.assertIn('employees.0', updates) + self.assertTrue('employees.0' in updates) organization.save() @@ -708,7 +708,7 @@ class DeltaTest(unittest.TestCase): updates, removals = organization._delta() self.assertEqual({}, removals) - self.assertIn('employees.0', updates) + self.assertTrue('employees.0' in updates) organization.save() From 67f43b2aad761e7d04908742c14441282f20d0db Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 29 Jul 2013 15:29:48 +0000 Subject: [PATCH 422/464] Allow args and kwargs to be passed through to_json (#420) --- docs/changelog.rst | 4 ++++ mongoengine/base/document.py | 4 ++-- 
mongoengine/queryset/base.py | 4 ++-- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index ee92d47..a53d6df 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -2,6 +2,10 @@ Changelog ========= +Changes in 0.8.4 +================ +- Allow args and kwargs to be passed through to_json (#420) + Changes in 0.8.3 ================ - Fixed EmbeddedDocuments with `id` also storing `_id` (#402) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 536fc2f..e8232a0 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -321,9 +321,9 @@ class BaseDocument(object): message = "ValidationError (%s:%s) " % (self._class_name, pk) raise ValidationError(message, errors=errors) - def to_json(self): + def to_json(self, *args, **kwargs): """Converts a document to JSON""" - return json_util.dumps(self.to_mongo()) + return json_util.dumps(self.to_mongo(), *args, **kwargs) @classmethod def from_json(cls, json_data): diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index d3bb4c4..e88feb3 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -827,9 +827,9 @@ class BaseQuerySet(object): # JSON Helpers - def to_json(self): + def to_json(self, *args, **kwargs): """Converts a queryset to JSON""" - return json_util.dumps(self.as_pymongo()) + return json_util.dumps(self.as_pymongo(), *args, **kwargs) def from_json(self, json_data): """Converts json data to unsaved objects""" From 7a97d42338b49aa4253c3d590390a68810ff98cd Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 29 Jul 2013 15:38:08 +0000 Subject: [PATCH 423/464] to_json test updates #420 --- tests/document/json_serialisation.py | 4 ++++ tests/queryset/queryset.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/document/json_serialisation.py b/tests/document/json_serialisation.py index dbc09d8..2b5d9a0 100644 --- 
a/tests/document/json_serialisation.py +++ b/tests/document/json_serialisation.py @@ -31,6 +31,10 @@ class TestJson(unittest.TestCase): doc = Doc(string="Hi", embedded_field=Embedded(string="Hi")) + doc_json = doc.to_json(sort_keys=True, separators=(',', ':')) + expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}""" + self.assertEqual(doc_json, expected_json) + self.assertEqual(doc, Doc.from_json(doc.to_json())) def test_json_complex(self): diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index c56b31e..0ec41fa 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3327,7 +3327,7 @@ class QuerySetTest(unittest.TestCase): Doc(string="Bye", embedded_field=Embedded(string="Bye")).save() Doc().save() - json_data = Doc.objects.to_json() + json_data = Doc.objects.to_json(sort_keys=True, separators=(',', ':')) doc_objects = list(Doc.objects) self.assertEqual(doc_objects, Doc.objects.from_json(json_data)) From 93a2adb3e6b8d4cac791659e0f3154df8a752904 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 29 Jul 2013 15:43:54 +0000 Subject: [PATCH 424/464] Updating changelog and authors #417 --- AUTHORS | 1 + docs/changelog.rst | 1 + 2 files changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index b8143a0..e9f6ad9 100644 --- a/AUTHORS +++ b/AUTHORS @@ -172,3 +172,4 @@ that much better: * Alon Horev (https://github.com/alonho) * Kelvin Hammond (https://github.com/kelvinhammond) * Jatin- (https://github.com/jatin-) + * Paul Uithol (https://github.com/PaulUithol) diff --git a/docs/changelog.rst b/docs/changelog.rst index a53d6df..b9c74f8 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.4 ================ +- Fixed _delta including referenced fields when dbref=False (#417) - Allow args and kwargs to be passed through to_json (#420) Changes in 0.8.3 From 1e4d48d371e2920dd3397bb20b2f6f1456ed1566 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 29 Jul 2013 17:22:24 
+0000 Subject: [PATCH 425/464] Don't follow references in _get_changed_fields (#422, #417) A better fix so we dont follow down a references rabbit hole. --- docs/changelog.rst | 2 +- mongoengine/base/document.py | 30 ++++++++++++++++++------------ tests/document/delta.py | 30 ++++++++++++++++-------------- 3 files changed, 35 insertions(+), 27 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b9c74f8..9112b2b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,7 +4,7 @@ Changelog Changes in 0.8.4 ================ -- Fixed _delta including referenced fields when dbref=False (#417) +- Don't follow references in _get_changed_fields (#422, #417) - Allow args and kwargs to be passed through to_json (#420) Changes in 0.8.3 diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index f1c1d55..80111f7 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -395,6 +395,7 @@ class BaseDocument(object): """ EmbeddedDocument = _import_class("EmbeddedDocument") DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument") + ReferenceField = _import_class("ReferenceField") _changed_fields = [] _changed_fields += getattr(self, '_changed_fields', []) @@ -405,31 +406,36 @@ class BaseDocument(object): inspected.add(self.id) for field_name in self._fields_ordered: - db_field_name = self._db_field_map.get(field_name, field_name) key = '%s.' 
% db_field_name - field = self._data.get(field_name, None) - if hasattr(field, 'id'): - if field.id in inspected: - continue - inspected.add(field.id) + data = self._data.get(field_name, None) + field = self._fields.get(field_name) - if (isinstance(field, (EmbeddedDocument, DynamicEmbeddedDocument)) + if hasattr(data, 'id'): + if data.id in inspected: + continue + inspected.add(data.id) + if isinstance(field, ReferenceField): + continue + elif (isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument)) and db_field_name not in _changed_fields): # Find all embedded fields that have been changed - changed = field._get_changed_fields(inspected) + changed = data._get_changed_fields(inspected) _changed_fields += ["%s%s" % (key, k) for k in changed if k] - elif (isinstance(field, (list, tuple, dict)) and + elif (isinstance(data, (list, tuple, dict)) and db_field_name not in _changed_fields): # Loop list / dict fields as they contain documents # Determine the iterator to use - if not hasattr(field, 'items'): - iterator = enumerate(field) + if not hasattr(data, 'items'): + iterator = enumerate(data) else: - iterator = field.iteritems() + iterator = data.iteritems() for index, value in iterator: if not hasattr(value, '_get_changed_fields'): continue + if (hasattr(field, 'field') and + isinstance(field.field, ReferenceField)): + continue list_key = "%s%s." 
% (key, index) changed = value._get_changed_fields(inspected) _changed_fields += ["%s%s" % (list_key, k) diff --git a/tests/document/delta.py b/tests/document/delta.py index c6efc02..b4749f3 100644 --- a/tests/document/delta.py +++ b/tests/document/delta.py @@ -328,14 +328,9 @@ class DeltaTest(unittest.TestCase): Person.drop_collection() Organization.drop_collection() - person = Person(name="owner") - person.save() - - employee = Person(name="employee") - employee.save() - - organization = Organization(name="company") - organization.save() + person = Person(name="owner").save() + employee = Person(name="employee").save() + organization = Organization(name="company").save() person.owns.append(organization) organization.owner = person @@ -692,25 +687,32 @@ class DeltaTest(unittest.TestCase): person, organization, employee = self.circular_reference_deltas_2(Document, Document, True) employee.name = 'test' - self.assertEqual(organization._get_changed_fields(), ['employees.0.name']) + self.assertEqual(organization._get_changed_fields(), []) updates, removals = organization._delta() self.assertEqual({}, removals) - self.assertTrue('employees.0' in updates) + self.assertEqual({}, updates) - organization.save() + organization.employees.append(person) + updates, removals = organization._delta() + self.assertEqual({}, removals) + self.assertTrue('employees' in updates) def test_delta_with_dbref_false(self): person, organization, employee = self.circular_reference_deltas_2(Document, Document, False) employee.name = 'test' - self.assertEqual(organization._get_changed_fields(), ['employees.0.name']) + self.assertEqual(organization._get_changed_fields(), []) updates, removals = organization._delta() self.assertEqual({}, removals) - self.assertTrue('employees.0' in updates) + self.assertEqual({}, updates) + + organization.employees.append(person) + updates, removals = organization._delta() + self.assertEqual({}, removals) + self.assertTrue('employees' in updates) - 
organization.save() if __name__ == '__main__': unittest.main() From 6efd6faa3f5a467012fa4ee128889104ca0ba6f7 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 30 Jul 2013 10:30:16 +0000 Subject: [PATCH 426/464] Fixed QuerySetNoCache.count() caching (#410) --- docs/changelog.rst | 1 + mongoengine/queryset/base.py | 10 ++-------- mongoengine/queryset/queryset.py | 15 +++++++++++++++ tests/queryset/queryset.py | 21 +++++++++++++++++++++ 4 files changed, 39 insertions(+), 8 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 9112b2b..e2f07bc 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.4 ================ +- Fixed QuerySetNoCache.count() caching (#410) - Don't follow references in _get_changed_fields (#422, #417) - Allow args and kwargs to be passed through to_json (#420) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index e88feb3..a6ba49b 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -60,7 +60,6 @@ class BaseQuerySet(object): self._none = False self._as_pymongo = False self._as_pymongo_coerce = False - self._len = None # If inheritance is allowed, only return instances and instances of # subclasses of the class being used @@ -331,14 +330,9 @@ class BaseQuerySet(object): :meth:`skip` that has been applied to this cursor into account when getting the count """ - if self._limit == 0: + if self._limit == 0 and with_limit_and_skip: return 0 - if with_limit_and_skip and self._len is not None: - return self._len - count = self._cursor.count(with_limit_and_skip=with_limit_and_skip) - if with_limit_and_skip: - self._len = count - return count + return self._cursor.count(with_limit_and_skip=with_limit_and_skip) def delete(self, write_concern=None, _from_doc_delete=False): """Delete the documents matched by the query. 
diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 9cfb1b6..1437e76 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -94,6 +94,21 @@ class QuerySet(BaseQuerySet): except StopIteration: self._has_more = False + def count(self, with_limit_and_skip=True): + """Count the selected elements in the query. + + :param with_limit_and_skip (optional): take any :meth:`limit` or + :meth:`skip` that has been applied to this cursor into account when + getting the count + """ + if with_limit_and_skip is False: + return super(QuerySet, self).count(with_limit_and_skip) + + if self._len is None: + self._len = super(QuerySet, self).count(with_limit_and_skip) + + return self._len + def no_cache(self): """Convert to a non_caching queryset diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 0ec41fa..9c04c0b 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3483,6 +3483,27 @@ class QuerySetTest(unittest.TestCase): people.count() # count is cached self.assertEqual(q, 1) + def test_no_cached_queryset(self): + class Person(Document): + name = StringField() + + Person.drop_collection() + for i in xrange(100): + Person(name="No: %s" % i).save() + + with query_counter() as q: + self.assertEqual(q, 0) + people = Person.objects.no_cache() + + [x for x in people] + self.assertEqual(q, 1) + + list(people) + self.assertEqual(q, 2) + + people.count() + self.assertEqual(q, 3) + def test_cache_not_cloned(self): class User(Document): From e98c5e10bc0eefacc11aa6955db9635fabc549de Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 30 Jul 2013 10:49:08 +0000 Subject: [PATCH 427/464] Fixed dereference threading issue in ComplexField.__get__ (#412) --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index e2f07bc..7cb1456 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.4 
================ +- Fixed dereference threading issue in ComplexField.__get__ (#412) - Fixed QuerySetNoCache.count() caching (#410) - Don't follow references in _get_changed_fields (#422, #417) - Allow args and kwargs to be passed through to_json (#420) From dc310b99f94ddf365369340c074eb2a35d68c685 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 30 Jul 2013 10:54:04 +0000 Subject: [PATCH 428/464] Updated docs about TTL indexes and signals (#413) --- docs/guide/defining-documents.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index a50450e..bc78a66 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -558,6 +558,11 @@ documentation for more information. A common usecase might be session data:: ] } +.. warning:: TTL indexes happen on the MongoDB server and not in the application + code, therefore no signals will be fired on document deletion. + If you need signals to be fired on deletion, then you must handle the + deletion of Documents in your application code. 
+ Comparing Indexes ----------------- From 0c437879961008281018e572369a61d58da909d6 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 30 Jul 2013 11:43:52 +0000 Subject: [PATCH 429/464] Fixed indexing - turn off _cls (#414) --- docs/changelog.rst | 1 + docs/guide/defining-documents.rst | 31 +++++++++++++++++++++++++++++++ mongoengine/base/document.py | 6 ++++-- mongoengine/document.py | 2 ++ tests/document/indexes.py | 19 +++++++++++++++++++ 5 files changed, 57 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 7cb1456..3569132 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.4 ================ +- Fixed indexing - turn off _cls (#414) - Fixed dereference threading issue in ComplexField.__get__ (#412) - Fixed QuerySetNoCache.count() caching (#410) - Don't follow references in _get_changed_fields (#422, #417) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index bc78a66..407fbda 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -442,6 +442,8 @@ The following example shows a :class:`Log` document that will be limited to ip_address = StringField() meta = {'max_documents': 1000, 'max_size': 2000000} +.. defining-indexes_ + Indexes ======= @@ -485,6 +487,35 @@ If a dictionary is passed then the following options are available: Inheritance adds extra fields indices see: :ref:`document-inheritance`. 
+Global index default options +---------------------------- + +There are a few top level defaults for all indexes that can be set:: + + class Page(Document): + title = StringField() + rating = StringField() + meta = { + 'index_options': {}, + 'index_background': True, + 'index_drop_dups': True, + 'index_cls': False + } + + +:attr:`index_options` (Optional) + Set any default index options - see the `full options list `_ + +:attr:`index_background` (Optional) + Set the default value for if an index should be indexed in the background + +:attr:`index_drop_dups` (Optional) + Set the default value for if an index should drop duplicates + +:attr:`index_cls` (Optional) + A way to turn off a specific index for _cls. + + Compound Indexes and Indexing sub documents ------------------------------------------- diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 80111f7..b9c07cf 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -629,8 +629,10 @@ class BaseDocument(object): # Check to see if we need to include _cls allow_inheritance = cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) - include_cls = allow_inheritance and not spec.get('sparse', False) - + include_cls = (allow_inheritance and not spec.get('sparse', False) and + spec.get('cls', True)) + if "cls" in spec: + spec.pop('cls') for key in spec['fields']: # If inherited spec continue if isinstance(key, (list, tuple)): diff --git a/mongoengine/document.py b/mongoengine/document.py index e331aa1..2f3a92a 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -536,6 +536,8 @@ class Document(BaseDocument): def ensure_indexes(cls): """Checks the document meta data and ensures all the indexes exist. + Global defaults can be set in the meta - see :doc:`guide/defining-documents` + .. 
note:: You can disable automatic index creation by setting `auto_create_index` to False in the documents meta data """ diff --git a/tests/document/indexes.py b/tests/document/indexes.py index 04d5632..ccf8463 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -156,6 +156,25 @@ class IndexesTest(unittest.TestCase): self.assertEqual([{'fields': [('_cls', 1), ('title', 1)]}], A._meta['index_specs']) + def test_index_no_cls(self): + """Ensure index specs are inhertited correctly""" + + class A(Document): + title = StringField() + meta = { + 'indexes': [ + {'fields': ('title',), 'cls': False}, + ], + 'allow_inheritance': True, + 'index_cls': False + } + + self.assertEqual([('title', 1)], A._meta['index_specs'][0]['fields']) + A._get_collection().drop_indexes() + A.ensure_indexes() + info = A._get_collection().index_information() + self.assertEqual(len(info.keys()), 2) + def test_build_index_spec_is_not_destructive(self): class MyDoc(Document): From 5e70e1bcb28c60520964e683b60ed1c60ef0f429 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 30 Jul 2013 13:17:38 +0000 Subject: [PATCH 430/464] Update transform to handle docs erroneously passed to unset (#416) --- docs/changelog.rst | 1 + mongoengine/queryset/transform.py | 4 +++- tests/queryset/transform.py | 25 +++++++++++++++++++++++++ 3 files changed, 29 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 3569132..8199e03 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.4 ================ +- Update transform to handle docs erroneously passed to unset (#416) - Fixed indexing - turn off _cls (#414) - Fixed dereference threading issue in ComplexField.__get__ (#412) - Fixed QuerySetNoCache.count() caching (#410) diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 352774f..e0a7d3c 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -203,11 
+203,13 @@ def update(_doc_cls=None, **update): value = field.prepare_query_value(op, value) elif op in ('pushAll', 'pullAll'): value = [field.prepare_query_value(op, v) for v in value] - elif op == 'addToSet': + elif op in ('addToSet', 'setOnInsert'): if isinstance(value, (list, tuple, set)): value = [field.prepare_query_value(op, v) for v in value] elif field.required or value is not None: value = field.prepare_query_value(op, value) + elif op == "unset": + value = 1 if match: match = '$' + match diff --git a/tests/queryset/transform.py b/tests/queryset/transform.py index 7886965..d2e8b78 100644 --- a/tests/queryset/transform.py +++ b/tests/queryset/transform.py @@ -31,6 +31,31 @@ class TransformTest(unittest.TestCase): self.assertEqual(transform.query(name__exists=True), {'name': {'$exists': True}}) + def test_transform_update(self): + class DicDoc(Document): + dictField = DictField() + + class Doc(Document): + pass + + DicDoc.drop_collection() + Doc.drop_collection() + + doc = Doc().save() + dic_doc = DicDoc().save() + + for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")): + update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc}) + self.assertTrue(isinstance(update[v]["dictField.test"], dict)) + + # Update special cases + update = transform.update(DicDoc, unset__dictField__test=doc) + self.assertEqual(update["$unset"]["dictField.test"], 1) + + update = transform.update(DicDoc, pull__dictField__test=doc) + self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict)) + + def test_query_field_name(self): """Ensure that the correct field name is used when querying. 
""" From a57d9a9303a8487cbc4417dceb5c61012f9a88b9 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 30 Jul 2013 13:28:05 +0000 Subject: [PATCH 431/464] Added regression test (#418) --- tests/queryset/field_list.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/queryset/field_list.py b/tests/queryset/field_list.py index 2bdfce1..f981c12 100644 --- a/tests/queryset/field_list.py +++ b/tests/queryset/field_list.py @@ -162,6 +162,10 @@ class OnlyExcludeAllTest(unittest.TestCase): self.assertEqual(obj.name, person.name) self.assertEqual(obj.age, person.age) + obj = Person.objects.only(*('id', 'name',)).get() + self.assertEqual(obj.name, person.name) + self.assertEqual(obj.age, None) + # Check polymorphism still works class Employee(self.Person): salary = IntField(db_field='wage') From b4777f7f4f1e2ee5d7cc8e1102421d87540a69f5 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 30 Jul 2013 15:04:52 +0000 Subject: [PATCH 432/464] Fix test --- tests/queryset/field_list.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/queryset/field_list.py b/tests/queryset/field_list.py index f981c12..a18e167 100644 --- a/tests/queryset/field_list.py +++ b/tests/queryset/field_list.py @@ -162,7 +162,7 @@ class OnlyExcludeAllTest(unittest.TestCase): self.assertEqual(obj.name, person.name) self.assertEqual(obj.age, person.age) - obj = Person.objects.only(*('id', 'name',)).get() + obj = self.Person.objects.only(*('id', 'name',)).get() self.assertEqual(obj.name, person.name) self.assertEqual(obj.age, None) From c17f94422fd6ac388dbdf1bc60139525bfcbd018 Mon Sep 17 00:00:00 2001 From: Nicolas Cortot Date: Tue, 30 Jul 2013 20:43:21 +0200 Subject: [PATCH 433/464] Add get_user_document and improve mongo_auth module * Added a get_user_document() methot to access the actual Document class used for authentication. * Clarified the docstring on MongoUser to prevent its use when the user Document class should be used. 
* Removed the masking of exceptions when loading the user document class. --- mongoengine/django/mongo_auth/models.py | 42 ++++++++++++++++++------- 1 file changed, 30 insertions(+), 12 deletions(-) diff --git a/mongoengine/django/mongo_auth/models.py b/mongoengine/django/mongo_auth/models.py index 3529d8e..7179718 100644 --- a/mongoengine/django/mongo_auth/models.py +++ b/mongoengine/django/mongo_auth/models.py @@ -6,10 +6,29 @@ from django.utils.importlib import import_module from django.utils.translation import ugettext_lazy as _ +__all__ = ( + 'get_user_document', +) + + MONGOENGINE_USER_DOCUMENT = getattr( settings, 'MONGOENGINE_USER_DOCUMENT', 'mongoengine.django.auth.User') +def get_user_document(self): + """Get the user docuemnt class user for authentcation. + + This is the class defined in settings.MONGOENGINE_USER_DOCUMENT, which + defaults to `mongoengine.django.auth.User`. + + """ + + name = MONGOENGINE_USER_DOCUMENT + dot = name.rindex('.') + module = import_module(name[:dot]) + return getattr(module, name[dot + 1:]) + + class MongoUserManager(UserManager): """A User manager wich allows the use of MongoEngine documents in Django. 
@@ -44,7 +63,7 @@ class MongoUserManager(UserManager): def contribute_to_class(self, model, name): super(MongoUserManager, self).contribute_to_class(model, name) self.dj_model = self.model - self.model = self._get_user_document() + self.model = get_user_document() self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD username = models.CharField(_('username'), max_length=30, unique=True) @@ -55,16 +74,6 @@ class MongoUserManager(UserManager): field = models.CharField(_(name), max_length=30) field.contribute_to_class(self.dj_model, name) - def _get_user_document(self): - try: - name = MONGOENGINE_USER_DOCUMENT - dot = name.rindex('.') - module = import_module(name[:dot]) - return getattr(module, name[dot + 1:]) - except ImportError: - raise ImproperlyConfigured("Error importing %s, please check " - "settings.MONGOENGINE_USER_DOCUMENT" - % name) def get(self, *args, **kwargs): try: @@ -85,5 +94,14 @@ class MongoUserManager(UserManager): class MongoUser(models.Model): - objects = MongoUserManager() + """"Dummy user model for Django. + MongoUser is used to replace Django's UserManager with MongoUserManager. + The actual user document class is mongoengine.django.auth.User or any + other document class specified in MONGOENGINE_USER_DOCUMENT. + + To get the user document class, use `get_user_document()`. 
+ + """ + + objects = MongoUserManager() From a69db231cc7696bb78795caeef2e4d2b3c034148 Mon Sep 17 00:00:00 2001 From: Paul Date: Wed, 31 Jul 2013 11:26:23 +1000 Subject: [PATCH 434/464] Pretty-print GridFSProxy objects --- mongoengine/fields.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 47554e0..39a6caa 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1082,6 +1082,10 @@ class GridFSProxy(object): def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self.grid_id) + + def __unicode__(self): + name = getattr(self.get(), 'filename', self.grid_id) if self.get() else '(no file)' + return '<%s: %s>' % (self.__class__.__name__, name) def __eq__(self, other): if isinstance(other, GridFSProxy): From d8ffa843a9bfbe7c6b645bbab4f0fcd61dd6af43 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 31 Jul 2013 09:29:41 +0000 Subject: [PATCH 435/464] Added str representation of GridFSProxy (#424) --- docs/changelog.rst | 1 + mongoengine/fields.py | 4 ++-- tests/fields/file_tests.py | 3 ++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 8199e03..49d79fe 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.4 ================ +- Added str representation of GridFSProxy (#424) - Update transform to handle docs erroneously passed to unset (#416) - Fixed indexing - turn off _cls (#414) - Fixed dereference threading issue in ComplexField.__get__ (#412) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 39a6caa..826e125 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1082,8 +1082,8 @@ class GridFSProxy(object): def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self.grid_id) - - def __unicode__(self): + + def __str__(self): name = getattr(self.get(), 'filename', self.grid_id) if self.get() else '(no file)' return '<%s: %s>' % 
(self.__class__.__name__, name) diff --git a/tests/fields/file_tests.py b/tests/fields/file_tests.py index d044500..ba601de 100644 --- a/tests/fields/file_tests.py +++ b/tests/fields/file_tests.py @@ -53,11 +53,12 @@ class FileTest(unittest.TestCase): content_type = 'text/plain' putfile = PutFile() - putfile.the_file.put(text, content_type=content_type) + putfile.the_file.put(text, content_type=content_type, filename="hello") putfile.save() result = PutFile.objects.first() self.assertTrue(putfile == result) + self.assertEqual("%s" % result.the_file, "") self.assertEqual(result.the_file.read(), text) self.assertEqual(result.the_file.content_type, content_type) result.the_file.delete() # Remove file from GridFS From 7431b1f123a84adcc3e7ee9ca86ef18b52d01c5d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 31 Jul 2013 09:31:04 +0000 Subject: [PATCH 436/464] Updated AUTHORS (#424) --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index b779da2..938c5c0 100644 --- a/AUTHORS +++ b/AUTHORS @@ -174,3 +174,4 @@ that much better: * Jatin- (https://github.com/jatin-) * Paul Uithol (https://github.com/PaulUithol) * Thom Knowles (https://github.com/fleat) + * Paul (https://github.com/squamous) \ No newline at end of file From 719bb53c3a01a50c325696f87aaa3fb08256b22f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 31 Jul 2013 09:44:15 +0000 Subject: [PATCH 437/464] Updated changelog (#423) --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 49d79fe..d93406d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.4 ================ +- Added get_user_document and improve mongo_auth module (#423) - Added str representation of GridFSProxy (#424) - Update transform to handle docs erroneously passed to unset (#416) - Fixed indexing - turn off _cls (#414) From b3f462a39d1f43a7bbc2b887757f340d8fea16cf Mon Sep 17 00:00:00 2001 From: Laurent 
Payot Date: Thu, 1 Aug 2013 03:51:10 +0200 Subject: [PATCH 438/464] updated docs for django shortcuts get_object_or_404 and get_list_or_404 --- docs/django.rst | 39 ++++++++++++++++++++++++++++++++++++--- 1 file changed, 36 insertions(+), 3 deletions(-) diff --git a/docs/django.rst b/docs/django.rst index da15188..62d4dd4 100644 --- a/docs/django.rst +++ b/docs/django.rst @@ -45,7 +45,7 @@ The :mod:`~mongoengine.django.auth` module also contains a Custom User model ================= Django 1.5 introduced `Custom user Models -` +`_ which can be used as an alternative to the MongoEngine authentication backend. The main advantage of this option is that other components relying on @@ -74,7 +74,7 @@ An additional ``MONGOENGINE_USER_DOCUMENT`` setting enables you to replace the The custom :class:`User` must be a :class:`~mongoengine.Document` class, but otherwise has the same requirements as a standard custom user model, as specified in the `Django Documentation -`. +`_. In particular, the custom class must define :attr:`USERNAME_FIELD` and :attr:`REQUIRED_FIELDS` attributes. @@ -128,7 +128,7 @@ appended to the filename until the generated filename doesn't exist. The >>> fs.listdir() ([], [u'hello.txt']) -All files will be saved and retrieved in GridFS via the :class::`FileDocument` +All files will be saved and retrieved in GridFS via the :class:`FileDocument` document, allowing easy access to the files without the GridFSStorage backend.:: @@ -137,3 +137,36 @@ backend.:: [] .. versionadded:: 0.4 + +Shortcuts +========= +Inspired by the `Django shortcut get_object_or_404 +`_, +the :func:`~mongoengine.django.shortcuts.get_document_or_404` method returns +a document or raises an Http404 exception if the document does not exist:: + + from mongoengine.django.shortcuts import get_document_or_404 + + admin_user = get_document_or_404(User, username='root') + +The first argument may be a Document or QuerySet object. 
All other passed arguments +and keyword arguments are used in the query:: + + foo_email = get_document_or_404(User.objects.only('email'), username='foo', is_active=True).email + +.. note:: Like with :func:`get`, a MultipleObjectsReturned will be raised if more than one + object is found. + + +Also inspired by the `Django shortcut get_list_or_404 +`_, +the :func:`~mongoengine.django.shortcuts.get_list_or_404` method returns a list of +documents or raises an Http404 exception if the list is empty:: + + from mongoengine.django.shortcuts import get_list_or_404 + + active_users = get_list_or_404(User, is_active=True) + +The first argument may be a Document or QuerySet object. All other passed +arguments and keyword arguments are used to filter the query. + From a448c9aebf573d13c7876fc5380e2519aabc302d Mon Sep 17 00:00:00 2001 From: devoto13 Date: Thu, 1 Aug 2013 17:54:41 +0300 Subject: [PATCH 439/464] removed duplicated method --- docs/guide/querying.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 127e479..f50985b 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -497,7 +497,6 @@ that you may use with these methods: * ``unset`` -- delete a particular value (since MongoDB v1.3+) * ``inc`` -- increment a value by a given amount * ``dec`` -- decrement a value by a given amount -* ``pop`` -- remove the last item from a list * ``push`` -- append a value to a list * ``push_all`` -- append several values to a list * ``pop`` -- remove the first or last element of a list From b98b06ff79f54318b8329822bcf081cb8953a1da Mon Sep 17 00:00:00 2001 From: Nicolas Cortot Date: Sun, 4 Aug 2013 11:01:09 +0200 Subject: [PATCH 440/464] Fix an error in get_user_document --- mongoengine/django/mongo_auth/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/django/mongo_auth/models.py b/mongoengine/django/mongo_auth/models.py index 7179718..35960c9 100644 --- 
a/mongoengine/django/mongo_auth/models.py +++ b/mongoengine/django/mongo_auth/models.py @@ -15,7 +15,7 @@ MONGOENGINE_USER_DOCUMENT = getattr( settings, 'MONGOENGINE_USER_DOCUMENT', 'mongoengine.django.auth.User') -def get_user_document(self): +def get_user_document(): """Get the user docuemnt class user for authentcation. This is the class defined in settings.MONGOENGINE_USER_DOCUMENT, which From 40b0a15b350cba12ebe37e595352d10c7628a2d3 Mon Sep 17 00:00:00 2001 From: Nicolas Cortot Date: Sun, 4 Aug 2013 11:03:34 +0200 Subject: [PATCH 441/464] Fixing typos --- mongoengine/django/mongo_auth/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/django/mongo_auth/models.py b/mongoengine/django/mongo_auth/models.py index 35960c9..d4947a2 100644 --- a/mongoengine/django/mongo_auth/models.py +++ b/mongoengine/django/mongo_auth/models.py @@ -16,7 +16,7 @@ MONGOENGINE_USER_DOCUMENT = getattr( def get_user_document(): - """Get the user docuemnt class user for authentcation. + """Get the user document class used for authentication. This is the class defined in settings.MONGOENGINE_USER_DOCUMENT, which defaults to `mongoengine.django.auth.User`. 
From a0d255369aef155fbb0cdd139542cbf5719a4046 Mon Sep 17 00:00:00 2001 From: Nicolas Cortot Date: Sun, 4 Aug 2013 11:08:11 +0200 Subject: [PATCH 442/464] Add a test case for get_user_document --- tests/test_django.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/tests/test_django.py b/tests/test_django.py index 63e3245..8fe0da3 100644 --- a/tests/test_django.py +++ b/tests/test_django.py @@ -22,7 +22,11 @@ try: try: from django.contrib.auth import authenticate, get_user_model from mongoengine.django.auth import User - from mongoengine.django.mongo_auth.models import MongoUser, MongoUserManager + from mongoengine.django.mongo_auth.models import ( + MongoUser, + MongoUserManager, + get_user_document, + ) DJ15 = True except Exception: DJ15 = False @@ -270,9 +274,12 @@ class MongoAuthTest(unittest.TestCase): User.drop_collection() super(MongoAuthTest, self).setUp() - def test_user_model(self): + def test_get_user_model(self): self.assertEqual(get_user_model(), MongoUser) + def test_get_user_document(self): + self.assertEqual(get_user_document(), User) + def test_user_manager(self): manager = get_user_model()._default_manager self.assertTrue(isinstance(manager, MongoUserManager)) From 5bcc4546783e8ee7200ad7669ed14a40ea67376e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 7 Aug 2013 09:07:57 +0000 Subject: [PATCH 443/464] Handle dynamic fieldnames that look like digits (#434) --- docs/changelog.rst | 1 + mongoengine/base/document.py | 2 +- tests/queryset/queryset.py | 7 +++++++ 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index d93406d..b877d4d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.4 ================ +- Handle dynamic fieldnames that look like digits (#434) - Added get_user_document and improve mongo_auth module (#423) - Added str representation of GridFSProxy (#424) - Update transform to handle docs erroneously passed 
to unset (#416) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index b9c07cf..cea2f09 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -762,7 +762,7 @@ class BaseDocument(object): for field_name in parts: # Handle ListField indexing: - if field_name.isdigit(): + if field_name.isdigit() and hasattr(field, 'field'): new_field = field.field fields.append(field_name) continue diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 9c04c0b..75708ca 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3299,6 +3299,13 @@ class QuerySetTest(unittest.TestCase): Test.objects(test='foo').update_one(upsert=True, set__test='foo') self.assertTrue('_cls' in Test._collection.find_one()) + def test_update_upsert_looks_like_a_digit(self): + class MyDoc(DynamicDocument): + pass + MyDoc.drop_collection() + self.assertEqual(1, MyDoc.objects.update_one(upsert=True, inc__47=1)) + self.assertEqual(MyDoc.objects.get()['47'], 1) + def test_read_preference(self): class Bar(Document): pass From f30208f3453dd7999987f07cf383e6fd9fb546ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Olivier=20Cort=C3=A8s?= Date: Mon, 12 Aug 2013 19:12:53 +0200 Subject: [PATCH 444/464] Fix the ._get_db() attribute after a Document.switch_db() Without this patch, I've got: ``` myobj._get_db() > ``` I need to `myobj._get_db()()` to get the database. I felt this like a bug. 
regards, --- mongoengine/document.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 2f3a92a..1bbd7b7 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -400,7 +400,7 @@ class Document(BaseDocument): """ with switch_db(self.__class__, db_alias) as cls: collection = cls._get_collection() - db = cls._get_db + db = cls._get_db() self._get_collection = lambda: collection self._get_db = lambda: db self._collection = collection From 70b320633ff801ad0885d395e7f6caa64587dbb1 Mon Sep 17 00:00:00 2001 From: crazyzubr Date: Thu, 15 Aug 2013 19:32:13 +0800 Subject: [PATCH 445/464] permit the establishment of a field with the name of size or other Example: # model class Example(Document): size = ReferenceField(Size, verbose_name='Size') # query examples = Example.objects(size=instance_size) # caused an error """ File ".../mongoengine/queryset/transform.py", line 50, in query if parts[-1] == 'not': IndexError: list index out of range """ --- mongoengine/queryset/transform.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index e0a7d3c..d82f33d 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -43,11 +43,11 @@ def query(_doc_cls=None, _field_operation=False, **query): parts = [part for part in parts if not part.isdigit()] # Check for an operator and transform to mongo-style if there is op = None - if parts[-1] in MATCH_OPERATORS: + if len(parts) > 1 and parts[-1] in MATCH_OPERATORS: op = parts.pop() negate = False - if parts[-1] == 'not': + if len(parts) > 1 and parts[-1] == 'not': parts.pop() negate = True From d07a9d2ef8b63fea934ef658678823b4d6073338 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 20 Aug 2013 08:30:20 +0000 Subject: [PATCH 446/464] Dynamic Fields store and recompose Embedded Documents / Documents correctly (#449) --- 
docs/changelog.rst | 1 + mongoengine/fields.py | 13 ++++++++++++- tests/fields/fields.py | 41 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 54 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b877d4d..fa27d1a 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.4 ================ +- Dynamic Fields store and recompose Embedded Documents / Documents correctly (#449) - Handle dynamic fieldnames that look like digits (#434) - Added get_user_document and improve mongo_auth module (#423) - Added str representation of GridFSProxy (#424) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 826e125..c1fc1a7 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -624,7 +624,9 @@ class DynamicField(BaseField): cls = value.__class__ val = value.to_mongo() # If we its a document thats not inherited add _cls - if (isinstance(value, (Document, EmbeddedDocument))): + if (isinstance(value, Document)): + val = {"_ref": value.to_dbref(), "_cls": cls.__name__} + if (isinstance(value, EmbeddedDocument)): val['_cls'] = cls.__name__ return val @@ -645,6 +647,15 @@ class DynamicField(BaseField): value = [v for k, v in sorted(data.iteritems(), key=itemgetter(0))] return value + def to_python(self, value): + if isinstance(value, dict) and '_cls' in value: + doc_cls = get_document(value['_cls']) + if '_ref' in value: + value = doc_cls._get_db().dereference(value['_ref']) + return doc_cls._from_son(value) + + return super(DynamicField, self).to_python(value) + def lookup_member(self, member_name): return member_name diff --git a/tests/fields/fields.py b/tests/fields/fields.py index b3d8d52..8791781 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -2506,5 +2506,46 @@ class FieldTest(unittest.TestCase): self.assertTrue(tuple(x.items[0]) in tuples) self.assertTrue(x.items[0] in tuples) + def test_dynamic_fields_class(self): + + class Doc2(Document): + field_1 
= StringField(db_field='f') + + class Doc(Document): + my_id = IntField(required=True, unique=True, primary_key=True) + embed_me = DynamicField(db_field='e') + field_x = StringField(db_field='x') + + Doc.drop_collection() + Doc2.drop_collection() + + doc2 = Doc2(field_1="hello") + doc = Doc(my_id=1, embed_me=doc2, field_x="x") + self.assertRaises(OperationError, doc.save) + + doc2.save() + doc.save() + + doc = Doc.objects.get() + self.assertEqual(doc.embed_me.field_1, "hello") + + def test_dynamic_fields_embedded_class(self): + + class Embed(EmbeddedDocument): + field_1 = StringField(db_field='f') + + class Doc(Document): + my_id = IntField(required=True, unique=True, primary_key=True) + embed_me = DynamicField(db_field='e') + field_x = StringField(db_field='x') + + Doc.drop_collection() + + Doc(my_id=1, embed_me=Embed(field_1="hello"), field_x="x").save() + + doc = Doc.objects.get() + self.assertEqual(doc.embed_me.field_1, "hello") + + if __name__ == '__main__': unittest.main() From ee7666ddea20b7a261c7dec021bbe595f015a970 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 20 Aug 2013 08:31:56 +0000 Subject: [PATCH 447/464] Update AUTHORS and Changelog (#441) --- AUTHORS | 3 ++- docs/changelog.rst | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 938c5c0..2a2eb36 100644 --- a/AUTHORS +++ b/AUTHORS @@ -174,4 +174,5 @@ that much better: * Jatin- (https://github.com/jatin-) * Paul Uithol (https://github.com/PaulUithol) * Thom Knowles (https://github.com/fleat) - * Paul (https://github.com/squamous) \ No newline at end of file + * Paul (https://github.com/squamous) + * Olivier Cortès (https://github.com/Karmak23) \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index fa27d1a..b027562 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.4 ================ +- Fixed ._get_db() attribute after a Document.switch_db() (#441) - Dynamic Fields store and 
recompose Embedded Documents / Documents correctly (#449) - Handle dynamic fieldnames that look like digits (#434) - Added get_user_document and improve mongo_auth module (#423) From 67baf465f4ebd1024d4730dc206900bb1bd3a1a2 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 20 Aug 2013 09:14:58 +0000 Subject: [PATCH 448/464] Fixed slice when using inheritance causing fields to be excluded (#437) --- docs/changelog.rst | 1 + mongoengine/queryset/field_list.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b027562..26a0716 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.4 ================ +- Fixed slice when using inheritance causing fields to be excluded (#437) - Fixed ._get_db() attribute after a Document.switch_db() (#441) - Dynamic Fields store and recompose Embedded Documents / Documents correctly (#449) - Handle dynamic fieldnames that look like digits (#434) diff --git a/mongoengine/queryset/field_list.py b/mongoengine/queryset/field_list.py index 73d3cc2..140a71e 100644 --- a/mongoengine/queryset/field_list.py +++ b/mongoengine/queryset/field_list.py @@ -55,7 +55,8 @@ class QueryFieldList(object): if self.always_include: if self.value is self.ONLY and self.fields: - self.fields = self.fields.union(self.always_include) + if sorted(self.slice.keys()) != sorted(self.fields): + self.fields = self.fields.union(self.always_include) else: self.fields -= self.always_include From 49f5b4fa5cf9a86ce548123478bcdec94a0698c6 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 20 Aug 2013 09:45:00 +0000 Subject: [PATCH 449/464] Fix Queryset docs (#448) --- docs/apireference.rst | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/docs/apireference.rst b/docs/apireference.rst index 774d3b8..9057de5 100644 --- a/docs/apireference.rst +++ b/docs/apireference.rst @@ -44,17 +44,21 @@ Context Managers Querying ======== -.. 
autoclass:: mongoengine.queryset.QuerySet - :members: +.. automodule:: mongoengine.queryset + :synopsis: Queryset level operations - .. automethod:: mongoengine.queryset.QuerySet.__call__ + .. autoclass:: mongoengine.queryset.QuerySet + :members: + :inherited-members: -.. autoclass:: mongoengine.queryset.QuerySetNoCache - :members: + .. automethod:: QuerySet.__call__ - .. automethod:: mongoengine.queryset.QuerySetNoCache.__call__ + .. autoclass:: mongoengine.queryset.QuerySetNoCache + :members: -.. autofunction:: mongoengine.queryset.queryset_manager + .. automethod:: mongoengine.queryset.QuerySetNoCache.__call__ + + .. autofunction:: mongoengine.queryset.queryset_manager Fields ====== From 2cd722d751438141d1bcfff5824f49496f2ffddd Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 20 Aug 2013 10:20:05 +0000 Subject: [PATCH 450/464] Updated setup.py --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index f6b3c1b..6bd778b 100644 --- a/setup.py +++ b/setup.py @@ -51,13 +51,13 @@ CLASSIFIERS = [ extra_opts = {} if sys.version_info[0] == 3: extra_opts['use_2to3'] = True - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'django>=1.5.1'] + extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2>=2.6', 'django>=1.5.1'] extra_opts['packages'] = find_packages(exclude=('tests',)) if "test" in sys.argv or "nosetests" in sys.argv: extra_opts['packages'].append("tests") extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]} else: - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2==2.6', 'python-dateutil'] + extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2>=2.6', 'python-dateutil'] extra_opts['packages'] = find_packages(exclude=('tests',)) setup(name='mongoengine', From 661398d8914bda0821091b7734c75b7b74c2566f Mon Sep 17 00:00:00 2001 From: Ross Lawley 
Date: Tue, 20 Aug 2013 10:22:06 +0000 Subject: [PATCH 451/464] Fixed dereference issue with embedded listfield referencefields (#439) --- docs/changelog.rst | 1 + mongoengine/dereference.py | 5 +++-- tests/test_dereference.py | 24 ++++++++++++++++++++++++ 3 files changed, 28 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 26a0716..74e2e50 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.4 ================ +- Fixed dereference issue with embedded listfield referencefields (#439) - Fixed slice when using inheritance causing fields to be excluded (#437) - Fixed ._get_db() attribute after a Document.switch_db() (#441) - Dynamic Fields store and recompose Embedded Documents / Documents correctly (#449) diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index e5e8886..ceda403 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -4,7 +4,7 @@ from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document) from fields import (ReferenceField, ListField, DictField, MapField) from connection import get_db from queryset import QuerySet -from document import Document +from document import Document, EmbeddedDocument class DeReference(object): @@ -33,7 +33,8 @@ class DeReference(object): self.max_depth = max_depth doc_type = None - if instance and isinstance(instance, (Document, TopLevelDocumentMetaclass)): + if instance and isinstance(instance, (Document, EmbeddedDocument, + TopLevelDocumentMetaclass)): doc_type = instance._fields.get(name) if hasattr(doc_type, 'field'): doc_type = doc_type.field diff --git a/tests/test_dereference.py b/tests/test_dereference.py index db9868a..6f2664a 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -1171,6 +1171,30 @@ class FieldTest(unittest.TestCase): self.assertEqual(2, len([brand for bg in brand_groups for brand in bg.brands])) + def 
test_dereferencing_embedded_listfield_referencefield(self): + class Tag(Document): + meta = {'collection': 'tags'} + name = StringField() + + class Post(EmbeddedDocument): + body = StringField() + tags = ListField(ReferenceField("Tag", dbref=True)) + + class Page(Document): + meta = {'collection': 'pages'} + tags = ListField(ReferenceField("Tag", dbref=True)) + posts = ListField(EmbeddedDocumentField(Post)) + + Tag.drop_collection() + Page.drop_collection() + + tag = Tag(name='test').save() + post = Post(body='test body', tags=[tag]) + Page(tags=[tag], posts=[post]).save() + + page = Page.objects.first() + self.assertEqual(page.tags[0], page.posts[0].tags[0]) + if __name__ == '__main__': unittest.main() From 29c887f30b0f7db13d30c920d29d2b4f2f490047 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 20 Aug 2013 12:21:20 +0000 Subject: [PATCH 452/464] Updated field filter logic - can now exclude subclass fields (#443) --- docs/changelog.rst | 1 + mongoengine/queryset/base.py | 31 ++++++++++++++++++++++++++----- tests/queryset/field_list.py | 23 +++++++++++++++++++++++ 3 files changed, 50 insertions(+), 5 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 74e2e50..489f2ff 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.4 ================ +- Fixed can now exclude subclass fields (#443) - Fixed dereference issue with embedded listfield referencefields (#439) - Fixed slice when using inheritance causing fields to be excluded (#437) - Fixed ._get_db() attribute after a Document.switch_db() (#441) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index a6ba49b..7af9daa 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -14,8 +14,9 @@ from pymongo.common import validate_read_preference from mongoengine import signals from mongoengine.common import _import_class +from mongoengine.base.common import get_document from mongoengine.errors import 
(OperationError, NotUniqueError, - InvalidQueryError) + InvalidQueryError, LookUpError) from mongoengine.queryset import transform from mongoengine.queryset.field_list import QueryFieldList @@ -1333,13 +1334,33 @@ class BaseQuerySet(object): return frequencies - def _fields_to_dbfields(self, fields): + def _fields_to_dbfields(self, fields, subdoc=False): """Translate fields paths to its db equivalents""" ret = [] + subclasses = [] + document = self._document + if document._meta['allow_inheritance']: + subclasses = [get_document(x) + for x in document._subclasses][1:] for field in fields: - field = ".".join(f.db_field for f in - self._document._lookup_field(field.split('.'))) - ret.append(field) + try: + field = ".".join(f.db_field for f in + document._lookup_field(field.split('.'))) + ret.append(field) + except LookUpError, e: + found = False + for subdoc in subclasses: + try: + subfield = ".".join(f.db_field for f in + subdoc._lookup_field(field.split('.'))) + ret.append(subfield) + found = True + break + except LookUpError, e: + pass + + if not found: + raise e return ret def _get_order_by(self, keys): diff --git a/tests/queryset/field_list.py b/tests/queryset/field_list.py index a18e167..7d66d26 100644 --- a/tests/queryset/field_list.py +++ b/tests/queryset/field_list.py @@ -399,5 +399,28 @@ class OnlyExcludeAllTest(unittest.TestCase): numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get() self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) + + def test_exclude_from_subclasses_docs(self): + + class Base(Document): + username = StringField() + + meta = {'allow_inheritance': True} + + class Anon(Base): + anon = BooleanField() + + class User(Base): + password = StringField() + wibble = StringField() + + Base.drop_collection() + User(username="mongodb", password="secret").save() + + user = Base.objects().exclude("password", "wibble").first() + self.assertEqual(user.password, None) + + self.assertRaises(LookUpError, Base.objects.exclude, 
"made_up") + if __name__ == '__main__': unittest.main() From a707598042fb0312eb01c15b57b4796dd63ad9a1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 20 Aug 2013 13:13:17 +0000 Subject: [PATCH 453/464] Allow fields to be named the same as query operators (#445) --- AUTHORS | 3 ++- docs/changelog.rst | 3 ++- tests/queryset/queryset.py | 17 +++++++++++++++++ 3 files changed, 21 insertions(+), 2 deletions(-) diff --git a/AUTHORS b/AUTHORS index 2a2eb36..a5b73c7 100644 --- a/AUTHORS +++ b/AUTHORS @@ -175,4 +175,5 @@ that much better: * Paul Uithol (https://github.com/PaulUithol) * Thom Knowles (https://github.com/fleat) * Paul (https://github.com/squamous) - * Olivier Cortès (https://github.com/Karmak23) \ No newline at end of file + * Olivier Cortès (https://github.com/Karmak23) + * crazyzubr (https://github.com/crazyzubr) \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 489f2ff..2775429 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,7 +4,8 @@ Changelog Changes in 0.8.4 ================ -- Fixed can now exclude subclass fields (#443) +- Allow fields to be named the same as query operators (#445) +- Updated field filter logic - can now exclude subclass fields (#443) - Fixed dereference issue with embedded listfield referencefields (#439) - Fixed slice when using inheritance causing fields to be excluded (#437) - Fixed ._get_db() attribute after a Document.switch_db() (#441) diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 75708ca..7f64135 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3691,6 +3691,23 @@ class QuerySetTest(unittest.TestCase): '_cls': 'Animal.Cat' }) + def test_can_have_field_same_name_as_query_operator(self): + + class Size(Document): + name = StringField() + + class Example(Document): + size = ReferenceField(Size) + + Size.drop_collection() + Example.drop_collection() + + instance_size = Size(name="Large").save() + 
Example(size=instance_size).save() + + self.assertEqual(Example.objects(size=instance_size).count(), 1) + self.assertEqual(Example.objects(size__in=[instance_size]).count(), 1) + if __name__ == '__main__': unittest.main() From 0dd01bda016e44aca102d4998bf7c1a0a89739e9 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 20 Aug 2013 15:54:42 +0000 Subject: [PATCH 454/464] Fixed "$pull" semantics for nested ListFields (#447) --- AUTHORS | 3 +- docs/changelog.rst | 1 + mongoengine/common.py | 5 ++- mongoengine/fields.py | 4 ++ mongoengine/queryset/transform.py | 25 ++++++++++- tests/queryset/queryset.py | 71 ++++++++++++++++++++++++++++--- 6 files changed, 99 insertions(+), 10 deletions(-) diff --git a/AUTHORS b/AUTHORS index a5b73c7..452ba37 100644 --- a/AUTHORS +++ b/AUTHORS @@ -176,4 +176,5 @@ that much better: * Thom Knowles (https://github.com/fleat) * Paul (https://github.com/squamous) * Olivier Cortès (https://github.com/Karmak23) - * crazyzubr (https://github.com/crazyzubr) \ No newline at end of file + * crazyzubr (https://github.com/crazyzubr) + * FrankSomething (https://github.com/FrankSomething) \ No newline at end of file diff --git a/docs/changelog.rst b/docs/changelog.rst index 2775429..6a0258c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.4 ================ +- Fixed "$pull" semantics for nested ListFields (#447) - Allow fields to be named the same as query operators (#445) - Updated field filter logic - can now exclude subclass fields (#443) - Fixed dereference issue with embedded listfield referencefields (#439) diff --git a/mongoengine/common.py b/mongoengine/common.py index 20d5138..6303231 100644 --- a/mongoengine/common.py +++ b/mongoengine/common.py @@ -23,8 +23,9 @@ def _import_class(cls_name): field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField', 'FileField', 'GenericReferenceField', 'GenericEmbeddedDocumentField', 'GeoPointField', - 'PointField', 'LineStringField', 
'PolygonField', - 'ReferenceField', 'StringField', 'ComplexBaseField') + 'PointField', 'LineStringField', 'ListField', + 'PolygonField', 'ReferenceField', 'StringField', + 'ComplexBaseField') queryset_classes = ('OperationError',) deref_classes = ('DeReference',) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index c1fc1a7..419f2ef 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -780,6 +780,10 @@ class DictField(ComplexBaseField): if op in match_operators and isinstance(value, basestring): return StringField().prepare_query_value(op, value) + + if hasattr(self.field, 'field'): + return self.field.prepare_query_value(op, value) + return super(DictField, self).prepare_query_value(op, value) diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index d82f33d..2ee7e38 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -182,6 +182,7 @@ def update(_doc_cls=None, **update): parts = [] cleaned_fields = [] + appended_sub_field = False for field in fields: append_field = True if isinstance(field, basestring): @@ -193,10 +194,17 @@ def update(_doc_cls=None, **update): else: parts.append(field.db_field) if append_field: + appended_sub_field = False cleaned_fields.append(field) + if hasattr(field, 'field'): + cleaned_fields.append(field.field) + appended_sub_field = True # Convert value to proper value - field = cleaned_fields[-1] + if appended_sub_field: + field = cleaned_fields[-2] + else: + field = cleaned_fields[-1] if op in (None, 'set', 'push', 'pull'): if field.required or value is not None: @@ -223,11 +231,24 @@ def update(_doc_cls=None, **update): if 'pull' in op and '.' 
in key: # Dot operators don't work on pull operations - # it uses nested dict syntax + # unless they point to a list field + # Otherwise it uses nested dict syntax if op == 'pullAll': raise InvalidQueryError("pullAll operations only support " "a single field depth") + # Look for the last list field and use dot notation until there + field_classes = [c.__class__ for c in cleaned_fields] + field_classes.reverse() + ListField = _import_class('ListField') + if ListField in field_classes: + # Join all fields via dot notation to the last ListField + # Then process as normal + last_listField = len(cleaned_fields) - field_classes.index(ListField) + key = ".".join(parts[:last_listField]) + parts = parts[last_listField:] + parts.insert(0, key) + parts.reverse() for key in parts: value = {key: value} diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 7f64135..b4bcf2a 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -1497,9 +1497,6 @@ class QuerySetTest(unittest.TestCase): def test_pull_nested(self): - class User(Document): - name = StringField() - class Collaborator(EmbeddedDocument): user = StringField() @@ -1514,8 +1511,7 @@ class QuerySetTest(unittest.TestCase): Site.drop_collection() c = Collaborator(user='Esteban') - s = Site(name="test", collaborators=[c]) - s.save() + s = Site(name="test", collaborators=[c]).save() Site.objects(id=s.id).update_one(pull__collaborators__user='Esteban') self.assertEqual(Site.objects.first().collaborators, []) @@ -1525,6 +1521,71 @@ class QuerySetTest(unittest.TestCase): self.assertRaises(InvalidQueryError, pull_all) + def test_pull_from_nested_embedded(self): + + class User(EmbeddedDocument): + name = StringField() + + def __unicode__(self): + return '%s' % self.name + + class Collaborator(EmbeddedDocument): + helpful = ListField(EmbeddedDocumentField(User)) + unhelpful = ListField(EmbeddedDocumentField(User)) + + class Site(Document): + name = StringField(max_length=75, unique=True, 
required=True) + collaborators = EmbeddedDocumentField(Collaborator) + + + Site.drop_collection() + + c = User(name='Esteban') + f = User(name='Frank') + s = Site(name="test", collaborators=Collaborator(helpful=[c], unhelpful=[f])).save() + + Site.objects(id=s.id).update_one(pull__collaborators__helpful=c) + self.assertEqual(Site.objects.first().collaborators['helpful'], []) + + Site.objects(id=s.id).update_one(pull__collaborators__unhelpful={'name': 'Frank'}) + self.assertEqual(Site.objects.first().collaborators['unhelpful'], []) + + def pull_all(): + Site.objects(id=s.id).update_one(pull_all__collaborators__helpful__name=['Ross']) + + self.assertRaises(InvalidQueryError, pull_all) + + def test_pull_from_nested_mapfield(self): + + class Collaborator(EmbeddedDocument): + user = StringField() + + def __unicode__(self): + return '%s' % self.user + + class Site(Document): + name = StringField(max_length=75, unique=True, required=True) + collaborators = MapField(ListField(EmbeddedDocumentField(Collaborator))) + + + Site.drop_collection() + + c = Collaborator(user='Esteban') + f = Collaborator(user='Frank') + s = Site(name="test", collaborators={'helpful':[c],'unhelpful':[f]}) + s.save() + + Site.objects(id=s.id).update_one(pull__collaborators__helpful__user='Esteban') + self.assertEqual(Site.objects.first().collaborators['helpful'], []) + + Site.objects(id=s.id).update_one(pull__collaborators__unhelpful={'user':'Frank'}) + self.assertEqual(Site.objects.first().collaborators['unhelpful'], []) + + def pull_all(): + Site.objects(id=s.id).update_one(pull_all__collaborators__helpful__user=['Ross']) + + self.assertRaises(InvalidQueryError, pull_all) + def test_update_one_pop_generic_reference(self): class BlogTag(Document): From a0ef649dd8777c874927686ef02b533590420390 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 20 Aug 2013 18:31:33 +0000 Subject: [PATCH 455/464] Update travis.yml --- .travis.yml | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 
deletions(-) diff --git a/.travis.yml b/.travis.yml index c7e8ea3..609d898 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,10 +16,10 @@ env: install: - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install django==$DJANGO --use-mirrors ; true; fi - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi - pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b + - pip install django==$DJANGO --use-mirrors - python setup.py install script: - python setup.py test diff --git a/setup.py b/setup.py index 6bd778b..f5498f1 100644 --- a/setup.py +++ b/setup.py @@ -51,7 +51,7 @@ CLASSIFIERS = [ extra_opts = {} if sys.version_info[0] == 3: extra_opts['use_2to3'] = True - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2>=2.6', 'django>=1.5.1'] + extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'django>=1.5.1'] extra_opts['packages'] = find_packages(exclude=('tests',)) if "test" in sys.argv or "nosetests" in sys.argv: extra_opts['packages'].append("tests") From 200e52bab50906761fa09b8a34f0cac5ddec335e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 20 Aug 2013 18:44:12 +0000 Subject: [PATCH 456/464] Added documentation about abstract meta Refs #438 --- docs/guide/defining-documents.rst | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 407fbda..ba1af33 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -689,7 +689,6 @@ document.:: .. 
note:: From 0.8 onwards you must declare :attr:`allow_inheritance` defaults to False, meaning you must set it to True to use inheritance. - Working with existing data -------------------------- As MongoEngine no longer defaults to needing :attr:`_cls` you can quickly and @@ -709,3 +708,25 @@ defining all possible field types. If you use :class:`~mongoengine.Document` and the database contains data that isn't defined then that data will be stored in the `document._data` dictionary. + +Abstract classes +================ + +If you want to add some extra functionality to a group of Document classes but +you don't need or want the overhead of inheritance you can use the +:attr:`abstract` attribute of :attr:`-mongoengine.Document.meta`. +This won't turn on :ref:`document-inheritance` but will allow you to keep your +code DRY:: + + class BaseDocument(Document): + meta = { + 'abstract': True, + } + def check_permissions(self): + ... + + class User(BaseDocument): + ... + +Now the User class will have access to the inherited `check_permissions` method +and won't store any of the extra `_cls` information. 
From fffd0e899019c33e08cb53bdf7ce9492884a4e3e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 20 Aug 2013 18:54:14 +0000 Subject: [PATCH 457/464] Fixed error raise --- mongoengine/queryset/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 7af9daa..b4dad0c 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1347,7 +1347,7 @@ class BaseQuerySet(object): field = ".".join(f.db_field for f in document._lookup_field(field.split('.'))) ret.append(field) - except LookUpError, e: + except LookUpError, err: found = False for subdoc in subclasses: try: @@ -1360,7 +1360,7 @@ class BaseQuerySet(object): pass if not found: - raise e + raise err return ret def _get_order_by(self, keys): From f57569f553ab2ffd1947db0a4c014e5026cd0f0d Mon Sep 17 00:00:00 2001 From: Alexandr Morozov Date: Wed, 21 Aug 2013 13:52:24 +0400 Subject: [PATCH 458/464] Remove database name necessity in uri connection schema --- docs/guide/connecting.rst | 7 +++++-- mongoengine/connection.py | 5 +---- tests/test_connection.py | 26 ++++++++++++++++++++++++++ 3 files changed, 32 insertions(+), 6 deletions(-) diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index 854e2c3..f681aad 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -23,12 +23,15 @@ arguments should be provided:: connect('project1', username='webapp', password='pwd123') -Uri style connections are also supported as long as you include the database -name - just supply the uri as the :attr:`host` to +Uri style connections are also supported - just supply the uri as +the :attr:`host` to :func:`~mongoengine.connect`:: connect('project1', host='mongodb://localhost/database_name') +Note that database name from uri has priority over name +in ::func:`~mongoengine.connect` + ReplicaSets =========== diff --git a/mongoengine/connection.py b/mongoengine/connection.py index abab269..4275da5 
100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -55,12 +55,9 @@ def register_connection(alias, name, host='localhost', port=27017, # Handle uri style connections if "://" in host: uri_dict = uri_parser.parse_uri(host) - if uri_dict.get('database') is None: - raise ConnectionError("If using URI style connection include "\ - "database name in string") conn_settings.update({ 'host': host, - 'name': uri_dict.get('database'), + 'name': uri_dict.get('database') or name, 'username': uri_dict.get('username'), 'password': uri_dict.get('password'), 'read_preference': read_preference, diff --git a/tests/test_connection.py b/tests/test_connection.py index d27a66d..62d795c 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -59,6 +59,32 @@ class ConnectionTest(unittest.TestCase): c.admin.system.users.remove({}) c.mongoenginetest.system.users.remove({}) + def test_connect_uri_without_db(self): + """Ensure that the connect() method works properly with uri's + without database_name + """ + c = connect(db='mongoenginetest', alias='admin') + c.admin.system.users.remove({}) + c.mongoenginetest.system.users.remove({}) + + c.admin.add_user("admin", "password") + c.admin.authenticate("admin", "password") + c.mongoenginetest.add_user("username", "password") + + self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost') + + connect("mongoenginetest", host='mongodb://localhost/') + + conn = get_connection() + self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) + + db = get_db() + self.assertTrue(isinstance(db, pymongo.database.Database)) + self.assertEqual(db.name, 'mongoenginetest') + + c.admin.system.users.remove({}) + c.mongoenginetest.system.users.remove({}) + def test_register_connection(self): """Ensure that connections with different aliases may be registered. 
""" From f4db0da58581dc4956add7278c625e9fd37a4f7c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 23 Aug 2013 09:03:51 +0000 Subject: [PATCH 459/464] Update changelog add LK4D4 to authors (#452) --- AUTHORS | 3 ++- docs/changelog.rst | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 452ba37..f043207 100644 --- a/AUTHORS +++ b/AUTHORS @@ -177,4 +177,5 @@ that much better: * Paul (https://github.com/squamous) * Olivier Cortès (https://github.com/Karmak23) * crazyzubr (https://github.com/crazyzubr) - * FrankSomething (https://github.com/FrankSomething) \ No newline at end of file + * FrankSomething (https://github.com/FrankSomething) + * Alexandr Morozov (https://github.com/LK4D4) diff --git a/docs/changelog.rst b/docs/changelog.rst index 6a0258c..926fb8a 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,7 @@ Changelog Changes in 0.8.4 ================ +- Remove database name necessity in uri connection schema (#452) - Fixed "$pull" semantics for nested ListFields (#447) - Allow fields to be named the same as query operators (#445) - Updated field filter logic - can now exclude subclass fields (#443) From 23843ec86e4bdced0996de4a68d01bbf61e86a31 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 23 Aug 2013 09:06:57 +0000 Subject: [PATCH 460/464] Updated travis config --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 609d898..26b502c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,7 +19,7 @@ install: - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi - pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b - - pip install django==$DJANGO --use-mirrors + - pip install django==$DJANGO -U --use-mirrors - 
python setup.py install script: - python setup.py test From 6738a9433b86fa2e7b4fef6bf124d5c166f5652b Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 23 Aug 2013 09:36:33 +0000 Subject: [PATCH 461/464] Updated travis --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 26b502c..4395107 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,7 +19,6 @@ install: - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi - pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b - - pip install django==$DJANGO -U --use-mirrors - python setup.py install script: - python setup.py test From 86c8929d774483ed992557210b7a83a7bc7579cf Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 23 Aug 2013 10:03:10 +0000 Subject: [PATCH 462/464] 0.8.4 is a go --- mongoengine/__init__.py | 2 +- python-mongoengine.spec | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index bfa35fb..2b68b3c 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -15,7 +15,7 @@ import django __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + list(queryset.__all__) + signals.__all__ + list(errors.__all__)) -VERSION = (0, 8, 3) +VERSION = (0, 8, 4) def get_version(): diff --git a/python-mongoengine.spec b/python-mongoengine.spec index 512c621..b9c45ef 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.8.3 +Version: 0.8.4 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB From bcbe740598747c97d1911ecad8c2865887363df8 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 23 Aug 2013 13:41:15 +0000 Subject: 
[PATCH 463/464] Updated setup.py --- setup.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/setup.py b/setup.py index f5498f1..85707d0 100644 --- a/setup.py +++ b/setup.py @@ -48,17 +48,15 @@ CLASSIFIERS = [ 'Topic :: Software Development :: Libraries :: Python Modules', ] -extra_opts = {} +extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])} if sys.version_info[0] == 3: extra_opts['use_2to3'] = True extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'django>=1.5.1'] - extra_opts['packages'] = find_packages(exclude=('tests',)) if "test" in sys.argv or "nosetests" in sys.argv: - extra_opts['packages'].append("tests") + extra_opts['packages'] = find_packages() extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]} else: extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2>=2.6', 'python-dateutil'] - extra_opts['packages'] = find_packages(exclude=('tests',)) setup(name='mongoengine', version=VERSION, From 654cca82a9ee44527b22418c4f751e98efd9427a Mon Sep 17 00:00:00 2001 From: Joey Payne Date: Wed, 18 Sep 2013 11:38:38 -0600 Subject: [PATCH 464/464] Fixes AttributeError when using storage.exists() on a non-existing file. --- mongoengine/django/storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/django/storage.py b/mongoengine/django/storage.py index 341455c..9df6f9e 100644 --- a/mongoengine/django/storage.py +++ b/mongoengine/django/storage.py @@ -76,7 +76,7 @@ class GridFSStorage(Storage): """Find the documents in the store with the given name """ docs = self.document.objects - doc = [d for d in docs if getattr(d, self.field).name == name] + doc = [d for d in docs if hasattr(getattr(d, self.field), 'name') and getattr(d, self.field).name == name] if doc: return doc[0] else: