Compare commits
	
		
			1008 Commits
		
	
	
		
			v0.8.4
			...
			external-r
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | caa9b34361 | ||
|  | de18e256ce | ||
|  | 1a3c70ce1b | ||
|  | bd4a603e16 | ||
|  | 358b80d782 | ||
|  | 824ec42005 | ||
|  | 466935e9a3 | ||
|  | b52d3e3a7b | ||
|  | 888a6da4a5 | ||
|  | 972ac73dd9 | ||
|  | d8b238d5f1 | ||
|  | 63206c3da2 | ||
|  | 5713de8966 | ||
|  | 58f293fef3 | ||
|  | ffbb2c9689 | ||
|  | 9cd3dcdebf | ||
|  | f2fe58c3c5 | ||
|  | b78010aa94 | ||
|  | 49035543b9 | ||
|  | f9ccf635ca | ||
|  | e8ea294964 | ||
|  | 19ef2be88b | ||
|  | 30e8b8186f | ||
|  | 741643af5f | ||
|  | 6aaf9ba470 | ||
|  | 5957dc72eb | ||
|  | e32a9777d7 | ||
|  | 84a8f1eb2b | ||
|  | 6810953014 | ||
|  | 398964945a | ||
|  | 5f43c032f2 | ||
|  | 627cf90de0 | ||
|  | 2bedb36d7f | ||
|  | e93a95d0cb | ||
|  | 3f31666796 | ||
|  | 3fe8031cf3 | ||
|  | b27c7ce11b | ||
|  | ed34c2ca68 | ||
|  | 3ca2e953fb | ||
|  | d8a7328365 | ||
|  | f33cd625bf | ||
|  | 80530bb13c | ||
|  | affc12df4b | ||
|  | 4eedf00025 | ||
|  | e5acbcc0dd | ||
|  | 1b6743ee53 | ||
|  | b5fb82d95d | ||
|  | 193aa4e1f2 | ||
|  | ebd34427c7 | ||
|  | 3d75573889 | ||
|  | c6240ca415 | ||
|  | 2ee8984b44 | ||
|  | b7ec587e5b | ||
|  | 47c58bce2b | ||
|  | 96e95ac533 | ||
|  | b013a065f7 | ||
|  | 74b37d11cf | ||
|  | c6cc013617 | ||
|  | f4e1d80a87 | ||
|  | 91dad4060f | ||
|  | e07cb82c15 | ||
|  | 2770cec187 | ||
|  | 5c3928190a | ||
|  | 9f4b04ea0f | ||
|  | 96d20756ca | ||
|  | b8454c7f5b | ||
|  | c84f703f92 | ||
|  | 57c2e867d8 | ||
|  | 553f496d84 | ||
|  | b1d8aca46a | ||
|  | 8e884fd3ea | ||
|  | 76524b7498 | ||
|  | 65914fb2b2 | ||
|  | a4d0da0085 | ||
|  | c9d496e9a0 | ||
|  | 88a951ba4f | ||
|  | 403ceb19dc | ||
|  | 835d3c3d18 | ||
|  | 3135b456be | ||
|  | 0be6d3661a | ||
|  | 6f5f5b4711 | ||
|  | c6c5f85abb | ||
|  | 7b860f7739 | ||
|  | e28804c03a | ||
|  | 1b9432824b | ||
|  | 3b71a6b5c5 | ||
|  | 7ce8768c19 | ||
|  | 25e0f12976 | ||
|  | f168682a68 | ||
|  | d25058a46d | ||
|  | 4d0c092d9f | ||
|  | 15714ef855 | ||
|  | eb743beaa3 | ||
|  | 0007535a46 | ||
|  | 8391af026c | ||
|  | 800f656dcf | ||
|  | 088c5f49d9 | ||
|  | d8d98b6143 | ||
|  | 02fb3b9315 | ||
|  | 4f87db784e | ||
|  | 7e6287b925 | ||
|  | 999cdfd997 | ||
|  | 8d6cb087c6 | ||
|  | 2b7417c728 | ||
|  | 3c455cf1c1 | ||
|  | 5135185e31 | ||
|  | b461f26e5d | ||
|  | faef5b8570 | ||
|  | 0a20e04c10 | ||
|  | d19bb2308d | ||
|  | d8dd07d9ef | ||
|  | 36c56243cd | ||
|  | 23d06b79a6 | ||
|  | e4c4e923ee | ||
|  | 936d2f1f47 | ||
|  | 07018b5060 | ||
|  | ac90d6ae5c | ||
|  | 2141f2c4c5 | ||
|  | 81870777a9 | ||
|  | 845092dcad | ||
|  | dd473d1e1e | ||
|  | d2869bf4ed | ||
|  | 891a3f4b29 | ||
|  | 6767b50d75 | ||
|  | d9e4b562a9 | ||
|  | fb3243f1bc | ||
|  | 5fe1497c92 | ||
|  | 5446592d44 | ||
|  | 40ed9a53c9 | ||
|  | f7ac8cea90 | ||
|  | 4ef5d1f0cd | ||
|  | 6992615c98 | ||
|  | 43dabb2825 | ||
|  | 05e40e5681 | ||
|  | 2c4536e137 | ||
|  | 3dc81058a0 | ||
|  | bd84667a2b | ||
|  | e5b6a12977 | ||
|  | ca415d5d62 | ||
|  | 99b4fe7278 | ||
|  | 327e164869 | ||
|  | 25bc571f30 | ||
|  | 38c7e8a1d2 | ||
|  | ca282e28e0 | ||
|  | 5ef59c06df | ||
|  | 8f55d385d6 | ||
|  | cd2fc25c19 | ||
|  | 709983eea6 | ||
|  | 40e99b1b80 | ||
|  | 488684d960 | ||
|  | f35034b989 | ||
|  | 9d6f9b1f26 | ||
|  | 6148a608fb | ||
|  | 3fa9e70383 | ||
|  | 16fea6f009 | ||
|  | df9ed835ca | ||
|  | e394c8f0f2 | ||
|  | 21974f7288 | ||
|  | 5ef0170d77 | ||
|  | c21dcf14de | ||
|  | a8d20d4e1e | ||
|  | 8b307485b0 | ||
|  | 4544afe422 | ||
|  | 9d7eba5f70 | ||
|  | be0aee95f2 | ||
|  | 3469ed7ab9 | ||
|  | 1f223aa7e6 | ||
|  | 0a431ead5e | ||
|  | f750796444 | ||
|  | c82bcd882a | ||
|  | 7d0ec33b54 | ||
|  | 43d48b3feb | ||
|  | 2e406d2687 | ||
|  | 3f30808104 | ||
|  | ab10217c86 | ||
|  | 00430491ca | ||
|  | 109202329f | ||
|  | 3b1509f307 | ||
|  | 7ad7b08bed | ||
|  | 4650e5e8fb | ||
|  | af59d4929e | ||
|  | e34100bab4 | ||
|  | d9b3a9fb60 | ||
|  | 39eec59c90 | ||
|  | d651d0d472 | ||
|  | 87a2358a65 | ||
|  | cef4e313e1 | ||
|  | 7cc1a4eba0 | ||
|  | c6cc0133b3 | ||
|  | 7748e68440 | ||
|  | 6c2230a076 | ||
|  | 66b233eaea | ||
|  | fed58f3920 | ||
|  | 815b2be7f7 | ||
|  | f420c9fb7c | ||
|  | 01bdf10b94 | ||
|  | ddedc1ee92 | ||
|  | 9e9703183f | ||
|  | adce9e6220 | ||
|  | c499133bbe | ||
|  | 8f505c2dcc | ||
|  | b320064418 | ||
|  | a643933d16 | ||
|  | 2659ec5887 | ||
|  | 9f8327926d | ||
|  | 7a568dc118 | ||
|  | c946b06be5 | ||
|  | c65fd0e477 | ||
|  | 8f8217e928 | ||
|  | 6c9e1799c7 | ||
|  | decd70eb23 | ||
|  | a20d40618f | ||
|  | b4af8ec751 | ||
|  | feb5eed8a5 | ||
|  | f4fa39c70e | ||
|  | 7b7165f5d8 | ||
|  | 13897db6d3 | ||
|  | c4afdb7198 | ||
|  | 0284975f3f | ||
|  | 269e3d1303 | ||
|  | 8c81f7ece9 | ||
|  | f6e0593774 | ||
|  | 3d80e549cb | ||
|  | acc7448dc5 | ||
|  | 35d3d3de72 | ||
|  | 0372e07eb0 | ||
|  | 00221e3410 | ||
|  | 9c264611cf | ||
|  | 31d7f70e27 | ||
|  | 04e8b83d45 | ||
|  | e87bf71f20 | ||
|  | 2dd70c8d62 | ||
|  | a3886702a3 | ||
|  | 713af133a0 | ||
|  | 057ffffbf2 | ||
|  | a81d6d124b | ||
|  | 23f07fde5e | ||
|  | b42b760393 | ||
|  | bf6f4c48c0 | ||
|  | 6133f04841 | ||
|  | 3c18f79ea4 | ||
|  | 2af8342fea | ||
|  | fc3db7942d | ||
|  | 164e2b2678 | ||
|  | b7b28390df | ||
|  | a6e996d921 | ||
|  | 07e666345d | ||
|  | 007f10d29d | ||
|  | f9284d20ca | ||
|  | 9050869781 | ||
|  | 54975de0f3 | ||
|  | a7aead5138 | ||
|  | 6868f66f24 | ||
|  | 3c0b00e42d | ||
|  | 3327388f1f | ||
|  | 04497aec36 | ||
|  | aa9d596930 | ||
|  | f96e68cd11 | ||
|  | 013227323d | ||
|  | 19cbb442ee | ||
|  | c0e7f341cb | ||
|  | 0a1ba7c434 | ||
|  | b708dabf98 | ||
|  | 899e56e5b8 | ||
|  | f6d3bd8ccb | ||
|  | deb5677a57 | ||
|  | 5c464c3f5a | ||
|  | cceef33fef | ||
|  | ed8174fe36 | ||
|  | 3c8906494f | ||
|  | 6e745e9882 | ||
|  | fb4e9c3772 | ||
|  | 2c282f9550 | ||
|  | d92d41cb05 | ||
|  | 82e7050561 | ||
|  | 44f92d4169 | ||
|  | 2f1fae38dd | ||
|  | 9fe99979fe | ||
|  | 6399de0b51 | ||
|  | 959740a585 | ||
|  | 159b082828 | ||
|  | 8e7c5af16c | ||
|  | c1645ab7a7 | ||
|  | 2ae2bfdde9 | ||
|  | 3fe93968a6 | ||
|  | 79a2d715b0 | ||
|  | 50b271c868 | ||
|  | a57f28ac83 | ||
|  | 3f3747a2fe | ||
|  | d133913c3d | ||
|  | e049cef00a | ||
|  | eb8176971c | ||
|  | 5bbfca45fa | ||
|  | 9b500cd867 | ||
|  | b52cae6575 | ||
|  | 35a0142f9b | ||
|  | d4f6ef4f1b | ||
|  | 11024deaae | ||
|  | 5a038de1d5 | ||
|  | 903982e896 | ||
|  | 6355c404cc | ||
|  | 92b9cb5d43 | ||
|  | 7580383d26 | ||
|  | ba0934e41e | ||
|  | a6a1021521 | ||
|  | 33b4d83c73 | ||
|  | 6cf630c74a | ||
|  | 736fe5b84e | ||
|  | 4241bde6ea | ||
|  | b4ce14d744 | ||
|  | 10832a2ccc | ||
|  | 91aca44f67 | ||
|  | 96cfbb201a | ||
|  | b2bc155701 | ||
|  | a70ef5594d | ||
|  | 6d991586fd | ||
|  | f8890ca841 | ||
|  | 0752c6b24f | ||
|  | 3ffaf2c0e1 | ||
|  | a3e0fbd606 | ||
|  | 9c8ceb6b4e | ||
|  | bebce2c053 | ||
|  | 34c6790762 | ||
|  | a5fb009b62 | ||
|  | 9671ca5ebf | ||
|  | 5334ea393e | ||
|  | 2aaacc02e3 | ||
|  | 222e929b2d | ||
|  | 6f16d35a92 | ||
|  | d7a2ccf5ac | ||
|  | 9ce605221a | ||
|  | 1e930fe950 | ||
|  | 4dc158589c | ||
|  | 4525eb457b | ||
|  | 56a2e07dc2 | ||
|  | 9b7fe9ac31 | ||
|  | c3da07ccf7 | ||
|  | b691a56d51 | ||
|  | 13e0a1b5bb | ||
|  | 646baddce4 | ||
|  | 02f61c323d | ||
|  | 1e3d2df9e7 | ||
|  | e43fae86f1 | ||
|  | c6151e34e0 | ||
|  | 45cb991254 | ||
|  | 839bc99f94 | ||
|  | 0aeb1ca408 | ||
|  | cd76a906f4 | ||
|  | e438491938 | ||
|  | 307b35a5bf | ||
|  | 217c9720ea | ||
|  | 778c7dc5f2 | ||
|  | 4c80154437 | ||
|  | 6bd9529a66 | ||
|  | 33ea2b4844 | ||
|  | 5c807f3dc8 | ||
|  | 9063b559c4 | ||
|  | 40f6df7160 | ||
|  | 95165aa92f | ||
|  | d96fcdb35c | ||
|  | 5efabdcea3 | ||
|  | 2d57dc0565 | ||
|  | 576629f825 | ||
|  | 5badb9d151 | ||
|  | 45dc379d9a | ||
|  | 49c0c9f44c | ||
|  | ef5fa4d062 | ||
|  | 35b66d5d94 | ||
|  | d0b749a43c | ||
|  | bcc4d4e8c6 | ||
|  | 41bff0b293 | ||
|  | dfc7f35ef1 | ||
|  | 0bbbbdde80 | ||
|  | 5fa5284b58 | ||
|  | b7ef82cb67 | ||
|  | 1233780265 | ||
|  | dd095279c8 | ||
|  | 4d5200c50f | ||
|  | 1bcd675ead | ||
|  | 2a3d3de0b2 | ||
|  | b124836f3a | ||
|  | 93ba95971b | ||
|  | 7b193b3745 | ||
|  | 2b647d2405 | ||
|  | 7714cca599 | ||
|  | 42511aa9cf | ||
|  | ace2a2f3d1 | ||
|  | 2062fe7a08 | ||
|  | d4c02c3988 | ||
|  | 4c1496b4a4 | ||
|  | eec876295d | ||
|  | 3093175f54 | ||
|  | dd05c4d34a | ||
|  | 57e3a40321 | ||
|  | 9e70152076 | ||
|  | e1da83a8f6 | ||
|  | 8108198613 | ||
|  | 915849b2ce | ||
|  | 2e96302336 | ||
|  | 051cd744ad | ||
|  | 53fbc165ba | ||
|  | 1862bcf867 | ||
|  | 8909d1d144 | ||
|  | a2f0f20284 | ||
|  | 1951b52aa5 | ||
|  | cd7a9345ec | ||
|  | dba4c33c81 | ||
|  | 153c239c9b | ||
|  | 4034ab4182 | ||
|  | 9c917c3bd3 | ||
|  | cca0222e1d | ||
|  | 682db9b81f | ||
|  | 3e000f9be1 | ||
|  | 548a552638 | ||
|  | 1d5b5b7d15 | ||
|  | 91aa4586e2 | ||
|  | 6d3bc43ef6 | ||
|  | 0f63e26641 | ||
|  | ab2ef69c6a | ||
|  | 621350515e | ||
|  | 03ed5c398a | ||
|  | 65d6f8c018 | ||
|  | 79d0673ae6 | ||
|  | cbd488e19f | ||
|  | 380d869195 | ||
|  | 73893f2a33 | ||
|  | ad81470d35 | ||
|  | fc140d04ef | ||
|  | a0257ed7e7 | ||
|  | 4769487c3b | ||
|  | 29def587ff | ||
|  | f35d0b2b37 | ||
|  | 283e92d55d | ||
|  | c82b26d334 | ||
|  | 2753e02cda | ||
|  | fde733c205 | ||
|  | f730591f2c | ||
|  | 94eac1e79d | ||
|  | 9f2b6d0ec6 | ||
|  | 7d7d0ea001 | ||
|  | 794101691c | ||
|  | a443144a5c | ||
|  | 73f0867061 | ||
|  | f97db93212 | ||
|  | d36708933c | ||
|  | 14f82ea0a9 | ||
|  | c41dd6495d | ||
|  | 1005c99e9c | ||
|  | f4478fc762 | ||
|  | c5ed308ea5 | ||
|  | 3ab5ba6149 | ||
|  | 9b2fde962c | ||
|  | 571a7dc42d | ||
|  | 3421fffa9b | ||
|  | c25619fd63 | ||
|  | 76adb13a64 | ||
|  | 33b1eed361 | ||
|  | c44891a1a8 | ||
|  | f31f52ff1c | ||
|  | 6ad9a56bd9 | ||
|  | a5c2fc4f9d | ||
|  | 0a65006bb4 | ||
|  | 3db896c4e2 | ||
|  | e80322021a | ||
|  | 48316ba60d | ||
|  | c0f1493473 | ||
|  | ccbd128fa2 | ||
|  | 46817caa68 | ||
|  | 775c8624d4 | ||
|  | 36eedc987c | ||
|  | 3b8f31c888 | ||
|  | a34fa74eaa | ||
|  | d6b2d8dcb5 | ||
|  | aab0599280 | ||
|  | dfa8eaf24e | ||
|  | 63d55cb797 | ||
|  | c642eee0d2 | ||
|  | 5f33d298d7 | ||
|  | fc39fd7519 | ||
|  | 7f442f7485 | ||
|  | 0ee3203a5a | ||
|  | 43a5df8780 | ||
|  | 0949df014b | ||
|  | 01f4dd8f97 | ||
|  | 8b7599f5d9 | ||
|  | 9bdc320cf8 | ||
|  | d9c8285806 | ||
|  | 4b8344082f | ||
|  | e5cf76b460 | ||
|  | 422ca87a12 | ||
|  | a512ccca28 | ||
|  | ba215be97c | ||
|  | ca16050681 | ||
|  | 06e4ed1bb4 | ||
|  | d4a8ae5743 | ||
|  | a4f2f811d3 | ||
|  | ebaba95eb3 | ||
|  | 31f7769199 | ||
|  | 7726be94be | ||
|  | f2cbcea6d7 | ||
|  | 5d6a28954b | ||
|  | 319f1deceb | ||
|  | 3f14958741 | ||
|  | 42ba4a5c56 | ||
|  | c804c395ed | ||
|  | 58c8cf1a3a | ||
|  | 76ea8c86b7 | ||
|  | 050378fa72 | ||
|  | 29d858d58c | ||
|  | dc45920afb | ||
|  | 15fcb57e2f | ||
|  | 91ee85152c | ||
|  | aa7bf7af1e | ||
|  | 02c1ba39ad | ||
|  | 8e8d9426df | ||
|  | 57f301815d | ||
|  | dfc9dc713c | ||
|  | 1a0cad7f5f | ||
|  | 3df436f0d8 | ||
|  | d737fca295 | ||
|  | da5a3532d7 | ||
|  | 27111e7b29 | ||
|  | b847bc0aba | ||
|  | 6eb0bc50e2 | ||
|  | 7530f03bf6 | ||
|  | 24a9633edc | ||
|  | 7e1a5ce445 | ||
|  | 2ffdbc7fc0 | ||
|  | 52c7b68cc3 | ||
|  | ddbcc8e84b | ||
|  | 2bfb195ad6 | ||
|  | cd2d9517a0 | ||
|  | 19dc312128 | ||
|  | 175659628d | ||
|  | 8fea2b09be | ||
|  | f77f45b70c | ||
|  | 103a287f11 | ||
|  | d600ade40c | ||
|  | a6a7cba121 | ||
|  | 7fff635a3f | ||
|  | 7a749b88c7 | ||
|  | 1ce6a7f4be | ||
|  | a092910fdd | ||
|  | bb77838b3e | ||
|  | 1001f1bd36 | ||
|  | de0e5583a5 | ||
|  | cbd2a44350 | ||
|  | c888e461ba | ||
|  | d135522087 | ||
|  | ce2b148dd2 | ||
|  | 2d075c4dd6 | ||
|  | bcd1841f71 | ||
|  | 029cf4ad1f | ||
|  | ed7fc86d69 | ||
|  | 82a9e43b6f | ||
|  | 9ae2c731ed | ||
|  | 7d1ba466b4 | ||
|  | 4f1d8678ea | ||
|  | 4bd72ebc63 | ||
|  | e5986e0ae2 | ||
|  | fae39e4bc9 | ||
|  | dbe8357dd5 | ||
|  | 3234f0bdd7 | ||
|  | 47a4d58009 | ||
|  | 4ae60da58d | ||
|  | 47f995bda3 | ||
|  | 42721628eb | ||
|  | f42ab957d4 | ||
|  | ce9d0d7e82 | ||
|  | baf79dda21 | ||
|  | b71a9bc097 | ||
|  | 129632cd6b | ||
|  | aca8899c4d | ||
|  | 5c3d91e65e | ||
|  | 0205d827f1 | ||
|  | 225c31d583 | ||
|  | b18d87ddba | ||
|  | 25298c72bb | ||
|  | 3df3d27533 | ||
|  | cbb0b57018 | ||
|  | 65f205bca8 | ||
|  | 1cc7f80109 | ||
|  | 213a0a18a5 | ||
|  | 1a24d599b3 | ||
|  | d80be60e2b | ||
|  | 0ffe79d76c | ||
|  | db36d0a375 | ||
|  | ff659a0be3 | ||
|  | 8485b12102 | ||
|  | d889cc3c5a | ||
|  | 7bb65fca4e | ||
|  | 8aaa5951ca | ||
|  | d58f3b7520 | ||
|  | e5a636a159 | ||
|  | 51f314e907 | ||
|  | 531fa30b69 | ||
|  | 2b3bb81fae | ||
|  | 80f80cd31f | ||
|  | 79705fbf11 | ||
|  | 191a4e569e | ||
|  | 1cac35be03 | ||
|  | 6d48100f44 | ||
|  | 4627af3e90 | ||
|  | 913952ffe1 | ||
|  | 67bf6afc89 | ||
|  | 06064decd2 | ||
|  | 4cca9f17df | ||
|  | 74a89223c0 | ||
|  | 2954017836 | ||
|  | a03262fc01 | ||
|  | d65ce6fc2c | ||
|  | d27e1eee25 | ||
|  | b1f00bb708 | ||
|  | e0f1e79e6a | ||
|  | d70b7d41e8 | ||
|  | 43af9f3fad | ||
|  | bc53dd6830 | ||
|  | 263616ef01 | ||
|  | 285da0542e | ||
|  | 17f7e2f892 | ||
|  | a29d8f1d68 | ||
|  | 8965172603 | ||
|  | 03c2967337 | ||
|  | 5b154a0da4 | ||
|  | b2c8c326d7 | ||
|  | 96aedaa91f | ||
|  | a22ad1ec32 | ||
|  | a4244defb5 | ||
|  | 57328e55f3 | ||
|  | 87c32aeb40 | ||
|  | 2e01e0c30e | ||
|  | a12b2de74a | ||
|  | 6b01d8f99b | ||
|  | eac4f6062e | ||
|  | 5583cf0a5f | ||
|  | 57d772fa23 | ||
|  | 1bdc3988a9 | ||
|  | 2af55baa9a | ||
|  | 0452eec11d | ||
|  | c4f7db6c04 | ||
|  | 3569529a84 | ||
|  | 70942ac0f6 | ||
|  | dc02e39918 | ||
|  | 73d6bc35ec | ||
|  | b1d558d700 | ||
|  | 897480265f | ||
|  | 73724f5a33 | ||
|  | bdbd495a9e | ||
|  | 1fcf009804 | ||
|  | 914c5752a5 | ||
|  | 201b12a886 | ||
|  | c5f23ad93d | ||
|  | 28d62009a7 | ||
|  | 1a5a436f82 | ||
|  | 1275ac0569 | ||
|  | 5112fb777e | ||
|  | f571a944c9 | ||
|  | bc9aff8c60 | ||
|  | c4c7ab7888 | ||
|  | d9819a990c | ||
|  | aea400e26a | ||
|  | eb4e7735c1 | ||
|  | 4b498ae8cd | ||
|  | 158e2a4ca9 | ||
|  | b011d48d82 | ||
|  | 8ac3e725f8 | ||
|  | 9a4aef0358 | ||
|  | 7d3146234a | ||
|  | 5d2ca6493d | ||
|  | 4752f9aa37 | ||
|  | 025d3a03d6 | ||
|  | aec06183e7 | ||
|  | aa28abd517 | ||
|  | 7430b31697 | ||
|  | 759f72169a | ||
|  | 1f7135be61 | ||
|  | 6942f9c1cf | ||
|  | d9da75d1c0 | ||
|  | 7ab7372be4 | ||
|  | 3503c98857 | ||
|  | 708c3f1e2a | ||
|  | 6f645e8619 | ||
|  | bce7ca7ac4 | ||
|  | 350465c25d | ||
|  | 5b9c70ae22 | ||
|  | 9b30afeca9 | ||
|  | c1b202c119 | ||
|  | 41cfe5d2ca | ||
|  | 05339e184f | ||
|  | 447127d956 | ||
|  | 394334fbea | ||
|  | 9f8cd33d43 | ||
|  | f066e28c35 | ||
|  | b349a449bb | ||
|  | 1c5898d396 | ||
|  | 6802967863 | ||
|  | 0462f18680 | ||
|  | af6699098f | ||
|  | 6b7e7dc124 | ||
|  | 6bae4c6a66 | ||
|  | 46da918dbe | ||
|  | bb7e5f17b5 | ||
|  | b9d03114c2 | ||
|  | 436b1ce176 | ||
|  | 50fb5d83f1 | ||
|  | fda672f806 | ||
|  | 2bf783b04d | ||
|  | 2f72b23a0d | ||
|  | 85336f9777 | ||
|  | 174d964553 | ||
|  | cf8677248e | ||
|  | 1e6a3163af | ||
|  | e008919978 | ||
|  | 4814066c67 | ||
|  | f17f8b48c2 | ||
|  | ab0aec0ac5 | ||
|  | b49a641ba5 | ||
|  | 2f50051426 | ||
|  | 43cc32db40 | ||
|  | b4d6f6b947 | ||
|  | 71ff533623 | ||
|  | e33a5bbef5 | ||
|  | 6c0112c2be | ||
|  | 15bbf26b93 | ||
|  | 87c97efce0 | ||
|  | 6c4aee1479 | ||
|  | 73549a9044 | ||
|  | 30fdd3e184 | ||
|  | c97eb5d63f | ||
|  | 5729c7d5e7 | ||
|  | d77b13efcb | ||
|  | c43faca7b9 | ||
|  | 892ddd5724 | ||
|  | a9de779f33 | ||
|  | 1c2f016ba0 | ||
|  | 7b4d9140af | ||
|  | c1fc87ff4e | ||
|  | cd5ea5d4e0 | ||
|  | 30c01089f5 | ||
|  | 89825a2b21 | ||
|  | a743b75bb4 | ||
|  | f7ebf8dedd | ||
|  | f6220cab3b | ||
|  | 0c5e1c4138 | ||
|  | 03fe431f1a | ||
|  | a8e4554fec | ||
|  | e81b09b9aa | ||
|  | c6e846e0ae | ||
|  | 03dcfb5c4b | ||
|  | 3e54da03e2 | ||
|  | c4b3196917 | ||
|  | 0d81e7933e | ||
|  | b2a2735034 | ||
|  | f865c5de90 | ||
|  | 4159369e8b | ||
|  | 170693cf0b | ||
|  | 4e7b5d4af8 | ||
|  | 67bf789fcf | ||
|  | f5cf616c2f | ||
|  | 7975f19817 | ||
|  | 017602056d | ||
|  | c63f43854b | ||
|  | 5cc71ec2ad | ||
|  | 80e81f8475 | ||
|  | 3685c8e015 | ||
|  | 99e943c365 | ||
|  | 21818e71f5 | ||
|  | bcc6d25e21 | ||
|  | 7b885ee0d3 | ||
|  | c10e808a4f | ||
|  | 54e9be0ed8 | ||
|  | 938cdf316a | ||
|  | 27c33911e6 | ||
|  | e88f8759e7 | ||
|  | f2992e3165 | ||
|  | c71fd1ee3b | ||
|  | fb45b19fdc | ||
|  | c4ea8d4942 | ||
|  | 646aa131ef | ||
|  | 0adb40bf92 | ||
|  | 17d6014bf1 | ||
|  | ff57cd4eaf | ||
|  | 74bd7c3744 | ||
|  | cfbb283f85 | ||
|  | 74a3c4451b | ||
|  | be3643c962 | ||
|  | f4aa546af8 | ||
|  | 67b876a7f4 | ||
|  | 94e177c0ef | ||
|  | 1bd83cc9bc | ||
|  | ecda3f4a7d | ||
|  | 8f972a965d | ||
|  | 0f051fc57c | ||
|  | c3f8925f46 | ||
|  | 5d0cab2052 | ||
|  | 4d7492f682 | ||
|  | fc9d99080f | ||
|  | 47ebac0276 | ||
|  | cb3fca03e9 | ||
|  | abbbd83729 | ||
|  | 1743ab7812 | ||
|  | 324e3972a6 | ||
|  | 1502dda2ab | ||
|  | f31b2c4a79 | ||
|  | 89b9b60e0c | ||
|  | de9ba12779 | ||
|  | 9cc4359c04 | ||
|  | 67eaf120b9 | ||
|  | b8353c4a33 | ||
|  | 7013033ae4 | ||
|  | cb8cd03852 | ||
|  | f63fb62014 | ||
|  | 2e4fb86b86 | ||
|  | 5e776a07dd | ||
|  | 81e637e50e | ||
|  | 0971ad0a80 | ||
|  | 8267ded7ec | ||
|  | 7f36ea55f5 | ||
|  | 72a051f2d3 | ||
|  | 51b197888c | ||
|  | cd63865d31 | ||
|  | 5be5685a09 | ||
|  | 76b2f25d46 | ||
|  | 58607d4a7f | ||
|  | c0a5b16a7f | ||
|  | 3a0c69005b | ||
|  | 5c295fb9e3 | ||
|  | 4ee212e7d5 | ||
|  | 70651ce994 | ||
|  | a778a91106 | ||
|  | cfc31eead3 | ||
|  | da0a1bbe9f | ||
|  | bc66fb33e9 | ||
|  | b1b6493755 | ||
|  | 1d189f239b | ||
|  | 5b90691bcc | ||
|  | d1d5972277 | ||
|  | 2c07d77368 | ||
|  | 642cfbf59a | ||
|  | bb1367cfb9 | ||
|  | 11724aa555 | ||
|  | 4d374712de | ||
|  | eb9003187d | ||
|  | caba444962 | ||
|  | 5b6c8c191f | ||
|  | dd51589f67 | ||
|  | b02a31d4b9 | ||
|  | 0e7878b406 | ||
|  | cae91ce0c5 | ||
|  | 67a65a2aa9 | ||
|  | 364b0a7163 | ||
|  | d6419f2059 | ||
|  | 6f7ad7ef91 | ||
|  | 5ae588833b | ||
|  | a70dbac0e6 | ||
|  | 4d34a02afe | ||
|  | 4db4f45897 | ||
|  | 2d5280fc95 | ||
|  | b8d568761e | ||
|  | 29309dac9a | ||
|  | 7f7745071a | ||
|  | 1914032e35 | ||
|  | f44c8f1205 | ||
|  | fe2ef4e61c | ||
|  | fc3eda55c7 | ||
|  | 8adf1cdd02 | ||
|  | adbbc656d4 | ||
|  | 8e852bce02 | ||
|  | bb461b009f | ||
|  | 03559a3cc4 | ||
|  | 7bb2fe128a | ||
|  | 2312e17a8e | ||
|  | 9835b382da | ||
|  | 1eacc6fbff | ||
|  | 85187239b6 | ||
|  | 819ff2a902 | ||
|  | c744104a18 | ||
|  | c87801f0a9 | ||
|  | 39735594bd | ||
|  | 30964f65e4 | ||
|  | ee0c7fd8bf | ||
|  | dfdecef8e7 | ||
|  | edcdfeb057 | ||
|  | 47f0de9836 | ||
|  | 9ba657797e | ||
|  | 07442a6f84 | ||
|  | 3faf3c84be | ||
|  | abcacc82f3 | ||
|  | 9544b7d968 | ||
|  | babbc8bcd6 | ||
|  | 12809ebc74 | ||
|  | b45a601ad2 | ||
|  | f099dc6a37 | ||
|  | 803caddbd4 | ||
|  | 4d7b988018 | ||
|  | c1f88a4e14 | ||
|  | 5d9ec0b208 | ||
|  | 1877cacf9c | ||
|  | 2f4978cfea | ||
|  | d27a1103fa | ||
|  | b85bb95082 | ||
|  | db7f93cff3 | ||
|  | 85e271098f | ||
|  | 17001e2f74 | ||
|  | c82f4f0d45 | ||
|  | 88247a3af9 | ||
|  | 158578a406 | ||
|  | 19314e7e06 | ||
|  | 8bcbc6d545 | ||
|  | ef55e6d476 | ||
|  | 295ef3dc1d | ||
|  | 9d125c9e79 | ||
|  | 86363986fc | ||
|  | 0a2dbbc58b | ||
|  | 673a966541 | ||
|  | db1e69813b | ||
|  | e60d56f060 | ||
|  | 328e062ae9 | ||
|  | 0523c2ea4b | ||
|  | c5c7378c63 | ||
|  | 9b2080d036 | ||
|  | d4b3649640 | ||
|  | b085993901 | ||
|  | 0d4afad342 | ||
|  | 0da694b845 | ||
|  | 6d5e7d9e81 | ||
|  | bc08bea284 | ||
|  | 0e5a0661e1 | ||
|  | a839bd428f | ||
|  | 0277062693 | ||
|  | 7affa5ab69 | ||
|  | ed22af4e73 | ||
|  | 63ebb6998e | ||
|  | 7914cd47ca | ||
|  | 708dbac70e | ||
|  | 1b62dd5c40 | ||
|  | 4911545843 | ||
|  | c5cc4b7867 | ||
|  | eacb614750 | ||
|  | 341e1e7a6d | ||
|  | a02c820c2d | ||
|  | 2f6890c78a | ||
|  | 516591fe88 | ||
|  | d2941a9110 | ||
|  | f7302f710b | ||
|  | 6a02ac7e80 | ||
|  | d1b86fdef5 | ||
|  | 57ac38ddca | ||
|  | 7a73a92074 | ||
|  | d1b30f4792 | ||
|  | 16dcf78cab | ||
|  | d868cfdeb0 | ||
|  | c074f4d925 | ||
|  | 453024c58d | ||
|  | fe8340617a | ||
|  | b024dd913d | ||
|  | a2a698ab0e | ||
|  | bb56f92213 | ||
|  | 8dcd998945 | ||
|  | bcbbbe4046 | ||
|  | 7200a8cb84 | ||
|  | 6925344807 | ||
|  | 60ceeb0ddd | ||
|  | 06caabf333 | ||
|  | 954131bd51 | ||
|  | 855efe7fe8 | ||
|  | d902a74ab0 | ||
|  | 499e11f730 | ||
|  | 6db59a9c31 | ||
|  | 6465726008 | ||
|  | 3a3b96e0be | ||
|  | 992c91dc0c | ||
|  | 809473c15c | ||
|  | d79a5ec3d6 | ||
|  | 237469ceaf | ||
|  | c28d9135d9 | ||
|  | 48a5679087 | ||
|  | 7c938712f2 | ||
|  | 4df12bebc2 | ||
|  | dfe8987aaa | ||
|  | 02dbe401d8 | ||
|  | c18f8c92e7 | ||
|  | 857cd718df | ||
|  | 11d4f6499a | ||
|  | f2c25b4744 | ||
|  | 27b846717f | ||
|  | 9ed138f896 | ||
|  | 1978dc80eb | ||
|  | fc4b247f4f | ||
|  | ebf7056f4a | ||
|  | eb975d7e13 | ||
|  | a2dd8cb6b9 | ||
|  | 7c254c6136 | ||
|  | c8a33b83f1 | ||
|  | 1145c72b01 | ||
|  | 7fc45fb711 | ||
|  | e146262c38 | ||
|  | 6f808bd06e | ||
|  | 0b6ab49325 | ||
|  | 66d9182e50 | ||
|  | 654cca82a9 | ||
|  | 89785da1c5 | ||
|  | 2f9964e46e | ||
|  | 168ecd67b0 | ||
|  | c9dc441915 | ||
|  | a7ca9950fc | ||
|  | e0dd33e6be | ||
|  | 2e718e1130 | 
							
								
								
									
										3
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										3
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -15,3 +15,6 @@ env/ | ||||
| .pydevproject | ||||
| tests/test_bugfix.py | ||||
| htmlcov/ | ||||
| venv | ||||
| venv3 | ||||
| scratchpad | ||||
|   | ||||
							
								
								
									
										23
									
								
								.install_mongodb_on_travis.sh
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										23
									
								
								.install_mongodb_on_travis.sh
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,23 @@ | ||||
| #!/bin/bash | ||||
|  | ||||
| sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10 | ||||
|  | ||||
| if [ "$MONGODB" = "2.4" ]; then | ||||
|     echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list | ||||
|     sudo apt-get update | ||||
|     sudo apt-get install mongodb-10gen=2.4.14 | ||||
|     sudo service mongodb start | ||||
| elif [ "$MONGODB" = "2.6" ]; then | ||||
|     echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list | ||||
|     sudo apt-get update | ||||
|     sudo apt-get install mongodb-org-server=2.6.12 | ||||
|     # service should be started automatically | ||||
| elif [ "$MONGODB" = "3.0" ]; then | ||||
|     echo "deb http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list | ||||
|     sudo apt-get update | ||||
|     sudo apt-get install mongodb-org-server=3.0.14 | ||||
|     # service should be started automatically | ||||
| else | ||||
|     echo "Invalid MongoDB version, expected 2.4, 2.6, or 3.0." | ||||
|     exit 1 | ||||
| fi; | ||||
							
								
								
									
										22
									
								
								.landscape.yml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										22
									
								
								.landscape.yml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,22 @@ | ||||
| pylint: | ||||
|     disable: | ||||
|         # We use this a lot (e.g. via document._meta) | ||||
|         - protected-access | ||||
|  | ||||
|     options: | ||||
|         additional-builtins: | ||||
|             # add xrange and long as valid built-ins. In Python 3, xrange is | ||||
|             # translated into range and long is translated into int via 2to3 (see | ||||
|             # "use_2to3" in setup.py). This should be removed when we drop Python | ||||
|             # 2 support (which probably won't happen any time soon). | ||||
|             - xrange | ||||
|             - long | ||||
|  | ||||
| pyflakes: | ||||
|     disable: | ||||
|         # undefined variables are already covered by pylint (and exclude | ||||
|         # xrange & long) | ||||
|         - F821 | ||||
|  | ||||
| ignore-paths: | ||||
|     - benchmark.py | ||||
							
								
								
									
										112
									
								
								.travis.yml
									
									
									
									
									
								
							
							
						
						
									
										112
									
								
								.travis.yml
									
									
									
									
									
								
							| @@ -1,29 +1,101 @@ | ||||
| # http://travis-ci.org/#!/MongoEngine/mongoengine | ||||
| # For full coverage, we'd have to test all supported Python, MongoDB, and | ||||
| # PyMongo combinations. However, that would result in an overly long build | ||||
| # with a very large number of jobs, hence we only test a subset of all the | ||||
| # combinations: | ||||
| # * MongoDB v2.4 & v3.0 are only tested against Python v2.7 & v3.5. | ||||
| # * MongoDB v2.4 is tested against PyMongo v2.7 & v3.x. | ||||
| # * MongoDB v3.0 is tested against PyMongo v3.x. | ||||
| # * MongoDB v2.6 is currently the "main" version tested against Python v2.7, | ||||
| #   v3.5, PyPy & PyPy3, and PyMongo v2.7, v2.8 & v3.x. | ||||
| # | ||||
| # Reminder: Update README.rst if you change MongoDB versions we test. | ||||
|  | ||||
| language: python | ||||
| services: mongodb | ||||
|  | ||||
| python: | ||||
|     - "2.6" | ||||
|     - "2.7" | ||||
|     - "3.2" | ||||
|     - "3.3" | ||||
| - 2.7 | ||||
| - 3.5 | ||||
| - pypy | ||||
| - pypy3 | ||||
|  | ||||
| env: | ||||
|   - PYMONGO=dev DJANGO=1.5.1 | ||||
|   - PYMONGO=dev DJANGO=1.4.2 | ||||
|   - PYMONGO=2.5 DJANGO=1.5.1 | ||||
|   - PYMONGO=2.5 DJANGO=1.4.2 | ||||
|   - PYMONGO=3.2 DJANGO=1.5.1 | ||||
|   - PYMONGO=3.3 DJANGO=1.5.1 | ||||
| - MONGODB=2.6 PYMONGO=2.7 | ||||
| - MONGODB=2.6 PYMONGO=2.8 | ||||
| - MONGODB=2.6 PYMONGO=3.0 | ||||
|  | ||||
| matrix: | ||||
|   # Finish the build as soon as one job fails | ||||
|   fast_finish: true | ||||
|  | ||||
|   include: | ||||
|   - python: 2.7 | ||||
|     env: MONGODB=2.4 PYMONGO=2.7 | ||||
|   - python: 2.7 | ||||
|     env: MONGODB=2.4 PYMONGO=3.0 | ||||
|   - python: 2.7 | ||||
|     env: MONGODB=3.0 PYMONGO=3.0 | ||||
|   - python: 3.5 | ||||
|     env: MONGODB=2.4 PYMONGO=2.7 | ||||
|   - python: 3.5 | ||||
|     env: MONGODB=2.4 PYMONGO=3.0 | ||||
|   - python: 3.5 | ||||
|     env: MONGODB=3.0 PYMONGO=3.0 | ||||
|  | ||||
| before_install: | ||||
| - bash .install_mongodb_on_travis.sh | ||||
|  | ||||
| install: | ||||
|     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi | ||||
|     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi | ||||
|     - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi | ||||
|     - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi | ||||
|     - pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b | ||||
|     - python setup.py install | ||||
| - sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev | ||||
|   libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev | ||||
|   python-tk | ||||
| - travis_retry pip install --upgrade pip | ||||
| - travis_retry pip install coveralls | ||||
| - travis_retry pip install flake8 flake8-import-order | ||||
| - travis_retry pip install tox>=1.9 | ||||
| - travis_retry pip install "virtualenv<14.0.0"  # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) | ||||
| - travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test | ||||
|  | ||||
| # Cache dependencies installed via pip | ||||
| cache: pip | ||||
|  | ||||
| # Run flake8 for py27 | ||||
| before_script: | ||||
| - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi | ||||
|  | ||||
| script: | ||||
|     - python setup.py test | ||||
| - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage | ||||
|  | ||||
| # For now only submit coveralls for Python v2.7. Python v3.x currently shows | ||||
| # 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible | ||||
| # code in a separate dir and runs tests on that. | ||||
| after_success: | ||||
| - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi | ||||
|  | ||||
| notifications: | ||||
|   irc: "irc.freenode.org#mongoengine" | ||||
|   irc: irc.freenode.org#mongoengine | ||||
|  | ||||
| # Only run builds on the master branch and GitHub releases (tagged as vX.Y.Z) | ||||
| branches: | ||||
|   only: | ||||
|   - master | ||||
|   - /^v.*$/ | ||||
|  | ||||
| # Whenever a new release is created via GitHub, publish it on PyPI. | ||||
| deploy: | ||||
|   provider: pypi | ||||
|   user: the_drow | ||||
|   password: | ||||
|     secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek= | ||||
|  | ||||
|   # create a source distribution and a pure python wheel for faster installs | ||||
|   distributions: "sdist bdist_wheel" | ||||
|  | ||||
|   # only deploy on tagged commits (aka GitHub releases) and only for the | ||||
|   # parent repo's builds running Python 2.7 along with dev PyMongo (we run | ||||
|   # Travis against many different Python and PyMongo versions and we don't | ||||
|   # want the deploy to occur multiple times). | ||||
|   on: | ||||
|     tags: true | ||||
|     repo: MongoEngine/mongoengine | ||||
|     condition: "$PYMONGO = 3.0" | ||||
|     python: 2.7 | ||||
|   | ||||
							
								
								
									
										76
									
								
								AUTHORS
									
									
									
									
									
								
							
							
						
						
									
										76
									
								
								AUTHORS
									
									
									
									
									
								
							| @@ -12,7 +12,7 @@ Laine Herron https://github.com/LaineHerron | ||||
|  | ||||
| CONTRIBUTORS | ||||
|  | ||||
| Dervived from the git logs, inevitably incomplete but all of whom and others | ||||
| Derived from the git logs, inevitably incomplete but all of whom and others | ||||
| have submitted patches, reported bugs and generally helped make MongoEngine | ||||
| that much better: | ||||
|  | ||||
| @@ -119,7 +119,7 @@ that much better: | ||||
|  * Anton Kolechkin | ||||
|  * Sergey Nikitin | ||||
|  * psychogenic | ||||
|  * Stefan Wójcik | ||||
|  * Stefan Wójcik (https://github.com/wojcikstefan) | ||||
|  * dimonb | ||||
|  * Garry Polley | ||||
|  * James Slagle | ||||
| @@ -134,15 +134,14 @@ that much better: | ||||
|  * Paul Swartz | ||||
|  * Sundar Raman | ||||
|  * Benoit Louy | ||||
|  * lraucy | ||||
|  * Loic Raucy (https://github.com/lraucy) | ||||
|  * hellysmile | ||||
|  * Jaepil Jeong | ||||
|  * Daniil Sharou | ||||
|  * Stefan Wójcik | ||||
|  * Pete Campton | ||||
|  * Martyn Smith | ||||
|  * Marcelo Anton | ||||
|  * Aleksey Porfirov | ||||
|  * Aleksey Porfirov (https://github.com/lexqt) | ||||
|  * Nicolas Trippar | ||||
|  * Manuel Hermann | ||||
|  * Gustavo Gawryszewski | ||||
| @@ -171,7 +170,7 @@ that much better: | ||||
|  * Michael Bartnett (https://github.com/michaelbartnett) | ||||
|  * Alon Horev (https://github.com/alonho) | ||||
|  * Kelvin Hammond (https://github.com/kelvinhammond) | ||||
|  * Jatin- (https://github.com/jatin-) | ||||
|  * Jatin Chopra (https://github.com/jatin) | ||||
|  * Paul Uithol (https://github.com/PaulUithol) | ||||
|  * Thom Knowles (https://github.com/fleat) | ||||
|  * Paul (https://github.com/squamous) | ||||
| @@ -179,3 +178,68 @@ that much better: | ||||
|  * crazyzubr (https://github.com/crazyzubr) | ||||
|  * FrankSomething (https://github.com/FrankSomething) | ||||
|  * Alexandr Morozov (https://github.com/LK4D4) | ||||
|  * mishudark (https://github.com/mishudark) | ||||
|  * Joe Friedl (https://github.com/grampajoe) | ||||
|  * Daniel Ward (https://github.com/danielward) | ||||
|  * Aniket Deshpande (https://github.com/anicake) | ||||
|  * rfkrocktk (https://github.com/rfkrocktk) | ||||
|  * Gustavo Andrés Angulo (https://github.com/woakas) | ||||
|  * Dmytro Popovych (https://github.com/drudim) | ||||
|  * Tom (https://github.com/tomprimozic) | ||||
|  * j0hnsmith (https://github.com/j0hnsmith) | ||||
|  * Damien Churchill (https://github.com/damoxc) | ||||
|  * Jonathan Simon Prates (https://github.com/jonathansp) | ||||
|  * Thiago Papageorgiou (https://github.com/tmpapageorgiou) | ||||
|  * Omer Katz (https://github.com/thedrow) | ||||
|  * Falcon Dai (https://github.com/falcondai) | ||||
|  * Polyrabbit (https://github.com/polyrabbit) | ||||
|  * Sagiv Malihi (https://github.com/sagivmalihi) | ||||
|  * Dmitry Konishchev (https://github.com/KonishchevDmitry) | ||||
|  * Martyn Smith (https://github.com/martynsmith) | ||||
|  * Andrei Zbikowski (https://github.com/b1naryth1ef) | ||||
|  * Ronald van Rij (https://github.com/ronaldvanrij) | ||||
|  * François Schmidts (https://github.com/jaesivsm) | ||||
|  * Eric Plumb (https://github.com/professorplumb) | ||||
|  * Damien Churchill (https://github.com/damoxc) | ||||
|  * Aleksandr Sorokoumov (https://github.com/Gerrrr) | ||||
|  * Clay McClure (https://github.com/claymation) | ||||
|  * Bruno Rocha (https://github.com/rochacbruno) | ||||
|  * Norberto Leite (https://github.com/nleite) | ||||
|  * Bob Cribbs (https://github.com/bocribbz) | ||||
|  * Jay Shirley (https://github.com/jshirley) | ||||
|  * David Bordeynik (https://github.com/DavidBord) | ||||
|  * Axel Haustant (https://github.com/noirbizarre) | ||||
|  * David Czarnecki (https://github.com/czarneckid) | ||||
|  * Vyacheslav Murashkin (https://github.com/a4tunado) | ||||
|  * André Ericson (https://github.com/aericson) | ||||
|  * Mikhail Moshnogorsky (https://github.com/mikhailmoshnogorsky) | ||||
|  * Diego Berrocal (https://github.com/cestdiego) | ||||
|  * Matthew Ellison (https://github.com/seglberg) | ||||
|  * Jimmy Shen (https://github.com/jimmyshen) | ||||
|  * J. Fernando Sánchez (https://github.com/balkian) | ||||
|  * Michael Chase (https://github.com/rxsegrxup) | ||||
|  * Eremeev Danil (https://github.com/elephanter) | ||||
|  * Catstyle Lee (https://github.com/Catstyle) | ||||
|  * Kiryl Yermakou (https://github.com/rma4ok) | ||||
|  * Matthieu Rigal (https://github.com/MRigal) | ||||
|  * Charanpal Dhanjal (https://github.com/charanpald) | ||||
|  * Emmanuel Leblond (https://github.com/touilleMan) | ||||
|  * Breeze.Kay (https://github.com/9nix00) | ||||
|  * Vicki Donchenko (https://github.com/kivistein) | ||||
|  * Emile Caron (https://github.com/emilecaron) | ||||
|  * Amit Lichtenberg (https://github.com/amitlicht) | ||||
|  * Gang Li (https://github.com/iici-gli) | ||||
|  * Lars Butler (https://github.com/larsbutler) | ||||
|  * George Macon (https://github.com/gmacon) | ||||
|  * Ashley Whetter (https://github.com/AWhetter) | ||||
|  * Paul-Armand Verhaegen (https://github.com/paularmand) | ||||
|  * Steven Rossiter (https://github.com/BeardedSteve) | ||||
|  * Luo Peng (https://github.com/RussellLuo) | ||||
|  * Bryan Bennett (https://github.com/bbenne10) | ||||
|  * Gilb's Gilb's (https://github.com/gilbsgilbs) | ||||
|  * Joshua Nedrud (https://github.com/Neurostack) | ||||
|  * Shu Shen (https://github.com/shushen) | ||||
|  * xiaost7 (https://github.com/xiaost7) | ||||
|  * Victor Varvaryuk | ||||
|  * Stanislav Kaledin (https://github.com/sallyruthstruik) | ||||
|  * Dmitry Yantsen (https://github.com/mrTable) | ||||
|   | ||||
| @@ -20,7 +20,7 @@ post to the `user group <http://groups.google.com/group/mongoengine-users>` | ||||
| Supported Interpreters | ||||
| ---------------------- | ||||
|  | ||||
| MongoEngine supports CPython 2.6 and newer. Language | ||||
| MongoEngine supports CPython 2.7 and newer. Language | ||||
| features not supported by all interpreters can not be used. | ||||
| Please also ensure that your code is properly converted by | ||||
| `2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support. | ||||
| @@ -29,23 +29,39 @@ Style Guide | ||||
| ----------- | ||||
|  | ||||
| MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_ | ||||
| including 4 space indents and 79 character line limits. | ||||
| including 4 space indents. When possible we try to stick to 79 character line | ||||
| limits. However, screens got bigger and an ORM has a strong focus on | ||||
| readability and if it can help, we accept 119 as maximum line length, in a | ||||
| similar way as `django does | ||||
| <https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#python-style>`_ | ||||
|  | ||||
| Testing | ||||
| ------- | ||||
|  | ||||
| All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_ | ||||
| and any pull requests are automatically tested by Travis. Any pull requests | ||||
| without tests will take longer to be integrated and might be refused. | ||||
| and any pull requests are automatically tested. Any pull requests without | ||||
| tests will take longer to be integrated and might be refused. | ||||
|  | ||||
| You may also submit a simple failing test as a pull request if you don't know | ||||
| how to fix it, it will be easier for other people to work on it and it may get | ||||
| fixed faster. | ||||
|  | ||||
| General Guidelines | ||||
| ------------------ | ||||
|  | ||||
| - Avoid backward breaking changes if at all possible. | ||||
| - If you *have* to introduce a breaking change, make it very clear in your | ||||
|   pull request's description. Also, describe how users of this package | ||||
|   should adapt to the breaking change in docs/upgrade.rst. | ||||
| - Write inline documentation for new classes and methods. | ||||
| - Write tests and make sure they pass (make sure you have a mongod | ||||
|   running on the default port, then execute ``python setup.py test`` | ||||
|   running on the default port, then execute ``python setup.py nosetests`` | ||||
|   from the cmd line to run the test suite). | ||||
| - Ensure tests pass on all supported Python, PyMongo, and MongoDB versions. | ||||
|   You can test various Python and PyMongo versions locally by executing | ||||
|   ``tox``. For different MongoDB versions, you can rely on our automated | ||||
|   Travis tests. | ||||
| - Add enhancements or problematic bug fixes to docs/changelog.rst. | ||||
| - Add yourself to AUTHORS :) | ||||
|  | ||||
| Documentation | ||||
| @@ -59,3 +75,8 @@ just make your changes to the inline documentation of the appropriate | ||||
| branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_. | ||||
| You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_ | ||||
| button. | ||||
|  | ||||
| If you want to test your documentation changes locally, you need to install | ||||
| the ``sphinx`` and ``sphinx_rtd_theme`` packages. Once these are installed, | ||||
| go to the ``docs`` directory, run ``make html`` and inspect the updated docs | ||||
| by running ``open _build/html/index.html``. | ||||
|   | ||||
							
								
								
									
										99
									
								
								README.rst
									
									
									
									
									
								
							
							
						
						
									
										99
									
								
								README.rst
									
									
									
									
									
								
							| @@ -4,39 +4,72 @@ MongoEngine | ||||
| :Info: MongoEngine is an ORM-like layer on top of PyMongo. | ||||
| :Repository: https://github.com/MongoEngine/mongoengine | ||||
| :Author: Harry Marr (http://github.com/hmarr) | ||||
| :Maintainer: Ross Lawley (http://github.com/rozza) | ||||
| :Maintainer: Stefan Wójcik (http://github.com/wojcikstefan) | ||||
|  | ||||
| .. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master | ||||
|   :target: http://travis-ci.org/MongoEngine/mongoengine | ||||
| .. image:: https://travis-ci.org/MongoEngine/mongoengine.svg?branch=master | ||||
|   :target: https://travis-ci.org/MongoEngine/mongoengine | ||||
|  | ||||
| .. image:: https://coveralls.io/repos/github/MongoEngine/mongoengine/badge.svg?branch=master | ||||
|   :target: https://coveralls.io/github/MongoEngine/mongoengine?branch=master | ||||
|  | ||||
| .. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.svg?style=flat | ||||
|   :target: https://landscape.io/github/MongoEngine/mongoengine/master | ||||
|   :alt: Code Health | ||||
|  | ||||
| About | ||||
| ===== | ||||
| MongoEngine is a Python Object-Document Mapper for working with MongoDB. | ||||
| Documentation available at http://mongoengine-odm.rtfd.org - there is currently | ||||
| a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide | ||||
| <https://mongoengine-odm.readthedocs.org/en/latest/guide/index.html>`_ and an `API reference | ||||
| <http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_. | ||||
| Documentation is available at https://mongoengine-odm.readthedocs.io - there | ||||
| is currently a `tutorial <https://mongoengine-odm.readthedocs.io/tutorial.html>`_, | ||||
| a `user guide <https://mongoengine-odm.readthedocs.io/guide/index.html>`_, and | ||||
| an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_. | ||||
|  | ||||
| Supported MongoDB Versions | ||||
| ========================== | ||||
| MongoEngine is currently tested against MongoDB v2.4, v2.6, and v3.0. Future | ||||
| versions should be supported as well, but aren't actively tested at the moment. | ||||
| Make sure to open an issue or submit a pull request if you experience any | ||||
| problems with MongoDB v3.2+. | ||||
|  | ||||
| Installation | ||||
| ============ | ||||
| If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | ||||
| you can use ``easy_install -U mongoengine``. Otherwise, you can download the | ||||
| We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of | ||||
| `pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``. | ||||
| You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | ||||
| and thus you can use ``easy_install -U mongoengine``. Otherwise, you can download the | ||||
| source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python | ||||
| setup.py install``. | ||||
|  | ||||
| Dependencies | ||||
| ============ | ||||
| - pymongo 2.5+ | ||||
| - sphinx (optional - for documentation generation) | ||||
| All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_. | ||||
| At the very least, you'll need these two packages to use MongoEngine: | ||||
|  | ||||
| - pymongo>=2.7.1 | ||||
| - six>=1.10.0 | ||||
|  | ||||
| If you utilize a ``DateTimeField``, you might also use a more flexible date parser: | ||||
|  | ||||
| - dateutil>=2.1.0 | ||||
|  | ||||
| If you need to use an ``ImageField`` or ``ImageGridFsProxy``: | ||||
|  | ||||
| - Pillow>=2.0.0 | ||||
|  | ||||
| Examples | ||||
| ======== | ||||
| Some simple examples of what MongoEngine code looks like:: | ||||
| Some simple examples of what MongoEngine code looks like: | ||||
|  | ||||
| .. code :: python | ||||
|  | ||||
|     from mongoengine import * | ||||
|     connect('mydb') | ||||
|  | ||||
|     class BlogPost(Document): | ||||
|         title = StringField(required=True, max_length=200) | ||||
|         posted = DateTimeField(default=datetime.datetime.now) | ||||
|         posted = DateTimeField(default=datetime.datetime.utcnow) | ||||
|         tags = ListField(StringField(max_length=50)) | ||||
|         meta = {'allow_inheritance': True} | ||||
|  | ||||
|     class TextPost(BlogPost): | ||||
|         content = StringField(required=True) | ||||
| @@ -64,23 +97,46 @@ Some simple examples of what MongoEngine code looks like:: | ||||
|     ...     print | ||||
|     ... | ||||
|  | ||||
|     >>> len(BlogPost.objects) | ||||
|     # Count all blog posts and its subtypes | ||||
|     >>> BlogPost.objects.count() | ||||
|     2 | ||||
|     >>> len(HtmlPost.objects) | ||||
|     >>> TextPost.objects.count() | ||||
|     1 | ||||
|     >>> len(LinkPost.objects) | ||||
|     >>> LinkPost.objects.count() | ||||
|     1 | ||||
|  | ||||
|     # Find tagged posts | ||||
|     >>> len(BlogPost.objects(tags='mongoengine')) | ||||
|     # Count tagged posts | ||||
|     >>> BlogPost.objects(tags='mongoengine').count() | ||||
|     2 | ||||
|     >>> len(BlogPost.objects(tags='mongodb')) | ||||
|     >>> BlogPost.objects(tags='mongodb').count() | ||||
|     1 | ||||
|  | ||||
| Tests | ||||
| ===== | ||||
| To run the test suite, ensure you are running a local instance of MongoDB on | ||||
| the standard port, and run: ``python setup.py test``. | ||||
| the standard port and have ``nose`` installed. Then, run ``python setup.py nosetests``. | ||||
|  | ||||
| To run the test suite on every supported Python and PyMongo version, you can | ||||
| use ``tox``. You'll need to make sure you have each supported Python version | ||||
| installed in your environment and then: | ||||
|  | ||||
| .. code-block:: shell | ||||
|  | ||||
|     # Install tox | ||||
|     $ pip install tox | ||||
|     # Run the test suites | ||||
|     $ tox | ||||
|  | ||||
| If you wish to run a subset of tests, use the nosetests convention: | ||||
|  | ||||
| .. code-block:: shell | ||||
|  | ||||
|     # Run all the tests in a particular test file | ||||
|     $ python setup.py nosetests --tests tests/fields/fields.py | ||||
|     # Run only particular test class in that file | ||||
|     $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest | ||||
|     # Use the -s option if you want to print some debug statements or use pdb | ||||
|     $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest -s | ||||
|  | ||||
| Community | ||||
| ========= | ||||
| @@ -88,8 +144,7 @@ Community | ||||
|   <http://groups.google.com/group/mongoengine-users>`_ | ||||
| - `MongoEngine Developers mailing list | ||||
|   <http://groups.google.com/group/mongoengine-dev>`_ | ||||
| - `#mongoengine IRC channel <http://webchat.freenode.net/?channels=mongoengine>`_ | ||||
|  | ||||
| Contributing | ||||
| ============ | ||||
| We welcome contributions! see  the`Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_ | ||||
| We welcome contributions! See the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_ | ||||
|   | ||||
							
								
								
									
										249
									
								
								benchmark.py
									
									
									
									
									
								
							
							
						
						
									
										249
									
								
								benchmark.py
									
									
									
									
									
								
							| @@ -1,118 +1,42 @@ | ||||
| #!/usr/bin/env python | ||||
|  | ||||
| """ | ||||
| Simple benchmark comparing PyMongo and MongoEngine. | ||||
|  | ||||
| Sample run on a mid 2015 MacBook Pro (commit b282511): | ||||
|  | ||||
| Benchmarking... | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries - Pymongo | ||||
| 2.58979988098 | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries - Pymongo write_concern={"w": 0} | ||||
| 1.26657605171 | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries - MongoEngine | ||||
| 8.4351580143 | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries without continual assign - MongoEngine | ||||
| 7.20191693306 | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True | ||||
| 6.31104588509 | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True | ||||
| 6.07083487511 | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False | ||||
| 5.97704291344 | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False | ||||
| 5.9111430645 | ||||
| """ | ||||
|  | ||||
| import timeit | ||||
|  | ||||
|  | ||||
| def cprofile_main(): | ||||
|     from pymongo import Connection | ||||
|     connection = Connection() | ||||
|     connection.drop_database('timeit_test') | ||||
|     connection.disconnect() | ||||
|  | ||||
|     from mongoengine import Document, DictField, connect | ||||
|     connect("timeit_test") | ||||
|  | ||||
|     class Noddy(Document): | ||||
|         fields = DictField() | ||||
|  | ||||
|     for i in xrange(1): | ||||
|         noddy = Noddy() | ||||
|         for j in range(20): | ||||
|             noddy.fields["key" + str(j)] = "value " + str(j) | ||||
|         noddy.save() | ||||
|  | ||||
|  | ||||
| def main(): | ||||
|     """ | ||||
|     0.4 Performance Figures ... | ||||
|  | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - Pymongo | ||||
|     3.86744189262 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine | ||||
|     6.23374891281 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||
|     5.33027005196 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||
|     pass - No Cascade | ||||
|  | ||||
|     0.5.X | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - Pymongo | ||||
|     3.89597702026 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine | ||||
|     21.7735359669 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||
|     19.8670389652 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||
|     pass - No Cascade | ||||
|  | ||||
|     0.6.X | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - Pymongo | ||||
|     3.81559205055 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine | ||||
|     10.0446798801 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||
|     9.51354718208 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||
|     9.02567505836 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, force=True | ||||
|     8.44933390617 | ||||
|  | ||||
|     0.7.X | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - Pymongo | ||||
|     3.78801012039 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine | ||||
|     9.73050498962 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||
|     8.33456707001 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||
|     8.37778115273 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, force=True | ||||
|     8.36906409264 | ||||
|     0.8.X | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - Pymongo | ||||
|     3.69964408875 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - Pymongo write_concern={"w": 0} | ||||
|     3.5526599884 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine | ||||
|     7.00959801674 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries without continual assign - MongoEngine | ||||
|     5.60943293571 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade=True | ||||
|     6.715102911 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True | ||||
|     5.50644683838 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False | ||||
|     4.69851183891 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False | ||||
|     4.68946313858 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     """ | ||||
|     print("Benchmarking...") | ||||
|  | ||||
|     setup = """ | ||||
| from pymongo import MongoClient | ||||
| @@ -127,7 +51,31 @@ connection = MongoClient() | ||||
| db = connection.timeit_test | ||||
| noddy = db.noddy | ||||
|  | ||||
| for i in xrange(10000): | ||||
| for i in range(10000): | ||||
|     example = {'fields': {}} | ||||
|     for j in range(20): | ||||
|         example['fields']['key' + str(j)] = 'value ' + str(j) | ||||
|  | ||||
|     noddy.save(example) | ||||
|  | ||||
| myNoddys = noddy.find() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries - Pymongo""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|     stmt = """ | ||||
| from pymongo import MongoClient | ||||
| from pymongo.write_concern import WriteConcern | ||||
| connection = MongoClient() | ||||
|  | ||||
| db = connection.get_database('timeit_test', write_concern=WriteConcern(w=0)) | ||||
| noddy = db.noddy | ||||
|  | ||||
| for i in range(10000): | ||||
|     example = {'fields': {}} | ||||
|     for j in range(20): | ||||
|         example['fields']["key"+str(j)] = "value "+str(j) | ||||
| @@ -138,49 +86,26 @@ myNoddys = noddy.find() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print "-" * 100 | ||||
|     print """Creating 10000 dictionaries - Pymongo""" | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|  | ||||
|     stmt = """ | ||||
| from pymongo import MongoClient | ||||
| connection = MongoClient() | ||||
|  | ||||
| db = connection.timeit_test | ||||
| noddy = db.noddy | ||||
|  | ||||
| for i in xrange(10000): | ||||
|     example = {'fields': {}} | ||||
|     for j in range(20): | ||||
|         example['fields']["key"+str(j)] = "value "+str(j) | ||||
|  | ||||
|     noddy.save(example, write_concern={"w": 0}) | ||||
|  | ||||
| myNoddys = noddy.find() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print "-" * 100 | ||||
|     print """Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""" | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|     setup = """ | ||||
| from pymongo import MongoClient | ||||
| connection = MongoClient() | ||||
| connection.drop_database('timeit_test') | ||||
| connection.disconnect() | ||||
| connection.close() | ||||
|  | ||||
| from mongoengine import Document, DictField, connect | ||||
| connect("timeit_test") | ||||
| connect('timeit_test') | ||||
|  | ||||
| class Noddy(Document): | ||||
|     fields = DictField() | ||||
| """ | ||||
|  | ||||
|     stmt = """ | ||||
| for i in xrange(10000): | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
| @@ -190,13 +115,13 @@ myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print "-" * 100 | ||||
|     print """Creating 10000 dictionaries - MongoEngine""" | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries - MongoEngine""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in xrange(10000): | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     fields = {} | ||||
|     for j in range(20): | ||||
| @@ -208,13 +133,13 @@ myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print "-" * 100 | ||||
|     print """Creating 10000 dictionaries without continual assign - MongoEngine""" | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries without continual assign - MongoEngine""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in xrange(10000): | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
| @@ -224,13 +149,13 @@ myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print "-" * 100 | ||||
|     print """Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""" | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in xrange(10000): | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
| @@ -240,13 +165,13 @@ myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print "-" * 100 | ||||
|     print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""" | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in xrange(10000): | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
| @@ -256,13 +181,13 @@ myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print "-" * 100 | ||||
|     print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""" | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in xrange(10000): | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
| @@ -272,10 +197,10 @@ myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print "-" * 100 | ||||
|     print """Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""" | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|   | ||||
							
								
								
									
										233
									
								
								docs/_themes/nature/static/nature.css_t
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										233
									
								
								docs/_themes/nature/static/nature.css_t
									
									
									
									
										vendored
									
									
								
							| @@ -1,233 +0,0 @@ | ||||
| /** | ||||
|  * Sphinx stylesheet -- default theme | ||||
|  * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||||
|  */ | ||||
|  | ||||
| @import url("basic.css"); | ||||
|  | ||||
| #changelog p.first {margin-bottom: 0 !important;} | ||||
| #changelog p {margin-top: 0 !important; | ||||
|               margin-bottom: 0 !important;} | ||||
|  | ||||
| /* -- page layout ----------------------------------------------------------- */ | ||||
|  | ||||
| body { | ||||
|     font-family: Arial, sans-serif; | ||||
|     font-size: 100%; | ||||
|     background-color: #111; | ||||
|     color: #555; | ||||
|     margin: 0; | ||||
|     padding: 0; | ||||
| } | ||||
|  | ||||
| div.documentwrapper { | ||||
|     float: left; | ||||
|     width: 100%; | ||||
| } | ||||
|  | ||||
| div.bodywrapper { | ||||
|     margin: 0 0 0 230px; | ||||
| } | ||||
|  | ||||
| hr{ | ||||
|     border: 1px solid #B1B4B6; | ||||
| } | ||||
|  | ||||
| div.document { | ||||
|     background-color: #eee; | ||||
| } | ||||
|  | ||||
| div.body { | ||||
|     background-color: #ffffff; | ||||
|     color: #3E4349; | ||||
|     padding: 0 30px 30px 30px; | ||||
|     font-size: 0.8em; | ||||
| } | ||||
|  | ||||
| div.footer { | ||||
|     color: #555; | ||||
|     width: 100%; | ||||
|     padding: 13px 0; | ||||
|     text-align: center; | ||||
|     font-size: 75%; | ||||
| } | ||||
|  | ||||
| div.footer a { | ||||
|     color: #444; | ||||
|     text-decoration: underline; | ||||
| } | ||||
|  | ||||
| div.related { | ||||
|     background-color: #6BA81E; | ||||
|     line-height: 32px; | ||||
|     color: #fff; | ||||
|     text-shadow: 0px 1px 0 #444; | ||||
|     font-size: 0.80em; | ||||
| } | ||||
|  | ||||
| div.related a { | ||||
|     color: #E2F3CC; | ||||
| } | ||||
|  | ||||
| div.sphinxsidebar { | ||||
|     font-size: 0.75em; | ||||
|     line-height: 1.5em; | ||||
| } | ||||
|  | ||||
| div.sphinxsidebarwrapper{ | ||||
|     padding: 20px 0; | ||||
| } | ||||
|  | ||||
| div.sphinxsidebar h3, | ||||
| div.sphinxsidebar h4 { | ||||
|     font-family: Arial, sans-serif; | ||||
|     color: #222; | ||||
|     font-size: 1.2em; | ||||
|     font-weight: normal; | ||||
|     margin: 0; | ||||
|     padding: 5px 10px; | ||||
|     background-color: #ddd; | ||||
|     text-shadow: 1px 1px 0 white | ||||
| } | ||||
|  | ||||
| div.sphinxsidebar h4{ | ||||
|     font-size: 1.1em; | ||||
| } | ||||
|  | ||||
| div.sphinxsidebar h3 a { | ||||
|     color: #444; | ||||
| } | ||||
|  | ||||
|  | ||||
| div.sphinxsidebar p { | ||||
|     color: #888; | ||||
|     padding: 5px 20px; | ||||
| } | ||||
|  | ||||
| div.sphinxsidebar p.topless { | ||||
| } | ||||
|  | ||||
| div.sphinxsidebar ul { | ||||
|     margin: 10px 20px; | ||||
|     padding: 0; | ||||
|     color: #000; | ||||
| } | ||||
|  | ||||
| div.sphinxsidebar a { | ||||
|     color: #444; | ||||
| } | ||||
|  | ||||
| div.sphinxsidebar input { | ||||
|     border: 1px solid #ccc; | ||||
|     font-family: sans-serif; | ||||
|     font-size: 1em; | ||||
| } | ||||
|  | ||||
| div.sphinxsidebar input[type=text]{ | ||||
|     margin-left: 20px; | ||||
| } | ||||
|  | ||||
| /* -- body styles ----------------------------------------------------------- */ | ||||
|  | ||||
| a { | ||||
|     color: #005B81; | ||||
|     text-decoration: none; | ||||
| } | ||||
|  | ||||
| a:hover { | ||||
|     color: #E32E00; | ||||
|     text-decoration: underline; | ||||
| } | ||||
|  | ||||
| div.body h1, | ||||
| div.body h2, | ||||
| div.body h3, | ||||
| div.body h4, | ||||
| div.body h5, | ||||
| div.body h6 { | ||||
|     font-family: Arial, sans-serif; | ||||
|     background-color: #BED4EB; | ||||
|     font-weight: normal; | ||||
|     color: #212224; | ||||
|     margin: 30px 0px 10px 0px; | ||||
|     padding: 5px 0 5px 10px; | ||||
|     text-shadow: 0px 1px 0 white | ||||
| } | ||||
|  | ||||
| div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; } | ||||
| div.body h2 { font-size: 150%; background-color: #C8D5E3; } | ||||
| div.body h3 { font-size: 120%; background-color: #D8DEE3; } | ||||
| div.body h4 { font-size: 110%; background-color: #D8DEE3; } | ||||
| div.body h5 { font-size: 100%; background-color: #D8DEE3; } | ||||
| div.body h6 { font-size: 100%; background-color: #D8DEE3; } | ||||
|  | ||||
| a.headerlink { | ||||
|     color: #c60f0f; | ||||
|     font-size: 0.8em; | ||||
|     padding: 0 4px 0 4px; | ||||
|     text-decoration: none; | ||||
| } | ||||
|  | ||||
| a.headerlink:hover { | ||||
|     background-color: #c60f0f; | ||||
|     color: white; | ||||
| } | ||||
|  | ||||
| div.body p, div.body dd, div.body li { | ||||
|     line-height: 1.5em; | ||||
| } | ||||
|  | ||||
| div.admonition p.admonition-title + p { | ||||
|     display: inline; | ||||
| } | ||||
|  | ||||
| div.highlight{ | ||||
|     background-color: white; | ||||
| } | ||||
|  | ||||
| div.note { | ||||
|     background-color: #eee; | ||||
|     border: 1px solid #ccc; | ||||
| } | ||||
|  | ||||
| div.seealso { | ||||
|     background-color: #ffc; | ||||
|     border: 1px solid #ff6; | ||||
| } | ||||
|  | ||||
| div.topic { | ||||
|     background-color: #eee; | ||||
| } | ||||
|  | ||||
| div.warning { | ||||
|     background-color: #ffe4e4; | ||||
|     border: 1px solid #f66; | ||||
| } | ||||
|  | ||||
| p.admonition-title { | ||||
|     display: inline; | ||||
| } | ||||
|  | ||||
| p.admonition-title:after { | ||||
|     content: ":"; | ||||
| } | ||||
|  | ||||
| pre { | ||||
|     padding: 10px; | ||||
|     background-color: White; | ||||
|     color: #222; | ||||
|     line-height: 1.2em; | ||||
|     border: 1px solid #C6C9CB; | ||||
|     font-size: 1.2em; | ||||
|     margin: 1.5em 0 1.5em 0; | ||||
|     -webkit-box-shadow: 1px 1px 1px #d8d8d8; | ||||
|     -moz-box-shadow: 1px 1px 1px #d8d8d8; | ||||
| } | ||||
|  | ||||
| tt { | ||||
|     background-color: #ecf0f3; | ||||
|     color: #222; | ||||
|     padding: 1px 2px; | ||||
|     font-size: 1.2em; | ||||
|     font-family: monospace; | ||||
| } | ||||
							
								
								
									
										54
									
								
								docs/_themes/nature/static/pygments.css
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										54
									
								
								docs/_themes/nature/static/pygments.css
									
									
									
									
										vendored
									
									
								
							| @@ -1,54 +0,0 @@ | ||||
| .c { color: #999988; font-style: italic } /* Comment */ | ||||
| .k { font-weight: bold } /* Keyword */ | ||||
| .o { font-weight: bold } /* Operator */ | ||||
| .cm { color: #999988; font-style: italic } /* Comment.Multiline */ | ||||
| .cp { color: #999999; font-weight: bold } /* Comment.preproc */ | ||||
| .c1 { color: #999988; font-style: italic } /* Comment.Single */ | ||||
| .gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */ | ||||
| .ge { font-style: italic } /* Generic.Emph */ | ||||
| .gr { color: #aa0000 } /* Generic.Error */ | ||||
| .gh { color: #999999 } /* Generic.Heading */ | ||||
| .gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */ | ||||
| .go { color: #111 } /* Generic.Output */ | ||||
| .gp { color: #555555 } /* Generic.Prompt */ | ||||
| .gs { font-weight: bold } /* Generic.Strong */ | ||||
| .gu { color: #aaaaaa } /* Generic.Subheading */ | ||||
| .gt { color: #aa0000 } /* Generic.Traceback */ | ||||
| .kc { font-weight: bold } /* Keyword.Constant */ | ||||
| .kd { font-weight: bold } /* Keyword.Declaration */ | ||||
| .kp { font-weight: bold } /* Keyword.Pseudo */ | ||||
| .kr { font-weight: bold } /* Keyword.Reserved */ | ||||
| .kt { color: #445588; font-weight: bold } /* Keyword.Type */ | ||||
| .m { color: #009999 } /* Literal.Number */ | ||||
| .s { color: #bb8844 } /* Literal.String */ | ||||
| .na { color: #008080 } /* Name.Attribute */ | ||||
| .nb { color: #999999 } /* Name.Builtin */ | ||||
| .nc { color: #445588; font-weight: bold } /* Name.Class */ | ||||
| .no { color: #ff99ff } /* Name.Constant */ | ||||
| .ni { color: #800080 } /* Name.Entity */ | ||||
| .ne { color: #990000; font-weight: bold } /* Name.Exception */ | ||||
| .nf { color: #990000; font-weight: bold } /* Name.Function */ | ||||
| .nn { color: #555555 } /* Name.Namespace */ | ||||
| .nt { color: #000080 } /* Name.Tag */ | ||||
| .nv { color: purple } /* Name.Variable */ | ||||
| .ow { font-weight: bold } /* Operator.Word */ | ||||
| .mf { color: #009999 } /* Literal.Number.Float */ | ||||
| .mh { color: #009999 } /* Literal.Number.Hex */ | ||||
| .mi { color: #009999 } /* Literal.Number.Integer */ | ||||
| .mo { color: #009999 } /* Literal.Number.Oct */ | ||||
| .sb { color: #bb8844 } /* Literal.String.Backtick */ | ||||
| .sc { color: #bb8844 } /* Literal.String.Char */ | ||||
| .sd { color: #bb8844 } /* Literal.String.Doc */ | ||||
| .s2 { color: #bb8844 } /* Literal.String.Double */ | ||||
| .se { color: #bb8844 } /* Literal.String.Escape */ | ||||
| .sh { color: #bb8844 } /* Literal.String.Heredoc */ | ||||
| .si { color: #bb8844 } /* Literal.String.Interpol */ | ||||
| .sx { color: #bb8844 } /* Literal.String.Other */ | ||||
| .sr { color: #808000 } /* Literal.String.Regex */ | ||||
| .s1 { color: #bb8844 } /* Literal.String.Single */ | ||||
| .ss { color: #bb8844 } /* Literal.String.Symbol */ | ||||
| .bp { color: #999999 } /* Name.Builtin.Pseudo */ | ||||
| .vc { color: #ff99ff } /* Name.Variable.Class */ | ||||
| .vg { color: #ff99ff } /* Name.Variable.Global */ | ||||
| .vi { color: #ff99ff } /* Name.Variable.Instance */ | ||||
| .il { color: #009999 } /* Literal.Number.Integer.Long */ | ||||
							
								
								
									
										4
									
								
								docs/_themes/nature/theme.conf
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								docs/_themes/nature/theme.conf
									
									
									
									
										vendored
									
									
								
							| @@ -1,4 +0,0 @@ | ||||
| [theme] | ||||
| inherit = basic | ||||
| stylesheet = nature.css | ||||
| pygments_style = tango | ||||
| @@ -34,10 +34,14 @@ Documents | ||||
| .. autoclass:: mongoengine.ValidationError | ||||
|   :members: | ||||
|  | ||||
| .. autoclass:: mongoengine.FieldDoesNotExist | ||||
|  | ||||
|  | ||||
| Context Managers | ||||
| ================ | ||||
|  | ||||
| .. autoclass:: mongoengine.context_managers.switch_db | ||||
| .. autoclass:: mongoengine.context_managers.switch_collection | ||||
| .. autoclass:: mongoengine.context_managers.no_dereference | ||||
| .. autoclass:: mongoengine.context_managers.query_counter | ||||
|  | ||||
| @@ -78,11 +82,13 @@ Fields | ||||
| .. autoclass:: mongoengine.fields.GenericEmbeddedDocumentField | ||||
| .. autoclass:: mongoengine.fields.DynamicField | ||||
| .. autoclass:: mongoengine.fields.ListField | ||||
| .. autoclass:: mongoengine.fields.EmbeddedDocumentListField | ||||
| .. autoclass:: mongoengine.fields.SortedListField | ||||
| .. autoclass:: mongoengine.fields.DictField | ||||
| .. autoclass:: mongoengine.fields.MapField | ||||
| .. autoclass:: mongoengine.fields.ReferenceField | ||||
| .. autoclass:: mongoengine.fields.GenericReferenceField | ||||
| .. autoclass:: mongoengine.fields.CachedReferenceField | ||||
| .. autoclass:: mongoengine.fields.BinaryField | ||||
| .. autoclass:: mongoengine.fields.FileField | ||||
| .. autoclass:: mongoengine.fields.ImageField | ||||
| @@ -93,11 +99,29 @@ Fields | ||||
| .. autoclass:: mongoengine.fields.PointField | ||||
| .. autoclass:: mongoengine.fields.LineStringField | ||||
| .. autoclass:: mongoengine.fields.PolygonField | ||||
| .. autoclass:: mongoengine.fields.MultiPointField | ||||
| .. autoclass:: mongoengine.fields.MultiLineStringField | ||||
| .. autoclass:: mongoengine.fields.MultiPolygonField | ||||
| .. autoclass:: mongoengine.fields.GridFSError | ||||
| .. autoclass:: mongoengine.fields.GridFSProxy | ||||
| .. autoclass:: mongoengine.fields.ImageGridFsProxy | ||||
| .. autoclass:: mongoengine.fields.ImproperlyConfigured | ||||
|  | ||||
| Embedded Document Querying | ||||
| ========================== | ||||
|  | ||||
| .. versionadded:: 0.9 | ||||
|  | ||||
| Additional queries for Embedded Documents are available when using the | ||||
| :class:`~mongoengine.EmbeddedDocumentListField` to store a list of embedded | ||||
| documents. | ||||
|  | ||||
| A list of embedded documents is returned as a special list with the | ||||
| following methods: | ||||
|  | ||||
| .. autoclass:: mongoengine.base.datastructures.EmbeddedDocumentList | ||||
|     :members: | ||||
|  | ||||
| Misc | ||||
| ==== | ||||
|  | ||||
|   | ||||
| @@ -2,6 +2,253 @@ | ||||
| Changelog | ||||
| ========= | ||||
|  | ||||
| Development | ||||
| =========== | ||||
| - (Fill this out as you fix issues and develop your features). | ||||
|  | ||||
| Changes in 0.13.0 | ||||
| ================= | ||||
| - POTENTIAL BREAKING CHANGE: Added Unicode support to the `EmailField`, see | ||||
|   docs/upgrade.rst for details. | ||||
|  | ||||
| Changes in 0.12.0 | ||||
| ================= | ||||
| - POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476 | ||||
| - POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476 | ||||
| - Fixed the way `Document.objects.create` works with duplicate IDs #1485 | ||||
| - Fixed connecting to a replica set with PyMongo 2.x #1436 | ||||
| - Fixed using sets in field choices #1481 | ||||
| - Fixed deleting items from a `ListField` #1318 | ||||
| - Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237 | ||||
| - Fixed behavior of a `dec` update operator #1450 | ||||
| - Added a `rename` update operator #1454 | ||||
| - Added validation for the `db_field` parameter #1448 | ||||
| - Fixed the error message displayed when querying an `EmbeddedDocumentField` by an invalid value #1440 | ||||
| - Fixed the error message displayed when validating unicode URLs #1486 | ||||
| - Raise an error when trying to save an abstract document #1449 | ||||
|  | ||||
| Changes in 0.11.0 | ||||
| ================= | ||||
| - BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428 | ||||
| - BREAKING CHANGE: Dropped Python 2.6 support. #1428 | ||||
| - BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428 | ||||
| - BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334 | ||||
| - Fixed absent rounding for DecimalField when `force_string` is set. #1103 | ||||
|  | ||||
| Changes in 0.10.8 | ||||
| ================= | ||||
| - Added support for QuerySet.batch_size (#1426) | ||||
| - Fixed query set iteration within iteration #1427 | ||||
| - Fixed an issue where specifying a MongoDB URI host would override more information than it should #1421 | ||||
| - Added ability to filter the generic reference field by ObjectId and DBRef #1425 | ||||
| - Fixed delete cascade for models with a custom primary key field #1247 | ||||
| - Added ability to specify an authentication mechanism (e.g. X.509) #1333 | ||||
| - Added support for falsey primary keys (e.g. doc.pk = 0) #1354 | ||||
| - Fixed QuerySet#sum/average for fields w/ explicit db_field #1417 | ||||
| - Fixed filtering by embedded_doc=None #1422 | ||||
| - Added support for cursor.comment #1420 | ||||
| - Fixed doc.get_<field>_display #1419 | ||||
| - Fixed __repr__ method of the StrictDict #1424 | ||||
| - Added a deprecation warning for Python 2.6 | ||||
|  | ||||
| Changes in 0.10.7 | ||||
| ================= | ||||
| - Dropped Python 3.2 support #1390 | ||||
| - Fixed the bug where dynamic doc has index inside a dict field #1278 | ||||
| - Fixed: ListField minus index assignment does not work #1128 | ||||
| - Fixed cascade delete mixing among collections #1224 | ||||
| - Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls #1206 | ||||
| - Raise `OperationError` when trying to do a `drop_collection` on document with no collection set. | ||||
| - count on ListField of EmbeddedDocumentField fails. #1187 | ||||
| - Fixed long fields stored as int32 in Python 3. #1253 | ||||
| - MapField now handles unicodes keys correctly. #1267 | ||||
| - ListField now handles negative indicies correctly. #1270 | ||||
| - Fixed AttributeError when initializing EmbeddedDocument with positional args. #681 | ||||
| - Fixed no_cursor_timeout error with pymongo 3.0+ #1304 | ||||
| - Replaced map-reduce based QuerySet.sum/average with aggregation-based implementations #1336 | ||||
| - Fixed support for `__` to escape field names that match operators names in `update` #1351 | ||||
| - Fixed BaseDocument#_mark_as_changed #1369 | ||||
| - Added support for pickling QuerySet instances. #1397 | ||||
| - Fixed connecting to a list of hosts #1389 | ||||
| - Fixed a bug where accessing broken references wouldn't raise a DoesNotExist error #1334 | ||||
| - Fixed not being able to specify use_db_field=False on ListField(EmbeddedDocumentField) instances #1218 | ||||
| - Improvements to the dictionary fields docs #1383 | ||||
|  | ||||
| Changes in 0.10.6 | ||||
| ================= | ||||
| - Add support for mocking MongoEngine based on mongomock. #1151 | ||||
| - Fixed not being able to run tests on Windows. #1153 | ||||
| - Allow creation of sparse compound indexes. #1114 | ||||
| - count on ListField of EmbeddedDocumentField fails. #1187 | ||||
|  | ||||
| Changes in 0.10.5 | ||||
| ================= | ||||
| - Fix for reloading of strict with special fields. #1156 | ||||
|  | ||||
| Changes in 0.10.4 | ||||
| ================= | ||||
| - SaveConditionError is now importable from the top level package. #1165 | ||||
| - upsert_one method added. #1157 | ||||
|  | ||||
| Changes in 0.10.3 | ||||
| ================= | ||||
| - Fix `read_preference` (it had chaining issues with PyMongo 2.x and it didn't work at all with PyMongo 3.x) #1042 | ||||
|  | ||||
| Changes in 0.10.2 | ||||
| ================= | ||||
| - Allow shard key to point to a field in an embedded document. #551 | ||||
| - Allow arbirary metadata in fields. #1129 | ||||
| - ReferenceFields now support abstract document types. #837 | ||||
|  | ||||
| Changes in 0.10.1 | ||||
| ================= | ||||
| - Fix infinite recursion with CASCADE delete rules under specific conditions. #1046 | ||||
| - Fix CachedReferenceField bug when loading cached docs as DBRef but failing to save them. #1047 | ||||
| - Fix ignored chained options #842 | ||||
| - Document save's save_condition error raises `SaveConditionError` exception #1070 | ||||
| - Fix Document.reload for DynamicDocument. #1050 | ||||
| - StrictDict & SemiStrictDict are shadowed at init time. #1105 | ||||
| - Fix ListField minus index assignment does not work. #1119 | ||||
| - Remove code that marks field as changed when the field has default but not existed in database #1126 | ||||
| - Remove test dependencies (nose and rednose) from install dependencies list. #1079 | ||||
| - Recursively build query when using elemMatch operator. #1130 | ||||
| - Fix instance back references for lists of embedded documents. #1131 | ||||
|  | ||||
| Changes in 0.10.0 | ||||
| ================= | ||||
| - Django support was removed and will be available as a separate extension. #958 | ||||
| - Allow to load undeclared field with meta attribute 'strict': False #957 | ||||
| - Support for PyMongo 3+ #946 | ||||
| - Removed get_or_create() deprecated since 0.8.0. #300 | ||||
| - Improve Document._created status when switch collection and db #1020 | ||||
| - Queryset update doesn't go through field validation #453 | ||||
| - Added support for specifying authentication source as option `authSource` in URI. #967 | ||||
| - Fixed mark_as_changed to handle higher/lower level fields changed. #927 | ||||
| - ListField of embedded docs doesn't set the _instance attribute when iterating over it #914 | ||||
| - Support += and *= for ListField #595 | ||||
| - Use sets for populating dbrefs to dereference | ||||
| - Fixed unpickled documents replacing the global field's list. #888 | ||||
| - Fixed storage of microseconds in ComplexDateTimeField and unused separator option. #910 | ||||
| - Don't send a "cls" option to ensureIndex (related to https://jira.mongodb.org/browse/SERVER-769) | ||||
| - Fix for updating sorting in SortedListField. #978 | ||||
| - Added __ support to escape field name in fields lookup keywords that match operators names #949 | ||||
| - Fix for issue where FileField deletion did not free space in GridFS. | ||||
| - No_dereference() not respected on embedded docs containing reference. #517 | ||||
| - Document save raise an exception if save_condition fails #1005 | ||||
| - Fixes some internal _id handling issue. #961 | ||||
| - Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652 | ||||
| - Capped collection multiple of 256. #1011 | ||||
| - Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods. | ||||
| - Fix for delete with write_concern {'w': 0}. #1008 | ||||
| - Allow dynamic lookup for more than two parts. #882 | ||||
| - Added support for min_distance on geo queries. #831 | ||||
| - Allow to add custom metadata to fields #705 | ||||
|  | ||||
| Changes in 0.9.0 | ||||
| ================ | ||||
| - Update FileField when creating a new file #714 | ||||
| - Added `EmbeddedDocumentListField` for Lists of Embedded Documents. #826 | ||||
| - ComplexDateTimeField should fall back to None when null=True #864 | ||||
| - Request Support for $min, $max Field update operators #863 | ||||
| - `BaseDict` does not follow `setdefault` #866 | ||||
| - Add support for $type operator # 766 | ||||
| - Fix tests for pymongo 2.8+ #877 | ||||
| - No module named 'django.utils.importlib' (Django dev) #872 | ||||
| - Field Choices Now Accept Subclasses of Documents | ||||
| - Ensure Indexes before Each Save #812 | ||||
| - Generate Unique Indices for Lists of EmbeddedDocuments #358 | ||||
| - Sparse fields #515 | ||||
| - write_concern not in params of Collection#remove #801 | ||||
| - Better BaseDocument equality check when not saved #798 | ||||
| - OperationError: Shard Keys are immutable. Tried to update id even though the document is not yet saved #771 | ||||
| - with_limit_and_skip for count should default like in pymongo #759 | ||||
| - Fix storing value of precision attribute in DecimalField #787 | ||||
| - Set attribute to None does not work (at least for fields with default values) #734 | ||||
| - Querying by a field defined in a subclass raises InvalidQueryError #744 | ||||
| - Add Support For MongoDB 2.6.X's maxTimeMS #778 | ||||
| - abstract shouldn't be inherited in EmbeddedDocument # 789 | ||||
| - Allow specifying the '_cls' as a field for indexes #397 | ||||
| - Stop ensure_indexes running on a secondaries unless connection is through mongos #746 | ||||
| - Not overriding default values when loading a subset of fields #399 | ||||
| - Saving document doesn't create new fields in existing collection #620 | ||||
| - Added `Queryset.aggregate` wrapper to aggregation framework #703 | ||||
| - Added support to show original model fields on to_json calls instead of db_field #697 | ||||
| - Added Queryset.search_text to Text indexes searchs #700 | ||||
| - Fixed tests for Django 1.7 #696 | ||||
| - Follow ReferenceFields in EmbeddedDocuments with select_related #690 | ||||
| - Added preliminary support for text indexes #680 | ||||
| - Added `elemMatch` operator as well - `match` is too obscure #653 | ||||
| - Added support for progressive JPEG #486 #548 | ||||
| - Allow strings to be used in index creation #675 | ||||
| - Fixed EmbeddedDoc weakref proxy issue #592 | ||||
| - Fixed nested reference field distinct error #583 | ||||
| - Fixed change tracking on nested MapFields #539 | ||||
| - Dynamic fields in embedded documents now visible to queryset.only() / qs.exclude() #425 #507 | ||||
| - Add authentication_source option to register_connection #178 #464 #573 #580 #590 | ||||
| - Implemented equality between Documents and DBRefs #597 | ||||
| - Fixed ReferenceField inside nested ListFields dereferencing problem #368 | ||||
| - Added the ability to reload specific document fields #100 | ||||
| - Added db_alias support and fixes for custom map/reduce output #586 | ||||
| - post_save signal now has access to delta information about field changes #594 #589 | ||||
| - Don't query with $orderby for qs.get() #600 | ||||
| - Fix id shard key save issue #636 | ||||
| - Fixes issue with recursive embedded document errors #557 | ||||
| - Fix clear_changed_fields() clearing unsaved documents bug #602 | ||||
| - Removing support for Django 1.4.x, pymongo 2.5.x, pymongo 2.6.x. | ||||
| - Removing support for Python < 2.6.6 | ||||
| - Fixed $maxDistance location for geoJSON $near queries with MongoDB 2.6+ #664 | ||||
| - QuerySet.modify() and Document.modify() methods to provide find_and_modify() like behaviour #677 #773 | ||||
| - Added support for the using() method on a queryset #676 | ||||
| - PYPY support #673 | ||||
| - Connection pooling #674 | ||||
| - Avoid to open all documents from cursors in an if stmt #655 | ||||
| - Ability to clear the ordering #657 | ||||
| - Raise NotUniqueError in Document.update() on pymongo.errors.DuplicateKeyError #626 | ||||
| - Slots - memory improvements #625 | ||||
| - Fixed incorrectly split a query key when it ends with "_" #619 | ||||
| - Geo docs updates #613 | ||||
| - Workaround a dateutil bug #608 | ||||
| - Conditional save for atomic-style operations #511 | ||||
| - Allow dynamic dictionary-style field access #559 | ||||
| - Increase email field length to accommodate new TLDs #726 | ||||
| - index_cls is ignored when deciding to set _cls as index prefix #733 | ||||
| - Make 'db' argument to connection optional #737 | ||||
| - Allow atomic update for the entire `DictField` #742 | ||||
| - Added MultiPointField, MultiLineField, MultiPolygonField | ||||
| - Fix multiple connections aliases being rewritten #748 | ||||
| - Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791 | ||||
| - Make `in_bulk()` respect `no_dereference()` #775 | ||||
| - Handle None from model __str__; Fixes #753 #754 | ||||
| - _get_changed_fields fix for embedded documents with id field. #925 | ||||
|  | ||||
| Changes in 0.8.7 | ||||
| ================ | ||||
| - Calling reload on deleted / nonexistent documents raises DoesNotExist (#538) | ||||
| - Stop ensure_indexes running on a secondaries (#555) | ||||
| - Fix circular import issue with django auth (#531) (#545) | ||||
|  | ||||
| Changes in 0.8.6 | ||||
| ================ | ||||
| - Fix django auth import (#531) | ||||
|  | ||||
| Changes in 0.8.5 | ||||
| ================ | ||||
| - Fix multi level nested fields getting marked as changed (#523) | ||||
| - Django 1.6 login fix (#522) (#527) | ||||
| - Django 1.6 session fix (#509) | ||||
| - EmbeddedDocument._instance is now set when setting the attribute (#506) | ||||
| - Fixed EmbeddedDocument with ReferenceField equality issue (#502) | ||||
| - Fixed GenericReferenceField serialization order (#499) | ||||
| - Fixed count and none bug (#498) | ||||
| - Fixed bug with .only() and DictField with digit keys (#496) | ||||
| - Added user_permissions to Django User object (#491, #492) | ||||
| - Fix updating Geo Location fields (#488) | ||||
| - Fix handling invalid dict field value (#485) | ||||
| - Added app_label to MongoUser (#484) | ||||
| - Use defaults when host and port are passed as None (#483) | ||||
| - Fixed distinct casting issue with ListField of EmbeddedDocuments (#470) | ||||
| - Fixed Django 1.6 sessions (#454, #480) | ||||
|  | ||||
| Changes in 0.8.4 | ||||
| ================ | ||||
| - Remove database name necessity in uri connection schema (#452) | ||||
| @@ -79,7 +326,7 @@ Changes in 0.8.0 | ||||
| - Added `get_next_value` preview for SequenceFields (#319) | ||||
| - Added no_sub_classes context manager and queryset helper (#312) | ||||
| - Querysets now utilises a local cache | ||||
| - Changed __len__ behavour in the queryset (#247, #311) | ||||
| - Changed __len__ behaviour in the queryset (#247, #311) | ||||
| - Fixed querying string versions of ObjectIds issue with ReferenceField (#307) | ||||
| - Added $setOnInsert support for upserts (#308) | ||||
| - Upserts now possible with just query parameters (#309) | ||||
| @@ -130,7 +377,7 @@ Changes in 0.8.0 | ||||
| - Uses getlasterror to test created on updated saves (#163) | ||||
| - Fixed inheritance and unique index creation (#140) | ||||
| - Fixed reverse delete rule with inheritance (#197) | ||||
| - Fixed validation for GenericReferences which havent been dereferenced | ||||
| - Fixed validation for GenericReferences which haven't been dereferenced | ||||
| - Added switch_db context manager (#106) | ||||
| - Added switch_db method to document instances (#106) | ||||
| - Added no_dereference context manager (#82) (#61) | ||||
| @@ -212,11 +459,11 @@ Changes in 0.7.2 | ||||
| - Update index spec generation so its not destructive (#113) | ||||
|  | ||||
| Changes in 0.7.1 | ||||
| ================= | ||||
| ================ | ||||
| - Fixed index spec inheritance (#111) | ||||
|  | ||||
| Changes in 0.7.0 | ||||
| ================= | ||||
| ================ | ||||
| - Updated queryset.delete so you can use with skip / limit (#107) | ||||
| - Updated index creation allows kwargs to be passed through refs (#104) | ||||
| - Fixed Q object merge edge case (#109) | ||||
| @@ -297,7 +544,7 @@ Changes in 0.6.12 | ||||
| - Fixes error with _delta handling DBRefs | ||||
|  | ||||
| Changes in 0.6.11 | ||||
| ================== | ||||
| ================= | ||||
| - Fixed inconsistency handling None values field attrs | ||||
| - Fixed map_field embedded db_field issue | ||||
| - Fixed .save() _delta issue with DbRefs | ||||
| @@ -377,7 +624,7 @@ Changes in 0.6.1 | ||||
| - Fix for replicaSet connections | ||||
|  | ||||
| Changes in 0.6 | ||||
| ================ | ||||
| ============== | ||||
|  | ||||
| - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7 | ||||
| - Added support for covered indexes when inheritance is off | ||||
| @@ -466,7 +713,7 @@ Changes in v0.5 | ||||
| - Added Document Mixin support | ||||
| - Fixed queryet __repr__ mid iteration | ||||
| - Added hint() support, so can tell Mongo the proper index to use for the query | ||||
| - Fixed issue with inconsitent setting of _cls breaking inherited referencing | ||||
| - Fixed issue with inconsistent setting of _cls breaking inherited referencing | ||||
| - Added help_text and verbose_name to fields to help with some form libs | ||||
| - Updated item_frequencies to handle embedded document lookups | ||||
| - Added delta tracking now only sets / unsets explicitly changed fields | ||||
|   | ||||
| @@ -17,6 +17,10 @@ class Post(Document): | ||||
|     tags = ListField(StringField(max_length=30)) | ||||
|     comments = ListField(EmbeddedDocumentField(Comment)) | ||||
|  | ||||
|     # bugfix | ||||
|     meta = {'allow_inheritance': True} | ||||
|  | ||||
|  | ||||
| class TextPost(Post): | ||||
|     content = StringField() | ||||
|  | ||||
| @@ -45,7 +49,8 @@ print 'ALL POSTS' | ||||
| print | ||||
| for post in Post.objects: | ||||
|     print post.title | ||||
|     print '=' * post.title.count() | ||||
|     #print '=' * post.title.count() | ||||
|     print "=" * 20 | ||||
|  | ||||
|     if isinstance(post, TextPost): | ||||
|         print post.content | ||||
|   | ||||
							
								
								
									
										16
									
								
								docs/conf.py
									
									
									
									
									
								
							
							
						
						
									
										16
									
								
								docs/conf.py
									
									
									
									
									
								
							| @@ -13,6 +13,10 @@ | ||||
|  | ||||
| import sys, os | ||||
|  | ||||
| import sphinx_rtd_theme | ||||
|  | ||||
| import mongoengine | ||||
|  | ||||
| # If extensions (or modules to document with autodoc) are in another directory, | ||||
| # add these directories to sys.path here. If the directory is relative to the | ||||
| # documentation root, use os.path.abspath to make it absolute, like shown here. | ||||
| @@ -44,7 +48,6 @@ copyright = u'2009, MongoEngine Authors' | ||||
| # |version| and |release|, also used in various other places throughout the | ||||
| # built documents. | ||||
| # | ||||
| import mongoengine | ||||
| # The short X.Y version. | ||||
| version = mongoengine.get_version() | ||||
| # The full version, including alpha/beta/rc tags. | ||||
| @@ -92,15 +95,17 @@ pygments_style = 'sphinx' | ||||
|  | ||||
| # The theme to use for HTML and HTML Help pages.  Major themes that come with | ||||
| # Sphinx are currently 'default' and 'sphinxdoc'. | ||||
| html_theme = 'nature' | ||||
| html_theme = 'sphinx_rtd_theme' | ||||
|  | ||||
| # Theme options are theme-specific and customize the look and feel of a theme | ||||
| # further.  For a list of options available for each theme, see the | ||||
| # documentation. | ||||
| #html_theme_options = {} | ||||
| html_theme_options = { | ||||
|     'canonical_url': 'http://docs.mongoengine.org/en/latest/' | ||||
| } | ||||
|  | ||||
| # Add any paths that contain custom themes here, relative to this directory. | ||||
| html_theme_path = ['_themes'] | ||||
| html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] | ||||
|  | ||||
| # The name for this set of Sphinx documents.  If None, it defaults to | ||||
| # "<project> v<release> documentation". | ||||
| @@ -116,7 +121,7 @@ html_theme_path = ['_themes'] | ||||
| # The name of an image file (within the static path) to use as favicon of the | ||||
| # docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32 | ||||
| # pixels large. | ||||
| #html_favicon = None | ||||
| html_favicon = "favicon.ico" | ||||
|  | ||||
| # Add any paths that contain custom static files (such as style sheets) here, | ||||
| # relative to this directory. They are copied after the builtin static files, | ||||
| @@ -199,4 +204,3 @@ latex_documents = [ | ||||
| #latex_use_modindex = True | ||||
|  | ||||
| autoclass_content = 'both' | ||||
|  | ||||
|   | ||||
							
								
								
									
										175
									
								
								docs/django.rst
									
									
									
									
									
								
							
							
						
						
									
										175
									
								
								docs/django.rst
									
									
									
									
									
								
							| @@ -2,171 +2,18 @@ | ||||
| Django Support | ||||
| ============== | ||||
|  | ||||
| .. note:: Updated to support Django 1.5 | ||||
|  | ||||
| Connecting | ||||
| ========== | ||||
| In your **settings.py** file, ignore the standard database settings (unless you | ||||
| also plan to use the ORM in your project), and instead call | ||||
| :func:`~mongoengine.connect` somewhere in the settings module. | ||||
|  | ||||
| .. note:: | ||||
|    If you are not using another Database backend you may need to add a dummy | ||||
|    database backend to ``settings.py`` eg:: | ||||
|  | ||||
|         DATABASES = { | ||||
|             'default': { | ||||
|                 'ENGINE': 'django.db.backends.dummy' | ||||
|             } | ||||
|         } | ||||
|  | ||||
| Authentication | ||||
| ============== | ||||
| MongoEngine includes a Django authentication backend, which uses MongoDB. The | ||||
| :class:`~mongoengine.django.auth.User` model is a MongoEngine | ||||
| :class:`~mongoengine.Document`, but implements most of the methods and | ||||
| attributes that the standard Django :class:`User` model does - so the two are | ||||
| moderately compatible. Using this backend will allow you to store users in | ||||
| MongoDB but still use many of the Django authentication infrastructure (such as | ||||
| the :func:`login_required` decorator and the :func:`authenticate` function). To | ||||
| enable the MongoEngine auth backend, add the following to your **settings.py** | ||||
| file:: | ||||
|  | ||||
|     AUTHENTICATION_BACKENDS = ( | ||||
|         'mongoengine.django.auth.MongoEngineBackend', | ||||
|     ) | ||||
|  | ||||
| The :mod:`~mongoengine.django.auth` module also contains a | ||||
| :func:`~mongoengine.django.auth.get_user` helper function, that takes a user's | ||||
| :attr:`id` and returns a :class:`~mongoengine.django.auth.User` object. | ||||
|  | ||||
| .. versionadded:: 0.1.3 | ||||
|  | ||||
| Custom User model | ||||
| ================= | ||||
| Django 1.5 introduced `Custom user Models | ||||
| <https://docs.djangoproject.com/en/dev/topics/auth/customizing/#auth-custom-user>`_ | ||||
| which can be used as an alternative to the MongoEngine authentication backend. | ||||
|  | ||||
| The main advantage of this option is that other components relying on | ||||
| :mod:`django.contrib.auth` and supporting the new swappable user model are more | ||||
| likely to work. For example, you can use the ``createsuperuser`` management | ||||
| command as usual. | ||||
|  | ||||
| To enable the custom User model in Django, add ``'mongoengine.django.mongo_auth'`` | ||||
| in your ``INSTALLED_APPS`` and set ``'mongo_auth.MongoUser'`` as the custom | ||||
| user model to use. In your **settings.py** file you will have:: | ||||
|  | ||||
|     INSTALLED_APPS = ( | ||||
|         ... | ||||
|         'django.contrib.auth', | ||||
|         'mongoengine.django.mongo_auth', | ||||
|         ... | ||||
|     ) | ||||
|  | ||||
|     AUTH_USER_MODEL = 'mongo_auth.MongoUser' | ||||
|  | ||||
| An additional ``MONGOENGINE_USER_DOCUMENT`` setting enables you to replace the | ||||
| :class:`~mongoengine.django.auth.User` class with another class of your choice:: | ||||
|  | ||||
|     MONGOENGINE_USER_DOCUMENT = 'mongoengine.django.auth.User' | ||||
|  | ||||
| The custom :class:`User` must be a :class:`~mongoengine.Document` class, but | ||||
| otherwise has the same requirements as a standard custom user model, | ||||
| as specified in the `Django Documentation | ||||
| <https://docs.djangoproject.com/en/dev/topics/auth/customizing/>`_. | ||||
| In particular, the custom class must define :attr:`USERNAME_FIELD` and | ||||
| :attr:`REQUIRED_FIELDS` attributes. | ||||
|  | ||||
| Sessions | ||||
| ======== | ||||
| Django allows the use of different backend stores for its sessions. MongoEngine | ||||
| provides a MongoDB-based session backend for Django, which allows you to use | ||||
| sessions in your Django application with just MongoDB. To enable the MongoEngine | ||||
| session backend, ensure that your settings module has | ||||
| ``'django.contrib.sessions.middleware.SessionMiddleware'`` in the | ||||
| ``MIDDLEWARE_CLASSES`` field  and ``'django.contrib.sessions'`` in your | ||||
| ``INSTALLED_APPS``. From there, all you need to do is add the following line | ||||
| into your settings module:: | ||||
|  | ||||
|     SESSION_ENGINE = 'mongoengine.django.sessions' | ||||
|  | ||||
| Django provides a session cookie, which expires after ``SESSION_COOKIE_AGE`` seconds, but doesn't delete the cookie at the session backend, so ``'mongoengine.django.sessions'`` supports `MongoDB TTL | ||||
| <http://docs.mongodb.org/manual/tutorial/expire-data/>`_. | ||||
|  | ||||
| .. versionadded:: 0.2.1 | ||||
|  | ||||
| Storage | ||||
| ======= | ||||
| With MongoEngine's support for GridFS via the :class:`~mongoengine.fields.FileField`, | ||||
| it is useful to have a Django file storage backend that wraps this. The new | ||||
| storage module is called :class:`~mongoengine.django.storage.GridFSStorage`. | ||||
| Using it is very similar to using the default FileSystemStorage.:: | ||||
|  | ||||
|     from mongoengine.django.storage import GridFSStorage | ||||
|     fs = GridFSStorage() | ||||
|  | ||||
|     filename = fs.save('hello.txt', 'Hello, World!') | ||||
|  | ||||
| All of the `Django Storage API methods | ||||
| <http://docs.djangoproject.com/en/dev/ref/files/storage/>`_ have been | ||||
| implemented except :func:`path`. If the filename provided already exists, an | ||||
| underscore and a number (before the file extension, if one exists) will be | ||||
| appended to the filename until the generated filename doesn't exist. The | ||||
| :func:`save` method will return the new filename.:: | ||||
|  | ||||
|     >>> fs.exists('hello.txt') | ||||
|     True | ||||
|     >>> fs.open('hello.txt').read() | ||||
|     'Hello, World!' | ||||
|     >>> fs.size('hello.txt') | ||||
|     13 | ||||
|     >>> fs.url('hello.txt') | ||||
|     'http://your_media_url/hello.txt' | ||||
|     >>> fs.open('hello.txt').name | ||||
|     'hello.txt' | ||||
|     >>> fs.listdir() | ||||
|     ([], [u'hello.txt']) | ||||
|  | ||||
| All files will be saved and retrieved in GridFS via the :class:`FileDocument` | ||||
| document, allowing easy access to the files without the GridFSStorage | ||||
| backend.:: | ||||
|  | ||||
|     >>> from mongoengine.django.storage import FileDocument | ||||
|     >>> FileDocument.objects() | ||||
|     [<FileDocument: FileDocument object>] | ||||
|  | ||||
| .. versionadded:: 0.4 | ||||
|  | ||||
| Shortcuts | ||||
| ========= | ||||
| Inspired by the `Django shortcut get_object_or_404 | ||||
| <https://docs.djangoproject.com/en/dev/topics/http/shortcuts/#get-object-or-404>`_, | ||||
| the :func:`~mongoengine.django.shortcuts.get_document_or_404` method returns  | ||||
| a document or raises an Http404 exception if the document does not exist:: | ||||
|  | ||||
|     from mongoengine.django.shortcuts import get_document_or_404 | ||||
|      | ||||
|     admin_user = get_document_or_404(User, username='root') | ||||
|  | ||||
| The first argument may be a Document or QuerySet object. All other passed arguments | ||||
| and keyword arguments are used in the query:: | ||||
|  | ||||
|     foo_email = get_document_or_404(User.objects.only('email'), username='foo', is_active=True).email | ||||
|  | ||||
| .. note:: Like with :func:`get`, a MultipleObjectsReturned will be raised if more than one | ||||
|     object is found. | ||||
| .. note:: Django support has been split from the main MongoEngine | ||||
|     repository. The *legacy* Django extension may be found bundled with the | ||||
|     0.9 release of MongoEngine. | ||||
|  | ||||
|  | ||||
| Also inspired by the `Django shortcut get_list_or_404 | ||||
| <https://docs.djangoproject.com/en/dev/topics/http/shortcuts/#get-list-or-404>`_, | ||||
| the :func:`~mongoengine.django.shortcuts.get_list_or_404` method returns a list of | ||||
| documents or raises an Http404 exception if the list is empty:: | ||||
|  | ||||
|     from mongoengine.django.shortcuts import get_list_or_404 | ||||
|      | ||||
|     active_users = get_list_or_404(User, is_active=True) | ||||
|  | ||||
| The first argument may be a Document or QuerySet object. All other passed | ||||
| arguments and keyword arguments are used to filter the query. | ||||
| Help Wanted! | ||||
| ------------ | ||||
|  | ||||
| The MongoEngine team is looking for help contributing and maintaining a new | ||||
| Django extension for MongoEngine! If you have Django experience and would like | ||||
| to help contribute to the project, please get in touch on the  | ||||
| `mailing list <http://groups.google.com/group/mongoengine-users>`_ or by  | ||||
| simply contributing on | ||||
| `GitHub <https://github.com/MongoEngine/django-mongoengine>`_. | ||||
|   | ||||
| @@ -23,21 +23,37 @@ arguments should be provided:: | ||||
|  | ||||
|     connect('project1', username='webapp', password='pwd123') | ||||
|  | ||||
| Uri style connections are also supported - just supply the uri as | ||||
| URI style connections are also supported -- just supply the URI as | ||||
| the :attr:`host` to | ||||
| :func:`~mongoengine.connect`:: | ||||
|  | ||||
|     connect('project1', host='mongodb://localhost/database_name') | ||||
|  | ||||
| Note that database name from uri has priority over name | ||||
| in :func:`~mongoengine.connect` | ||||
| .. note:: Database, username and password from URI string overrides | ||||
|     corresponding parameters in :func:`~mongoengine.connect`: :: | ||||
|  | ||||
|         connect( | ||||
|             db='test', | ||||
|             username='user', | ||||
|             password='12345', | ||||
|             host='mongodb://admin:qwerty@localhost/production' | ||||
|         ) | ||||
|  | ||||
|     will establish connection to ``production`` database using | ||||
|     ``admin`` username and ``qwerty`` password. | ||||
|  | ||||
| Replica Sets | ||||
| =========== | ||||
| ============ | ||||
|  | ||||
| MongoEngine supports :class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient` | ||||
| to use them please use a URI style connection and provide the `replicaSet` name in the | ||||
| connection kwargs. | ||||
| MongoEngine supports connecting to replica sets:: | ||||
|  | ||||
|     from mongoengine import connect | ||||
|  | ||||
|     # Regular connect | ||||
|     connect('dbname', replicaset='rs-name') | ||||
|  | ||||
|     # MongoDB URI-style connect | ||||
|     connect(host='mongodb://localhost/dbname?replicaSet=rs-name') | ||||
|  | ||||
| Read preferences are supported through the connection or via individual | ||||
| queries by passing the read_preference :: | ||||
| @@ -48,42 +64,44 @@ queries by passing the read_preference :: | ||||
| Multiple Databases | ||||
| ================== | ||||
|  | ||||
| Multiple database support was added in MongoEngine 0.6. To use multiple | ||||
| databases you can use :func:`~mongoengine.connect` and provide an `alias` name | ||||
| for the connection - if no `alias` is provided then "default" is used. | ||||
| To use multiple databases you can use :func:`~mongoengine.connect` and provide | ||||
| an `alias` name for the connection - if no `alias` is provided then "default" | ||||
| is used. | ||||
|  | ||||
| In the background this uses :func:`~mongoengine.register_connection` to | ||||
| store the data and you can register all aliases up front if required. | ||||
|  | ||||
| Individual documents can also support multiple databases by providing a | ||||
| `db_alias` in their meta data.  This allows :class:`~pymongo.dbref.DBRef` objects | ||||
| to point across databases and collections.  Below is an example schema, using | ||||
| 3 different databases to store data:: | ||||
| `db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` | ||||
| objects to point across databases and collections. Below is an example schema, | ||||
| using 3 different databases to store data:: | ||||
|  | ||||
|         class User(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|             meta = {"db_alias": "user-db"} | ||||
|             meta = {'db_alias': 'user-db'} | ||||
|  | ||||
|         class Book(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|             meta = {"db_alias": "book-db"} | ||||
|             meta = {'db_alias': 'book-db'} | ||||
|  | ||||
|         class AuthorBooks(Document): | ||||
|             author = ReferenceField(User) | ||||
|             book = ReferenceField(Book) | ||||
|  | ||||
|             meta = {"db_alias": "users-books-db"} | ||||
|             meta = {'db_alias': 'users-books-db'} | ||||
|  | ||||
|  | ||||
| Switch Database Context Manager | ||||
| =============================== | ||||
|  | ||||
| Sometimes you may want to switch the database to query against for a class | ||||
| for example, archiving older data into a separate database for performance | ||||
| reasons. | ||||
| Context Managers | ||||
| ================ | ||||
| Sometimes you may want to switch the database or collection to query against. | ||||
| For example, archiving older data into a separate database for performance | ||||
| reasons or writing functions that dynamically choose collections to write | ||||
| a document to. | ||||
|  | ||||
| Switch Database | ||||
| --------------- | ||||
| The :class:`~mongoengine.context_managers.switch_db` context manager allows | ||||
| you to change the database alias for a given class allowing quick and easy | ||||
| access to the same User document across databases:: | ||||
| @@ -93,10 +111,29 @@ access to the same User document across databases:: | ||||
|     class User(Document): | ||||
|         name = StringField() | ||||
|  | ||||
|             meta = {"db_alias": "user-db"} | ||||
|         meta = {'db_alias': 'user-db'} | ||||
|  | ||||
|     with switch_db(User, 'archive-user-db') as User: | ||||
|             User(name="Ross").save()  # Saves the 'archive-user-db' | ||||
|         User(name='Ross').save()  # Saves the 'archive-user-db' | ||||
|  | ||||
|  | ||||
| Switch Collection | ||||
| ----------------- | ||||
| The :class:`~mongoengine.context_managers.switch_collection` context manager | ||||
| allows you to change the collection for a given class allowing quick and easy | ||||
| access to the same Group document across collections:: | ||||
|  | ||||
|         from mongoengine.context_managers import switch_collection | ||||
|  | ||||
|         class Group(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         Group(name='test').save()  # Saves in the default db | ||||
|  | ||||
|         with switch_collection(Group, 'group2000') as Group: | ||||
|             Group(name='hello Group 2000 collection!').save()  # Saves in group2000 collection | ||||
|  | ||||
|  | ||||
| .. note:: Make sure any aliases have been registered with | ||||
|     :func:`~mongoengine.register_connection` before using the context manager. | ||||
|     :func:`~mongoengine.register_connection` or :func:`~mongoengine.connect` | ||||
|     before using the context manager. | ||||
|   | ||||
| @@ -4,7 +4,7 @@ Defining documents | ||||
| In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When | ||||
| working with relational databases, rows are stored in **tables**, which have a | ||||
| strict **schema** that the rows follow. MongoDB stores documents in | ||||
| **collections** rather than tables - the principle difference is that no schema | ||||
| **collections** rather than tables --- the principal difference is that no schema | ||||
| is enforced at a database level. | ||||
|  | ||||
| Defining a document's schema | ||||
| @@ -29,7 +29,7 @@ documents are serialized based on their field order. | ||||
|  | ||||
| Dynamic document schemas | ||||
| ======================== | ||||
| One of the benefits of MongoDb is dynamic schemas for a collection, whilst data | ||||
| One of the benefits of MongoDB is dynamic schemas for a collection, whilst data | ||||
| should be planned and organised (after all explicit is better than implicit!) | ||||
| there are scenarios where having dynamic / expando style documents is desirable. | ||||
|  | ||||
| @@ -75,6 +75,7 @@ are as follows: | ||||
| * :class:`~mongoengine.fields.DynamicField` | ||||
| * :class:`~mongoengine.fields.EmailField` | ||||
| * :class:`~mongoengine.fields.EmbeddedDocumentField` | ||||
| * :class:`~mongoengine.fields.EmbeddedDocumentListField` | ||||
| * :class:`~mongoengine.fields.FileField` | ||||
| * :class:`~mongoengine.fields.FloatField` | ||||
| * :class:`~mongoengine.fields.GenericEmbeddedDocumentField` | ||||
| @@ -91,6 +92,12 @@ are as follows: | ||||
| * :class:`~mongoengine.fields.StringField` | ||||
| * :class:`~mongoengine.fields.URLField` | ||||
| * :class:`~mongoengine.fields.UUIDField` | ||||
| * :class:`~mongoengine.fields.PointField` | ||||
| * :class:`~mongoengine.fields.LineStringField` | ||||
| * :class:`~mongoengine.fields.PolygonField` | ||||
| * :class:`~mongoengine.fields.MultiPointField` | ||||
| * :class:`~mongoengine.fields.MultiLineStringField` | ||||
| * :class:`~mongoengine.fields.MultiPolygonField` | ||||
|  | ||||
| Field arguments | ||||
| --------------- | ||||
| @@ -108,7 +115,7 @@ arguments can be set on all fields: | ||||
| :attr:`default` (Default: None) | ||||
|     A value to use when no value is set for this field. | ||||
|  | ||||
|     The definion of default parameters follow `the general rules on Python | ||||
|     The definition of default parameters follow `the general rules on Python | ||||
|     <http://docs.python.org/reference/compound_stmts.html#function-definitions>`__, | ||||
|     which means that some care should be taken when dealing with default mutable objects | ||||
|     (like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`):: | ||||
| @@ -140,8 +147,10 @@ arguments can be set on all fields: | ||||
|     When True, use this field as a primary key for the collection.  `DictField` | ||||
|     and `EmbeddedDocuments` both support being the primary key for a document. | ||||
|  | ||||
|     .. note:: If set, this field is also accessible through the `pk` field. | ||||
|  | ||||
| :attr:`choices` (Default: None) | ||||
|     An iterable (e.g. a list or tuple) of choices to which the value of this | ||||
|     An iterable (e.g. list, tuple or set) of choices to which the value of this | ||||
|     field should be limited. | ||||
|  | ||||
|     Can be either be a nested tuples of value (stored in mongo) and a | ||||
| @@ -164,16 +173,16 @@ arguments can be set on all fields: | ||||
|         class Shirt(Document): | ||||
|             size = StringField(max_length=3, choices=SIZE) | ||||
|  | ||||
| :attr:`help_text` (Default: None) | ||||
|     Optional help text to output with the field - used by form libraries | ||||
|  | ||||
| :attr:`verbose_name` (Default: None) | ||||
|     Optional human-readable name for the field - used by form libraries | ||||
| :attr:`**kwargs` (Optional) | ||||
|     You can supply additional metadata as arbitrary additional keyword | ||||
|     arguments.  You can not override existing attributes, however.  Common | ||||
|     choices include `help_text` and `verbose_name`, commonly used by form and | ||||
|     widget libraries. | ||||
|  | ||||
|  | ||||
| List fields | ||||
| ----------- | ||||
| MongoDB allows the storage of lists of items. To add a list of items to a | ||||
| MongoDB allows storing lists of items. To add a list of items to a | ||||
| :class:`~mongoengine.Document`, use the :class:`~mongoengine.fields.ListField` field | ||||
| type. :class:`~mongoengine.fields.ListField` takes another field object as its first | ||||
| argument, which specifies which type elements may be stored within the list:: | ||||
| @@ -205,9 +214,9 @@ document class as the first argument:: | ||||
|  | ||||
| Dictionary Fields | ||||
| ----------------- | ||||
| Often, an embedded document may be used instead of a dictionary -- generally | ||||
| this is recommended as dictionaries don't support validation or custom field | ||||
| types. However, sometimes you will not know the structure of what you want to | ||||
| Often, an embedded document may be used instead of a dictionary -- generally | ||||
| embedded documents are recommended as dictionaries don't support validation | ||||
| or custom field types. However, sometimes you will not know the structure of what you want to | ||||
| store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate:: | ||||
|  | ||||
|     class SurveyResponse(Document): | ||||
| @@ -290,6 +299,12 @@ instance of the object to the query:: | ||||
|     # Find all pages that both Bob and John have authored | ||||
|     Page.objects(authors__all=[bob, john]) | ||||
|  | ||||
|     # Remove Bob from the authors for a page. | ||||
|     Page.objects(id='...').update_one(pull__authors=bob) | ||||
|  | ||||
|     # Add John to the authors for a page. | ||||
|     Page.objects(id='...').update_one(push__authors=john) | ||||
|  | ||||
|  | ||||
| Dealing with deletion of referred documents | ||||
| ''''''''''''''''''''''''''''''''''''''''''' | ||||
| @@ -301,12 +316,12 @@ reference with a delete rule specification.  A delete rule is specified by | ||||
| supplying the :attr:`reverse_delete_rule` attributes on the | ||||
| :class:`ReferenceField` definition, like this:: | ||||
|  | ||||
|     class Employee(Document): | ||||
|     class ProfilePage(Document): | ||||
|         ... | ||||
|         profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY) | ||||
|         employee = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE) | ||||
|  | ||||
| The declaration in this example means that when an :class:`Employee` object is | ||||
| removed, the :class:`ProfilePage` that belongs to that employee is removed as | ||||
| removed, the :class:`ProfilePage` that references that employee is removed as | ||||
| well.  If a whole batch of employees is removed, all profile pages that are | ||||
| linked are removed as well. | ||||
|  | ||||
| @@ -322,7 +337,7 @@ Its value can take any of the following constants: | ||||
|   Any object's fields still referring to the object being deleted are removed | ||||
|   (using MongoDB's "unset" operation), effectively nullifying the relationship. | ||||
| :const:`mongoengine.CASCADE` | ||||
|   Any object containing fields that are refererring to the object being deleted | ||||
|   Any object containing fields that are referring to the object being deleted | ||||
|   are deleted first. | ||||
| :const:`mongoengine.PULL` | ||||
|   Removes the reference to the object (using MongoDB's "pull" operation) | ||||
| @@ -346,11 +361,6 @@ Its value can take any of the following constants: | ||||
|    In Django, be sure to put all apps that have such delete rule declarations in | ||||
|    their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. | ||||
|  | ||||
|  | ||||
| .. warning:: | ||||
|    Signals are not triggered when doing cascading updates / deletes - if this | ||||
|    is required you must manually handle the update / delete. | ||||
|  | ||||
| Generic reference fields | ||||
| '''''''''''''''''''''''' | ||||
| A second kind of reference field also exists, | ||||
| @@ -389,7 +399,7 @@ MongoEngine allows you to specify that a field should be unique across a | ||||
| collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's | ||||
| constructor. If you try to save a document that has the same value for a unique | ||||
| field as a document that is already in the database, a | ||||
| :class:`~mongoengine.OperationError` will be raised. You may also specify | ||||
| :class:`~mongoengine.NotUniqueError` will be raised. You may also specify | ||||
| multi-field uniqueness constraints by using :attr:`unique_with`, which may be | ||||
| either a single field name, or a list or tuple of field names:: | ||||
|  | ||||
| @@ -416,7 +426,7 @@ Document collections | ||||
| ==================== | ||||
| Document classes that inherit **directly** from :class:`~mongoengine.Document` | ||||
| will have their own **collection** in the database. The name of the collection | ||||
| is by default the name of the class, coverted to lowercase (so in the example | ||||
| is by default the name of the class, converted to lowercase (so in the example | ||||
| above, the collection would be called `page`). If you need to change the name | ||||
| of the collection (e.g. to use MongoEngine with an existing database), then | ||||
| create a class dictionary attribute called :attr:`meta` on your document, and | ||||
| @@ -433,8 +443,10 @@ A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying | ||||
| :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary. | ||||
| :attr:`max_documents` is the maximum number of documents that is allowed to be | ||||
| stored in the collection, and :attr:`max_size` is the maximum size of the | ||||
| collection in bytes. If :attr:`max_size` is not specified and | ||||
| :attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB). | ||||
| collection in bytes. :attr:`max_size` is rounded up to the next multiple of | ||||
| 256 by MongoDB internally (and by MongoEngine beforehand), so use a multiple | ||||
| of 256 yourself to avoid confusion. If :attr:`max_size` is not specified and | ||||
| :attr:`max_documents` is, :attr:`max_size` defaults to 10485760 bytes (10MB). | ||||
| The following example shows a :class:`Log` document that will be limited to | ||||
| 1000 entries and 2MB of disk space:: | ||||
|  | ||||
| @@ -451,15 +463,31 @@ You can specify indexes on collections to make querying faster. This is done | ||||
| by creating a list of index specifications called :attr:`indexes` in the | ||||
| :attr:`~mongoengine.Document.meta` dictionary, where an index specification may | ||||
| either be a single field name, a tuple containing multiple field names, or a | ||||
| dictionary containing a full index definition. A direction may be specified on | ||||
| fields by prefixing the field name with a **+** (for ascending) or a **-** sign | ||||
| (for descending). Note that direction only matters on multi-field indexes. :: | ||||
| dictionary containing a full index definition. | ||||
|  | ||||
| A direction may be specified on fields by prefixing the field name with a | ||||
| **+** (for ascending) or a **-** sign (for descending). Note that direction | ||||
| only matters on multi-field indexes. Text indexes may be specified by prefixing | ||||
| the field name with a **$**. Hashed indexes may be specified by prefixing | ||||
| the field name with a **#**:: | ||||
|  | ||||
|     class Page(Document): | ||||
|         category = IntField() | ||||
|         title = StringField() | ||||
|         rating = StringField() | ||||
|         created = DateTimeField() | ||||
|         meta = { | ||||
|             'indexes': ['title', ('title', '-rating')] | ||||
|             'indexes': [ | ||||
|                 'title', | ||||
|                 '$title',  # text index | ||||
|                 '#title',  # hashed index | ||||
|                 ('title', '-rating'), | ||||
|                 ('category', '_cls'), | ||||
|                 { | ||||
|                     'fields': ['created'], | ||||
|                     'expireAfterSeconds': 3600 | ||||
|                 } | ||||
|             ] | ||||
|         } | ||||
|  | ||||
| If a dictionary is passed then the following options are available: | ||||
| @@ -509,11 +537,14 @@ There are a few top level defaults for all indexes that can be set:: | ||||
| :attr:`index_background` (Optional) | ||||
|     Set the default value for if an index should be indexed in the background | ||||
|  | ||||
| :attr:`index_cls` (Optional) | ||||
|     A way to turn off a specific index for _cls. | ||||
|  | ||||
| :attr:`index_drop_dups` (Optional) | ||||
|     Set the default value for if an index should drop duplicates | ||||
|  | ||||
| :attr:`index_cls` (Optional) | ||||
|     A way to turn off a specific index for _cls. | ||||
| .. note:: Since MongoDB 3.0, drop_dups is no longer supported: it raises a | ||||
|     warning and has no effect. | ||||
|  | ||||
|  | ||||
| Compound Indexes and Indexing sub documents | ||||
| @@ -525,6 +556,8 @@ field name to the index definition. | ||||
| Sometimes its more efficient to index parts of Embedded / dictionary fields, | ||||
| in this case use 'dot' notation to identify the value to index eg: `rank.title` | ||||
|  | ||||
| .. _geospatial-indexes: | ||||
|  | ||||
| Geospatial indexes | ||||
| ------------------ | ||||
|  | ||||
| @@ -535,6 +568,9 @@ The following fields will explicitly add a "2dsphere" index: | ||||
|     - :class:`~mongoengine.fields.PointField` | ||||
|     - :class:`~mongoengine.fields.LineStringField` | ||||
|     - :class:`~mongoengine.fields.PolygonField` | ||||
|     - :class:`~mongoengine.fields.MultiPointField` | ||||
|     - :class:`~mongoengine.fields.MultiLineStringField` | ||||
|     - :class:`~mongoengine.fields.MultiPolygonField` | ||||
|  | ||||
| As "2dsphere" indexes can be part of a compound index, you may not want the | ||||
| automatic index but would prefer a compound index.  In this example we turn off | ||||
| @@ -646,11 +682,11 @@ Shard keys | ||||
| ========== | ||||
|  | ||||
| If your collection is sharded, then you need to specify the shard key as a tuple, | ||||
| using the :attr:`shard_key` attribute of :attr:`-mongoengine.Document.meta`. | ||||
| using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`. | ||||
| This ensures that the shard key is sent with the query when calling the | ||||
| :meth:`~mongoengine.document.Document.save` or | ||||
| :meth:`~mongoengine.document.Document.update` method on an existing | ||||
| :class:`-mongoengine.Document` instance:: | ||||
| :class:`~mongoengine.Document` instance:: | ||||
|  | ||||
|     class LogEntry(Document): | ||||
|         machine = StringField() | ||||
| @@ -672,7 +708,7 @@ defined, you may subclass it and add any extra fields or methods you may need. | ||||
| As this new class is not a direct subclass of | ||||
| :class:`~mongoengine.Document`, it will not be stored in its own collection; it | ||||
| will use the same collection as its superclass uses. This allows for more | ||||
| convenient and efficient retrieval of related documents - all you need do is | ||||
| convenient and efficient retrieval of related documents -- all you need do is | ||||
| set :attr:`allow_inheritance` to True in the :attr:`meta` data for a | ||||
| document.:: | ||||
|  | ||||
| @@ -686,12 +722,12 @@ document.:: | ||||
|     class DatedPage(Page): | ||||
|         date = DateTimeField() | ||||
|  | ||||
| .. note:: From 0.8 onwards you must declare :attr:`allow_inheritance` defaults | ||||
| .. note:: From 0.8 onwards :attr:`allow_inheritance` defaults | ||||
|           to False, meaning you must set it to True to use inheritance. | ||||
|  | ||||
| Working with existing data | ||||
| -------------------------- | ||||
| As MongoEngine no longer defaults to needing :attr:`_cls` you can quickly and | ||||
| As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and | ||||
| easily get working with existing data.  Just define the document to match | ||||
| the expected schema in your database :: | ||||
|  | ||||
| @@ -714,7 +750,7 @@ Abstract classes | ||||
|  | ||||
| If you want to add some extra functionality to a group of Document classes but | ||||
| you don't need or want the overhead of inheritance you can use the | ||||
| :attr:`abstract` attribute of :attr:`-mongoengine.Document.meta`. | ||||
| :attr:`abstract` attribute of :attr:`~mongoengine.Document.meta`. | ||||
| This won't turn on :ref:`document-inheritance` but will allow you to keep your | ||||
| code DRY:: | ||||
|  | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
| Documents instances | ||||
| =================== | ||||
| To create a new document object, create an instance of the relevant document | ||||
| class, providing values for its fields as its constructor keyword arguments. | ||||
| class, providing values for its fields as constructor keyword arguments. | ||||
| You may provide values for any of the fields on the document:: | ||||
|  | ||||
|     >>> page = Page(title="Test Page") | ||||
| @@ -32,11 +32,11 @@ already exist, then any changes will be updated atomically.  For example:: | ||||
|  | ||||
|     Changes to documents are tracked and on the whole perform ``set`` operations. | ||||
|  | ||||
|     * ``list_field.push(0)`` - *sets* the resulting list | ||||
|     * ``del(list_field)``   - *unsets* whole list | ||||
|     * ``list_field.push(0)`` --- *sets* the resulting list | ||||
|     * ``del(list_field)``   --- *unsets* whole list | ||||
|  | ||||
|     With lists it's preferable to use ``Doc.update(push__list_field=0)`` as | ||||
|     this stops the whole list being updated - stopping any race conditions. | ||||
|     this stops the whole list being updated --- stopping any race conditions. | ||||
|  | ||||
| .. seealso:: | ||||
|     :ref:`guide-atomic-updates` | ||||
| @@ -74,7 +74,7 @@ Cascading Saves | ||||
| If your document contains :class:`~mongoengine.fields.ReferenceField` or | ||||
| :class:`~mongoengine.fields.GenericReferenceField` objects, then by default the | ||||
| :meth:`~mongoengine.Document.save` method will not save any changes to | ||||
| those objects.  If you want all references to also be saved also, noting each | ||||
| those objects.  If you want all references to be saved also, noting each | ||||
| save is a separate query, then passing :attr:`cascade` as True | ||||
| to the save method will cascade any saves. | ||||
|  | ||||
| @@ -113,12 +113,13 @@ you may still use :attr:`id` to access the primary key if you want:: | ||||
|     >>> bob.id == bob.email == 'bob@example.com' | ||||
|     True | ||||
|  | ||||
| You can also access the document's "primary key" using the :attr:`pk` field; in | ||||
| is an alias to :attr:`id`:: | ||||
| You can also access the document's "primary key" using the :attr:`pk` field, | ||||
| it's an alias to :attr:`id`:: | ||||
|  | ||||
|     >>> page = Page(title="Another Test Page") | ||||
|     >>> page.save() | ||||
|     >>> page.id == page.pk | ||||
|     True | ||||
|  | ||||
| .. note:: | ||||
|  | ||||
|   | ||||
| @@ -20,7 +20,7 @@ a document is created to store details about animals, including a photo:: | ||||
|  | ||||
|     marmot = Animal(genus='Marmota', family='Sciuridae') | ||||
|  | ||||
|     marmot_photo = open('marmot.jpg', 'r') | ||||
|     marmot_photo = open('marmot.jpg', 'rb') | ||||
|     marmot.photo.put(marmot_photo, content_type = 'image/jpeg') | ||||
|     marmot.save() | ||||
|  | ||||
| @@ -46,7 +46,7 @@ slightly different manner.  First, a new file must be created by calling the | ||||
|     marmot.photo.write('some_more_image_data') | ||||
|     marmot.photo.close() | ||||
|  | ||||
|     marmot.photo.save() | ||||
|     marmot.save() | ||||
|  | ||||
| Deletion | ||||
| -------- | ||||
| @@ -70,5 +70,5 @@ Replacing files | ||||
| Files can be replaced with the :func:`replace` method. This works just like | ||||
| the :func:`put` method so even metadata can (and should) be replaced:: | ||||
|  | ||||
|     another_marmot = open('another_marmot.png', 'r') | ||||
|     another_marmot = open('another_marmot.png', 'rb') | ||||
|     marmot.photo.replace(another_marmot, content_type='image/png') | ||||
|   | ||||
| @@ -12,3 +12,5 @@ User Guide | ||||
|    querying | ||||
|    gridfs | ||||
|    signals | ||||
|    text-indexes | ||||
|    mongomock | ||||
|   | ||||
| @@ -2,13 +2,13 @@ | ||||
| Installing MongoEngine | ||||
| ====================== | ||||
|  | ||||
| To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_ | ||||
| To use MongoEngine, you will need to download `MongoDB <http://mongodb.com/>`_ | ||||
| and ensure it is running in an accessible location. You will also need | ||||
| `PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you | ||||
| install MongoEngine using setuptools, then the dependencies will be handled for | ||||
| you. | ||||
|  | ||||
| MongoEngine is available on PyPI, so to use it you can use :program:`pip`: | ||||
| MongoEngine is available on PyPI, so you can use :program:`pip`: | ||||
|  | ||||
| .. code-block:: console | ||||
|  | ||||
|   | ||||
							
								
								
									
										21
									
								
								docs/guide/mongomock.rst
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										21
									
								
								docs/guide/mongomock.rst
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,21 @@ | ||||
| ============================== | ||||
| Use mongomock for testing | ||||
| ============================== | ||||
|  | ||||
| `mongomock <https://github.com/vmalloc/mongomock/>`_ is a package to do just  | ||||
| what the name implies, mocking a mongo database. | ||||
|  | ||||
| To use with mongoengine, simply specify mongomock when connecting with  | ||||
| mongoengine: | ||||
|  | ||||
| .. code-block:: python | ||||
|  | ||||
|     connect('mongoenginetest', host='mongomock://localhost') | ||||
|     conn = get_connection() | ||||
|  | ||||
| or with an alias: | ||||
|  | ||||
| .. code-block:: python | ||||
|  | ||||
|     connect('mongoenginetest', host='mongomock://localhost', alias='testdb') | ||||
|     conn = get_connection('testdb') | ||||
| @@ -17,7 +17,7 @@ fetch documents from the database:: | ||||
|  | ||||
|     As of MongoEngine 0.8 the querysets utilise a local cache.  So iterating | ||||
|     it multiple times will only cause a single query.  If this is not the | ||||
|     desired behavour you can call :class:`~mongoengine.QuerySet.no_cache` | ||||
|     desired behaviour you can call :class:`~mongoengine.QuerySet.no_cache` | ||||
|     (version **0.8.3+**) to return a non-caching queryset. | ||||
|  | ||||
| Filtering queries | ||||
| @@ -39,10 +39,18 @@ syntax:: | ||||
|     # been written by a user whose 'country' field is set to 'uk' | ||||
|     uk_pages = Page.objects(author__country='uk') | ||||
|  | ||||
| .. note:: | ||||
|  | ||||
|    (version **0.9.1+**) If your field name matches a MongoDB operator name (for | ||||
|    example type, lte, lt...) and it is placed at the end of the lookup keyword, | ||||
|    mongoengine automatically prepends $ to it. To avoid this, append __ to | ||||
|    your lookup keyword. For example, if your field name is ``type`` and you want to | ||||
|    query by this field, you must use ``.objects(user__type__="admin")`` instead of | ||||
|    ``.objects(user__type="admin")`` | ||||
|  | ||||
| Query operators | ||||
| =============== | ||||
| Operators other than equality may also be used in queries; just attach the | ||||
| Operators other than equality may also be used in queries --- just attach the | ||||
| operator name to a key with a double-underscore:: | ||||
|  | ||||
|     # Only find users whose age is 18 or less | ||||
| @@ -84,19 +92,20 @@ expressions: | ||||
| Geo queries | ||||
| ----------- | ||||
|  | ||||
| There are a few special operators for performing geographical queries. The following | ||||
| were added in 0.8 for:  :class:`~mongoengine.fields.PointField`, | ||||
| There are a few special operators for performing geographical queries. | ||||
| The following were added in MongoEngine 0.8 for | ||||
| :class:`~mongoengine.fields.PointField`, | ||||
| :class:`~mongoengine.fields.LineStringField` and | ||||
| :class:`~mongoengine.fields.PolygonField`: | ||||
|  | ||||
| * ``geo_within`` -- Check if a geometry is within a polygon.  For ease of use | ||||
| * ``geo_within`` -- check if a geometry is within a polygon. For ease of use | ||||
|   it accepts either a geojson geometry or just the polygon coordinates eg:: | ||||
|  | ||||
|         loc.objects(point__geo_with=[[[40, 5], [40, 6], [41, 6], [40, 5]]]) | ||||
|         loc.objects(point__geo_with={"type": "Polygon", | ||||
|         loc.objects(point__geo_within=[[[40, 5], [40, 6], [41, 6], [40, 5]]]) | ||||
|         loc.objects(point__geo_within={"type": "Polygon", | ||||
|                                  "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) | ||||
|  | ||||
| * ``geo_within_box`` - simplified geo_within searching with a box eg:: | ||||
| * ``geo_within_box`` -- simplified geo_within searching with a box eg:: | ||||
|  | ||||
|         loc.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)]) | ||||
|         loc.objects(point__geo_within_box=[<bottom left coordinates>, <upper right coordinates>]) | ||||
| @@ -132,23 +141,22 @@ were added in 0.8 for:  :class:`~mongoengine.fields.PointField`, | ||||
|         loc.objects(poly__geo_intersects={"type": "Polygon", | ||||
|                                           "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}) | ||||
|  | ||||
| * ``near`` -- Find all the locations near a given point:: | ||||
| * ``near`` -- find all the locations near a given point:: | ||||
|  | ||||
|         loc.objects(point__near=[40, 5]) | ||||
|         loc.objects(point__near={"type": "Point", "coordinates": [40, 5]}) | ||||
|  | ||||
|  | ||||
|     You can also set the maximum distance in meters as well:: | ||||
|   You can also set the maximum and/or the minimum distance in meters as well:: | ||||
|  | ||||
|         loc.objects(point__near=[40, 5], point__max_distance=1000) | ||||
|  | ||||
|         loc.objects(point__near=[40, 5], point__min_distance=100) | ||||
|  | ||||
| The older 2D indexes are still supported with the | ||||
| :class:`~mongoengine.fields.GeoPointField`: | ||||
|  | ||||
| * ``within_distance`` -- provide a list containing a point and a maximum | ||||
|   distance (e.g. [(41.342, -87.653), 5]) | ||||
| * ``within_spherical_distance`` -- Same as above but using the spherical geo model | ||||
| * ``within_spherical_distance`` -- same as above but using the spherical geo model | ||||
|   (e.g. [(41.342, -87.653), 5/earth_radius]) | ||||
| * ``near`` -- order the documents by how close they are to a given point | ||||
| * ``near_sphere`` -- Same as above but using the spherical geo model | ||||
| @@ -161,7 +169,8 @@ The older 2D indexes are still supported with the | ||||
|  | ||||
| * ``max_distance`` -- can be added to your location queries to set a maximum | ||||
|   distance. | ||||
|  | ||||
| * ``min_distance`` -- can be added to your location queries to set a minimum | ||||
|   distance. | ||||
|  | ||||
| Querying lists | ||||
| -------------- | ||||
| @@ -198,12 +207,14 @@ However, this doesn't map well to the syntax so you can also use a capital S ins | ||||
|  | ||||
|     Post.objects(comments__by="joe").update(inc__comments__S__votes=1) | ||||
|  | ||||
|     .. note:: Due to Mongo currently the $ operator only applies to the first matched item in the query. | ||||
| .. note:: | ||||
|     Due to :program:`Mongo`, currently the $ operator only applies to the | ||||
|     first matched item in the query. | ||||
|  | ||||
|  | ||||
| Raw queries | ||||
| ----------- | ||||
| It is possible to provide a raw PyMongo query as a query parameter, which will | ||||
| It is possible to provide a raw :mod:`PyMongo` query as a query parameter, which will | ||||
| be integrated directly into the query. This is done using the ``__raw__`` | ||||
| keyword argument:: | ||||
|  | ||||
| @@ -213,12 +224,12 @@ keyword argument:: | ||||
|  | ||||
| Limiting and skipping results | ||||
| ============================= | ||||
| Just as with traditional ORMs, you may limit the number of results returned, or | ||||
| Just as with traditional ORMs, you may limit the number of results returned or | ||||
| skip a number of results in your query. | ||||
| :meth:`~mongoengine.queryset.QuerySet.limit` and | ||||
| :meth:`~mongoengine.queryset.QuerySet.skip` methods are available on | ||||
| :class:`~mongoengine.queryset.QuerySet` objects, but the prefered syntax for | ||||
| achieving this is using array-slicing syntax:: | ||||
| :class:`~mongoengine.queryset.QuerySet` objects, but the `array-slicing` syntax | ||||
| is preferred for achieving this:: | ||||
|  | ||||
|     # Only the first 5 people | ||||
|     users = User.objects[:5] | ||||
| @@ -226,7 +237,7 @@ achieving this is using array-slicing syntax:: | ||||
|     # All except for the first 5 people | ||||
|     users = User.objects[5:] | ||||
|  | ||||
|     # 5 users, starting from the 10th user found | ||||
|     # 5 users, starting from the 11th user found | ||||
|     users = User.objects[10:15] | ||||
|  | ||||
| You may also index the query to retrieve a single result. If an item at that | ||||
| @@ -252,23 +263,17 @@ To retrieve a result that should be unique in the collection, use | ||||
| no document matches the query, and | ||||
| :class:`~mongoengine.queryset.MultipleObjectsReturned` | ||||
| if more than one document matched the query.  These exceptions are merged into | ||||
| your document defintions eg: `MyDoc.DoesNotExist` | ||||
| your document definitions eg: `MyDoc.DoesNotExist` | ||||
|  | ||||
| A variation of this method exists, | ||||
| :meth:`~mongoengine.queryset.Queryset.get_or_create`, that will create a new | ||||
| document with the query arguments if no documents match the query. An | ||||
| additional keyword argument, :attr:`defaults` may be provided, which will be | ||||
| used as default values for the new document, in the case that it should need | ||||
| to be created:: | ||||
|  | ||||
|     >>> a, created = User.objects.get_or_create(name='User A', defaults={'age': 30}) | ||||
|     >>> b, created = User.objects.get_or_create(name='User A', defaults={'age': 40}) | ||||
|     >>> a.name == b.name and a.age == b.age | ||||
|     True | ||||
| A variation of this method, get_or_create() existed, but it was unsafe. It | ||||
| could not be made safe, because there are no transactions in MongoDB. Other | ||||
| approaches should be investigated, to ensure you don't accidentally duplicate | ||||
| data when using something similar to this method. Therefore it was deprecated | ||||
| in 0.8 and removed in 0.10. | ||||
|  | ||||
| Default Document queries | ||||
| ======================== | ||||
| By default, the objects :attr:`~mongoengine.Document.objects` attribute on a | ||||
| By default, the objects :attr:`~Document.objects` attribute on a | ||||
| document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter | ||||
| the collection -- it returns all objects. This may be changed by defining a | ||||
| method on a document that modifies a queryset. The method should accept two | ||||
| @@ -311,7 +316,7 @@ Should you want to add custom methods for interacting with or filtering | ||||
| documents, extending the :class:`~mongoengine.queryset.QuerySet` class may be | ||||
| the way to go. To use a custom :class:`~mongoengine.queryset.QuerySet` class on | ||||
| a document, set ``queryset_class`` to the custom class in a | ||||
| :class:`~mongoengine.Document`\ s ``meta`` dictionary:: | ||||
| :class:`~mongoengine.Document`'s ``meta`` dictionary:: | ||||
|  | ||||
|     class AwesomerQuerySet(QuerySet): | ||||
|  | ||||
| @@ -335,12 +340,19 @@ Javascript code that is executed on the database server. | ||||
|  | ||||
| Counting results | ||||
| ---------------- | ||||
| Just as with limiting and skipping results, there is a method on | ||||
| :class:`~mongoengine.queryset.QuerySet` objects -- | ||||
| :meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic | ||||
| way of achieving this:: | ||||
| Just as with limiting and skipping results, there is a method on a | ||||
| :class:`~mongoengine.queryset.QuerySet` object -- | ||||
| :meth:`~mongoengine.queryset.QuerySet.count`:: | ||||
|  | ||||
|     num_users = len(User.objects) | ||||
|     num_users = User.objects.count() | ||||
|  | ||||
| You could technically use ``len(User.objects)`` to get the same result, but it | ||||
| would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`. | ||||
| When you execute a server-side count query, you let MongoDB do the heavy | ||||
| lifting and you receive a single integer over the wire. Meanwhile, len() | ||||
| retrieves all the results, places them in a local cache, and finally counts | ||||
| them. If we compare the performance of the two operations, len() is much slower | ||||
| than :meth:`~mongoengine.queryset.QuerySet.count`. | ||||
|  | ||||
| Further aggregation | ||||
| ------------------- | ||||
| @@ -472,6 +484,8 @@ operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the | ||||
| first positional argument to :attr:`Document.objects` when you filter it by | ||||
| calling it with keyword arguments:: | ||||
|  | ||||
|     from mongoengine.queryset.visitor import Q | ||||
|  | ||||
|     # Get published posts | ||||
|     Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now())) | ||||
|  | ||||
| @@ -488,22 +502,28 @@ calling it with keyword arguments:: | ||||
| Atomic updates | ||||
| ============== | ||||
| Documents may be updated atomically by using the | ||||
| :meth:`~mongoengine.queryset.QuerySet.update_one` and | ||||
| :meth:`~mongoengine.queryset.QuerySet.update` methods on a | ||||
| :meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers" | ||||
| that you may use with these methods: | ||||
| :meth:`~mongoengine.queryset.QuerySet.update_one`, | ||||
| :meth:`~mongoengine.queryset.QuerySet.update` and | ||||
| :meth:`~mongoengine.queryset.QuerySet.modify` methods on a | ||||
| :class:`~mongoengine.queryset.QuerySet` or | ||||
| :meth:`~mongoengine.Document.modify` and | ||||
| :meth:`~mongoengine.Document.save` (with :attr:`save_condition` argument) on a | ||||
| :class:`~mongoengine.Document`. | ||||
| There are several different "modifiers" that you may use with these methods: | ||||
|  | ||||
| * ``set`` -- set a particular value | ||||
| * ``unset`` -- delete a particular value (since MongoDB v1.3+) | ||||
| * ``unset`` -- delete a particular value (since MongoDB v1.3) | ||||
| * ``inc`` -- increment a value by a given amount | ||||
| * ``dec`` -- decrement a value by a given amount | ||||
| * ``push`` -- append a value to a list | ||||
| * ``push_all`` -- append several values to a list | ||||
| * ``pop`` -- remove the first or last element of a list | ||||
| * ``pop`` -- remove the first or last element of a list `depending on the value`_ | ||||
| * ``pull`` -- remove a value from a list | ||||
| * ``pull_all`` -- remove several values from a list | ||||
| * ``add_to_set`` -- add value to a list only if its not in the list already | ||||
|  | ||||
| .. _depending on the value: http://docs.mongodb.org/manual/reference/operator/update/pop/ | ||||
|  | ||||
| The syntax for atomic updates is similar to the querying syntax, but the | ||||
| modifier comes before the field, not after it:: | ||||
|  | ||||
| @@ -522,6 +542,13 @@ modifier comes before the field, not after it:: | ||||
|     >>> post.tags | ||||
|     ['database', 'nosql'] | ||||
|  | ||||
| .. note:: | ||||
|  | ||||
|     If no modifier operator is specified the default will be ``$set``. So the following sentences are identical:: | ||||
|  | ||||
|         >>> BlogPost.objects(id=post.id).update(title='Example Post') | ||||
|         >>> BlogPost.objects(id=post.id).update(set__title='Example Post') | ||||
|  | ||||
| .. note:: | ||||
|  | ||||
|     In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates | ||||
| @@ -580,7 +607,7 @@ Some variables are made available in the scope of the Javascript function: | ||||
|  | ||||
| The following example demonstrates the intended usage of | ||||
| :meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums | ||||
| over a field on a document (this functionality is already available throught | ||||
| over a field on a document (this functionality is already available through | ||||
| :meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of | ||||
| example):: | ||||
|  | ||||
|   | ||||
| @@ -35,25 +35,25 @@ Available signals include: | ||||
|   :class:`~mongoengine.EmbeddedDocument` instance has been completed. | ||||
|  | ||||
| `pre_save` | ||||
|   Called within :meth:`~mongoengine.document.Document.save` prior to performing | ||||
|   Called within :meth:`~mongoengine.Document.save` prior to performing | ||||
|   any actions. | ||||
|  | ||||
| `pre_save_post_validation` | ||||
|   Called within :meth:`~mongoengine.document.Document.save` after validation | ||||
|   Called within :meth:`~mongoengine.Document.save` after validation | ||||
|   has taken place but before saving. | ||||
|  | ||||
| `post_save` | ||||
|   Called within :meth:`~mongoengine.document.Document.save` after all actions | ||||
|   Called within :meth:`~mongoengine.Document.save` after all actions | ||||
|   (validation, insert/update, cascades, clearing dirty flags) have completed | ||||
|   successfully.  Passed the additional boolean keyword argument `created` to | ||||
|   indicate if the save was an insert or an update. | ||||
|  | ||||
| `pre_delete` | ||||
|   Called within :meth:`~mongoengine.document.Document.delete` prior to | ||||
|   Called within :meth:`~mongoengine.Document.delete` prior to | ||||
|   attempting the delete operation. | ||||
|  | ||||
| `post_delete` | ||||
|   Called within :meth:`~mongoengine.document.Document.delete` upon successful | ||||
|   Called within :meth:`~mongoengine.Document.delete` upon successful | ||||
|   deletion of the record. | ||||
|  | ||||
| `pre_bulk_insert` | ||||
| @@ -142,11 +142,4 @@ cleaner looking while still allowing manual execution of the callback:: | ||||
|         modified = DateTimeField() | ||||
|  | ||||
|  | ||||
| ReferenceFields and Signals | ||||
| --------------------------- | ||||
|  | ||||
| Currently `reverse_delete_rules` do not trigger signals on the other part of | ||||
| the relationship.  If this is required you must manually handle the | ||||
| reverse deletion. | ||||
|  | ||||
| .. _blinker: http://pypi.python.org/pypi/blinker | ||||
|   | ||||
							
								
								
									
										51
									
								
								docs/guide/text-indexes.rst
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										51
									
								
								docs/guide/text-indexes.rst
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,51 @@ | ||||
| =========== | ||||
| Text Search | ||||
| =========== | ||||
|  | ||||
| MongoDB 2.4 and later versions support searching documents by text indexes. | ||||
|  | ||||
|  | ||||
| Defining a Document with text index | ||||
| =================================== | ||||
| Use the *$* prefix to set a text index. Consider the following declaration:: | ||||
|    | ||||
|   class News(Document): | ||||
|       title = StringField() | ||||
|       content = StringField() | ||||
|       is_active = BooleanField() | ||||
|  | ||||
|       meta = {'indexes': [ | ||||
|           {'fields': ['$title', "$content"], | ||||
|            'default_language': 'english', | ||||
|            'weights': {'title': 10, 'content': 2} | ||||
|           } | ||||
|       ]} | ||||
|  | ||||
|  | ||||
|  | ||||
| Querying | ||||
| ======== | ||||
|  | ||||
| Saving a document:: | ||||
|  | ||||
|   News(title="Using mongodb text search", | ||||
|        content="Testing text search").save() | ||||
|  | ||||
|   News(title="MongoEngine 0.9 released", | ||||
|        content="Various improvements").save() | ||||
|  | ||||
| Next, start a text search using the :attr:`QuerySet.search_text` method:: | ||||
|    | ||||
|   document = News.objects.search_text('testing').first() | ||||
|   document.title # may be: "Using mongodb text search" | ||||
|    | ||||
|   document = News.objects.search_text('released').first() | ||||
|   document.title # may be: "MongoEngine 0.9 released" | ||||
|  | ||||
|  | ||||
| Ordering by text score | ||||
| ====================== | ||||
|  | ||||
| :: | ||||
|  | ||||
|   objects = News.objects.search('mongo').order_by('$text_score') | ||||
| @@ -14,7 +14,7 @@ MongoDB. To install it, simply run | ||||
|   MongoEngine. | ||||
|  | ||||
| :doc:`guide/index` | ||||
|   The Full guide to MongoEngine - from modeling documents to storing files, | ||||
|   The Full guide to MongoEngine --- from modeling documents to storing files, | ||||
|   from querying for data to firing signals and *everything* between. | ||||
|  | ||||
| :doc:`apireference` | ||||
|   | ||||
| @@ -3,11 +3,10 @@ Tutorial | ||||
| ======== | ||||
|  | ||||
| This tutorial introduces **MongoEngine** by means of example --- we will walk | ||||
| through how to create a simple **Tumblelog** application. A Tumblelog is a type | ||||
| of blog where posts are not constrained to being conventional text-based posts. | ||||
| As well as text-based entries, users may post images, links, videos, etc. For | ||||
| simplicity's sake, we'll stick to text, image and link entries in our | ||||
| application. As the purpose of this tutorial is to introduce MongoEngine, we'll | ||||
| through how to create a simple **Tumblelog** application. A tumblelog is a | ||||
| blog that supports mixed media content, including text, images, links, video, | ||||
| audio, etc. For simplicity's sake, we'll stick to text, image, and link | ||||
| entries. As the purpose of this tutorial is to introduce MongoEngine, we'll | ||||
| focus on the data-modelling side of the application, leaving out a user | ||||
| interface. | ||||
|  | ||||
| @@ -16,14 +15,14 @@ Getting started | ||||
|  | ||||
| Before we start, make sure that a copy of MongoDB is running in an accessible | ||||
| location --- running it locally will be easier, but if that is not an option | ||||
| then it may be run on a remote server. If you haven't installed mongoengine, | ||||
| then it may be run on a remote server. If you haven't installed MongoEngine, | ||||
| simply use pip to install it like so:: | ||||
|  | ||||
|     $ pip install mongoengine | ||||
|  | ||||
| Before we can start using MongoEngine, we need to tell it how to connect to our | ||||
| instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect` | ||||
| function. If running locally the only argument we need to provide is the name | ||||
| function. If running locally, the only argument we need to provide is the name | ||||
| of the MongoDB database to use:: | ||||
|  | ||||
|     from mongoengine import * | ||||
| @@ -39,8 +38,8 @@ Defining our documents | ||||
| MongoDB is *schemaless*, which means that no schema is enforced by the database | ||||
| --- we may add and remove fields however we want and MongoDB won't complain. | ||||
| This makes life a lot easier in many regards, especially when there is a change | ||||
| to the data model. However, defining schemata for our documents can help to | ||||
| iron out bugs involving incorrect types or missing fields, and also allow us to | ||||
| to the data model. However, defining schemas for our documents can help to iron | ||||
| out bugs involving incorrect types or missing fields, and also allow us to | ||||
| define utility methods on our documents in the same way that traditional | ||||
| :abbr:`ORMs (Object-Relational Mappers)` do. | ||||
|  | ||||
| @@ -65,7 +64,7 @@ which fields a :class:`User` may have, and what types of data they might store:: | ||||
|         first_name = StringField(max_length=50) | ||||
|         last_name = StringField(max_length=50) | ||||
|  | ||||
| This looks similar to how a the structure of a table would be defined in a | ||||
| This looks similar to how the structure of a table would be defined in a | ||||
| regular ORM. The key difference is that this schema will never be passed on to | ||||
| MongoDB --- this will only be enforced at the application level, making future | ||||
| changes easy to manage. Also, the User documents will be stored in a | ||||
| @@ -96,7 +95,7 @@ using* the new fields we need to support video posts. This fits with the | ||||
| Object-Oriented principle of *inheritance* nicely. We can think of | ||||
| :class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and | ||||
| :class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports | ||||
| this kind of modelling out of the box --- all you need do is turn on inheritance | ||||
| this kind of modeling out of the box --- all you need do is turn on inheritance | ||||
| by setting :attr:`allow_inheritance` to True in the :attr:`meta`:: | ||||
|  | ||||
|     class Post(Document): | ||||
| @@ -128,8 +127,8 @@ link table, we can just store a list of tags in each post. So, for both | ||||
| efficiency and simplicity's sake, we'll store the tags as strings directly | ||||
| within the post, rather than storing references to tags in a separate | ||||
| collection. Especially as tags are generally very short (often even shorter | ||||
| than a document's id), this denormalisation won't impact very strongly on the | ||||
| size of our database. So let's take a look that the code our modified | ||||
| than a document's id), this denormalization won't impact the size of the | ||||
| database very strongly. Let's take a look at the code of our modified | ||||
| :class:`Post` class:: | ||||
|  | ||||
|     class Post(Document): | ||||
| @@ -141,7 +140,7 @@ The :class:`~mongoengine.fields.ListField` object that is used to define a Post' | ||||
| takes a field object as its first argument --- this means that you can have | ||||
| lists of any type of field (including lists). | ||||
|  | ||||
| .. note:: We don't need to modify the specialised post types as they all | ||||
| .. note:: We don't need to modify the specialized post types as they all | ||||
|     inherit from :class:`Post`. | ||||
|  | ||||
| Comments | ||||
| @@ -149,7 +148,7 @@ Comments | ||||
|  | ||||
| A comment is typically associated with *one* post. In a relational database, to | ||||
| display a post with its comments, we would have to retrieve the post from the | ||||
| database, then query the database again for the comments associated with the | ||||
| database and then query the database again for the comments associated with the | ||||
| post. This works, but there is no real reason to be storing the comments | ||||
| separately from their associated posts, other than to work around the | ||||
| relational model. Using MongoDB we can store the comments as a list of | ||||
| @@ -207,7 +206,10 @@ object:: | ||||
|         ross.last_name = 'Lawley' | ||||
|         ross.save() | ||||
|  | ||||
| Now that we've got our user in the database, let's add a couple of posts:: | ||||
| Assign another user to a variable called ``john``, just like we did above with | ||||
| ``ross``. | ||||
|  | ||||
| Now that we've got our users in the database, let's add a couple of posts:: | ||||
|  | ||||
|     post1 = TextPost(title='Fun with MongoEngine', author=john) | ||||
|     post1.content = 'Took a look at MongoEngine today, looks pretty cool.' | ||||
| @@ -219,8 +221,8 @@ Now that we've got our user in the database, let's add a couple of posts:: | ||||
|     post2.tags = ['mongoengine'] | ||||
|     post2.save() | ||||
|  | ||||
| .. note:: If you change a field on a object that has already been saved, then | ||||
|     call :meth:`save` again, the document will be updated. | ||||
| .. note:: If you change a field on an object that has already been saved and | ||||
|     then call :meth:`save` again, the document will be updated. | ||||
|  | ||||
| Accessing our data | ||||
| ================== | ||||
| @@ -232,17 +234,17 @@ used to access the documents in the database collection associated with that | ||||
| class. So let's see how we can get our posts' titles:: | ||||
|  | ||||
|     for post in Post.objects: | ||||
|         print post.title | ||||
|         print(post.title) | ||||
|  | ||||
| Retrieving type-specific information | ||||
| ------------------------------------ | ||||
|  | ||||
| This will print the titles of our posts, one on each line. But What if we want | ||||
| This will print the titles of our posts, one on each line. But what if we want | ||||
| to access the type-specific data (link_url, content, etc.)? One way is simply | ||||
| to use the :attr:`objects` attribute of a subclass of :class:`Post`:: | ||||
|  | ||||
|     for post in TextPost.objects: | ||||
|         print post.content | ||||
|         print(post.content) | ||||
|  | ||||
| Using TextPost's :attr:`objects` attribute only returns documents that were | ||||
| created using :class:`TextPost`. Actually, there is a more general rule here: | ||||
| @@ -259,16 +261,14 @@ instances of :class:`Post` --- they were instances of the subclass of | ||||
| practice:: | ||||
|  | ||||
|     for post in Post.objects: | ||||
|         print post.title | ||||
|         print '=' * len(post.title) | ||||
|         print(post.title) | ||||
|         print('=' * len(post.title)) | ||||
|  | ||||
|         if isinstance(post, TextPost): | ||||
|             print post.content | ||||
|             print(post.content) | ||||
|  | ||||
|         if isinstance(post, LinkPost): | ||||
|             print 'Link:', post.link_url | ||||
|  | ||||
|         print | ||||
|             print('Link: {}'.format(post.link_url)) | ||||
|  | ||||
| This would print the title of each post, followed by the content if it was a | ||||
| text post, and "Link: <url>" if it was a link post. | ||||
| @@ -283,7 +283,7 @@ your query.  Let's adjust our query so that only posts with the tag "mongodb" | ||||
| are returned:: | ||||
|  | ||||
|     for post in Post.objects(tags='mongodb'): | ||||
|         print post.title | ||||
|         print(post.title) | ||||
|  | ||||
| There are also methods available on :class:`~mongoengine.queryset.QuerySet` | ||||
| objects that allow different results to be returned, for example, calling | ||||
| @@ -292,11 +292,11 @@ the first matched by the query you provide. Aggregation functions may also be | ||||
| used on :class:`~mongoengine.queryset.QuerySet` objects:: | ||||
|  | ||||
|     num_posts = Post.objects(tags='mongodb').count() | ||||
|     print 'Found %d posts with tag "mongodb"' % num_posts | ||||
|     print('Found {} posts with tag "mongodb"'.format(num_posts)) | ||||
|  | ||||
| Learning more about mongoengine | ||||
| Learning more about MongoEngine | ||||
| ------------------------------- | ||||
|  | ||||
| If you got this far you've made a great start, so well done! The next step on | ||||
| your mongoengine journey is the `full user guide <guide/index.html>`_, where you | ||||
| can learn indepth about how to use mongoengine and mongodb. | ||||
| your MongoEngine journey is the `full user guide <guide/index.html>`_, where | ||||
| you can learn in-depth about how to use MongoEngine and MongoDB. | ||||
|   | ||||
| @@ -2,6 +2,83 @@ | ||||
| Upgrading | ||||
| ######### | ||||
|  | ||||
| Development | ||||
| *********** | ||||
| (Fill this out whenever you introduce breaking changes to MongoEngine) | ||||
|  | ||||
| 0.13.0 | ||||
| ****** | ||||
| This release adds Unicode support to the `EmailField` and changes its | ||||
| structure significantly. Previously, email addresses containing Unicode | ||||
| characters didn't work at all. Starting with v0.13.0, domains with Unicode | ||||
| characters are supported out of the box, meaning some emails that previously | ||||
| didn't pass validation now do. Make sure the rest of your application can | ||||
| accept such email addresses. Additionally, if you subclassed the `EmailField` | ||||
| in your application and overrode `EmailField.EMAIL_REGEX`, you will have to | ||||
| adjust your code to override `EmailField.USER_REGEX`, `EmailField.DOMAIN_REGEX`, | ||||
| and potentially `EmailField.UTF8_USER_REGEX`. | ||||
|  | ||||
| 0.12.0 | ||||
| ****** | ||||
| This release includes various fixes for the `BaseQuerySet` methods and how they | ||||
| are chained together. Since version 0.10.1, applying limit/skip/hint/batch_size | ||||
| to an already-existing queryset wouldn't modify the underlying PyMongo cursor. | ||||
| This has been fixed now, so you'll need to make sure that your code didn't rely | ||||
| on the broken implementation. | ||||
|  | ||||
| Additionally, a public `BaseQuerySet.clone_into` has been renamed to a private | ||||
| `_clone_into`. If you directly used that method in your code, you'll need to | ||||
| rename its occurrences. | ||||
|  | ||||
| 0.11.0 | ||||
| ****** | ||||
| This release includes a major rehaul of MongoEngine's code quality and | ||||
| introduces a few breaking changes. It also touches many different parts of | ||||
| the package and although all the changes have been tested and scrutinized, | ||||
| you're encouraged to thoroughly test the upgrade. | ||||
|  | ||||
| The first breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`. | ||||
| If you import or catch this exception, you'll need to rename it in your code. | ||||
|  | ||||
| The second breaking change drops Python v2.6 support. If you run MongoEngine on | ||||
| that Python version, you'll need to upgrade it first. | ||||
|  | ||||
| The third breaking change drops an old backward compatibility measure where | ||||
| `from mongoengine.base import ErrorClass` would work on top of | ||||
| `from mongoengine.errors import ErrorClass` (where `ErrorClass` is e.g. | ||||
| `ValidationError`). If you import any exceptions from `mongoengine.base`, | ||||
| change it to `mongoengine.errors`. | ||||
|  | ||||
| 0.10.8 | ||||
| ****** | ||||
| This version fixed an issue where specifying a MongoDB URI host would override | ||||
| more information than it should. These changes are minor, but they still | ||||
| subtly modify the connection logic and thus you're encouraged to test your | ||||
| MongoDB connection before shipping v0.10.8 in production. | ||||
|  | ||||
| 0.10.7 | ||||
| ****** | ||||
|  | ||||
| `QuerySet.aggregate_sum` and `QuerySet.aggregate_average` are dropped. Use | ||||
| `QuerySet.sum` and `QuerySet.average` instead which use the aggregation framework | ||||
| by default from now on. | ||||
|  | ||||
| 0.9.0 | ||||
| ***** | ||||
|  | ||||
| The 0.8.7 package on PyPI was corrupted. If upgrading from 0.8.7 to 0.9.0 please follow: :: | ||||
|  | ||||
|     pip uninstall pymongo | ||||
|     pip uninstall mongoengine | ||||
|     pip install pymongo==2.8 | ||||
|     pip install mongoengine | ||||
|  | ||||
| 0.8.7 | ||||
| ***** | ||||
|  | ||||
| Calling reload on deleted / nonexistent documents now raises a DoesNotExist | ||||
| exception. | ||||
|  | ||||
|  | ||||
| 0.8.2 to 0.8.3 | ||||
| ************** | ||||
| @@ -257,7 +334,7 @@ update your code like so: :: | ||||
|     [m for m in mammals]                                               # This will return all carnivores | ||||
|  | ||||
| Len iterates the queryset | ||||
| -------------------------- | ||||
| ------------------------- | ||||
|  | ||||
| If you ever did `len(queryset)` it previously did a `count()` under the covers, | ||||
| this caused some unusual issues.  As `len(queryset)` is most often used by | ||||
| @@ -270,7 +347,7 @@ queryset you should upgrade to use count:: | ||||
|     len(Animal.objects(type="mammal")) | ||||
|  | ||||
|     # New code | ||||
|     Animal.objects(type="mammal").count()) | ||||
|     Animal.objects(type="mammal").count() | ||||
|  | ||||
|  | ||||
| .only() now inline with .exclude() | ||||
|   | ||||
| @@ -1,26 +1,36 @@ | ||||
| import document | ||||
| from document import * | ||||
| import fields | ||||
| from fields import * | ||||
| import connection | ||||
| from connection import * | ||||
| import queryset | ||||
| from queryset import * | ||||
| import signals | ||||
| from signals import * | ||||
| from errors import * | ||||
| import errors | ||||
| import django | ||||
| # Import submodules so that we can expose their __all__ | ||||
| from mongoengine import connection | ||||
| from mongoengine import document | ||||
| from mongoengine import errors | ||||
| from mongoengine import fields | ||||
| from mongoengine import queryset | ||||
| from mongoengine import signals | ||||
|  | ||||
| __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + | ||||
|            list(queryset.__all__) + signals.__all__ + list(errors.__all__)) | ||||
| # Import everything from each submodule so that it can be accessed via | ||||
| # mongoengine, e.g. instead of `from mongoengine.connection import connect`, | ||||
| # users can simply use `from mongoengine import connect`, or even | ||||
| # `from mongoengine import *` and then `connect('testdb')`. | ||||
| from mongoengine.connection import * | ||||
| from mongoengine.document import * | ||||
| from mongoengine.errors import * | ||||
| from mongoengine.fields import * | ||||
| from mongoengine.queryset import * | ||||
| from mongoengine.signals import * | ||||
|  | ||||
| VERSION = (0, 8, 4) | ||||
|  | ||||
| __all__ = (list(document.__all__) + list(fields.__all__) + | ||||
|            list(connection.__all__) + list(queryset.__all__) + | ||||
|            list(signals.__all__) + list(errors.__all__)) | ||||
|  | ||||
|  | ||||
| VERSION = (0, 13, 0) | ||||
|  | ||||
|  | ||||
| def get_version(): | ||||
|     if isinstance(VERSION[-1], basestring): | ||||
|         return '.'.join(map(str, VERSION[:-1])) + VERSION[-1] | ||||
|     """Return the VERSION as a string, e.g. for VERSION == (0, 10, 7), | ||||
|     return '0.10.7'. | ||||
|     """ | ||||
|     return '.'.join(map(str, VERSION)) | ||||
|  | ||||
|  | ||||
| __version__ = get_version() | ||||
|   | ||||
| @@ -1,8 +1,28 @@ | ||||
| # Base module is split into several files for convenience. Files inside of | ||||
| # this module should import from a specific submodule (e.g. | ||||
| # `from mongoengine.base.document import BaseDocument`), but all of the | ||||
| # other modules should import directly from the top-level module (e.g. | ||||
| # `from mongoengine.base import BaseDocument`). This approach is cleaner and | ||||
| # also helps with cyclical import errors. | ||||
| from mongoengine.base.common import * | ||||
| from mongoengine.base.datastructures import * | ||||
| from mongoengine.base.document import * | ||||
| from mongoengine.base.fields import * | ||||
| from mongoengine.base.metaclasses import * | ||||
|  | ||||
| # Help with backwards compatibility | ||||
| from mongoengine.errors import * | ||||
| __all__ = ( | ||||
|     # common | ||||
|     'UPDATE_OPERATORS', '_document_registry', 'get_document', | ||||
|  | ||||
|     # datastructures | ||||
|     'BaseDict', 'BaseList', 'EmbeddedDocumentList', | ||||
|  | ||||
|     # document | ||||
|     'BaseDocument', | ||||
|  | ||||
|     # fields | ||||
|     'BaseField', 'ComplexBaseField', 'ObjectIdField', 'GeoJsonBaseField', | ||||
|  | ||||
|     # metaclasses | ||||
|     'DocumentMetaclass', 'TopLevelDocumentMetaclass' | ||||
| ) | ||||
|   | ||||
| @@ -1,13 +1,18 @@ | ||||
| from mongoengine.errors import NotRegistered | ||||
|  | ||||
| __all__ = ('ALLOW_INHERITANCE', 'get_document', '_document_registry') | ||||
| __all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry') | ||||
|  | ||||
|  | ||||
| UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push', | ||||
|                         'push_all', 'pull', 'pull_all', 'add_to_set', | ||||
|                         'set_on_insert', 'min', 'max', 'rename']) | ||||
|  | ||||
| ALLOW_INHERITANCE = False | ||||
|  | ||||
| _document_registry = {} | ||||
|  | ||||
|  | ||||
| def get_document(name): | ||||
|     """Get a document class by name.""" | ||||
|     doc = _document_registry.get(name, None) | ||||
|     if not doc: | ||||
|         # Possible old style name | ||||
|   | ||||
| @@ -1,12 +1,16 @@ | ||||
| import itertools | ||||
| import weakref | ||||
| from mongoengine.common import _import_class | ||||
|  | ||||
| __all__ = ("BaseDict", "BaseList") | ||||
| import six | ||||
|  | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import DoesNotExist, MultipleObjectsReturned | ||||
|  | ||||
| __all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList') | ||||
|  | ||||
|  | ||||
| class BaseDict(dict): | ||||
|     """A special dict so we can watch any changes | ||||
|     """ | ||||
|     """A special dict so we can watch any changes.""" | ||||
|  | ||||
|     _dereferenced = False | ||||
|     _instance = None | ||||
| @@ -19,31 +23,39 @@ class BaseDict(dict): | ||||
|         if isinstance(instance, (Document, EmbeddedDocument)): | ||||
|             self._instance = weakref.proxy(instance) | ||||
|         self._name = name | ||||
|         return super(BaseDict, self).__init__(dict_items) | ||||
|         super(BaseDict, self).__init__(dict_items) | ||||
|  | ||||
|     def __getitem__(self, *args, **kwargs): | ||||
|         value = super(BaseDict, self).__getitem__(*args, **kwargs) | ||||
|     def __getitem__(self, key, *args, **kwargs): | ||||
|         value = super(BaseDict, self).__getitem__(key) | ||||
|  | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|         if isinstance(value, EmbeddedDocument) and value._instance is None: | ||||
|             value._instance = self._instance | ||||
|         elif not isinstance(value, BaseDict) and isinstance(value, dict): | ||||
|             value = BaseDict(value, None, '%s.%s' % (self._name, key)) | ||||
|             super(BaseDict, self).__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         elif not isinstance(value, BaseList) and isinstance(value, list): | ||||
|             value = BaseList(value, None, '%s.%s' % (self._name, key)) | ||||
|             super(BaseDict, self).__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         return value | ||||
|  | ||||
|     def __setitem__(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseDict, self).__setitem__(*args, **kwargs) | ||||
|     def __setitem__(self, key, value, *args, **kwargs): | ||||
|         self._mark_as_changed(key) | ||||
|         return super(BaseDict, self).__setitem__(key, value) | ||||
|  | ||||
|     def __delete__(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseDict, self).__delete__(*args, **kwargs) | ||||
|  | ||||
|     def __delitem__(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseDict, self).__delitem__(*args, **kwargs) | ||||
|     def __delitem__(self, key, *args, **kwargs): | ||||
|         self._mark_as_changed(key) | ||||
|         return super(BaseDict, self).__delitem__(key) | ||||
|  | ||||
|     def __delattr__(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseDict, self).__delattr__(*args, **kwargs) | ||||
|     def __delattr__(self, key, *args, **kwargs): | ||||
|         self._mark_as_changed(key) | ||||
|         return super(BaseDict, self).__delattr__(key) | ||||
|  | ||||
|     def __getstate__(self): | ||||
|         self.instance = None | ||||
| @@ -56,7 +68,7 @@ class BaseDict(dict): | ||||
|  | ||||
|     def clear(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseDict, self).clear(*args, **kwargs) | ||||
|         return super(BaseDict, self).clear() | ||||
|  | ||||
|     def pop(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
| @@ -64,20 +76,26 @@ class BaseDict(dict): | ||||
|  | ||||
|     def popitem(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseDict, self).popitem(*args, **kwargs) | ||||
|         return super(BaseDict, self).popitem() | ||||
|  | ||||
|     def setdefault(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseDict, self).setdefault(*args, **kwargs) | ||||
|  | ||||
|     def update(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseDict, self).update(*args, **kwargs) | ||||
|  | ||||
|     def _mark_as_changed(self): | ||||
|     def _mark_as_changed(self, key=None): | ||||
|         if hasattr(self._instance, '_mark_as_changed'): | ||||
|             if key: | ||||
|                 self._instance._mark_as_changed('%s.%s' % (self._name, key)) | ||||
|             else: | ||||
|                 self._instance._mark_as_changed(self._name) | ||||
|  | ||||
|  | ||||
| class BaseList(list): | ||||
|     """A special list so we can watch any changes | ||||
|     """ | ||||
|     """A special list so we can watch any changes.""" | ||||
|  | ||||
|     _dereferenced = False | ||||
|     _instance = None | ||||
| @@ -90,23 +108,38 @@ class BaseList(list): | ||||
|         if isinstance(instance, (Document, EmbeddedDocument)): | ||||
|             self._instance = weakref.proxy(instance) | ||||
|         self._name = name | ||||
|         return super(BaseList, self).__init__(list_items) | ||||
|         super(BaseList, self).__init__(list_items) | ||||
|  | ||||
|     def __getitem__(self, *args, **kwargs): | ||||
|         value = super(BaseList, self).__getitem__(*args, **kwargs) | ||||
|     def __getitem__(self, key, *args, **kwargs): | ||||
|         value = super(BaseList, self).__getitem__(key) | ||||
|  | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|         if isinstance(value, EmbeddedDocument) and value._instance is None: | ||||
|             value._instance = self._instance | ||||
|         elif not isinstance(value, BaseDict) and isinstance(value, dict): | ||||
|             value = BaseDict(value, None, '%s.%s' % (self._name, key)) | ||||
|             super(BaseList, self).__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         elif not isinstance(value, BaseList) and isinstance(value, list): | ||||
|             value = BaseList(value, None, '%s.%s' % (self._name, key)) | ||||
|             super(BaseList, self).__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         return value | ||||
|  | ||||
|     def __setitem__(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).__setitem__(*args, **kwargs) | ||||
|     def __iter__(self): | ||||
|         for i in xrange(self.__len__()): | ||||
|             yield self[i] | ||||
|  | ||||
|     def __delitem__(self, *args, **kwargs): | ||||
|     def __setitem__(self, key, value, *args, **kwargs): | ||||
|         if isinstance(key, slice): | ||||
|             self._mark_as_changed() | ||||
|         return super(BaseList, self).__delitem__(*args, **kwargs) | ||||
|         else: | ||||
|             self._mark_as_changed(key) | ||||
|         return super(BaseList, self).__setitem__(key, value) | ||||
|  | ||||
|     def __delitem__(self, key, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).__delitem__(key) | ||||
|  | ||||
|     def __setslice__(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
| @@ -125,6 +158,14 @@ class BaseList(list): | ||||
|         self = state | ||||
|         return self | ||||
|  | ||||
|     def __iadd__(self, other): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).__iadd__(other) | ||||
|  | ||||
|     def __imul__(self, other): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).__imul__(other) | ||||
|  | ||||
|     def append(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).append(*args, **kwargs) | ||||
| @@ -147,12 +188,299 @@ class BaseList(list): | ||||
|  | ||||
|     def reverse(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).reverse(*args, **kwargs) | ||||
|         return super(BaseList, self).reverse() | ||||
|  | ||||
|     def sort(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).sort(*args, **kwargs) | ||||
|  | ||||
|     def _mark_as_changed(self): | ||||
|     def _mark_as_changed(self, key=None): | ||||
|         if hasattr(self._instance, '_mark_as_changed'): | ||||
|             if key: | ||||
|                 self._instance._mark_as_changed( | ||||
|                     '%s.%s' % (self._name, key % len(self)) | ||||
|                 ) | ||||
|             else: | ||||
|                 self._instance._mark_as_changed(self._name) | ||||
|  | ||||
|  | ||||
| class EmbeddedDocumentList(BaseList): | ||||
|  | ||||
|     @classmethod | ||||
|     def __match_all(cls, embedded_doc, kwargs): | ||||
|         """Return True if a given embedded doc matches all the filter | ||||
|         kwargs. If it doesn't return False. | ||||
|         """ | ||||
|         for key, expected_value in kwargs.items(): | ||||
|             doc_val = getattr(embedded_doc, key) | ||||
|             if doc_val != expected_value and six.text_type(doc_val) != expected_value: | ||||
|                 return False | ||||
|         return True | ||||
|  | ||||
|     @classmethod | ||||
|     def __only_matches(cls, embedded_docs, kwargs): | ||||
|         """Return embedded docs that match the filter kwargs.""" | ||||
|         if not kwargs: | ||||
|             return embedded_docs | ||||
|         return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)] | ||||
|  | ||||
|     def __init__(self, list_items, instance, name): | ||||
|         super(EmbeddedDocumentList, self).__init__(list_items, instance, name) | ||||
|         self._instance = instance | ||||
|  | ||||
|     def filter(self, **kwargs): | ||||
|         """ | ||||
|         Filters the list by only including embedded documents with the | ||||
|         given keyword arguments. | ||||
|  | ||||
|         :param kwargs: The keyword arguments corresponding to the fields to | ||||
|          filter on. *Multiple arguments are treated as if they are ANDed | ||||
|          together.* | ||||
|         :return: A new ``EmbeddedDocumentList`` containing the matching | ||||
|          embedded documents. | ||||
|  | ||||
|         Raises ``AttributeError`` if a given keyword is not a valid field for | ||||
|         the embedded document class. | ||||
|         """ | ||||
|         values = self.__only_matches(self, kwargs) | ||||
|         return EmbeddedDocumentList(values, self._instance, self._name) | ||||
|  | ||||
|     def exclude(self, **kwargs): | ||||
|         """ | ||||
|         Filters the list by excluding embedded documents with the given | ||||
|         keyword arguments. | ||||
|  | ||||
|         :param kwargs: The keyword arguments corresponding to the fields to | ||||
|          exclude on. *Multiple arguments are treated as if they are ANDed | ||||
|          together.* | ||||
|         :return: A new ``EmbeddedDocumentList`` containing the non-matching | ||||
|          embedded documents. | ||||
|  | ||||
|         Raises ``AttributeError`` if a given keyword is not a valid field for | ||||
|         the embedded document class. | ||||
|         """ | ||||
|         exclude = self.__only_matches(self, kwargs) | ||||
|         values = [item for item in self if item not in exclude] | ||||
|         return EmbeddedDocumentList(values, self._instance, self._name) | ||||
|  | ||||
|     def count(self): | ||||
|         """ | ||||
|         The number of embedded documents in the list. | ||||
|  | ||||
|         :return: The length of the list, equivalent to the result of ``len()``. | ||||
|         """ | ||||
|         return len(self) | ||||
|  | ||||
|     def get(self, **kwargs): | ||||
|         """ | ||||
|         Retrieves an embedded document determined by the given keyword | ||||
|         arguments. | ||||
|  | ||||
|         :param kwargs: The keyword arguments corresponding to the fields to | ||||
|          search on. *Multiple arguments are treated as if they are ANDed | ||||
|          together.* | ||||
|         :return: The embedded document matched by the given keyword arguments. | ||||
|  | ||||
|         Raises ``DoesNotExist`` if the arguments used to query an embedded | ||||
|         document returns no results. ``MultipleObjectsReturned`` if more | ||||
|         than one result is returned. | ||||
|         """ | ||||
|         values = self.__only_matches(self, kwargs) | ||||
|         if len(values) == 0: | ||||
|             raise DoesNotExist( | ||||
|                 '%s matching query does not exist.' % self._name | ||||
|             ) | ||||
|         elif len(values) > 1: | ||||
|             raise MultipleObjectsReturned( | ||||
|                 '%d items returned, instead of 1' % len(values) | ||||
|             ) | ||||
|  | ||||
|         return values[0] | ||||
|  | ||||
|     def first(self): | ||||
|         """Return the first embedded document in the list, or ``None`` | ||||
|         if empty. | ||||
|         """ | ||||
|         if len(self) > 0: | ||||
|             return self[0] | ||||
|  | ||||
|     def create(self, **values): | ||||
|         """ | ||||
|         Creates a new embedded document and saves it to the database. | ||||
|  | ||||
|         .. note:: | ||||
|             The embedded document changes are not automatically saved | ||||
|             to the database after calling this method. | ||||
|  | ||||
|         :param values: A dictionary of values for the embedded document. | ||||
|         :return: The new embedded document instance. | ||||
|         """ | ||||
|         name = self._name | ||||
|         EmbeddedClass = self._instance._fields[name].field.document_type_obj | ||||
|         self._instance[self._name].append(EmbeddedClass(**values)) | ||||
|  | ||||
|         return self._instance[self._name][-1] | ||||
|  | ||||
|     def save(self, *args, **kwargs): | ||||
|         """ | ||||
|         Saves the ancestor document. | ||||
|  | ||||
|         :param args: Arguments passed up to the ancestor Document's save | ||||
|          method. | ||||
|         :param kwargs: Keyword arguments passed up to the ancestor Document's | ||||
|          save method. | ||||
|         """ | ||||
|         self._instance.save(*args, **kwargs) | ||||
|  | ||||
|     def delete(self): | ||||
|         """ | ||||
|         Deletes the embedded documents from the database. | ||||
|  | ||||
|         .. note:: | ||||
|             The embedded document changes are not automatically saved | ||||
|             to the database after calling this method. | ||||
|  | ||||
|         :return: The number of entries deleted. | ||||
|         """ | ||||
|         values = list(self) | ||||
|         for item in values: | ||||
|             self._instance[self._name].remove(item) | ||||
|  | ||||
|         return len(values) | ||||
|  | ||||
|     def update(self, **update): | ||||
|         """ | ||||
|         Updates the embedded documents with the given update values. | ||||
|  | ||||
|         .. note:: | ||||
|             The embedded document changes are not automatically saved | ||||
|             to the database after calling this method. | ||||
|  | ||||
|         :param update: A dictionary of update values to apply to each | ||||
|          embedded document. | ||||
|         :return: The number of entries updated. | ||||
|         """ | ||||
|         if len(update) == 0: | ||||
|             return 0 | ||||
|         values = list(self) | ||||
|         for item in values: | ||||
|             for k, v in update.items(): | ||||
|                 setattr(item, k, v) | ||||
|  | ||||
|         return len(values) | ||||
|  | ||||
|  | ||||
class StrictDict(object):
    """A memory-efficient, fixed-schema mapping backed by ``__slots__``.

    Use :meth:`create` to build a subclass whose instances accept only a
    given set of keys. Public keys that would shadow mapping methods
    (``get``, ``pop``, ...) are stored internally under a ``_reserved_``
    prefix but are read and written under their public names.
    """

    __slots__ = ()
    # Public key names that clash with methods defined on this class.
    _special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create'])
    # Cache of generated subclasses, keyed by their frozenset of slot names.
    _classes = {}

    @classmethod
    def _attr_for(cls, key):
        """Return the internal attribute name used to store `key`."""
        return '_reserved_' + key if key in cls._special_fields else key

    def __init__(self, **kwargs):
        # Go through __setitem__ so special keys are mapped to their
        # reserved attribute names (a plain setattr with the raw name
        # would raise for them on a slotted class). Using items() keeps
        # this working on Python 3 as well, unlike iteritems().
        for k, v in kwargs.items():
            self[k] = v

    def __getitem__(self, key):
        try:
            return getattr(self, self._attr_for(key))
        except AttributeError:
            raise KeyError(key)

    def __setitem__(self, key, value):
        return setattr(self, self._attr_for(key), value)

    def __contains__(self, key):
        # Map special keys first; otherwise hasattr() would find the
        # bound method (e.g. `'get' in d` was previously always True).
        return hasattr(self, self._attr_for(key))

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def pop(self, key, default=None):
        v = self.get(key, default)
        try:
            # Delete the mapped attribute; with the raw name, special
            # keys were returned but never actually removed.
            delattr(self, self._attr_for(key))
        except AttributeError:
            pass
        return v

    def iteritems(self):
        for key in self:
            yield key, self[key]

    def items(self):
        return [(k, self[k]) for k in iter(self)]

    def iterkeys(self):
        return iter(self)

    def keys(self):
        return list(iter(self))

    def __iter__(self):
        # Yield the *public* key names (strip the reserved prefix) for
        # every slot that currently holds a value; previously the
        # internal '_reserved_*' names leaked out of keys()/items().
        for attr in self.__slots__:
            if hasattr(self, attr):
                if attr.startswith('_reserved_'):
                    yield attr[len('_reserved_'):]
                else:
                    yield attr

    def __len__(self):
        return len(list(self.iteritems()))

    def __eq__(self, other):
        return self.items() == other.items()

    def __ne__(self, other):
        return self.items() != other.items()

    @classmethod
    def create(cls, allowed_keys):
        """Return a StrictDict subclass restricted to `allowed_keys`.

        Generated classes are cached, so repeated calls with the same
        key set return the same class object.
        """
        allowed_keys_tuple = tuple(cls._attr_for(k) for k in allowed_keys)
        allowed_keys = frozenset(allowed_keys_tuple)
        if allowed_keys not in cls._classes:
            class SpecificStrictDict(cls):
                __slots__ = allowed_keys_tuple

                def __repr__(self):
                    return '{%s}' % ', '.join('"{0!s}": {1!r}'.format(k, v)
                                              for k, v in self.items())

            cls._classes[allowed_keys] = SpecificStrictDict
        return cls._classes[allowed_keys]
|  | ||||
|  | ||||
class SemiStrictDict(StrictDict):
    """A StrictDict variant that also accepts arbitrary extra keys.

    Values for keys declared via :meth:`create` live in ``__slots__``;
    anything else goes into the lazily created ``_extras`` dict.
    """

    __slots__ = ('_extras', )
    _classes = {}

    def __getattr__(self, attr):
        # Invoked only when normal attribute lookup fails, i.e. for
        # non-slot keys. Chain up first (and *return* the result -- the
        # original dropped it; this only worked by accident because
        # StrictDict defines no __getattr__, so the super lookup itself
        # raises AttributeError), then fall back to the extras dict.
        try:
            return super(SemiStrictDict, self).__getattr__(attr)
        except AttributeError:
            try:
                # __getattribute__ avoids recursing into this method
                # when _extras has not been created yet.
                return self.__getattribute__('_extras')[attr]
            except KeyError as e:
                raise AttributeError(e)

    def __setattr__(self, attr, value):
        try:
            super(SemiStrictDict, self).__setattr__(attr, value)
        except AttributeError:
            # Not a declared slot: store it in _extras, creating the
            # dict on first use.
            try:
                self._extras[attr] = value
            except AttributeError:
                self._extras = {attr: value}

    def __delattr__(self, attr):
        try:
            super(SemiStrictDict, self).__delattr__(attr)
        except AttributeError:
            try:
                del self._extras[attr]
            except KeyError as e:
                raise AttributeError(e)

    def __iter__(self):
        # Slot keys first, then extras; an empty tuple stands in for
        # the missing _extras dict.
        try:
            extras_iter = iter(self.__getattribute__('_extras'))
        except AttributeError:
            extras_iter = ()
        return itertools.chain(super(SemiStrictDict, self).__iter__(), extras_iter)
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -4,14 +4,17 @@ import weakref | ||||
|  | ||||
| from bson import DBRef, ObjectId, SON | ||||
| import pymongo | ||||
| import six | ||||
|  | ||||
| from mongoengine.base.common import UPDATE_OPERATORS | ||||
| from mongoengine.base.datastructures import (BaseDict, BaseList, | ||||
|                                              EmbeddedDocumentList) | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import ValidationError | ||||
|  | ||||
| from mongoengine.base.common import ALLOW_INHERITANCE | ||||
| from mongoengine.base.datastructures import BaseDict, BaseList | ||||
|  | ||||
| __all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField") | ||||
| __all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField', | ||||
|            'GeoJsonBaseField') | ||||
|  | ||||
|  | ||||
| class BaseField(object): | ||||
| @@ -20,7 +23,6 @@ class BaseField(object): | ||||
|  | ||||
|     .. versionchanged:: 0.5 - added verbose and help text | ||||
|     """ | ||||
|  | ||||
|     name = None | ||||
|     _geo_index = False | ||||
|     _auto_gen = False  # Call `generate` to generate a value | ||||
| @@ -34,16 +36,16 @@ class BaseField(object): | ||||
|  | ||||
|     def __init__(self, db_field=None, name=None, required=False, default=None, | ||||
|                  unique=False, unique_with=None, primary_key=False, | ||||
|                  validation=None, choices=None, verbose_name=None, | ||||
|                  help_text=None): | ||||
|                  validation=None, choices=None, null=False, sparse=False, | ||||
|                  **kwargs): | ||||
|         """ | ||||
|         :param db_field: The database field to store this field in | ||||
|             (defaults to the name of the field) | ||||
|         :param name: Depreciated - use db_field | ||||
|         :param name: Deprecated - use db_field | ||||
|         :param required: If the field is required. Whether it has to have a | ||||
|             value or not. Defaults to False. | ||||
|         :param default: (optional) The default value for this field if no value | ||||
|             has been set (or if the value has been unset).  It Can be a | ||||
|             has been set (or if the value has been unset).  It can be a | ||||
|             callable. | ||||
|         :param unique: Is the field value unique or not.  Defaults to False. | ||||
|         :param unique_with: (optional) The other field this field should be | ||||
| @@ -53,15 +55,20 @@ class BaseField(object): | ||||
|             field.  Generally this is deprecated in favour of the | ||||
|             `FIELD.validate` method | ||||
|         :param choices: (optional) The valid choices | ||||
|         :param verbose_name: (optional)  The verbose name for the field. | ||||
|             Designed to be human readable and is often used when generating | ||||
|             model forms from the document model. | ||||
|         :param help_text: (optional) The help text for this field and is often | ||||
|             used when generating model forms from the document model. | ||||
|         :param null: (optional) Is the field value can be null. If no and there is a default value | ||||
|             then the default value is set | ||||
|         :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False` | ||||
|             means that uniqueness won't be enforced for `None` values | ||||
|         :param **kwargs: (optional) Arbitrary indirection-free metadata for | ||||
|             this field can be supplied as additional keyword arguments and | ||||
|             accessed as attributes of the field. Must not conflict with any | ||||
|             existing attributes. Common metadata includes `verbose_name` and | ||||
|             `help_text`. | ||||
|         """ | ||||
|         self.db_field = (db_field or name) if not primary_key else '_id' | ||||
|  | ||||
|         if name: | ||||
|             msg = "Fields' 'name' attribute deprecated in favour of 'db_field'" | ||||
|             msg = 'Field\'s "name" attribute deprecated in favour of "db_field"' | ||||
|             warnings.warn(msg, DeprecationWarning) | ||||
|         self.required = required or primary_key | ||||
|         self.default = default | ||||
| @@ -70,8 +77,30 @@ class BaseField(object): | ||||
|         self.primary_key = primary_key | ||||
|         self.validation = validation | ||||
|         self.choices = choices | ||||
|         self.verbose_name = verbose_name | ||||
|         self.help_text = help_text | ||||
|         self.null = null | ||||
|         self.sparse = sparse | ||||
|         self._owner_document = None | ||||
|  | ||||
|         # Validate the db_field | ||||
|         if isinstance(self.db_field, six.string_types) and ( | ||||
|             '.' in self.db_field or | ||||
|             '\0' in self.db_field or | ||||
|             self.db_field.startswith('$') | ||||
|         ): | ||||
|             raise ValueError( | ||||
|                 'field names cannot contain dots (".") or null characters ' | ||||
|                 '("\\0"), and they must not start with a dollar sign ("$").' | ||||
|             ) | ||||
|  | ||||
|         # Detect and report conflicts between metadata and base properties. | ||||
|         conflicts = set(dir(self)) & set(kwargs) | ||||
|         if conflicts: | ||||
|             raise TypeError('%s already has attribute(s): %s' % ( | ||||
|                 self.__class__.__name__, ', '.join(conflicts))) | ||||
|  | ||||
|         # Assign metadata to the instance | ||||
|         # This efficient method is available because no __slots__ are defined. | ||||
|         self.__dict__.update(kwargs) | ||||
|  | ||||
|         # Adjust the appropriate creation counter, and save our local copy. | ||||
|         if self.db_field == '_id': | ||||
| @@ -89,20 +118,18 @@ class BaseField(object): | ||||
|             return self | ||||
|  | ||||
|         # Get value from document instance if available | ||||
|         value = instance._data.get(self.name) | ||||
|  | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|         if isinstance(value, EmbeddedDocument) and value._instance is None: | ||||
|             value._instance = weakref.proxy(instance) | ||||
|         return value | ||||
|         return instance._data.get(self.name) | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|         """Descriptor for assigning a value to a field in a document. | ||||
|         """ | ||||
|  | ||||
|         # If setting to None and theres a default | ||||
|         # If setting to None and there is a default | ||||
|         # Then set the value to the default value | ||||
|         if value is None and self.default is not None: | ||||
|         if value is None: | ||||
|             if self.null: | ||||
|                 value = None | ||||
|             elif self.default is not None: | ||||
|                 value = self.default | ||||
|                 if callable(value): | ||||
|                     value = value() | ||||
| @@ -112,56 +139,80 @@ class BaseField(object): | ||||
|                 if (self.name not in instance._data or | ||||
|                         instance._data[self.name] != value): | ||||
|                     instance._mark_as_changed(self.name) | ||||
|             except: | ||||
|             except Exception: | ||||
|                 # Values cant be compared eg: naive and tz datetimes | ||||
|                 # So mark it as changed | ||||
|                 instance._mark_as_changed(self.name) | ||||
|  | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|         if isinstance(value, EmbeddedDocument): | ||||
|             value._instance = weakref.proxy(instance) | ||||
|         elif isinstance(value, (list, tuple)): | ||||
|             for v in value: | ||||
|                 if isinstance(v, EmbeddedDocument): | ||||
|                     v._instance = weakref.proxy(instance) | ||||
|         instance._data[self.name] = value | ||||
|  | ||||
|     def error(self, message="", errors=None, field_name=None): | ||||
|         """Raises a ValidationError. | ||||
|         """ | ||||
|     def error(self, message='', errors=None, field_name=None): | ||||
|         """Raise a ValidationError.""" | ||||
|         field_name = field_name if field_name else self.name | ||||
|         raise ValidationError(message, errors=errors, field_name=field_name) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         """Convert a MongoDB-compatible type to a Python type. | ||||
|         """ | ||||
|         """Convert a MongoDB-compatible type to a Python type.""" | ||||
|         return value | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         """Convert a Python type to a MongoDB-compatible type. | ||||
|         """ | ||||
|         """Convert a Python type to a MongoDB-compatible type.""" | ||||
|         return self.to_python(value) | ||||
|  | ||||
|     def _to_mongo_safe_call(self, value, use_db_field=True, fields=None): | ||||
|         """Helper method to call to_mongo with proper inputs.""" | ||||
|         f_inputs = self.to_mongo.__code__.co_varnames | ||||
|         ex_vars = {} | ||||
|         if 'fields' in f_inputs: | ||||
|             ex_vars['fields'] = fields | ||||
|  | ||||
|         if 'use_db_field' in f_inputs: | ||||
|             ex_vars['use_db_field'] = use_db_field | ||||
|  | ||||
|         return self.to_mongo(value, **ex_vars) | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         """Prepare a value that is being used in a query for PyMongo. | ||||
|         """ | ||||
|         """Prepare a value that is being used in a query for PyMongo.""" | ||||
|         if op in UPDATE_OPERATORS: | ||||
|             self.validate(value) | ||||
|         return value | ||||
|  | ||||
|     def validate(self, value, clean=True): | ||||
|         """Perform validation on a value. | ||||
|         """ | ||||
|         """Perform validation on a value.""" | ||||
|         pass | ||||
|  | ||||
|     def _validate(self, value, **kwargs): | ||||
|     def _validate_choices(self, value): | ||||
|         Document = _import_class('Document') | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|         # check choices | ||||
|  | ||||
|         choice_list = self.choices | ||||
|         if isinstance(next(iter(choice_list)), (list, tuple)): | ||||
|             # next(iter) is useful for sets | ||||
|             choice_list = [k for k, _ in choice_list] | ||||
|  | ||||
|         # Choices which are other types of Documents | ||||
|         if isinstance(value, (Document, EmbeddedDocument)): | ||||
|             if not any(isinstance(value, c) for c in choice_list): | ||||
|                 self.error( | ||||
|                     'Value must be an instance of %s' % ( | ||||
|                         six.text_type(choice_list) | ||||
|                     ) | ||||
|                 ) | ||||
|         # Choices which are types other than Documents | ||||
|         elif value not in choice_list: | ||||
|             self.error('Value must be one of %s' % six.text_type(choice_list)) | ||||
|  | ||||
|     def _validate(self, value, **kwargs): | ||||
|         # Check the Choices Constraint | ||||
|         if self.choices: | ||||
|             is_cls = isinstance(value, (Document, EmbeddedDocument)) | ||||
|             value_to_check = value.__class__ if is_cls else value | ||||
|             err_msg = 'an instance' if is_cls else 'one' | ||||
|             if isinstance(self.choices[0], (list, tuple)): | ||||
|                 option_keys = [k for k, v in self.choices] | ||||
|                 if value_to_check not in option_keys: | ||||
|                     msg = ('Value must be %s of %s' % | ||||
|                            (err_msg, unicode(option_keys))) | ||||
|                     self.error(msg) | ||||
|             elif value_to_check not in self.choices: | ||||
|                 msg = ('Value must be %s of %s' % | ||||
|                        (err_msg, unicode(self.choices))) | ||||
|                 self.error(msg) | ||||
|             self._validate_choices(value) | ||||
|  | ||||
|         # check validation argument | ||||
|         if self.validation is not None: | ||||
| @@ -174,6 +225,17 @@ class BaseField(object): | ||||
|  | ||||
|         self.validate(value, **kwargs) | ||||
|  | ||||
|     @property | ||||
|     def owner_document(self): | ||||
|         return self._owner_document | ||||
|  | ||||
|     def _set_owner_document(self, owner_document): | ||||
|         self._owner_document = owner_document | ||||
|  | ||||
|     @owner_document.setter | ||||
|     def owner_document(self, owner_document): | ||||
|         self._set_owner_document(owner_document) | ||||
|  | ||||
|  | ||||
| class ComplexBaseField(BaseField): | ||||
|     """Handles complex fields, such as lists / dictionaries. | ||||
| @@ -188,22 +250,22 @@ class ComplexBaseField(BaseField): | ||||
|     field = None | ||||
|  | ||||
|     def __get__(self, instance, owner): | ||||
|         """Descriptor to automatically dereference references. | ||||
|         """ | ||||
|         """Descriptor to automatically dereference references.""" | ||||
|         if instance is None: | ||||
|             # Document class being used rather than a document object | ||||
|             return self | ||||
|  | ||||
|         ReferenceField = _import_class('ReferenceField') | ||||
|         GenericReferenceField = _import_class('GenericReferenceField') | ||||
|         EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') | ||||
|         dereference = (self._auto_dereference and | ||||
|                        (self.field is None or isinstance(self.field, | ||||
|                                                          (GenericReferenceField, ReferenceField)))) | ||||
|  | ||||
|         _dereference = _import_class("DeReference")() | ||||
|         _dereference = _import_class('DeReference')() | ||||
|  | ||||
|         self._auto_dereference = instance._fields[self.name]._auto_dereference | ||||
|         if instance._initialised and dereference: | ||||
|         if instance._initialised and dereference and instance._data.get(self.name): | ||||
|             instance._data[self.name] = _dereference( | ||||
|                 instance._data.get(self.name), max_depth=1, instance=instance, | ||||
|                 name=self.name | ||||
| @@ -212,8 +274,11 @@ class ComplexBaseField(BaseField): | ||||
|         value = super(ComplexBaseField, self).__get__(instance, owner) | ||||
|  | ||||
|         # Convert lists / values so we can watch for any changes on them | ||||
|         if (isinstance(value, (list, tuple)) and | ||||
|            not isinstance(value, BaseList)): | ||||
|         if isinstance(value, (list, tuple)): | ||||
|             if (issubclass(type(self), EmbeddedDocumentListField) and | ||||
|                     not isinstance(value, EmbeddedDocumentList)): | ||||
|                 value = EmbeddedDocumentList(value, instance, self.name) | ||||
|             elif not isinstance(value, BaseList): | ||||
|                 value = BaseList(value, instance, self.name) | ||||
|             instance._data[self.name] = value | ||||
|         elif isinstance(value, dict) and not isinstance(value, BaseDict): | ||||
| @@ -221,8 +286,8 @@ class ComplexBaseField(BaseField): | ||||
|             instance._data[self.name] = value | ||||
|  | ||||
|         if (self._auto_dereference and instance._initialised and | ||||
|            isinstance(value, (BaseList, BaseDict)) | ||||
|            and not value._dereferenced): | ||||
|                 isinstance(value, (BaseList, BaseDict)) and | ||||
|                 not value._dereferenced): | ||||
|             value = _dereference( | ||||
|                 value, max_depth=1, instance=instance, name=self.name | ||||
|             ) | ||||
| @@ -232,11 +297,8 @@ class ComplexBaseField(BaseField): | ||||
|         return value | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         """Convert a MongoDB-compatible type to a Python type. | ||||
|         """ | ||||
|         Document = _import_class('Document') | ||||
|  | ||||
|         if isinstance(value, basestring): | ||||
|         """Convert a MongoDB-compatible type to a Python type.""" | ||||
|         if isinstance(value, six.string_types): | ||||
|             return value | ||||
|  | ||||
|         if hasattr(value, 'to_python'): | ||||
| @@ -246,14 +308,16 @@ class ComplexBaseField(BaseField): | ||||
|         if not hasattr(value, 'items'): | ||||
|             try: | ||||
|                 is_list = True | ||||
|                 value = dict([(k, v) for k, v in enumerate(value)]) | ||||
|                 value = {k: v for k, v in enumerate(value)} | ||||
|             except TypeError:  # Not iterable return the value | ||||
|                 return value | ||||
|  | ||||
|         if self.field: | ||||
|             value_dict = dict([(key, self.field.to_python(item)) | ||||
|                                for key, item in value.items()]) | ||||
|             self.field._auto_dereference = self._auto_dereference | ||||
|             value_dict = {key: self.field.to_python(item) | ||||
|                           for key, item in value.items()} | ||||
|         else: | ||||
|             Document = _import_class('Document') | ||||
|             value_dict = {} | ||||
|             for k, v in value.items(): | ||||
|                 if isinstance(v, Document): | ||||
| @@ -269,27 +333,26 @@ class ComplexBaseField(BaseField): | ||||
|                     value_dict[k] = self.to_python(v) | ||||
|  | ||||
|         if is_list:  # Convert back to a list | ||||
|             return [v for k, v in sorted(value_dict.items(), | ||||
|             return [v for _, v in sorted(value_dict.items(), | ||||
|                                          key=operator.itemgetter(0))] | ||||
|         return value_dict | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         """Convert a Python type to a MongoDB-compatible type. | ||||
|         """ | ||||
|         Document = _import_class("Document") | ||||
|         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||
|         GenericReferenceField = _import_class("GenericReferenceField") | ||||
|     def to_mongo(self, value, use_db_field=True, fields=None): | ||||
|         """Convert a Python type to a MongoDB-compatible type.""" | ||||
|         Document = _import_class('Document') | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|         GenericReferenceField = _import_class('GenericReferenceField') | ||||
|  | ||||
|         if isinstance(value, basestring): | ||||
|         if isinstance(value, six.string_types): | ||||
|             return value | ||||
|  | ||||
|         if hasattr(value, 'to_mongo'): | ||||
|             if isinstance(value, Document): | ||||
|                 return GenericReferenceField().to_mongo(value) | ||||
|             cls = value.__class__ | ||||
|             val = value.to_mongo() | ||||
|             # If we its a document thats not inherited add _cls | ||||
|             if (isinstance(value, EmbeddedDocument)): | ||||
|             val = value.to_mongo(use_db_field, fields) | ||||
|             # If it's a document that is not inherited add _cls | ||||
|             if isinstance(value, EmbeddedDocument): | ||||
|                 val['_cls'] = cls.__name__ | ||||
|             return val | ||||
|  | ||||
| @@ -297,13 +360,15 @@ class ComplexBaseField(BaseField): | ||||
|         if not hasattr(value, 'items'): | ||||
|             try: | ||||
|                 is_list = True | ||||
|                 value = dict([(k, v) for k, v in enumerate(value)]) | ||||
|                 value = {k: v for k, v in enumerate(value)} | ||||
|             except TypeError:  # Not iterable return the value | ||||
|                 return value | ||||
|  | ||||
|         if self.field: | ||||
|             value_dict = dict([(key, self.field.to_mongo(item)) | ||||
|                                for key, item in value.iteritems()]) | ||||
|             value_dict = { | ||||
|                 key: self.field._to_mongo_safe_call(item, use_db_field, fields) | ||||
|                 for key, item in value.iteritems() | ||||
|             } | ||||
|         else: | ||||
|             value_dict = {} | ||||
|             for k, v in value.iteritems(): | ||||
| @@ -317,9 +382,7 @@ class ComplexBaseField(BaseField): | ||||
|                     # any _cls data so make it a generic reference allows | ||||
|                     # us to dereference | ||||
|                     meta = getattr(v, '_meta', {}) | ||||
|                     allow_inheritance = ( | ||||
|                         meta.get('allow_inheritance', ALLOW_INHERITANCE) | ||||
|                         is True) | ||||
|                     allow_inheritance = meta.get('allow_inheritance') | ||||
|                     if not allow_inheritance and not self.field: | ||||
|                         value_dict[k] = GenericReferenceField().to_mongo(v) | ||||
|                     else: | ||||
| @@ -327,22 +390,21 @@ class ComplexBaseField(BaseField): | ||||
|                         value_dict[k] = DBRef(collection, v.pk) | ||||
|                 elif hasattr(v, 'to_mongo'): | ||||
|                     cls = v.__class__ | ||||
|                     val = v.to_mongo() | ||||
|                     # If we its a document thats not inherited add _cls | ||||
|                     if (isinstance(v, (Document, EmbeddedDocument))): | ||||
|                     val = v.to_mongo(use_db_field, fields) | ||||
|                     # If it's a document that is not inherited add _cls | ||||
|                     if isinstance(v, (Document, EmbeddedDocument)): | ||||
|                         val['_cls'] = cls.__name__ | ||||
|                     value_dict[k] = val | ||||
|                 else: | ||||
|                     value_dict[k] = self.to_mongo(v) | ||||
|                     value_dict[k] = self.to_mongo(v, use_db_field, fields) | ||||
|  | ||||
|         if is_list:  # Convert back to a list | ||||
|             return [v for k, v in sorted(value_dict.items(), | ||||
|             return [v for _, v in sorted(value_dict.items(), | ||||
|                                          key=operator.itemgetter(0))] | ||||
|         return value_dict | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """If field is provided ensure the value is valid. | ||||
|         """ | ||||
|         """If field is provided ensure the value is valid.""" | ||||
|         errors = {} | ||||
|         if self.field: | ||||
|             if hasattr(value, 'iteritems') or hasattr(value, 'items'): | ||||
| @@ -352,9 +414,9 @@ class ComplexBaseField(BaseField): | ||||
|             for k, v in sequence: | ||||
|                 try: | ||||
|                     self.field._validate(v) | ||||
|                 except ValidationError, error: | ||||
|                 except ValidationError as error: | ||||
|                     errors[k] = error.errors or error | ||||
|                 except (ValueError, AssertionError), error: | ||||
|                 except (ValueError, AssertionError) as error: | ||||
|                     errors[k] = error | ||||
|  | ||||
|             if errors: | ||||
| @@ -378,28 +440,25 @@ class ComplexBaseField(BaseField): | ||||
|             self.field.owner_document = owner_document | ||||
|         self._owner_document = owner_document | ||||
|  | ||||
|     def _get_owner_document(self, owner_document): | ||||
|         self._owner_document = owner_document | ||||
|  | ||||
|     owner_document = property(_get_owner_document, _set_owner_document) | ||||
|  | ||||
|  | ||||
| class ObjectIdField(BaseField): | ||||
|     """A field wrapper around MongoDB's ObjectIds. | ||||
|     """ | ||||
|     """A field wrapper around MongoDB's ObjectIds.""" | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         try: | ||||
|             if not isinstance(value, ObjectId): | ||||
|                 value = ObjectId(value) | ||||
|         except Exception: | ||||
|             pass | ||||
|         return value | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         if not isinstance(value, ObjectId): | ||||
|             try: | ||||
|                 return ObjectId(unicode(value)) | ||||
|             except Exception, e: | ||||
|                 return ObjectId(six.text_type(value)) | ||||
|             except Exception as e: | ||||
|                 # e.message attribute has been deprecated since Python 2.6 | ||||
|                 self.error(unicode(e)) | ||||
|                 self.error(six.text_type(e)) | ||||
|         return value | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
| @@ -407,36 +466,37 @@ class ObjectIdField(BaseField): | ||||
|  | ||||
|     def validate(self, value): | ||||
|         try: | ||||
|             ObjectId(unicode(value)) | ||||
|         except: | ||||
|             ObjectId(six.text_type(value)) | ||||
|         except Exception: | ||||
|             self.error('Invalid Object ID') | ||||
|  | ||||
|  | ||||
| class GeoJsonBaseField(BaseField): | ||||
|     """A geo json field storing a geojson style object. | ||||
|  | ||||
|     .. versionadded:: 0.8 | ||||
|     """ | ||||
|  | ||||
|     _geo_index = pymongo.GEOSPHERE | ||||
|     _type = "GeoBase" | ||||
|     _type = 'GeoBase' | ||||
|  | ||||
|     def __init__(self, auto_index=True, *args, **kwargs): | ||||
|         """ | ||||
|         :param auto_index: Automatically create a "2dsphere" index. Defaults | ||||
|             to `True`. | ||||
|         :param bool auto_index: Automatically create a '2dsphere' index.\ | ||||
|             Defaults to `True`. | ||||
|         """ | ||||
|         self._name = "%sField" % self._type | ||||
|         self._name = '%sField' % self._type | ||||
|         if not auto_index: | ||||
|             self._geo_index = False | ||||
|         super(GeoJsonBaseField, self).__init__(*args, **kwargs) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """Validate the GeoJson object based on its type | ||||
|         """ | ||||
|         """Validate the GeoJson object based on its type.""" | ||||
|         if isinstance(value, dict): | ||||
|             if set(value.keys()) == set(['type', 'coordinates']): | ||||
|                 if value['type'] != self._type: | ||||
|                     self.error('%s type must be "%s"' % (self._name, self._type)) | ||||
|                     self.error('%s type must be "%s"' % | ||||
|                                (self._name, self._type)) | ||||
|                 return self.validate(value['coordinates']) | ||||
|             else: | ||||
|                 self.error('%s can only accept a valid GeoJson dictionary' | ||||
| @@ -446,20 +506,20 @@ class GeoJsonBaseField(BaseField): | ||||
|             self.error('%s can only accept lists of [x, y]' % self._name) | ||||
|             return | ||||
|  | ||||
|         validate = getattr(self, "_validate_%s" % self._type.lower()) | ||||
|         validate = getattr(self, '_validate_%s' % self._type.lower()) | ||||
|         error = validate(value) | ||||
|         if error: | ||||
|             self.error(error) | ||||
|  | ||||
|     def _validate_polygon(self, value): | ||||
|     def _validate_polygon(self, value, top_level=True): | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             return 'Polygons must contain list of linestrings' | ||||
|  | ||||
|         # Quick and dirty validator | ||||
|         try: | ||||
|             value[0][0][0] | ||||
|         except: | ||||
|             return "Invalid Polygon must contain at least one valid linestring" | ||||
|         except (TypeError, IndexError): | ||||
|             return 'Invalid Polygon must contain at least one valid linestring' | ||||
|  | ||||
|         errors = [] | ||||
|         for val in value: | ||||
| @@ -469,18 +529,21 @@ class GeoJsonBaseField(BaseField): | ||||
|             if error and error not in errors: | ||||
|                 errors.append(error) | ||||
|         if errors: | ||||
|             return "Invalid Polygon:\n%s" % ", ".join(errors) | ||||
|             if top_level: | ||||
|                 return 'Invalid Polygon:\n%s' % ', '.join(errors) | ||||
|             else: | ||||
|                 return '%s' % ', '.join(errors) | ||||
|  | ||||
|     def _validate_linestring(self, value, top_level=True): | ||||
|         """Validates a linestring""" | ||||
|         """Validate a linestring.""" | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             return 'LineStrings must contain list of coordinate pairs' | ||||
|  | ||||
|         # Quick and dirty validator | ||||
|         try: | ||||
|             value[0][0] | ||||
|         except: | ||||
|             return "Invalid LineString must contain at least one valid point" | ||||
|         except (TypeError, IndexError): | ||||
|             return 'Invalid LineString must contain at least one valid point' | ||||
|  | ||||
|         errors = [] | ||||
|         for val in value: | ||||
| @@ -489,21 +552,81 @@ class GeoJsonBaseField(BaseField): | ||||
|                 errors.append(error) | ||||
|         if errors: | ||||
|             if top_level: | ||||
|                 return "Invalid LineString:\n%s" % ", ".join(errors) | ||||
|                 return 'Invalid LineString:\n%s' % ', '.join(errors) | ||||
|             else: | ||||
|                 return "%s" % ", ".join(errors) | ||||
|                 return '%s' % ', '.join(errors) | ||||
|  | ||||
|     def _validate_point(self, value): | ||||
|         """Validate each set of coords""" | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             return 'Points must be a list of coordinate pairs' | ||||
|         elif not len(value) == 2: | ||||
|             return "Value (%s) must be a two-dimensional point" % repr(value) | ||||
|             return 'Value (%s) must be a two-dimensional point' % repr(value) | ||||
|         elif (not isinstance(value[0], (float, int)) or | ||||
|               not isinstance(value[1], (float, int))): | ||||
|             return "Both values (%s) in point must be float or int" % repr(value) | ||||
|             return 'Both values (%s) in point must be float or int' % repr(value) | ||||
|  | ||||
|     def _validate_multipoint(self, value): | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             return 'MultiPoint must be a list of Point' | ||||
|  | ||||
|         # Quick and dirty validator | ||||
|         try: | ||||
|             value[0][0] | ||||
|         except (TypeError, IndexError): | ||||
|             return 'Invalid MultiPoint must contain at least one valid point' | ||||
|  | ||||
|         errors = [] | ||||
|         for point in value: | ||||
|             error = self._validate_point(point) | ||||
|             if error and error not in errors: | ||||
|                 errors.append(error) | ||||
|  | ||||
|         if errors: | ||||
|             return '%s' % ', '.join(errors) | ||||
|  | ||||
|     def _validate_multilinestring(self, value, top_level=True): | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             return 'MultiLineString must be a list of LineString' | ||||
|  | ||||
|         # Quick and dirty validator | ||||
|         try: | ||||
|             value[0][0][0] | ||||
|         except (TypeError, IndexError): | ||||
|             return 'Invalid MultiLineString must contain at least one valid linestring' | ||||
|  | ||||
|         errors = [] | ||||
|         for linestring in value: | ||||
|             error = self._validate_linestring(linestring, False) | ||||
|             if error and error not in errors: | ||||
|                 errors.append(error) | ||||
|  | ||||
|         if errors: | ||||
|             if top_level: | ||||
|                 return 'Invalid MultiLineString:\n%s' % ', '.join(errors) | ||||
|             else: | ||||
|                 return '%s' % ', '.join(errors) | ||||
|  | ||||
|     def _validate_multipolygon(self, value): | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             return 'MultiPolygon must be a list of Polygon' | ||||
|  | ||||
|         # Quick and dirty validator | ||||
|         try: | ||||
|             value[0][0][0][0] | ||||
|         except (TypeError, IndexError): | ||||
|             return 'Invalid MultiPolygon must contain at least one valid Polygon' | ||||
|  | ||||
|         errors = [] | ||||
|         for polygon in value: | ||||
|             error = self._validate_polygon(polygon, False) | ||||
|             if error and error not in errors: | ||||
|                 errors.append(error) | ||||
|  | ||||
|         if errors: | ||||
|             return 'Invalid MultiPolygon:\n%s' % ', '.join(errors) | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         if isinstance(value, dict): | ||||
|             return value | ||||
|         return SON([("type", self._type), ("coordinates", value)]) | ||||
|         return SON([('type', self._type), ('coordinates', value)]) | ||||
|   | ||||
| @@ -1,24 +1,23 @@ | ||||
| import warnings | ||||
|  | ||||
| import pymongo | ||||
| import six | ||||
|  | ||||
| from mongoengine.base.common import _document_registry | ||||
| from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import InvalidDocumentError | ||||
| from mongoengine.python_support import PY3 | ||||
| from mongoengine.queryset import (DO_NOTHING, DoesNotExist, | ||||
|                                   MultipleObjectsReturned, | ||||
|                                   QuerySet, QuerySetManager) | ||||
|                                   QuerySetManager) | ||||
|  | ||||
| from mongoengine.base.common import _document_registry, ALLOW_INHERITANCE | ||||
| from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField | ||||
|  | ||||
| __all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass') | ||||
|  | ||||
|  | ||||
| class DocumentMetaclass(type): | ||||
|     """Metaclass for all documents. | ||||
|     """ | ||||
|     """Metaclass for all documents.""" | ||||
|  | ||||
|     # TODO lower complexity of this method | ||||
|     def __new__(cls, name, bases, attrs): | ||||
|         flattened_bases = cls._get_bases(bases) | ||||
|         super_new = super(DocumentMetaclass, cls).__new__ | ||||
| @@ -29,6 +28,7 @@ class DocumentMetaclass(type): | ||||
|             return super_new(cls, name, bases, attrs) | ||||
|  | ||||
|         attrs['_is_document'] = attrs.get('_is_document', False) | ||||
|         attrs['_cached_reference_fields'] = [] | ||||
|  | ||||
|         # EmbeddedDocuments could have meta data for inheritance | ||||
|         if 'meta' in attrs: | ||||
| @@ -44,6 +44,12 @@ class DocumentMetaclass(type): | ||||
|                 elif hasattr(base, '_meta'): | ||||
|                     meta.merge(base._meta) | ||||
|             attrs['_meta'] = meta | ||||
|             attrs['_meta']['abstract'] = False  # 789: EmbeddedDocument shouldn't inherit abstract | ||||
|  | ||||
|         # If allow_inheritance is True, add a "_cls" string field to the attrs | ||||
|         if attrs['_meta'].get('allow_inheritance'): | ||||
|             StringField = _import_class('StringField') | ||||
|             attrs['_cls'] = StringField() | ||||
|  | ||||
|         # Handle document Fields | ||||
|  | ||||
| @@ -83,16 +89,17 @@ class DocumentMetaclass(type): | ||||
|         # Ensure no duplicate db_fields | ||||
|         duplicate_db_fields = [k for k, v in field_names.items() if v > 1] | ||||
|         if duplicate_db_fields: | ||||
|             msg = ("Multiple db_fields defined for: %s " % | ||||
|                    ", ".join(duplicate_db_fields)) | ||||
|             msg = ('Multiple db_fields defined for: %s ' % | ||||
|                    ', '.join(duplicate_db_fields)) | ||||
|             raise InvalidDocumentError(msg) | ||||
|  | ||||
|         # Set _fields and db_field maps | ||||
|         attrs['_fields'] = doc_fields | ||||
|         attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k)) | ||||
|                                       for k, v in doc_fields.iteritems()]) | ||||
|         attrs['_reverse_db_field_map'] = dict( | ||||
|             (v, k) for k, v in attrs['_db_field_map'].iteritems()) | ||||
|         attrs['_db_field_map'] = {k: getattr(v, 'db_field', k) | ||||
|                                   for k, v in doc_fields.items()} | ||||
|         attrs['_reverse_db_field_map'] = { | ||||
|             v: k for k, v in attrs['_db_field_map'].items() | ||||
|         } | ||||
|  | ||||
|         attrs['_fields_ordered'] = tuple(i[1] for i in sorted( | ||||
|                                          (v.creation_counter, v.name) | ||||
| @@ -106,16 +113,14 @@ class DocumentMetaclass(type): | ||||
|         for base in flattened_bases: | ||||
|             if (not getattr(base, '_is_base_cls', True) and | ||||
|                     not getattr(base, '_meta', {}).get('abstract', True)): | ||||
|                 # Collate heirarchy for _cls and _subclasses | ||||
|                 # Collate hierarchy for _cls and _subclasses | ||||
|                 class_name.append(base.__name__) | ||||
|  | ||||
|             if hasattr(base, '_meta'): | ||||
|                 # Warn if allow_inheritance isn't set and prevent | ||||
|                 # inheritance of classes where inheritance is set to False | ||||
|                 allow_inheritance = base._meta.get('allow_inheritance', | ||||
|                                                    ALLOW_INHERITANCE) | ||||
|                 if (allow_inheritance is not True and | ||||
|                    not base._meta.get('abstract')): | ||||
|                 allow_inheritance = base._meta.get('allow_inheritance') | ||||
|                 if not allow_inheritance and not base._meta.get('abstract'): | ||||
|                     raise ValueError('Document %s may not be subclassed' % | ||||
|                                      base.__name__) | ||||
|  | ||||
| @@ -141,7 +146,8 @@ class DocumentMetaclass(type): | ||||
|                 base._subclasses += (_cls,) | ||||
|             base._types = base._subclasses  # TODO depreciate _types | ||||
|  | ||||
|         Document, EmbeddedDocument, DictField = cls._import_classes() | ||||
|         (Document, EmbeddedDocument, DictField, | ||||
|          CachedReferenceField) = cls._import_classes() | ||||
|  | ||||
|         if issubclass(new_class, Document): | ||||
|             new_class._collection = None | ||||
| @@ -156,8 +162,8 @@ class DocumentMetaclass(type): | ||||
|         # module continues to use im_func and im_self, so the code below | ||||
|         # copies __func__ into im_func and __self__ into im_self for | ||||
|         # classmethod objects in Document derived classes. | ||||
|         if PY3: | ||||
|             for key, val in new_class.__dict__.items(): | ||||
|         if six.PY3: | ||||
|             for val in new_class.__dict__.values(): | ||||
|                 if isinstance(val, classmethod): | ||||
|                     f = val.__get__(new_class) | ||||
|                     if hasattr(f, '__func__') and not hasattr(f, 'im_func'): | ||||
| @@ -168,15 +174,30 @@ class DocumentMetaclass(type): | ||||
|         # Handle delete rules | ||||
|         for field in new_class._fields.itervalues(): | ||||
|             f = field | ||||
|             if f.owner_document is None: | ||||
|                 f.owner_document = new_class | ||||
|             delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING) | ||||
|             if isinstance(f, CachedReferenceField): | ||||
|  | ||||
|                 if issubclass(new_class, EmbeddedDocument): | ||||
|                     raise InvalidDocumentError('CachedReferenceFields is not ' | ||||
|                                                'allowed in EmbeddedDocuments') | ||||
|                 if not f.document_type: | ||||
|                     raise InvalidDocumentError( | ||||
|                         'Document is not available to sync') | ||||
|  | ||||
|                 if f.auto_sync: | ||||
|                     f.start_listener() | ||||
|  | ||||
|                 f.document_type._cached_reference_fields.append(f) | ||||
|  | ||||
|             if isinstance(f, ComplexBaseField) and hasattr(f, 'field'): | ||||
|                 delete_rule = getattr(f.field, | ||||
|                                       'reverse_delete_rule', | ||||
|                                       DO_NOTHING) | ||||
|                 if isinstance(f, DictField) and delete_rule != DO_NOTHING: | ||||
|                     msg = ("Reverse delete rules are not supported " | ||||
|                            "for %s (field: %s)" % | ||||
|                     msg = ('Reverse delete rules are not supported ' | ||||
|                            'for %s (field: %s)' % | ||||
|                            (field.__class__.__name__, field.name)) | ||||
|                     raise InvalidDocumentError(msg) | ||||
|  | ||||
| @@ -184,16 +205,16 @@ class DocumentMetaclass(type): | ||||
|  | ||||
|             if delete_rule != DO_NOTHING: | ||||
|                 if issubclass(new_class, EmbeddedDocument): | ||||
|                     msg = ("Reverse delete rules are not supported for " | ||||
|                            "EmbeddedDocuments (field: %s)" % field.name) | ||||
|                     msg = ('Reverse delete rules are not supported for ' | ||||
|                            'EmbeddedDocuments (field: %s)' % field.name) | ||||
|                     raise InvalidDocumentError(msg) | ||||
|                 f.document_type.register_delete_rule(new_class, | ||||
|                                                      field.name, delete_rule) | ||||
|  | ||||
|             if (field.name and hasattr(Document, field.name) and | ||||
|                     EmbeddedDocument not in new_class.mro()): | ||||
|                 msg = ("%s is a document method and not a valid " | ||||
|                        "field name" % field.name) | ||||
|                 msg = ('%s is a document method and not a valid ' | ||||
|                        'field name' % field.name) | ||||
|                 raise InvalidDocumentError(msg) | ||||
|  | ||||
|         return new_class | ||||
| @@ -224,7 +245,8 @@ class DocumentMetaclass(type): | ||||
|         Document = _import_class('Document') | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|         DictField = _import_class('DictField') | ||||
|         return (Document, EmbeddedDocument, DictField) | ||||
|         CachedReferenceField = _import_class('CachedReferenceField') | ||||
|         return Document, EmbeddedDocument, DictField, CachedReferenceField | ||||
|  | ||||
|  | ||||
| class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
| @@ -237,7 +259,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|         super_new = super(TopLevelDocumentMetaclass, cls).__new__ | ||||
|  | ||||
|         # Set default _meta data if base class, otherwise get user defined meta | ||||
|         if (attrs.get('my_metaclass') == TopLevelDocumentMetaclass): | ||||
|         if attrs.get('my_metaclass') == TopLevelDocumentMetaclass: | ||||
|             # defaults | ||||
|             attrs['_meta'] = { | ||||
|                 'abstract': True, | ||||
| @@ -250,13 +272,18 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|                 'index_drop_dups': False, | ||||
|                 'index_opts': None, | ||||
|                 'delete_rules': None, | ||||
|  | ||||
|                 # allow_inheritance can be True, False, and None. True means | ||||
|                 # "allow inheritance", False means "don't allow inheritance", | ||||
|                 # None means "do whatever your parent does, or don't allow | ||||
|                 # inheritance if you're a top-level class". | ||||
|                 'allow_inheritance': None, | ||||
|             } | ||||
|             attrs['_is_base_cls'] = True | ||||
|             attrs['_meta'].update(attrs.get('meta', {})) | ||||
|         else: | ||||
|             attrs['_meta'] = attrs.get('meta', {}) | ||||
|             # Explictly set abstract to false unless set | ||||
|             # Explicitly set abstract to false unless set | ||||
|             attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False) | ||||
|             attrs['_is_base_cls'] = False | ||||
|  | ||||
| @@ -271,7 +298,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|  | ||||
|         # Clean up top level meta | ||||
|         if 'meta' in attrs: | ||||
|             del(attrs['meta']) | ||||
|             del attrs['meta'] | ||||
|  | ||||
|         # Find the parent document class | ||||
|         parent_doc_cls = [b for b in flattened_bases | ||||
| @@ -280,17 +307,17 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|  | ||||
|         # Prevent classes setting collection different to their parents | ||||
|         # If parent wasn't an abstract class | ||||
|         if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) | ||||
|             and not parent_doc_cls._meta.get('abstract', True)): | ||||
|                 msg = "Trying to set a collection on a subclass (%s)" % name | ||||
|         if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and | ||||
|                 not parent_doc_cls._meta.get('abstract', True)): | ||||
|             msg = 'Trying to set a collection on a subclass (%s)' % name | ||||
|             warnings.warn(msg, SyntaxWarning) | ||||
|                 del(attrs['_meta']['collection']) | ||||
|             del attrs['_meta']['collection'] | ||||
|  | ||||
|         # Ensure abstract documents have abstract bases | ||||
|         if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'): | ||||
|             if (parent_doc_cls and | ||||
|                     not parent_doc_cls._meta.get('abstract', False)): | ||||
|                 msg = "Abstract document cannot have non-abstract base" | ||||
|                 msg = 'Abstract document cannot have non-abstract base' | ||||
|                 raise ValueError(msg) | ||||
|             return super_new(cls, name, bases, attrs) | ||||
|  | ||||
| @@ -313,12 +340,16 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|  | ||||
|         meta.merge(attrs.get('_meta', {}))  # Top level meta | ||||
|  | ||||
|         # Only simple classes (direct subclasses of Document) | ||||
|         # may set allow_inheritance to False | ||||
|         # Only simple classes (i.e. direct subclasses of Document) may set | ||||
|         # allow_inheritance to False. If the base Document allows inheritance, | ||||
|         # none of its subclasses can override allow_inheritance to False. | ||||
|         simple_class = all([b._meta.get('abstract') | ||||
|                             for b in flattened_bases if hasattr(b, '_meta')]) | ||||
|         if (not simple_class and meta['allow_inheritance'] is False and | ||||
|            not meta['abstract']): | ||||
|         if ( | ||||
|             not simple_class and | ||||
|             meta['allow_inheritance'] is False and | ||||
|             not meta['abstract'] | ||||
|         ): | ||||
|             raise ValueError('Only direct subclasses of Document may set ' | ||||
|                              '"allow_inheritance" to False') | ||||
|  | ||||
| @@ -359,17 +390,20 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|                     new_class.id = field | ||||
|  | ||||
|         # Set primary key if not defined by the document | ||||
|         new_class._auto_id_field = False | ||||
|         new_class._auto_id_field = getattr(parent_doc_cls, | ||||
|                                            '_auto_id_field', False) | ||||
|         if not new_class._meta.get('id_field'): | ||||
|             # After 0.10, find not existing names, instead of overwriting | ||||
|             id_name, id_db_name = cls.get_auto_id_names(new_class) | ||||
|             new_class._auto_id_field = True | ||||
|             new_class._meta['id_field'] = 'id' | ||||
|             new_class._fields['id'] = ObjectIdField(db_field='_id') | ||||
|             new_class._fields['id'].name = 'id' | ||||
|             new_class.id = new_class._fields['id'] | ||||
|  | ||||
|             new_class._meta['id_field'] = id_name | ||||
|             new_class._fields[id_name] = ObjectIdField(db_field=id_db_name) | ||||
|             new_class._fields[id_name].name = id_name | ||||
|             new_class.id = new_class._fields[id_name] | ||||
|             new_class._db_field_map[id_name] = id_db_name | ||||
|             new_class._reverse_db_field_map[id_db_name] = id_name | ||||
|             # Prepend id field to _fields_ordered | ||||
|         if 'id' in new_class._fields and 'id' not in new_class._fields_ordered: | ||||
|             new_class._fields_ordered = ('id', ) + new_class._fields_ordered | ||||
|             new_class._fields_ordered = (id_name, ) + new_class._fields_ordered | ||||
|  | ||||
|         # Merge in exceptions with parent hierarchy | ||||
|         exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) | ||||
| @@ -384,6 +418,20 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|  | ||||
|         return new_class | ||||
|  | ||||
|     @classmethod | ||||
|     def get_auto_id_names(cls, new_class): | ||||
|         id_name, id_db_name = ('id', '_id') | ||||
|         if id_name not in new_class._fields and \ | ||||
|                 id_db_name not in (v.db_field for v in new_class._fields.values()): | ||||
|             return id_name, id_db_name | ||||
|         id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0 | ||||
|         while id_name in new_class._fields or \ | ||||
|                 id_db_name in (v.db_field for v in new_class._fields.values()): | ||||
|             id_name = '{0}_{1}'.format(id_basename, i) | ||||
|             id_db_name = '{0}_{1}'.format(id_db_basename, i) | ||||
|             i += 1 | ||||
|         return id_name, id_db_name | ||||
|  | ||||
|  | ||||
| class MetaDict(dict): | ||||
|     """Custom dictionary for meta classes. | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| _class_registry_cache = {} | ||||
| _field_list_cache = [] | ||||
|  | ||||
|  | ||||
| def _import_class(cls_name): | ||||
| @@ -20,16 +21,23 @@ def _import_class(cls_name): | ||||
|  | ||||
|     doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument', | ||||
|                    'MapReduceDocument') | ||||
|     field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField', | ||||
|                      'FileField', 'GenericReferenceField', | ||||
|                      'GenericEmbeddedDocumentField', 'GeoPointField', | ||||
|                      'PointField', 'LineStringField', 'ListField', | ||||
|                      'PolygonField', 'ReferenceField', 'StringField', | ||||
|                      'ComplexBaseField') | ||||
|  | ||||
|     # Field Classes | ||||
|     if not _field_list_cache: | ||||
|         from mongoengine.fields import __all__ as fields | ||||
|         _field_list_cache.extend(fields) | ||||
|         from mongoengine.base.fields import __all__ as fields | ||||
|         _field_list_cache.extend(fields) | ||||
|  | ||||
|     field_classes = _field_list_cache | ||||
|  | ||||
|     queryset_classes = ('OperationError',) | ||||
|     deref_classes = ('DeReference',) | ||||
|  | ||||
|     if cls_name in doc_classes: | ||||
|     if cls_name == 'BaseDocument': | ||||
|         from mongoengine.base import document as module | ||||
|         import_classes = ['BaseDocument'] | ||||
|     elif cls_name in doc_classes: | ||||
|         from mongoengine import document as module | ||||
|         import_classes = doc_classes | ||||
|     elif cls_name in field_classes: | ||||
|   | ||||
| @@ -1,15 +1,25 @@ | ||||
| import pymongo | ||||
| from pymongo import MongoClient, MongoReplicaSetClient, uri_parser | ||||
| from pymongo import MongoClient, ReadPreference, uri_parser | ||||
| import six | ||||
|  | ||||
| from mongoengine.python_support import IS_PYMONGO_3 | ||||
|  | ||||
| __all__ = ['ConnectionError', 'connect', 'register_connection', | ||||
| __all__ = ['MongoEngineConnectionError', 'connect', 'register_connection', | ||||
|            'DEFAULT_CONNECTION_NAME'] | ||||
|  | ||||
|  | ||||
| DEFAULT_CONNECTION_NAME = 'default' | ||||
|  | ||||
| if IS_PYMONGO_3: | ||||
|     READ_PREFERENCE = ReadPreference.PRIMARY | ||||
| else: | ||||
|     from pymongo import MongoReplicaSetClient | ||||
|     READ_PREFERENCE = False | ||||
|  | ||||
| class ConnectionError(Exception): | ||||
|  | ||||
| class MongoEngineConnectionError(Exception): | ||||
|     """Error raised when the database connection can't be established or | ||||
|     when a connection with a requested alias can't be retrieved. | ||||
|     """ | ||||
|     pass | ||||
|  | ||||
|  | ||||
| @@ -18,9 +28,12 @@ _connections = {} | ||||
| _dbs = {} | ||||
|  | ||||
|  | ||||
| def register_connection(alias, name, host='localhost', port=27017, | ||||
|                         is_slave=False, read_preference=False, slaves=None, | ||||
|                         username=None, password=None, **kwargs): | ||||
| def register_connection(alias, name=None, host=None, port=None, | ||||
|                         read_preference=READ_PREFERENCE, | ||||
|                         username=None, password=None, | ||||
|                         authentication_source=None, | ||||
|                         authentication_mechanism=None, | ||||
|                         **kwargs): | ||||
|     """Add a connection. | ||||
|  | ||||
|     :param alias: the name that will be used to refer to this connection | ||||
| @@ -28,106 +41,183 @@ def register_connection(alias, name, host='localhost', port=27017, | ||||
|     :param name: the name of the specific database to use | ||||
|     :param host: the host name of the :program:`mongod` instance to connect to | ||||
|     :param port: the port that the :program:`mongod` instance is running on | ||||
|     :param is_slave: whether the connection can act as a slave | ||||
|       ** Depreciated pymongo 2.0.1+ | ||||
|     :param read_preference: The read preference for the collection | ||||
|        ** Added pymongo 2.1 | ||||
|     :param slaves: a list of aliases of slave connections; each of these must | ||||
|         be a registered connection that has :attr:`is_slave` set to ``True`` | ||||
|     :param username: username to authenticate with | ||||
|     :param password: password to authenticate with | ||||
|     :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver | ||||
|     :param authentication_source: database to authenticate against | ||||
|     :param authentication_mechanism: database authentication mechanisms. | ||||
|         By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, | ||||
|         MONGODB-CR (MongoDB Challenge Response protocol) for older servers. | ||||
|     :param is_mock: explicitly use mongomock for this connection | ||||
|         (can also be done by using `mongomock://` as db host prefix) | ||||
|     :param kwargs: ad-hoc parameters to be passed into the pymongo driver, | ||||
|         for example maxpoolsize, tz_aware, etc. See the documentation | ||||
|         for pymongo's `MongoClient` for a full list. | ||||
|  | ||||
|     .. versionchanged:: 0.10.6 - added mongomock support | ||||
|     """ | ||||
|     global _connection_settings | ||||
|  | ||||
|     conn_settings = { | ||||
|         'name': name, | ||||
|         'host': host, | ||||
|         'port': port, | ||||
|         'is_slave': is_slave, | ||||
|         'slaves': slaves or [], | ||||
|         'name': name or 'test', | ||||
|         'host': host or 'localhost', | ||||
|         'port': port or 27017, | ||||
|         'read_preference': read_preference, | ||||
|         'username': username, | ||||
|         'password': password, | ||||
|         'read_preference': read_preference | ||||
|         'authentication_source': authentication_source, | ||||
|         'authentication_mechanism': authentication_mechanism | ||||
|     } | ||||
|  | ||||
|     # Handle uri style connections | ||||
|     if "://" in host: | ||||
|         uri_dict = uri_parser.parse_uri(host) | ||||
|         conn_settings.update({ | ||||
|             'host': host, | ||||
|             'name': uri_dict.get('database') or name, | ||||
|             'username': uri_dict.get('username'), | ||||
|             'password': uri_dict.get('password'), | ||||
|             'read_preference': read_preference, | ||||
|         }) | ||||
|         if "replicaSet" in host: | ||||
|             conn_settings['replicaSet'] = True | ||||
|     conn_host = conn_settings['host'] | ||||
|  | ||||
|     # Host can be a list or a string, so if string, force to a list. | ||||
|     if isinstance(conn_host, six.string_types): | ||||
|         conn_host = [conn_host] | ||||
|  | ||||
|     resolved_hosts = [] | ||||
|     for entity in conn_host: | ||||
|  | ||||
|         # Handle Mongomock | ||||
|         if entity.startswith('mongomock://'): | ||||
|             conn_settings['is_mock'] = True | ||||
|             # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://` | ||||
|             resolved_hosts.append(entity.replace('mongomock://', 'mongodb://', 1)) | ||||
|  | ||||
|         # Handle URI style connections, only updating connection params which | ||||
|         # were explicitly specified in the URI. | ||||
|         elif '://' in entity: | ||||
|             uri_dict = uri_parser.parse_uri(entity) | ||||
|             resolved_hosts.append(entity) | ||||
|  | ||||
|             if uri_dict.get('database'): | ||||
|                 conn_settings['name'] = uri_dict.get('database') | ||||
|  | ||||
|             for param in ('read_preference', 'username', 'password'): | ||||
|                 if uri_dict.get(param): | ||||
|                     conn_settings[param] = uri_dict[param] | ||||
|  | ||||
|             uri_options = uri_dict['options'] | ||||
|             if 'replicaset' in uri_options: | ||||
|                 conn_settings['replicaSet'] = uri_options['replicaset'] | ||||
|             if 'authsource' in uri_options: | ||||
|                 conn_settings['authentication_source'] = uri_options['authsource'] | ||||
|             if 'authmechanism' in uri_options: | ||||
|                 conn_settings['authentication_mechanism'] = uri_options['authmechanism'] | ||||
|         else: | ||||
|             resolved_hosts.append(entity) | ||||
|     conn_settings['host'] = resolved_hosts | ||||
|  | ||||
|     # Deprecated parameters that should not be passed on | ||||
|     kwargs.pop('slaves', None) | ||||
|     kwargs.pop('is_slave', None) | ||||
|  | ||||
|     conn_settings.update(kwargs) | ||||
|     _connection_settings[alias] = conn_settings | ||||
|  | ||||
|  | ||||
def disconnect(alias=DEFAULT_CONNECTION_NAME):
    """Close the connection with a given alias and forget its cached
    database handle.

    :param alias: the alias of the connection to close (defaults to
        the default connection).
    """
    global _connections
    global _dbs

    if alias in _connections:
        # Merged diff had both the pymongo-2 `.disconnect()` and the
        # pymongo-3 `.close()` call; `.close()` is the surviving API.
        get_connection(alias=alias).close()
        del _connections[alias]
    if alias in _dbs:
        del _dbs[alias]
|  | ||||
|  | ||||
def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
    """Return a connection with a given alias, creating it if necessary.

    :param alias: the alias of the registered connection settings to use.
    :param reconnect: if True, close any existing connection for this
        alias first and establish a fresh one.
    :raises MongoEngineConnectionError: if no settings are registered for
        the alias or the connection cannot be established.
    """
    global _connections

    # Connect to the database if not already connected
    if reconnect:
        disconnect(alias)

    # If the requested alias already exists in the _connections list, return
    # it immediately.
    if alias in _connections:
        return _connections[alias]

    # Validate that the requested alias exists in the _connection_settings.
    # Raise MongoEngineConnectionError if it doesn't.
    if alias not in _connection_settings:
        if alias == DEFAULT_CONNECTION_NAME:
            msg = 'You have not defined a default connection'
        else:
            msg = 'Connection with alias "%s" has not been defined' % alias
        raise MongoEngineConnectionError(msg)

    def _clean_settings(settings_dict):
        # Strip settings that identify the database/auth rather than the
        # underlying client connection.
        irrelevant_fields = set([
            'name', 'username', 'password', 'authentication_source',
            'authentication_mechanism'
        ])
        return {
            k: v for k, v in settings_dict.items()
            if k not in irrelevant_fields
        }

    # Retrieve a copy of the connection settings associated with the requested
    # alias and remove the database name and authentication info (we don't
    # care about them at this point).
    conn_settings = _clean_settings(_connection_settings[alias].copy())

    # Determine if we should use PyMongo's or mongomock's MongoClient.
    is_mock = conn_settings.pop('is_mock', False)
    if is_mock:
        try:
            import mongomock
        except ImportError:
            raise RuntimeError('You need mongomock installed to mock '
                               'MongoEngine.')
        connection_class = mongomock.MongoClient
    else:
        connection_class = MongoClient

        # For replica set connections with PyMongo 2.x, use
        # MongoReplicaSetClient.
        # TODO remove this once we stop supporting PyMongo 2.x.
        if 'replicaSet' in conn_settings and not IS_PYMONGO_3:
            connection_class = MongoReplicaSetClient
            conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)

            # hosts_or_uri has to be a string, so if 'host' was provided
            # as a list, join its parts and separate them by ','
            if isinstance(conn_settings['hosts_or_uri'], list):
                conn_settings['hosts_or_uri'] = ','.join(
                    conn_settings['hosts_or_uri'])

            # Discard port since it can't be used on MongoReplicaSetClient
            conn_settings.pop('port', None)

            # Discard replicaSet if it's not a string
            if not isinstance(conn_settings['replicaSet'], six.string_types):
                conn_settings.pop('replicaSet', None)

    # Iterate over all of the connection settings and if a connection with
    # the same parameters is already established, use it instead of creating
    # a new one.
    existing_connection = None
    connection_settings_iterator = (
        (db_alias, settings.copy())
        for db_alias, settings in _connection_settings.items()
    )
    for db_alias, connection_settings in connection_settings_iterator:
        connection_settings = _clean_settings(connection_settings)
        if conn_settings == connection_settings and _connections.get(db_alias):
            existing_connection = _connections[db_alias]
            break

    # If an existing connection was found, assign it to the new alias
    if existing_connection:
        _connections[alias] = existing_connection
    else:
        # Otherwise, create the new connection for this alias. Raise
        # MongoEngineConnectionError if it can't be established.
        try:
            _connections[alias] = connection_class(**conn_settings)
        except Exception as e:
            raise MongoEngineConnectionError(
                'Cannot connect to database %s :\n%s' % (alias, e))

    return _connections[alias]
|  | ||||
|  | ||||
def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
    """Return the database for a given alias, connecting and authenticating
    the first time it is requested.

    :param alias: the alias of the connection to use.
    :param reconnect: if True, drop the existing connection first.
    """
    global _dbs

    if reconnect:
        disconnect(alias)

    # NOTE(review): the `alias not in _dbs` guard was hidden behind a diff
    # hunk header; reconstructed from the indentation of the cached-lookup
    # body — confirm against upstream.
    if alias not in _dbs:
        conn = get_connection(alias)
        conn_settings = _connection_settings[alias]
        db = conn[conn_settings['name']]
        auth_kwargs = {'source': conn_settings['authentication_source']}
        if conn_settings['authentication_mechanism'] is not None:
            auth_kwargs['mechanism'] = conn_settings['authentication_mechanism']
        # Authenticate if necessary. MONGODB-X509 authenticates with a
        # username but no password.
        if conn_settings['username'] and (conn_settings['password'] or
                                          conn_settings['authentication_mechanism'] == 'MONGODB-X509'):
            db.authenticate(conn_settings['username'], conn_settings['password'], **auth_kwargs)
        _dbs[alias] = db
    return _dbs[alias]
|  | ||||
|  | ||||
def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
    """Connect to the database specified by the 'db' argument.

    Connection settings may be provided here as well if the database is not
    running on the default port on localhost.

    Multiple databases are supported by using aliases. Provide a separate
    `alias` to connect to a different instance of :program:`mongod`.

    See the docstring for `register_connection` for more details about all
    supported kwargs.

    .. versionchanged:: 0.6 - added multiple database support.
    """
    global _connections
    if alias not in _connections:
        register_connection(alias, db, **kwargs)

    return get_connection(alias)
|  | ||||
|   | ||||
| @@ -1,10 +1,9 @@ | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||
| from mongoengine.queryset import QuerySet | ||||
|  | ||||
|  | ||||
# Public API of this module.
__all__ = ('switch_db', 'switch_collection', 'no_dereference',
           'no_sub_classes', 'query_counter')
|  | ||||
|  | ||||
| class switch_db(object): | ||||
| @@ -19,11 +18,10 @@ class switch_db(object): | ||||
|         class Group(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         Group(name="test").save()  # Saves in the default db | ||||
|         Group(name='test').save()  # Saves in the default db | ||||
|  | ||||
|         with switch_db(Group, 'testdb-1') as Group: | ||||
|             Group(name="hello testdb!").save()  # Saves in testdb-1 | ||||
|  | ||||
|             Group(name='hello testdb!').save()  # Saves in testdb-1 | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, cls, db_alias): | ||||
| @@ -35,17 +33,17 @@ class switch_db(object): | ||||
|         self.cls = cls | ||||
|         self.collection = cls._get_collection() | ||||
|         self.db_alias = db_alias | ||||
|         self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME) | ||||
|         self.ori_db_alias = cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME) | ||||
|  | ||||
|     def __enter__(self): | ||||
|         """ change the db_alias and clear the cached collection """ | ||||
|         self.cls._meta["db_alias"] = self.db_alias | ||||
|         """Change the db_alias and clear the cached collection.""" | ||||
|         self.cls._meta['db_alias'] = self.db_alias | ||||
|         self.cls._collection = None | ||||
|         return self.cls | ||||
|  | ||||
|     def __exit__(self, t, value, traceback): | ||||
|         """ Reset the db_alias and collection """ | ||||
|         self.cls._meta["db_alias"] = self.ori_db_alias | ||||
|         """Reset the db_alias and collection.""" | ||||
|         self.cls._meta['db_alias'] = self.ori_db_alias | ||||
|         self.cls._collection = self.collection | ||||
|  | ||||
|  | ||||
| @@ -57,15 +55,14 @@ class switch_collection(object): | ||||
|         class Group(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         Group(name="test").save()  # Saves in the default db | ||||
|         Group(name='test').save()  # Saves in the default db | ||||
|  | ||||
|         with switch_collection(Group, 'group1') as Group: | ||||
|             Group(name="hello testdb!").save()  # Saves in group1 collection | ||||
|  | ||||
|             Group(name='hello testdb!').save()  # Saves in group1 collection | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, cls, collection_name): | ||||
|         """ Construct the switch_collection context manager | ||||
|         """Construct the switch_collection context manager. | ||||
|  | ||||
|         :param cls: the class to change the registered db | ||||
|         :param collection_name: the name of the collection to use | ||||
| @@ -76,7 +73,7 @@ class switch_collection(object): | ||||
|         self.collection_name = collection_name | ||||
|  | ||||
|     def __enter__(self): | ||||
|         """ change the _get_collection_name and clear the cached collection """ | ||||
|         """Change the _get_collection_name and clear the cached collection.""" | ||||
|  | ||||
|         @classmethod | ||||
|         def _get_collection_name(cls): | ||||
| @@ -87,7 +84,7 @@ class switch_collection(object): | ||||
|         return self.cls | ||||
|  | ||||
|     def __exit__(self, t, value, traceback): | ||||
|         """ Reset the collection """ | ||||
|         """Reset the collection.""" | ||||
|         self.cls._collection = self.ori_collection | ||||
|         self.cls._get_collection_name = self.ori_get_collection_name | ||||
|  | ||||
| @@ -100,7 +97,6 @@ class no_dereference(object): | ||||
|  | ||||
|         with no_dereference(Group) as Group: | ||||
|             Group.objects.find() | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, cls): | ||||
| @@ -120,13 +116,13 @@ class no_dereference(object): | ||||
|                                                ComplexBaseField))] | ||||
|  | ||||
|     def __enter__(self): | ||||
|         """ change the objects default and _auto_dereference values""" | ||||
|         """Change the objects default and _auto_dereference values.""" | ||||
|         for field in self.deref_fields: | ||||
|             self.cls._fields[field]._auto_dereference = False | ||||
|         return self.cls | ||||
|  | ||||
|     def __exit__(self, t, value, traceback): | ||||
|         """ Reset the default and _auto_dereference values""" | ||||
|         """Reset the default and _auto_dereference values.""" | ||||
|         for field in self.deref_fields: | ||||
|             self.cls._fields[field]._auto_dereference = True | ||||
|         return self.cls | ||||
| @@ -139,7 +135,6 @@ class no_sub_classes(object): | ||||
|  | ||||
|         with no_sub_classes(Group) as Group: | ||||
|             Group.objects.find() | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, cls): | ||||
| @@ -150,24 +145,18 @@ class no_sub_classes(object): | ||||
|         self.cls = cls | ||||
|  | ||||
|     def __enter__(self): | ||||
|         """ change the objects default and _auto_dereference values""" | ||||
|         """Change the objects default and _auto_dereference values.""" | ||||
|         self.cls._all_subclasses = self.cls._subclasses | ||||
|         self.cls._subclasses = (self.cls,) | ||||
|         return self.cls | ||||
|  | ||||
|     def __exit__(self, t, value, traceback): | ||||
|         """ Reset the default and _auto_dereference values""" | ||||
|         """Reset the default and _auto_dereference values.""" | ||||
|         self.cls._subclasses = self.cls._all_subclasses | ||||
|         delattr(self.cls, '_all_subclasses') | ||||
|         return self.cls | ||||
|  | ||||
|  | ||||
class QuerySetNoDeRef(QuerySet):
    """Special no_dereference QuerySet that returns items unchanged
    instead of dereferencing them."""

    # NOTE(review): kept without `self` to preserve the original (name-mangled)
    # signature exactly; only the over-indented return was normalized.
    def __dereference(items, max_depth=1, instance=None, name=None):
        return items
|  | ||||
|  | ||||
| class query_counter(object): | ||||
|     """Query_counter context manager to get the number of queries.""" | ||||
|  | ||||
| @@ -222,7 +211,7 @@ class query_counter(object): | ||||
|  | ||||
|     def _get_count(self): | ||||
|         """Get the number of queries.""" | ||||
|         ignore_query = {"ns": {"$ne": "%s.system.indexes" % self.db.name}} | ||||
|         ignore_query = {'ns': {'$ne': '%s.system.indexes' % self.db.name}} | ||||
|         count = self.db.system.profile.find(ignore_query).count() - self.counter | ||||
|         self.counter += 1 | ||||
|         return count | ||||
|   | ||||
| @@ -1,18 +1,20 @@ | ||||
| from collections import OrderedDict | ||||
| from bson import DBRef, SON | ||||
| import six | ||||
|  | ||||
| from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document) | ||||
| from fields import (ReferenceField, ListField, DictField, MapField) | ||||
| from connection import get_db | ||||
| from queryset import QuerySet | ||||
| from document import Document, EmbeddedDocument | ||||
| from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList, | ||||
|                               TopLevelDocumentMetaclass, get_document) | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.document import Document, EmbeddedDocument | ||||
| from mongoengine.fields import DictField, ListField, MapField, ReferenceField | ||||
| from mongoengine.queryset import QuerySet | ||||
|  | ||||
|  | ||||
| class DeReference(object): | ||||
|  | ||||
|     def __call__(self, items, max_depth=1, instance=None, name=None): | ||||
|         """ | ||||
|         Cheaply dereferences the items to a set depth. | ||||
|         Also handles the convertion of complex data types. | ||||
|         Also handles the conversion of complex data types. | ||||
|  | ||||
|         :param items: The iterable (dict, list, queryset) to be dereferenced. | ||||
|         :param max_depth: The maximum depth to recurse to | ||||
| @@ -22,7 +24,7 @@ class DeReference(object): | ||||
|             :class:`~mongoengine.base.ComplexBaseField` | ||||
|         :param get: A boolean determining if being called by __get__ | ||||
|         """ | ||||
|         if items is None or isinstance(items, basestring): | ||||
|         if items is None or isinstance(items, six.string_types): | ||||
|             return items | ||||
|  | ||||
|         # cheapest way to convert a queryset to a list | ||||
| @@ -36,7 +38,7 @@ class DeReference(object): | ||||
|         if instance and isinstance(instance, (Document, EmbeddedDocument, | ||||
|                                               TopLevelDocumentMetaclass)): | ||||
|             doc_type = instance._fields.get(name) | ||||
|             if hasattr(doc_type, 'field'): | ||||
|             while hasattr(doc_type, 'field'): | ||||
|                 doc_type = doc_type.field | ||||
|  | ||||
|             if isinstance(doc_type, ReferenceField): | ||||
| @@ -46,20 +48,30 @@ class DeReference(object): | ||||
|  | ||||
|                 if is_list and all([i.__class__ == doc_type for i in items]): | ||||
|                     return items | ||||
|                 elif not is_list and all([i.__class__ == doc_type | ||||
|                                          for i in items.values()]): | ||||
|                 elif not is_list and all( | ||||
|                         [i.__class__ == doc_type for i in items.values()]): | ||||
|                     return items | ||||
|                 elif not field.dbref: | ||||
|                     if not hasattr(items, 'items'): | ||||
|                         items = [field.to_python(v) | ||||
|                              if not isinstance(v, (DBRef, Document)) else v | ||||
|                              for v in items] | ||||
|  | ||||
|                         def _get_items(items): | ||||
|                             new_items = [] | ||||
|                             for v in items: | ||||
|                                 if isinstance(v, list): | ||||
|                                     new_items.append(_get_items(v)) | ||||
|                                 elif not isinstance(v, (DBRef, Document)): | ||||
|                                     new_items.append(field.to_python(v)) | ||||
|                                 else: | ||||
|                         items = dict([ | ||||
|                             (k, field.to_python(v)) | ||||
|                             if not isinstance(v, (DBRef, Document)) else (k, v) | ||||
|                             for k, v in items.iteritems()] | ||||
|                         ) | ||||
|                                     new_items.append(v) | ||||
|                             return new_items | ||||
|  | ||||
|                         items = _get_items(items) | ||||
|                     else: | ||||
|                         items = { | ||||
|                             k: (v if isinstance(v, (DBRef, Document)) | ||||
|                                 else field.to_python(v)) | ||||
|                             for k, v in items.iteritems() | ||||
|                         } | ||||
|  | ||||
|         self.reference_map = self._find_references(items) | ||||
|         self.object_map = self._fetch_objects(doc_type=doc_type) | ||||
| @@ -77,36 +89,36 @@ class DeReference(object): | ||||
|             return reference_map | ||||
|  | ||||
|         # Determine the iterator to use | ||||
|         if not hasattr(items, 'items'): | ||||
|             iterator = enumerate(items) | ||||
|         if isinstance(items, dict): | ||||
|             iterator = items.values() | ||||
|         else: | ||||
|             iterator = items.iteritems() | ||||
|             iterator = items | ||||
|  | ||||
|         # Recursively find dbreferences | ||||
|         depth += 1 | ||||
|         for k, item in iterator: | ||||
|             if isinstance(item, Document): | ||||
|         for item in iterator: | ||||
|             if isinstance(item, (Document, EmbeddedDocument)): | ||||
|                 for field_name, field in item._fields.iteritems(): | ||||
|                     v = item._data.get(field_name, None) | ||||
|                     if isinstance(v, (DBRef)): | ||||
|                         reference_map.setdefault(field.document_type, []).append(v.id) | ||||
|                     if isinstance(v, DBRef): | ||||
|                         reference_map.setdefault(field.document_type, set()).add(v.id) | ||||
|                     elif isinstance(v, (dict, SON)) and '_ref' in v: | ||||
|                         reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id) | ||||
|                         reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id) | ||||
|                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||
|                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None) | ||||
|                         references = self._find_references(v, depth) | ||||
|                         for key, refs in references.iteritems(): | ||||
|                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): | ||||
|                                 key = field_cls | ||||
|                             reference_map.setdefault(key, []).extend(refs) | ||||
|             elif isinstance(item, (DBRef)): | ||||
|                 reference_map.setdefault(item.collection, []).append(item.id) | ||||
|                             reference_map.setdefault(key, set()).update(refs) | ||||
|             elif isinstance(item, DBRef): | ||||
|                 reference_map.setdefault(item.collection, set()).add(item.id) | ||||
|             elif isinstance(item, (dict, SON)) and '_ref' in item: | ||||
|                 reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id) | ||||
|                 reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id) | ||||
|             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: | ||||
|                 references = self._find_references(item, depth - 1) | ||||
|                 for key, refs in references.iteritems(): | ||||
|                     reference_map.setdefault(key, []).extend(refs) | ||||
|                     reference_map.setdefault(key, set()).update(refs) | ||||
|  | ||||
|         return reference_map | ||||
|  | ||||
| @@ -114,34 +126,38 @@ class DeReference(object): | ||||
|         """Fetch all references and convert to their document objects | ||||
|         """ | ||||
|         object_map = {} | ||||
|         for col, dbrefs in self.reference_map.iteritems(): | ||||
|             keys = object_map.keys() | ||||
|             refs = list(set([dbref for dbref in dbrefs if unicode(dbref).encode('utf-8') not in keys])) | ||||
|             if hasattr(col, 'objects'):  # We have a document class for the refs | ||||
|                 references = col.objects.in_bulk(refs) | ||||
|         for collection, dbrefs in self.reference_map.iteritems(): | ||||
|             if hasattr(collection, 'objects'):  # We have a document class for the refs | ||||
|                 col_name = collection._get_collection_name() | ||||
|                 refs = [dbref for dbref in dbrefs | ||||
|                         if (col_name, dbref) not in object_map] | ||||
|                 references = collection.objects.in_bulk(refs) | ||||
|                 for key, doc in references.iteritems(): | ||||
|                     object_map[key] = doc | ||||
|                     object_map[(col_name, key)] = doc | ||||
|             else:  # Generic reference: use the refs data to convert to document | ||||
|                 if isinstance(doc_type, (ListField, DictField, MapField,)): | ||||
|                     continue | ||||
|  | ||||
|                 refs = [dbref for dbref in dbrefs | ||||
|                         if (collection, dbref) not in object_map] | ||||
|  | ||||
|                 if doc_type: | ||||
|                     references = doc_type._get_db()[col].find({'_id': {'$in': refs}}) | ||||
|                     references = doc_type._get_db()[collection].find({'_id': {'$in': refs}}) | ||||
|                     for ref in references: | ||||
|                         doc = doc_type._from_son(ref) | ||||
|                         object_map[doc.id] = doc | ||||
|                         object_map[(collection, doc.id)] = doc | ||||
|                 else: | ||||
|                     references = get_db()[col].find({'_id': {'$in': refs}}) | ||||
|                     references = get_db()[collection].find({'_id': {'$in': refs}}) | ||||
|                     for ref in references: | ||||
|                         if '_cls' in ref: | ||||
|                             doc = get_document(ref["_cls"])._from_son(ref) | ||||
|                             doc = get_document(ref['_cls'])._from_son(ref) | ||||
|                         elif doc_type is None: | ||||
|                             doc = get_document( | ||||
|                                 ''.join(x.capitalize() | ||||
|                                     for x in col.split('_')))._from_son(ref) | ||||
|                                         for x in collection.split('_')))._from_son(ref) | ||||
|                         else: | ||||
|                             doc = doc_type._from_son(ref) | ||||
|                         object_map[doc.id] = doc | ||||
|                         object_map[(collection, doc.id)] = doc | ||||
|         return object_map | ||||
|  | ||||
|     def _attach_objects(self, items, depth=0, instance=None, name=None): | ||||
| @@ -167,17 +183,29 @@ class DeReference(object): | ||||
|  | ||||
|         if isinstance(items, (dict, SON)): | ||||
|             if '_ref' in items: | ||||
|                 return self.object_map.get(items['_ref'].id, items) | ||||
|                 return self.object_map.get( | ||||
|                     (items['_ref'].collection, items['_ref'].id), items) | ||||
|             elif '_cls' in items: | ||||
|                 doc = get_document(items['_cls'])._from_son(items) | ||||
|                 _cls = doc._data.pop('_cls', None) | ||||
|                 del items['_cls'] | ||||
|                 doc._data = self._attach_objects(doc._data, depth, doc, None) | ||||
|                 if _cls is not None: | ||||
|                     doc._data['_cls'] = _cls | ||||
|                 return doc | ||||
|  | ||||
|         if not hasattr(items, 'items'): | ||||
|             is_list = True | ||||
|             list_type = BaseList | ||||
|             if isinstance(items, EmbeddedDocumentList): | ||||
|                 list_type = EmbeddedDocumentList | ||||
|             as_tuple = isinstance(items, tuple) | ||||
|             iterator = enumerate(items) | ||||
|             data = [] | ||||
|         elif isinstance(items, OrderedDict): | ||||
|             is_list = False | ||||
|             iterator = items.iteritems() | ||||
|             data = OrderedDict() | ||||
|         else: | ||||
|             is_list = False | ||||
|             iterator = items.iteritems() | ||||
| @@ -192,25 +220,27 @@ class DeReference(object): | ||||
|  | ||||
|             if k in self.object_map and not is_list: | ||||
|                 data[k] = self.object_map[k] | ||||
|             elif isinstance(v, Document): | ||||
|                 for field_name, field in v._fields.iteritems(): | ||||
|             elif isinstance(v, (Document, EmbeddedDocument)): | ||||
|                 for field_name in v._fields: | ||||
|                     v = data[k]._data.get(field_name, None) | ||||
|                     if isinstance(v, (DBRef)): | ||||
|                         data[k]._data[field_name] = self.object_map.get(v.id, v) | ||||
|                     if isinstance(v, DBRef): | ||||
|                         data[k]._data[field_name] = self.object_map.get( | ||||
|                             (v.collection, v.id), v) | ||||
|                     elif isinstance(v, (dict, SON)) and '_ref' in v: | ||||
|                         data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v) | ||||
|                     elif isinstance(v, dict) and depth <= self.max_depth: | ||||
|                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) | ||||
|                     elif isinstance(v, (list, tuple)) and depth <= self.max_depth: | ||||
|                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) | ||||
|                         data[k]._data[field_name] = self.object_map.get( | ||||
|                             (v['_ref'].collection, v['_ref'].id), v) | ||||
|                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||
|                 data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name) | ||||
|                         item_name = six.text_type('{0}.{1}.{2}').format(name, k, field_name) | ||||
|                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name) | ||||
|             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||
|                 item_name = '%s.%s' % (name, k) if name else name | ||||
|                 data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name) | ||||
|             elif hasattr(v, 'id'): | ||||
|                 data[k] = self.object_map.get(v.id, v) | ||||
|                 data[k] = self.object_map.get((v.collection, v.id), v) | ||||
|  | ||||
|         if instance and name: | ||||
|             if is_list: | ||||
|                 return tuple(data) if as_tuple else BaseList(data, instance, name) | ||||
|                 return tuple(data) if as_tuple else list_type(data, instance, name) | ||||
|             return BaseDict(data, instance, name) | ||||
|         depth += 1 | ||||
|         return data | ||||
|   | ||||
| @@ -1,402 +0,0 @@ | ||||
| from mongoengine import * | ||||
|  | ||||
| from django.utils.encoding import smart_str | ||||
| from django.contrib.auth.models import _user_has_perm, _user_get_all_permissions, _user_has_module_perms | ||||
| from django.db import models | ||||
| from django.contrib.contenttypes.models import ContentTypeManager | ||||
| from django.contrib import auth | ||||
| from django.contrib.auth.models import AnonymousUser | ||||
| from django.utils.translation import ugettext_lazy as _ | ||||
|  | ||||
try:
    from django.contrib.auth.hashers import check_password, make_password
except ImportError:
    """Handle older versions of Django"""
    # Django < 1.4 has no django.contrib.auth.hashers; fall back to the
    # legacy md5/sha1 helpers built on django.utils.hashcompat.
    from django.utils.hashcompat import md5_constructor, sha_constructor

    def get_hexdigest(algorithm, salt, raw_password):
        """Return the hex digest of ``salt + raw_password``.

        Only 'md5' and 'sha1' are supported; any other algorithm name
        raises ValueError.
        """
        raw_password, salt = smart_str(raw_password), smart_str(salt)
        if algorithm == 'md5':
            return md5_constructor(salt + raw_password).hexdigest()
        elif algorithm == 'sha1':
            return sha_constructor(salt + raw_password).hexdigest()
        raise ValueError('Got unknown password algorithm type in password')

    def check_password(raw_password, password):
        """Compare ``raw_password`` against a stored 'algo$salt$hash' string."""
        algo, salt, hash = password.split('$')
        return hash == get_hexdigest(algo, salt, raw_password)

    def make_password(raw_password):
        """Hash ``raw_password`` into an 'algo$salt$hash' string (sha1).

        NOTE(review): ``random.random()`` is not a cryptographically strong
        source for the salt; acceptable only as a legacy-Django fallback.
        """
        from random import random
        algo = 'sha1'
        salt = get_hexdigest(algo, str(random()), str(random()))[:5]
        hash = get_hexdigest(algo, salt, raw_password)
        return '%s$%s$%s' % (algo, salt, hash)
|  | ||||
| from .utils import datetime_now | ||||
|  | ||||
| REDIRECT_FIELD_NAME = 'next' | ||||
|  | ||||
|  | ||||
class ContentType(Document):
    """MongoEngine port of django.contrib.contenttypes' ContentType.

    Stores the app label and model name of a Django model so other
    documents (e.g. Permission) can reference model classes generically.
    """
    name = StringField(max_length=100)
    app_label = StringField(max_length=100)
    # Lower-cased model class name; unique together with app_label.
    model = StringField(max_length=100, verbose_name=_('python model class name'),
                        unique_with='app_label')
    objects = ContentTypeManager()

    class Meta:
        verbose_name = _('content type')
        verbose_name_plural = _('content types')
        # db_table = 'django_content_type'
        # ordering = ('name',)
        # unique_together = (('app_label', 'model'),)

    def __unicode__(self):
        return self.name

    def model_class(self):
        "Returns the Python model class for this type of content."
        # NOTE(review): models.get_model is an old Django API (removed in 1.9);
        # presumably fine for the Django versions this module targets.
        from django.db import models
        return models.get_model(self.app_label, self.model)

    def get_object_for_this_type(self, **kwargs):
        """
        Returns an object of this type for the keyword arguments given.
        Basically, this is a proxy around this object_type's get_object() model
        method. The ObjectNotExist exception, if thrown, will not be caught,
        so code that calls this method should catch it.
        """
        return self.model_class()._default_manager.using(self._state.db).get(**kwargs)

    def natural_key(self):
        # (app_label, model) uniquely identifies a content type.
        return (self.app_label, self.model)
|  | ||||
|  | ||||
class SiteProfileNotAvailable(Exception):
    """Raised by User.get_profile() when no usable profile model is configured."""
    pass
|  | ||||
|  | ||||
class PermissionManager(models.Manager):
    """Manager providing natural-key lookup for Permission documents."""

    def get_by_natural_key(self, codename, app_label, model):
        """Fetch the Permission identified by (codename, app_label, model)."""
        content_type = ContentType.objects.get_by_natural_key(app_label, model)
        return self.get(codename=codename, content_type=content_type)
|  | ||||
|  | ||||
class Permission(Document):
    """The permissions system provides a way to assign permissions to specific
    users and groups of users.

    The permission system is used by the Django admin site, but may also be
    useful in your own code. The Django admin site uses permissions as follows:

        - The "add" permission limits the user's ability to view the "add"
          form and add an object.
        - The "change" permission limits a user's ability to view the change
          list, view the "change" form and change an object.
        - The "delete" permission limits the ability to delete an object.

    Permissions are set globally per type of object, not per specific object
    instance. It is possible to say "Mary may change news stories," but it's
    not currently possible to say "Mary may change news stories, but only the
    ones she created herself" or "Mary may only change news stories that have
    a certain status or publication date."

    Three basic permissions -- add, change and delete -- are automatically
    created for each Django model.
    """
    # NOTE(review): verbose_name=_('username') looks like a copy/paste slip --
    # this is the permission's display name; confirm it shouldn't be _('name').
    name = StringField(max_length=50, verbose_name=_('username'))
    content_type = ReferenceField(ContentType)
    codename = StringField(max_length=100, verbose_name=_('codename'))
        # FIXME: don't access field of the other class
        # unique_with=['content_type__app_label', 'content_type__model'])

    objects = PermissionManager()

    class Meta:
        verbose_name = _('permission')
        verbose_name_plural = _('permissions')
        # unique_together = (('content_type', 'codename'),)
        # ordering = ('content_type__app_label', 'content_type__model', 'codename')

    def __unicode__(self):
        return u"%s | %s | %s" % (
            unicode(self.content_type.app_label),
            unicode(self.content_type),
            unicode(self.name))

    def natural_key(self):
        # (codename, app_label, model) -- mirrors PermissionManager.get_by_natural_key.
        return (self.codename,) + self.content_type.natural_key()
    natural_key.dependencies = ['contenttypes.contenttype']
|  | ||||
|  | ||||
class Group(Document):
    """Groups are a generic way of categorizing users to apply permissions,
    or some other label, to those users. A user can belong to any number of
    groups.

    A user in a group automatically has all the permissions granted to that
    group. For example, if the group Site editors has the permission
    can_edit_home_page, any user in that group will have that permission.

    Beyond permissions, groups are a convenient way to categorize users to
    apply some label, or extended functionality, to them. For example, you
    could create a group 'Special users', and you could write code that would
    do special things to those users -- such as giving them access to a
    members-only portion of your site, or sending them members-only
    e-mail messages.
    """
    name = StringField(max_length=80, unique=True, verbose_name=_('name'))
    # References to the Permission documents granted to every group member.
    permissions = ListField(ReferenceField(Permission, verbose_name=_('permissions'), required=False))

    class Meta:
        verbose_name = _('group')
        verbose_name_plural = _('groups')

    def __unicode__(self):
        return self.name
|  | ||||
|  | ||||
class UserManager(models.Manager):
    """Manager mirroring django.contrib.auth's UserManager for User documents."""

    def create_user(self, username, email, password=None):
        """
        Creates and saves a User with the given username, e-mail and password.
        """
        timestamp = datetime_now()

        # Normalize the address by lower-casing the domain half; an address
        # without an '@' is kept exactly as given.
        try:
            local_part, domain = email.strip().split('@', 1)
        except ValueError:
            pass
        else:
            email = '@'.join([local_part, domain.lower()])

        new_user = self.model(username=username, email=email, is_staff=False,
                              is_active=True, is_superuser=False,
                              last_login=timestamp, date_joined=timestamp)
        new_user.set_password(password)
        new_user.save(using=self._db)
        return new_user

    def create_superuser(self, username, email, password):
        """Create and save a User with staff/superuser flags switched on."""
        superuser = self.create_user(username, email, password)
        superuser.is_staff = True
        superuser.is_active = True
        superuser.is_superuser = True
        superuser.save(using=self._db)
        return superuser

    def make_random_password(self, length=10, allowed_chars='abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'):
        """Generate a random password of ``length`` drawn from ``allowed_chars``.

        The default alphabet omits "I" and look-alike characters (l, O, 0, 1)
        to avoid confusion.
        """
        from random import choice
        return ''.join(choice(allowed_chars) for _ in range(length))
|  | ||||
|  | ||||
class User(Document):
    """A User document that aims to mirror most of the API specified by Django
    at http://docs.djangoproject.com/en/dev/topics/auth/#users
    """
    username = StringField(max_length=30, required=True,
                           verbose_name=_('username'),
                           help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters"))

    first_name = StringField(max_length=30,
                             verbose_name=_('first name'))

    last_name = StringField(max_length=30,
                            verbose_name=_('last name'))
    email = EmailField(verbose_name=_('e-mail address'))
    password = StringField(max_length=128,
                           verbose_name=_('password'),
                           help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
    is_staff = BooleanField(default=False,
                            verbose_name=_('staff status'),
                            help_text=_("Designates whether the user can log into this admin site."))
    is_active = BooleanField(default=True,
                             verbose_name=_('active'),
                             help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts."))
    is_superuser = BooleanField(default=False,
                                verbose_name=_('superuser status'),
                                help_text=_("Designates that this user has all permissions without explicitly assigning them."))
    last_login = DateTimeField(default=datetime_now,
                               verbose_name=_('last login'))
    date_joined = DateTimeField(default=datetime_now,
                                verbose_name=_('date joined'))

    # Django >= 1.5 custom-user-model protocol.
    USERNAME_FIELD = 'username'
    REQUIRED_FIELDS = ['email']

    meta = {
        'allow_inheritance': True,
        'indexes': [
            # sparse so documents lacking a username don't collide on null.
            {'fields': ['username'], 'unique': True, 'sparse': True}
        ]
    }

    def __unicode__(self):
        return self.username

    def get_full_name(self):
        """Returns the users first and last names, separated by a space.
        """
        full_name = u'%s %s' % (self.first_name or '', self.last_name or '')
        return full_name.strip()

    def is_anonymous(self):
        """Always False -- a stored User is never anonymous."""
        return False

    def is_authenticated(self):
        """Always True -- an instance represents an authenticated user."""
        return True

    def set_password(self, raw_password):
        """Sets the user's password - always use this rather than directly
        assigning to :attr:`~mongoengine.django.auth.User.password` as the
        password is hashed before storage.

        Note: also saves the document immediately.
        """
        self.password = make_password(raw_password)
        self.save()
        return self

    def check_password(self, raw_password):
        """Checks the user's password against a provided password - always use
        this rather than directly comparing to
        :attr:`~mongoengine.django.auth.User.password` as the password is
        hashed before storage.
        """
        return check_password(raw_password, self.password)

    @classmethod
    def create_user(cls, username, password, email=None):
        """Create (and save) a new user with the given username, password and
        email address.
        """
        now = datetime_now()

        # Normalize the address by lowercasing the domain part of the email
        # address.
        if email is not None:
            try:
                email_name, domain_part = email.strip().split('@', 1)
            except ValueError:
                pass
            else:
                email = '@'.join([email_name, domain_part.lower()])

        user = cls(username=username, email=email, date_joined=now)
        user.set_password(password)
        user.save()
        return user

    def get_group_permissions(self, obj=None):
        """
        Returns a list of permission strings that this user has through his/her
        groups. This method queries all available auth backends. If an object
        is passed in, only permissions matching this object are returned.
        """
        permissions = set()
        for backend in auth.get_backends():
            if hasattr(backend, "get_group_permissions"):
                permissions.update(backend.get_group_permissions(self, obj))
        return permissions

    def get_all_permissions(self, obj=None):
        """Returns the set of permission strings the user has, via all backends."""
        return _user_get_all_permissions(self, obj)

    def has_perm(self, perm, obj=None):
        """
        Returns True if the user has the specified permission. This method
        queries all available auth backends, but returns immediately if any
        backend returns True. Thus, a user who has permission from a single
        auth backend is assumed to have permission in general. If an object is
        provided, permissions for this specific object are checked.
        """

        # Active superusers have all permissions.
        if self.is_active and self.is_superuser:
            return True

        # Otherwise we need to check the backends.
        return _user_has_perm(self, perm, obj)

    def has_module_perms(self, app_label):
        """
        Returns True if the user has any permissions in the given app label.
        Uses pretty much the same logic as has_perm, above.
        """
        # Active superusers have all permissions.
        if self.is_active and self.is_superuser:
            return True

        return _user_has_module_perms(self, app_label)

    def email_user(self, subject, message, from_email=None):
        "Sends an e-mail to this User."
        from django.core.mail import send_mail
        send_mail(subject, message, from_email, [self.email])

    def get_profile(self):
        """
        Returns site-specific profile for this user. Raises
        SiteProfileNotAvailable if this site does not allow profiles.
        """
        if not hasattr(self, '_profile_cache'):
            from django.conf import settings
            # BUGFIX: ImproperlyConfigured is referenced in the except clause
            # below but was never imported anywhere in this module, so the
            # handler raised NameError instead. Import it locally, matching
            # this file's style of function-scope Django imports.
            from django.core.exceptions import ImproperlyConfigured
            if not getattr(settings, 'AUTH_PROFILE_MODULE', False):
                raise SiteProfileNotAvailable('You need to set AUTH_PROFILE_MO'
                                              'DULE in your project settings')
            try:
                app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.')
            except ValueError:
                raise SiteProfileNotAvailable('app_label and model_name should'
                        ' be separated by a dot in the AUTH_PROFILE_MODULE set'
                        'ting')

            try:
                model = models.get_model(app_label, model_name)
                if model is None:
                    raise SiteProfileNotAvailable('Unable to load the profile '
                        'model, check AUTH_PROFILE_MODULE in your project sett'
                        'ings')
                self._profile_cache = model._default_manager.using(self._state.db).get(user__id__exact=self.id)
                self._profile_cache.user = self
            except (ImportError, ImproperlyConfigured):
                raise SiteProfileNotAvailable
        return self._profile_cache
|  | ||||
|  | ||||
class MongoEngineBackend(object):
    """Authenticate using MongoEngine and mongoengine.django.auth.User.
    """

    supports_object_permissions = False
    supports_anonymous_user = False
    supports_inactive_user = False

    def authenticate(self, username=None, password=None):
        """Return the User matching the credentials, or None on failure."""
        user = User.objects(username=username).first()
        if user is None:
            return None
        if not password or not user.check_password(password):
            return None
        # Annotate the user with the dotted path of the first configured
        # backend, as django.contrib.auth expects after authentication.
        first_backend = auth.get_backends()[0]
        user.backend = "%s.%s" % (first_backend.__module__,
                                  first_backend.__class__.__name__)
        return user

    def get_user(self, user_id):
        """Fetch a User by primary key; None when no such user exists."""
        return User.objects.with_id(user_id)
|  | ||||
|  | ||||
def get_user(userid):
    """Return the User for ``userid`` (User.id), or an AnonymousUser.

    Django's equivalent takes a request; taking an id instead leaves it up
    to the developer to store the id in any way they want (session, signed
    cookie, etc.).
    """
    user = MongoEngineBackend().get_user(userid) if userid else None
    return user or AnonymousUser()
| @@ -1,107 +0,0 @@ | ||||
| from django.conf import settings | ||||
| from django.contrib.auth.models import UserManager | ||||
| from django.core.exceptions import ImproperlyConfigured | ||||
| from django.db import models | ||||
| from django.utils.importlib import import_module | ||||
| from django.utils.translation import ugettext_lazy as _ | ||||
|  | ||||
|  | ||||
| __all__ = ( | ||||
|     'get_user_document', | ||||
| ) | ||||
|  | ||||
|  | ||||
# Dotted path of the Document class used as the auth user model; overridable
# via settings.MONGOENGINE_USER_DOCUMENT.
MONGOENGINE_USER_DOCUMENT = getattr(
    settings, 'MONGOENGINE_USER_DOCUMENT', 'mongoengine.django.auth.User')
|  | ||||
|  | ||||
def get_user_document():
    """Get the user document class used for authentication.

    This is the class defined in settings.MONGOENGINE_USER_DOCUMENT, which
    defaults to `mongoengine.django.auth.User`.

    """
    dotted_path = MONGOENGINE_USER_DOCUMENT
    split_at = dotted_path.rindex('.')
    module_path = dotted_path[:split_at]
    class_name = dotted_path[split_at + 1:]
    return getattr(import_module(module_path), class_name)
|  | ||||
|  | ||||
class MongoUserManager(UserManager):
    """A User manager which allows the use of MongoEngine documents in Django.

    To use the manager, you must tell django.contrib.auth to use MongoUser as
    the user model. In your settings.py, you need:

        INSTALLED_APPS = (
            ...
            'django.contrib.auth',
            'mongoengine.django.mongo_auth',
            ...
        )
        AUTH_USER_MODEL = 'mongo_auth.MongoUser'

    Django will use the model object to access the custom Manager, which will
    replace the original queryset with MongoEngine querysets.

    By default, mongoengine.django.auth.User will be used to store users. You
    can specify another document class in MONGOENGINE_USER_DOCUMENT in your
    settings.py.

    The User Document class has the same requirements as a standard custom user
    model: https://docs.djangoproject.com/en/dev/topics/auth/customizing/

    In particular, the User Document class must define USERNAME_FIELD and
    REQUIRED_FIELDS.

    `AUTH_USER_MODEL` has been added in Django 1.5.

    """

    def contribute_to_class(self, model, name):
        """Bind to the Django model, then swap ``self.model`` for the
        MongoEngine user document and mirror its USERNAME_FIELD and
        REQUIRED_FIELDS onto the Django model so contrib.auth keeps working.
        """
        super(MongoUserManager, self).contribute_to_class(model, name)
        self.dj_model = self.model
        self.model = get_user_document()

        self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD
        username = models.CharField(_('username'), max_length=30, unique=True)
        username.contribute_to_class(self.dj_model, self.dj_model.USERNAME_FIELD)

        self.dj_model.REQUIRED_FIELDS = self.model.REQUIRED_FIELDS
        # BUGFIX: the loop variable previously shadowed the ``name`` parameter.
        for field_name in self.dj_model.REQUIRED_FIELDS:
            field = models.CharField(_(field_name), max_length=30)
            field.contribute_to_class(self.dj_model, field_name)

    def get(self, *args, **kwargs):
        """Proxy get() to the document queryset, translating DoesNotExist
        into the Django model's exception."""
        try:
            return self.get_query_set().get(*args, **kwargs)
        except self.model.DoesNotExist:
            # ModelBackend expects this exception
            raise self.dj_model.DoesNotExist

    @property
    def db(self):
        # A relational database alias is meaningless for MongoEngine storage.
        raise NotImplementedError

    def get_empty_query_set(self):
        """Return an empty MongoEngine queryset."""
        return self.model.objects.none()

    def get_query_set(self):
        """Return the user document's default queryset."""
        return self.model.objects
|  | ||||
|  | ||||
class MongoUser(models.Model):
    """Dummy user model for Django.

    MongoUser is used to replace Django's UserManager with MongoUserManager.
    The actual user document class is mongoengine.django.auth.User or any
    other document class specified in MONGOENGINE_USER_DOCUMENT.

    To get the user document class, use `get_user_document()`.

    """

    # All queries go through MongoUserManager to the MongoEngine document.
    objects = MongoUserManager()
| @@ -1,105 +0,0 @@ | ||||
| from django.conf import settings | ||||
| from django.contrib.sessions.backends.base import SessionBase, CreateError | ||||
| from django.core.exceptions import SuspiciousOperation | ||||
| try: | ||||
|     from django.utils.encoding import force_unicode | ||||
| except ImportError: | ||||
|     from django.utils.encoding import force_text as force_unicode | ||||
|  | ||||
| from mongoengine.document import Document | ||||
| from mongoengine import fields | ||||
| from mongoengine.queryset import OperationError | ||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME | ||||
|  | ||||
| from .utils import datetime_now | ||||
|  | ||||
|  | ||||
# a setting for the connection alias the session collection lives on
# (defaults to mongoengine's default connection)
MONGOENGINE_SESSION_DB_ALIAS = getattr(
    settings, 'MONGOENGINE_SESSION_DB_ALIAS',
    DEFAULT_CONNECTION_NAME)

# a setting for the name of the collection used to store sessions
MONGOENGINE_SESSION_COLLECTION = getattr(
    settings, 'MONGOENGINE_SESSION_COLLECTION',
    'django_session')

# a setting for whether session data is stored encoded or not
MONGOENGINE_SESSION_DATA_ENCODE = getattr(
    settings, 'MONGOENGINE_SESSION_DATA_ENCODE',
    True)
|  | ||||
|  | ||||
class MongoSession(Document):
    """Backing document for SessionStore: one document per Django session."""
    session_key = fields.StringField(primary_key=True, max_length=40)
    # Either Django's encoded string or a raw dict, chosen once at import
    # time by the MONGOENGINE_SESSION_DATA_ENCODE setting.
    session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE \
                                        else fields.DictField()
    expire_date = fields.DateTimeField()

    meta = {
        'collection': MONGOENGINE_SESSION_COLLECTION,
        'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
        'allow_inheritance': False,
        'indexes': [
            {
                # TTL index: MongoDB deletes documents once expire_date passes.
                'fields': ['expire_date'],
                'expireAfterSeconds': 0
            }
        ]
    }

    def get_decoded(self):
        # Decode through SessionStore so the encoding scheme lives in one place.
        return SessionStore().decode(self.session_data)
|  | ||||
|  | ||||
class SessionStore(SessionBase):
    """A MongoEngine-based session store for Django.
    """

    def load(self):
        """Return the session dict for ``self.session_key``; create a fresh
        session (and return {}) if the key is missing or expired."""
        try:
            # BUGFIX: datetime_now must be *called* -- previously the function
            # object itself was passed as the expire_date comparison value,
            # so unexpired sessions could never be matched correctly.
            s = MongoSession.objects(session_key=self.session_key,
                                     expire_date__gt=datetime_now())[0]
            if MONGOENGINE_SESSION_DATA_ENCODE:
                return self.decode(force_unicode(s.session_data))
            else:
                return s.session_data
        except (IndexError, SuspiciousOperation):
            self.create()
            return {}

    def exists(self, session_key):
        """Return True if a session with ``session_key`` is stored."""
        return bool(MongoSession.objects(session_key=session_key).first())

    def create(self):
        """Create a new empty session under a fresh, unused key."""
        while True:
            self._session_key = self._get_new_session_key()
            try:
                # Retry on key collision until an unused key is found.
                self.save(must_create=True)
            except CreateError:
                continue
            self.modified = True
            self._session_cache = {}
            return

    def save(self, must_create=False):
        """Persist the session document.

        Raises CreateError when ``must_create`` is True and the key is
        already taken (signalled by the underlying OperationError).
        """
        if self.session_key is None:
            self._session_key = self._get_new_session_key()
        s = MongoSession(session_key=self.session_key)
        if MONGOENGINE_SESSION_DATA_ENCODE:
            s.session_data = self.encode(self._get_session(no_load=must_create))
        else:
            s.session_data = self._get_session(no_load=must_create)
        s.expire_date = self.get_expiry_date()
        try:
            s.save(force_insert=must_create)
        except OperationError:
            if must_create:
                raise CreateError
            raise

    def delete(self, session_key=None):
        """Delete the given session (default: this store's own session)."""
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        MongoSession.objects(session_key=session_key).delete()
| @@ -1,47 +0,0 @@ | ||||
| from mongoengine.queryset import QuerySet | ||||
| from mongoengine.base import BaseDocument | ||||
| from mongoengine.errors import ValidationError | ||||
|  | ||||
def _get_queryset(cls):
    """Return ``cls`` itself if it is already a QuerySet, else its default
    ``objects`` queryset. Inspired by django.shortcuts.*"""
    return cls if isinstance(cls, QuerySet) else cls.objects
|  | ||||
def get_document_or_404(cls, *args, **kwargs):
    """Fetch a single document via get(), raising Http404 when none exists.

    ``cls`` may be a Document or QuerySet object; all other positional and
    keyword arguments are forwarded to get(). Like get(), this raises
    MultipleObjectsReturned when more than one object matches.

    Inspired by django.shortcuts.*
    """
    queryset = _get_queryset(cls)
    not_found = (queryset._document.DoesNotExist, ValidationError)
    try:
        return queryset.get(*args, **kwargs)
    except not_found:
        from django.http import Http404
        raise Http404('No %s matches the given query.' % queryset._document._class_name)
|  | ||||
def get_list_or_404(cls, *args, **kwargs):
    """Return the list produced by filter(), raising Http404 when empty.

    ``cls`` may be a Document or QuerySet object; all other positional and
    keyword arguments are forwarded to filter().

    Inspired by django.shortcuts.*
    """
    queryset = _get_queryset(cls)
    documents = list(queryset.filter(*args, **kwargs))
    if documents:
        return documents
    from django.http import Http404
    raise Http404('No %s matches the given query.' % queryset._document._class_name)
| @@ -1,112 +0,0 @@ | ||||
| import os | ||||
| import itertools | ||||
| import urlparse | ||||
|  | ||||
| from mongoengine import * | ||||
| from django.conf import settings | ||||
| from django.core.files.storage import Storage | ||||
| from django.core.exceptions import ImproperlyConfigured | ||||
|  | ||||
|  | ||||
class FileDocument(Document):
    """A document used to store a single file in GridFS.
    """
    # GridFS-backed file contents; GridFSStorage reads/writes this field.
    file = FileField()
|  | ||||
|  | ||||
class GridFSStorage(Storage):
    """A custom Django storage backend that stores files in MongoDB GridFS.

    Each stored file is wrapped in a :class:`FileDocument`, which holds
    exactly one file in its ``file`` field.
    """

    def __init__(self, base_url=None):
        """
        :param base_url: URL prefix used by :meth:`url` to build absolute
            URLs. Defaults to ``settings.MEDIA_URL``.
        """
        if base_url is None:
            base_url = settings.MEDIA_URL
        self.base_url = base_url
        self.document = FileDocument
        self.field = 'file'

    def delete(self, name):
        """Deletes the specified file from the storage system.

        Does nothing if no file with that name exists.
        """
        # BUGFIX: previously this grabbed ``objects.first()`` (an arbitrary
        # document) and deleted *its* GridFS blob, while removing the
        # document that actually matched ``name``. Both deletions must
        # target the document found by name.
        doc = self._get_doc_with_name(name)
        if doc is not None:
            field = getattr(doc, self.field)
            field.delete()  # Delete the file from GridFS
            doc.delete()    # Delete the wrapping FileDocument

    def exists(self, name):
        """Returns True if a file referenced by the given name already exists
        in the storage system, or False if the name is available for a new
        file.
        """
        doc = self._get_doc_with_name(name)
        if doc is None:
            return False
        return bool(getattr(doc, self.field).name)

    def listdir(self, path=None):
        """Lists the contents of the specified path, returning a 2-tuple of
        lists; the first item being directories, the second item being files.

        GridFS has no directory hierarchy, so the directory list is always
        empty.
        """
        def name(doc):
            return getattr(doc, self.field).name
        docs = self.document.objects
        return [], [name(d) for d in docs if name(d)]

    def size(self, name):
        """Returns the total size, in bytes, of the file specified by name.

        :raises ValueError: if no file with that name exists.
        """
        doc = self._get_doc_with_name(name)
        if doc is None:
            raise ValueError("No such file or directory: '%s'" % name)
        return getattr(doc, self.field).length

    def url(self, name):
        """Returns an absolute URL where the file's contents can be accessed
        directly by a web browser.

        :raises ValueError: if no base URL is configured.
        """
        if self.base_url is None:
            raise ValueError("This file is not accessible via a URL.")
        # Backslashes can sneak in from Windows-style paths; URLs need '/'.
        return urlparse.urljoin(self.base_url, name).replace('\\', '/')

    def _get_doc_with_name(self, name):
        """Find the document in the store with the given file name, or None.

        NOTE(review): this iterates every document client-side; fine for
        small stores, but a server-side filter would scale better.
        """
        docs = self.document.objects
        doc = [d for d in docs if getattr(d, self.field).name == name]
        if doc:
            return doc[0]
        else:
            return None

    def _open(self, name, mode='rb'):
        """Return the GridFS proxy for the named file.

        :raises ValueError: if no file with that name exists.
        """
        doc = self._get_doc_with_name(name)
        if doc is None:
            raise ValueError("No file found with the name '%s'." % name)
        return getattr(doc, self.field)

    def get_available_name(self, name):
        """Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        """
        file_root, file_ext = os.path.splitext(name)
        # If the filename already exists, add an underscore and a number
        # (before the file extension, if one exists) to the filename until
        # the generated filename doesn't exist.
        count = itertools.count(1)
        while self.exists(name):
            # file_ext includes the dot. (The previous single-argument
            # os.path.join() call here was a no-op and has been removed;
            # next(count) replaces Py2-only count.next().)
            name = '%s_%s%s' % (file_root, next(count), file_ext)

        return name

    def _save(self, name, content):
        """Store ``content`` in GridFS under ``name`` and return the name."""
        doc = self.document()
        getattr(doc, self.field).put(content, filename=name)
        doc.save()

        return name
| @@ -1,39 +0,0 @@ | ||||
| #coding: utf-8 | ||||
| from nose.plugins.skip import SkipTest | ||||
|  | ||||
| from mongoengine.python_support import PY3 | ||||
| from mongoengine import connect | ||||
|  | ||||
# Django is an optional dependency here: under Python 3 (where this Django
# integration is unsupported) fall back to the stdlib TestCase and a stub
# settings object so the module still imports; otherwise re-raise.
try:
    from django.test import TestCase
    from django.conf import settings
except Exception as err:
    if PY3:
        from unittest import TestCase
        # Dummy stand-in so module-level settings access doesn't error.
        class settings:
            MONGO_DATABASE_NAME = 'dummy'
    else:
        raise err
|  | ||||
|  | ||||
class MongoTestCase(TestCase):
    """TestCase class that clears the collections between the tests.

    Connects to a dedicated test database on construction and drops every
    non-system collection after each test runs.
    """

    # Name of the throwaway test database.
    db_name = 'test_%s' % settings.MONGO_DATABASE_NAME

    def __init__(self, methodName='runTest'):
        # BUGFIX: the default must be 'runTest' (capital T) to match
        # unittest.TestCase; the previous 'runtest' broke no-argument
        # instantiation with "no such test method" errors.
        self.db = connect(self.db_name).get_db()
        super(MongoTestCase, self).__init__(methodName)

    def setUp(self):
        if PY3:
            raise SkipTest('django does not have Python 3 support')

    def _post_teardown(self):
        super(MongoTestCase, self)._post_teardown()
        # Drop all data between tests, but leave MongoDB's internal
        # 'system.indexes' collection alone.
        for collection in self.db.collection_names():
            if collection == 'system.indexes':
                continue
            self.db.drop_collection(collection)
| @@ -1,6 +0,0 @@ | ||||
try:
    # django >= 1.4 provides its own now() (may be timezone-aware,
    # depending on project settings).
    from django.utils.timezone import now as datetime_now
except ImportError:
    # Older Django: fall back to the naive stdlib datetime.now.
    from datetime import datetime
    datetime_now = datetime.now
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,11 +1,11 @@ | ||||
| from collections import defaultdict | ||||
|  | ||||
| from mongoengine.python_support import txt_type | ||||
|  | ||||
| import six | ||||
|  | ||||
| __all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError', | ||||
|            'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError', | ||||
|            'OperationError', 'NotUniqueError', 'ValidationError') | ||||
|            'OperationError', 'NotUniqueError', 'FieldDoesNotExist', | ||||
|            'ValidationError', 'SaveConditionError') | ||||
|  | ||||
|  | ||||
| class NotRegistered(Exception): | ||||
| @@ -40,6 +40,21 @@ class NotUniqueError(OperationError): | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class SaveConditionError(OperationError): | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class FieldDoesNotExist(Exception): | ||||
|     """Raised when trying to set a field | ||||
|     not declared in a :class:`~mongoengine.Document` | ||||
|     or an :class:`~mongoengine.EmbeddedDocument`. | ||||
|  | ||||
|     To avoid this behavior on data loading, | ||||
|     you should set the :attr:`strict` to ``False`` | ||||
|     in the :attr:`meta` dictionary. | ||||
|     """ | ||||
|  | ||||
|  | ||||
| class ValidationError(AssertionError): | ||||
|     """Validation exception. | ||||
|  | ||||
| @@ -55,13 +70,13 @@ class ValidationError(AssertionError): | ||||
|     field_name = None | ||||
|     _message = None | ||||
|  | ||||
|     def __init__(self, message="", **kwargs): | ||||
|     def __init__(self, message='', **kwargs): | ||||
|         self.errors = kwargs.get('errors', {}) | ||||
|         self.field_name = kwargs.get('field_name') | ||||
|         self.message = message | ||||
|  | ||||
|     def __str__(self): | ||||
|         return txt_type(self.message) | ||||
|         return six.text_type(self.message) | ||||
|  | ||||
|     def __repr__(self): | ||||
|         return '%s(%s,)' % (self.__class__.__name__, self.message) | ||||
| @@ -95,16 +110,20 @@ class ValidationError(AssertionError): | ||||
|             errors_dict = {} | ||||
|             if not source: | ||||
|                 return errors_dict | ||||
|  | ||||
|             if isinstance(source, dict): | ||||
|                 for field_name, error in source.iteritems(): | ||||
|                     errors_dict[field_name] = build_dict(error) | ||||
|             elif isinstance(source, ValidationError) and source.errors: | ||||
|                 return build_dict(source.errors) | ||||
|             else: | ||||
|                 return unicode(source) | ||||
|                 return six.text_type(source) | ||||
|  | ||||
|             return errors_dict | ||||
|  | ||||
|         if not self.errors: | ||||
|             return {} | ||||
|  | ||||
|         return build_dict(self.errors) | ||||
|  | ||||
|     def _format_errors(self): | ||||
| @@ -113,14 +132,14 @@ class ValidationError(AssertionError): | ||||
|         def generate_key(value, prefix=''): | ||||
|             if isinstance(value, list): | ||||
|                 value = ' '.join([generate_key(k) for k in value]) | ||||
|             if isinstance(value, dict): | ||||
|             elif isinstance(value, dict): | ||||
|                 value = ' '.join( | ||||
|                     [generate_key(v, k) for k, v in value.iteritems()]) | ||||
|  | ||||
|             results = "%s.%s" % (prefix, value) if prefix else value | ||||
|             results = '%s.%s' % (prefix, value) if prefix else value | ||||
|             return results | ||||
|  | ||||
|         error_dict = defaultdict(list) | ||||
|         for k, v in self.to_dict().iteritems(): | ||||
|             error_dict[generate_key(v)].append(k) | ||||
|         return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()]) | ||||
|         return ' '.join(['%s: %s' % (k, v) for k, v in error_dict.iteritems()]) | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,61 +1,25 @@ | ||||
| """Helper functions and types to aid with Python 2.5 - 3 support.""" | ||||
| """ | ||||
| Helper functions, constants, and types to aid with Python v2.7 - v3.x and | ||||
| PyMongo v2.7 - v3.x support. | ||||
| """ | ||||
| import pymongo | ||||
| import six | ||||
|  | ||||
| import sys | ||||
|  | ||||
| PY3 = sys.version_info[0] == 3 | ||||
| PY25 = sys.version_info[:2] == (2, 5) | ||||
| UNICODE_KWARGS = int(''.join([str(x) for x in sys.version_info[:3]])) > 264 | ||||
|  | ||||
| if PY3: | ||||
|     import codecs | ||||
|     from io import BytesIO as StringIO | ||||
|     # return s converted to binary.  b('test') should be equivalent to b'test' | ||||
|     def b(s): | ||||
|         return codecs.latin_1_encode(s)[0] | ||||
|  | ||||
|     bin_type = bytes | ||||
|     txt_type   = str | ||||
| if pymongo.version_tuple[0] < 3: | ||||
|     IS_PYMONGO_3 = False | ||||
| else: | ||||
|     IS_PYMONGO_3 = True | ||||
|  | ||||
|  | ||||
| # six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3. | ||||
| StringIO = six.BytesIO | ||||
|  | ||||
| # Additionally for Py2, try to use the faster cStringIO, if available | ||||
| if not six.PY3: | ||||
|     try: | ||||
|         from cStringIO import StringIO | ||||
|         import cStringIO | ||||
|     except ImportError: | ||||
|         from StringIO import StringIO | ||||
|  | ||||
|     # Conversion to binary only necessary in Python 3 | ||||
|     def b(s): | ||||
|         return s | ||||
|  | ||||
|     bin_type = str | ||||
|     txt_type = unicode | ||||
|  | ||||
| str_types = (bin_type, txt_type) | ||||
|  | ||||
| if PY25: | ||||
|     def product(*args, **kwds): | ||||
|         pools = map(tuple, args) * kwds.get('repeat', 1) | ||||
|         result = [[]] | ||||
|         for pool in pools: | ||||
|             result = [x + [y] for x in result for y in pool] | ||||
|         for prod in result: | ||||
|             yield tuple(prod) | ||||
|     reduce = reduce | ||||
|         pass | ||||
|     else: | ||||
|     from itertools import product | ||||
|     from functools import reduce | ||||
|  | ||||
|  | ||||
| # For use with Python 2.5 | ||||
| # converts all keys from unicode to str for d and all nested dictionaries | ||||
| def to_str_keys_recursive(d): | ||||
|     if isinstance(d, list): | ||||
|         for val in d: | ||||
|             if isinstance(val, (dict, list)): | ||||
|                 to_str_keys_recursive(val) | ||||
|     elif isinstance(d, dict): | ||||
|         for key, val in d.items(): | ||||
|             if isinstance(val, (dict, list)): | ||||
|                 to_str_keys_recursive(val) | ||||
|             if isinstance(key, unicode): | ||||
|                 d[str(key)] = d.pop(key) | ||||
|     else: | ||||
|         raise ValueError("non list/dict parameter not allowed") | ||||
|         StringIO = cStringIO.StringIO | ||||
|   | ||||
| @@ -1,11 +1,17 @@ | ||||
| from mongoengine.errors import (DoesNotExist, MultipleObjectsReturned, | ||||
|                                 InvalidQueryError, OperationError, | ||||
|                                 NotUniqueError) | ||||
| from mongoengine.errors import * | ||||
| from mongoengine.queryset.field_list import * | ||||
| from mongoengine.queryset.manager import * | ||||
| from mongoengine.queryset.queryset import * | ||||
| from mongoengine.queryset.transform import * | ||||
| from mongoengine.queryset.visitor import * | ||||
|  | ||||
| __all__ = (field_list.__all__ + manager.__all__ + queryset.__all__ + | ||||
|            transform.__all__ + visitor.__all__) | ||||
| # Expose just the public subset of all imported objects and constants. | ||||
| __all__ = ( | ||||
|     'QuerySet', 'QuerySetNoCache', 'Q', 'queryset_manager', 'QuerySetManager', | ||||
|     'QueryFieldList', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL', | ||||
|  | ||||
|     # Errors that might be related to a queryset, mostly here for backward | ||||
|     # compatibility | ||||
|     'DoesNotExist', 'InvalidQueryError', 'MultipleObjectsReturned', | ||||
|     'NotUniqueError', 'OperationError', | ||||
| ) | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,4 +1,3 @@ | ||||
|  | ||||
| __all__ = ('QueryFieldList',) | ||||
|  | ||||
|  | ||||
| @@ -68,7 +67,7 @@ class QueryFieldList(object): | ||||
|         return bool(self.fields) | ||||
|  | ||||
|     def as_dict(self): | ||||
|         field_list = dict((field, self.value) for field in self.fields) | ||||
|         field_list = {field: self.value for field in self.fields} | ||||
|         if self.slice: | ||||
|             field_list.update(self.slice) | ||||
|         if self._id is not None: | ||||
|   | ||||
| @@ -29,7 +29,7 @@ class QuerySetManager(object): | ||||
|         Document.objects is accessed. | ||||
|         """ | ||||
|         if instance is not None: | ||||
|             # Document class being used rather than a document object | ||||
|             # Document object being used rather than a document class | ||||
|             return self | ||||
|  | ||||
|         # owner is the document that contains the QuerySetManager | ||||
|   | ||||
| @@ -1,6 +1,6 @@ | ||||
| from mongoengine.errors import OperationError | ||||
| from mongoengine.queryset.base import (BaseQuerySet, DO_NOTHING, NULLIFY, | ||||
|                                        CASCADE, DENY, PULL) | ||||
| from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING, | ||||
|                                        NULLIFY, PULL) | ||||
|  | ||||
| __all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE', | ||||
|            'DENY', 'PULL') | ||||
| @@ -30,6 +30,7 @@ class QuerySet(BaseQuerySet): | ||||
|         batch. Otherwise iterate the result_cache. | ||||
|         """ | ||||
|         self._iter = True | ||||
|  | ||||
|         if self._has_more: | ||||
|             return self._iter_results() | ||||
|  | ||||
| @@ -38,45 +39,60 @@ class QuerySet(BaseQuerySet): | ||||
|  | ||||
|     def __len__(self): | ||||
|         """Since __len__ is called quite frequently (for example, as part of | ||||
|         list(qs) we populate the result cache and cache the length. | ||||
|         list(qs)), we populate the result cache and cache the length. | ||||
|         """ | ||||
|         if self._len is not None: | ||||
|             return self._len | ||||
|  | ||||
|         # Populate the result cache with *all* of the docs in the cursor | ||||
|         if self._has_more: | ||||
|             # populate the cache | ||||
|             list(self._iter_results()) | ||||
|  | ||||
|         # Cache the length of the complete result cache and return it | ||||
|         self._len = len(self._result_cache) | ||||
|         return self._len | ||||
|  | ||||
|     def __repr__(self): | ||||
|         """Provides the string representation of the QuerySet | ||||
|         """ | ||||
|         """Provide a string representation of the QuerySet""" | ||||
|         if self._iter: | ||||
|             return '.. queryset mid-iteration ..' | ||||
|  | ||||
|         self._populate_cache() | ||||
|         data = self._result_cache[:REPR_OUTPUT_SIZE + 1] | ||||
|         if len(data) > REPR_OUTPUT_SIZE: | ||||
|             data[-1] = "...(remaining elements truncated)..." | ||||
|             data[-1] = '...(remaining elements truncated)...' | ||||
|         return repr(data) | ||||
|  | ||||
|  | ||||
|     def _iter_results(self): | ||||
|         """A generator for iterating over the result cache. | ||||
|  | ||||
|         Also populates the cache if there are more possible results to yield. | ||||
|         Raises StopIteration when there are no more results""" | ||||
|         Also populates the cache if there are more possible results to | ||||
|         yield. Raises StopIteration when there are no more results. | ||||
|         """ | ||||
|         if self._result_cache is None: | ||||
|             self._result_cache = [] | ||||
|  | ||||
|         pos = 0 | ||||
|         while True: | ||||
|             upper = len(self._result_cache) | ||||
|             while pos < upper: | ||||
|  | ||||
|             # For all positions lower than the length of the current result | ||||
|             # cache, serve the docs straight from the cache w/o hitting the | ||||
|             # database. | ||||
|             # XXX it's VERY important to compute the len within the `while` | ||||
|             # condition because the result cache might expand mid-iteration | ||||
|             # (e.g. if we call len(qs) inside a loop that iterates over the | ||||
|             # queryset). Fortunately len(list) is O(1) in Python, so this | ||||
|             # doesn't cause performance issues. | ||||
|             while pos < len(self._result_cache): | ||||
|                 yield self._result_cache[pos] | ||||
|                 pos = pos + 1 | ||||
|                 pos += 1 | ||||
|  | ||||
|             # Raise StopIteration if we already established there were no more | ||||
|             # docs in the db cursor. | ||||
|             if not self._has_more: | ||||
|                 raise StopIteration | ||||
|  | ||||
|             # Otherwise, populate more of the cache and repeat. | ||||
|             if len(self._result_cache) <= pos: | ||||
|                 self._populate_cache() | ||||
|  | ||||
| @@ -87,14 +103,24 @@ class QuerySet(BaseQuerySet): | ||||
|         """ | ||||
|         if self._result_cache is None: | ||||
|             self._result_cache = [] | ||||
|         if self._has_more: | ||||
|  | ||||
|         # Skip populating the cache if we already established there are no | ||||
|         # more docs to pull from the database. | ||||
|         if not self._has_more: | ||||
|             return | ||||
|  | ||||
|         # Pull in ITER_CHUNK_SIZE docs from the database and store them in | ||||
|         # the result cache. | ||||
|         try: | ||||
|                 for i in xrange(ITER_CHUNK_SIZE): | ||||
|             for _ in xrange(ITER_CHUNK_SIZE): | ||||
|                 self._result_cache.append(self.next()) | ||||
|         except StopIteration: | ||||
|             # Getting this exception means there are no more docs in the | ||||
|             # db cursor. Set _has_more to False so that we can use that | ||||
|             # information in other places. | ||||
|             self._has_more = False | ||||
|  | ||||
|     def count(self, with_limit_and_skip=True): | ||||
|     def count(self, with_limit_and_skip=False): | ||||
|         """Count the selected elements in the query. | ||||
|  | ||||
|         :param with_limit_and_skip (optional): take any :meth:`limit` or | ||||
| @@ -110,13 +136,15 @@ class QuerySet(BaseQuerySet): | ||||
|         return self._len | ||||
|  | ||||
|     def no_cache(self): | ||||
|         """Convert to a non_caching queryset | ||||
|         """Convert to a non-caching queryset | ||||
|  | ||||
|         .. versionadded:: 0.8.3 Convert to non caching queryset | ||||
|         """ | ||||
|         if self._result_cache is not None: | ||||
|             raise OperationError("QuerySet already cached") | ||||
|         return self.clone_into(QuerySetNoCache(self._document, self._collection)) | ||||
|             raise OperationError('QuerySet already cached') | ||||
|  | ||||
|         return self._clone_into(QuerySetNoCache(self._document, | ||||
|                                                 self._collection)) | ||||
|  | ||||
|  | ||||
| class QuerySetNoCache(BaseQuerySet): | ||||
| @@ -127,7 +155,7 @@ class QuerySetNoCache(BaseQuerySet): | ||||
|  | ||||
|         .. versionadded:: 0.8.3 Convert to caching queryset | ||||
|         """ | ||||
|         return self.clone_into(QuerySet(self._document, self._collection)) | ||||
|         return self._clone_into(QuerySet(self._document, self._collection)) | ||||
|  | ||||
|     def __repr__(self): | ||||
|         """Provides the string representation of the QuerySet | ||||
| @@ -138,13 +166,14 @@ class QuerySetNoCache(BaseQuerySet): | ||||
|             return '.. queryset mid-iteration ..' | ||||
|  | ||||
|         data = [] | ||||
|         for i in xrange(REPR_OUTPUT_SIZE + 1): | ||||
|         for _ in xrange(REPR_OUTPUT_SIZE + 1): | ||||
|             try: | ||||
|                 data.append(self.next()) | ||||
|             except StopIteration: | ||||
|                 break | ||||
|  | ||||
|         if len(data) > REPR_OUTPUT_SIZE: | ||||
|             data[-1] = "...(remaining elements truncated)..." | ||||
|             data[-1] = '...(remaining elements truncated)...' | ||||
|  | ||||
|         self.rewind() | ||||
|         return repr(data) | ||||
| @@ -155,3 +184,10 @@ class QuerySetNoCache(BaseQuerySet): | ||||
|             queryset = self.clone() | ||||
|         queryset.rewind() | ||||
|         return queryset | ||||
|  | ||||
|  | ||||
| class QuerySetNoDeRef(QuerySet): | ||||
|     """Special no_dereference QuerySet""" | ||||
|  | ||||
|     def __dereference(items, max_depth=1, instance=None, name=None): | ||||
|         return items | ||||
|   | ||||
| @@ -1,19 +1,23 @@ | ||||
| from collections import defaultdict | ||||
|  | ||||
| from bson import ObjectId, SON | ||||
| from bson.dbref import DBRef | ||||
| import pymongo | ||||
| from bson import SON | ||||
| import six | ||||
|  | ||||
| from mongoengine.base import UPDATE_OPERATORS | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import InvalidQueryError, LookUpError | ||||
| from mongoengine.connection import get_connection | ||||
| from mongoengine.errors import InvalidQueryError | ||||
| from mongoengine.python_support import IS_PYMONGO_3 | ||||
|  | ||||
| __all__ = ('query', 'update') | ||||
|  | ||||
|  | ||||
| COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', | ||||
|                         'all', 'size', 'exists', 'not') | ||||
|                         'all', 'size', 'exists', 'not', 'elemMatch', 'type') | ||||
| GEO_OPERATORS = ('within_distance', 'within_spherical_distance', | ||||
|                  'within_box', 'within_polygon', 'near', 'near_sphere', | ||||
|                         'max_distance', 'geo_within', 'geo_within_box', | ||||
|                  'max_distance', 'min_distance', 'geo_within', 'geo_within_box', | ||||
|                  'geo_within_polygon', 'geo_within_center', | ||||
|                  'geo_within_sphere', 'geo_intersects') | ||||
| STRING_OPERATORS = ('contains', 'icontains', 'startswith', | ||||
| @@ -23,22 +27,18 @@ CUSTOM_OPERATORS     = ('match',) | ||||
| MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS + | ||||
|                    STRING_OPERATORS + CUSTOM_OPERATORS) | ||||
|  | ||||
| UPDATE_OPERATORS     = ('set', 'unset', 'inc', 'dec', 'pop', 'push', | ||||
|                         'push_all', 'pull', 'pull_all', 'add_to_set', | ||||
|                         'set_on_insert') | ||||
|  | ||||
|  | ||||
| def query(_doc_cls=None, _field_operation=False, **query): | ||||
|     """Transform a query from Django-style format to Mongo format. | ||||
|     """ | ||||
| # TODO make this less complex | ||||
| def query(_doc_cls=None, **kwargs): | ||||
|     """Transform a query from Django-style format to Mongo format.""" | ||||
|     mongo_query = {} | ||||
|     merge_query = defaultdict(list) | ||||
|     for key, value in sorted(query.items()): | ||||
|         if key == "__raw__": | ||||
|     for key, value in sorted(kwargs.items()): | ||||
|         if key == '__raw__': | ||||
|             mongo_query.update(value) | ||||
|             continue | ||||
|  | ||||
|         parts = key.split('__') | ||||
|         parts = key.rsplit('__') | ||||
|         indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()] | ||||
|         parts = [part for part in parts if not part.isdigit()] | ||||
|         # Check for an operator and transform to mongo-style if there is | ||||
| @@ -46,6 +46,10 @@ def query(_doc_cls=None, _field_operation=False, **query): | ||||
|         if len(parts) > 1 and parts[-1] in MATCH_OPERATORS: | ||||
|             op = parts.pop() | ||||
|  | ||||
|         # Allow to escape operator-like field name by __ | ||||
|         if len(parts) > 1 and parts[-1] == '': | ||||
|             parts.pop() | ||||
|  | ||||
|         negate = False | ||||
|         if len(parts) > 1 and parts[-1] == 'not': | ||||
|             parts.pop() | ||||
| @@ -55,18 +59,25 @@ def query(_doc_cls=None, _field_operation=False, **query): | ||||
|             # Switch field names to proper names [set in Field(name='foo')] | ||||
|             try: | ||||
|                 fields = _doc_cls._lookup_field(parts) | ||||
|             except Exception, e: | ||||
|             except Exception as e: | ||||
|                 raise InvalidQueryError(e) | ||||
|             parts = [] | ||||
|  | ||||
|             CachedReferenceField = _import_class('CachedReferenceField') | ||||
|             GenericReferenceField = _import_class('GenericReferenceField') | ||||
|  | ||||
|             cleaned_fields = [] | ||||
|             for field in fields: | ||||
|                 append_field = True | ||||
|                 if isinstance(field, basestring): | ||||
|                 if isinstance(field, six.string_types): | ||||
|                     parts.append(field) | ||||
|                     append_field = False | ||||
|                 # is last and CachedReferenceField | ||||
|                 elif isinstance(field, CachedReferenceField) and fields[-1] == field: | ||||
|                     parts.append('%s._id' % field.db_field) | ||||
|                 else: | ||||
|                     parts.append(field.db_field) | ||||
|  | ||||
|                 if append_field: | ||||
|                     cleaned_fields.append(field) | ||||
|  | ||||
| @@ -76,30 +87,65 @@ def query(_doc_cls=None, _field_operation=False, **query): | ||||
|             singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] | ||||
|             singular_ops += STRING_OPERATORS | ||||
|             if op in singular_ops: | ||||
|                 if isinstance(field, basestring): | ||||
|                 if isinstance(field, six.string_types): | ||||
|                     if (op in STRING_OPERATORS and | ||||
|                        isinstance(value, basestring)): | ||||
|                             isinstance(value, six.string_types)): | ||||
|                         StringField = _import_class('StringField') | ||||
|                         value = StringField.prepare_query_value(op, value) | ||||
|                     else: | ||||
|                         value = field | ||||
|                 else: | ||||
|                     value = field.prepare_query_value(op, value) | ||||
|  | ||||
|                     if isinstance(field, CachedReferenceField) and value: | ||||
|                         value = value['_id'] | ||||
|  | ||||
|             elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): | ||||
|                 # 'in', 'nin' and 'all' require a list of values | ||||
|                 # Raise an error if the in/nin/all/near param is not iterable. We need a | ||||
|                 # special check for BaseDocument, because - although it's iterable - using | ||||
|                 # it as such in the context of this method is most definitely a mistake. | ||||
|                 BaseDocument = _import_class('BaseDocument') | ||||
|                 if isinstance(value, BaseDocument): | ||||
|                     raise TypeError("When using the `in`, `nin`, `all`, or " | ||||
|                                     "`near`-operators you can\'t use a " | ||||
|                                     "`Document`, you must wrap your object " | ||||
|                                     "in a list (object -> [object]).") | ||||
|                 elif not hasattr(value, '__iter__'): | ||||
|                     raise TypeError("The `in`, `nin`, `all`, or " | ||||
|                                     "`near`-operators must be applied to an " | ||||
|                                     "iterable (e.g. a list).") | ||||
|                 else: | ||||
|                     value = [field.prepare_query_value(op, v) for v in value] | ||||
|  | ||||
|             # If we're querying a GenericReferenceField, we need to alter the | ||||
|             # key depending on the value: | ||||
|             # * If the value is a DBRef, the key should be "field_name._ref". | ||||
|             # * If the value is an ObjectId, the key should be "field_name._ref.$id". | ||||
|             if isinstance(field, GenericReferenceField): | ||||
|                 if isinstance(value, DBRef): | ||||
|                     parts[-1] += '._ref' | ||||
|                 elif isinstance(value, ObjectId): | ||||
|                     parts[-1] += '._ref.$id' | ||||
|  | ||||
|         # if op and op not in COMPARISON_OPERATORS: | ||||
|         if op: | ||||
|             if op in GEO_OPERATORS: | ||||
|                 value = _geo_operator(field, op, value) | ||||
|             elif op in CUSTOM_OPERATORS: | ||||
|                 if op == 'match': | ||||
|                     value = field.prepare_query_value(op, value) | ||||
|                     value = {"$elemMatch": value} | ||||
|             elif op in ('match', 'elemMatch'): | ||||
|                 ListField = _import_class('ListField') | ||||
|                 EmbeddedDocumentField = _import_class('EmbeddedDocumentField') | ||||
|                 if ( | ||||
|                     isinstance(value, dict) and | ||||
|                     isinstance(field, ListField) and | ||||
|                     isinstance(field.field, EmbeddedDocumentField) | ||||
|                 ): | ||||
|                     value = query(field.field.document_type, **value) | ||||
|                 else: | ||||
|                     NotImplementedError("Custom method '%s' has not " | ||||
|                                         "been implemented" % op) | ||||
|                     value = field.prepare_query_value(op, value) | ||||
|                 value = {'$elemMatch': value} | ||||
|             elif op in CUSTOM_OPERATORS: | ||||
|                 NotImplementedError('Custom method "%s" has not ' | ||||
|                                     'been implemented' % op) | ||||
|             elif op not in STRING_OPERATORS: | ||||
|                 value = {'$' + op: value} | ||||
|  | ||||
| @@ -108,21 +154,42 @@ def query(_doc_cls=None, _field_operation=False, **query): | ||||
|  | ||||
|         for i, part in indices: | ||||
|             parts.insert(i, part) | ||||
|  | ||||
|         key = '.'.join(parts) | ||||
|  | ||||
|         if op is None or key not in mongo_query: | ||||
|             mongo_query[key] = value | ||||
|         elif key in mongo_query: | ||||
|             if key in mongo_query and isinstance(mongo_query[key], dict): | ||||
|             if isinstance(mongo_query[key], dict): | ||||
|                 mongo_query[key].update(value) | ||||
|                 # $maxDistance needs to come last - convert to SON | ||||
|                 if '$maxDistance' in mongo_query[key]: | ||||
|                 # $max/minDistance needs to come last - convert to SON | ||||
|                 value_dict = mongo_query[key] | ||||
|                 if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \ | ||||
|                         ('$near' in value_dict or '$nearSphere' in value_dict): | ||||
|                     value_son = SON() | ||||
|                     for k, v in value_dict.iteritems(): | ||||
|                         if k == '$maxDistance': | ||||
|                         if k == '$maxDistance' or k == '$minDistance': | ||||
|                             continue | ||||
|                         value_son[k] = v | ||||
|                     # Required for MongoDB >= 2.6, may fail when combining | ||||
|                     # PyMongo 3+ and MongoDB < 2.6 | ||||
|                     near_embedded = False | ||||
|                     for near_op in ('$near', '$nearSphere'): | ||||
|                         if isinstance(value_dict.get(near_op), dict) and ( | ||||
|                                 IS_PYMONGO_3 or get_connection().max_wire_version > 1): | ||||
|                             value_son[near_op] = SON(value_son[near_op]) | ||||
|                             if '$maxDistance' in value_dict: | ||||
|                                 value_son[near_op][ | ||||
|                                     '$maxDistance'] = value_dict['$maxDistance'] | ||||
|                             if '$minDistance' in value_dict: | ||||
|                                 value_son[near_op][ | ||||
|                                     '$minDistance'] = value_dict['$minDistance'] | ||||
|                             near_embedded = True | ||||
|                     if not near_embedded: | ||||
|                         if '$maxDistance' in value_dict: | ||||
|                             value_son['$maxDistance'] = value_dict['$maxDistance'] | ||||
|                         if '$minDistance' in value_dict: | ||||
|                             value_son['$minDistance'] = value_dict['$minDistance'] | ||||
|                     mongo_query[key] = value_son | ||||
|             else: | ||||
|                 # Store for manually merging later | ||||
| @@ -135,7 +202,7 @@ def query(_doc_cls=None, _field_operation=False, **query): | ||||
|         if isinstance(v, list): | ||||
|             value = [{k: val} for val in v] | ||||
|             if '$and' in mongo_query.keys(): | ||||
|                 mongo_query['$and'].append(value) | ||||
|                 mongo_query['$and'].extend(value) | ||||
|             else: | ||||
|                 mongo_query['$and'] = value | ||||
|  | ||||
| @@ -143,14 +210,18 @@ def query(_doc_cls=None, _field_operation=False, **query): | ||||
|  | ||||
|  | ||||
| def update(_doc_cls=None, **update): | ||||
|     """Transform an update spec from Django-style format to Mongo format. | ||||
|     """Transform an update spec from Django-style format to Mongo | ||||
|     format. | ||||
|     """ | ||||
|     mongo_update = {} | ||||
|     for key, value in update.items(): | ||||
|         if key == "__raw__": | ||||
|         if key == '__raw__': | ||||
|             mongo_update.update(value) | ||||
|             continue | ||||
|         parts = key.split('__') | ||||
|         # if there is no operator, default to 'set' | ||||
|         if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: | ||||
|             parts.insert(0, 'set') | ||||
|         # Check for an operator and transform to mongo-style if there is | ||||
|         op = None | ||||
|         if parts[0] in UPDATE_OPERATORS: | ||||
| @@ -162,22 +233,25 @@ def update(_doc_cls=None, **update): | ||||
|                 # Support decrement by flipping a positive value's sign | ||||
|                 # and using 'inc' | ||||
|                 op = 'inc' | ||||
|                 if value > 0: | ||||
|                 value = -value | ||||
|             elif op == 'add_to_set': | ||||
|                 op = 'addToSet' | ||||
|             elif op == 'set_on_insert': | ||||
|                 op = "setOnInsert" | ||||
|                 op = 'setOnInsert' | ||||
|  | ||||
|         match = None | ||||
|         if parts[-1] in COMPARISON_OPERATORS: | ||||
|             match = parts.pop() | ||||
|  | ||||
|         # Allow to escape operator-like field name by __ | ||||
|         if len(parts) > 1 and parts[-1] == '': | ||||
|             parts.pop() | ||||
|  | ||||
|         if _doc_cls: | ||||
|             # Switch field names to proper names [set in Field(name='foo')] | ||||
|             try: | ||||
|                 fields = _doc_cls._lookup_field(parts) | ||||
|             except Exception, e: | ||||
|             except Exception as e: | ||||
|                 raise InvalidQueryError(e) | ||||
|             parts = [] | ||||
|  | ||||
| @@ -185,7 +259,7 @@ def update(_doc_cls=None, **update): | ||||
|             appended_sub_field = False | ||||
|             for field in fields: | ||||
|                 append_field = True | ||||
|                 if isinstance(field, basestring): | ||||
|                 if isinstance(field, six.string_types): | ||||
|                     # Convert the S operator to $ | ||||
|                     if field == 'S': | ||||
|                         field = '$' | ||||
| @@ -206,6 +280,10 @@ def update(_doc_cls=None, **update): | ||||
|             else: | ||||
|                 field = cleaned_fields[-1] | ||||
|  | ||||
|             GeoJsonBaseField = _import_class('GeoJsonBaseField') | ||||
|             if isinstance(field, GeoJsonBaseField): | ||||
|                 value = field.to_mongo(value) | ||||
|  | ||||
|             if op in (None, 'set', 'push', 'pull'): | ||||
|                 if field.required or value is not None: | ||||
|                     value = field.prepare_query_value(op, value) | ||||
| @@ -216,7 +294,7 @@ def update(_doc_cls=None, **update): | ||||
|                     value = [field.prepare_query_value(op, v) for v in value] | ||||
|                 elif field.required or value is not None: | ||||
|                     value = field.prepare_query_value(op, value) | ||||
|             elif op == "unset": | ||||
|             elif op == 'unset': | ||||
|                 value = 1 | ||||
|  | ||||
|         if match: | ||||
| @@ -226,16 +304,16 @@ def update(_doc_cls=None, **update): | ||||
|         key = '.'.join(parts) | ||||
|  | ||||
|         if not op: | ||||
|             raise InvalidQueryError("Updates must supply an operation " | ||||
|                                     "eg: set__FIELD=value") | ||||
|             raise InvalidQueryError('Updates must supply an operation ' | ||||
|                                     'eg: set__FIELD=value') | ||||
|  | ||||
|         if 'pull' in op and '.' in key: | ||||
|             # Dot operators don't work on pull operations | ||||
|             # unless they point to a list field | ||||
|             # Otherwise it uses nested dict syntax | ||||
|             if op == 'pullAll': | ||||
|                 raise InvalidQueryError("pullAll operations only support " | ||||
|                                         "a single field depth") | ||||
|                 raise InvalidQueryError('pullAll operations only support ' | ||||
|                                         'a single field depth') | ||||
|  | ||||
|             # Look for the last list field and use dot notation until there | ||||
|             field_classes = [c.__class__ for c in cleaned_fields] | ||||
| @@ -244,8 +322,9 @@ def update(_doc_cls=None, **update): | ||||
|             if ListField in field_classes: | ||||
|                 # Join all fields via dot notation to the last ListField | ||||
|                 # Then process as normal | ||||
|                 last_listField = len(cleaned_fields) - field_classes.index(ListField) | ||||
|                 key = ".".join(parts[:last_listField]) | ||||
|                 last_listField = len( | ||||
|                     cleaned_fields) - field_classes.index(ListField) | ||||
|                 key = '.'.join(parts[:last_listField]) | ||||
|                 parts = parts[last_listField:] | ||||
|                 parts.insert(0, key) | ||||
|  | ||||
| @@ -253,7 +332,7 @@ def update(_doc_cls=None, **update): | ||||
|             for key in parts: | ||||
|                 value = {key: value} | ||||
|         elif op == 'addToSet' and isinstance(value, list): | ||||
|             value = {key: {"$each": value}} | ||||
|             value = {key: {'$each': value}} | ||||
|         else: | ||||
|             value = {key: value} | ||||
|         key = '$' + op | ||||
| @@ -267,73 +346,82 @@ def update(_doc_cls=None, **update): | ||||
|  | ||||
|  | ||||
| def _geo_operator(field, op, value): | ||||
|     """Helper to return the query for a given geo query""" | ||||
|     if field._geo_index == pymongo.GEO2D: | ||||
|         if op == "within_distance": | ||||
|     """Helper to return the query for a given geo query.""" | ||||
|     if op == 'max_distance': | ||||
|         value = {'$maxDistance': value} | ||||
|     elif op == 'min_distance': | ||||
|         value = {'$minDistance': value} | ||||
|     elif field._geo_index == pymongo.GEO2D: | ||||
|         if op == 'within_distance': | ||||
|             value = {'$within': {'$center': value}} | ||||
|         elif op == "within_spherical_distance": | ||||
|         elif op == 'within_spherical_distance': | ||||
|             value = {'$within': {'$centerSphere': value}} | ||||
|         elif op == "within_polygon": | ||||
|         elif op == 'within_polygon': | ||||
|             value = {'$within': {'$polygon': value}} | ||||
|         elif op == "near": | ||||
|         elif op == 'near': | ||||
|             value = {'$near': value} | ||||
|         elif op == "near_sphere": | ||||
|         elif op == 'near_sphere': | ||||
|             value = {'$nearSphere': value} | ||||
|         elif op == 'within_box': | ||||
|             value = {'$within': {'$box': value}} | ||||
|         elif op == "max_distance": | ||||
|             value = {'$maxDistance': value} | ||||
|         else: | ||||
|             raise NotImplementedError("Geo method '%s' has not " | ||||
|                                       "been implemented for a GeoPointField" % op) | ||||
|             raise NotImplementedError('Geo method "%s" has not been ' | ||||
|                                       'implemented for a GeoPointField' % op) | ||||
|     else: | ||||
|         if op == "geo_within": | ||||
|             value = {"$geoWithin": _infer_geometry(value)} | ||||
|         elif op == "geo_within_box": | ||||
|             value = {"$geoWithin": {"$box": value}} | ||||
|         elif op == "geo_within_polygon": | ||||
|             value = {"$geoWithin": {"$polygon": value}} | ||||
|         elif op == "geo_within_center": | ||||
|             value = {"$geoWithin": {"$center": value}} | ||||
|         elif op == "geo_within_sphere": | ||||
|             value = {"$geoWithin": {"$centerSphere": value}} | ||||
|         elif op == "geo_intersects": | ||||
|             value = {"$geoIntersects": _infer_geometry(value)} | ||||
|         elif op == "near": | ||||
|         if op == 'geo_within': | ||||
|             value = {'$geoWithin': _infer_geometry(value)} | ||||
|         elif op == 'geo_within_box': | ||||
|             value = {'$geoWithin': {'$box': value}} | ||||
|         elif op == 'geo_within_polygon': | ||||
|             value = {'$geoWithin': {'$polygon': value}} | ||||
|         elif op == 'geo_within_center': | ||||
|             value = {'$geoWithin': {'$center': value}} | ||||
|         elif op == 'geo_within_sphere': | ||||
|             value = {'$geoWithin': {'$centerSphere': value}} | ||||
|         elif op == 'geo_intersects': | ||||
|             value = {'$geoIntersects': _infer_geometry(value)} | ||||
|         elif op == 'near': | ||||
|             value = {'$near': _infer_geometry(value)} | ||||
|         elif op == "max_distance": | ||||
|             value = {'$maxDistance': value} | ||||
|         else: | ||||
|             raise NotImplementedError("Geo method '%s' has not " | ||||
|                                       "been implemented for a %s " % (op, field._name)) | ||||
|             raise NotImplementedError( | ||||
|                 'Geo method "%s" has not been implemented for a %s ' | ||||
|                 % (op, field._name) | ||||
|             ) | ||||
|     return value | ||||
|  | ||||
|  | ||||
| def _infer_geometry(value): | ||||
|     """Helper method that tries to infer the $geometry shape for a given value""" | ||||
|     """Helper method that tries to infer the $geometry shape for a | ||||
|     given value. | ||||
|     """ | ||||
|     if isinstance(value, dict): | ||||
|         if "$geometry" in value: | ||||
|         if '$geometry' in value: | ||||
|             return value | ||||
|         elif 'coordinates' in value and 'type' in value: | ||||
|             return {"$geometry": value} | ||||
|         raise InvalidQueryError("Invalid $geometry dictionary should have " | ||||
|                                 "type and coordinates keys") | ||||
|             return {'$geometry': value} | ||||
|         raise InvalidQueryError('Invalid $geometry dictionary should have ' | ||||
|                                 'type and coordinates keys') | ||||
|     elif isinstance(value, (list, set)): | ||||
|         # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon? | ||||
|         # TODO: should both TypeError and IndexError be alike interpreted? | ||||
|  | ||||
|         try: | ||||
|             value[0][0][0] | ||||
|             return {"$geometry": {"type": "Polygon", "coordinates": value}} | ||||
|         except: | ||||
|             pass | ||||
|         try: | ||||
|             value[0][0] | ||||
|             return {"$geometry": {"type": "LineString", "coordinates": value}} | ||||
|         except: | ||||
|             pass | ||||
|         try: | ||||
|             value[0] | ||||
|             return {"$geometry": {"type": "Point", "coordinates": value}} | ||||
|         except: | ||||
|             return {'$geometry': {'type': 'Polygon', 'coordinates': value}} | ||||
|         except (TypeError, IndexError): | ||||
|             pass | ||||
|  | ||||
|     raise InvalidQueryError("Invalid $geometry data. Can be either a dictionary " | ||||
|                             "or (nested) lists of coordinate(s)") | ||||
|         try: | ||||
|             value[0][0] | ||||
|             return {'$geometry': {'type': 'LineString', 'coordinates': value}} | ||||
|         except (TypeError, IndexError): | ||||
|             pass | ||||
|  | ||||
|         try: | ||||
|             value[0] | ||||
|             return {'$geometry': {'type': 'Point', 'coordinates': value}} | ||||
|         except (TypeError, IndexError): | ||||
|             pass | ||||
|  | ||||
|     raise InvalidQueryError('Invalid $geometry data. Can be either a ' | ||||
|                             'dictionary or (nested) lists of coordinate(s)') | ||||
|   | ||||
| @@ -1,8 +1,6 @@ | ||||
| import copy | ||||
|  | ||||
| from mongoengine.errors import InvalidQueryError | ||||
| from mongoengine.python_support import product, reduce | ||||
|  | ||||
| from mongoengine.queryset import transform | ||||
|  | ||||
| __all__ = ('Q',) | ||||
| @@ -28,7 +26,7 @@ class DuplicateQueryConditionsError(InvalidQueryError): | ||||
|  | ||||
|  | ||||
| class SimplificationVisitor(QNodeVisitor): | ||||
|     """Simplifies query trees by combinging unnecessary 'and' connection nodes | ||||
|     """Simplifies query trees by combining unnecessary 'and' connection nodes | ||||
|     into a single Q-object. | ||||
|     """ | ||||
|  | ||||
| @@ -71,9 +69,9 @@ class QueryCompilerVisitor(QNodeVisitor): | ||||
|         self.document = document | ||||
|  | ||||
|     def visit_combination(self, combination): | ||||
|         operator = "$and" | ||||
|         operator = '$and' | ||||
|         if combination.operation == combination.OR: | ||||
|             operator = "$or" | ||||
|             operator = '$or' | ||||
|         return {operator: combination.children} | ||||
|  | ||||
|     def visit_query(self, query): | ||||
| @@ -81,8 +79,7 @@ class QueryCompilerVisitor(QNodeVisitor): | ||||
|  | ||||
|  | ||||
| class QNode(object): | ||||
|     """Base class for nodes in query trees. | ||||
|     """ | ||||
|     """Base class for nodes in query trees.""" | ||||
|  | ||||
|     AND = 0 | ||||
|     OR = 1 | ||||
| @@ -96,7 +93,8 @@ class QNode(object): | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def _combine(self, other, operation): | ||||
|         """Combine this node with another node into a QCombination object. | ||||
|         """Combine this node with another node into a QCombination | ||||
|         object. | ||||
|         """ | ||||
|         if getattr(other, 'empty', True): | ||||
|             return self | ||||
| @@ -118,8 +116,8 @@ class QNode(object): | ||||
|  | ||||
|  | ||||
| class QCombination(QNode): | ||||
|     """Represents the combination of several conditions by a given logical | ||||
|     operator. | ||||
|     """Represents the combination of several conditions by a given | ||||
|     logical operator. | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, operation, children): | ||||
|   | ||||
| @@ -1,11 +1,10 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| __all__ = ['pre_init', 'post_init', 'pre_save', 'pre_save_post_validation', | ||||
|            'post_save', 'pre_delete', 'post_delete'] | ||||
| __all__ = ('pre_init', 'post_init', 'pre_save', 'pre_save_post_validation', | ||||
|            'post_save', 'pre_delete', 'post_delete') | ||||
|  | ||||
| signals_available = False | ||||
| try: | ||||
|     from blinker import Namespace | ||||
|  | ||||
|     signals_available = True | ||||
| except ImportError: | ||||
|     class Namespace(object): | ||||
| @@ -27,11 +26,13 @@ except ImportError: | ||||
|             raise RuntimeError('signalling support is unavailable ' | ||||
|                                'because the blinker library is ' | ||||
|                                'not installed.') | ||||
|         send = lambda *a, **kw: None | ||||
|  | ||||
|         send = lambda *a, **kw: None  # noqa | ||||
|         connect = disconnect = has_receivers_for = receivers_for = \ | ||||
|             temporarily_connected_to = _fail | ||||
|         del _fail | ||||
|  | ||||
|  | ||||
| # the namespace for code signals.  If you are not mongoengine code, do | ||||
| # not put signals in here.  Create your own namespace instead. | ||||
| _signals = Namespace() | ||||
|   | ||||
| @@ -5,7 +5,7 @@ | ||||
| %define srcname mongoengine | ||||
|  | ||||
| Name:           python-%{srcname} | ||||
| Version:        0.8.4 | ||||
| Version:        0.8.7 | ||||
| Release:        1%{?dist} | ||||
| Summary:        A Python Document-Object Mapper for working with MongoDB | ||||
|  | ||||
|   | ||||
| @@ -1 +1,7 @@ | ||||
| pymongo | ||||
| nose | ||||
| pymongo>=2.7.1 | ||||
| six==1.10.0 | ||||
| flake8 | ||||
| flake8-import-order | ||||
| Sphinx==1.5.5 | ||||
| sphinx-rtd-theme==0.2.4 | ||||
|   | ||||
							
								
								
									
										18
									
								
								setup.cfg
									
									
									
									
									
								
							
							
						
						
									
										18
									
								
								setup.cfg
									
									
									
									
									
								
							| @@ -1,11 +1,11 @@ | ||||
| [nosetests] | ||||
| verbosity = 3 | ||||
| verbosity=2 | ||||
| detailed-errors=1 | ||||
| #with-coverage = 1 | ||||
| #cover-erase = 1 | ||||
| #cover-html = 1 | ||||
| #cover-html-dir = ../htmlcov | ||||
| #cover-package = mongoengine | ||||
| py3where = build | ||||
| where = tests | ||||
| #tests =  document/__init__.py | ||||
| tests=tests | ||||
| cover-package=mongoengine | ||||
|  | ||||
| [flake8] | ||||
| ignore=E501,F401,F403,F405,I201 | ||||
| exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests | ||||
| max-complexity=47 | ||||
| application-import-names=mongoengine,tests | ||||
|   | ||||
							
								
								
									
										47
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										47
									
								
								setup.py
									
									
									
									
									
								
							| @@ -1,6 +1,6 @@ | ||||
| import os | ||||
| import sys | ||||
| from setuptools import setup, find_packages | ||||
| from setuptools import find_packages, setup | ||||
|  | ||||
| # Hack to silence atexit traceback in newer python versions | ||||
| try: | ||||
| @@ -8,20 +8,25 @@ try: | ||||
| except ImportError: | ||||
|     pass | ||||
|  | ||||
| DESCRIPTION = 'MongoEngine is a Python Object-Document ' + \ | ||||
| DESCRIPTION = ( | ||||
|     'MongoEngine is a Python Object-Document ' | ||||
|     'Mapper for working with MongoDB.' | ||||
| LONG_DESCRIPTION = None | ||||
| ) | ||||
|  | ||||
| try: | ||||
|     LONG_DESCRIPTION = open('README.rst').read() | ||||
| except: | ||||
|     pass | ||||
|     with open('README.rst') as fin: | ||||
|         LONG_DESCRIPTION = fin.read() | ||||
| except Exception: | ||||
|     LONG_DESCRIPTION = None | ||||
|  | ||||
|  | ||||
| def get_version(version_tuple): | ||||
|     if not isinstance(version_tuple[-1], int): | ||||
|         return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1] | ||||
|     """Return the version tuple as a string, e.g. for (0, 10, 7), | ||||
|     return '0.10.7'. | ||||
|     """ | ||||
|     return '.'.join(map(str, version_tuple)) | ||||
|  | ||||
|  | ||||
| # Dirty hack to get version number from monogengine/__init__.py - we can't | ||||
| # import it as it depends on PyMongo and PyMongo isn't installed until this | ||||
| # file is read | ||||
| @@ -29,7 +34,6 @@ init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') | ||||
| version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0] | ||||
|  | ||||
| VERSION = get_version(eval(version_line.split('=')[-1])) | ||||
| print(VERSION) | ||||
|  | ||||
| CLASSIFIERS = [ | ||||
|     'Development Status :: 4 - Beta', | ||||
| @@ -38,27 +42,32 @@ CLASSIFIERS = [ | ||||
|     'Operating System :: OS Independent', | ||||
|     'Programming Language :: Python', | ||||
|     "Programming Language :: Python :: 2", | ||||
|     "Programming Language :: Python :: 2.6", | ||||
|     "Programming Language :: Python :: 2.7", | ||||
|     "Programming Language :: Python :: 3", | ||||
|     "Programming Language :: Python :: 3.1", | ||||
|     "Programming Language :: Python :: 3.2", | ||||
|     "Programming Language :: Python :: 3.3", | ||||
|     "Programming Language :: Python :: 3.4", | ||||
|     "Programming Language :: Python :: 3.5", | ||||
|     "Programming Language :: Python :: Implementation :: CPython", | ||||
|     "Programming Language :: Python :: Implementation :: PyPy", | ||||
|     'Topic :: Database', | ||||
|     'Topic :: Software Development :: Libraries :: Python Modules', | ||||
| ] | ||||
|  | ||||
| extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])} | ||||
| extra_opts = { | ||||
|     'packages': find_packages(exclude=['tests', 'tests.*']), | ||||
|     'tests_require': ['nose', 'coverage==4.2', 'blinker', 'Pillow>=2.0.0'] | ||||
| } | ||||
| if sys.version_info[0] == 3: | ||||
|     extra_opts['use_2to3'] = True | ||||
|     extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'django>=1.5.1'] | ||||
|     if "test" in sys.argv or "nosetests" in sys.argv: | ||||
|     if 'test' in sys.argv or 'nosetests' in sys.argv: | ||||
|         extra_opts['packages'] = find_packages() | ||||
|         extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]} | ||||
|         extra_opts['package_data'] = { | ||||
|             'tests': ['fields/mongoengine.png', 'fields/mongodb_leaf.png']} | ||||
| else: | ||||
|     extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2>=2.6', 'python-dateutil'] | ||||
|     extra_opts['tests_require'] += ['python-dateutil'] | ||||
|  | ||||
| setup(name='mongoengine', | ||||
| setup( | ||||
|     name='mongoengine', | ||||
|     version=VERSION, | ||||
|     author='Harry Marr', | ||||
|     author_email='harry.marr@{nospam}gmail.com', | ||||
| @@ -72,7 +81,7 @@ setup(name='mongoengine', | ||||
|     long_description=LONG_DESCRIPTION, | ||||
|     platforms=['any'], | ||||
|     classifiers=CLASSIFIERS, | ||||
|       install_requires=['pymongo>=2.5'], | ||||
|     install_requires=['pymongo>=2.7.1', 'six'], | ||||
|     test_suite='nose.collector', | ||||
|     **extra_opts | ||||
| ) | ||||
|   | ||||
| @@ -2,4 +2,3 @@ from all_warnings import AllWarnings | ||||
| from document import * | ||||
| from queryset import * | ||||
| from fields import * | ||||
| from migration import * | ||||
|   | ||||
| @@ -3,8 +3,6 @@ This test has been put into a module.  This is because it tests warnings that | ||||
| only get triggered on first hit.  This way we can ensure its imported into the | ||||
| top level and called first by the test suite. | ||||
| """ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
| import warnings | ||||
|  | ||||
|   | ||||
| @@ -1,5 +1,3 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
|  | ||||
| from class_methods import * | ||||
|   | ||||
| @@ -1,6 +1,4 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
| @@ -36,9 +34,9 @@ class ClassMethodsTest(unittest.TestCase): | ||||
|     def test_definition(self): | ||||
|         """Ensure that document may be defined using fields. | ||||
|         """ | ||||
|         self.assertEqual(['age', 'id', 'name'], | ||||
|         self.assertEqual(['_cls', 'age', 'id', 'name'], | ||||
|                          sorted(self.Person._fields.keys())) | ||||
|         self.assertEqual(["IntField", "ObjectIdField", "StringField"], | ||||
|         self.assertEqual(["IntField", "ObjectIdField", "StringField", "StringField"], | ||||
|                         sorted([x.__class__.__name__ for x in | ||||
|                                 self.Person._fields.values()])) | ||||
|  | ||||
|   | ||||
| @@ -1,6 +1,4 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
|  | ||||
| from bson import SON | ||||
| @@ -93,6 +91,7 @@ class DeltaTest(unittest.TestCase): | ||||
|     def delta_recursive(self, DocClass, EmbeddedClass): | ||||
|  | ||||
|         class Embedded(EmbeddedClass): | ||||
|             id = StringField() | ||||
|             string_field = StringField() | ||||
|             int_field = IntField() | ||||
|             dict_field = DictField() | ||||
| @@ -114,6 +113,7 @@ class DeltaTest(unittest.TestCase): | ||||
|         self.assertEqual(doc._delta(), ({}, {})) | ||||
|  | ||||
|         embedded_1 = Embedded() | ||||
|         embedded_1.id = "010101" | ||||
|         embedded_1.string_field = 'hello' | ||||
|         embedded_1.int_field = 1 | ||||
|         embedded_1.dict_field = {'hello': 'world'} | ||||
| @@ -123,6 +123,7 @@ class DeltaTest(unittest.TestCase): | ||||
|         self.assertEqual(doc._get_changed_fields(), ['embedded_field']) | ||||
|  | ||||
|         embedded_delta = { | ||||
|             'id': "010101", | ||||
|             'string_field': 'hello', | ||||
|             'int_field': 1, | ||||
|             'dict_field': {'hello': 'world'}, | ||||
| @@ -207,22 +208,21 @@ class DeltaTest(unittest.TestCase): | ||||
|         doc.embedded_field.list_field[2].string_field = 'hello world' | ||||
|         doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] | ||||
|         self.assertEqual(doc._get_changed_fields(), | ||||
|                          ['embedded_field.list_field']) | ||||
|         self.assertEqual(doc.embedded_field._delta(), ({ | ||||
|             'list_field': ['1', 2, { | ||||
|             '_cls': 'Embedded', | ||||
|             'string_field': 'hello world', | ||||
|             'int_field': 1, | ||||
|             'list_field': ['1', 2, {'hello': 'world'}], | ||||
|             'dict_field': {'hello': 'world'}}]}, {})) | ||||
|         self.assertEqual(doc._delta(), ({ | ||||
|             'embedded_field.list_field': ['1', 2, { | ||||
|                          ['embedded_field.list_field.2']) | ||||
|         self.assertEqual(doc.embedded_field._delta(), ({'list_field.2': { | ||||
|                 '_cls': 'Embedded', | ||||
|                 'string_field': 'hello world', | ||||
|                 'int_field': 1, | ||||
|                 'list_field': ['1', 2, {'hello': 'world'}], | ||||
|                 'dict_field': {'hello': 'world'}} | ||||
|             ]}, {})) | ||||
|             }, {})) | ||||
|         self.assertEqual(doc._delta(), ({'embedded_field.list_field.2': { | ||||
|                 '_cls': 'Embedded', | ||||
|                 'string_field': 'hello world', | ||||
|                 'int_field': 1, | ||||
|                 'list_field': ['1', 2, {'hello': 'world'}], | ||||
|                 'dict_field': {'hello': 'world'}} | ||||
|             }, {})) | ||||
|         doc.save() | ||||
|         doc = doc.reload(10) | ||||
|         self.assertEqual(doc.embedded_field.list_field[2].string_field, | ||||
| @@ -251,13 +251,13 @@ class DeltaTest(unittest.TestCase): | ||||
|         self.assertEqual(doc.embedded_field.list_field[2].list_field, | ||||
|                          [1, 2, {'hello': 'world'}]) | ||||
|  | ||||
|         del(doc.embedded_field.list_field[2].list_field[2]['hello']) | ||||
|         del doc.embedded_field.list_field[2].list_field[2]['hello'] | ||||
|         self.assertEqual(doc._delta(), | ||||
|                          ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {})) | ||||
|                          ({}, {'embedded_field.list_field.2.list_field.2.hello': 1})) | ||||
|         doc.save() | ||||
|         doc = doc.reload(10) | ||||
|  | ||||
|         del(doc.embedded_field.list_field[2].list_field) | ||||
|         del doc.embedded_field.list_field[2].list_field | ||||
|         self.assertEqual(doc._delta(), | ||||
|                          ({}, {'embedded_field.list_field.2.list_field': 1})) | ||||
|  | ||||
| @@ -548,22 +548,21 @@ class DeltaTest(unittest.TestCase): | ||||
|         doc.embedded_field.list_field[2].string_field = 'hello world' | ||||
|         doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] | ||||
|         self.assertEqual(doc._get_changed_fields(), | ||||
|             ['db_embedded_field.db_list_field']) | ||||
|         self.assertEqual(doc.embedded_field._delta(), ({ | ||||
|             'db_list_field': ['1', 2, { | ||||
|             ['db_embedded_field.db_list_field.2']) | ||||
|         self.assertEqual(doc.embedded_field._delta(), ({'db_list_field.2': { | ||||
|             '_cls': 'Embedded', | ||||
|             'db_string_field': 'hello world', | ||||
|             'db_int_field': 1, | ||||
|             'db_list_field': ['1', 2, {'hello': 'world'}], | ||||
|             'db_dict_field': {'hello': 'world'}}]}, {})) | ||||
|             'db_dict_field': {'hello': 'world'}}}, {})) | ||||
|         self.assertEqual(doc._delta(), ({ | ||||
|             'db_embedded_field.db_list_field': ['1', 2, { | ||||
|             'db_embedded_field.db_list_field.2': { | ||||
|                 '_cls': 'Embedded', | ||||
|                 'db_string_field': 'hello world', | ||||
|                 'db_int_field': 1, | ||||
|                 'db_list_field': ['1', 2, {'hello': 'world'}], | ||||
|                 'db_dict_field': {'hello': 'world'}} | ||||
|             ]}, {})) | ||||
|             }, {})) | ||||
|         doc.save() | ||||
|         doc = doc.reload(10) | ||||
|         self.assertEqual(doc.embedded_field.list_field[2].string_field, | ||||
| @@ -592,14 +591,13 @@ class DeltaTest(unittest.TestCase): | ||||
|         self.assertEqual(doc.embedded_field.list_field[2].list_field, | ||||
|             [1, 2, {'hello': 'world'}]) | ||||
|  | ||||
|         del(doc.embedded_field.list_field[2].list_field[2]['hello']) | ||||
|         del doc.embedded_field.list_field[2].list_field[2]['hello'] | ||||
|         self.assertEqual(doc._delta(), | ||||
|             ({'db_embedded_field.db_list_field.2.db_list_field': | ||||
|                 [1, 2, {}]}, {})) | ||||
|             ({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1})) | ||||
|         doc.save() | ||||
|         doc = doc.reload(10) | ||||
|  | ||||
|         del(doc.embedded_field.list_field[2].list_field) | ||||
|         del doc.embedded_field.list_field[2].list_field | ||||
|         self.assertEqual(doc._delta(), ({}, | ||||
|             {'db_embedded_field.db_list_field.2.db_list_field': 1})) | ||||
|  | ||||
| @@ -615,7 +613,7 @@ class DeltaTest(unittest.TestCase): | ||||
|             SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {})) | ||||
|  | ||||
|         p.doc = 123 | ||||
|         del(p.doc) | ||||
|         del p.doc | ||||
|         self.assertEqual(p._delta(), ( | ||||
|             SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {})) | ||||
|  | ||||
| @@ -713,6 +711,157 @@ class DeltaTest(unittest.TestCase): | ||||
|         self.assertEqual({}, removals) | ||||
|         self.assertTrue('employees' in updates) | ||||
|  | ||||
|     def test_nested_nested_fields_mark_as_changed(self): | ||||
|         class EmbeddedDoc(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         class MyDoc(Document): | ||||
|             subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc))) | ||||
|             name = StringField() | ||||
|  | ||||
|         MyDoc.drop_collection() | ||||
|  | ||||
|         mydoc = MyDoc(name='testcase1', subs={'a': {'b': EmbeddedDoc(name='foo')}}).save() | ||||
|  | ||||
|         mydoc = MyDoc.objects.first() | ||||
|         subdoc = mydoc.subs['a']['b'] | ||||
|         subdoc.name = 'bar' | ||||
|  | ||||
|         self.assertEqual(["name"], subdoc._get_changed_fields()) | ||||
|         self.assertEqual(["subs.a.b.name"], mydoc._get_changed_fields()) | ||||
|  | ||||
|         mydoc._clear_changed_fields() | ||||
|         self.assertEqual([], mydoc._get_changed_fields()) | ||||
|  | ||||
|     def test_lower_level_mark_as_changed(self): | ||||
|         class EmbeddedDoc(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         class MyDoc(Document): | ||||
|             subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) | ||||
|  | ||||
|         MyDoc.drop_collection() | ||||
|  | ||||
|         MyDoc().save() | ||||
|  | ||||
|         mydoc = MyDoc.objects.first() | ||||
|         mydoc.subs['a'] = EmbeddedDoc() | ||||
|         self.assertEqual(["subs.a"], mydoc._get_changed_fields()) | ||||
|  | ||||
|         subdoc = mydoc.subs['a'] | ||||
|         subdoc.name = 'bar' | ||||
|  | ||||
|         self.assertEqual(["name"], subdoc._get_changed_fields()) | ||||
|         self.assertEqual(["subs.a"], mydoc._get_changed_fields()) | ||||
|         mydoc.save() | ||||
|  | ||||
|         mydoc._clear_changed_fields() | ||||
|         self.assertEqual([], mydoc._get_changed_fields()) | ||||
|  | ||||
|     def test_upper_level_mark_as_changed(self): | ||||
|         class EmbeddedDoc(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         class MyDoc(Document): | ||||
|             subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) | ||||
|  | ||||
|         MyDoc.drop_collection() | ||||
|  | ||||
|         MyDoc(subs={'a': EmbeddedDoc(name='foo')}).save() | ||||
|  | ||||
|         mydoc = MyDoc.objects.first() | ||||
|         subdoc = mydoc.subs['a'] | ||||
|         subdoc.name = 'bar' | ||||
|  | ||||
|         self.assertEqual(["name"], subdoc._get_changed_fields()) | ||||
|         self.assertEqual(["subs.a.name"], mydoc._get_changed_fields()) | ||||
|  | ||||
|         mydoc.subs['a'] = EmbeddedDoc() | ||||
|         self.assertEqual(["subs.a"], mydoc._get_changed_fields()) | ||||
|         mydoc.save() | ||||
|  | ||||
|         mydoc._clear_changed_fields() | ||||
|         self.assertEqual([], mydoc._get_changed_fields()) | ||||
|  | ||||
|     def test_referenced_object_changed_attributes(self): | ||||
|         """Ensures that when you save a new reference to a field, the referenced object isn't altered""" | ||||
|  | ||||
|         class Organization(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         class User(Document): | ||||
|             name = StringField() | ||||
|             org = ReferenceField('Organization', required=True) | ||||
|  | ||||
|         Organization.drop_collection() | ||||
|         User.drop_collection() | ||||
|  | ||||
|         org1 = Organization(name='Org 1') | ||||
|         org1.save() | ||||
|  | ||||
|         org2 = Organization(name='Org 2') | ||||
|         org2.save() | ||||
|  | ||||
|         user = User(name='Fred', org=org1) | ||||
|         user.save() | ||||
|  | ||||
|         org1.reload() | ||||
|         org2.reload() | ||||
|         user.reload() | ||||
|         self.assertEqual(org1.name, 'Org 1') | ||||
|         self.assertEqual(org2.name, 'Org 2') | ||||
|         self.assertEqual(user.name, 'Fred') | ||||
|  | ||||
|         user.name = 'Harold' | ||||
|         user.org = org2 | ||||
|  | ||||
|         org2.name = 'New Org 2' | ||||
|         self.assertEqual(org2.name, 'New Org 2') | ||||
|  | ||||
|         user.save() | ||||
|         org2.save() | ||||
|  | ||||
|         self.assertEqual(org2.name, 'New Org 2') | ||||
|         org2.reload() | ||||
|         self.assertEqual(org2.name, 'New Org 2') | ||||
|  | ||||
|     def test_delta_for_nested_map_fields(self): | ||||
|         class UInfoDocument(Document): | ||||
|             phone = StringField() | ||||
|  | ||||
|         class EmbeddedRole(EmbeddedDocument): | ||||
|             type = StringField() | ||||
|  | ||||
|         class EmbeddedUser(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|             roles = MapField(field=EmbeddedDocumentField(EmbeddedRole)) | ||||
|             rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole)) | ||||
|             info = ReferenceField(UInfoDocument) | ||||
|  | ||||
|         class Doc(Document): | ||||
|             users = MapField(field=EmbeddedDocumentField(EmbeddedUser)) | ||||
|             num = IntField(default=-1) | ||||
|  | ||||
|         Doc.drop_collection() | ||||
|  | ||||
|         doc = Doc(num=1) | ||||
|         doc.users["007"] = EmbeddedUser(name="Agent007") | ||||
|         doc.save() | ||||
|  | ||||
|         uinfo = UInfoDocument(phone="79089269066") | ||||
|         uinfo.save() | ||||
|  | ||||
|         d = Doc.objects(num=1).first() | ||||
|         d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin") | ||||
|         d.users["007"]["rolist"].append(EmbeddedRole(type="oops")) | ||||
|         d.users["007"]["info"] = uinfo | ||||
|         delta = d._delta() | ||||
|         self.assertEqual(True, "users.007.roles.666" in delta[0]) | ||||
|         self.assertEqual(True, "users.007.rolist" in delta[0]) | ||||
|         self.assertEqual(True, "users.007.info" in delta[0]) | ||||
|         self.assertEqual('superadmin', delta[0]["users.007.roles.666"]["type"]) | ||||
|         self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"]) | ||||
|         self.assertEqual(uinfo.id, delta[0]["users.007.info"]) | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
|   | ||||
| @@ -1,6 +1,4 @@ | ||||
| import unittest | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
| @@ -72,7 +70,7 @@ class DynamicTest(unittest.TestCase): | ||||
|         obj = collection.find_one() | ||||
|         self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name']) | ||||
|  | ||||
|         del(p.misc) | ||||
|         del p.misc | ||||
|         p.save() | ||||
|  | ||||
|         p = self.Person.objects.get() | ||||
| @@ -81,6 +79,25 @@ class DynamicTest(unittest.TestCase): | ||||
|         obj = collection.find_one() | ||||
|         self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name']) | ||||
|  | ||||
|     def test_reload_after_unsetting(self): | ||||
|         p = self.Person() | ||||
|         p.misc = 22 | ||||
|         p.save() | ||||
|         p.update(unset__misc=1) | ||||
|         p.reload() | ||||
|  | ||||
|     def test_reload_dynamic_field(self): | ||||
|         self.Person.objects.delete() | ||||
|         p = self.Person.objects.create() | ||||
|         p.update(age=1) | ||||
|  | ||||
|         self.assertEqual(len(p._data), 3) | ||||
|         self.assertEqual(sorted(p._data.keys()), ['_cls', 'id', 'name']) | ||||
|  | ||||
|         p.reload() | ||||
|         self.assertEqual(len(p._data), 4) | ||||
|         self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name']) | ||||
|  | ||||
|     def test_dynamic_document_queries(self): | ||||
|         """Ensure we can query dynamic fields""" | ||||
|         p = self.Person() | ||||
| @@ -122,6 +139,13 @@ class DynamicTest(unittest.TestCase): | ||||
|  | ||||
|         self.assertEqual(1, self.Person.objects(misc__hello='world').count()) | ||||
|  | ||||
|     def test_three_level_complex_data_lookups(self): | ||||
|         """Ensure you can query three level document dynamic fields""" | ||||
|         p = self.Person.objects.create( | ||||
|             misc={'hello': {'hello2': 'world'}} | ||||
|         ) | ||||
|         self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count()) | ||||
|  | ||||
|     def test_complex_embedded_document_validation(self): | ||||
|         """Ensure embedded dynamic documents may be validated""" | ||||
|         class Embedded(DynamicEmbeddedDocument): | ||||
| @@ -292,6 +316,58 @@ class DynamicTest(unittest.TestCase): | ||||
|         person.save() | ||||
|         self.assertEqual(Person.objects.first().age, 35) | ||||
|  | ||||
|     def test_dynamic_embedded_works_with_only(self): | ||||
|         """Ensure custom fieldnames on a dynamic embedded document are found by qs.only()""" | ||||
|  | ||||
|         class Address(DynamicEmbeddedDocument): | ||||
|             city = StringField() | ||||
|  | ||||
|         class Person(DynamicDocument): | ||||
|             address = EmbeddedDocumentField(Address) | ||||
|  | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         Person(name="Eric", address=Address(city="San Francisco", street_number="1337")).save() | ||||
|  | ||||
|         self.assertEqual(Person.objects.first().address.street_number, '1337') | ||||
|         self.assertEqual(Person.objects.only('address__street_number').first().address.street_number, '1337') | ||||
|  | ||||
|     def test_dynamic_and_embedded_dict_access(self): | ||||
|         """Ensure embedded dynamic documents work with dict[] style access""" | ||||
|  | ||||
|         class Address(EmbeddedDocument): | ||||
|             city = StringField() | ||||
|  | ||||
|         class Person(DynamicDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         Person(name="Ross", address=Address(city="London")).save() | ||||
|  | ||||
|         person = Person.objects.first() | ||||
|         person.attrval = "This works" | ||||
|  | ||||
|         person["phone"] = "555-1212"  # but this should too | ||||
|  | ||||
|         # Same thing two levels deep | ||||
|         person["address"]["city"] = "Lundenne" | ||||
|         person.save() | ||||
|  | ||||
|         self.assertEqual(Person.objects.first().address.city, "Lundenne") | ||||
|  | ||||
|         self.assertEqual(Person.objects.first().phone, "555-1212") | ||||
|  | ||||
|         person = Person.objects.first() | ||||
|         person.address = Address(city="Londinium") | ||||
|         person.save() | ||||
|  | ||||
|         self.assertEqual(Person.objects.first().address.city, "Londinium") | ||||
|  | ||||
|         person = Person.objects.first() | ||||
|         person["age"] = 35 | ||||
|         person.save() | ||||
|         self.assertEqual(Person.objects.first().age, 35) | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
|   | ||||
| @@ -1,16 +1,15 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import os | ||||
| import pymongo | ||||
|  | ||||
| from nose.plugins.skip import SkipTest | ||||
| from datetime import datetime | ||||
| import pymongo | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db, get_connection | ||||
| from mongoengine.connection import get_db | ||||
|  | ||||
| from tests.utils import get_mongodb_version, needs_mongodb_v26 | ||||
|  | ||||
| __all__ = ("IndexesTest", ) | ||||
|  | ||||
| @@ -18,7 +17,7 @@ __all__ = ("IndexesTest", ) | ||||
| class IndexesTest(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.connection = connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|  | ||||
|         class Person(Document): | ||||
| @@ -32,10 +31,7 @@ class IndexesTest(unittest.TestCase): | ||||
|         self.Person = Person | ||||
|  | ||||
|     def tearDown(self): | ||||
|         for collection in self.db.collection_names(): | ||||
|             if 'system.' in collection: | ||||
|                 continue | ||||
|             self.db.drop_collection(collection) | ||||
|         self.connection.drop_database(self.db) | ||||
|  | ||||
|     def test_indexes_document(self): | ||||
|         """Ensure that indexes are used when meta[indexes] is specified for | ||||
| @@ -175,6 +171,16 @@ class IndexesTest(unittest.TestCase): | ||||
|         info = A._get_collection().index_information() | ||||
|         self.assertEqual(len(info.keys()), 2) | ||||
|  | ||||
|         class B(A): | ||||
|             c = StringField() | ||||
|             d = StringField() | ||||
|             meta = { | ||||
|                 'indexes': [{'fields': ['c']}, {'fields': ['d'], 'cls': True}], | ||||
|                 'allow_inheritance': True | ||||
|             } | ||||
|         self.assertEqual([('c', 1)], B._meta['index_specs'][1]['fields']) | ||||
|         self.assertEqual([('_cls', 1), ('d', 1)], B._meta['index_specs'][2]['fields']) | ||||
|  | ||||
|     def test_build_index_spec_is_not_destructive(self): | ||||
|  | ||||
|         class MyDoc(Document): | ||||
| @@ -265,6 +271,60 @@ class IndexesTest(unittest.TestCase): | ||||
|         info = [value['key'] for key, value in info.iteritems()] | ||||
|         self.assertTrue([('current.location.point', '2d')] in info) | ||||
|  | ||||
|     def test_explicit_geosphere_index(self): | ||||
|         """Ensure that geosphere indexes work when created via meta[indexes] | ||||
|         """ | ||||
|         class Place(Document): | ||||
|             location = DictField() | ||||
|             meta = { | ||||
|                 'allow_inheritance': True, | ||||
|                 'indexes': [ | ||||
|                     '(location.point', | ||||
|                 ] | ||||
|             } | ||||
|  | ||||
|         self.assertEqual([{'fields': [('location.point', '2dsphere')]}], | ||||
|                          Place._meta['index_specs']) | ||||
|  | ||||
|         Place.ensure_indexes() | ||||
|         info = Place._get_collection().index_information() | ||||
|         info = [value['key'] for key, value in info.iteritems()] | ||||
|         self.assertTrue([('location.point', '2dsphere')] in info) | ||||
|  | ||||
|     def test_explicit_geohaystack_index(self): | ||||
|         """Ensure that geohaystack indexes work when created via meta[indexes] | ||||
|         """ | ||||
|         raise SkipTest('GeoHaystack index creation is not supported for now' | ||||
|                        'from meta, as it requires a bucketSize parameter.') | ||||
|  | ||||
|         class Place(Document): | ||||
|             location = DictField() | ||||
|             name = StringField() | ||||
|             meta = { | ||||
|                 'indexes': [ | ||||
|                     (')location.point', 'name') | ||||
|                 ] | ||||
|             } | ||||
|         self.assertEqual([{'fields': [('location.point', 'geoHaystack'), ('name', 1)]}], | ||||
|                          Place._meta['index_specs']) | ||||
|  | ||||
|         Place.ensure_indexes() | ||||
|         info = Place._get_collection().index_information() | ||||
|         info = [value['key'] for key, value in info.iteritems()] | ||||
|         self.assertTrue([('location.point', 'geoHaystack')] in info) | ||||
|  | ||||
|     def test_create_geohaystack_index(self): | ||||
|         """Ensure that geohaystack indexes can be created | ||||
|         """ | ||||
|         class Place(Document): | ||||
|             location = DictField() | ||||
|             name = StringField() | ||||
|  | ||||
|         Place.create_index({'fields': (')location.point', 'name')}, bucketSize=10) | ||||
|         info = Place._get_collection().index_information() | ||||
|         info = [value['key'] for key, value in info.iteritems()] | ||||
|         self.assertTrue([('location.point', 'geoHaystack'), ('name', 1)] in info) | ||||
|  | ||||
|     def test_dictionary_indexes(self): | ||||
|         """Ensure that indexes are used when meta[indexes] contains | ||||
|         dictionaries instead of lists. | ||||
| @@ -352,7 +412,6 @@ class IndexesTest(unittest.TestCase): | ||||
|         User.ensure_indexes() | ||||
|         info = User.objects._collection.index_information() | ||||
|         self.assertEqual(sorted(info.keys()), ['_cls_1_user_guid_1', '_id_']) | ||||
|         User.drop_collection() | ||||
|  | ||||
|     def test_embedded_document_index(self): | ||||
|         """Tests settings an index on an embedded document | ||||
| @@ -374,7 +433,6 @@ class IndexesTest(unittest.TestCase): | ||||
|  | ||||
|         info = BlogPost.objects._collection.index_information() | ||||
|         self.assertEqual(sorted(info.keys()), ['_id_', 'date.yr_-1']) | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|     def test_list_embedded_document_index(self): | ||||
|         """Ensure list embedded documents can be indexed | ||||
| @@ -401,7 +459,6 @@ class IndexesTest(unittest.TestCase): | ||||
|         post1 = BlogPost(title="Embedded Indexes tests in place", | ||||
|                          tags=[Tag(name="about"), Tag(name="time")]) | ||||
|         post1.save() | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|     def test_recursive_embedded_objects_dont_break_indexes(self): | ||||
|  | ||||
| @@ -422,6 +479,7 @@ class IndexesTest(unittest.TestCase): | ||||
|  | ||||
|         class Test(Document): | ||||
|             a = IntField() | ||||
|             b = IntField() | ||||
|  | ||||
|             meta = { | ||||
|                 'indexes': ['a'], | ||||
| @@ -433,16 +491,35 @@ class IndexesTest(unittest.TestCase): | ||||
|         obj = Test(a=1) | ||||
|         obj.save() | ||||
|  | ||||
|         IS_MONGODB_3 = get_mongodb_version()[0] >= 3 | ||||
|  | ||||
|         # Need to be explicit about covered indexes as mongoDB doesn't know if | ||||
|         # the documents returned might have more keys in that here. | ||||
|         query_plan = Test.objects(id=obj.id).exclude('a').explain() | ||||
|         if not IS_MONGODB_3: | ||||
|             self.assertFalse(query_plan['indexOnly']) | ||||
|         else: | ||||
|             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IDHACK') | ||||
|  | ||||
|         query_plan = Test.objects(id=obj.id).only('id').explain() | ||||
|         if not IS_MONGODB_3: | ||||
|             self.assertTrue(query_plan['indexOnly']) | ||||
|         else: | ||||
|             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IDHACK') | ||||
|  | ||||
|         query_plan = Test.objects(a=1).only('a').exclude('id').explain() | ||||
|         if not IS_MONGODB_3: | ||||
|             self.assertTrue(query_plan['indexOnly']) | ||||
|         else: | ||||
|             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IXSCAN') | ||||
|             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('stage'), 'PROJECTION') | ||||
|  | ||||
|         query_plan = Test.objects(a=1).explain() | ||||
|         if not IS_MONGODB_3: | ||||
|             self.assertFalse(query_plan['indexOnly']) | ||||
|         else: | ||||
|             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IXSCAN') | ||||
|             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('stage'), 'FETCH') | ||||
|  | ||||
|     def test_index_on_id(self): | ||||
|  | ||||
| @@ -475,23 +552,29 @@ class IndexesTest(unittest.TestCase): | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         for i in xrange(0, 10): | ||||
|             tags = [("tag %i" % n) for n in xrange(0, i % 2)] | ||||
|         for i in range(0, 10): | ||||
|             tags = [("tag %i" % n) for n in range(0, i % 2)] | ||||
|             BlogPost(tags=tags).save() | ||||
|  | ||||
|         self.assertEqual(BlogPost.objects.count(), 10) | ||||
|         self.assertEqual(BlogPost.objects.hint().count(), 10) | ||||
|  | ||||
|         # PyMongo 3.0 bug only, works correctly with 2.X and 3.0.1+ versions | ||||
|         if pymongo.version != '3.0': | ||||
|             self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10) | ||||
|  | ||||
|             self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10) | ||||
|  | ||||
|         if pymongo.version >= '2.8': | ||||
|             self.assertEqual(BlogPost.objects.hint('tags').count(), 10) | ||||
|         else: | ||||
|             def invalid_index(): | ||||
|             BlogPost.objects.hint('tags') | ||||
|                 BlogPost.objects.hint('tags').next() | ||||
|             self.assertRaises(TypeError, invalid_index) | ||||
|  | ||||
|         def invalid_index_2(): | ||||
|             return BlogPost.objects.hint(('tags', 1)) | ||||
|         self.assertRaises(TypeError, invalid_index_2) | ||||
|             return BlogPost.objects.hint(('tags', 1)).next() | ||||
|         self.assertRaises(Exception, invalid_index_2) | ||||
|  | ||||
|     def test_unique(self): | ||||
|         """Ensure that uniqueness constraints are applied to fields. | ||||
| @@ -536,8 +619,6 @@ class IndexesTest(unittest.TestCase): | ||||
|         post3 = BlogPost(title='test3', date=Date(year=2010), slug='test') | ||||
|         self.assertRaises(OperationError, post3.save) | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|     def test_unique_embedded_document(self): | ||||
|         """Ensure that uniqueness constraints are applied to fields on embedded documents. | ||||
|         """ | ||||
| @@ -565,8 +646,36 @@ class IndexesTest(unittest.TestCase): | ||||
|                          sub=SubDocument(year=2010, slug='test')) | ||||
|         self.assertRaises(NotUniqueError, post3.save) | ||||
|  | ||||
|     def test_unique_embedded_document_in_list(self): | ||||
|         """ | ||||
|         Ensure that the uniqueness constraints are applied to fields in | ||||
|         embedded documents, even when the embedded documents in in a | ||||
|         list field. | ||||
|         """ | ||||
|         class SubDocument(EmbeddedDocument): | ||||
|             year = IntField(db_field='yr') | ||||
|             slug = StringField(unique=True) | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             title = StringField() | ||||
|             subs = ListField(EmbeddedDocumentField(SubDocument)) | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         post1 = BlogPost( | ||||
|             title='test1', subs=[ | ||||
|                 SubDocument(year=2009, slug='conflict'), | ||||
|                 SubDocument(year=2009, slug='conflict') | ||||
|             ] | ||||
|         ) | ||||
|         post1.save() | ||||
|  | ||||
|         post2 = BlogPost( | ||||
|             title='test2', subs=[SubDocument(year=2014, slug='conflict')] | ||||
|         ) | ||||
|  | ||||
|         self.assertRaises(NotUniqueError, post2.save) | ||||
|  | ||||
|     def test_unique_with_embedded_document_and_embedded_unique(self): | ||||
|         """Ensure that uniqueness constraints are applied to fields on | ||||
|         embedded documents.  And work with unique_with as well. | ||||
| @@ -600,8 +709,6 @@ class IndexesTest(unittest.TestCase): | ||||
|                          sub=SubDocument(year=2009, slug='test-1')) | ||||
|         self.assertRaises(NotUniqueError, post3.save) | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|     def test_ttl_indexes(self): | ||||
|  | ||||
|         class Log(Document): | ||||
| @@ -614,14 +721,6 @@ class IndexesTest(unittest.TestCase): | ||||
|  | ||||
|         Log.drop_collection() | ||||
|  | ||||
|         if pymongo.version_tuple[0] < 2 and pymongo.version_tuple[1] < 3: | ||||
|             raise SkipTest('pymongo needs to be 2.3 or higher for this test') | ||||
|  | ||||
|         connection = get_connection() | ||||
|         version_array = connection.server_info()['versionArray'] | ||||
|         if version_array[0] < 2 and version_array[1] < 2: | ||||
|             raise SkipTest('MongoDB needs to be 2.2 or higher for this test') | ||||
|  | ||||
|         # Indexes are lazy so use list() to perform query | ||||
|         list(Log.objects) | ||||
|         info = Log.objects._collection.index_information() | ||||
| @@ -649,13 +748,11 @@ class IndexesTest(unittest.TestCase): | ||||
|             raise AssertionError("We saved a dupe!") | ||||
|         except NotUniqueError: | ||||
|             pass | ||||
|         Customer.drop_collection() | ||||
|  | ||||
|     def test_unique_and_primary(self): | ||||
|         """If you set a field as primary, then unexpected behaviour can occur. | ||||
|         You won't create a duplicate but you will update an existing document. | ||||
|         """ | ||||
|  | ||||
|         class User(Document): | ||||
|             name = StringField(primary_key=True, unique=True) | ||||
|             password = StringField() | ||||
| @@ -671,8 +768,23 @@ class IndexesTest(unittest.TestCase): | ||||
|         self.assertEqual(User.objects.count(), 1) | ||||
|         self.assertEqual(User.objects.get().password, 'secret2') | ||||
|  | ||||
|     def test_unique_and_primary_create(self): | ||||
|         """Create a new record with a duplicate primary key | ||||
|         throws an exception | ||||
|         """ | ||||
|         class User(Document): | ||||
|             name = StringField(primary_key=True) | ||||
|             password = StringField() | ||||
|  | ||||
|         User.drop_collection() | ||||
|  | ||||
|         User.objects.create(name='huangz', password='secret') | ||||
|         with self.assertRaises(NotUniqueError): | ||||
|             User.objects.create(name='huangz', password='secret2') | ||||
|  | ||||
|         self.assertEqual(User.objects.count(), 1) | ||||
|         self.assertEqual(User.objects.get().password, 'secret') | ||||
|  | ||||
|     def test_index_with_pk(self): | ||||
|         """Ensure you can use `pk` as part of a query""" | ||||
|  | ||||
| @@ -699,33 +811,210 @@ class IndexesTest(unittest.TestCase): | ||||
|             name = StringField(required=True) | ||||
|             term = StringField(required=True) | ||||
|  | ||||
|         class Report(Document): | ||||
|         class ReportEmbedded(Document): | ||||
|             key = EmbeddedDocumentField(CompoundKey, primary_key=True) | ||||
|             text = StringField() | ||||
|  | ||||
|         Report.drop_collection() | ||||
|  | ||||
|         my_key = CompoundKey(name="n", term="ok") | ||||
|         report = Report(text="OK", key=my_key).save() | ||||
|         report = ReportEmbedded(text="OK", key=my_key).save() | ||||
|  | ||||
|         self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}}, | ||||
|                          report.to_mongo()) | ||||
|         self.assertEqual(report, Report.objects.get(pk=my_key)) | ||||
|         self.assertEqual(report, ReportEmbedded.objects.get(pk=my_key)) | ||||
|  | ||||
|     def test_compound_key_dictfield(self): | ||||
|  | ||||
|         class Report(Document): | ||||
|         class ReportDictField(Document): | ||||
|             key = DictField(primary_key=True) | ||||
|             text = StringField() | ||||
|  | ||||
|         Report.drop_collection() | ||||
|  | ||||
|         my_key = {"name": "n", "term": "ok"} | ||||
|         report = Report(text="OK", key=my_key).save() | ||||
|         report = ReportDictField(text="OK", key=my_key).save() | ||||
|  | ||||
|         self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}}, | ||||
|                          report.to_mongo()) | ||||
|         self.assertEqual(report, Report.objects.get(pk=my_key)) | ||||
|  | ||||
|         # We can't directly call ReportDictField.objects.get(pk=my_key), | ||||
|         # because dicts are unordered, and if the order in MongoDB is | ||||
|         # different than the one in `my_key`, this test will fail. | ||||
|         self.assertEqual(report, ReportDictField.objects.get(pk__name=my_key['name'])) | ||||
|         self.assertEqual(report, ReportDictField.objects.get(pk__term=my_key['term'])) | ||||
|  | ||||
|     def test_string_indexes(self): | ||||
|  | ||||
|         class MyDoc(Document): | ||||
|             provider_ids = DictField() | ||||
|             meta = { | ||||
|                 "indexes": ["provider_ids.foo", "provider_ids.bar"], | ||||
|             } | ||||
|  | ||||
|         info = MyDoc.objects._collection.index_information() | ||||
|         info = [value['key'] for key, value in info.iteritems()] | ||||
|         self.assertTrue([('provider_ids.foo', 1)] in info) | ||||
|         self.assertTrue([('provider_ids.bar', 1)] in info) | ||||
|  | ||||
|     def test_sparse_compound_indexes(self): | ||||
|  | ||||
|         class MyDoc(Document): | ||||
|             provider_ids = DictField() | ||||
|             meta = { | ||||
|                 "indexes": [{'fields': ("provider_ids.foo", "provider_ids.bar"), | ||||
|                              'sparse': True}], | ||||
|             } | ||||
|  | ||||
|         info = MyDoc.objects._collection.index_information() | ||||
|         self.assertEqual([('provider_ids.foo', 1), ('provider_ids.bar', 1)], | ||||
|                          info['provider_ids.foo_1_provider_ids.bar_1']['key']) | ||||
|         self.assertTrue(info['provider_ids.foo_1_provider_ids.bar_1']['sparse']) | ||||
|  | ||||
|     @needs_mongodb_v26 | ||||
|     def test_text_indexes(self): | ||||
|         class Book(Document): | ||||
|             title = DictField() | ||||
|             meta = { | ||||
|                 "indexes": ["$title"], | ||||
|             } | ||||
|  | ||||
|         indexes = Book.objects._collection.index_information() | ||||
|         self.assertTrue("title_text" in indexes) | ||||
|         key = indexes["title_text"]["key"] | ||||
|         self.assertTrue(('_fts', 'text') in key) | ||||
|  | ||||
|     def test_hashed_indexes(self): | ||||
|  | ||||
|         class Book(Document): | ||||
|             ref_id = StringField() | ||||
|             meta = { | ||||
|                 "indexes": ["#ref_id"], | ||||
|             } | ||||
|  | ||||
|         indexes = Book.objects._collection.index_information() | ||||
|         self.assertTrue("ref_id_hashed" in indexes) | ||||
|         self.assertTrue(('ref_id', 'hashed') in indexes["ref_id_hashed"]["key"]) | ||||
|  | ||||
|     def test_indexes_after_database_drop(self): | ||||
|         """ | ||||
|         Test to ensure that indexes are re-created on a collection even | ||||
|         after the database has been dropped. | ||||
|  | ||||
|         Issue #812 | ||||
|         """ | ||||
|         # Use a new connection and database since dropping the database could | ||||
|         # cause concurrent tests to fail. | ||||
|         connection = connect(db='tempdatabase', | ||||
|                              alias='test_indexes_after_database_drop') | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             title = StringField() | ||||
|             slug = StringField(unique=True) | ||||
|  | ||||
|             meta = {'db_alias': 'test_indexes_after_database_drop'} | ||||
|  | ||||
|         try: | ||||
|             BlogPost.drop_collection() | ||||
|  | ||||
|             # Create Post #1 | ||||
|             post1 = BlogPost(title='test1', slug='test') | ||||
|             post1.save() | ||||
|  | ||||
|             # Drop the Database | ||||
|             connection.drop_database('tempdatabase') | ||||
|  | ||||
|             # Re-create Post #1 | ||||
|             post1 = BlogPost(title='test1', slug='test') | ||||
|             post1.save() | ||||
|  | ||||
|             # Create Post #2 | ||||
|             post2 = BlogPost(title='test2', slug='test') | ||||
|             self.assertRaises(NotUniqueError, post2.save) | ||||
|         finally: | ||||
|             # Drop the temporary database at the end | ||||
|             connection.drop_database('tempdatabase') | ||||
|  | ||||
|  | ||||
|     def test_index_dont_send_cls_option(self): | ||||
|         """ | ||||
|         Ensure that 'cls' option is not sent through ensureIndex. We shouldn't | ||||
|         send internal MongoEngine arguments that are not a part of the index | ||||
|         spec. | ||||
|  | ||||
|         This is directly related to the fact that MongoDB doesn't validate the | ||||
|         options that are passed to ensureIndex. For more details, see: | ||||
|         https://jira.mongodb.org/browse/SERVER-769 | ||||
|         """ | ||||
|         class TestDoc(Document): | ||||
|             txt = StringField() | ||||
|  | ||||
|             meta = { | ||||
|                 'allow_inheritance': True, | ||||
|                 'indexes': [ | ||||
|                     {'fields': ('txt',), 'cls': False} | ||||
|                 ] | ||||
|             } | ||||
|  | ||||
|         class TestChildDoc(TestDoc): | ||||
|             txt2 = StringField() | ||||
|  | ||||
|             meta = { | ||||
|                 'indexes': [ | ||||
|                     {'fields': ('txt2',), 'cls': False} | ||||
|                 ] | ||||
|             } | ||||
|  | ||||
|         TestDoc.drop_collection() | ||||
|         TestDoc.ensure_indexes() | ||||
|         TestChildDoc.ensure_indexes() | ||||
|  | ||||
|         index_info = TestDoc._get_collection().index_information() | ||||
|         for key in index_info: | ||||
|             del index_info[key]['v']  # drop the index version - we don't care about that here | ||||
|             if 'ns' in index_info[key]: | ||||
|                 del index_info[key]['ns']  # drop the index namespace - we don't care about that here, MongoDB 3+ | ||||
|             if 'dropDups' in index_info[key]: | ||||
|                 del index_info[key]['dropDups']  # drop the index dropDups - it is deprecated in MongoDB 3+ | ||||
|  | ||||
|         self.assertEqual(index_info, { | ||||
|             'txt_1': { | ||||
|                 'key': [('txt', 1)], | ||||
|                 'background': False | ||||
|             }, | ||||
|             '_id_': { | ||||
|                 'key': [('_id', 1)], | ||||
|             }, | ||||
|             'txt2_1': { | ||||
|                 'key': [('txt2', 1)], | ||||
|                 'background': False | ||||
|             }, | ||||
|             '_cls_1': { | ||||
|                 'key': [('_cls', 1)], | ||||
|                 'background': False, | ||||
|             } | ||||
|         }) | ||||
|  | ||||
|     def test_compound_index_underscore_cls_not_overwritten(self): | ||||
|         """ | ||||
|         Test that the compound index doesn't get another _cls when it is specified | ||||
|         """ | ||||
|         class TestDoc(Document): | ||||
|             shard_1 = StringField() | ||||
|             txt_1 = StringField() | ||||
|  | ||||
|             meta = { | ||||
|                 'collection': 'test', | ||||
|                 'allow_inheritance': True, | ||||
|                 'sparse': True, | ||||
|                 'shard_key': 'shard_1', | ||||
|                 'indexes': [ | ||||
|                     ('shard_1', '_cls', 'txt_1'), | ||||
|                 ] | ||||
|             } | ||||
|  | ||||
|         TestDoc.drop_collection() | ||||
|         TestDoc.ensure_indexes() | ||||
|  | ||||
|         index_info = TestDoc._get_collection().index_information() | ||||
|         self.assertTrue('shard_1_1__cls_1_txt_1_1' in index_info) | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
|   | ||||
| @@ -1,6 +1,4 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
| import warnings | ||||
|  | ||||
| @@ -163,7 +161,7 @@ class InheritanceTest(unittest.TestCase): | ||||
|         class Employee(Person): | ||||
|             salary = IntField() | ||||
|  | ||||
|         self.assertEqual(['age', 'id', 'name', 'salary'], | ||||
|         self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'], | ||||
|                          sorted(Employee._fields.keys())) | ||||
|         self.assertEqual(Employee._get_collection_name(), | ||||
|                          Person._get_collection_name()) | ||||
| @@ -180,7 +178,7 @@ class InheritanceTest(unittest.TestCase): | ||||
|         class Employee(Person): | ||||
|             salary = IntField() | ||||
|  | ||||
|         self.assertEqual(['age', 'id', 'name', 'salary'], | ||||
|         self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'], | ||||
|                          sorted(Employee._fields.keys())) | ||||
|         self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(), | ||||
|                          ['_cls', 'name', 'age']) | ||||
| @@ -253,19 +251,17 @@ class InheritanceTest(unittest.TestCase): | ||||
|         self.assertEqual(classes, [Human]) | ||||
|  | ||||
|     def test_allow_inheritance(self): | ||||
|         """Ensure that inheritance may be disabled on simple classes and that | ||||
|         _cls and _subclasses will not be used. | ||||
|         """Ensure that inheritance is disabled by default on simple | ||||
|         classes and that _cls will not be used. | ||||
|         """ | ||||
|  | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         def create_dog_class(): | ||||
|         # can't inherit because Animal didn't explicitly allow inheritance | ||||
|         with self.assertRaises(ValueError): | ||||
|             class Dog(Animal): | ||||
|                 pass | ||||
|  | ||||
|         self.assertRaises(ValueError, create_dog_class) | ||||
|  | ||||
|         # Check that _cls etc aren't present on simple documents | ||||
|         dog = Animal(name='dog').save() | ||||
|         self.assertEqual(dog.to_mongo().keys(), ['_id', 'name']) | ||||
| @@ -275,17 +271,15 @@ class InheritanceTest(unittest.TestCase): | ||||
|         self.assertFalse('_cls' in obj) | ||||
|  | ||||
|     def test_cant_turn_off_inheritance_on_subclass(self): | ||||
|         """Ensure if inheritance is on in a subclass you cant turn it off | ||||
|         """Ensure if inheritance is on in a subclass you cant turn it off. | ||||
|         """ | ||||
|  | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             meta = {'allow_inheritance': True} | ||||
|  | ||||
|         def create_mammal_class(): | ||||
|         with self.assertRaises(ValueError): | ||||
|             class Mammal(Animal): | ||||
|                 meta = {'allow_inheritance': False} | ||||
|         self.assertRaises(ValueError, create_mammal_class) | ||||
|  | ||||
|     def test_allow_inheritance_abstract_document(self): | ||||
|         """Ensure that abstract documents can set inheritance rules and that | ||||
| @@ -298,28 +292,87 @@ class InheritanceTest(unittest.TestCase): | ||||
|         class Animal(FinalDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         def create_mammal_class(): | ||||
|         with self.assertRaises(ValueError): | ||||
|             class Mammal(Animal): | ||||
|                 pass | ||||
|         self.assertRaises(ValueError, create_mammal_class) | ||||
|  | ||||
|         # Check that _cls isn't present in simple documents | ||||
|         doc = Animal(name='dog') | ||||
|         self.assertFalse('_cls' in doc.to_mongo()) | ||||
|  | ||||
|     def test_allow_inheritance_embedded_document(self): | ||||
|         """Ensure embedded documents respect inheritance | ||||
|         """ | ||||
|     def test_abstract_handle_ids_in_metaclass_properly(self): | ||||
|  | ||||
|         class City(Document): | ||||
|             continent = StringField() | ||||
|             meta = {'abstract': True, | ||||
|                     'allow_inheritance': False} | ||||
|  | ||||
|         class EuropeanCity(City): | ||||
|             name = StringField() | ||||
|  | ||||
|         berlin = EuropeanCity(name='Berlin', continent='Europe') | ||||
|         self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) | ||||
|         self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) | ||||
|         self.assertEqual(len(berlin._fields_ordered), 3) | ||||
|         self.assertEqual(berlin._fields_ordered[0], 'id') | ||||
|  | ||||
|     def test_auto_id_not_set_if_specific_in_parent_class(self): | ||||
|  | ||||
|         class City(Document): | ||||
|             continent = StringField() | ||||
|             city_id = IntField(primary_key=True) | ||||
|             meta = {'abstract': True, | ||||
|                     'allow_inheritance': False} | ||||
|  | ||||
|         class EuropeanCity(City): | ||||
|             name = StringField() | ||||
|  | ||||
|         berlin = EuropeanCity(name='Berlin', continent='Europe') | ||||
|         self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) | ||||
|         self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) | ||||
|         self.assertEqual(len(berlin._fields_ordered), 3) | ||||
|         self.assertEqual(berlin._fields_ordered[0], 'city_id') | ||||
|  | ||||
|     def test_auto_id_vs_non_pk_id_field(self): | ||||
|  | ||||
|         class City(Document): | ||||
|             continent = StringField() | ||||
|             id = IntField() | ||||
|             meta = {'abstract': True, | ||||
|                     'allow_inheritance': False} | ||||
|  | ||||
|         class EuropeanCity(City): | ||||
|             name = StringField() | ||||
|  | ||||
|         berlin = EuropeanCity(name='Berlin', continent='Europe') | ||||
|         self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) | ||||
|         self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) | ||||
|         self.assertEqual(len(berlin._fields_ordered), 4) | ||||
|         self.assertEqual(berlin._fields_ordered[0], 'auto_id_0') | ||||
|         berlin.save() | ||||
|         self.assertEqual(berlin.pk, berlin.auto_id_0) | ||||
|  | ||||
|     def test_abstract_document_creation_does_not_fail(self): | ||||
|         class City(Document): | ||||
|             continent = StringField() | ||||
|             meta = {'abstract': True, | ||||
|                     'allow_inheritance': False} | ||||
|  | ||||
|         bkk = City(continent='asia') | ||||
|         self.assertEqual(None, bkk.pk) | ||||
|         # TODO: expected error? Shouldn't we create a new error type? | ||||
|         with self.assertRaises(KeyError): | ||||
|             setattr(bkk, 'pk', 1) | ||||
|  | ||||
|     def test_allow_inheritance_embedded_document(self): | ||||
|         """Ensure embedded documents respect inheritance.""" | ||||
|         class Comment(EmbeddedDocument): | ||||
|             content = StringField() | ||||
|  | ||||
|         def create_special_comment(): | ||||
|         with self.assertRaises(ValueError): | ||||
|             class SpecialComment(Comment): | ||||
|                 pass | ||||
|  | ||||
|         self.assertRaises(ValueError, create_special_comment) | ||||
|  | ||||
|         doc = Comment(content='test') | ||||
|         self.assertFalse('_cls' in doc.to_mongo()) | ||||
|  | ||||
| @@ -348,7 +401,7 @@ class InheritanceTest(unittest.TestCase): | ||||
|         try: | ||||
|             class MyDocument(DateCreatedDocument, DateUpdatedDocument): | ||||
|                 pass | ||||
|         except: | ||||
|         except Exception: | ||||
|             self.assertTrue(False, "Couldn't create MyDocument class") | ||||
|  | ||||
|     def test_abstract_documents(self): | ||||
| @@ -391,11 +444,21 @@ class InheritanceTest(unittest.TestCase): | ||||
|         self.assertEqual(Guppy._get_collection_name(), 'fish') | ||||
|         self.assertEqual(Human._get_collection_name(), 'human') | ||||
|  | ||||
|         def create_bad_abstract(): | ||||
|         # ensure that a subclass of a non-abstract class can't be abstract | ||||
|         with self.assertRaises(ValueError): | ||||
|             class EvilHuman(Human): | ||||
|                 evil = BooleanField(default=True) | ||||
|                 meta = {'abstract': True} | ||||
|         self.assertRaises(ValueError, create_bad_abstract) | ||||
|  | ||||
|     def test_abstract_embedded_documents(self): | ||||
|         # 789: EmbeddedDocument shouldn't inherit abstract | ||||
|         class A(EmbeddedDocument): | ||||
|             meta = {"abstract": True} | ||||
|  | ||||
|         class B(A): | ||||
|             pass | ||||
|  | ||||
|         self.assertFalse(B._meta["abstract"]) | ||||
|  | ||||
|     def test_inherited_collections(self): | ||||
|         """Ensure that subclassed documents don't override parents' | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,6 +1,3 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import unittest | ||||
| import uuid | ||||
|  | ||||
| @@ -20,6 +17,28 @@ class TestJson(unittest.TestCase): | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|  | ||||
|     def test_json_names(self): | ||||
|         """ | ||||
|         Going to test reported issue: | ||||
|             https://github.com/MongoEngine/mongoengine/issues/654 | ||||
|         where the reporter asks for the availability to perform | ||||
|         a to_json with the original class names and not the abreviated | ||||
|         mongodb document keys | ||||
|         """ | ||||
|         class Embedded(EmbeddedDocument): | ||||
|             string = StringField(db_field='s') | ||||
|  | ||||
|         class Doc(Document): | ||||
|             string = StringField(db_field='s') | ||||
|             embedded = EmbeddedDocumentField(Embedded, db_field='e') | ||||
|  | ||||
|         doc = Doc( string="Hello", embedded=Embedded(string="Inner Hello")) | ||||
|         doc_json = doc.to_json(sort_keys=True, use_db_field=False,separators=(',', ':')) | ||||
|  | ||||
|         expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}""" | ||||
|  | ||||
|         self.assertEqual( doc_json, expected_json) | ||||
|  | ||||
|     def test_json_simple(self): | ||||
|  | ||||
|         class Embedded(EmbeddedDocument): | ||||
| @@ -29,6 +48,10 @@ class TestJson(unittest.TestCase): | ||||
|             string = StringField() | ||||
|             embedded_field = EmbeddedDocumentField(Embedded) | ||||
|  | ||||
|             def __eq__(self, other): | ||||
|                 return (self.string == other.string and | ||||
|                         self.embedded_field == other.embedded_field) | ||||
|  | ||||
|         doc = Doc(string="Hi", embedded_field=Embedded(string="Hi")) | ||||
|  | ||||
|         doc_json = doc.to_json(sort_keys=True, separators=(',', ':')) | ||||
| @@ -77,6 +100,10 @@ class TestJson(unittest.TestCase): | ||||
|             generic_embedded_document_field = GenericEmbeddedDocumentField( | ||||
|                                         default=lambda: EmbeddedDoc()) | ||||
|  | ||||
|             def __eq__(self, other): | ||||
|                 import json | ||||
|                 return json.loads(self.to_json()) == json.loads(other.to_json()) | ||||
|  | ||||
|         doc = Doc() | ||||
|         self.assertEqual(doc, Doc.from_json(doc.to_json())) | ||||
|  | ||||
|   | ||||
| @@ -1,7 +1,4 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import unittest | ||||
| from datetime import datetime | ||||
|  | ||||
| @@ -60,7 +57,7 @@ class ValidatorErrorTest(unittest.TestCase): | ||||
|  | ||||
|         try: | ||||
|             User().validate() | ||||
|         except ValidationError, e: | ||||
|         except ValidationError as e: | ||||
|             self.assertTrue("User:None" in e.message) | ||||
|             self.assertEqual(e.to_dict(), { | ||||
|                 'username': 'Field is required', | ||||
| @@ -70,7 +67,7 @@ class ValidatorErrorTest(unittest.TestCase): | ||||
|         user.name = None | ||||
|         try: | ||||
|             user.save() | ||||
|         except ValidationError, e: | ||||
|         except ValidationError as e: | ||||
|             self.assertTrue("User:RossC0" in e.message) | ||||
|             self.assertEqual(e.to_dict(), { | ||||
|                 'name': 'Field is required'}) | ||||
| @@ -118,7 +115,7 @@ class ValidatorErrorTest(unittest.TestCase): | ||||
|  | ||||
|         try: | ||||
|             Doc(id="bad").validate() | ||||
|         except ValidationError, e: | ||||
|         except ValidationError as e: | ||||
|             self.assertTrue("SubDoc:None" in e.message) | ||||
|             self.assertEqual(e.to_dict(), { | ||||
|                 "e": {'val': 'OK could not be converted to int'}}) | ||||
| @@ -136,11 +133,82 @@ class ValidatorErrorTest(unittest.TestCase): | ||||
|         doc.e.val = "OK" | ||||
|         try: | ||||
|             doc.save() | ||||
|         except ValidationError, e: | ||||
|         except ValidationError as e: | ||||
|             self.assertTrue("Doc:test" in e.message) | ||||
|             self.assertEqual(e.to_dict(), { | ||||
|                 "e": {'val': 'OK could not be converted to int'}}) | ||||
|  | ||||
|     def test_embedded_weakref(self): | ||||
|  | ||||
|         class SubDoc(EmbeddedDocument): | ||||
|             val = IntField(required=True) | ||||
|  | ||||
|         class Doc(Document): | ||||
|             e = EmbeddedDocumentField(SubDoc, db_field='eb') | ||||
|  | ||||
|         Doc.drop_collection() | ||||
|  | ||||
|         d1 = Doc() | ||||
|         d2 = Doc() | ||||
|  | ||||
|         s = SubDoc() | ||||
|  | ||||
|         self.assertRaises(ValidationError, s.validate) | ||||
|  | ||||
|         d1.e = s | ||||
|         d2.e = s | ||||
|  | ||||
|         del d1 | ||||
|  | ||||
|         self.assertRaises(ValidationError, d2.validate) | ||||
|  | ||||
|     def test_parent_reference_in_child_document(self): | ||||
|         """ | ||||
|         Test to ensure a ReferenceField can store a reference to a parent | ||||
|         class when inherited. Issue #954. | ||||
|         """ | ||||
|         class Parent(Document): | ||||
|             meta = {'allow_inheritance': True} | ||||
|             reference = ReferenceField('self') | ||||
|  | ||||
|         class Child(Parent): | ||||
|             pass | ||||
|  | ||||
|         parent = Parent() | ||||
|         parent.save() | ||||
|  | ||||
|         child = Child(reference=parent) | ||||
|  | ||||
|         # Saving child should not raise a ValidationError | ||||
|         try: | ||||
|             child.save() | ||||
|         except ValidationError as e: | ||||
|             self.fail("ValidationError raised: %s" % e.message) | ||||
|  | ||||
|     def test_parent_reference_set_as_attribute_in_child_document(self): | ||||
|         """ | ||||
|         Test to ensure a ReferenceField can store a reference to a parent | ||||
|         class when inherited and when set via attribute. Issue #954. | ||||
|         """ | ||||
|         class Parent(Document): | ||||
|             meta = {'allow_inheritance': True} | ||||
|             reference = ReferenceField('self') | ||||
|  | ||||
|         class Child(Parent): | ||||
|             pass | ||||
|  | ||||
|         parent = Parent() | ||||
|         parent.save() | ||||
|  | ||||
|         child = Child() | ||||
|         child.reference = parent | ||||
|  | ||||
|         # Saving the child should not raise a ValidationError | ||||
|         try: | ||||
|             child.save() | ||||
|         except ValidationError as e: | ||||
|             self.fail("ValidationError raised: %s" % e.message) | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,18 +1,16 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import copy | ||||
| import os | ||||
| import unittest | ||||
| import tempfile | ||||
|  | ||||
| import gridfs | ||||
| import six | ||||
|  | ||||
| from nose.plugins.skip import SkipTest | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.python_support import PY3, b, StringIO | ||||
| from mongoengine.python_support import StringIO | ||||
|  | ||||
| try: | ||||
|     from PIL import Image | ||||
| @@ -20,15 +18,13 @@ try: | ||||
| except ImportError: | ||||
|     HAS_PIL = False | ||||
|  | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
| TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') | ||||
| TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png') | ||||
|  | ||||
|  | ||||
| class FileTest(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
| class FileTest(MongoDBTestCase): | ||||
|  | ||||
|     def tearDown(self): | ||||
|         self.db.drop_collection('fs.files') | ||||
| @@ -49,7 +45,7 @@ class FileTest(unittest.TestCase): | ||||
|  | ||||
|         PutFile.drop_collection() | ||||
|  | ||||
|         text = b('Hello, World!') | ||||
|         text = six.b('Hello, World!') | ||||
|         content_type = 'text/plain' | ||||
|  | ||||
|         putfile = PutFile() | ||||
| @@ -88,8 +84,8 @@ class FileTest(unittest.TestCase): | ||||
|  | ||||
|         StreamFile.drop_collection() | ||||
|  | ||||
|         text = b('Hello, World!') | ||||
|         more_text = b('Foo Bar') | ||||
|         text = six.b('Hello, World!') | ||||
|         more_text = six.b('Foo Bar') | ||||
|         content_type = 'text/plain' | ||||
|  | ||||
|         streamfile = StreamFile() | ||||
| @@ -112,15 +108,51 @@ class FileTest(unittest.TestCase): | ||||
|         result.the_file.delete() | ||||
|  | ||||
|         # Ensure deleted file returns None | ||||
|         self.assertTrue(result.the_file.read() == None) | ||||
|         self.assertTrue(result.the_file.read() is None) | ||||
|  | ||||
|     def test_file_fields_stream_after_none(self): | ||||
|         """Ensure that a file field can be written to after it has been saved as | ||||
|         None | ||||
|         """ | ||||
|         class StreamFile(Document): | ||||
|             the_file = FileField() | ||||
|  | ||||
|         StreamFile.drop_collection() | ||||
|  | ||||
|         text = six.b('Hello, World!') | ||||
|         more_text = six.b('Foo Bar') | ||||
|         content_type = 'text/plain' | ||||
|  | ||||
|         streamfile = StreamFile() | ||||
|         streamfile.save() | ||||
|         streamfile.the_file.new_file() | ||||
|         streamfile.the_file.write(text) | ||||
|         streamfile.the_file.write(more_text) | ||||
|         streamfile.the_file.close() | ||||
|         streamfile.save() | ||||
|  | ||||
|         result = StreamFile.objects.first() | ||||
|         self.assertTrue(streamfile == result) | ||||
|         self.assertEqual(result.the_file.read(), text + more_text) | ||||
|         # self.assertEqual(result.the_file.content_type, content_type) | ||||
|         result.the_file.seek(0) | ||||
|         self.assertEqual(result.the_file.tell(), 0) | ||||
|         self.assertEqual(result.the_file.read(len(text)), text) | ||||
|         self.assertEqual(result.the_file.tell(), len(text)) | ||||
|         self.assertEqual(result.the_file.read(len(more_text)), more_text) | ||||
|         self.assertEqual(result.the_file.tell(), len(text + more_text)) | ||||
|         result.the_file.delete() | ||||
|  | ||||
|         # Ensure deleted file returns None | ||||
|         self.assertTrue(result.the_file.read() is None) | ||||
|  | ||||
|     def test_file_fields_set(self): | ||||
|  | ||||
|         class SetFile(Document): | ||||
|             the_file = FileField() | ||||
|  | ||||
|         text = b('Hello, World!') | ||||
|         more_text = b('Foo Bar') | ||||
|         text = six.b('Hello, World!') | ||||
|         more_text = six.b('Foo Bar') | ||||
|  | ||||
|         SetFile.drop_collection() | ||||
|  | ||||
| @@ -149,7 +181,7 @@ class FileTest(unittest.TestCase): | ||||
|         GridDocument.drop_collection() | ||||
|  | ||||
|         with tempfile.TemporaryFile() as f: | ||||
|             f.write(b("Hello World!")) | ||||
|             f.write(six.b("Hello World!")) | ||||
|             f.flush() | ||||
|  | ||||
|             # Test without default | ||||
| @@ -166,7 +198,7 @@ class FileTest(unittest.TestCase): | ||||
|             self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id) | ||||
|  | ||||
|             # Test with default | ||||
|             doc_d = GridDocument(the_file=b('')) | ||||
|             doc_d = GridDocument(the_file=six.b('')) | ||||
|             doc_d.save() | ||||
|  | ||||
|             doc_e = GridDocument.objects.with_id(doc_d.id) | ||||
| @@ -192,7 +224,7 @@ class FileTest(unittest.TestCase): | ||||
|         # First instance | ||||
|         test_file = TestFile() | ||||
|         test_file.name = "Hello, World!" | ||||
|         test_file.the_file.put(b('Hello, World!')) | ||||
|         test_file.the_file.put(six.b('Hello, World!')) | ||||
|         test_file.save() | ||||
|  | ||||
|         # Second instance | ||||
| @@ -246,7 +278,7 @@ class FileTest(unittest.TestCase): | ||||
|  | ||||
|         test_file = TestFile() | ||||
|         self.assertFalse(bool(test_file.the_file)) | ||||
|         test_file.the_file.put(b('Hello, World!'), content_type='text/plain') | ||||
|         test_file.the_file.put(six.b('Hello, World!'), content_type='text/plain') | ||||
|         test_file.save() | ||||
|         self.assertTrue(bool(test_file.the_file)) | ||||
|  | ||||
| @@ -261,6 +293,71 @@ class FileTest(unittest.TestCase): | ||||
|         test_file = TestFile() | ||||
|         self.assertFalse(test_file.the_file in [{"test": 1}]) | ||||
|  | ||||
|     def test_file_disk_space(self): | ||||
|         """ Test disk space usage when we delete/replace a file """ | ||||
|         class TestFile(Document): | ||||
|             the_file = FileField() | ||||
|  | ||||
|         text = six.b('Hello, World!') | ||||
|         content_type = 'text/plain' | ||||
|  | ||||
|         testfile = TestFile() | ||||
|         testfile.the_file.put(text, content_type=content_type, filename="hello") | ||||
|         testfile.save() | ||||
|  | ||||
|         # Now check fs.files and fs.chunks | ||||
|         db = TestFile._get_db() | ||||
|  | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         self.assertEquals(len(list(files)), 1) | ||||
|         self.assertEquals(len(list(chunks)), 1) | ||||
|  | ||||
|         # Deleting the docoument should delete the files | ||||
|         testfile.delete() | ||||
|  | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         self.assertEquals(len(list(files)), 0) | ||||
|         self.assertEquals(len(list(chunks)), 0) | ||||
|  | ||||
|         # Test case where we don't store a file in the first place | ||||
|         testfile = TestFile() | ||||
|         testfile.save() | ||||
|  | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         self.assertEquals(len(list(files)), 0) | ||||
|         self.assertEquals(len(list(chunks)), 0) | ||||
|  | ||||
|         testfile.delete() | ||||
|  | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         self.assertEquals(len(list(files)), 0) | ||||
|         self.assertEquals(len(list(chunks)), 0) | ||||
|  | ||||
|         # Test case where we overwrite the file | ||||
|         testfile = TestFile() | ||||
|         testfile.the_file.put(text, content_type=content_type, filename="hello") | ||||
|         testfile.save() | ||||
|  | ||||
|         text = six.b('Bonjour, World!') | ||||
|         testfile.the_file.replace(text, content_type=content_type, filename="hello") | ||||
|         testfile.save() | ||||
|  | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         self.assertEquals(len(list(files)), 1) | ||||
|         self.assertEquals(len(list(chunks)), 1) | ||||
|  | ||||
|         testfile.delete() | ||||
|  | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         self.assertEquals(len(list(files)), 0) | ||||
|         self.assertEquals(len(list(chunks)), 0) | ||||
|  | ||||
|     def test_image_field(self): | ||||
|         if not HAS_PIL: | ||||
|             raise SkipTest('PIL not installed') | ||||
| @@ -271,15 +368,15 @@ class FileTest(unittest.TestCase): | ||||
|         TestImage.drop_collection() | ||||
|  | ||||
|         with tempfile.TemporaryFile() as f: | ||||
|             f.write(b("Hello World!")) | ||||
|             f.write(six.b("Hello World!")) | ||||
|             f.flush() | ||||
|  | ||||
|             t = TestImage() | ||||
|             try: | ||||
|                 t.image.put(f) | ||||
|                 self.fail("Should have raised an invalidation error") | ||||
|             except ValidationError, e: | ||||
|                 self.assertEquals("%s" % e, "Invalid image: cannot identify image file") | ||||
|             except ValidationError as e: | ||||
|                 self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f) | ||||
|  | ||||
|         t = TestImage() | ||||
|         t.image.put(open(TEST_IMAGE_PATH, 'rb')) | ||||
| @@ -395,7 +492,7 @@ class FileTest(unittest.TestCase): | ||||
|         # First instance | ||||
|         test_file = TestFile() | ||||
|         test_file.name = "Hello, World!" | ||||
|         test_file.the_file.put(b('Hello, World!'), | ||||
|         test_file.the_file.put(six.b('Hello, World!'), | ||||
|                           name="hello.txt") | ||||
|         test_file.save() | ||||
|  | ||||
| @@ -403,16 +500,15 @@ class FileTest(unittest.TestCase): | ||||
|         self.assertEqual(data.get('name'), 'hello.txt') | ||||
|  | ||||
|         test_file = TestFile.objects.first() | ||||
|         self.assertEqual(test_file.the_file.read(), | ||||
|                           b('Hello, World!')) | ||||
|         self.assertEqual(test_file.the_file.read(), six.b('Hello, World!')) | ||||
|  | ||||
|         test_file = TestFile.objects.first() | ||||
|         test_file.the_file = b('HELLO, WORLD!') | ||||
|         test_file.the_file = six.b('HELLO, WORLD!') | ||||
|         test_file.save() | ||||
|  | ||||
|         test_file = TestFile.objects.first() | ||||
|         self.assertEqual(test_file.the_file.read(), | ||||
|                           b('HELLO, WORLD!')) | ||||
|                          six.b('HELLO, WORLD!')) | ||||
|  | ||||
|     def test_copyable(self): | ||||
|         class PutFile(Document): | ||||
| @@ -420,7 +516,7 @@ class FileTest(unittest.TestCase): | ||||
|  | ||||
|         PutFile.drop_collection() | ||||
|  | ||||
|         text = b('Hello, World!') | ||||
|         text = six.b('Hello, World!') | ||||
|         content_type = 'text/plain' | ||||
|  | ||||
|         putfile = PutFile() | ||||
|   | ||||
| @@ -1,7 +1,4 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
| @@ -19,8 +16,8 @@ class GeoFieldTest(unittest.TestCase): | ||||
|     def _test_for_expected_error(self, Cls, loc, expected): | ||||
|         try: | ||||
|             Cls(loc=loc).validate() | ||||
|             self.fail() | ||||
|         except ValidationError, e: | ||||
|             self.fail('Should not validate the location {0}'.format(loc)) | ||||
|         except ValidationError as e: | ||||
|             self.assertEqual(expected, e.to_dict()['loc']) | ||||
|  | ||||
|     def test_geopoint_validation(self): | ||||
| @@ -75,6 +72,12 @@ class GeoFieldTest(unittest.TestCase): | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
|  | ||||
|         Location(loc=[1, 2]).validate() | ||||
|         Location(loc={ | ||||
|             "type": "Point", | ||||
|             "coordinates": [ | ||||
|               81.4471435546875, | ||||
|               23.61432859499169 | ||||
|             ]}).validate() | ||||
|  | ||||
|     def test_linestring_validation(self): | ||||
|         class Location(Document): | ||||
| @@ -149,6 +152,117 @@ class GeoFieldTest(unittest.TestCase): | ||||
|  | ||||
|         Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate() | ||||
|  | ||||
|     def test_multipoint_validation(self): | ||||
|         class Location(Document): | ||||
|             loc = MultiPointField() | ||||
|  | ||||
|         invalid_coords = {"x": 1, "y": 2} | ||||
|         expected = 'MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} | ||||
|         expected = 'MultiPointField type must be "MultiPoint"' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = {"type": "MultiPoint", "coordinates": [[1, 2, 3]]} | ||||
|         expected = "Value ([1, 2, 3]) must be a two-dimensional point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[]] | ||||
|         expected = "Invalid MultiPoint must contain at least one valid point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[[1]], [[1, 2, 3]]] | ||||
|         for coord in invalid_coords: | ||||
|             expected = "Value (%s) must be a two-dimensional point" % repr(coord[0]) | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
|  | ||||
|         invalid_coords = [[[{}, {}]], [("a", "b")]] | ||||
|         for coord in invalid_coords: | ||||
|             expected = "Both values (%s) in point must be float or int" % repr(coord[0]) | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
|  | ||||
|         Location(loc=[[1, 2]]).validate() | ||||
|         Location(loc={ | ||||
|             "type": "MultiPoint", | ||||
|             "coordinates": [ | ||||
|                 [1, 2], | ||||
|                 [81.4471435546875, 23.61432859499169] | ||||
|             ]}).validate() | ||||
|  | ||||
|     def test_multilinestring_validation(self): | ||||
|         class Location(Document): | ||||
|             loc = MultiLineStringField() | ||||
|  | ||||
|         invalid_coords = {"x": 1, "y": 2} | ||||
|         expected = 'MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} | ||||
|         expected = 'MultiLineStringField type must be "MultiLineString"' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = {"type": "MultiLineString", "coordinates": [[[1, 2, 3]]]} | ||||
|         expected = "Invalid MultiLineString:\nValue ([1, 2, 3]) must be a two-dimensional point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [5, "a"] | ||||
|         expected = "Invalid MultiLineString must contain at least one valid linestring" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[[1]]] | ||||
|         expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0]) | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[[1, 2, 3]]] | ||||
|         expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0]) | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[[[{}, {}]]], [[("a", "b")]]] | ||||
|         for coord in invalid_coords: | ||||
|             expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0]) | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
|  | ||||
|         Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate() | ||||
|  | ||||
|     def test_multipolygon_validation(self): | ||||
|         class Location(Document): | ||||
|             loc = MultiPolygonField() | ||||
|  | ||||
|         invalid_coords = {"x": 1, "y": 2} | ||||
|         expected = 'MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} | ||||
|         expected = 'MultiPolygonField type must be "MultiPolygon"' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = {"type": "MultiPolygon", "coordinates": [[[[1, 2, 3]]]]} | ||||
|         expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[[[5, "a"]]]] | ||||
|         expected = "Invalid MultiPolygon:\nBoth values ([5, 'a']) in point must be float or int" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[[[]]]] | ||||
|         expected = "Invalid MultiPolygon must contain at least one valid Polygon" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[[[1, 2, 3]]]] | ||||
|         expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[[[{}, {}]]], [[("a", "b")]]] | ||||
|         expected = "Invalid MultiPolygon:\nBoth values ([{}, {}]) in point must be float or int, Both values (('a', 'b')) in point must be float or int" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[[[1, 2], [3, 4]]]] | ||||
|         expected = "Invalid MultiPolygon:\nLineStrings must start and end at the same point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate() | ||||
|  | ||||
|     def test_indexes_geopoint(self): | ||||
|         """Ensure that indexes are created automatically for GeoPointFields. | ||||
|         """ | ||||
| @@ -219,12 +333,11 @@ class GeoFieldTest(unittest.TestCase): | ||||
|         Location.drop_collection() | ||||
|         Parent.drop_collection() | ||||
|  | ||||
|         list(Parent.objects) | ||||
|  | ||||
|         collection = Parent._get_collection() | ||||
|         info = collection.index_information() | ||||
|  | ||||
|         Parent(name='Berlin').save() | ||||
|         info = Parent._get_collection().index_information() | ||||
|         self.assertFalse('location_2d' in info) | ||||
|         info = Location._get_collection().index_information() | ||||
|         self.assertTrue('location_2d' in info) | ||||
|  | ||||
|         self.assertEqual(len(Parent._geo_indices()), 0) | ||||
|         self.assertEqual(len(Location._geo_indices()), 1) | ||||
|   | ||||
| @@ -17,7 +17,16 @@ class PickleTest(Document): | ||||
|     photo = FileField() | ||||
|  | ||||
|  | ||||
| class PickleDyanmicEmbedded(DynamicEmbeddedDocument): | ||||
| class NewDocumentPickleTest(Document): | ||||
|     number = IntField() | ||||
|     string = StringField(choices=(('One', '1'), ('Two', '2'))) | ||||
|     embedded = EmbeddedDocumentField(PickleEmbedded) | ||||
|     lists = ListField(StringField()) | ||||
|     photo = FileField() | ||||
|     new_field = StringField() | ||||
|  | ||||
|  | ||||
| class PickleDynamicEmbedded(DynamicEmbeddedDocument): | ||||
|     date = DateTimeField(default=datetime.now) | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -1,8 +0,0 @@ | ||||
| from convert_to_new_inheritance_model import * | ||||
| from decimalfield_as_float import * | ||||
| from refrencefield_dbref_to_object_id import * | ||||
| from turn_off_inheritance import * | ||||
| from uuidfield_to_binary import * | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
| @@ -1,51 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import Document, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import StringField | ||||
|  | ||||
| __all__ = ('ConvertToNewInheritanceModel', ) | ||||
|  | ||||
|  | ||||
| class ConvertToNewInheritanceModel(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|  | ||||
|     def tearDown(self): | ||||
|         for collection in self.db.collection_names(): | ||||
|             if 'system.' in collection: | ||||
|                 continue | ||||
|             self.db.drop_collection(collection) | ||||
|  | ||||
|     def test_how_to_convert_to_the_new_inheritance_model(self): | ||||
|         """Demonstrates migrating from 0.7 to 0.8 | ||||
|         """ | ||||
|  | ||||
|         # 1. Declaration of the class | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             meta = { | ||||
|                 'allow_inheritance': True, | ||||
|                 'indexes': ['name'] | ||||
|             } | ||||
|  | ||||
|         # 2. Remove _types | ||||
|         collection = Animal._get_collection() | ||||
|         collection.update({}, {"$unset": {"_types": 1}}, multi=True) | ||||
|  | ||||
|         # 3. Confirm extra data is removed | ||||
|         count = collection.find({'_types': {"$exists": True}}).count() | ||||
|         self.assertEqual(0, count) | ||||
|  | ||||
|         # 4. Remove indexes | ||||
|         info = collection.index_information() | ||||
|         indexes_to_drop = [key for key, value in info.iteritems() | ||||
|                            if '_types' in dict(value['key'])] | ||||
|         for index in indexes_to_drop: | ||||
|             collection.drop_index(index) | ||||
|  | ||||
|         # 5. Recreate indexes | ||||
|         Animal.ensure_indexes() | ||||
| @@ -1,50 +0,0 @@ | ||||
|  # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
| import decimal | ||||
| from decimal import Decimal | ||||
|  | ||||
| from mongoengine import Document, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import StringField, DecimalField, ListField | ||||
|  | ||||
| __all__ = ('ConvertDecimalField', ) | ||||
|  | ||||
|  | ||||
| class ConvertDecimalField(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|  | ||||
|     def test_how_to_convert_decimal_fields(self): | ||||
|         """Demonstrates migrating from 0.7 to 0.8 | ||||
|         """ | ||||
|  | ||||
|         # 1. Old definition - using dbrefs | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             money = DecimalField(force_string=True) | ||||
|             monies = ListField(DecimalField(force_string=True)) | ||||
|  | ||||
|         Person.drop_collection() | ||||
|         Person(name="Wilson Jr", money=Decimal("2.50"), | ||||
|                monies=[Decimal("2.10"), Decimal("5.00")]).save() | ||||
|  | ||||
|         # 2. Start the migration by changing the schema | ||||
|         # Change DecimalField - add precision and rounding settings | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             money = DecimalField(precision=2, rounding=decimal.ROUND_HALF_UP) | ||||
|             monies = ListField(DecimalField(precision=2, | ||||
|                                             rounding=decimal.ROUND_HALF_UP)) | ||||
|  | ||||
|         # 3. Loop all the objects and mark parent as changed | ||||
|         for p in Person.objects: | ||||
|             p._mark_as_changed('money') | ||||
|             p._mark_as_changed('monies') | ||||
|             p.save() | ||||
|  | ||||
|         # 4. Confirmation of the fix! | ||||
|         wilson = Person.objects(name="Wilson Jr").as_pymongo()[0] | ||||
|         self.assertTrue(isinstance(wilson['money'], float)) | ||||
|         self.assertTrue(all([isinstance(m, float) for m in wilson['monies']])) | ||||
| @@ -1,52 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import Document, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import StringField, ReferenceField, ListField | ||||
|  | ||||
| __all__ = ('ConvertToObjectIdsModel', ) | ||||
|  | ||||
|  | ||||
| class ConvertToObjectIdsModel(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|  | ||||
|     def test_how_to_convert_to_object_id_reference_fields(self): | ||||
|         """Demonstrates migrating from 0.7 to 0.8 | ||||
|         """ | ||||
|  | ||||
|         # 1. Old definition - using dbrefs | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             parent = ReferenceField('self', dbref=True) | ||||
|             friends = ListField(ReferenceField('self', dbref=True)) | ||||
|  | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         p1 = Person(name="Wilson", parent=None).save() | ||||
|         f1 = Person(name="John", parent=None).save() | ||||
|         f2 = Person(name="Paul", parent=None).save() | ||||
|         f3 = Person(name="George", parent=None).save() | ||||
|         f4 = Person(name="Ringo", parent=None).save() | ||||
|         Person(name="Wilson Jr", parent=p1, friends=[f1, f2, f3, f4]).save() | ||||
|  | ||||
|         # 2. Start the migration by changing the schema | ||||
|         # Change ReferenceField as now dbref defaults to False | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             parent = ReferenceField('self') | ||||
|             friends = ListField(ReferenceField('self')) | ||||
|  | ||||
|         # 3. Loop all the objects and mark parent as changed | ||||
|         for p in Person.objects: | ||||
|             p._mark_as_changed('parent') | ||||
|             p._mark_as_changed('friends') | ||||
|             p.save() | ||||
|  | ||||
|         # 4. Confirmation of the fix! | ||||
|         wilson = Person.objects(name="Wilson Jr").as_pymongo()[0] | ||||
|         self.assertEqual(p1.id, wilson['parent']) | ||||
|         self.assertEqual([f1.id, f2.id, f3.id, f4.id], wilson['friends']) | ||||
| @@ -1,62 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import Document, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import StringField | ||||
|  | ||||
| __all__ = ('TurnOffInheritanceTest', ) | ||||
|  | ||||
|  | ||||
| class TurnOffInheritanceTest(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|  | ||||
|     def tearDown(self): | ||||
|         for collection in self.db.collection_names(): | ||||
|             if 'system.' in collection: | ||||
|                 continue | ||||
|             self.db.drop_collection(collection) | ||||
|  | ||||
|     def test_how_to_turn_off_inheritance(self): | ||||
|         """Demonstrates migrating from allow_inheritance = True to False. | ||||
|         """ | ||||
|  | ||||
|         # 1. Old declaration of the class | ||||
|  | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             meta = { | ||||
|                 'allow_inheritance': True, | ||||
|                 'indexes': ['name'] | ||||
|             } | ||||
|  | ||||
|         # 2. Turn off inheritance | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             meta = { | ||||
|                 'allow_inheritance': False, | ||||
|                 'indexes': ['name'] | ||||
|             } | ||||
|  | ||||
|         # 3. Remove _types and _cls | ||||
|         collection = Animal._get_collection() | ||||
|         collection.update({}, {"$unset": {"_types": 1, "_cls": 1}}, multi=True) | ||||
|  | ||||
|         # 3. Confirm extra data is removed | ||||
|         count = collection.find({"$or": [{'_types': {"$exists": True}}, | ||||
|                                          {'_cls': {"$exists": True}}]}).count() | ||||
|         assert count == 0 | ||||
|  | ||||
|         # 4. Remove indexes | ||||
|         info = collection.index_information() | ||||
|         indexes_to_drop = [key for key, value in info.iteritems() | ||||
|                            if '_types' in dict(value['key']) | ||||
|                               or '_cls' in dict(value['key'])] | ||||
|         for index in indexes_to_drop: | ||||
|             collection.drop_index(index) | ||||
|  | ||||
|         # 5. Recreate indexes | ||||
|         Animal.ensure_indexes() | ||||
| @@ -1,48 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
| import uuid | ||||
|  | ||||
| from mongoengine import Document, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import StringField, UUIDField, ListField | ||||
|  | ||||
| __all__ = ('ConvertToBinaryUUID', ) | ||||
|  | ||||
|  | ||||
| class ConvertToBinaryUUID(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|  | ||||
|     def test_how_to_convert_to_binary_uuid_fields(self): | ||||
|         """Demonstrates migrating from 0.7 to 0.8 | ||||
|         """ | ||||
|  | ||||
|         # 1. Old definition - using dbrefs | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             uuid = UUIDField(binary=False) | ||||
|             uuids = ListField(UUIDField(binary=False)) | ||||
|  | ||||
|         Person.drop_collection() | ||||
|         Person(name="Wilson Jr", uuid=uuid.uuid4(), | ||||
|                uuids=[uuid.uuid4(), uuid.uuid4()]).save() | ||||
|  | ||||
|         # 2. Start the migration by changing the schema | ||||
|         # Change UUIDFIeld as now binary defaults to True | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             uuid = UUIDField() | ||||
|             uuids = ListField(UUIDField()) | ||||
|  | ||||
|         # 3. Loop all the objects and mark parent as changed | ||||
|         for p in Person.objects: | ||||
|             p._mark_as_changed('uuid') | ||||
|             p._mark_as_changed('uuids') | ||||
|             p.save() | ||||
|  | ||||
|         # 4. Confirmation of the fix! | ||||
|         wilson = Person.objects(name="Wilson Jr").as_pymongo()[0] | ||||
|         self.assertTrue(isinstance(wilson['uuid'], uuid.UUID)) | ||||
|         self.assertTrue(all([isinstance(u, uuid.UUID) for u in wilson['uuids']])) | ||||
| @@ -3,3 +3,4 @@ from field_list import * | ||||
| from queryset import * | ||||
| from visitor import * | ||||
| from geo import * | ||||
| from modify import * | ||||
| @@ -1,6 +1,3 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
| @@ -95,7 +92,7 @@ class OnlyExcludeAllTest(unittest.TestCase): | ||||
|         exclude = ['d', 'e'] | ||||
|         only = ['b', 'c'] | ||||
|  | ||||
|         qs = MyDoc.objects.fields(**dict(((i, 1) for i in include))) | ||||
|         qs = MyDoc.objects.fields(**{i: 1 for i in include}) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), | ||||
|                          {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1}) | ||||
|         qs = qs.only(*only) | ||||
| @@ -103,14 +100,14 @@ class OnlyExcludeAllTest(unittest.TestCase): | ||||
|         qs = qs.exclude(*exclude) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) | ||||
|  | ||||
|         qs = MyDoc.objects.fields(**dict(((i, 1) for i in include))) | ||||
|         qs = MyDoc.objects.fields(**{i: 1 for i in include}) | ||||
|         qs = qs.exclude(*exclude) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1}) | ||||
|         qs = qs.only(*only) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) | ||||
|  | ||||
|         qs = MyDoc.objects.exclude(*exclude) | ||||
|         qs = qs.fields(**dict(((i, 1) for i in include))) | ||||
|         qs = qs.fields(**{i: 1 for i in include}) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1}) | ||||
|         qs = qs.only(*only) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) | ||||
| @@ -129,7 +126,7 @@ class OnlyExcludeAllTest(unittest.TestCase): | ||||
|         exclude = ['d', 'e'] | ||||
|         only = ['b', 'c'] | ||||
|  | ||||
|         qs = MyDoc.objects.fields(**dict(((i, 1) for i in include))) | ||||
|         qs = MyDoc.objects.fields(**{i: 1 for i in include}) | ||||
|         qs = qs.exclude(*exclude) | ||||
|         qs = qs.only(*only) | ||||
|         qs = qs.fields(slice__b=5) | ||||
| @@ -144,6 +141,16 @@ class OnlyExcludeAllTest(unittest.TestCase): | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), | ||||
|                          {'b': {'$slice': 5}}) | ||||
|  | ||||
|     def test_mix_slice_with_other_fields(self): | ||||
|         class MyDoc(Document): | ||||
|             a = ListField() | ||||
|             b = ListField() | ||||
|             c = ListField() | ||||
|  | ||||
|         qs = MyDoc.objects.fields(a=1, b=0, slice__c=2) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), | ||||
|                          {'c': {'$slice': 2}, 'a': 1}) | ||||
|  | ||||
|     def test_only(self): | ||||
|         """Ensure that QuerySet.only only returns the requested fields. | ||||
|         """ | ||||
|   | ||||
| @@ -1,93 +1,139 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import datetime | ||||
| import unittest | ||||
| from datetime import datetime, timedelta | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
| from tests.utils import MongoDBTestCase, needs_mongodb_v3 | ||||
|  | ||||
|  | ||||
| __all__ = ("GeoQueriesTest",) | ||||
|  | ||||
|  | ||||
| class GeoQueriesTest(unittest.TestCase): | ||||
| class GeoQueriesTest(MongoDBTestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|  | ||||
|     def test_geospatial_operators(self): | ||||
|         """Ensure that geospatial queries are working. | ||||
|         """ | ||||
|     def _create_event_data(self, point_field_class=GeoPointField): | ||||
|         """Create some sample data re-used in many of the tests below.""" | ||||
|         class Event(Document): | ||||
|             title = StringField() | ||||
|             date = DateTimeField() | ||||
|             location = GeoPointField() | ||||
|             location = point_field_class() | ||||
|  | ||||
|             def __unicode__(self): | ||||
|                 return self.title | ||||
|  | ||||
|         self.Event = Event | ||||
|  | ||||
|         Event.drop_collection() | ||||
|  | ||||
|         event1 = Event(title="Coltrane Motion @ Double Door", | ||||
|                        date=datetime.now() - timedelta(days=1), | ||||
|                        location=[-87.677137, 41.909889]).save() | ||||
|         event2 = Event(title="Coltrane Motion @ Bottom of the Hill", | ||||
|                        date=datetime.now() - timedelta(days=10), | ||||
|                        location=[-122.4194155, 37.7749295]).save() | ||||
|         event3 = Event(title="Coltrane Motion @ Empty Bottle", | ||||
|                        date=datetime.now(), | ||||
|                        location=[-87.686638, 41.900474]).save() | ||||
|         event1 = Event.objects.create( | ||||
|             title="Coltrane Motion @ Double Door", | ||||
|             date=datetime.datetime.now() - datetime.timedelta(days=1), | ||||
|             location=[-87.677137, 41.909889]) | ||||
|         event2 = Event.objects.create( | ||||
|             title="Coltrane Motion @ Bottom of the Hill", | ||||
|             date=datetime.datetime.now() - datetime.timedelta(days=10), | ||||
|             location=[-122.4194155, 37.7749295]) | ||||
|         event3 = Event.objects.create( | ||||
|             title="Coltrane Motion @ Empty Bottle", | ||||
|             date=datetime.datetime.now(), | ||||
|             location=[-87.686638, 41.900474]) | ||||
|  | ||||
|         return event1, event2, event3 | ||||
|  | ||||
|     def test_near(self): | ||||
|         """Make sure the "near" operator works.""" | ||||
|         event1, event2, event3 = self._create_event_data() | ||||
|  | ||||
|         # find all events "near" pitchfork office, chicago. | ||||
|         # note that "near" will show the san francisco event, too, | ||||
|         # although it sorts to last. | ||||
|         events = Event.objects(location__near=[-87.67892, 41.9120459]) | ||||
|         events = self.Event.objects(location__near=[-87.67892, 41.9120459]) | ||||
|         self.assertEqual(events.count(), 3) | ||||
|         self.assertEqual(list(events), [event1, event3, event2]) | ||||
|  | ||||
|         # ensure ordering is respected by "near" | ||||
|         events = self.Event.objects(location__near=[-87.67892, 41.9120459]) | ||||
|         events = events.order_by("-date") | ||||
|         self.assertEqual(events.count(), 3) | ||||
|         self.assertEqual(list(events), [event3, event1, event2]) | ||||
|  | ||||
|     def test_near_and_max_distance(self): | ||||
|         """Ensure the "max_distance" operator works alongside the "near" | ||||
|         operator. | ||||
|         """ | ||||
|         event1, event2, event3 = self._create_event_data() | ||||
|  | ||||
|         # find events within 10 degrees of san francisco | ||||
|         point = [-122.415579, 37.7566023] | ||||
|         events = self.Event.objects(location__near=point, | ||||
|                                     location__max_distance=10) | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0], event2) | ||||
|  | ||||
|     # $minDistance was added in MongoDB v2.6, but continued being buggy | ||||
|     # until v3.0; skip for older versions | ||||
|     @needs_mongodb_v3 | ||||
|     def test_near_and_min_distance(self): | ||||
|         """Ensure the "min_distance" operator works alongside the "near" | ||||
|         operator. | ||||
|         """ | ||||
|         event1, event2, event3 = self._create_event_data() | ||||
|  | ||||
|         # find events at least 10 degrees away of san francisco | ||||
|         point = [-122.415579, 37.7566023] | ||||
|         events = self.Event.objects(location__near=point, | ||||
|                                     location__min_distance=10) | ||||
|         self.assertEqual(events.count(), 2) | ||||
|  | ||||
|     def test_within_distance(self): | ||||
|         """Make sure the "within_distance" operator works.""" | ||||
|         event1, event2, event3 = self._create_event_data() | ||||
|  | ||||
|         # find events within 5 degrees of pitchfork office, chicago | ||||
|         point_and_distance = [[-87.67892, 41.9120459], 5] | ||||
|         events = Event.objects(location__within_distance=point_and_distance) | ||||
|         events = self.Event.objects( | ||||
|             location__within_distance=point_and_distance) | ||||
|         self.assertEqual(events.count(), 2) | ||||
|         events = list(events) | ||||
|         self.assertTrue(event2 not in events) | ||||
|         self.assertTrue(event1 in events) | ||||
|         self.assertTrue(event3 in events) | ||||
|  | ||||
|         # ensure ordering is respected by "near" | ||||
|         events = Event.objects(location__near=[-87.67892, 41.9120459]) | ||||
|         events = events.order_by("-date") | ||||
|         self.assertEqual(events.count(), 3) | ||||
|         self.assertEqual(list(events), [event3, event1, event2]) | ||||
|  | ||||
|         # find events within 10 degrees of san francisco | ||||
|         point = [-122.415579, 37.7566023] | ||||
|         events = Event.objects(location__near=point, location__max_distance=10) | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0], event2) | ||||
|  | ||||
|         # find events within 10 degrees of san francisco | ||||
|         point_and_distance = [[-122.415579, 37.7566023], 10] | ||||
|         events = Event.objects(location__within_distance=point_and_distance) | ||||
|         events = self.Event.objects( | ||||
|             location__within_distance=point_and_distance) | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0], event2) | ||||
|  | ||||
|         # find events within 1 degree of greenpoint, broolyn, nyc, ny | ||||
|         point_and_distance = [[-73.9509714, 40.7237134], 1] | ||||
|         events = Event.objects(location__within_distance=point_and_distance) | ||||
|         events = self.Event.objects( | ||||
|             location__within_distance=point_and_distance) | ||||
|         self.assertEqual(events.count(), 0) | ||||
|  | ||||
|         # ensure ordering is respected by "within_distance" | ||||
|         point_and_distance = [[-87.67892, 41.9120459], 10] | ||||
|         events = Event.objects(location__within_distance=point_and_distance) | ||||
|         events = self.Event.objects( | ||||
|             location__within_distance=point_and_distance) | ||||
|         events = events.order_by("-date") | ||||
|         self.assertEqual(events.count(), 2) | ||||
|         self.assertEqual(events[0], event3) | ||||
|  | ||||
|     def test_within_box(self): | ||||
|         """Ensure the "within_box" operator works.""" | ||||
|         event1, event2, event3 = self._create_event_data() | ||||
|  | ||||
|         # check that within_box works | ||||
|         box = [(-125.0, 35.0), (-100.0, 40.0)] | ||||
|         events = Event.objects(location__within_box=box) | ||||
|         events = self.Event.objects(location__within_box=box) | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0].id, event2.id) | ||||
|  | ||||
|     def test_within_polygon(self): | ||||
|         """Ensure the "within_polygon" operator works.""" | ||||
|         event1, event2, event3 = self._create_event_data() | ||||
|  | ||||
|         polygon = [ | ||||
|             (-87.694445, 41.912114), | ||||
|             (-87.69084, 41.919395), | ||||
| @@ -95,7 +141,7 @@ class GeoQueriesTest(unittest.TestCase): | ||||
|             (-87.654276, 41.911731), | ||||
|             (-87.656164, 41.898061), | ||||
|         ] | ||||
|         events = Event.objects(location__within_polygon=polygon) | ||||
|         events = self.Event.objects(location__within_polygon=polygon) | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0].id, event1.id) | ||||
|  | ||||
| @@ -104,13 +150,151 @@ class GeoQueriesTest(unittest.TestCase): | ||||
|             (-1.225891, 52.792797), | ||||
|             (-4.40094, 53.389881) | ||||
|         ] | ||||
|         events = Event.objects(location__within_polygon=polygon2) | ||||
|         events = self.Event.objects(location__within_polygon=polygon2) | ||||
|         self.assertEqual(events.count(), 0) | ||||
|  | ||||
|     def test_geo_spatial_embedded(self): | ||||
|     def test_2dsphere_near(self): | ||||
|         """Make sure the "near" operator works with a PointField, which | ||||
|         corresponds to a 2dsphere index. | ||||
|         """ | ||||
|         event1, event2, event3 = self._create_event_data( | ||||
|             point_field_class=PointField | ||||
|         ) | ||||
|  | ||||
|         # find all events "near" pitchfork office, chicago. | ||||
|         # note that "near" will show the san francisco event, too, | ||||
|         # although it sorts to last. | ||||
|         events = self.Event.objects(location__near=[-87.67892, 41.9120459]) | ||||
|         self.assertEqual(events.count(), 3) | ||||
|         self.assertEqual(list(events), [event1, event3, event2]) | ||||
|  | ||||
|         # ensure ordering is respected by "near" | ||||
|         events = self.Event.objects(location__near=[-87.67892, 41.9120459]) | ||||
|         events = events.order_by("-date") | ||||
|         self.assertEqual(events.count(), 3) | ||||
|         self.assertEqual(list(events), [event3, event1, event2]) | ||||
|  | ||||
|     def test_2dsphere_near_and_max_distance(self): | ||||
|         """Ensure the "max_distance" operator works alongside the "near" | ||||
|         operator with a 2dsphere index. | ||||
|         """ | ||||
|         event1, event2, event3 = self._create_event_data( | ||||
|             point_field_class=PointField | ||||
|         ) | ||||
|  | ||||
|         # find events within 10km of san francisco | ||||
|         point = [-122.415579, 37.7566023] | ||||
|         events = self.Event.objects(location__near=point, | ||||
|                                     location__max_distance=10000) | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0], event2) | ||||
|  | ||||
|         # find events within 1km of greenpoint, broolyn, nyc, ny | ||||
|         events = self.Event.objects(location__near=[-73.9509714, 40.7237134], | ||||
|                                     location__max_distance=1000) | ||||
|         self.assertEqual(events.count(), 0) | ||||
|  | ||||
|         # ensure ordering is respected by "near" | ||||
|         events = self.Event.objects( | ||||
|             location__near=[-87.67892, 41.9120459], | ||||
|             location__max_distance=10000 | ||||
|         ).order_by("-date") | ||||
|         self.assertEqual(events.count(), 2) | ||||
|         self.assertEqual(events[0], event3) | ||||
|  | ||||
|     def test_2dsphere_geo_within_box(self): | ||||
|         """Ensure the "geo_within_box" operator works with a 2dsphere | ||||
|         index. | ||||
|         """ | ||||
|         event1, event2, event3 = self._create_event_data( | ||||
|             point_field_class=PointField | ||||
|         ) | ||||
|  | ||||
|         # check that within_box works | ||||
|         box = [(-125.0, 35.0), (-100.0, 40.0)] | ||||
|         events = self.Event.objects(location__geo_within_box=box) | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0].id, event2.id) | ||||
|  | ||||
|     def test_2dsphere_geo_within_polygon(self): | ||||
|         """Ensure the "geo_within_polygon" operator works with a | ||||
|         2dsphere index. | ||||
|         """ | ||||
|         event1, event2, event3 = self._create_event_data( | ||||
|             point_field_class=PointField | ||||
|         ) | ||||
|  | ||||
|         polygon = [ | ||||
|             (-87.694445, 41.912114), | ||||
|             (-87.69084, 41.919395), | ||||
|             (-87.681742, 41.927186), | ||||
|             (-87.654276, 41.911731), | ||||
|             (-87.656164, 41.898061), | ||||
|         ] | ||||
|         events = self.Event.objects(location__geo_within_polygon=polygon) | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0].id, event1.id) | ||||
|  | ||||
|         polygon2 = [ | ||||
|             (-1.742249, 54.033586), | ||||
|             (-1.225891, 52.792797), | ||||
|             (-4.40094, 53.389881) | ||||
|         ] | ||||
|         events = self.Event.objects(location__geo_within_polygon=polygon2) | ||||
|         self.assertEqual(events.count(), 0) | ||||
|  | ||||
|     # $minDistance was added in MongoDB v2.6, but continued being buggy | ||||
|     # until v3.0; skip for older versions | ||||
|     @needs_mongodb_v3 | ||||
|     def test_2dsphere_near_and_min_max_distance(self): | ||||
|         """Ensure "min_distace" and "max_distance" operators work well | ||||
|         together with the "near" operator in a 2dsphere index. | ||||
|         """ | ||||
|         event1, event2, event3 = self._create_event_data( | ||||
|             point_field_class=PointField | ||||
|         ) | ||||
|  | ||||
|         # ensure min_distance and max_distance combine well | ||||
|         events = self.Event.objects( | ||||
|             location__near=[-87.67892, 41.9120459], | ||||
|             location__min_distance=1000, | ||||
|             location__max_distance=10000 | ||||
|         ).order_by("-date") | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0], event3) | ||||
|  | ||||
|         # ensure ordering is respected by "near" with "min_distance" | ||||
|         events = self.Event.objects( | ||||
|             location__near=[-87.67892, 41.9120459], | ||||
|             location__min_distance=10000 | ||||
|         ).order_by("-date") | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0], event2) | ||||
|  | ||||
|     def test_2dsphere_geo_within_center(self): | ||||
|         """Make sure the "geo_within_center" operator works with a | ||||
|         2dsphere index. | ||||
|         """ | ||||
|         event1, event2, event3 = self._create_event_data( | ||||
|             point_field_class=PointField | ||||
|         ) | ||||
|  | ||||
|         # find events within 5 degrees of pitchfork office, chicago | ||||
|         point_and_distance = [[-87.67892, 41.9120459], 2] | ||||
|         events = self.Event.objects( | ||||
|             location__geo_within_center=point_and_distance) | ||||
|         self.assertEqual(events.count(), 2) | ||||
|         events = list(events) | ||||
|         self.assertTrue(event2 not in events) | ||||
|         self.assertTrue(event1 in events) | ||||
|         self.assertTrue(event3 in events) | ||||
|  | ||||
|     def _test_embedded(self, point_field_class): | ||||
|         """Helper test method ensuring given point field class works | ||||
|         well in an embedded document. | ||||
|         """ | ||||
|         class Venue(EmbeddedDocument): | ||||
|             location = GeoPointField() | ||||
|             location = point_field_class() | ||||
|             name = StringField() | ||||
|  | ||||
|         class Event(Document): | ||||
| @@ -136,9 +320,18 @@ class GeoQueriesTest(unittest.TestCase): | ||||
|         self.assertEqual(events.count(), 3) | ||||
|         self.assertEqual(list(events), [event1, event3, event2]) | ||||
|  | ||||
|     def test_geo_spatial_embedded(self): | ||||
|         """Make sure GeoPointField works properly in an embedded document.""" | ||||
|         self._test_embedded(point_field_class=GeoPointField) | ||||
|  | ||||
|     def test_2dsphere_point_embedded(self): | ||||
|         """Make sure PointField works properly in an embedded document.""" | ||||
|         self._test_embedded(point_field_class=PointField) | ||||
|  | ||||
|     # Needs MongoDB > 2.6.4 https://jira.mongodb.org/browse/SERVER-14039 | ||||
|     @needs_mongodb_v3 | ||||
|     def test_spherical_geospatial_operators(self): | ||||
|         """Ensure that spherical geospatial queries are working | ||||
|         """ | ||||
|         """Ensure that spherical geospatial queries are working.""" | ||||
|         class Point(Document): | ||||
|             location = GeoPointField() | ||||
|  | ||||
| @@ -158,7 +351,10 @@ class GeoQueriesTest(unittest.TestCase): | ||||
|  | ||||
|         # Same behavior for _within_spherical_distance | ||||
|         points = Point.objects( | ||||
|             location__within_spherical_distance=[[-122, 37.5], 60/earth_radius] | ||||
|             location__within_spherical_distance=[ | ||||
|                 [-122, 37.5], | ||||
|                 60 / earth_radius | ||||
|             ] | ||||
|         ) | ||||
|         self.assertEqual(points.count(), 2) | ||||
|  | ||||
| @@ -166,6 +362,19 @@ class GeoQueriesTest(unittest.TestCase): | ||||
|                                location__max_distance=60 / earth_radius) | ||||
|         self.assertEqual(points.count(), 2) | ||||
|  | ||||
|         # Test query works with max_distance, being farer from one point | ||||
|         points = Point.objects(location__near_sphere=[-122, 37.8], | ||||
|                                location__max_distance=60 / earth_radius) | ||||
|         close_point = points.first() | ||||
|         self.assertEqual(points.count(), 1) | ||||
|  | ||||
|         # Test query works with min_distance, being farer from one point | ||||
|         points = Point.objects(location__near_sphere=[-122, 37.8], | ||||
|                                location__min_distance=60 / earth_radius) | ||||
|         self.assertEqual(points.count(), 1) | ||||
|         far_point = points.first() | ||||
|         self.assertNotEqual(close_point, far_point) | ||||
|  | ||||
|         # Finds both points, but orders the north point first because it's | ||||
|         # closer to the reference point to the north. | ||||
|         points = Point.objects(location__near_sphere=[-122, 38.5]) | ||||
| @@ -183,127 +392,15 @@ class GeoQueriesTest(unittest.TestCase): | ||||
|         # Finds only one point because only the first point is within 60km of | ||||
|         # the reference point to the south. | ||||
|         points = Point.objects( | ||||
|             location__within_spherical_distance=[[-122, 36.5], 60/earth_radius]) | ||||
|             location__within_spherical_distance=[ | ||||
|                 [-122, 36.5], | ||||
|                 60 / earth_radius | ||||
|             ] | ||||
|         ) | ||||
|         self.assertEqual(points.count(), 1) | ||||
|         self.assertEqual(points[0].id, south_point.id) | ||||
|  | ||||
|     def test_2dsphere_point(self): | ||||
|  | ||||
|         class Event(Document): | ||||
|             title = StringField() | ||||
|             date = DateTimeField() | ||||
|             location = PointField() | ||||
|  | ||||
|             def __unicode__(self): | ||||
|                 return self.title | ||||
|  | ||||
|         Event.drop_collection() | ||||
|  | ||||
|         event1 = Event(title="Coltrane Motion @ Double Door", | ||||
|                        date=datetime.now() - timedelta(days=1), | ||||
|                        location=[-87.677137, 41.909889]) | ||||
|         event1.save() | ||||
|         event2 = Event(title="Coltrane Motion @ Bottom of the Hill", | ||||
|                        date=datetime.now() - timedelta(days=10), | ||||
|                        location=[-122.4194155, 37.7749295]).save() | ||||
|         event3 = Event(title="Coltrane Motion @ Empty Bottle", | ||||
|                        date=datetime.now(), | ||||
|                        location=[-87.686638, 41.900474]).save() | ||||
|  | ||||
|         # find all events "near" pitchfork office, chicago. | ||||
|         # note that "near" will show the san francisco event, too, | ||||
|         # although it sorts to last. | ||||
|         events = Event.objects(location__near=[-87.67892, 41.9120459]) | ||||
|         self.assertEqual(events.count(), 3) | ||||
|         self.assertEqual(list(events), [event1, event3, event2]) | ||||
|  | ||||
|         # find events within 5 degrees of pitchfork office, chicago | ||||
|         point_and_distance = [[-87.67892, 41.9120459], 2] | ||||
|         events = Event.objects(location__geo_within_center=point_and_distance) | ||||
|         self.assertEqual(events.count(), 2) | ||||
|         events = list(events) | ||||
|         self.assertTrue(event2 not in events) | ||||
|         self.assertTrue(event1 in events) | ||||
|         self.assertTrue(event3 in events) | ||||
|  | ||||
|         # ensure ordering is respected by "near" | ||||
|         events = Event.objects(location__near=[-87.67892, 41.9120459]) | ||||
|         events = events.order_by("-date") | ||||
|         self.assertEqual(events.count(), 3) | ||||
|         self.assertEqual(list(events), [event3, event1, event2]) | ||||
|  | ||||
|         # find events within 10km of san francisco | ||||
|         point = [-122.415579, 37.7566023] | ||||
|         events = Event.objects(location__near=point, location__max_distance=10000) | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0], event2) | ||||
|  | ||||
|         # find events within 1km of greenpoint, broolyn, nyc, ny | ||||
|         events = Event.objects(location__near=[-73.9509714, 40.7237134], location__max_distance=1000) | ||||
|         self.assertEqual(events.count(), 0) | ||||
|  | ||||
|         # ensure ordering is respected by "near" | ||||
|         events = Event.objects(location__near=[-87.67892, 41.9120459], | ||||
|                                location__max_distance=10000).order_by("-date") | ||||
|         self.assertEqual(events.count(), 2) | ||||
|         self.assertEqual(events[0], event3) | ||||
|  | ||||
|         # check that within_box works | ||||
|         box = [(-125.0, 35.0), (-100.0, 40.0)] | ||||
|         events = Event.objects(location__geo_within_box=box) | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0].id, event2.id) | ||||
|  | ||||
|         polygon = [ | ||||
|             (-87.694445, 41.912114), | ||||
|             (-87.69084, 41.919395), | ||||
|             (-87.681742, 41.927186), | ||||
|             (-87.654276, 41.911731), | ||||
|             (-87.656164, 41.898061), | ||||
|         ] | ||||
|         events = Event.objects(location__geo_within_polygon=polygon) | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0].id, event1.id) | ||||
|  | ||||
|         polygon2 = [ | ||||
|             (-1.742249, 54.033586), | ||||
|             (-1.225891, 52.792797), | ||||
|             (-4.40094, 53.389881) | ||||
|         ] | ||||
|         events = Event.objects(location__geo_within_polygon=polygon2) | ||||
|         self.assertEqual(events.count(), 0) | ||||
|  | ||||
|     def test_2dsphere_point_embedded(self): | ||||
|  | ||||
|         class Venue(EmbeddedDocument): | ||||
|             location = GeoPointField() | ||||
|             name = StringField() | ||||
|  | ||||
|         class Event(Document): | ||||
|             title = StringField() | ||||
|             venue = EmbeddedDocumentField(Venue) | ||||
|  | ||||
|         Event.drop_collection() | ||||
|  | ||||
|         venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889]) | ||||
|         venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295]) | ||||
|  | ||||
|         event1 = Event(title="Coltrane Motion @ Double Door", | ||||
|                        venue=venue1).save() | ||||
|         event2 = Event(title="Coltrane Motion @ Bottom of the Hill", | ||||
|                        venue=venue2).save() | ||||
|         event3 = Event(title="Coltrane Motion @ Empty Bottle", | ||||
|                        venue=venue1).save() | ||||
|  | ||||
|         # find all events "near" pitchfork office, chicago. | ||||
|         # note that "near" will show the san francisco event, too, | ||||
|         # although it sorts to last. | ||||
|         events = Event.objects(venue__location__near=[-87.67892, 41.9120459]) | ||||
|         self.assertEqual(events.count(), 3) | ||||
|         self.assertEqual(list(events), [event1, event3, event2]) | ||||
|  | ||||
|     def test_linestring(self): | ||||
|  | ||||
|         class Road(Document): | ||||
|             name = StringField() | ||||
|             line = LineStringField() | ||||
| @@ -359,7 +456,6 @@ class GeoQueriesTest(unittest.TestCase): | ||||
|         self.assertEqual(1, roads) | ||||
|  | ||||
|     def test_polygon(self): | ||||
|  | ||||
|         class Road(Document): | ||||
|             name = StringField() | ||||
|             poly = PolygonField() | ||||
| @@ -414,5 +510,48 @@ class GeoQueriesTest(unittest.TestCase): | ||||
|         roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count() | ||||
|         self.assertEqual(1, roads) | ||||
|  | ||||
|     def test_2dsphere_point_sets_correctly(self): | ||||
|         class Location(Document): | ||||
|             loc = PointField() | ||||
|  | ||||
|         Location.drop_collection() | ||||
|  | ||||
|         Location(loc=[1,2]).save() | ||||
|         loc = Location.objects.as_pymongo()[0] | ||||
|         self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [1, 2]}) | ||||
|  | ||||
|         Location.objects.update(set__loc=[2,1]) | ||||
|         loc = Location.objects.as_pymongo()[0] | ||||
|         self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [2, 1]}) | ||||
|  | ||||
|     def test_2dsphere_linestring_sets_correctly(self): | ||||
|         class Location(Document): | ||||
|             line = LineStringField() | ||||
|  | ||||
|         Location.drop_collection() | ||||
|  | ||||
|         Location(line=[[1, 2], [2, 2]]).save() | ||||
|         loc = Location.objects.as_pymongo()[0] | ||||
|         self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}) | ||||
|  | ||||
|         Location.objects.update(set__line=[[2, 1], [1, 2]]) | ||||
|         loc = Location.objects.as_pymongo()[0] | ||||
|         self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[2, 1], [1, 2]]}) | ||||
|  | ||||
|     def test_geojson_PolygonField(self): | ||||
|         class Location(Document): | ||||
|             poly = PolygonField() | ||||
|  | ||||
|         Location.drop_collection() | ||||
|  | ||||
|         Location(poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save() | ||||
|         loc = Location.objects.as_pymongo()[0] | ||||
|         self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) | ||||
|  | ||||
|         Location.objects.update(set__poly=[[[40, 4], [40, 6], [41, 6], [40, 4]]]) | ||||
|         loc = Location.objects.as_pymongo()[0] | ||||
|         self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]]}) | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
|   | ||||
							
								
								
									
										99
									
								
								tests/queryset/modify.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										99
									
								
								tests/queryset/modify.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,99 @@ | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import connect, Document, IntField | ||||
|  | ||||
| __all__ = ("FindAndModifyTest",) | ||||
|  | ||||
|  | ||||
| class Doc(Document): | ||||
|     id = IntField(primary_key=True) | ||||
|     value = IntField() | ||||
|  | ||||
|  | ||||
| class FindAndModifyTest(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db="mongoenginetest") | ||||
|         Doc.drop_collection() | ||||
|  | ||||
|     def assertDbEqual(self, docs): | ||||
|         self.assertEqual(list(Doc._collection.find().sort("id")), docs) | ||||
|  | ||||
|     def test_modify(self): | ||||
|         Doc(id=0, value=0).save() | ||||
|         doc = Doc(id=1, value=1).save() | ||||
|  | ||||
|         old_doc = Doc.objects(id=1).modify(set__value=-1) | ||||
|         self.assertEqual(old_doc.to_json(), doc.to_json()) | ||||
|         self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) | ||||
|  | ||||
|     def test_modify_with_new(self): | ||||
|         Doc(id=0, value=0).save() | ||||
|         doc = Doc(id=1, value=1).save() | ||||
|  | ||||
|         new_doc = Doc.objects(id=1).modify(set__value=-1, new=True) | ||||
|         doc.value = -1 | ||||
|         self.assertEqual(new_doc.to_json(), doc.to_json()) | ||||
|         self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) | ||||
|  | ||||
|     def test_modify_not_existing(self): | ||||
|         Doc(id=0, value=0).save() | ||||
|         self.assertEqual(Doc.objects(id=1).modify(set__value=-1), None) | ||||
|         self.assertDbEqual([{"_id": 0, "value": 0}]) | ||||
|  | ||||
|     def test_modify_with_upsert(self): | ||||
|         Doc(id=0, value=0).save() | ||||
|         old_doc = Doc.objects(id=1).modify(set__value=1, upsert=True) | ||||
|         self.assertEqual(old_doc, None) | ||||
|         self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}]) | ||||
|  | ||||
|     def test_modify_with_upsert_existing(self): | ||||
|         Doc(id=0, value=0).save() | ||||
|         doc = Doc(id=1, value=1).save() | ||||
|  | ||||
|         old_doc = Doc.objects(id=1).modify(set__value=-1, upsert=True) | ||||
|         self.assertEqual(old_doc.to_json(), doc.to_json()) | ||||
|         self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) | ||||
|  | ||||
|     def test_modify_with_upsert_with_new(self): | ||||
|         Doc(id=0, value=0).save() | ||||
|         new_doc = Doc.objects(id=1).modify(upsert=True, new=True, set__value=1) | ||||
|         self.assertEqual(new_doc.to_mongo(), {"_id": 1, "value": 1}) | ||||
|         self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}]) | ||||
|  | ||||
|     def test_modify_with_remove(self): | ||||
|         Doc(id=0, value=0).save() | ||||
|         doc = Doc(id=1, value=1).save() | ||||
|  | ||||
|         old_doc = Doc.objects(id=1).modify(remove=True) | ||||
|         self.assertEqual(old_doc.to_json(), doc.to_json()) | ||||
|         self.assertDbEqual([{"_id": 0, "value": 0}]) | ||||
|  | ||||
|     def test_find_and_modify_with_remove_not_existing(self): | ||||
|         Doc(id=0, value=0).save() | ||||
|         self.assertEqual(Doc.objects(id=1).modify(remove=True), None) | ||||
|         self.assertDbEqual([{"_id": 0, "value": 0}]) | ||||
|  | ||||
|     def test_modify_with_order_by(self): | ||||
|         Doc(id=0, value=3).save() | ||||
|         Doc(id=1, value=2).save() | ||||
|         Doc(id=2, value=1).save() | ||||
|         doc = Doc(id=3, value=0).save() | ||||
|  | ||||
|         old_doc = Doc.objects().order_by("-id").modify(set__value=-1) | ||||
|         self.assertEqual(old_doc.to_json(), doc.to_json()) | ||||
|         self.assertDbEqual([ | ||||
|             {"_id": 0, "value": 3}, {"_id": 1, "value": 2}, | ||||
|             {"_id": 2, "value": 1}, {"_id": 3, "value": -1}]) | ||||
|  | ||||
|     def test_modify_with_fields(self): | ||||
|         Doc(id=0, value=0).save() | ||||
|         Doc(id=1, value=1).save() | ||||
|  | ||||
|         old_doc = Doc.objects(id=1).only("id").modify(set__value=-1) | ||||
|         self.assertEqual(old_doc.to_mongo(), {"_id": 1}) | ||||
|         self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
							
								
								
									
										78
									
								
								tests/queryset/pickable.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										78
									
								
								tests/queryset/pickable.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,78 @@ | ||||
| import pickle | ||||
| import unittest | ||||
| from pymongo.mongo_client import MongoClient | ||||
| from mongoengine import Document, StringField, IntField | ||||
| from mongoengine.connection import connect | ||||
|  | ||||
| __author__ = 'stas' | ||||
|  | ||||
| class Person(Document): | ||||
|     name = StringField() | ||||
|     age = IntField() | ||||
|  | ||||
| class TestQuerysetPickable(unittest.TestCase): | ||||
|     """ | ||||
|     Test for adding pickling support for QuerySet instances | ||||
|     See issue https://github.com/MongoEngine/mongoengine/issues/442 | ||||
|     """ | ||||
|     def setUp(self): | ||||
|         super(TestQuerysetPickable, self).setUp() | ||||
|  | ||||
|         connection = connect(db="test") #type: pymongo.mongo_client.MongoClient | ||||
|  | ||||
|         connection.drop_database("test") | ||||
|  | ||||
|         self.john = Person.objects.create( | ||||
|             name="John", | ||||
|             age=21 | ||||
|         ) | ||||
|  | ||||
|  | ||||
|     def test_picke_simple_qs(self): | ||||
|  | ||||
|         qs = Person.objects.all() | ||||
|  | ||||
|         pickle.dumps(qs) | ||||
|  | ||||
|     def _get_loaded(self, qs): | ||||
|         s = pickle.dumps(qs) | ||||
|  | ||||
|         return pickle.loads(s) | ||||
|  | ||||
|     def test_unpickle(self): | ||||
|         qs = Person.objects.all() | ||||
|  | ||||
|         loadedQs = self._get_loaded(qs) | ||||
|  | ||||
|         self.assertEqual(qs.count(), loadedQs.count()) | ||||
|  | ||||
|         #can update loadedQs | ||||
|         loadedQs.update(age=23) | ||||
|  | ||||
|         #check | ||||
|         self.assertEqual(Person.objects.first().age, 23) | ||||
|  | ||||
|     def test_pickle_support_filtration(self): | ||||
|         Person.objects.create( | ||||
|             name="Alice", | ||||
|             age=22 | ||||
|         ) | ||||
|  | ||||
|         Person.objects.create( | ||||
|             name="Bob", | ||||
|             age=23 | ||||
|         ) | ||||
|  | ||||
|         qs = Person.objects.filter(age__gte=22) | ||||
|         self.assertEqual(qs.count(), 2) | ||||
|  | ||||
|         loaded = self._get_loaded(qs) | ||||
|  | ||||
|         self.assertEqual(loaded.count(), 2) | ||||
|         self.assertEqual(loaded.filter(name="Bob").first().age, 23) | ||||
|      | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,11 +1,7 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.queryset import Q | ||||
| from mongoengine.queryset import transform | ||||
| from mongoengine.queryset import Q, transform | ||||
|  | ||||
| __all__ = ("TransformTest",) | ||||
|  | ||||
| @@ -41,8 +37,8 @@ class TransformTest(unittest.TestCase): | ||||
|         DicDoc.drop_collection() | ||||
|         Doc.drop_collection() | ||||
|  | ||||
|         DicDoc().save() | ||||
|         doc = Doc().save() | ||||
|         dic_doc = DicDoc().save() | ||||
|  | ||||
|         for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")): | ||||
|             update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc}) | ||||
| @@ -55,7 +51,6 @@ class TransformTest(unittest.TestCase): | ||||
|         update = transform.update(DicDoc, pull__dictField__test=doc) | ||||
|         self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict)) | ||||
|  | ||||
|  | ||||
|     def test_query_field_name(self): | ||||
|         """Ensure that the correct field name is used when querying. | ||||
|         """ | ||||
| @@ -156,16 +151,95 @@ class TransformTest(unittest.TestCase): | ||||
|         class Doc(Document): | ||||
|             meta = {'allow_inheritance': False} | ||||
|  | ||||
|         raw_query = Doc.objects(__raw__={'deleted': False, | ||||
|         raw_query = Doc.objects(__raw__={ | ||||
|             'deleted': False, | ||||
|             'scraped': 'yes', | ||||
|                                 '$nor': [{'views.extracted': 'no'}, | ||||
|                                          {'attachments.views.extracted':'no'}] | ||||
|             '$nor': [ | ||||
|                 {'views.extracted': 'no'}, | ||||
|                 {'attachments.views.extracted': 'no'} | ||||
|             ] | ||||
|         })._query | ||||
|  | ||||
|         expected = {'deleted': False, 'scraped': 'yes', | ||||
|                     '$nor': [{'views.extracted': 'no'}, | ||||
|                              {'attachments.views.extracted': 'no'}]} | ||||
|         self.assertEqual(expected, raw_query) | ||||
|         self.assertEqual(raw_query, { | ||||
|             'deleted': False, | ||||
|             'scraped': 'yes', | ||||
|             '$nor': [ | ||||
|                 {'views.extracted': 'no'}, | ||||
|                 {'attachments.views.extracted': 'no'} | ||||
|             ] | ||||
|         }) | ||||
|  | ||||
|     def test_geojson_PointField(self): | ||||
|         class Location(Document): | ||||
|             loc = PointField() | ||||
|  | ||||
|         update = transform.update(Location, set__loc=[1, 2]) | ||||
|         self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}}) | ||||
|  | ||||
|         update = transform.update(Location, set__loc={"type": "Point", "coordinates": [1, 2]}) | ||||
|         self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}}) | ||||
|  | ||||
|     def test_geojson_LineStringField(self): | ||||
|         class Location(Document): | ||||
|             line = LineStringField() | ||||
|  | ||||
|         update = transform.update(Location, set__line=[[1, 2], [2, 2]]) | ||||
|         self.assertEqual(update, {'$set': {'line': {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}}) | ||||
|  | ||||
|         update = transform.update(Location, set__line={"type": "LineString", "coordinates": [[1, 2], [2, 2]]}) | ||||
|         self.assertEqual(update, {'$set': {'line': {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}}) | ||||
|  | ||||
|     def test_geojson_PolygonField(self): | ||||
|         class Location(Document): | ||||
|             poly = PolygonField() | ||||
|  | ||||
|         update = transform.update(Location, set__poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]) | ||||
|         self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}}) | ||||
|  | ||||
|         update = transform.update(Location, set__poly={"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) | ||||
|         self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}}) | ||||
|  | ||||
|     def test_type(self): | ||||
|         class Doc(Document): | ||||
|             df = DynamicField() | ||||
|         Doc(df=True).save() | ||||
|         Doc(df=7).save() | ||||
|         Doc(df="df").save() | ||||
|         self.assertEqual(Doc.objects(df__type=1).count(), 0)  # double | ||||
|         self.assertEqual(Doc.objects(df__type=8).count(), 1)  # bool | ||||
|         self.assertEqual(Doc.objects(df__type=2).count(), 1)  # str | ||||
|         self.assertEqual(Doc.objects(df__type=16).count(), 1)  # int | ||||
|  | ||||
|     def test_last_field_name_like_operator(self): | ||||
|         class EmbeddedItem(EmbeddedDocument): | ||||
|             type = StringField() | ||||
|             name = StringField() | ||||
|  | ||||
|         class Doc(Document): | ||||
|             item = EmbeddedDocumentField(EmbeddedItem) | ||||
|  | ||||
|         Doc.drop_collection() | ||||
|  | ||||
|         doc = Doc(item=EmbeddedItem(type="axe", name="Heroic axe")) | ||||
|         doc.save() | ||||
|  | ||||
|         self.assertEqual(1, Doc.objects(item__type__="axe").count()) | ||||
|         self.assertEqual(1, Doc.objects(item__name__="Heroic axe").count()) | ||||
|  | ||||
|         Doc.objects(id=doc.id).update(set__item__type__='sword') | ||||
|         self.assertEqual(1, Doc.objects(item__type__="sword").count()) | ||||
|         self.assertEqual(0, Doc.objects(item__type__="axe").count()) | ||||
|  | ||||
|     def test_understandable_error_raised(self): | ||||
|         class Event(Document): | ||||
|             title = StringField() | ||||
|             location = GeoPointField() | ||||
|  | ||||
|         box = [(35.0, -125.0), (40.0, -100.0)] | ||||
|         # I *meant* to execute location__within_box=box | ||||
|         events = Event.objects(location__within=box) | ||||
|         with self.assertRaises(InvalidQueryError): | ||||
|             events.count() | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|   | ||||
| @@ -1,14 +1,12 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import datetime | ||||
| import re | ||||
| import unittest | ||||
|  | ||||
| from bson import ObjectId | ||||
| from datetime import datetime | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.queryset import Q | ||||
| from mongoengine.errors import InvalidQueryError | ||||
| from mongoengine.queryset import Q | ||||
|  | ||||
| __all__ = ("QTest",) | ||||
|  | ||||
| @@ -132,12 +130,12 @@ class QTest(unittest.TestCase): | ||||
|         TestDoc(x=10).save() | ||||
|         TestDoc(y=True).save() | ||||
|  | ||||
|         self.assertEqual(query, | ||||
|         {'$and': [ | ||||
|         self.assertEqual(query, { | ||||
|             '$and': [ | ||||
|                 {'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]}, | ||||
|                 {'$or': [{'x': {'$lt': 100}}, {'y': True}]} | ||||
|         ]}) | ||||
|  | ||||
|             ] | ||||
|         }) | ||||
|         self.assertEqual(2, TestDoc.objects(q1 & q2).count()) | ||||
|  | ||||
|     def test_or_and_or_combination(self): | ||||
| @@ -157,15 +155,14 @@ class QTest(unittest.TestCase): | ||||
|         q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False))) | ||||
|         query = (q1 | q2).to_query(TestDoc) | ||||
|  | ||||
|         self.assertEqual(query, | ||||
|             {'$or': [ | ||||
|         self.assertEqual(query, { | ||||
|             '$or': [ | ||||
|                 {'$and': [{'x': {'$gt': 0}}, | ||||
|                           {'$or': [{'y': True}, {'y': {'$exists': False}}]}]}, | ||||
|                 {'$and': [{'x': {'$lt': 100}}, | ||||
|                           {'$or': [{'y': False}, {'y': {'$exists': False}}]}]} | ||||
|             ]} | ||||
|         ) | ||||
|  | ||||
|             ] | ||||
|         }) | ||||
|         self.assertEqual(2, TestDoc.objects(q1 | q2).count()) | ||||
|  | ||||
|     def test_multiple_occurence_in_field(self): | ||||
| @@ -188,7 +185,7 @@ class QTest(unittest.TestCase): | ||||
|             x = IntField() | ||||
|  | ||||
|         TestDoc.drop_collection() | ||||
|         for i in xrange(1, 101): | ||||
|         for i in range(1, 101): | ||||
|             t = TestDoc(x=i) | ||||
|             t.save() | ||||
|  | ||||
| @@ -215,19 +212,19 @@ class QTest(unittest.TestCase): | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         post1 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 8), published=False) | ||||
|         post1 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 8), published=False) | ||||
|         post1.save() | ||||
|  | ||||
|         post2 = BlogPost(title='Test 2', publish_date=datetime(2010, 1, 15), published=True) | ||||
|         post2 = BlogPost(title='Test 2', publish_date=datetime.datetime(2010, 1, 15), published=True) | ||||
|         post2.save() | ||||
|  | ||||
|         post3 = BlogPost(title='Test 3', published=True) | ||||
|         post3.save() | ||||
|  | ||||
|         post4 = BlogPost(title='Test 4', publish_date=datetime(2010, 1, 8)) | ||||
|         post4 = BlogPost(title='Test 4', publish_date=datetime.datetime(2010, 1, 8)) | ||||
|         post4.save() | ||||
|  | ||||
|         post5 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 15)) | ||||
|         post5 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 15)) | ||||
|         post5.save() | ||||
|  | ||||
|         post6 = BlogPost(title='Test 1', published=False) | ||||
| @@ -250,7 +247,7 @@ class QTest(unittest.TestCase): | ||||
|         self.assertTrue(all(obj.id in posts for obj in published_posts)) | ||||
|  | ||||
|         # Check Q object combination | ||||
|         date = datetime(2010, 1, 10) | ||||
|         date = datetime.datetime(2010, 1, 10) | ||||
|         q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True)) | ||||
|         posts = [post.id for post in q] | ||||
|  | ||||
| @@ -271,12 +268,13 @@ class QTest(unittest.TestCase): | ||||
|         self.assertEqual(self.Person.objects(Q(age__in=[20, 30])).count(), 3) | ||||
|  | ||||
|         # Test invalid query objs | ||||
|         def wrong_query_objs(): | ||||
|         with self.assertRaises(InvalidQueryError): | ||||
|             self.Person.objects('user1') | ||||
|         def wrong_query_objs_filter(): | ||||
|             self.Person.objects('user1') | ||||
|         self.assertRaises(InvalidQueryError, wrong_query_objs) | ||||
|         self.assertRaises(InvalidQueryError, wrong_query_objs_filter) | ||||
|  | ||||
|         # filter should fail, too | ||||
|         with self.assertRaises(InvalidQueryError): | ||||
|             self.Person.objects.filter('user1') | ||||
|  | ||||
|  | ||||
|     def test_q_regex(self): | ||||
|         """Ensure that Q objects can be queried using regexes. | ||||
| @@ -284,7 +282,6 @@ class QTest(unittest.TestCase): | ||||
|         person = self.Person(name='Guido van Rossum') | ||||
|         person.save() | ||||
|  | ||||
|         import re | ||||
|         obj = self.Person.objects(Q(name=re.compile('^Gui'))).first() | ||||
|         self.assertEqual(obj, person) | ||||
|         obj = self.Person.objects(Q(name=re.compile('^gui'))).first() | ||||
|   | ||||
| @@ -1,14 +1,30 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
| import datetime | ||||
| from pymongo.errors import OperationFailure | ||||
|  | ||||
| try: | ||||
|     import unittest2 as unittest | ||||
| except ImportError: | ||||
|     import unittest | ||||
| from nose.plugins.skip import SkipTest | ||||
|  | ||||
| import pymongo | ||||
| from bson.tz_util import utc | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine import ( | ||||
|     connect, register_connection, | ||||
|     Document, DateTimeField | ||||
| ) | ||||
| from mongoengine.python_support import IS_PYMONGO_3 | ||||
| import mongoengine.connection | ||||
| from mongoengine.connection import get_db, get_connection, ConnectionError | ||||
| from mongoengine.connection import (MongoEngineConnectionError, get_db, | ||||
|                                     get_connection) | ||||
|  | ||||
|  | ||||
| def get_tz_awareness(connection): | ||||
|     if not IS_PYMONGO_3: | ||||
|         return connection.tz_aware | ||||
|     else: | ||||
|         return connection.codec_options.tz_aware | ||||
|  | ||||
|  | ||||
| class ConnectionTest(unittest.TestCase): | ||||
| @@ -19,8 +35,7 @@ class ConnectionTest(unittest.TestCase): | ||||
|         mongoengine.connection._dbs = {} | ||||
|  | ||||
|     def test_connect(self): | ||||
|         """Ensure that the connect() method works properly. | ||||
|         """ | ||||
|         """Ensure that the connect() method works properly.""" | ||||
|         connect('mongoenginetest') | ||||
|  | ||||
|         conn = get_connection() | ||||
| @@ -34,9 +49,103 @@ class ConnectionTest(unittest.TestCase): | ||||
|         conn = get_connection('testdb') | ||||
|         self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) | ||||
|  | ||||
|     def test_connect_uri(self): | ||||
|         """Ensure that the connect() method works properly with uri's | ||||
|     def test_connect_in_mocking(self): | ||||
|         """Ensure that the connect() method works properly in mocking. | ||||
|         """ | ||||
|         try: | ||||
|             import mongomock | ||||
|         except ImportError: | ||||
|             raise SkipTest('you need mongomock installed to run this testcase') | ||||
|  | ||||
|         connect('mongoenginetest', host='mongomock://localhost') | ||||
|         conn = get_connection() | ||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) | ||||
|  | ||||
|         connect('mongoenginetest2', host='mongomock://localhost', alias='testdb2') | ||||
|         conn = get_connection('testdb2') | ||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) | ||||
|  | ||||
|         connect('mongoenginetest3', host='mongodb://localhost', is_mock=True, alias='testdb3') | ||||
|         conn = get_connection('testdb3') | ||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) | ||||
|  | ||||
|         connect('mongoenginetest4', is_mock=True, alias='testdb4') | ||||
|         conn = get_connection('testdb4') | ||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) | ||||
|  | ||||
|         connect(host='mongodb://localhost:27017/mongoenginetest5', is_mock=True, alias='testdb5') | ||||
|         conn = get_connection('testdb5') | ||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) | ||||
|  | ||||
|         connect(host='mongomock://localhost:27017/mongoenginetest6', alias='testdb6') | ||||
|         conn = get_connection('testdb6') | ||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) | ||||
|  | ||||
|         connect(host='mongomock://localhost:27017/mongoenginetest7', is_mock=True, alias='testdb7') | ||||
|         conn = get_connection('testdb7') | ||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) | ||||
|  | ||||
|     def test_connect_with_host_list(self): | ||||
|         """Ensure that the connect() method works when host is a list | ||||
|  | ||||
|         Uses mongomock to test w/o needing multiple mongod/mongos processes | ||||
|         """ | ||||
|         try: | ||||
|             import mongomock | ||||
|         except ImportError: | ||||
|             raise SkipTest('you need mongomock installed to run this testcase') | ||||
|  | ||||
|         connect(host=['mongomock://localhost']) | ||||
|         conn = get_connection() | ||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) | ||||
|  | ||||
|         connect(host=['mongodb://localhost'], is_mock=True,  alias='testdb2') | ||||
|         conn = get_connection('testdb2') | ||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) | ||||
|  | ||||
|         connect(host=['localhost'], is_mock=True,  alias='testdb3') | ||||
|         conn = get_connection('testdb3') | ||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) | ||||
|  | ||||
|         connect(host=['mongomock://localhost:27017', 'mongomock://localhost:27018'], alias='testdb4') | ||||
|         conn = get_connection('testdb4') | ||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) | ||||
|  | ||||
|         connect(host=['mongodb://localhost:27017', 'mongodb://localhost:27018'], is_mock=True,  alias='testdb5') | ||||
|         conn = get_connection('testdb5') | ||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) | ||||
|  | ||||
|         connect(host=['localhost:27017', 'localhost:27018'], is_mock=True,  alias='testdb6') | ||||
|         conn = get_connection('testdb6') | ||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) | ||||
|  | ||||
|     def test_disconnect(self): | ||||
|         """Ensure that the disconnect() method works properly | ||||
|         """ | ||||
|         conn1 = connect('mongoenginetest') | ||||
|         mongoengine.connection.disconnect() | ||||
|         conn2 = connect('mongoenginetest') | ||||
|         self.assertTrue(conn1 is not conn2) | ||||
|  | ||||
|     def test_sharing_connections(self): | ||||
|         """Ensure that connections are shared when the connection settings are exactly the same | ||||
|         """ | ||||
|         connect('mongoenginetests', alias='testdb1') | ||||
|         expected_connection = get_connection('testdb1') | ||||
|  | ||||
|         connect('mongoenginetests', alias='testdb2') | ||||
|         actual_connection = get_connection('testdb2') | ||||
|  | ||||
|         # Handle PyMongo 3+ Async Connection | ||||
|         if IS_PYMONGO_3: | ||||
|             # Ensure we are connected, throws ServerSelectionTimeoutError otherwise. | ||||
|             # Purposely not catching exception to fail test if thrown. | ||||
|             expected_connection.server_info() | ||||
|  | ||||
|         self.assertEqual(expected_connection, actual_connection) | ||||
|  | ||||
|     def test_connect_uri(self): | ||||
|         """Ensure that the connect() method works properly with URIs.""" | ||||
|         c = connect(db='mongoenginetest', alias='admin') | ||||
|         c.admin.system.users.remove({}) | ||||
|         c.mongoenginetest.system.users.remove({}) | ||||
| @@ -45,7 +154,11 @@ class ConnectionTest(unittest.TestCase): | ||||
|         c.admin.authenticate("admin", "password") | ||||
|         c.mongoenginetest.add_user("username", "password") | ||||
|  | ||||
|         self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost') | ||||
|         if not IS_PYMONGO_3: | ||||
|             self.assertRaises( | ||||
|                 MongoEngineConnectionError, connect, 'testdb_uri_bad', | ||||
|                 host='mongodb://test:password@localhost' | ||||
|             ) | ||||
|  | ||||
|         connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest') | ||||
|  | ||||
| @@ -60,19 +173,9 @@ class ConnectionTest(unittest.TestCase): | ||||
|         c.mongoenginetest.system.users.remove({}) | ||||
|  | ||||
|     def test_connect_uri_without_db(self): | ||||
|         """Ensure that the connect() method works properly with uri's | ||||
|         without database_name | ||||
|         """Ensure connect() method works properly if the URI doesn't | ||||
|         include a database name. | ||||
|         """ | ||||
|         c = connect(db='mongoenginetest', alias='admin') | ||||
|         c.admin.system.users.remove({}) | ||||
|         c.mongoenginetest.system.users.remove({}) | ||||
|  | ||||
|         c.admin.add_user("admin", "password") | ||||
|         c.admin.authenticate("admin", "password") | ||||
|         c.mongoenginetest.add_user("username", "password") | ||||
|  | ||||
|         self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost') | ||||
|  | ||||
|         connect("mongoenginetest", host='mongodb://localhost/') | ||||
|  | ||||
|         conn = get_connection() | ||||
| @@ -82,15 +185,75 @@ class ConnectionTest(unittest.TestCase): | ||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) | ||||
|         self.assertEqual(db.name, 'mongoenginetest') | ||||
|  | ||||
|     def test_connect_uri_default_db(self): | ||||
|         """Ensure connect() defaults to the right database name if | ||||
|         the URI and the database_name don't explicitly specify it. | ||||
|         """ | ||||
|         connect(host='mongodb://localhost/') | ||||
|  | ||||
|         conn = get_connection() | ||||
|         self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) | ||||
|  | ||||
|         db = get_db() | ||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) | ||||
|         self.assertEqual(db.name, 'test') | ||||
|  | ||||
|     def test_uri_without_credentials_doesnt_override_conn_settings(self): | ||||
|         """Ensure connect() uses the username & password params if the URI | ||||
|         doesn't explicitly specify them. | ||||
|         """ | ||||
|         c = connect(host='mongodb://localhost/mongoenginetest', | ||||
|                     username='user', | ||||
|                     password='pass') | ||||
|  | ||||
|         # OperationFailure means that mongoengine attempted authentication | ||||
|         # w/ the provided username/password and failed - that's the desired | ||||
|         # behavior. If the MongoDB URI would override the credentials | ||||
|         self.assertRaises(OperationFailure, get_db) | ||||
|  | ||||
|     def test_connect_uri_with_authsource(self): | ||||
|         """Ensure that the connect() method works well with `authSource` | ||||
|         option in the URI. | ||||
|         """ | ||||
|         # Create users | ||||
|         c = connect('mongoenginetest') | ||||
|         c.admin.system.users.remove({}) | ||||
|         c.mongoenginetest.system.users.remove({}) | ||||
|         c.admin.add_user('username2', 'password') | ||||
|  | ||||
|         # Authentication fails without "authSource" | ||||
|         if IS_PYMONGO_3: | ||||
|             test_conn = connect( | ||||
|                 'mongoenginetest', alias='test1', | ||||
|                 host='mongodb://username2:password@localhost/mongoenginetest' | ||||
|             ) | ||||
|             self.assertRaises(OperationFailure, test_conn.server_info) | ||||
|         else: | ||||
|             self.assertRaises( | ||||
|                 MongoEngineConnectionError, | ||||
|                 connect, 'mongoenginetest', alias='test1', | ||||
|                 host='mongodb://username2:password@localhost/mongoenginetest' | ||||
|             ) | ||||
|             self.assertRaises(MongoEngineConnectionError, get_db, 'test1') | ||||
|  | ||||
|         # Authentication succeeds with "authSource" | ||||
|         authd_conn = connect( | ||||
|             'mongoenginetest', alias='test2', | ||||
|             host=('mongodb://username2:password@localhost/' | ||||
|                   'mongoenginetest?authSource=admin') | ||||
|         ) | ||||
|         db = get_db('test2') | ||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) | ||||
|         self.assertEqual(db.name, 'mongoenginetest') | ||||
|  | ||||
|         # Clear all users | ||||
|         authd_conn.admin.system.users.remove({}) | ||||
|  | ||||
|     def test_register_connection(self): | ||||
|         """Ensure that connections with different aliases may be registered. | ||||
|         """ | ||||
|         register_connection('testdb', 'mongoenginetest2') | ||||
|  | ||||
|         self.assertRaises(ConnectionError, get_connection) | ||||
|         self.assertRaises(MongoEngineConnectionError, get_connection) | ||||
|         conn = get_connection('testdb') | ||||
|         self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) | ||||
|  | ||||
| @@ -98,17 +261,95 @@ class ConnectionTest(unittest.TestCase): | ||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) | ||||
|         self.assertEqual(db.name, 'mongoenginetest2') | ||||
|  | ||||
|     def test_connection_kwargs(self): | ||||
|         """Ensure that connection kwargs get passed to pymongo. | ||||
|     def test_register_connection_defaults(self): | ||||
|         """Ensure that defaults are used when the host and port are None. | ||||
|         """ | ||||
|         register_connection('testdb', 'mongoenginetest', host=None, port=None) | ||||
|  | ||||
|         conn = get_connection('testdb') | ||||
|         self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) | ||||
|  | ||||
|     def test_connection_kwargs(self): | ||||
|         """Ensure that connection kwargs get passed to pymongo.""" | ||||
|         connect('mongoenginetest', alias='t1', tz_aware=True) | ||||
|         conn = get_connection('t1') | ||||
|  | ||||
|         self.assertTrue(conn.tz_aware) | ||||
|         self.assertTrue(get_tz_awareness(conn)) | ||||
|  | ||||
|         connect('mongoenginetest2', alias='t2') | ||||
|         conn = get_connection('t2') | ||||
|         self.assertFalse(conn.tz_aware) | ||||
|         self.assertFalse(get_tz_awareness(conn)) | ||||
|  | ||||
|     def test_connection_pool_via_kwarg(self): | ||||
|         """Ensure we can specify a max connection pool size using | ||||
|         a connection kwarg. | ||||
|         """ | ||||
|         # Use "max_pool_size" or "maxpoolsize" depending on PyMongo version | ||||
|         # (former was changed to the latter as described in | ||||
|         # https://jira.mongodb.org/browse/PYTHON-854). | ||||
|         # TODO remove once PyMongo < 3.0 support is dropped | ||||
|         if pymongo.version_tuple[0] >= 3: | ||||
|             pool_size_kwargs = {'maxpoolsize': 100} | ||||
|         else: | ||||
|             pool_size_kwargs = {'max_pool_size': 100} | ||||
|  | ||||
|         conn = connect('mongoenginetest', alias='max_pool_size_via_kwarg', **pool_size_kwargs) | ||||
|         self.assertEqual(conn.max_pool_size, 100) | ||||
|  | ||||
|     def test_connection_pool_via_uri(self): | ||||
|         """Ensure we can specify a max connection pool size using | ||||
|         an option in a connection URI. | ||||
|         """ | ||||
|         if pymongo.version_tuple[0] == 2 and pymongo.version_tuple[1] < 9: | ||||
|             raise SkipTest('maxpoolsize as a URI option is only supported in PyMongo v2.9+') | ||||
|  | ||||
|         conn = connect(host='mongodb://localhost/test?maxpoolsize=100', alias='max_pool_size_via_uri') | ||||
|         self.assertEqual(conn.max_pool_size, 100) | ||||
|  | ||||
|     def test_write_concern(self): | ||||
|         """Ensure write concern can be specified in connect() via | ||||
|         a kwarg or as part of the connection URI. | ||||
|         """ | ||||
|         conn1 = connect(alias='conn1', host='mongodb://localhost/testing?w=1&j=true') | ||||
|         conn2 = connect('testing', alias='conn2', w=1, j=True) | ||||
|         if IS_PYMONGO_3: | ||||
|             self.assertEqual(conn1.write_concern.document, {'w': 1, 'j': True}) | ||||
|             self.assertEqual(conn2.write_concern.document, {'w': 1, 'j': True}) | ||||
|         else: | ||||
|             self.assertEqual(dict(conn1.write_concern), {'w': 1, 'j': True}) | ||||
|             self.assertEqual(dict(conn2.write_concern), {'w': 1, 'j': True}) | ||||
|  | ||||
|     def test_connect_with_replicaset_via_uri(self): | ||||
|         """Ensure connect() works when specifying a replicaSet via the | ||||
|         MongoDB URI. | ||||
|         """ | ||||
|         if IS_PYMONGO_3: | ||||
|             c = connect(host='mongodb://localhost/test?replicaSet=local-rs') | ||||
|             db = get_db() | ||||
|             self.assertTrue(isinstance(db, pymongo.database.Database)) | ||||
|             self.assertEqual(db.name, 'test') | ||||
|         else: | ||||
|             # PyMongo < v3.x raises an exception: | ||||
|             # "localhost:27017 is not a member of replica set local-rs" | ||||
|             with self.assertRaises(MongoEngineConnectionError): | ||||
|                 c = connect(host='mongodb://localhost/test?replicaSet=local-rs') | ||||
|  | ||||
|     def test_connect_with_replicaset_via_kwargs(self): | ||||
|         """Ensure connect() works when specifying a replicaSet via the | ||||
|         connection kwargs | ||||
|         """ | ||||
|         if IS_PYMONGO_3: | ||||
|             c = connect(replicaset='local-rs') | ||||
|             self.assertEqual(c._MongoClient__options.replica_set_name, | ||||
|                              'local-rs') | ||||
|             db = get_db() | ||||
|             self.assertTrue(isinstance(db, pymongo.database.Database)) | ||||
|             self.assertEqual(db.name, 'test') | ||||
|         else: | ||||
|             # PyMongo < v3.x raises an exception: | ||||
|             # "localhost:27017 is not a member of replica set local-rs" | ||||
|             with self.assertRaises(MongoEngineConnectionError): | ||||
|                 c = connect(replicaset='local-rs') | ||||
|  | ||||
|     def test_datetime(self): | ||||
|         connect('mongoenginetest', tz_aware=True) | ||||
| @@ -123,6 +364,27 @@ class ConnectionTest(unittest.TestCase): | ||||
|         date_doc = DateDoc.objects.first() | ||||
|         self.assertEqual(d, date_doc.the_date) | ||||
|  | ||||
|     def test_multiple_connection_settings(self): | ||||
|         connect('mongoenginetest', alias='t1', host="localhost") | ||||
|  | ||||
|         connect('mongoenginetest2', alias='t2', host="127.0.0.1") | ||||
|  | ||||
|         mongo_connections = mongoengine.connection._connections | ||||
|         self.assertEqual(len(mongo_connections.items()), 2) | ||||
|         self.assertTrue('t1' in mongo_connections.keys()) | ||||
|         self.assertTrue('t2' in mongo_connections.keys()) | ||||
|         if not IS_PYMONGO_3: | ||||
|             self.assertEqual(mongo_connections['t1'].host, 'localhost') | ||||
|             self.assertEqual(mongo_connections['t2'].host, '127.0.0.1') | ||||
|         else: | ||||
|             # Handle PyMongo 3+ Async Connection | ||||
|             # Ensure we are connected, throws ServerSelectionTimeoutError otherwise. | ||||
|             # Purposely not catching exception to fail test if thrown. | ||||
|             mongo_connections['t1'].server_info() | ||||
|             mongo_connections['t2'].server_info() | ||||
|             self.assertEqual(mongo_connections['t1'].address[0], 'localhost') | ||||
|             self.assertEqual(mongo_connections['t2'].address[0], '127.0.0.1') | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
|   | ||||
| @@ -1,5 +1,3 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
| @@ -79,7 +77,7 @@ class ContextManagersTest(unittest.TestCase): | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             User(name='user %s' % i).save() | ||||
|  | ||||
|         user = User.objects.first() | ||||
| @@ -117,7 +115,7 @@ class ContextManagersTest(unittest.TestCase): | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             User(name='user %s' % i).save() | ||||
|  | ||||
|         user = User.objects.first() | ||||
| @@ -195,7 +193,7 @@ class ContextManagersTest(unittest.TestCase): | ||||
|         with query_counter() as q: | ||||
|             self.assertEqual(0, q) | ||||
|  | ||||
|             for i in xrange(1, 51): | ||||
|             for i in range(1, 51): | ||||
|                 db.test.find({}).count() | ||||
|  | ||||
|             self.assertEqual(50, q) | ||||
|   | ||||
							
								
								
									
										119
									
								
								tests/test_datastructures.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										119
									
								
								tests/test_datastructures.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,119 @@ | ||||
| import unittest | ||||
|  | ||||
| from mongoengine.base.datastructures import StrictDict, SemiStrictDict | ||||
|  | ||||
|  | ||||
class TestStrictDict(unittest.TestCase):
    """Exercises StrictDict: a dict replacement limited to a fixed key set."""

    def strict_dict_class(self, *args, **kwargs):
        # Hook point so subclasses can run the same suite against another type.
        return StrictDict.create(*args, **kwargs)

    def setUp(self):
        self.dtype = self.strict_dict_class(("a", "b", "c"))

    def test_init(self):
        inst = self.dtype(a=1, b=1, c=1)
        self.assertEqual((inst.a, inst.b, inst.c), (1, 1, 1))

    def test_repr(self):
        inst = self.dtype(a=1, b=2, c=3)
        self.assertEqual(repr(inst), '{"a": 1, "b": 2, "c": 3}')

        # Quote characters inside values must be escaped correctly.
        inst = self.dtype(a='"', b="'", c="")
        self.assertEqual(repr(inst), '{"a": \'"\', "b": "\'", "c": \'\'}')

    def test_init_fails_on_nonexisting_attrs(self):
        with self.assertRaises(AttributeError):
            self.dtype(a=1, b=2, d=3)

    def test_eq(self):
        base = self.dtype(a=1, b=1, c=1)
        same = self.dtype(a=1, b=1, c=1)
        other_value = self.dtype(a=1, b=1, c=3)
        missing_key = self.dtype(a=1, b=1)
        extra_key = self.strict_dict_class(("a", "b", "c", "d"))(a=1, b=1, c=1, d=1)
        reordered = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=1)
        reordered_other = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=2)

        self.assertEqual(base, same)
        self.assertNotEqual(base, other_value)
        self.assertNotEqual(base, missing_key)
        self.assertNotEqual(base, extra_key)
        self.assertNotEqual(missing_key, base)
        # Key declaration order must not affect equality, but values must.
        self.assertEqual(base, reordered)
        self.assertNotEqual(base, reordered_other)

    def test_setattr_getattr(self):
        inst = self.dtype()
        inst.a = 1
        self.assertEqual(inst.a, 1)
        self.assertRaises(AttributeError, getattr, inst, 'b')

    def test_setattr_raises_on_nonexisting_attr(self):
        inst = self.dtype()
        with self.assertRaises(AttributeError):
            inst.x = 1

    def test_setattr_getattr_special(self):
        # A declared key shadowing a dict method name must still act as data.
        special = self.strict_dict_class(["items"])
        special.items = 1
        self.assertEqual(special.items, 1)

    def test_get(self):
        inst = self.dtype(a=1)
        self.assertEqual(inst.get('a'), 1)
        self.assertEqual(inst.get('b', 'bla'), 'bla')

    def test_items(self):
        inst = self.dtype(a=1)
        self.assertEqual(inst.items(), [('a', 1)])
        inst = self.dtype(a=1, b=2)
        self.assertEqual(inst.items(), [('a', 1), ('b', 2)])

    def test_mappings_protocol(self):
        inst = self.dtype(a=1, b=2)
        assert dict(inst) == {'a': 1, 'b': 2}
        assert dict(**inst) == {'a': 1, 'b': 2}
|  | ||||
|  | ||||
class TestSemiSrictDict(TestStrictDict):
    """Runs the StrictDict suite against SemiStrictDict, which additionally
    accepts keys outside the declared key set."""

    def strict_dict_class(self, *args, **kwargs):
        return SemiStrictDict.create(*args, **kwargs)

    def test_init_fails_on_nonexisting_attrs(self):
        # Irrelevant here: SemiStrictDict accepts undeclared attributes.
        pass

    def test_setattr_raises_on_nonexisting_attr(self):
        # Irrelevant here: SemiStrictDict accepts undeclared attributes.
        pass

    def test_setattr_getattr_nonexisting_attr_succeeds(self):
        d = self.dtype()
        d.x = 1
        self.assertEqual(d.x, 1)

    def test_init_succeeds_with_nonexisting_attrs(self):
        d = self.dtype(a=1, b=1, c=1, x=2)
        self.assertEqual((d.a, d.b, d.c, d.x), (1, 1, 1, 2))

    def test_iter_with_nonexisting_attrs(self):
        d = self.dtype(a=1, b=1, c=1, x=2)
        self.assertEqual(list(d), ['a', 'b', 'c', 'x'])

    def test_iteritems_with_nonexisting_attrs(self):
        d = self.dtype(a=1, b=1, c=1, x=2)
        self.assertEqual(list(d.iteritems()), [('a', 1), ('b', 1), ('c', 1), ('x', 2)])

    def test_cmp_with_strict_dicts(self):
        # BUG FIX: was misspelled 'tets_cmp_with_strict_dicts', so unittest
        # never collected or ran this assertion.
        d = self.dtype(a=1, b=1, c=1)
        dd = StrictDict.create(("a", "b", "c"))(a=1, b=1, c=1)
        self.assertEqual(d, dd)

    def test_cmp_with_strict_dict_with_nonexisting_attrs(self):
        d = self.dtype(a=1, b=1, c=1, x=2)
        dd = StrictDict.create(("a", "b", "c", "x"))(a=1, b=1, c=1, x=2)
        self.assertEqual(d, dd)
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
| @@ -1,20 +1,27 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
|  | ||||
| from bson import DBRef, ObjectId | ||||
| from collections import OrderedDict | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.context_managers import query_counter | ||||
| from mongoengine.python_support import IS_PYMONGO_3 | ||||
| from mongoengine.base import TopLevelDocumentMetaclass | ||||
| if IS_PYMONGO_3: | ||||
|     from bson import CodecOptions | ||||
|  | ||||
|  | ||||
| class FieldTest(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|     @classmethod | ||||
|     def setUpClass(cls): | ||||
|         cls.db = connect(db='mongoenginetest') | ||||
|  | ||||
|     @classmethod | ||||
|     def tearDownClass(cls): | ||||
|         cls.db.drop_database('mongoenginetest') | ||||
|  | ||||
|     def test_list_item_dereference(self): | ||||
|         """Ensure that DBRef items in ListFields are dereferenced. | ||||
| @@ -28,7 +35,7 @@ class FieldTest(unittest.TestCase): | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             user = User(name='user %s' % i) | ||||
|             user.save() | ||||
|  | ||||
| @@ -86,7 +93,7 @@ class FieldTest(unittest.TestCase): | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             user = User(name='user %s' % i) | ||||
|             user.save() | ||||
|  | ||||
| @@ -158,7 +165,7 @@ class FieldTest(unittest.TestCase): | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 26): | ||||
|         for i in range(1, 26): | ||||
|             user = User(name='user %s' % i) | ||||
|             user.save() | ||||
|  | ||||
| @@ -291,9 +298,38 @@ class FieldTest(unittest.TestCase): | ||||
|                 self.assertEqual(employee.friends, friends) | ||||
|                 self.assertEqual(q, 2) | ||||
|  | ||||
|     def test_list_of_lists_of_references(self): | ||||
|  | ||||
|         class User(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         class Post(Document): | ||||
|             user_lists = ListField(ListField(ReferenceField(User))) | ||||
|  | ||||
|         class SimpleList(Document): | ||||
|             users = ListField(ReferenceField(User)) | ||||
|  | ||||
|         User.drop_collection() | ||||
|         Post.drop_collection() | ||||
|         SimpleList.drop_collection() | ||||
|  | ||||
|         u1 = User.objects.create(name='u1') | ||||
|         u2 = User.objects.create(name='u2') | ||||
|         u3 = User.objects.create(name='u3') | ||||
|  | ||||
|         SimpleList.objects.create(users=[u1, u2, u3]) | ||||
|         self.assertEqual(SimpleList.objects.all()[0].users, [u1, u2, u3]) | ||||
|  | ||||
|         Post.objects.create(user_lists=[[u1, u2], [u3]]) | ||||
|         self.assertEqual(Post.objects.all()[0].user_lists, [[u1, u2], [u3]]) | ||||
|  | ||||
|     def test_circular_reference(self): | ||||
|         """Ensure you can handle circular references | ||||
|         """ | ||||
|         class Relation(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|             person = ReferenceField('Person') | ||||
|  | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             relations = ListField(EmbeddedDocumentField('Relation')) | ||||
| @@ -301,10 +337,6 @@ class FieldTest(unittest.TestCase): | ||||
|             def __repr__(self): | ||||
|                 return "<Person: %s>" % self.name | ||||
|  | ||||
|         class Relation(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|             person = ReferenceField('Person') | ||||
|  | ||||
|         Person.drop_collection() | ||||
|         mother = Person(name="Mother") | ||||
|         daughter = Person(name="Daughter") | ||||
| @@ -411,7 +443,7 @@ class FieldTest(unittest.TestCase): | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         members = [] | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             a = UserA(name='User A %s' % i) | ||||
|             a.save() | ||||
|  | ||||
| @@ -502,7 +534,7 @@ class FieldTest(unittest.TestCase): | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         members = [] | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             a = UserA(name='User A %s' % i) | ||||
|             a.save() | ||||
|  | ||||
| @@ -585,15 +617,15 @@ class FieldTest(unittest.TestCase): | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         members = [] | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             user = User(name='user %s' % i) | ||||
|             user.save() | ||||
|             members.append(user) | ||||
|  | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|  | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|  | ||||
|         with query_counter() as q: | ||||
| @@ -658,7 +690,7 @@ class FieldTest(unittest.TestCase): | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         members = [] | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             a = UserA(name='User A %s' % i) | ||||
|             a.save() | ||||
|  | ||||
| @@ -670,9 +702,9 @@ class FieldTest(unittest.TestCase): | ||||
|  | ||||
|             members += [a, b, c] | ||||
|  | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|  | ||||
|         with query_counter() as q: | ||||
| @@ -754,16 +786,16 @@ class FieldTest(unittest.TestCase): | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         members = [] | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             a = UserA(name='User A %s' % i) | ||||
|             a.save() | ||||
|  | ||||
|             members += [a] | ||||
|  | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|  | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|  | ||||
|         with query_counter() as q: | ||||
| @@ -837,7 +869,7 @@ class FieldTest(unittest.TestCase): | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         members = [] | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             a = UserA(name='User A %s' % i) | ||||
|             a.save() | ||||
|  | ||||
| @@ -849,9 +881,9 @@ class FieldTest(unittest.TestCase): | ||||
|  | ||||
|             members += [a, b, c] | ||||
|  | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|  | ||||
|         with query_counter() as q: | ||||
| @@ -923,6 +955,8 @@ class FieldTest(unittest.TestCase): | ||||
|  | ||||
|         class Asset(Document): | ||||
|             name = StringField(max_length=250, required=True) | ||||
|             path = StringField() | ||||
|             title = StringField() | ||||
|             parent = GenericReferenceField(default=None) | ||||
|             parents = ListField(GenericReferenceField()) | ||||
|             children = ListField(GenericReferenceField()) | ||||
| @@ -1000,6 +1034,43 @@ class FieldTest(unittest.TestCase): | ||||
|         self.assertEqual(type(foo.bar), Bar) | ||||
|         self.assertEqual(type(foo.baz), Baz) | ||||
|  | ||||
|  | ||||
|     def test_document_reload_reference_integrity(self): | ||||
|         """ | ||||
|         Ensure reloading a document with multiple similar id | ||||
|         in different collections doesn't mix them. | ||||
|         """ | ||||
|         class Topic(Document): | ||||
|             id = IntField(primary_key=True) | ||||
|         class User(Document): | ||||
|             id = IntField(primary_key=True) | ||||
|             name = StringField() | ||||
|         class Message(Document): | ||||
|             id = IntField(primary_key=True) | ||||
|             topic = ReferenceField(Topic) | ||||
|             author = ReferenceField(User) | ||||
|  | ||||
|         Topic.drop_collection() | ||||
|         User.drop_collection() | ||||
|         Message.drop_collection() | ||||
|  | ||||
|         # All objects share the same id, but each in a different collection | ||||
|         topic = Topic(id=1).save() | ||||
|         user = User(id=1, name='user-name').save() | ||||
|         Message(id=1, topic=topic, author=user).save() | ||||
|  | ||||
|         concurrent_change_user = User.objects.get(id=1) | ||||
|         concurrent_change_user.name = 'new-name' | ||||
|         concurrent_change_user.save() | ||||
|         self.assertNotEqual(user.name, 'new-name') | ||||
|  | ||||
|         msg = Message.objects.get(id=1) | ||||
|         msg.reload() | ||||
|         self.assertEqual(msg.topic, topic) | ||||
|         self.assertEqual(msg.author, user) | ||||
|         self.assertEqual(msg.author.name, 'new-name') | ||||
|  | ||||
|  | ||||
|     def test_list_lookup_not_checked_in_map(self): | ||||
|         """Ensure we dereference list data correctly | ||||
|         """ | ||||
| @@ -1035,7 +1106,7 @@ class FieldTest(unittest.TestCase): | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             User(name='user %s' % i).save() | ||||
|  | ||||
|         Group(name="Test", members=User.objects).save() | ||||
| @@ -1064,7 +1135,7 @@ class FieldTest(unittest.TestCase): | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             User(name='user %s' % i).save() | ||||
|  | ||||
|         Group(name="Test", members=User.objects).save() | ||||
| @@ -1101,7 +1172,7 @@ class FieldTest(unittest.TestCase): | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         members = [] | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             a = UserA(name='User A %s' % i).save() | ||||
|             b = UserB(name='User B %s' % i).save() | ||||
|             c = UserC(name='User C %s' % i).save() | ||||
| @@ -1195,6 +1266,96 @@ class FieldTest(unittest.TestCase): | ||||
|         page = Page.objects.first() | ||||
|         self.assertEqual(page.tags[0], page.posts[0].tags[0]) | ||||
|  | ||||
|     def test_select_related_follows_embedded_referencefields(self): | ||||
|  | ||||
|         class Song(Document): | ||||
|             title = StringField() | ||||
|  | ||||
|         class PlaylistItem(EmbeddedDocument): | ||||
|             song = ReferenceField("Song") | ||||
|  | ||||
|         class Playlist(Document): | ||||
|             items = ListField(EmbeddedDocumentField("PlaylistItem")) | ||||
|  | ||||
|         Playlist.drop_collection() | ||||
|         Song.drop_collection() | ||||
|  | ||||
|         songs = [Song.objects.create(title="song %d" % i) for i in range(3)] | ||||
|         items = [PlaylistItem(song=song) for song in songs] | ||||
|         playlist = Playlist.objects.create(items=items) | ||||
|  | ||||
|         with query_counter() as q: | ||||
|             self.assertEqual(q, 0) | ||||
|  | ||||
|             playlist = Playlist.objects.first().select_related() | ||||
|             songs = [item.song for item in playlist.items] | ||||
|  | ||||
|             self.assertEqual(q, 2) | ||||
|  | ||||
|     def test_dynamic_field_dereference(self): | ||||
|         class Merchandise(Document): | ||||
|             name = StringField() | ||||
|             price = IntField() | ||||
|  | ||||
|         class Store(Document): | ||||
|             merchandises = DynamicField() | ||||
|  | ||||
|         Merchandise.drop_collection() | ||||
|         Store.drop_collection() | ||||
|  | ||||
|         merchandises = { | ||||
|             '#1': Merchandise(name='foo', price=100).save(), | ||||
|             '#2': Merchandise(name='bar', price=120).save(), | ||||
|             '#3': Merchandise(name='baz', price=110).save(), | ||||
|         } | ||||
|         Store(merchandises=merchandises).save() | ||||
|  | ||||
|         store = Store.objects().first() | ||||
|         for obj in store.merchandises.values(): | ||||
|             self.assertFalse(isinstance(obj, Merchandise)) | ||||
|  | ||||
|         store.select_related() | ||||
|         for obj in store.merchandises.values(): | ||||
|             self.assertTrue(isinstance(obj, Merchandise)) | ||||
|  | ||||
|     def test_dynamic_field_dereference_with_ordering_guarantee_on_pymongo3(self): | ||||
|         # This is because 'codec_options' is supported on pymongo3 or later | ||||
|         if IS_PYMONGO_3: | ||||
|             class OrderedDocument(Document): | ||||
|                 my_metaclass = TopLevelDocumentMetaclass | ||||
|                 __metaclass__ = TopLevelDocumentMetaclass | ||||
|  | ||||
|                 @classmethod | ||||
|                 def _get_collection(cls): | ||||
|                     collection = super(OrderedDocument, cls)._get_collection() | ||||
|                     opts = CodecOptions(document_class=OrderedDict) | ||||
|  | ||||
|                     return collection.with_options(codec_options=opts) | ||||
|  | ||||
|             class Merchandise(Document): | ||||
|                 name = StringField() | ||||
|                 price = IntField() | ||||
|  | ||||
|             class Store(OrderedDocument): | ||||
|                 merchandises = DynamicField(container_class=OrderedDict) | ||||
|  | ||||
|             Merchandise.drop_collection() | ||||
|             Store.drop_collection() | ||||
|  | ||||
|             merchandises = OrderedDict() | ||||
|             merchandises['#1'] = Merchandise(name='foo', price=100).save() | ||||
|             merchandises['#2'] = Merchandise(name='bar', price=120).save() | ||||
|             merchandises['#3'] = Merchandise(name='baz', price=110).save() | ||||
|  | ||||
|             Store(merchandises=merchandises).save() | ||||
|  | ||||
|             store = Store.objects().first() | ||||
|  | ||||
|             store.select_related() | ||||
|  | ||||
|             # confirms that the load data order is same with the one at storing | ||||
|             self.assertTrue(type(store.merchandises), OrderedDict) | ||||
|             self.assertEqual(','.join(store.merchandises.keys()), '#1,#2,#3') | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
|  | ||||
|   | ||||
| @@ -1,296 +0,0 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
| from nose.plugins.skip import SkipTest | ||||
| from mongoengine import * | ||||
|  | ||||
|  | ||||
| from mongoengine.django.shortcuts import get_document_or_404 | ||||
|  | ||||
| from django.http import Http404 | ||||
| from django.template import Context, Template | ||||
| from django.conf import settings | ||||
| from django.core.paginator import Paginator | ||||
|  | ||||
| settings.configure( | ||||
|     USE_TZ=True, | ||||
|     INSTALLED_APPS=('django.contrib.auth', 'mongoengine.django.mongo_auth'), | ||||
|     AUTH_USER_MODEL=('mongo_auth.MongoUser'), | ||||
| ) | ||||
|  | ||||
| try: | ||||
|     from django.contrib.auth import authenticate, get_user_model | ||||
|     from mongoengine.django.auth import User | ||||
|     from mongoengine.django.mongo_auth.models import ( | ||||
|         MongoUser, | ||||
|         MongoUserManager, | ||||
|         get_user_document, | ||||
|     ) | ||||
|     DJ15 = True | ||||
| except Exception: | ||||
|     DJ15 = False | ||||
| from django.contrib.sessions.tests import SessionTestsMixin | ||||
| from mongoengine.django.sessions import SessionStore, MongoSession | ||||
| from datetime import tzinfo, timedelta | ||||
| ZERO = timedelta(0) | ||||
|  | ||||
|  | ||||
class FixedOffset(tzinfo):
    """A tzinfo with a constant offset (in minutes east) from UTC."""

    def __init__(self, offset, name):
        self.__offset = timedelta(minutes=offset)
        self.__name = name

    def utcoffset(self, dt):
        return self.__offset

    def tzname(self, dt):
        return self.__name

    def dst(self, dt):
        # A fixed-offset zone never observes daylight saving time.
        return timedelta(0)
|  | ||||
|  | ||||
def activate_timezone(tz):
    """Activate Django timezone support for *tz*, if Django is available.

    Silently does nothing when django.utils.timezone cannot be imported,
    so callers need not care whether timezone support exists.
    """
    try:
        from django.utils import timezone
    except ImportError:
        return
    timezone.deactivate()
    timezone.activate(tz)
|  | ||||
|  | ||||
class QuerySetTest(unittest.TestCase):
    """Django integration: rendering mongoengine QuerySets in templates,
    get_document_or_404, pagination, and skip/limit slicing.

    NOTE(review): this module is Python-2 era (uses `xrange`); it is being
    removed in this changeset.
    """

    def setUp(self):
        # One shared test database for the whole suite.
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()
        self.Person = Person

    def test_order_by_in_django_template(self):
        """Ensure that QuerySets are properly ordered in Django template.
        """
        self.Person.drop_collection()

        self.Person(name="A", age=20).save()
        self.Person(name="D", age=10).save()
        self.Person(name="B", age=40).save()
        self.Person(name="C", age=30).save()

        t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")

        # Each ordering is rendered through the template and compared to the
        # expected concatenation of name-age pairs.
        d = {"ol": self.Person.objects.order_by('-name')}
        self.assertEqual(t.render(Context(d)), u'D-10:C-30:B-40:A-20:')
        d = {"ol": self.Person.objects.order_by('+name')}
        self.assertEqual(t.render(Context(d)), u'A-20:B-40:C-30:D-10:')
        d = {"ol": self.Person.objects.order_by('-age')}
        self.assertEqual(t.render(Context(d)), u'B-40:C-30:A-20:D-10:')
        d = {"ol": self.Person.objects.order_by('+age')}
        self.assertEqual(t.render(Context(d)), u'D-10:A-20:C-30:B-40:')

        self.Person.drop_collection()

    def test_q_object_filter_in_template(self):
        # Filtering with Q objects must survive template iteration.

        self.Person.drop_collection()

        self.Person(name="A", age=20).save()
        self.Person(name="D", age=10).save()
        self.Person(name="B", age=40).save()
        self.Person(name="C", age=30).save()

        t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")

        d = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))}
        self.assertEqual(t.render(Context(d)), 'D-10:C-30:')

        # Check double rendering doesn't throw an error
        self.assertEqual(t.render(Context(d)), 'D-10:C-30:')

    def test_get_document_or_404(self):
        # Unknown pk -> Http404; known pk -> the document itself.
        p = self.Person(name="G404")
        p.save()

        self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234')
        self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk))

    def test_pagination(self):
        """Ensure that Pagination works as expected
        """
        class Page(Document):
            name = StringField()

        Page.drop_collection()

        for i in xrange(1, 11):
            Page(name=str(i)).save()

        paginator = Paginator(Page.objects.all(), 2)

        t = Template("{% for i in page.object_list  %}{{ i.name }}:{% endfor %}")
        for p in paginator.page_range:
            # Page p of size 2 holds names (2p-1, 2p).
            d = {"page": paginator.page(p)}
            end = p * 2
            start = end - 1
            self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end))

    def test_nested_queryset_template_iterator(self):
        # Try iterating the same queryset twice, nested, in a Django template.
        names = ['A', 'B', 'C', 'D']

        class CustomUser(Document):
            name = StringField()

            def __unicode__(self):
                return self.name

        CustomUser.drop_collection()

        for name in names:
            CustomUser(name=name).save()

        users = CustomUser.objects.all().order_by('name')
        template = Template("{% for user in users %}{{ user.name }}{% ifequal forloop.counter 2 %} {% for inner_user in users %}{{ inner_user.name }}{% endfor %} {% endifequal %}{% endfor %}")
        rendered = template.render(Context({'users': users}))
        self.assertEqual(rendered, 'AB ABCD CD')

    def test_filter(self):
        """Ensure that a queryset and filters work as expected
        """

        class Note(Document):
            text = StringField()

        Note.drop_collection()

        # NOTE(review): Note declares a 'text' field but is constructed with
        # 'name' — looks like a field-name mismatch; verify intended behavior.
        for i in xrange(1, 101):
            Note(name="Note: %s" % i).save()

        # Check the count
        self.assertEqual(Note.objects.count(), 100)

        # Get the first 10 and confirm
        notes = Note.objects[:10]
        self.assertEqual(notes.count(), 10)

        # Test djangos template filters
        # self.assertEqual(length(notes), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")

        # Test with skip
        notes = Note.objects.skip(90)
        self.assertEqual(notes.count(), 10)

        # Test djangos template filters
        self.assertEqual(notes.count(), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")

        # Test with limit
        # NOTE(review): comment says "limit" but the code repeats skip(90) —
        # possibly intended Note.objects.limit(10); confirm.
        notes = Note.objects.skip(90)
        self.assertEqual(notes.count(), 10)

        # Test djangos template filters
        self.assertEqual(notes.count(), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")

        # Test with skip and limit
        notes = Note.objects.skip(10).limit(10)

        # Test djangos template filters
        self.assertEqual(notes.count(), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")
|  | ||||
|  | ||||
class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
    """Runs Django's stock session test suite against the MongoDB-backed
    SessionStore, plus a few Mongo-specific checks."""

    backend = SessionStore

    def setUp(self):
        connect(db='mongoenginetest')
        MongoSession.drop_collection()
        super(MongoDBSessionTest, self).setUp()

    # assertIn/assertNotIn shims for unittest versions that lack them.
    def assertIn(self, first, second, msg=None):
        self.assertTrue(first in second, msg)

    def assertNotIn(self, first, second, msg=None):
        self.assertFalse(first in second, msg)

    def test_first_save(self):
        # A brand-new session must persist its data on the first save().
        session = SessionStore()
        session['test'] = True
        session.save()
        self.assertTrue('test' in session)

    def test_session_expiration_tz(self):
        # Expiry must be computed correctly while a non-UTC timezone is active.
        activate_timezone(FixedOffset(60, 'UTC+1'))
        # create and save new session
        session = SessionStore()
        session.set_expiry(600)  # expire in 600 seconds
        session['test_expire'] = True
        session.save()
        # reload session with key
        key = session.session_key
        session = SessionStore(key)
        self.assertTrue('test_expire' in session, 'Session has expired before it is expected')
|  | ||||
|  | ||||
class MongoAuthTest(unittest.TestCase):
    """Tests for the mongo_auth custom Django user model integration
    (requires Django >= 1.5; skipped otherwise via setUp)."""

    # Credentials reused by every test below.
    user_data = {
        'username': 'user',
        'email': 'user@example.com',
        'password': 'test',
    }

    def setUp(self):
        if not DJ15:
            raise SkipTest('mongo_auth requires Django 1.5')
        connect(db='mongoenginetest')
        User.drop_collection()
        super(MongoAuthTest, self).setUp()

    def test_get_user_model(self):
        # AUTH_USER_MODEL (configured at module import) points at MongoUser.
        self.assertEqual(get_user_model(), MongoUser)

    def test_get_user_document(self):
        self.assertEqual(get_user_document(), User)

    def test_user_manager(self):
        manager = get_user_model()._default_manager
        self.assertTrue(isinstance(manager, MongoUserManager))

    def test_user_manager_exception(self):
        # Missing users surface as MongoUser.DoesNotExist, Django-style.
        manager = get_user_model()._default_manager
        self.assertRaises(MongoUser.DoesNotExist, manager.get,
                          username='not found')

    def test_create_user(self):
        manager = get_user_model()._default_manager
        user = manager.create_user(**self.user_data)
        self.assertTrue(isinstance(user, User))
        db_user = User.objects.get(username='user')
        self.assertEqual(user.id, db_user.id)

    def test_authenticate(self):
        # Wrong password -> None; correct password -> the stored user.
        get_user_model()._default_manager.create_user(**self.user_data)
        user = authenticate(username='user', password='fail')
        self.assertEqual(None, user)
        user = authenticate(username='user', password='test')
        db_user = User.objects.get(username='user')
        self.assertEqual(user.id, db_user.id)
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
| @@ -1,47 +0,0 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
| import jinja2 | ||||
|  | ||||
|  | ||||
class TemplateFilterTest(unittest.TestCase):
    """Ensure mongoengine QuerySets iterate correctly inside Jinja2 templates."""

    def setUp(self):
        connect(db='mongoenginetest')

    def test_jinja2(self):
        env = jinja2.Environment()

        class TestData(Document):
            title = StringField()
            description = StringField()

        TestData.drop_collection()

        examples = [('A', '1'),
                    ('B', '2'),
                    ('C', '3')]

        for title, description in examples:
            TestData(title=title, description=description).save()

        # Renders the queryset as a JSON-like object: loop.first/loop.last
        # emit the surrounding braces and the comma separators.
        tmpl = """
{%- for record in content -%}
    {%- if loop.first -%}{ {%- endif -%}
    "{{ record.title }}": "{{ record.description }}"
    {%- if loop.last -%} }{%- else -%},{% endif -%}
{%- endfor -%}
"""
        ctx = {'content': TestData.objects}
        template = env.from_string(tmpl)
        rendered = template.render(**ctx)

        self.assertEqual('{"A": "1","B": "2","C": "3"}', rendered)
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user