Compare commits
	
		
			1106 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 99637151b5 | ||
|  | a8e787c120 | ||
|  | 53339c7c72 | ||
|  | 3534bf7d70 | ||
|  | 1cf3989664 | ||
|  | fd296918da | ||
|  | 8ad1f03dc5 | ||
|  | fe7e17dbd5 | ||
|  | d582394a42 | ||
|  | 02ef0df019 | ||
|  | 0dfd6aa518 | ||
|  | 0b23bc9cf2 | ||
|  | f108c4288e | ||
|  | 9b9696aefd | ||
|  | 576e198ece | ||
|  | 52f85aab18 | ||
|  | ab60fd0490 | ||
|  | d79ae30f31 | ||
|  | f27debe7f9 | ||
|  | 735e043ff6 | ||
|  | 6e7f2b73cf | ||
|  | d645ce9745 | ||
|  | 7c08c140da | ||
|  | 81d402dc17 | ||
|  | 966fa12358 | ||
|  | 87792e1921 | ||
|  | 4c8296acc6 | ||
|  | 9989da07ed | ||
|  | 1c5e6a3425 | ||
|  | eedf908770 | ||
|  | 5c9ef41403 | ||
|  | 0bf2ad5b67 | ||
|  | a0e3f382cd | ||
|  | f09c39b5d7 | ||
|  | 89c67bf259 | ||
|  | ea666d4607 | ||
|  | b8af154439 | ||
|  | f594ece32a | ||
|  | 03beb6852a | ||
|  | ab9e9a3329 | ||
|  | a4b09344af | ||
|  | 8cb8aa392c | ||
|  | 3255519792 | ||
|  | 7e64bb2503 | ||
|  | 86a78402c3 | ||
|  | ba276452fb | ||
|  | 4ffa8d0124 | ||
|  | 4bc5082681 | ||
|  | 0e3c34e1da | ||
|  | 658b3784ae | ||
|  | 0526f577ff | ||
|  | bb1b9bc1d3 | ||
|  | b1eeb77ddc | ||
|  | 999d4a7676 | ||
|  | 1b80193aac | ||
|  | be8d39a48c | ||
|  | a2f3d70f28 | ||
|  | 676a7bf712 | ||
|  | e990a6c70c | ||
|  | 90fa0f6c4a | ||
|  | 22010d7d95 | ||
|  | 66279bd90f | ||
|  | 19da228855 | ||
|  | 9e67941bad | ||
|  | 0454fc74e9 | ||
|  | 2f6b1c7611 | ||
|  | f00bed6058 | ||
|  | 529c522594 | ||
|  | 2bb9493fcf | ||
|  | 839ed8a64a | ||
|  | 017a31ffd0 | ||
|  | 83b961c84d | ||
|  | fa07423ca5 | ||
|  | dd4af2df81 | ||
|  | 44bd8cb85b | ||
|  | 52d80ac23c | ||
|  | 43a5d73e14 | ||
|  | abc764951d | ||
|  | 9cc6164026 | ||
|  | 475488b9f2 | ||
|  | 95b1783834 | ||
|  | 12c8b5c0b9 | ||
|  | f99b7a811b | ||
|  | 0575abab23 | ||
|  | 9eebcf7beb | ||
|  | ed74477150 | ||
|  | 2801b38c75 | ||
|  | dc3fea875e | ||
|  | aab8c2b687 | ||
|  | 3577773af3 | ||
|  | dd023edc0f | ||
|  | 8ac9e6dc19 | ||
|  | f45d4d781d | ||
|  | c95652d6a8 | ||
|  | 97b37f75d3 | ||
|  | 95dae48778 | ||
|  | 73635033bd | ||
|  | c1619d2a62 | ||
|  | b87ef982f6 | ||
|  | 91aa90ad4a | ||
|  | 4b3cea9e78 | ||
|  | 2420b5e937 | ||
|  | f23a976bea | ||
|  | 4226cd08f1 | ||
|  | 7a230f1693 | ||
|  | a43d0d4612 | ||
|  | 78a40a0c70 | ||
|  | 2c69d8f0b0 | ||
|  | 0018c38b83 | ||
|  | 8df81571fc | ||
|  | d1add62a06 | ||
|  | c419f3379a | ||
|  | 69d57209f7 | ||
|  | 7ca81d6fb8 | ||
|  | 8a046bfa5d | ||
|  | 3628a7653c | ||
|  | 48f988acd7 | ||
|  | 6526923345 | ||
|  | 24fd1acce6 | ||
|  | cbb9235dc5 | ||
|  | 19ec2c9bc9 | ||
|  | 6459d4c0b6 | ||
|  | 1304f2721f | ||
|  | 8bde0c0e53 | ||
|  | 598ffd3e5c | ||
|  | 1a4533a9cf | ||
|  | 601f0eb168 | ||
|  | 3070e0bf5d | ||
|  | 83c11a9834 | ||
|  | 5c912b930e | ||
|  | 1b17fb0ae7 | ||
|  | d83e67c121 | ||
|  | ae39ed94c9 | ||
|  | 1e51180d42 | ||
|  | 87ba69d02e | ||
|  | 8879d5560b | ||
|  | c1621ee39c | ||
|  | b0aa98edb4 | ||
|  | a7a2fe0216 | ||
|  | 8e50f5fa3c | ||
|  | 31793520bf | ||
|  | 0b6b0368c5 | ||
|  | d1d30a9280 | ||
|  | 420c6f2d1e | ||
|  | 34f06c4971 | ||
|  | 9cc4bbd49d | ||
|  | f66b312869 | ||
|  | 2405ba8708 | ||
|  | a91b6bff8b | ||
|  | 450dc11a68 | ||
|  | 1ce2f84ce5 | ||
|  | f55b241cfa | ||
|  | 34d08ce8ef | ||
|  | 4f5aa8c43b | ||
|  | 27b375060d | ||
|  | cbfdc401f7 | ||
|  | b58bf3e0ce | ||
|  | 1fff7e9aca | ||
|  | 494b981b13 | ||
|  | dd93995bd0 | ||
|  | b3bb4add9c | ||
|  | d305e71c27 | ||
|  | 0d92baa670 | ||
|  | 7a1b110f62 | ||
|  | db8df057ce | ||
|  | 5d8ffded40 | ||
|  | 07f3e5356d | ||
|  | 1ece62f960 | ||
|  | 056c604dc3 | ||
|  | 2d08eec093 | ||
|  | 614b590551 | ||
|  | 6d90ce250a | ||
|  | ea31846a19 | ||
|  | e6317776c1 | ||
|  | efeaba39a4 | ||
|  | 1a97dfd479 | ||
|  | 9fecf2b303 | ||
|  | 3d0d2f48ad | ||
|  | 581605e0e2 | ||
|  | 45d3a7f6ff | ||
|  | 7ca2ea0766 | ||
|  | 89220c142b | ||
|  | c73ce3d220 | ||
|  | b0f127af4e | ||
|  | 766d54795f | ||
|  | bd41c6eea4 | ||
|  | 2435786713 | ||
|  | 9e7ea64bd2 | ||
|  | 89a6eee6af | ||
|  | 2ec1476e50 | ||
|  | 2d9b581f34 | ||
|  | 5bb63f645b | ||
|  | a856c7cc37 | ||
|  | 26db9d8a9d | ||
|  | 8060179f6d | ||
|  | 77ebd87fed | ||
|  | e4bc92235d | ||
|  | 27a4d83ce8 | ||
|  | ece9b902f8 | ||
|  | 65a2f8a68b | ||
|  | 9c212306b8 | ||
|  | 1fdc7ce6bb | ||
|  | 0b22c140c5 | ||
|  | 944aa45459 | ||
|  | c9842ba13a | ||
|  | 8840680303 | ||
|  | 376b9b1316 | ||
|  | 54bb1cb3d9 | ||
|  | 43468b474e | ||
|  | 28a957c684 | ||
|  | ec5ddbf391 | ||
|  | bab186e195 | ||
|  | bc7e874476 | ||
|  | 97114b5948 | ||
|  | 45e015d71d | ||
|  | 0ff6531953 | ||
|  | ba298c3cfc | ||
|  | 0479bea40b | ||
|  | a536097804 | ||
|  | bbefd0fdf9 | ||
|  | 2aa8b04c21 | ||
|  | aeebdfec51 | ||
|  | debfcdf498 | ||
|  | 5c4b33e8e6 | ||
|  | eb54037b66 | ||
|  | f48af8db3b | ||
|  | 97c5b957dd | ||
|  | 95e7397803 | ||
|  | 43a989978a | ||
|  | 27734a7c26 | ||
|  | dd786d6fc4 | ||
|  | be1c28fc45 | ||
|  | 20e41b3523 | ||
|  | e07ecc5cf8 | ||
|  | 3360b72531 | ||
|  | 233b13d670 | ||
|  | 5bcbb4fdaa | ||
|  | dbe2f5f2b8 | ||
|  | ca8b58d66d | ||
|  | f80f0b416f | ||
|  | d7765511ee | ||
|  | 0240a09056 | ||
|  | ab15c4eec9 | ||
|  | 4ce1ba81a6 | ||
|  | 530440b333 | ||
|  | b80fda36af | ||
|  | 42d24263ef | ||
|  | 1e2797e7ce | ||
|  | f7075766fc | ||
|  | 5647ca70bb | ||
|  | 2b8aa6bafc | ||
|  | 410443471c | ||
|  | 0bb9781b91 | ||
|  | 2769d6d7ca | ||
|  | 120b9433c2 | ||
|  | 605092bd88 | ||
|  | a4a8c94374 | ||
|  | 0e93f6c0db | ||
|  | aa2add39ad | ||
|  | a928047147 | ||
|  | c474ca0f13 | ||
|  | 88dc64653e | ||
|  | 5f4b70f3a9 | ||
|  | 51b429e5b0 | ||
|  | 360624eb6e | ||
|  | d9d2291837 | ||
|  | cbdf816232 | ||
|  | 2d71eb8a18 | ||
|  | 64d2532ce9 | ||
|  | 0376910f33 | ||
|  | 6d503119a1 | ||
|  | bfae93e57e | ||
|  | 49a66ba81a | ||
|  | a1d43fecd9 | ||
|  | d0e42a4798 | ||
|  | 2a34358abc | ||
|  | fd2bb8ea45 | ||
|  | 98e5daa0e0 | ||
|  | ad2e119282 | ||
|  | c20c30d8d1 | ||
|  | 66d215c9c1 | ||
|  | 46e088d379 | ||
|  | bbdd15161a | ||
|  | ea9dc8cfb8 | ||
|  | 6bd2ccc9bf | ||
|  | 56327c6b58 | ||
|  | 712e8a51e4 | ||
|  | 421f324f9e | ||
|  | 8fe4a70299 | ||
|  | 3af6d0dbfd | ||
|  | e2bef076d3 | ||
|  | 1bf9f28f4b | ||
|  | f1e7b97a93 | ||
|  | 8cfe13ad90 | ||
|  | 0f420abc8e | ||
|  | 3b5b715567 | ||
|  | 520051af25 | ||
|  | 7e376b40bb | ||
|  | fd18a48608 | ||
|  | 64860c6287 | ||
|  | 58635b24ba | ||
|  | 3ec9dfc108 | ||
|  | bd1572f11a | ||
|  | 540a0cc59c | ||
|  | 83eb4f6b16 | ||
|  | 95c58bd793 | ||
|  | 65591c7727 | ||
|  | 737cbf5f60 | ||
|  | 4c67cbb4b7 | ||
|  | ed2cc2a60b | ||
|  | 859e9b3cc4 | ||
|  | c34e79fad9 | ||
|  | 82446d641e | ||
|  | 9451c9f331 | ||
|  | 61411bb259 | ||
|  | fcdb0eff8f | ||
|  | 30d9347272 | ||
|  | 7564bbdee8 | ||
|  | 69251e5000 | ||
|  | 6ecdc7b59d | ||
|  | b7d0d8f0cc | ||
|  | df52ed1162 | ||
|  | aa6370dd5d | ||
|  | c272b7901f | ||
|  | c61de6540a | ||
|  | 3c7bf50089 | ||
|  | 32fc4152a7 | ||
|  | bdf7187d5c | ||
|  | 1639576203 | ||
|  | ae20c785ea | ||
|  | a2eb876f8c | ||
|  | 5a1eaa0a98 | ||
|  | 398fd4a548 | ||
|  | 44b9fb66e1 | ||
|  | 2afa2171f9 | ||
|  | 1d7ea71c0d | ||
|  | 2a391f0f16 | ||
|  | e9b8093dac | ||
|  | 6a229cfbc5 | ||
|  | 3300f409ba | ||
|  | 4466005363 | ||
|  | 296ef5bddf | ||
|  | 1f2a432e82 | ||
|  | 855933ab2a | ||
|  | ece8d25187 | ||
|  | 589a720162 | ||
|  | a59b518cf2 | ||
|  | a15352a4f8 | ||
|  | df65f3fc3f | ||
|  | 734986c1b5 | ||
|  | 4a9ed5f2f2 | ||
|  | 088f229865 | ||
|  | cb2cb851e2 | ||
|  | d3962c4f7d | ||
|  | 0301135f96 | ||
|  | f59aa922ea | ||
|  | f60a49d6f6 | ||
|  | 9a190eb00d | ||
|  | 6bad4bd415 | ||
|  | 50d9b0b796 | ||
|  | 12f884e3ac | ||
|  | 02b1aa7355 | ||
|  | 90bfa608dd | ||
|  | 13f38b1c1d | ||
|  | 1afe7240f4 | ||
|  | 7a41155178 | ||
|  | 39a20ea471 | ||
|  | d8855a4a0f | ||
|  | de8da78042 | ||
|  | 318b42dff2 | ||
|  | 0018674b62 | ||
|  | 82913e8d69 | ||
|  | 0d867a108d | ||
|  | 5ee4b4a5ac | ||
|  | 62219d9648 | ||
|  | 6d9bfff19c | ||
|  | 7614b92197 | ||
|  | 7c1afd0031 | ||
|  | ca7b2371fb | ||
|  | ed5fba6b0f | ||
|  | 2b3b3bf652 | ||
|  | 11daf706df | ||
|  | 4a269eb2c4 | ||
|  | 9b3899476c | ||
|  | febb3d7e3d | ||
|  | 83e3c5c7d8 | ||
|  | 3c271845c9 | ||
|  | 56c4292164 | ||
|  | 2531ade3bb | ||
|  | 3e2f035400 | ||
|  | e7bcb5e366 | ||
|  | 112e921ce2 | ||
|  | 216f15602b | ||
|  | fbe1901e65 | ||
|  | 8d2bc444bb | ||
|  | cf4a45da11 | ||
|  | be78209f94 | ||
|  | 45b5bf73fe | ||
|  | 84f9e44b6c | ||
|  | 700bc1b4bb | ||
|  | beef2ede25 | ||
|  | 9bfc838029 | ||
|  | e9d7353294 | ||
|  | a6948771d8 | ||
|  | 403977cd49 | ||
|  | 153538cef9 | ||
|  | 9f1196e982 | ||
|  | 6419a8d09a | ||
|  | 769cee3d64 | ||
|  | fc460b775e | ||
|  | ba59e498de | ||
|  | 939bd2bb1f | ||
|  | e231f71b4a | ||
|  | d06c5f036b | ||
|  | 071562d755 | ||
|  | 391f659af1 | ||
|  | 8a44232bfc | ||
|  | 9188f9bf62 | ||
|  | 0187a0e113 | ||
|  | beacfae400 | ||
|  | fdc385ea33 | ||
|  | 8b97808931 | ||
|  | 179c4a10c8 | ||
|  | 6cef571bfb | ||
|  | fbe8b28b2e | ||
|  | a8d91a56bf | ||
|  | 8d7291506e | ||
|  | d9005ac2fc | ||
|  | c775c0a80c | ||
|  | 700e2cd93d | ||
|  | 083f00be84 | ||
|  | d00859ecfd | ||
|  | 4e73566c11 | ||
|  | 208a467b24 | ||
|  | e1bb453f32 | ||
|  | 4607b08be5 | ||
|  | aa5c776f3d | ||
|  | 0075c0a1e8 | ||
|  | 83fff80b0f | ||
|  | 5e553ffaf7 | ||
|  | 6d185b7f7a | ||
|  | e80144e9f2 | ||
|  | fa4b820931 | ||
|  | 63c5a4dd65 | ||
|  | 34646a414c | ||
|  | 5aeee9deb2 | ||
|  | 4c1509a62a | ||
|  | bfdaae944d | ||
|  | 4e44198bbd | ||
|  | a4e8177b76 | ||
|  | 81bf5cb78b | ||
|  | a9fc476fb8 | ||
|  | 26f0c06624 | ||
|  | 59bd72a888 | ||
|  | 7d808b483e | ||
|  | 3ee60affa9 | ||
|  | 558b8123b5 | ||
|  | ecdf2ae5c7 | ||
|  | aa9ed614ad | ||
|  | 1acdb880fc | ||
|  | 7cd22aaf83 | ||
|  | 5eb63cfa30 | ||
|  | 5dc998ed52 | ||
|  | 8074094568 | ||
|  | 56d1139d71 | ||
|  | 165cdc8840 | ||
|  | c42aef74de | ||
|  | 634e1f661f | ||
|  | a1db437c42 | ||
|  | b8e2bdc99f | ||
|  | 52d4ea7d78 | ||
|  | 7db5335420 | ||
|  | 62480fe940 | ||
|  | 3d7b30da77 | ||
|  | 8e87648d53 | ||
|  | f842c90007 | ||
|  | 7f2b686ab5 | ||
|  | b09c52fc7e | ||
|  | 202d6e414f | ||
|  | 3d817f145c | ||
|  | 181e191fee | ||
|  | 79ecf027dd | ||
|  | 76d771d20f | ||
|  | 4d5f602ee7 | ||
|  | 452bbcc19b | ||
|  | 24b8650026 | ||
|  | 269e6e29d6 | ||
|  | c4b0002ddb | ||
|  | 53598781b8 | ||
|  | 0624cdd6e4 | ||
|  | 5fb9d61d28 | ||
|  | 7b1860d17b | ||
|  | 8797565606 | ||
|  | 3d97c41fe9 | ||
|  | 5edfeb2e29 | ||
|  | 268908b3b2 | ||
|  | fb70b47acb | ||
|  | 591149b1f0 | ||
|  | 9a0a0b1bd4 | ||
|  | 219d316b49 | ||
|  | 3aa2233b5d | ||
|  | d59862ae6e | ||
|  | 0a03f9a31a | ||
|  | dca135190a | ||
|  | aedcf3dc81 | ||
|  | 6961a9494f | ||
|  | 6d70ef1a08 | ||
|  | e1fc15875d | ||
|  | 94ae1388b1 | ||
|  | 17728d4e74 | ||
|  | 417aa743ca | ||
|  | 2f26f7a827 | ||
|  | 09f9c59b3d | ||
|  | bec6805296 | ||
|  | d99c7c20cc | ||
|  | 60b6ad3fcf | ||
|  | 9b4d0f6450 | ||
|  | 1a2c74391c | ||
|  | 08288e591c | ||
|  | 823cf421fa | ||
|  | 3799f27734 | ||
|  | a7edd8602c | ||
|  | c081aca794 | ||
|  | 2ca6648227 | ||
|  | 1af54f93f5 | ||
|  | a9cacd2e06 | ||
|  | f7fbb3d2f6 | ||
|  | adb7bbeea0 | ||
|  | 89c44cd14e | ||
|  | 8105bfd8b3 | ||
|  | de5b678da3 | ||
|  | 66c53f949b | ||
|  | fdc34869ca | ||
|  | 88b1a29719 | ||
|  | b91db87ae0 | ||
|  | 050542c29b | ||
|  | 60f0491f62 | ||
|  | b8a5791de6 | ||
|  | 2bc3948726 | ||
|  | ee7d370751 | ||
|  | a6449a7b2c | ||
|  | bc9a09f52e | ||
|  | 1631788ab6 | ||
|  | dd49d1d4bb | ||
|  | d6c54c7c2a | ||
|  | bda716ef9d | ||
|  | d83d226396 | ||
|  | 91a0e499d9 | ||
|  | f549d8c0bc | ||
|  | 10c30f2224 | ||
|  | 11621c6f5a | ||
|  | 97ac7e5476 | ||
|  | b037fb3e21 | ||
|  | 10bc93dfa6 | ||
|  | 88cb8f3963 | ||
|  | bd005575c4 | ||
|  | ca3b004921 | ||
|  | 8071b23bff | ||
|  | 4bfed7e719 | ||
|  | b76590dc01 | ||
|  | 4e462ffdb5 | ||
|  | 3c8cbcfee7 | ||
|  | 2a8543b3b7 | ||
|  | fd2e40d735 | ||
|  | 5f05843403 | ||
|  | 8bdb42827c | ||
|  | f6961ae9c1 | ||
|  | 3f301f6b0f | ||
|  | 89ad7ef1ab | ||
|  | 81b69648ef | ||
|  | 672a5f190b | ||
|  | 447dd62c03 | ||
|  | c4db3b6cf2 | ||
|  | 2b1eb620fc | ||
|  | 4abfcb0188 | ||
|  | 048826f6f0 | ||
|  | 5446476d99 | ||
|  | 331f8b8ae7 | ||
|  | 63ee4fef1a | ||
|  | 376ca717fa | ||
|  | 7913ed1841 | ||
|  | 3f3f93b0fa | ||
|  | 6471c6e133 | ||
|  | e3cbeb9df0 | ||
|  | 130fb9916d | ||
|  | ac72722e57 | ||
|  | 382b9a61a8 | ||
|  | 13afead9fb | ||
|  | 72aa191e70 | ||
|  | 0d1804461d | ||
|  | 273412fda1 | ||
|  | 49764b51dc | ||
|  | 5834fa840c | ||
|  | 5eb895b952 | ||
|  | d5fb3a9167 | ||
|  | cb324595ef | ||
|  | fa39789bac | ||
|  | bbd3a6961e | ||
|  | 6eb0387a78 | ||
|  | b3ef67a544 | ||
|  | 72995a4b3e | ||
|  | 7395ce5b22 | ||
|  | a4c197a83c | ||
|  | 7a3412dc13 | ||
|  | e079924632 | ||
|  | 7f0d3638ba | ||
|  | cace665858 | ||
|  | 2a8d001213 | ||
|  | a2b0266e01 | ||
|  | 1452d3fac5 | ||
|  | 031c507fde | ||
|  | 0fb629e24c | ||
|  | 0847687fd1 | ||
|  | 859de712b4 | ||
|  | 803164a993 | ||
|  | 147e33c3ca | ||
|  | dc5a613bc7 | ||
|  | 16390c1dec | ||
|  | 4e6f91ae77 | ||
|  | 556e620c7a | ||
|  | 8e1d701c27 | ||
|  | d51d95a28e | ||
|  | 3d15a3b3e2 | ||
|  | 84e611b91e | ||
|  | 4036e9fe34 | ||
|  | b039a2293f | ||
|  | 87f486c4f1 | ||
|  | 14be7ba2e2 | ||
|  | 09c32a63ce | ||
|  | 08ba51f714 | ||
|  | e3cd398f70 | ||
|  | f41c5217c6 | ||
|  | 1b0323bc22 | ||
|  | e04e5f42ef | ||
|  | c24bc77c17 | ||
|  | 99f923e27f | ||
|  | f3d265bbe0 | ||
|  | 5e7efcc8c2 | ||
|  | 62c8823e64 | ||
|  | 5cc9188c5b | ||
|  | 5e8604967c | ||
|  | cae3f3eeff | ||
|  | 22a7ee5885 | ||
|  | 658b85d327 | ||
|  | 967e72723b | ||
|  | 5411cc5573 | ||
|  | ffb3e8b7b9 | ||
|  | 94cad89e32 | ||
|  | 0338ac17b1 | ||
|  | cb1dfdfac6 | ||
|  | 576db9ca88 | ||
|  | 4c2b83d9ca | ||
|  | 7cb24446ec | ||
|  | 0ed79a839d | ||
|  | e518c51de3 | ||
|  | ea35fb1c54 | ||
|  | 7b29378319 | ||
|  | 82fbe7128f | ||
|  | c1fadcac85 | ||
|  | fd7f882011 | ||
|  | fb09fde209 | ||
|  | b2848b8519 | ||
|  | 417bb1b35d | ||
|  | 199b4eb860 | ||
|  | a66417e9d0 | ||
|  | b9255f73c3 | ||
|  | 4b9bacf731 | ||
|  | 602d7dad00 | ||
|  | d32dd9ff62 | ||
|  | 28b7ef2304 | ||
|  | 6dc2672dba | ||
|  | 9a949984ee | ||
|  | aa32d43014 | ||
|  | 4174918476 | ||
|  | 6081fc6faf | ||
|  | 7c62fdc0b8 | ||
|  | 3c88faa889 | ||
|  | d15f5ccbf4 | ||
|  | cfcd77b193 | ||
|  | 525c25b9f6 | ||
|  | c059ad47f2 | ||
|  | 48fd6c1344 | ||
|  | 1ee50922d9 | ||
|  | d63bf0abde | ||
|  | 711db45c02 | ||
|  | 55e20bda12 | ||
|  | 56f00a64d7 | ||
|  | 8553022b0e | ||
|  | 74b5043ef9 | ||
|  | 0e45078116 | ||
|  | 7e87ed79ab | ||
|  | 7312db5c25 | ||
|  | ec7effa0ef | ||
|  | 9a2cf206b2 | ||
|  | 40df08c74c | ||
|  | 5d778648e6 | ||
|  | 1fa47206aa | ||
|  | 6f5bd7b0b9 | ||
|  | c903af032f | ||
|  | 9dd3504765 | ||
|  | 97a1310344 | ||
|  | bf6f03a412 | ||
|  | 5ab13518db | ||
|  | eb892241ee | ||
|  | fac3f038a8 | ||
|  | b1cdd1eb26 | ||
|  | 60c8254f58 | ||
|  | 2ce70448b0 | ||
|  | 3861103585 | ||
|  | 0708d1bedc | ||
|  | c3a8840435 | ||
|  | 3246cf8bdd | ||
|  | 7ecf84395a | ||
|  | 32bab13a8a | ||
|  | 088c40f9f2 | ||
|  | 305fd4b232 | ||
|  | fe5111743d | ||
|  | 8427877bd2 | ||
|  | 118c0deb7a | ||
|  | 1126c85903 | ||
|  | 13935fc335 | ||
|  | 36034ee15f | ||
|  | 1b72ea9cc1 | ||
|  | 04953351f1 | ||
|  | 07e71d9ce9 | ||
|  | 5f53cda3ab | ||
|  | 9260ff9e83 | ||
|  | 08d1689268 | ||
|  | 40b69baa29 | ||
|  | b3251818cc | ||
|  | da8a057ede | ||
|  | efba9ef52a | ||
|  | fb61c9a765 | ||
|  | 95c2643f63 | ||
|  | fc2aff342b | ||
|  | 371dbf009f | ||
|  | 5d5a84dbcf | ||
|  | 7526272f84 | ||
|  | 5cbc76ea81 | ||
|  | 7ba40062d3 | ||
|  | 1781c4638b | ||
|  | 1a049ee49d | ||
|  | 31521ccff5 | ||
|  | e3b4563c2b | ||
|  | c3f5ed0e0e | ||
|  | 378b52321b | ||
|  | 98436f271e | ||
|  | a76008e440 | ||
|  | 6cf0cf9e7d | ||
|  | 608f08c267 | ||
|  | bd3340c73f | ||
|  | c379ff883a | ||
|  | 3b7a8ce449 | ||
|  | e9ad04f763 | ||
|  | f0277736e2 | ||
|  | 49c978ad9e | ||
|  | eeae1b4aea | ||
|  | 9432d1a194 | ||
|  | 3b2dbf1897 | ||
|  | 9c1ad5f631 | ||
|  | c2fef4e791 | ||
|  | 76cbb66843 | ||
|  | 96dbeea171 | ||
|  | 829df581f0 | ||
|  | bd84d08b95 | ||
|  | 2c7469c62a | ||
|  | 6f7d7537f2 | ||
|  | d7c9694be0 | ||
|  | 69171282e9 | ||
|  | 53d66b7267 | ||
|  | ba9813e5a3 | ||
|  | ce8b3ea0a1 | ||
|  | 559fc46037 | ||
|  | 10c0b035ae | ||
|  | 37818d2d72 | ||
|  | 357dd0e7cc | ||
|  | 34b923b7ac | ||
|  | 846f5a868f | ||
|  | 0acb2d904d | ||
|  | 03a757bc6e | ||
|  | 0f68df3b4a | ||
|  | 07ef58c1a7 | ||
|  | 52f5deb456 | ||
|  | e05e6b89f3 | ||
|  | ffc8b21f67 | ||
|  | 16e1f72e65 | ||
|  | 620f4a222e | ||
|  | f30fd71c5e | ||
|  | 3b55deb472 | ||
|  | 4d5164c580 | ||
|  | 5b118f64ec | ||
|  | 07dae64d66 | ||
|  | 501f033712 | ||
|  | a68cb20266 | ||
|  | 3c98a4bff5 | ||
|  | 20eb920cb4 | ||
|  | b06d794854 | ||
|  | f3da5bc092 | ||
|  | d21434dfd6 | ||
|  | ad1aa5bd3e | ||
|  | dd21ce9eac | ||
|  | bba3aeb4fa | ||
|  | 86233bcdf5 | ||
|  | 4f3eacd72c | ||
|  | 67fcdca6d4 | ||
|  | 62cc8d2ab3 | ||
|  | 3a0523dd79 | ||
|  | cec8b67b08 | ||
|  | ca8c3981c4 | ||
|  | ca56785cbc | ||
|  | b12c34334c | ||
|  | 9c8411b251 | ||
|  | 66baa4eb61 | ||
|  | 89646439e7 | ||
|  | bda4776a18 | ||
|  | c6058fafed | ||
|  | 11950eabea | ||
|  | e1282028a5 | ||
|  | 6b880aa8b3 | ||
|  | a3830be4c9 | ||
|  | ef15733efe | ||
|  | f0c5dd1bce | ||
|  | e868f37c60 | ||
|  | 18baa2dd7a | ||
|  | 2560145551 | ||
|  | 3b88a4f728 | ||
|  | 69989365c7 | ||
|  | 2b9c526b47 | ||
|  | d7c42861fb | ||
|  | e9d478ed9f | ||
|  | d6cb5b9abe | ||
|  | 5580b003b5 | ||
|  | 67736c849d | ||
|  | 39e27735cc | ||
|  | 0902b95764 | ||
|  | dc7181a3fd | ||
|  | e93c4c87d8 | ||
|  | dcec61e9b2 | ||
|  | 007f116bfa | ||
|  | 6817f3b7ba | ||
|  | 36993029ad | ||
|  | 012352cf24 | ||
|  | 26723992e3 | ||
|  | 3591593ac7 | ||
|  | d3c2dfbaee | ||
|  | b2b4456f74 | ||
|  | f666141981 | ||
|  | fb4c4e3e08 | ||
|  | 34fa5cd241 | ||
|  | 833fa3d94d | ||
|  | 92471445ec | ||
|  | 3acfd90720 | ||
|  | 4742328b90 | ||
|  | b4c54b1b62 | ||
|  | 76cb851c40 | ||
|  | 3fcc0e9789 | ||
|  | 8e65154201 | ||
|  | c0f7c4ca2d | ||
|  | db2f64c290 | ||
|  | a3c46fec07 | ||
|  | 62388cb740 | ||
|  | 9c9903664a | ||
|  | 556eed0151 | ||
|  | 4012722a8d | ||
|  | 4c68bc6c96 | ||
|  | 159923fae2 | ||
|  | 72c7a010ff | ||
|  | 2c8f004103 | ||
|  | 67a9b358a0 | ||
|  | b5eb3ea1cd | ||
|  | 98bc0a7c10 | ||
|  | bb24879149 | ||
|  | 3d6ee0ce00 | ||
|  | ee72845701 | ||
|  | d158727154 | ||
|  | 73092dcb33 | ||
|  | 91ddd310ba | ||
|  | 20dd7562e0 | ||
|  | b7e84031e3 | ||
|  | f11ee1f9cf | ||
|  | 449f5a00dc | ||
|  | bd1bf9ba24 | ||
|  | 2af5f3c56e | ||
|  | 1849f75ad0 | ||
|  | 32e66b29f4 | ||
|  | 69012e8ad1 | ||
|  | 17642c8a8c | ||
|  | f1aec68f23 | ||
|  | 3e30d71263 | ||
|  | 266f33adc4 | ||
|  | dcc8d22cec | ||
|  | 9540555b26 | ||
|  | 185e7a6a7e | ||
|  | c39f315ddc | ||
|  | 5b230b90b9 | ||
|  | 1ed9a36d0a | ||
|  | e0911a5fe0 | ||
|  | 3297578e8d | ||
|  | 954d5c16d8 | ||
|  | 95efa39b52 | ||
|  | c27ccc91d2 | ||
|  | 4fb6fcabef | ||
|  | 2635e41f69 | ||
|  | ba01817ee3 | ||
|  | 1e1d7073c8 | ||
|  | 40eb23a97a | ||
|  | f4711699e4 | ||
|  | 3b62cf80cd | ||
|  | d99c5973c3 | ||
|  | 7de9adc6b1 | ||
|  | 17addbefe2 | ||
|  | d274576b47 | ||
|  | 6373e20696 | ||
|  | 809fe44b43 | ||
|  | 198ccc028a | ||
|  | b96e27a7e4 | ||
|  | 1147ac4350 | ||
|  | 21d267cb11 | ||
|  | 6791f205af | ||
|  | 7ab2e21c10 | ||
|  | 2f991ac6f1 | ||
|  | 9411b38508 | ||
|  | 386c48b116 | ||
|  | 9d82911f63 | ||
|  | 51065e7a4d | ||
|  | 327452622e | ||
|  | 13316e5380 | ||
|  | 9f98025b8c | ||
|  | 564f950037 | ||
|  | be651caa68 | ||
|  | aa00feb6a5 | ||
|  | 03c0fd9ada | ||
|  | 6093e88eeb | ||
|  | ec519f20fa | ||
|  | d3495896fa | ||
|  | 323c86308a | ||
|  | f9057e1a28 | ||
|  | 9596a25bb9 | ||
|  | 47bfeec115 | ||
|  | 6bfd6c322b | ||
|  | 0512dd4c25 | ||
|  | acbc741037 | ||
|  | c2163ecee5 | ||
|  | 71689fcf23 | ||
|  | 1c334141ee | ||
|  | b89d71bfa5 | ||
|  | 3179c4e4ac | ||
|  | f5e39c0064 | ||
|  | 86e2797c57 | ||
|  | 39b749432a | ||
|  | 0ad343484f | ||
|  | 196606438c | ||
|  | 6896818bfd | ||
|  | eb4f0ad7fb | ||
|  | 467e61bcc1 | ||
|  | a2c78c9063 | ||
|  | b23353e376 | ||
|  | b8e9790de3 | ||
|  | e37e8d9e65 | ||
|  | f657432be3 | ||
|  | 80c2895e56 | ||
|  | 88da998532 | ||
|  | 225972e151 | ||
|  | 4972bdb383 | ||
|  | 11c7a15067 | ||
|  | 9df725165b | ||
|  | 682326c130 | ||
|  | 86575cb035 | ||
|  | eecc6188a7 | ||
|  | 3b4df4615a | ||
|  | edfda6ad5b | ||
|  | 3c7e8be2e7 | ||
|  | 416fcba846 | ||
|  | e196e229cd | ||
|  | da57572409 | ||
|  | ef172712da | ||
|  | 170c56bcb9 | ||
|  | f3ca9fa4c5 | ||
|  | 48facec524 | ||
|  | ee0c75a26d | ||
|  | e9c92f30ba | ||
|  | 0a074e52e0 | ||
|  | da3f4c30e2 | ||
|  | 2b08ca7c99 | ||
|  | c8e466a160 | ||
|  | a39685d98c | ||
|  | 90200dbe9c | ||
|  | 2304dac8e3 | ||
|  | 38b2919c0d | ||
|  | 207fd9fcb7 | ||
|  | fbcf58c48f | ||
|  | 8f4a579df9 | ||
|  | 600ca3bcf9 | ||
|  | a4d2f22fd2 | ||
|  | 00c8d7e6f5 | ||
|  | 0d89e967f2 | ||
|  | 447f8d0113 | ||
|  | 60802796cb | ||
|  | 5b42578cb1 | ||
|  | 25a0a5364a | ||
|  | 047cc218a6 | ||
|  | 39fc862676 | ||
|  | f47d926f29 | ||
|  | f4d0938e3d | ||
|  | f156da4ec2 | ||
|  | 0c1e5da9a8 | ||
|  | d6b317c552 | ||
|  | 01826c6876 | ||
|  | 0b62c9d2f6 | ||
|  | 72161a9b71 | ||
|  | df8f4e7251 | ||
|  | aa13ab37c4 | ||
|  | acda64a837 | ||
|  | 49a001a93a | ||
|  | 22a6ec7794 | ||
|  | 26c6e4997c | ||
|  | d7086fc4a3 | ||
|  | 92150e07d3 | ||
|  | ac3c857e1a | ||
|  | 48e313fb44 | ||
|  | 5390117275 | ||
|  | 0b3af2052f | ||
|  | bb19ba3eb6 | ||
|  | 879bf08d18 | ||
|  | b99421e7ee | ||
|  | 3b6d8fab47 | ||
|  | 53c0cdc0c1 | ||
|  | 58f877de1a | ||
|  | 95a7b33fb4 | ||
|  | 81dd5adccf | ||
|  | 94e86a0be1 | ||
|  | 5b2dbfe007 | ||
|  | 4451843a39 | ||
|  | 5e2c5fa97b | ||
|  | 018b206177 | ||
|  | 03d31b1890 | ||
|  | 265776566e | ||
|  | 6e77e32855 | ||
|  | 0b1c506626 | ||
|  | 719a653375 | ||
|  | 66520c77f8 | ||
|  | ab2d019349 | ||
|  | d0e0b291df | ||
|  | 200e9eca92 | ||
|  | 634f771547 | ||
|  | 2996f8919d | ||
|  | 1b68efe7c7 | ||
|  | a19a7b976c | ||
|  | 145b0c33fc | ||
|  | 8b1a39f2c1 | ||
|  | 6dbc051409 | ||
|  | c148a5bbfc | ||
|  | 90d9bd9723 | ||
|  | bc7e6ccf53 | ||
|  | 6cab002214 | ||
|  | 3762a69537 | ||
|  | 348f7b5dfc | ||
|  | 008a62e4e9 | ||
|  | a4c5fa57e0 | ||
|  | 9be6c41af7 | ||
|  | 5c311eefb1 | ||
|  | d0ceb74a2e | ||
|  | ea1fe6a538 | ||
|  | a93509c9b3 | ||
|  | 210e9e23af | ||
|  | c4513f0286 | ||
|  | 1114572b47 | ||
|  | b2588d1c4f | ||
|  | 69d3e0c4b6 | ||
|  | e2414d8fea | ||
|  | 24db0d1499 | ||
|  | 89f505bb13 | ||
|  | df5b1f3806 | ||
|  | 755deb3ffe | ||
|  | 59f8c9f38e | ||
|  | 69e9b5d55e | ||
|  | a2d8b0ffbe | ||
|  | 0bbf3a3d76 | ||
|  | 10de19d38b | ||
|  | 73aff806f3 | ||
|  | 963a223e7e | ||
|  | bbfc2f416e | ||
|  | e05d31eaaf | ||
|  | 431f006751 | ||
|  | ffc9d7b152 | ||
|  | 79604180db | ||
|  | 7d6e117f68 | ||
|  | b3cc2f990a | ||
|  | 8d953f0bcb | ||
|  | 5cac52720c | ||
|  | bca6119db8 | ||
|  | 568000805f | ||
|  | 3fb6307596 | ||
|  | 7aa0031dec | ||
|  | 2585f1b724 | ||
|  | 470e08f616 | ||
|  | f1e51f9708 | ||
|  | e0becc109d | ||
|  | 47e4dd40cd | ||
|  | c38faebc25 | ||
|  | b0b8e11c60 | ||
|  | 7e0fcb9e65 | ||
|  | 972235cf06 | ||
|  | b3c9a76619 | ||
|  | 5f84d6f8f8 | ||
|  | 1cdeb8130d | 
							
								
								
									
										17
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										17
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -1,4 +1,17 @@ | |||||||
| *.pyc | .* | ||||||
| .*.swp | !.gitignore | ||||||
|  | *~ | ||||||
|  | *.py[co] | ||||||
|  | .*.sw[po] | ||||||
|  | *.egg | ||||||
| docs/.build | docs/.build | ||||||
| docs/_build | docs/_build | ||||||
|  | build/ | ||||||
|  | dist/ | ||||||
|  | mongoengine.egg-info/ | ||||||
|  | env/ | ||||||
|  | .settings | ||||||
|  | .project | ||||||
|  | .pydevproject | ||||||
|  | tests/test_bugfix.py | ||||||
|  | htmlcov/ | ||||||
							
								
								
									
										28
									
								
								.travis.yml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										28
									
								
								.travis.yml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,28 @@ | |||||||
|  | # http://travis-ci.org/#!/MongoEngine/mongoengine | ||||||
|  | language: python | ||||||
|  | python: | ||||||
|  |     - 2.5 | ||||||
|  |     - 2.6 | ||||||
|  |     - 2.7 | ||||||
|  |     - 3.1 | ||||||
|  |     - 3.2 | ||||||
|  | env: | ||||||
|  |   - PYMONGO=dev | ||||||
|  |   - PYMONGO=2.3 | ||||||
|  |   - PYMONGO=2.2 | ||||||
|  | install: | ||||||
|  |     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install zlib1g zlib1g-dev; fi | ||||||
|  |     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/; fi | ||||||
|  |     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi | ||||||
|  |     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi | ||||||
|  |     - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi | ||||||
|  |     - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi | ||||||
|  |     - python setup.py install | ||||||
|  | script: | ||||||
|  |     - python setup.py test | ||||||
|  | notifications: | ||||||
|  |   irc: "irc.freenode.org#mongoengine" | ||||||
|  | branches: | ||||||
|  |   only: | ||||||
|  |     - master | ||||||
|  |     - 0.7 | ||||||
							
								
								
									
										125
									
								
								AUTHORS
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										125
									
								
								AUTHORS
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,125 @@ | |||||||
|  | The PRIMARY AUTHORS are (and/or have been): | ||||||
|  |  | ||||||
|  | Ross Lawley <ross.lawley@gmail.com> | ||||||
|  | Harry Marr <harry@hmarr.com> | ||||||
|  | Matt Dennewitz <mattdennewitz@gmail.com> | ||||||
|  | Deepak Thukral <iapain@yahoo.com> | ||||||
|  | Florian Schlachter <flori@n-schlachter.de> | ||||||
|  | Steve Challis <steve@stevechallis.com> | ||||||
|  | Wilson Júnior <wilsonpjunior@gmail.com> | ||||||
|  | Dan Crosta https://github.com/dcrosta | ||||||
|  | Laine Herron https://github.com/LaineHerron | ||||||
|  |  | ||||||
|  | CONTRIBUTORS | ||||||
|  |  | ||||||
|  | Dervived from the git logs, inevitably incomplete but all of whom and others | ||||||
|  | have submitted patches, reported bugs and generally helped make MongoEngine | ||||||
|  | that much better: | ||||||
|  |  | ||||||
|  |  * Harry Marr | ||||||
|  |  * Ross Lawley | ||||||
|  |  * blackbrrr | ||||||
|  |  * Florian Schlachter | ||||||
|  |  * Vincent Driessen | ||||||
|  |  * Steve Challis | ||||||
|  |  * flosch | ||||||
|  |  * Deepak Thukral | ||||||
|  |  * Colin Howe | ||||||
|  |  * Wilson Júnior | ||||||
|  |  * Alistair Roche | ||||||
|  |  * Dan Crosta | ||||||
|  |  * Viktor Kerkez | ||||||
|  |  * Stephan Jaekel | ||||||
|  |  * Rached Ben Mustapha | ||||||
|  |  * Greg Turner | ||||||
|  |  * Daniel Hasselrot | ||||||
|  |  * Mircea Pasoi | ||||||
|  |  * Matt Chisholm | ||||||
|  |  * James Punteney | ||||||
|  |  * TimothéePeignier | ||||||
|  |  * Stuart Rackham | ||||||
|  |  * Serge Matveenko | ||||||
|  |  * Matt Dennewitz | ||||||
|  |  * Don Spaulding | ||||||
|  |  * Ales Zoulek | ||||||
|  |  * sshwsfc | ||||||
|  |  * sib | ||||||
|  |  * Samuel Clay | ||||||
|  |  * Nick Vlku | ||||||
|  |  * martin | ||||||
|  |  * Flavio Amieiro | ||||||
|  |  * Анхбаяр Лхагвадорж | ||||||
|  |  * Zak Johnson | ||||||
|  |  * Victor Farazdagi | ||||||
|  |  * vandersonmota | ||||||
|  |  * Theo Julienne | ||||||
|  |  * sp | ||||||
|  |  * Slavi Pantaleev | ||||||
|  |  * Richard Henry | ||||||
|  |  * Nicolas Perriault | ||||||
|  |  * Nick Vlku Jr | ||||||
|  |  * Michael Henson | ||||||
|  |  * Leo Honkanen | ||||||
|  |  * kuno | ||||||
|  |  * Josh Ourisman | ||||||
|  |  * Jaime | ||||||
|  |  * Igor Ivanov | ||||||
|  |  * Gregg Lind | ||||||
|  |  * Gareth Lloyd | ||||||
|  |  * Albert Choi | ||||||
|  |  * John Arnfield | ||||||
|  |  * grubberr | ||||||
|  |  * Paul Aliagas | ||||||
|  |  * Paul Cunnane | ||||||
|  |  * Julien Rebetez | ||||||
|  |  * Marc Tamlyn | ||||||
|  |  * Karim Allah | ||||||
|  |  * Adam Parrish | ||||||
|  |  * jpfarias | ||||||
|  |  * jonrscott | ||||||
|  |  * Alice Zoë Bevan-McGregor | ||||||
|  |  * Stephen Young | ||||||
|  |  * tkloc | ||||||
|  |  * aid | ||||||
|  |  * yamaneko1212 | ||||||
|  |  * dave mankoff | ||||||
|  |  * Alexander G. Morano | ||||||
|  |  * jwilder | ||||||
|  |  * Joe Shaw | ||||||
|  |  * Adam Flynn | ||||||
|  |  * Ankhbayar | ||||||
|  |  * Jan Schrewe | ||||||
|  |  * David Koblas | ||||||
|  |  * Crittercism | ||||||
|  |  * Alvin Liang | ||||||
|  |  * andrewmlevy | ||||||
|  |  * Chris Faulkner | ||||||
|  |  * Ashwin Purohit | ||||||
|  |  * Shalabh Aggarwal | ||||||
|  |  * Chris Williams | ||||||
|  |  * Robert Kajic | ||||||
|  |  * Jacob Peddicord | ||||||
|  |  * Nils Hasenbanck | ||||||
|  |  * mostlystatic | ||||||
|  |  * Greg Banks | ||||||
|  |  * swashbuckler | ||||||
|  |  * Adam Reeve | ||||||
|  |  * Anthony Nemitz | ||||||
|  |  * deignacio | ||||||
|  |  * shaunduncan | ||||||
|  |  * Meir Kriheli | ||||||
|  |  * Andrey Fedoseev | ||||||
|  |  * aparajita | ||||||
|  |  * Tristan Escalada | ||||||
|  |  * Alexander Koshelev | ||||||
|  |  * Jaime Irurzun | ||||||
|  |  * Alexandre González | ||||||
|  |  * Thomas Steinacher | ||||||
|  |  * Tommi Komulainen | ||||||
|  |  * Peter Landry | ||||||
|  |  * biszkoptwielki | ||||||
|  |  * Anton Kolechkin | ||||||
|  |  * Sergey Nikitin | ||||||
|  |  * psychogenic | ||||||
|  |  * Stefan Wójcik | ||||||
|  |  * dimonb | ||||||
							
								
								
									
										2
									
								
								LICENSE
									
									
									
									
									
								
							
							
						
						
									
										2
									
								
								LICENSE
									
									
									
									
									
								
							| @@ -1,4 +1,4 @@ | |||||||
| Copyright (c) 2009-2010 Harry Marr | Copyright (c) 2009-2012 See AUTHORS | ||||||
|  |  | ||||||
| Permission is hereby granted, free of charge, to any person | Permission is hereby granted, free of charge, to any person | ||||||
| obtaining a copy of this software and associated documentation | obtaining a copy of this software and associated documentation | ||||||
|   | |||||||
| @@ -1,6 +1,6 @@ | |||||||
|  | include MANIFEST.in | ||||||
| include README.rst | include README.rst | ||||||
| include LICENSE | include LICENSE | ||||||
|  | include AUTHORS | ||||||
| recursive-include docs * | recursive-include docs * | ||||||
| prune docs/_build/* | prune docs/_build | ||||||
| recursive-include tests * |  | ||||||
| recursive-exclude * *.pyc *.swp |  | ||||||
|   | |||||||
							
								
								
									
										36
									
								
								README.rst
									
									
									
									
									
								
							
							
						
						
									
										36
									
								
								README.rst
									
									
									
									
									
								
							| @@ -2,26 +2,31 @@ | |||||||
| MongoEngine | MongoEngine | ||||||
| =========== | =========== | ||||||
| :Info: MongoEngine is an ORM-like layer on top of PyMongo. | :Info: MongoEngine is an ORM-like layer on top of PyMongo. | ||||||
|  | :Repository: https://github.com/MongoEngine/mongoengine | ||||||
| :Author: Harry Marr (http://github.com/hmarr) | :Author: Harry Marr (http://github.com/hmarr) | ||||||
|  | :Maintainer: Ross Lawley (http://github.com/rozza) | ||||||
|  |  | ||||||
|  | .. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master | ||||||
|  |   :target: http://travis-ci.org/MongoEngine/mongoengine | ||||||
|  |  | ||||||
| About | About | ||||||
| ===== | ===== | ||||||
| MongoEngine is a Python Object-Document Mapper for working with MongoDB. | MongoEngine is a Python Object-Document Mapper for working with MongoDB. | ||||||
| Documentation available at http://hmarr.com/mongoengine/ - there is currently  | Documentation available at http://mongoengine-odm.rtfd.org - there is currently | ||||||
| a `tutorial <http://hmarr.com/mongoengine/tutorial.html>`_, a `user guide  | a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide | ||||||
| <http://hmarr.com/mongoengine/userguide.html>`_ and an `API reference | <http://readthedocs.org/docs/mongoengine-odm/en/latest/userguide.html>`_ and an `API reference | ||||||
| <http://hmarr.com/mongoengine/apireference.html>`_. | <http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_. | ||||||
|  |  | ||||||
| Installation | Installation | ||||||
| ============ | ============ | ||||||
| If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | ||||||
| you can use ``easy_install mongoengine``. Otherwise, you can download the | you can use ``easy_install -U mongoengine``. Otherwise, you can download the | ||||||
| source from `GitHub <http://github.com/hmarr/mongoengine>`_ and run ``python | source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python | ||||||
| setup.py install``. | setup.py install``. | ||||||
|  |  | ||||||
| Dependencies | Dependencies | ||||||
| ============ | ============ | ||||||
| - pymongo 1.1+ | - pymongo 2.1.1+ | ||||||
| - sphinx (optional - for documentation generation) | - sphinx (optional - for documentation generation) | ||||||
|  |  | ||||||
| Examples | Examples | ||||||
| @@ -58,11 +63,6 @@ Some simple examples of what MongoEngine code looks like:: | |||||||
|     ...         print 'Link:', post.url |     ...         print 'Link:', post.url | ||||||
|     ...     print |     ...     print | ||||||
|     ... |     ... | ||||||
|     === Using MongoEngine === |  | ||||||
|     See the tutorial |  | ||||||
|  |  | ||||||
|     === MongoEngine Docs === |  | ||||||
|     Link: hmarr.com/mongoengine |  | ||||||
|  |  | ||||||
|     >>> len(BlogPost.objects) |     >>> len(BlogPost.objects) | ||||||
|     2 |     2 | ||||||
| @@ -80,10 +80,18 @@ Some simple examples of what MongoEngine code looks like:: | |||||||
| Tests | Tests | ||||||
| ===== | ===== | ||||||
| To run the test suite, ensure you are running a local instance of MongoDB on | To run the test suite, ensure you are running a local instance of MongoDB on | ||||||
| the standard port, and run ``python setup.py test``. | the standard port, and run: ``python setup.py test``. | ||||||
|  |  | ||||||
|  | Community | ||||||
|  | ========= | ||||||
|  | - `MongoEngine Users mailing list | ||||||
|  |   <http://groups.google.com/group/mongoengine-users>`_ | ||||||
|  | - `MongoEngine Developers mailing list | ||||||
|  |   <http://groups.google.com/group/mongoengine-dev>`_ | ||||||
|  | - `#mongoengine IRC channel <http://webchat.freenode.net/?channels=mongoengine>`_ | ||||||
|  |  | ||||||
| Contributing | Contributing | ||||||
| ============ | ============ | ||||||
| The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ - to | The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to | ||||||
| contribute to the project, fork it on GitHub and send a pull request, all | contribute to the project, fork it on GitHub and send a pull request, all | ||||||
| contributions and suggestions are welcome! | contributions and suggestions are welcome! | ||||||
|   | |||||||
							
								
								
									
										199
									
								
								benchmark.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										199
									
								
								benchmark.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,199 @@ | |||||||
|  | #!/usr/bin/env python | ||||||
|  |  | ||||||
|  | import timeit | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def cprofile_main(): | ||||||
|  |     from pymongo import Connection | ||||||
|  |     connection = Connection() | ||||||
|  |     connection.drop_database('timeit_test') | ||||||
|  |     connection.disconnect() | ||||||
|  |  | ||||||
|  |     from mongoengine import Document, DictField, connect | ||||||
|  |     connect("timeit_test") | ||||||
|  |  | ||||||
|  |     class Noddy(Document): | ||||||
|  |         fields = DictField() | ||||||
|  |  | ||||||
|  |     for i in xrange(1): | ||||||
|  |         noddy = Noddy() | ||||||
|  |         for j in range(20): | ||||||
|  |             noddy.fields["key" + str(j)] = "value " + str(j) | ||||||
|  |         noddy.save() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def main(): | ||||||
|  |     """ | ||||||
|  |     0.4 Performance Figures ... | ||||||
|  |  | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - Pymongo | ||||||
|  |     3.86744189262 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine | ||||||
|  |     6.23374891281 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||||
|  |     5.33027005196 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||||
|  |     pass - No Cascade | ||||||
|  |  | ||||||
|  |     0.5.X | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - Pymongo | ||||||
|  |     3.89597702026 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine | ||||||
|  |     21.7735359669 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||||
|  |     19.8670389652 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||||
|  |     pass - No Cascade | ||||||
|  |  | ||||||
|  |     0.6.X | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - Pymongo | ||||||
|  |     3.81559205055 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine | ||||||
|  |     10.0446798801 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||||
|  |     9.51354718208 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||||
|  |     9.02567505836 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, force=True | ||||||
|  |     8.44933390617 | ||||||
|  |  | ||||||
|  |     0.7.X | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - Pymongo | ||||||
|  |     3.78801012039 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine | ||||||
|  |     9.73050498962 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||||
|  |     8.33456707001 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||||
|  |     8.37778115273 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, force=True | ||||||
|  |     8.36906409264 | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     setup = """ | ||||||
|  | from pymongo import Connection | ||||||
|  | connection = Connection() | ||||||
|  | connection.drop_database('timeit_test') | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | from pymongo import Connection | ||||||
|  | connection = Connection() | ||||||
|  |  | ||||||
|  | db = connection.timeit_test | ||||||
|  | noddy = db.noddy | ||||||
|  |  | ||||||
|  | for i in xrange(10000): | ||||||
|  |     example = {'fields': {}} | ||||||
|  |     for j in range(20): | ||||||
|  |         example['fields']["key"+str(j)] = "value "+str(j) | ||||||
|  |  | ||||||
|  |     noddy.insert(example) | ||||||
|  |  | ||||||
|  | myNoddys = noddy.find() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print "-" * 100 | ||||||
|  |     print """Creating 10000 dictionaries - Pymongo""" | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print t.timeit(1) | ||||||
|  |  | ||||||
|  |     setup = """ | ||||||
|  | from pymongo import Connection | ||||||
|  | connection = Connection() | ||||||
|  | connection.drop_database('timeit_test') | ||||||
|  | connection.disconnect() | ||||||
|  |  | ||||||
|  | from mongoengine import Document, DictField, connect | ||||||
|  | connect("timeit_test") | ||||||
|  |  | ||||||
|  | class Noddy(Document): | ||||||
|  |     fields = DictField() | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in xrange(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save() | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print "-" * 100 | ||||||
|  |     print """Creating 10000 dictionaries - MongoEngine""" | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print t.timeit(1) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in xrange(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(safe=False, validate=False) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print "-" * 100 | ||||||
|  |     print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False""" | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print t.timeit(1) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in xrange(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(safe=False, validate=False, cascade=False) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print "-" * 100 | ||||||
|  |     print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False""" | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print t.timeit(1) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in xrange(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(force_insert=True, safe=False, validate=False, cascade=False) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print "-" * 100 | ||||||
|  |     print """Creating 10000 dictionaries - MongoEngine, force=True""" | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print t.timeit(1) | ||||||
|  |  | ||||||
|  | if __name__ == "__main__": | ||||||
|  |     main() | ||||||
| @@ -6,6 +6,7 @@ Connecting | |||||||
| ========== | ========== | ||||||
|  |  | ||||||
| .. autofunction:: mongoengine.connect | .. autofunction:: mongoengine.connect | ||||||
|  | .. autofunction:: mongoengine.register_connection | ||||||
|  |  | ||||||
| Documents | Documents | ||||||
| ========= | ========= | ||||||
| @@ -21,6 +22,18 @@ Documents | |||||||
| .. autoclass:: mongoengine.EmbeddedDocument | .. autoclass:: mongoengine.EmbeddedDocument | ||||||
|    :members: |    :members: | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.DynamicDocument | ||||||
|  |    :members: | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.DynamicEmbeddedDocument | ||||||
|  |    :members: | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.document.MapReduceDocument | ||||||
|  |   :members: | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.ValidationError | ||||||
|  |   :members: | ||||||
|  |  | ||||||
| Querying | Querying | ||||||
| ======== | ======== | ||||||
|  |  | ||||||
| @@ -34,20 +47,28 @@ Querying | |||||||
| Fields | Fields | ||||||
| ====== | ====== | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.StringField | .. autoclass:: mongoengine.BinaryField | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.IntField |  | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.FloatField |  | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.BooleanField | .. autoclass:: mongoengine.BooleanField | ||||||
|  | .. autoclass:: mongoengine.ComplexDateTimeField | ||||||
| .. autoclass:: mongoengine.DateTimeField | .. autoclass:: mongoengine.DateTimeField | ||||||
|  | .. autoclass:: mongoengine.DecimalField | ||||||
|  | .. autoclass:: mongoengine.DictField | ||||||
|  | .. autoclass:: mongoengine.DynamicField | ||||||
|  | .. autoclass:: mongoengine.EmailField | ||||||
| .. autoclass:: mongoengine.EmbeddedDocumentField | .. autoclass:: mongoengine.EmbeddedDocumentField | ||||||
|  | .. autoclass:: mongoengine.FileField | ||||||
|  | .. autoclass:: mongoengine.FloatField | ||||||
|  | .. autoclass:: mongoengine.GenericEmbeddedDocumentField | ||||||
|  | .. autoclass:: mongoengine.GenericReferenceField | ||||||
|  | .. autoclass:: mongoengine.GeoPointField | ||||||
|  | .. autoclass:: mongoengine.ImageField | ||||||
|  | .. autoclass:: mongoengine.IntField | ||||||
| .. autoclass:: mongoengine.ListField | .. autoclass:: mongoengine.ListField | ||||||
|  | .. autoclass:: mongoengine.MapField | ||||||
| .. autoclass:: mongoengine.ObjectIdField | .. autoclass:: mongoengine.ObjectIdField | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.ReferenceField | .. autoclass:: mongoengine.ReferenceField | ||||||
|  | .. autoclass:: mongoengine.SequenceField | ||||||
|  | .. autoclass:: mongoengine.SortedListField | ||||||
|  | .. autoclass:: mongoengine.StringField | ||||||
|  | .. autoclass:: mongoengine.URLField | ||||||
|  | .. autoclass:: mongoengine.UUIDField | ||||||
|   | |||||||
| @@ -2,6 +2,373 @@ | |||||||
| Changelog | Changelog | ||||||
| ========= | ========= | ||||||
|  |  | ||||||
|  | Changes in 0.7.2 | ||||||
|  | ================ | ||||||
|  | - Update index spec generation so its not destructive (MongoEngine/mongoengine#113) | ||||||
|  |  | ||||||
|  | Changes in 0.7.1 | ||||||
|  | ================= | ||||||
|  | - Fixed index spec inheritance (MongoEngine/mongoengine#111) | ||||||
|  |  | ||||||
|  | Changes in 0.7.0 | ||||||
|  | ================= | ||||||
|  | - Updated queryset.delete so you can use with skip / limit (MongoEngine/mongoengine#107) | ||||||
|  | - Updated index creation allows kwargs to be passed through refs (MongoEngine/mongoengine#104) | ||||||
|  | - Fixed Q object merge edge case (MongoEngine/mongoengine#109) | ||||||
|  | - Fixed reloading on sharded documents (hmarr/mongoengine#569) | ||||||
|  | - Added NotUniqueError for duplicate keys (MongoEngine/mongoengine#62) | ||||||
|  | - Added custom collection / sequence naming for SequenceFields (MongoEngine/mongoengine#92) | ||||||
|  | - Fixed UnboundLocalError in composite index with pk field (MongoEngine/mongoengine#88) | ||||||
|  | - Updated ReferenceField's to optionally store ObjectId strings | ||||||
|  |   this will become the default in 0.8 (MongoEngine/mongoengine#89) | ||||||
|  | - Added FutureWarning - save will default to `cascade=False` in 0.8 | ||||||
|  | - Added example of indexing embedded document fields (MongoEngine/mongoengine#75) | ||||||
|  | - Fixed ImageField resizing when forcing size (MongoEngine/mongoengine#80) | ||||||
|  | - Add flexibility for fields handling bad data (MongoEngine/mongoengine#78) | ||||||
|  | - Embedded Documents no longer handle meta definitions | ||||||
|  | - Use weakref proxies in base lists / dicts (MongoEngine/mongoengine#74) | ||||||
|  | - Improved queryset filtering (hmarr/mongoengine#554) | ||||||
|  | - Fixed Dynamic Documents and Embedded Documents (hmarr/mongoengine#561) | ||||||
|  | - Fixed abstract classes and shard keys (MongoEngine/mongoengine#64) | ||||||
|  | - Fixed Python 2.5 support | ||||||
|  | - Added Python 3 support (thanks to Laine Heron) | ||||||
|  |  | ||||||
|  | Changes in 0.6.20 | ||||||
|  | ================= | ||||||
|  | - Added support for distinct and db_alias (MongoEngine/mongoengine#59) | ||||||
|  | - Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554) | ||||||
|  | - Fixed BinaryField lookup re (MongoEngine/mongoengine#48) | ||||||
|  |  | ||||||
|  | Changes in 0.6.19 | ||||||
|  | ================= | ||||||
|  |  | ||||||
|  | - Added Binary support to UUID (MongoEngine/mongoengine#47) | ||||||
|  | - Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46) | ||||||
|  | - Fixed BinaryField python value issue (MongoEngine/mongoengine#48) | ||||||
|  | - Fixed SequenceField non numeric value lookup (MongoEngine/mongoengine#41) | ||||||
|  | - Fixed queryset manager issue (MongoEngine/mongoengine#52) | ||||||
|  | - Fixed FileField comparision (hmarr/mongoengine#547) | ||||||
|  |  | ||||||
|  | Changes in 0.6.18 | ||||||
|  | ================= | ||||||
|  | - Fixed recursion loading bug in _get_changed_fields | ||||||
|  |  | ||||||
|  | Changes in 0.6.17 | ||||||
|  | ================= | ||||||
|  | - Fixed issue with custom queryset manager expecting explict variable names | ||||||
|  |  | ||||||
|  | Changes in 0.6.16 | ||||||
|  | ================= | ||||||
|  | - Fixed issue where db_alias wasn't inherited | ||||||
|  |  | ||||||
|  | Changes in 0.6.15 | ||||||
|  | ================= | ||||||
|  | - Updated validation error messages | ||||||
|  | - Added support for null / zero / false values in item_frequencies | ||||||
|  | - Fixed cascade save edge case | ||||||
|  | - Fixed geo index creation through reference fields | ||||||
|  | - Added support for args / kwargs when using @queryset_manager | ||||||
|  | - Deref list custom id fix | ||||||
|  |  | ||||||
|  | Changes in 0.6.14 | ||||||
|  | ================= | ||||||
|  | - Fixed error dict with nested validation | ||||||
|  | - Fixed Int/Float fields and not equals None | ||||||
|  | - Exclude tests from installation | ||||||
|  | - Allow tuples for index meta | ||||||
|  | - Fixed use of str in instance checks | ||||||
|  | - Fixed unicode support in transform update | ||||||
|  | - Added support for add_to_set and each | ||||||
|  |  | ||||||
|  | Changes in 0.6.13 | ||||||
|  | ================= | ||||||
|  | - Fixed EmbeddedDocument db_field validation issue | ||||||
|  | - Fixed StringField unicode issue | ||||||
|  | - Fixes __repr__ modifying the cursor | ||||||
|  |  | ||||||
|  | Changes in 0.6.12 | ||||||
|  | ================= | ||||||
|  | - Fixes scalar lookups for primary_key | ||||||
|  | - Fixes error with _delta handling DBRefs | ||||||
|  |  | ||||||
|  | Changes in 0.6.11 | ||||||
|  | ================== | ||||||
|  | - Fixed inconsistency handling None values field attrs | ||||||
|  | - Fixed map_field embedded db_field issue | ||||||
|  | - Fixed .save() _delta issue with DbRefs | ||||||
|  | - Fixed Django TestCase | ||||||
|  | - Added cmp to Embedded Document | ||||||
|  | - Added PULL reverse_delete_rule | ||||||
|  | - Fixed CASCADE delete bug | ||||||
|  | - Fixed db_field data load error | ||||||
|  | - Fixed recursive save with FileField | ||||||
|  |  | ||||||
|  | Changes in 0.6.10 | ||||||
|  | ================= | ||||||
|  | - Fixed basedict / baselist to return super(..) | ||||||
|  | - Promoted BaseDynamicField to DynamicField | ||||||
|  |  | ||||||
|  | Changes in 0.6.9 | ||||||
|  | ================ | ||||||
|  | - Fixed sparse indexes on inherited docs | ||||||
|  | - Removed FileField auto deletion, needs more work maybe 0.7 | ||||||
|  |  | ||||||
|  | Changes in 0.6.8 | ||||||
|  | ================ | ||||||
|  | - Fixed FileField losing reference when no default set | ||||||
|  | - Removed possible race condition from FileField (grid_file) | ||||||
|  | - Added assignment to save, can now do: `b = MyDoc(**kwargs).save()` | ||||||
|  | - Added support for pull operations on nested EmbeddedDocuments | ||||||
|  | - Added support for choices with GenericReferenceFields | ||||||
|  | - Added support for choices with GenericEmbeddedDocumentFields | ||||||
|  | - Fixed Django 1.4 sessions first save data loss | ||||||
|  | - FileField now automatically delete files on .delete() | ||||||
|  | - Fix for GenericReference to_mongo method | ||||||
|  | - Fixed connection regression | ||||||
|  | - Updated Django User document, now allows inheritance | ||||||
|  |  | ||||||
|  | Changes in 0.6.7 | ||||||
|  | ================ | ||||||
|  | - Fixed indexing on '_id' or 'pk' or 'id' | ||||||
|  | - Invalid data from the DB now raises a InvalidDocumentError | ||||||
|  | - Cleaned up the Validation Error - docs and code | ||||||
|  | - Added meta `auto_create_index` so you can disable index creation | ||||||
|  | - Added write concern options to inserts | ||||||
|  | - Fixed typo in meta for index options | ||||||
|  | - Bug fix Read preference now passed correctly | ||||||
|  | - Added support for File like objects for GridFS | ||||||
|  | - Fix for #473 - Dereferencing abstracts | ||||||
|  |  | ||||||
|  | Changes in 0.6.6 | ||||||
|  | ================ | ||||||
|  | - Django 1.4 fixed (finally) | ||||||
|  | - Added tests for Django | ||||||
|  |  | ||||||
|  | Changes in 0.6.5 | ||||||
|  | ================ | ||||||
|  | - More Django updates | ||||||
|  |  | ||||||
|  | Changes in 0.6.4 | ||||||
|  | ================ | ||||||
|  |  | ||||||
|  | - Refactored connection / fixed replicasetconnection | ||||||
|  | - Bug fix for unknown connection alias error message | ||||||
|  | - Sessions support Django 1.3 and Django 1.4 | ||||||
|  | - Minor fix for ReferenceField | ||||||
|  |  | ||||||
|  | Changes in 0.6.3 | ||||||
|  | ================ | ||||||
|  | - Updated sessions for Django 1.4 | ||||||
|  | - Bug fix for updates where listfields contain embedded documents | ||||||
|  | - Bug fix for collection naming and mixins | ||||||
|  |  | ||||||
|  | Changes in 0.6.2 | ||||||
|  | ================ | ||||||
|  | - Updated documentation for ReplicaSet connections | ||||||
|  | - Hack round _types issue with SERVER-5247 - querying other arrays may also cause problems. | ||||||
|  |  | ||||||
|  | Changes in 0.6.1 | ||||||
|  | ================ | ||||||
|  | - Fix for replicaSet connections | ||||||
|  |  | ||||||
|  | Changes in 0.6 | ||||||
|  | ================ | ||||||
|  |  | ||||||
|  | - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7 | ||||||
|  | - Added support for covered indexes when inheritance is off | ||||||
|  | - No longer always upsert on save for items with a '_id' | ||||||
|  | - Error raised if update doesn't have an operation | ||||||
|  | - DeReferencing is now thread safe | ||||||
|  | - Errors raised if trying to perform a join in a query | ||||||
|  | - Updates can now take __raw__ queries | ||||||
|  | - Added custom 2D index declarations | ||||||
|  | - Added replicaSet connection support | ||||||
|  | - Updated deprecated imports from pymongo (safe for pymongo 2.2) | ||||||
|  | - Added uri support for connections | ||||||
|  | - Added scalar for efficiently returning partial data values (aliased to values_list) | ||||||
|  | - Fixed limit skip bug | ||||||
|  | - Improved Inheritance / Mixin | ||||||
|  | - Added sharding support | ||||||
|  | - Added pymongo 2.1 support | ||||||
|  | - Fixed Abstract documents can now declare indexes | ||||||
|  | - Added db_alias support to individual documents | ||||||
|  | - Fixed GridFS documents can now be pickled | ||||||
|  | - Added Now raises an InvalidDocumentError when declaring multiple fields with the same db_field | ||||||
|  | - Added InvalidQueryError when calling with_id with a filter | ||||||
|  | - Added support for DBRefs in distinct() | ||||||
|  | - Fixed issue saving False booleans | ||||||
|  | - Fixed issue with dynamic documents deltas | ||||||
|  | - Added Reverse Delete Rule support to ListFields - MapFields aren't supported | ||||||
|  | - Added customisable cascade kwarg options | ||||||
|  | - Fixed Handle None values for non-required fields | ||||||
|  | - Removed Document._get_subclasses() - no longer required | ||||||
|  | - Fixed bug requiring subclasses when not actually needed | ||||||
|  | - Fixed deletion of dynamic data | ||||||
|  | - Added support for the $elementMatch operator | ||||||
|  | - Added reverse option to SortedListFields | ||||||
|  | - Fixed dereferencing - multi directional list dereferencing | ||||||
|  | - Fixed issue creating indexes with recursive embedded documents | ||||||
|  | - Fixed recursive lookup in _unique_with_indexes | ||||||
|  | - Fixed passing ComplexField defaults to constructor for ReferenceFields | ||||||
|  | - Fixed validation of DictField Int keys | ||||||
|  | - Added optional cascade saving | ||||||
|  | - Fixed dereferencing - max_depth now taken into account | ||||||
|  | - Fixed document mutation saving issue | ||||||
|  | - Fixed positional operator when replacing embedded documents | ||||||
|  | - Added Non-Django Style choices back (you can have either) | ||||||
|  | - Fixed __repr__ of a sliced queryset | ||||||
|  | - Added recursive validation error of documents / complex fields | ||||||
|  | - Fixed breaking during queryset iteration | ||||||
|  | - Added pre and post bulk-insert signals | ||||||
|  | - Added ImageField - requires PIL | ||||||
|  | - Fixed Reference Fields can be None in get_or_create / queries | ||||||
|  | - Fixed accessing pk on an embedded document | ||||||
|  | - Fixed calling a queryset after drop_collection now recreates the collection | ||||||
|  | - Add field name to validation exception messages | ||||||
|  | - Added UUID field | ||||||
|  | - Improved efficiency of .get() | ||||||
|  | - Updated ComplexFields so if required they won't accept empty lists / dicts | ||||||
|  | - Added spec file for rpm-based distributions | ||||||
|  | - Fixed ListField so it doesnt accept strings | ||||||
|  | - Added DynamicDocument and EmbeddedDynamicDocument classes for expando schemas | ||||||
|  |  | ||||||
|  | Changes in v0.5.2 | ||||||
|  | ================= | ||||||
|  |  | ||||||
|  | - A Robust Circular reference bugfix | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Changes in v0.5.1 | ||||||
|  | ================= | ||||||
|  |  | ||||||
|  | - Fixed simple circular reference bug | ||||||
|  |  | ||||||
|  | Changes in v0.5 | ||||||
|  | =============== | ||||||
|  |  | ||||||
|  | - Added InvalidDocumentError - so Document core methods can't be overwritten | ||||||
|  | - Added GenericEmbeddedDocument - so you can embed any type of embeddable document | ||||||
|  | - Added within_polygon support - for those with mongodb 1.9 | ||||||
|  | - Updated sum / average to use map_reduce as db.eval doesn't work in sharded environments | ||||||
|  | - Added where() - filter to allowing users to specify query expressions as Javascript | ||||||
|  | - Added SequenceField - for creating sequential counters | ||||||
|  | - Added update() convenience method to a document | ||||||
|  | - Added cascading saves - so changes to Referenced documents are saved on .save() | ||||||
|  | - Added select_related() support | ||||||
|  | - Added support for the positional operator | ||||||
|  | - Updated geo index checking to be recursive and check in embedded documents | ||||||
|  | - Updated default collection naming convention | ||||||
|  | - Added Document Mixin support | ||||||
|  | - Fixed queryet __repr__ mid iteration | ||||||
|  | - Added hint() support, so cantell Mongo the proper index to use for the query | ||||||
|  | - Fixed issue with inconsitent setting of _cls breaking inherited referencing | ||||||
|  | - Added help_text and verbose_name to fields to help with some form libs | ||||||
|  | - Updated item_frequencies to handle embedded document lookups | ||||||
|  | - Added delta tracking now only sets / unsets explicitly changed fields | ||||||
|  | - Fixed saving so sets updated values rather than overwrites | ||||||
|  | - Added ComplexDateTimeField - Handles datetimes correctly with microseconds | ||||||
|  | - Added ComplexBaseField - for improved flexibility and performance | ||||||
|  | - Added get_FIELD_display() method for easy choice field displaying | ||||||
|  | - Added queryset.slave_okay(enabled) method | ||||||
|  | - Updated queryset.timeout(enabled) and queryset.snapshot(enabled) to be chainable | ||||||
|  | - Added insert method for bulk inserts | ||||||
|  | - Added blinker signal support | ||||||
|  | - Added query_counter context manager for tests | ||||||
|  | - Added map_reduce method item_frequencies and set as default (as db.eval doesn't work in sharded environments) | ||||||
|  | - Added inline_map_reduce option to map_reduce | ||||||
|  | - Updated connection exception so it provides more info on the cause. | ||||||
|  | - Added searching multiple levels deep in ``DictField`` | ||||||
|  | - Added ``DictField`` entries containing strings to use matching operators | ||||||
|  | - Added ``MapField``, similar to ``DictField`` | ||||||
|  | - Added Abstract Base Classes | ||||||
|  | - Added Custom Objects Managers | ||||||
|  | - Added sliced subfields updating | ||||||
|  | - Added ``NotRegistered`` exception if dereferencing ``Document`` not in the registry | ||||||
|  | - Added a write concern for ``save``, ``update``, ``update_one`` and ``get_or_create`` | ||||||
|  | - Added slicing / subarray fetching controls | ||||||
|  | - Fixed various unique index and other index issues | ||||||
|  | - Fixed threaded connection issues | ||||||
|  | - Added spherical geospatial query operators | ||||||
|  | - Updated queryset to handle latest version of pymongo | ||||||
|  |   map_reduce now requires an output. | ||||||
|  | - Added ``Document`` __hash__, __ne__ for pickling | ||||||
|  | - Added ``FileField`` optional size arg for read method | ||||||
|  | - Fixed ``FileField`` seek and tell methods for reading files | ||||||
|  | - Added ``QuerySet.clone`` to support copying querysets | ||||||
|  | - Fixed item_frequencies when using a name that's the same as a native js function | ||||||
|  | - Added reverse delete rules | ||||||
|  | - Fixed issue with unset operation | ||||||
|  | - Fixed Q-object bug | ||||||
|  | - Added ``QuerySet.all_fields`` resets previous .only() and .exclude() | ||||||
|  | - Added ``QuerySet.exclude`` | ||||||
|  | - Added django style choices | ||||||
|  | - Fixed order and filter issue | ||||||
|  | - Added ``QuerySet.only`` subfield support | ||||||
|  | - Added creation_counter to ``BaseField`` allowing fields to be sorted in the | ||||||
|  |   way the user has specified them | ||||||
|  | - Fixed various errors | ||||||
|  | - Added many tests | ||||||
|  |  | ||||||
|  | Changes in v0.4 | ||||||
|  | =============== | ||||||
|  | - Added ``GridFSStorage`` Django storage backend | ||||||
|  | - Added ``FileField`` for GridFS support | ||||||
|  | - New Q-object implementation, which is no longer based on Javascript | ||||||
|  | - Added ``SortedListField`` | ||||||
|  | - Added ``EmailField`` | ||||||
|  | - Added ``GeoPointField`` | ||||||
|  | - Added ``exact`` and ``iexact`` match operators to ``QuerySet`` | ||||||
|  | - Added ``get_document_or_404`` and ``get_list_or_404`` Django shortcuts | ||||||
|  | - Added new query operators for Geo queries | ||||||
|  | - Added ``not`` query operator | ||||||
|  | - Added new update operators: ``pop`` and ``add_to_set`` | ||||||
|  | - Added ``__raw__`` query parameter | ||||||
|  | - Added support for custom querysets | ||||||
|  | - Fixed document inheritance primary key issue | ||||||
|  | - Added support for querying by array element position | ||||||
|  | - Base class can now be defined for ``DictField`` | ||||||
|  | - Fixed MRO error that occurred on document inheritance | ||||||
|  | - Added ``QuerySet.distinct``, ``QuerySet.create``, ``QuerySet.snapshot``, | ||||||
|  |   ``QuerySet.timeout`` and ``QuerySet.all`` | ||||||
|  | - Subsequent calls to ``connect()`` now work | ||||||
|  | - Introduced ``min_length`` for ``StringField`` | ||||||
|  | - Fixed multi-process connection issue | ||||||
|  | - Other minor fixes | ||||||
|  |  | ||||||
|  | Changes in v0.3 | ||||||
|  | =============== | ||||||
|  | - Added MapReduce support | ||||||
|  | - Added ``contains``, ``startswith`` and ``endswith`` query operators (and | ||||||
|  |   case-insensitive versions that are prefixed with 'i') | ||||||
|  | - Deprecated fields' ``name`` parameter, replaced with ``db_field`` | ||||||
|  | - Added ``QuerySet.only`` for only retrieving specific fields | ||||||
|  | - Added ``QuerySet.in_bulk()`` for bulk querying using ids | ||||||
|  | - ``QuerySet``\ s now have a ``rewind()`` method, which is called automatically | ||||||
|  |   when the iterator is exhausted, allowing ``QuerySet``\ s to be reused | ||||||
|  | - Added ``DictField`` | ||||||
|  | - Added ``URLField`` | ||||||
|  | - Added ``DecimalField`` | ||||||
|  | - Added ``BinaryField`` | ||||||
|  | - Added ``GenericReferenceField`` | ||||||
|  | - Added ``get()`` and ``get_or_create()`` methods to ``QuerySet`` | ||||||
|  | - ``ReferenceField``\ s may now reference the document they are defined on | ||||||
|  |   (recursive references) and documents that have not yet been defined | ||||||
|  | - ``Document`` objects may now be compared for equality (equal if _ids are | ||||||
|  |   equal and documents are of same type) | ||||||
|  | - ``QuerySet`` update methods now have an ``upsert`` parameter | ||||||
|  | - Added field name substitution for Javascript code (allows the user to use the | ||||||
|  |   Python names for fields in JS, which are later substituted for the real field | ||||||
|  |   names) | ||||||
|  | - ``Q`` objects now support regex querying | ||||||
|  | - Fixed bug where referenced documents within lists weren't properly | ||||||
|  |   dereferenced | ||||||
|  | - ``ReferenceField``\ s may now be queried using their _id | ||||||
|  | - Fixed bug where ``EmbeddedDocuments`` couldn't be non-polymorphic | ||||||
|  | - ``queryset_manager`` functions now accept two arguments -- the document class | ||||||
|  |   as the first and the queryset as the second | ||||||
|  | - Fixed bug where ``QuerySet.exec_js`` ignored ``Q`` objects | ||||||
|  | - Other minor fixes | ||||||
|  |  | ||||||
| Changes in v0.2.2 | Changes in v0.2.2 | ||||||
| ================= | ================= | ||||||
| - Fixed bug that prevented indexes from being used on ``ListField``\ s | - Fixed bug that prevented indexes from being used on ``ListField``\ s | ||||||
|   | |||||||
| @@ -22,7 +22,7 @@ sys.path.append(os.path.abspath('..')) | |||||||
|  |  | ||||||
| # Add any Sphinx extension module names here, as strings. They can be extensions | # Add any Sphinx extension module names here, as strings. They can be extensions | ||||||
| # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. | ||||||
| extensions = ['sphinx.ext.autodoc'] | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo'] | ||||||
|  |  | ||||||
| # Add any paths that contain templates here, relative to this directory. | # Add any paths that contain templates here, relative to this directory. | ||||||
| templates_path = ['_templates'] | templates_path = ['_templates'] | ||||||
| @@ -38,7 +38,7 @@ master_doc = 'index' | |||||||
|  |  | ||||||
| # General information about the project. | # General information about the project. | ||||||
| project = u'MongoEngine' | project = u'MongoEngine' | ||||||
| copyright = u'2009, Harry Marr' | copyright = u'2009-2012, MongoEngine Authors' | ||||||
|  |  | ||||||
| # The version info for the project you're documenting, acts as replacement for | # The version info for the project you're documenting, acts as replacement for | ||||||
| # |version| and |release|, also used in various other places throughout the | # |version| and |release|, also used in various other places throughout the | ||||||
| @@ -121,7 +121,7 @@ html_theme_path = ['_themes'] | |||||||
| # Add any paths that contain custom static files (such as style sheets) here, | # Add any paths that contain custom static files (such as style sheets) here, | ||||||
| # relative to this directory. They are copied after the builtin static files, | # relative to this directory. They are copied after the builtin static files, | ||||||
| # so a file named "default.css" will overwrite the builtin "default.css". | # so a file named "default.css" will overwrite the builtin "default.css". | ||||||
| html_static_path = ['_static'] | #html_static_path = ['_static'] | ||||||
|  |  | ||||||
| # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, | ||||||
| # using the given strftime format. | # using the given strftime format. | ||||||
|   | |||||||
| @@ -2,6 +2,8 @@ | |||||||
| Using MongoEngine with Django | Using MongoEngine with Django | ||||||
| ============================= | ============================= | ||||||
|  |  | ||||||
|  | .. note :: Updated to support Django 1.4 | ||||||
|  |  | ||||||
| Connecting | Connecting | ||||||
| ========== | ========== | ||||||
| In your **settings.py** file, ignore the standard database settings (unless you | In your **settings.py** file, ignore the standard database settings (unless you | ||||||
| @@ -44,3 +46,45 @@ into you settings module:: | |||||||
|     SESSION_ENGINE = 'mongoengine.django.sessions' |     SESSION_ENGINE = 'mongoengine.django.sessions' | ||||||
|  |  | ||||||
| .. versionadded:: 0.2.1 | .. versionadded:: 0.2.1 | ||||||
|  |  | ||||||
|  | Storage | ||||||
|  | ======= | ||||||
|  | With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`, | ||||||
|  | it is useful to have a Django file storage backend that wraps this. The new | ||||||
|  | storage module is called :class:`~mongoengine.django.storage.GridFSStorage`. | ||||||
|  | Using it is very similar to using the default FileSystemStorage:: | ||||||
|  |  | ||||||
|  |     from mongoengine.django.storage import GridFSStorage | ||||||
|  |     fs = GridFSStorage() | ||||||
|  |  | ||||||
|  |     filename = fs.save('hello.txt', 'Hello, World!') | ||||||
|  |  | ||||||
|  | All of the `Django Storage API methods | ||||||
|  | <http://docs.djangoproject.com/en/dev/ref/files/storage/>`_ have been | ||||||
|  | implemented except :func:`path`. If the filename provided already exists, an | ||||||
|  | underscore and a number (before the file extension, if one exists) will be | ||||||
|  | appended to the filename until the generated filename doesn't exist. The | ||||||
|  | :func:`save` method will return the new filename:: | ||||||
|  |  | ||||||
|  |     >>> fs.exists('hello.txt') | ||||||
|  |     True | ||||||
|  |     >>> fs.open('hello.txt').read() | ||||||
|  |     'Hello, World!' | ||||||
|  |     >>> fs.size('hello.txt') | ||||||
|  |     13 | ||||||
|  |     >>> fs.url('hello.txt') | ||||||
|  |     'http://your_media_url/hello.txt' | ||||||
|  |     >>> fs.open('hello.txt').name | ||||||
|  |     'hello.txt' | ||||||
|  |     >>> fs.listdir() | ||||||
|  |     ([], [u'hello.txt']) | ||||||
|  |  | ||||||
|  | All files will be saved and retrieved in GridFS via the :class:`FileDocument` | ||||||
|  | document, allowing easy access to the files without the GridFSStorage | ||||||
|  | backend:: | ||||||
|  |  | ||||||
|  |     >>> from mongoengine.django.storage import FileDocument | ||||||
|  |     >>> FileDocument.objects() | ||||||
|  |     [<FileDocument: FileDocument object>] | ||||||
|  |  | ||||||
|  | .. versionadded:: 0.4 | ||||||
|   | |||||||
| @@ -3,6 +3,7 @@ | |||||||
| ===================== | ===================== | ||||||
| Connecting to MongoDB | Connecting to MongoDB | ||||||
| ===================== | ===================== | ||||||
|  |  | ||||||
| To connect to a running instance of :program:`mongod`, use the | To connect to a running instance of :program:`mongod`, use the | ||||||
| :func:`~mongoengine.connect` function. The first argument is the name of the | :func:`~mongoengine.connect` function. The first argument is the name of the | ||||||
| database to connect to. If the database does not exist, it will be created. If | database to connect to. If the database does not exist, it will be created. If | ||||||
| @@ -18,3 +19,47 @@ provide :attr:`host` and :attr:`port` arguments to | |||||||
| :func:`~mongoengine.connect`:: | :func:`~mongoengine.connect`:: | ||||||
|  |  | ||||||
|     connect('project1', host='192.168.1.35', port=12345) |     connect('project1', host='192.168.1.35', port=12345) | ||||||
|  |  | ||||||
|  | Uri style connections are also supported as long as you include the database | ||||||
|  | name - just supply the uri as the :attr:`host` to | ||||||
|  | :func:`~mongoengine.connect`:: | ||||||
|  |  | ||||||
|  |     connect('project1', host='mongodb://localhost/database_name') | ||||||
|  |  | ||||||
|  | ReplicaSets | ||||||
|  | =========== | ||||||
|  |  | ||||||
|  | MongoEngine now supports :func:`~pymongo.replica_set_connection.ReplicaSetConnection`. | ||||||
|  | To use one, please use a URI style connection and provide the `replicaSet` name in the | ||||||
|  | connection kwargs. | ||||||
|  |  | ||||||
|  | Multiple Databases | ||||||
|  | ================== | ||||||
|  |  | ||||||
|  | Multiple database support was added in MongoEngine 0.6. To use multiple | ||||||
|  | databases you can use :func:`~mongoengine.connect` and provide an `alias` name | ||||||
|  | for the connection - if no `alias` is provided then "default" is used. | ||||||
|  |  | ||||||
|  | In the background this uses :func:`~mongoengine.register_connection` to | ||||||
|  | store the data and you can register all aliases up front if required. | ||||||
|  |  | ||||||
|  | Individual documents can also support multiple databases by providing a | ||||||
|  | `db_alias` in their meta data.  This allows :class:`~pymongo.dbref.DBRef` objects | ||||||
|  | to point across databases and collections.  Below is an example schema, using | ||||||
|  | 3 different databases to store data:: | ||||||
|  |  | ||||||
|  |         class User(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |             meta = {"db_alias": "user-db"} | ||||||
|  |  | ||||||
|  |         class Book(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |             meta = {"db_alias": "book-db"} | ||||||
|  |  | ||||||
|  |         class AuthorBooks(Document): | ||||||
|  |             author = ReferenceField(User) | ||||||
|  |             book = ReferenceField(Book) | ||||||
|  |  | ||||||
|  |             meta = {"db_alias": "users-books-db"} | ||||||
|   | |||||||
| @@ -22,7 +22,35 @@ objects** as class attributes to the document class:: | |||||||
|  |  | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
|         title = StringField(max_length=200, required=True) |         title = StringField(max_length=200, required=True) | ||||||
|         date_modified = DateTimeField(default=datetime.now) |         date_modified = DateTimeField(default=datetime.datetime.now) | ||||||
|  |  | ||||||
|  | Dynamic document schemas | ||||||
|  | ======================== | ||||||
|  | One of the benefits of MongoDB is dynamic schemas for a collection. Whilst data | ||||||
|  | should be planned and organised (after all, explicit is better than implicit!), | ||||||
|  | there are scenarios where having dynamic / expando style documents is desirable. | ||||||
|  |  | ||||||
|  | :class:`~mongoengine.DynamicDocument` documents work in the same way as | ||||||
|  | :class:`~mongoengine.Document` but any data / attributes set to them will also | ||||||
|  | be saved :: | ||||||
|  |  | ||||||
|  |     from mongoengine import * | ||||||
|  |  | ||||||
|  |     class Page(DynamicDocument): | ||||||
|  |         title = StringField(max_length=200, required=True) | ||||||
|  |  | ||||||
|  |     # Create a new page and add tags | ||||||
|  |     >>> page = Page(title='Using MongoEngine') | ||||||
|  |     >>> page.tags = ['mongodb', 'mongoengine'] | ||||||
|  |     >>> page.save() | ||||||
|  |  | ||||||
|  |     >>> Page.objects(tags='mongoengine').count() | ||||||
|  |     1 | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |  | ||||||
|  |    There is one caveat on Dynamic Documents: fields cannot start with `_` | ||||||
|  |  | ||||||
|  |  | ||||||
| Fields | Fields | ||||||
| ====== | ====== | ||||||
| @@ -34,17 +62,114 @@ not provided. Default values may optionally be a callable, which will be called | |||||||
| to retrieve the value (such as in the above example). The field types available | to retrieve the value (such as in the above example). The field types available | ||||||
| are as follows: | are as follows: | ||||||
|  |  | ||||||
| * :class:`~mongoengine.StringField` | * :class:`~mongoengine.BinaryField` | ||||||
| * :class:`~mongoengine.IntField` | * :class:`~mongoengine.BooleanField` | ||||||
| * :class:`~mongoengine.FloatField` | * :class:`~mongoengine.ComplexDateTimeField` | ||||||
| * :class:`~mongoengine.DateTimeField` | * :class:`~mongoengine.DateTimeField` | ||||||
| * :class:`~mongoengine.ListField` | * :class:`~mongoengine.DecimalField` | ||||||
| * :class:`~mongoengine.ObjectIdField` | * :class:`~mongoengine.DictField` | ||||||
|  | * :class:`~mongoengine.DynamicField` | ||||||
|  | * :class:`~mongoengine.EmailField` | ||||||
| * :class:`~mongoengine.EmbeddedDocumentField` | * :class:`~mongoengine.EmbeddedDocumentField` | ||||||
|  | * :class:`~mongoengine.FileField` | ||||||
|  | * :class:`~mongoengine.FloatField` | ||||||
|  | * :class:`~mongoengine.GenericEmbeddedDocumentField` | ||||||
|  | * :class:`~mongoengine.GenericReferenceField` | ||||||
|  | * :class:`~mongoengine.GeoPointField` | ||||||
|  | * :class:`~mongoengine.ImageField` | ||||||
|  | * :class:`~mongoengine.IntField` | ||||||
|  | * :class:`~mongoengine.ListField` | ||||||
|  | * :class:`~mongoengine.MapField` | ||||||
|  | * :class:`~mongoengine.ObjectIdField` | ||||||
| * :class:`~mongoengine.ReferenceField` | * :class:`~mongoengine.ReferenceField` | ||||||
|  | * :class:`~mongoengine.SequenceField` | ||||||
|  | * :class:`~mongoengine.SortedListField` | ||||||
|  | * :class:`~mongoengine.StringField` | ||||||
|  | * :class:`~mongoengine.URLField` | ||||||
|  | * :class:`~mongoengine.UUIDField` | ||||||
|  |  | ||||||
|  | Field arguments | ||||||
|  | --------------- | ||||||
|  | Each field type can be customized by keyword arguments.  The following keyword | ||||||
|  | arguments can be set on all fields: | ||||||
|  |  | ||||||
|  | :attr:`db_field` (Default: None) | ||||||
|  |     The MongoDB field name. | ||||||
|  |  | ||||||
|  | :attr:`name` (Default: None) | ||||||
|  |     The mongoengine field name. | ||||||
|  |  | ||||||
|  | :attr:`required` (Default: False) | ||||||
|  |     If set to True and the field is not set on the document instance, a | ||||||
|  |     :class:`~mongoengine.ValidationError` will be raised when the document is | ||||||
|  |     validated. | ||||||
|  |  | ||||||
|  | :attr:`default` (Default: None) | ||||||
|  |     A value to use when no value is set for this field. | ||||||
|  |  | ||||||
|  |     The definition of default parameters follows `the general rules on Python | ||||||
|  |     <http://docs.python.org/reference/compound_stmts.html#function-definitions>`__, | ||||||
|  |     which means that some care should be taken when dealing with default mutable objects | ||||||
|  |     (like in :class:`~mongoengine.ListField` or :class:`~mongoengine.DictField`):: | ||||||
|  |  | ||||||
|  |         class ExampleFirst(Document): | ||||||
|  |             # Default an empty list | ||||||
|  |             values = ListField(IntField(), default=list) | ||||||
|  |  | ||||||
|  |         class ExampleSecond(Document): | ||||||
|  |             # Default a set of values | ||||||
|  |             values = ListField(IntField(), default=lambda: [1,2,3]) | ||||||
|  |  | ||||||
|  |         class ExampleDangerous(Document): | ||||||
|  |             # Dangerous: an .append call would mutate this shared default, | ||||||
|  |             # affecting all following objects instead of just this one | ||||||
|  |             values = ListField(IntField(), default=[1,2,3]) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | :attr:`unique` (Default: False) | ||||||
|  |     When True, no documents in the collection will have the same value for this | ||||||
|  |     field. | ||||||
|  |  | ||||||
|  | :attr:`unique_with` (Default: None) | ||||||
|  |     A field name (or list of field names) that when taken together with this | ||||||
|  |     field, will not have two documents in the collection with the same value. | ||||||
|  |  | ||||||
|  | :attr:`primary_key` (Default: False) | ||||||
|  |     When True, use this field as a primary key for the collection. | ||||||
|  |  | ||||||
|  | :attr:`choices` (Default: None) | ||||||
|  |     An iterable (e.g. a list or tuple) of choices to which the value of this | ||||||
|  |     field should be limited. | ||||||
|  |  | ||||||
|  |     Can either be nested tuples of a value (stored in mongo) and a | ||||||
|  |     human readable key :: | ||||||
|  |  | ||||||
|  |         SIZE = (('S', 'Small'), | ||||||
|  |                 ('M', 'Medium'), | ||||||
|  |                 ('L', 'Large'), | ||||||
|  |                 ('XL', 'Extra Large'), | ||||||
|  |                 ('XXL', 'Extra Extra Large')) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |         class Shirt(Document): | ||||||
|  |             size = StringField(max_length=3, choices=SIZE) | ||||||
|  |  | ||||||
|  |     Or a flat iterable just containing values :: | ||||||
|  |  | ||||||
|  |         SIZE = ('S', 'M', 'L', 'XL', 'XXL') | ||||||
|  |  | ||||||
|  |         class Shirt(Document): | ||||||
|  |             size = StringField(max_length=3, choices=SIZE) | ||||||
|  |  | ||||||
|  | :attr:`help_text` (Default: None) | ||||||
|  |     Optional help text to output with the field - used by form libraries | ||||||
|  |  | ||||||
|  | :attr:`verbose_name` (Default: None) | ||||||
|  |     Optional human-readable name for the field - used by form libraries | ||||||
|  |  | ||||||
|  |  | ||||||
| List fields | List fields | ||||||
| ^^^^^^^^^^^ | ----------- | ||||||
| MongoDB allows the storage of lists of items. To add a list of items to a | MongoDB allows the storage of lists of items. To add a list of items to a | ||||||
| :class:`~mongoengine.Document`, use the :class:`~mongoengine.ListField` field | :class:`~mongoengine.Document`, use the :class:`~mongoengine.ListField` field | ||||||
| type. :class:`~mongoengine.ListField` takes another field object as its first | type. :class:`~mongoengine.ListField` takes another field object as its first | ||||||
| @@ -54,7 +179,7 @@ argument, which specifies which type elements may be stored within the list:: | |||||||
|         tags = ListField(StringField(max_length=50)) |         tags = ListField(StringField(max_length=50)) | ||||||
|  |  | ||||||
| Embedded documents | Embedded documents | ||||||
| ^^^^^^^^^^^^^^^^^^ | ------------------ | ||||||
| MongoDB has the ability to embed documents within other documents. Schemata may | MongoDB has the ability to embed documents within other documents. Schemata may | ||||||
| be defined for these embedded documents, just as they may be for regular | be defined for these embedded documents, just as they may be for regular | ||||||
| documents. To create an embedded document, just define a document as usual, but | documents. To create an embedded document, just define a document as usual, but | ||||||
| @@ -71,12 +196,32 @@ document class as the first argument:: | |||||||
|     class Page(Document): |     class Page(Document): | ||||||
|         comments = ListField(EmbeddedDocumentField(Comment)) |         comments = ListField(EmbeddedDocumentField(Comment)) | ||||||
|  |  | ||||||
|     comment1 = Comment('Good work!') |     comment1 = Comment(content='Good work!') | ||||||
|     comment2 = Comment('Nice article!') |     comment2 = Comment(content='Nice article!') | ||||||
|     page = Page(comments=[comment1, comment2]) |     page = Page(comments=[comment1, comment2]) | ||||||
|  |  | ||||||
|  | Dictionary Fields | ||||||
|  | ----------------- | ||||||
|  | Often, an embedded document may be used instead of a dictionary -- generally | ||||||
|  | this is recommended as dictionaries don't support validation or custom field | ||||||
|  | types. However, sometimes you will not know the structure of what you want to | ||||||
|  | store; in this situation a :class:`~mongoengine.DictField` is appropriate:: | ||||||
|  |  | ||||||
|  |     class SurveyResponse(Document): | ||||||
|  |         date = DateTimeField() | ||||||
|  |         user = ReferenceField(User) | ||||||
|  |         answers = DictField() | ||||||
|  |  | ||||||
|  |     survey_response = SurveyResponse(date=datetime.now(), user=request.user) | ||||||
|  |     response_form = ResponseForm(request.POST) | ||||||
|  |     survey_response.answers = response_form.cleaned_data() | ||||||
|  |     survey_response.save() | ||||||
|  |  | ||||||
|  | Dictionaries can store complex data, other dictionaries, lists, references to | ||||||
|  | other objects, so are the most flexible field type available. | ||||||
|  |  | ||||||
| Reference fields | Reference fields | ||||||
| ^^^^^^^^^^^^^^^^ | ---------------- | ||||||
| References may be stored to other documents in the database using the | References may be stored to other documents in the database using the | ||||||
| :class:`~mongoengine.ReferenceField`. Pass in another document class as the | :class:`~mongoengine.ReferenceField`. Pass in another document class as the | ||||||
| first argument to the constructor, then simply assign document objects to the | first argument to the constructor, then simply assign document objects to the | ||||||
| @@ -99,8 +244,140 @@ field:: | |||||||
| The :class:`User` object is automatically turned into a reference behind the | The :class:`User` object is automatically turned into a reference behind the | ||||||
| scenes, and dereferenced when the :class:`Page` object is retrieved. | scenes, and dereferenced when the :class:`Page` object is retrieved. | ||||||
|  |  | ||||||
|  | To add a :class:`~mongoengine.ReferenceField` that references the document | ||||||
|  | being defined, use the string ``'self'`` in place of the document class as the | ||||||
|  | argument to :class:`~mongoengine.ReferenceField`'s constructor. To reference a | ||||||
|  | document that has not yet been defined, use the name of the undefined document | ||||||
|  | as the constructor's argument:: | ||||||
|  |  | ||||||
|  |     class Employee(Document): | ||||||
|  |         name = StringField() | ||||||
|  |         boss = ReferenceField('self') | ||||||
|  |         profile_page = ReferenceField('ProfilePage') | ||||||
|  |  | ||||||
|  |     class ProfilePage(Document): | ||||||
|  |         content = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | .. _one-to-many-with-listfields: | ||||||
|  |  | ||||||
|  | One to Many with ListFields | ||||||
|  | ''''''''''''''''''''''''''' | ||||||
|  |  | ||||||
|  | If you are implementing a one to many relationship via a list of references, | ||||||
|  | then the references are stored as DBRefs and to query you need to pass an | ||||||
|  | instance of the object to the query:: | ||||||
|  |  | ||||||
|  |     class User(Document): | ||||||
|  |         name = StringField() | ||||||
|  |  | ||||||
|  |     class Page(Document): | ||||||
|  |         content = StringField() | ||||||
|  |         authors = ListField(ReferenceField(User)) | ||||||
|  |  | ||||||
|  |     bob = User(name="Bob Jones").save() | ||||||
|  |     john = User(name="John Smith").save() | ||||||
|  |  | ||||||
|  |     Page(content="Test Page", authors=[bob, john]).save() | ||||||
|  |     Page(content="Another Page", authors=[john]).save() | ||||||
|  |  | ||||||
|  |     # Find all pages Bob authored | ||||||
|  |     Page.objects(authors__in=[bob]) | ||||||
|  |  | ||||||
|  |     # Find all pages that both Bob and John have authored | ||||||
|  |     Page.objects(authors__all=[bob, john]) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Dealing with deletion of referred documents | ||||||
|  | ''''''''''''''''''''''''''''''''''''''''''' | ||||||
|  | By default, MongoDB doesn't check the integrity of your data, so deleting | ||||||
|  | documents that other documents still hold references to will lead to consistency | ||||||
|  | issues.  Mongoengine's :class:`ReferenceField` adds some functionality to | ||||||
|  | safeguard against these kinds of database integrity problems, providing each | ||||||
|  | reference with a delete rule specification.  A delete rule is specified by | ||||||
|  | supplying the :attr:`reverse_delete_rule` attributes on the | ||||||
|  | :class:`ReferenceField` definition, like this:: | ||||||
|  |  | ||||||
|  |     class Employee(Document): | ||||||
|  |         ... | ||||||
|  |         profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY) | ||||||
|  |  | ||||||
|  | The declaration in this example means that when an :class:`Employee` object is | ||||||
|  | removed, the :class:`ProfilePage` that belongs to that employee is removed as | ||||||
|  | well.  If a whole batch of employees is removed, all profile pages that are | ||||||
|  | linked are removed as well. | ||||||
|  |  | ||||||
|  | Its value can take any of the following constants: | ||||||
|  |  | ||||||
|  | :const:`mongoengine.DO_NOTHING` | ||||||
|  |   This is the default and won't do anything.  Deletes are fast, but may cause | ||||||
|  |   database inconsistency or dangling references. | ||||||
|  | :const:`mongoengine.DENY` | ||||||
|  |   Deletion is denied if there still exist references to the object being | ||||||
|  |   deleted. | ||||||
|  | :const:`mongoengine.NULLIFY` | ||||||
|  |   Any object's fields still referring to the object being deleted are removed | ||||||
|  |   (using MongoDB's "unset" operation), effectively nullifying the relationship. | ||||||
|  | :const:`mongoengine.CASCADE` | ||||||
|  |   Any object containing fields that are referring to the object being deleted | ||||||
|  |   are deleted first. | ||||||
|  | :const:`mongoengine.PULL` | ||||||
|  |   Removes the reference to the object (using MongoDB's "pull" operation) | ||||||
|  |   from any object's fields of | ||||||
|  |   :class:`~mongoengine.ListField` (:class:`~mongoengine.ReferenceField`). | ||||||
|  |  | ||||||
|  |  | ||||||
|  | .. warning:: | ||||||
|  |    A safety note on setting up these delete rules!  Since the delete rules are | ||||||
|  |    not recorded on the database level by MongoDB itself, but instead at runtime, | ||||||
|  |    in-memory, by the MongoEngine module, it is of the utmost importance | ||||||
|  |    that the module that declares the relationship is loaded **BEFORE** the | ||||||
|  |    delete is invoked. | ||||||
|  |  | ||||||
|  |    If, for example, the :class:`Employee` object lives in the | ||||||
|  |    :mod:`payroll` app, and the :class:`ProfilePage` in the :mod:`people` | ||||||
|  |    app, it is extremely important that the :mod:`people` app is loaded | ||||||
|  |    before any employee is removed, because otherwise, MongoEngine could | ||||||
|  |    never know this relationship exists. | ||||||
|  |  | ||||||
|  |    In Django, be sure to put all apps that have such delete rule declarations in | ||||||
|  |    their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Generic reference fields | ||||||
|  | '''''''''''''''''''''''' | ||||||
|  | A second kind of reference field also exists, | ||||||
|  | :class:`~mongoengine.GenericReferenceField`. This allows you to reference any | ||||||
|  | kind of :class:`~mongoengine.Document`, and hence doesn't take a | ||||||
|  | :class:`~mongoengine.Document` subclass as a constructor argument:: | ||||||
|  |  | ||||||
|  |     class Link(Document): | ||||||
|  |         url = StringField() | ||||||
|  |  | ||||||
|  |     class Post(Document): | ||||||
|  |         title = StringField() | ||||||
|  |  | ||||||
|  |     class Bookmark(Document): | ||||||
|  |         bookmark_object = GenericReferenceField() | ||||||
|  |  | ||||||
|  |     link = Link(url='http://hmarr.com/mongoengine/') | ||||||
|  |     link.save() | ||||||
|  |  | ||||||
|  |     post = Post(title='Using MongoEngine') | ||||||
|  |     post.save() | ||||||
|  |  | ||||||
|  |     Bookmark(bookmark_object=link).save() | ||||||
|  |     Bookmark(bookmark_object=post).save() | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |  | ||||||
|  |    Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less | ||||||
|  |    efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if | ||||||
|  |    you will only be referencing one document type, prefer the standard | ||||||
|  |    :class:`~mongoengine.ReferenceField`. | ||||||
|  |  | ||||||
| Uniqueness constraints | Uniqueness constraints | ||||||
| ^^^^^^^^^^^^^^^^^^^^^^ | ---------------------- | ||||||
| MongoEngine allows you to specify that a field should be unique across a | MongoEngine allows you to specify that a field should be unique across a | ||||||
| collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's | collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's | ||||||
| constructor. If you try to save a document that has the same value for a unique | constructor. If you try to save a document that has the same value for a unique | ||||||
| @@ -112,7 +389,21 @@ either a single field name, or a list or tuple of field names:: | |||||||
|     class User(Document): |     class User(Document): | ||||||
|         username = StringField(unique=True) |         username = StringField(unique=True) | ||||||
|         first_name = StringField() |         first_name = StringField() | ||||||
|         last_name = StringField(unique_with='last_name') |         last_name = StringField(unique_with='first_name') | ||||||
|  |  | ||||||
|  | Skipping Document validation on save | ||||||
|  | ------------------------------------ | ||||||
|  | You can also skip the whole document validation process by setting | ||||||
|  | ``validate=False`` when calling the :meth:`~mongoengine.document.Document.save` | ||||||
|  | method:: | ||||||
|  |  | ||||||
|  |     class Recipient(Document): | ||||||
|  |         name = StringField() | ||||||
|  |         email = EmailField() | ||||||
|  |  | ||||||
|  |     recipient = Recipient(name='admin', email='root@localhost') | ||||||
|  |     recipient.save()               # will raise a ValidationError while | ||||||
|  |     recipient.save(validate=False) # won't | ||||||
|  |  | ||||||
| Document collections | Document collections | ||||||
| ==================== | ==================== | ||||||
| @@ -130,7 +421,7 @@ document class to use:: | |||||||
|         meta = {'collection': 'cmsPage'} |         meta = {'collection': 'cmsPage'} | ||||||
|  |  | ||||||
| Capped collections | Capped collections | ||||||
| ^^^^^^^^^^^^^^^^^^ | ------------------ | ||||||
| A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying | A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying | ||||||
| :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary. | :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary. | ||||||
| :attr:`max_documents` is the maximum number of documents that is allowed to be | :attr:`max_documents` is the maximum number of documents that is allowed to be | ||||||
| @@ -149,9 +440,10 @@ Indexes | |||||||
| You can specify indexes on collections to make querying faster. This is done | You can specify indexes on collections to make querying faster. This is done | ||||||
| by creating a list of index specifications called :attr:`indexes` in the | by creating a list of index specifications called :attr:`indexes` in the | ||||||
| :attr:`~mongoengine.Document.meta` dictionary, where an index specification may | :attr:`~mongoengine.Document.meta` dictionary, where an index specification may | ||||||
| either be a single field name, or a tuple containing multiple field names. A | either be a single field name, a tuple containing multiple field names, or a | ||||||
| direction may be specified on fields by prefixing the field name with a **+** | dictionary containing a full index definition. A direction may be specified on | ||||||
| or a **-** sign. Note that direction only matters on multi-field indexes. :: | fields by prefixing the field name with a **+** or a **-** sign. Note that | ||||||
|  | direction only matters on multi-field indexes. :: | ||||||
|  |  | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
|         title = StringField() |         title = StringField() | ||||||
| @@ -160,6 +452,52 @@ or a **-** sign. Note that direction only matters on multi-field indexes. :: | |||||||
|             'indexes': ['title', ('title', '-rating')] |             'indexes': ['title', ('title', '-rating')] | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  | If a dictionary is passed then the following options are available: | ||||||
|  |  | ||||||
|  | :attr:`fields` (Default: None) | ||||||
|  |     The fields to index. Specified in the same format as described above. | ||||||
|  |  | ||||||
|  | :attr:`types` (Default: True) | ||||||
|  |     Whether the index should have the :attr:`_types` field added automatically | ||||||
|  |     to the start of the index. | ||||||
|  |  | ||||||
|  | :attr:`sparse` (Default: False) | ||||||
|  |     Whether the index should be sparse. | ||||||
|  |  | ||||||
|  | :attr:`unique` (Default: False) | ||||||
|  |     Whether the index should be unique. | ||||||
|  |  | ||||||
|  | .. note :: | ||||||
|  |  | ||||||
|  |     To index embedded files / dictionary fields use 'dot' notation eg: | ||||||
|  |     `rank.title` | ||||||
|  |  | ||||||
|  | .. warning:: | ||||||
|  |  | ||||||
|  |     Inheritance adds extra indices. | ||||||
|  |     If you don't need inheritance for a document, turn inheritance off - | ||||||
|  |     see :ref:`document-inheritance`. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Geospatial indexes | ||||||
|  | ------------------ | ||||||
|  | Geospatial indexes will be automatically created for all | ||||||
|  | :class:`~mongoengine.GeoPointField`\ s | ||||||
|  |  | ||||||
|  | It is also possible to explicitly define geospatial indexes. This is | ||||||
|  | useful if you need to define a geospatial index on a subfield of a | ||||||
|  | :class:`~mongoengine.DictField` or a custom field that contains a | ||||||
|  | point. To create a geospatial index you must prefix the field with the | ||||||
|  | ``*`` sign. :: | ||||||
|  |  | ||||||
|  |     class Place(Document): | ||||||
|  |         location = DictField() | ||||||
|  |         meta = { | ||||||
|  |             'indexes': [ | ||||||
|  |                 '*location.point', | ||||||
|  |             ], | ||||||
|  |         } | ||||||
|  |  | ||||||
| Ordering | Ordering | ||||||
| ======== | ======== | ||||||
| A default ordering can be specified for your | A default ordering can be specified for your | ||||||
| @@ -179,13 +517,13 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. :: | |||||||
|         } |         } | ||||||
|  |  | ||||||
|     blog_post_1 = BlogPost(title="Blog Post #1") |     blog_post_1 = BlogPost(title="Blog Post #1") | ||||||
|     blog_post_1.published_date = datetime(2010, 1, 5, 0, 0 ,0)) |     blog_post_1.published_date = datetime(2010, 1, 5, 0, 0 ,0) | ||||||
|  |  | ||||||
|     blog_post_2 = BlogPost(title="Blog Post #2") |     blog_post_2 = BlogPost(title="Blog Post #2") | ||||||
|     blog_post_2.published_date = datetime(2010, 1, 6, 0, 0 ,0)) |     blog_post_2.published_date = datetime(2010, 1, 6, 0, 0 ,0) | ||||||
|  |  | ||||||
|     blog_post_3 = BlogPost(title="Blog Post #3") |     blog_post_3 = BlogPost(title="Blog Post #3") | ||||||
|     blog_post_3.published_date = datetime(2010, 1, 7, 0, 0 ,0)) |     blog_post_3.published_date = datetime(2010, 1, 7, 0, 0 ,0) | ||||||
|  |  | ||||||
|     blog_post_1.save() |     blog_post_1.save() | ||||||
|     blog_post_2.save() |     blog_post_2.save() | ||||||
| @@ -194,14 +532,37 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. :: | |||||||
|     # get the "first" BlogPost using default ordering |     # get the "first" BlogPost using default ordering | ||||||
|     # from BlogPost.meta.ordering |     # from BlogPost.meta.ordering | ||||||
|     latest_post = BlogPost.objects.first() |     latest_post = BlogPost.objects.first() | ||||||
|     self.assertEqual(latest_post.title, "Blog Post #3") |     assert latest_post.title == "Blog Post #3" | ||||||
|  |  | ||||||
|     # override default ordering, order BlogPosts by "published_date" |     # override default ordering, order BlogPosts by "published_date" | ||||||
|     first_post = BlogPost.objects.order_by("+published_date").first() |     first_post = BlogPost.objects.order_by("+published_date").first() | ||||||
|     self.assertEqual(first_post.title, "Blog Post #1") |     assert first_post.title == "Blog Post #1" | ||||||
|  |  | ||||||
|  | Shard keys | ||||||
|  | ========== | ||||||
|  |  | ||||||
|  | If your collection is sharded, then you need to specify the shard key as a tuple, | ||||||
|  | using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`. | ||||||
|  | This ensures that the shard key is sent with the query when calling the | ||||||
|  | :meth:`~mongoengine.document.Document.save` or | ||||||
|  | :meth:`~mongoengine.document.Document.update` method on an existing | ||||||
|  | :class:`~mongoengine.Document` instance:: | ||||||
|  |  | ||||||
|  |     class LogEntry(Document): | ||||||
|  |         machine = StringField() | ||||||
|  |         app = StringField() | ||||||
|  |         timestamp = DateTimeField() | ||||||
|  |         data = StringField() | ||||||
|  |  | ||||||
|  |         meta = { | ||||||
|  |             'shard_key': ('machine', 'timestamp',) | ||||||
|  |         } | ||||||
|  |  | ||||||
|  | .. _document-inheritance: | ||||||
|  |  | ||||||
| Document inheritance | Document inheritance | ||||||
| ==================== | ==================== | ||||||
|  |  | ||||||
| To create a specialised type of a :class:`~mongoengine.Document` you have | To create a specialised type of a :class:`~mongoengine.Document` you have | ||||||
| defined, you may subclass it and add any extra fields or methods you may need. | defined, you may subclass it and add any extra fields or methods you may need. | ||||||
| As this new class is not a direct subclass of | As this new class is not a direct subclass of | ||||||
| @@ -213,12 +574,17 @@ convenient and efficient retrieval of related documents:: | |||||||
|     class Page(Document): |     class Page(Document): | ||||||
|         title = StringField(max_length=200, required=True) |         title = StringField(max_length=200, required=True) | ||||||
|  |  | ||||||
|  |         meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|     # Also stored in the collection named 'page' |     # Also stored in the collection named 'page' | ||||||
|     class DatedPage(Page): |     class DatedPage(Page): | ||||||
|         date = DateTimeField() |         date = DateTimeField() | ||||||
|  |  | ||||||
|  | .. note:: From 0.7 onwards you must declare `allow_inheritance` in the document meta. | ||||||
|  |  | ||||||
|  |  | ||||||
| Working with existing data | Working with existing data | ||||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ | -------------------------- | ||||||
| To enable correct retrieval of documents involved in this kind of hierarchy, | To enable correct retrieval of documents involved in this kind of hierarchy, | ||||||
| two extra attributes are stored on each document in the database: :attr:`_cls` | two extra attributes are stored on each document in the database: :attr:`_cls` | ||||||
| and :attr:`_types`. These are hidden from the user through the MongoEngine | and :attr:`_types`. These are hidden from the user through the MongoEngine | ||||||
|   | |||||||
| @@ -17,21 +17,43 @@ attribute syntax:: | |||||||
|     'Example Page' |     'Example Page' | ||||||
|  |  | ||||||
| Saving and deleting documents | Saving and deleting documents | ||||||
| ----------------------------- | ============================= | ||||||
| To save the document to the database, call the | MongoEngine tracks changes to documents to provide efficient saving.  To save | ||||||
| :meth:`~mongoengine.Document.save` method. If the document does not exist in | the document to the database, call the :meth:`~mongoengine.Document.save` method. | ||||||
| the database, it will be created. If it does already exist, it will be | If the document does not exist in the database, it will be created. If it does | ||||||
| updated. | already exist, then any changes will be updated atomically.  For example:: | ||||||
|  |  | ||||||
| To delete a document, call the :meth:`~mongoengine.Document.delete` method. |     >>> page = Page(title="Test Page") | ||||||
| Note that this will only work if the document exists in the database and has a |     >>> page.save()  # Performs an insert | ||||||
| valide :attr:`id`. |     >>> page.title = "My Page" | ||||||
|  |     >>> page.save()  # Performs an atomic set on the title field. | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |  | ||||||
|  |     Changes to documents are tracked and on the whole perform `set` operations. | ||||||
|  |  | ||||||
|  |     * ``list_field.pop(0)`` - *sets* the resulting list | ||||||
|  |     * ``del(list_field)``   - *unsets* whole list | ||||||
|  |  | ||||||
| .. seealso:: | .. seealso:: | ||||||
|     :ref:`guide-atomic-updates` |     :ref:`guide-atomic-updates` | ||||||
|  |  | ||||||
|  | Cascading Saves | ||||||
|  | --------------- | ||||||
|  | If your document contains :class:`~mongoengine.ReferenceField` or | ||||||
|  | :class:`~mongoengine.GenericReferenceField` objects, then by default the | ||||||
|  | :meth:`~mongoengine.Document.save` method will automatically save any changes to | ||||||
|  | those objects as well.  If this is not desired passing :attr:`cascade` as False | ||||||
|  | to the save method turns this feature off. | ||||||
|  |  | ||||||
|  | Deleting documents | ||||||
|  | ------------------ | ||||||
|  | To delete a document, call the :meth:`~mongoengine.Document.delete` method. | ||||||
|  | Note that this will only work if the document exists in the database and has a | ||||||
|  | valid :attr:`id`. | ||||||
|  |  | ||||||
| Document IDs | Document IDs | ||||||
| ------------ | ============ | ||||||
| Each document in the database has a unique id. This may be accessed through the | Each document in the database has a unique id. This may be accessed through the | ||||||
| :attr:`id` attribute on :class:`~mongoengine.Document` objects. Usually, the id | :attr:`id` attribute on :class:`~mongoengine.Document` objects. Usually, the id | ||||||
| will be generated automatically by the database server when the object is saved, | will be generated automatically by the database server when the object is saved, | ||||||
| @@ -59,7 +81,15 @@ you may still use :attr:`id` to access the primary key if you want:: | |||||||
|     >>> bob.id == bob.email == 'bob@example.com' |     >>> bob.id == bob.email == 'bob@example.com' | ||||||
|     True |     True | ||||||
|  |  | ||||||
|  | You can also access the document's "primary key" using the :attr:`pk` field; it | ||||||
|  | is an alias of :attr:`id`:: | ||||||
|  |  | ||||||
|  |     >>> page = Page(title="Another Test Page") | ||||||
|  |     >>> page.save() | ||||||
|  |     >>> page.id == page.pk | ||||||
|  |  | ||||||
| .. note:: | .. note:: | ||||||
|  |  | ||||||
|    If you define your own primary key field, the field implicitly becomes |    If you define your own primary key field, the field implicitly becomes | ||||||
|    required, so a :class:`ValidationError` will be thrown if you don't provide |    required, so a :class:`~mongoengine.ValidationError` will be thrown if | ||||||
|    it. |    you don't provide it. | ||||||
|   | |||||||
							
								
								
									
										84
									
								
								docs/guide/gridfs.rst
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										84
									
								
								docs/guide/gridfs.rst
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,84 @@ | |||||||
|  | ====== | ||||||
|  | GridFS | ||||||
|  | ====== | ||||||
|  |  | ||||||
|  | .. versionadded:: 0.4 | ||||||
|  |  | ||||||
|  | Writing | ||||||
|  | ------- | ||||||
|  |  | ||||||
|  | GridFS support comes in the form of the :class:`~mongoengine.FileField` field | ||||||
|  | object. This field acts as a file-like object and provides a couple of | ||||||
|  | different ways of inserting and retrieving data. Arbitrary metadata such as | ||||||
|  | content type can also be stored alongside the files. In the following example, | ||||||
|  | a document is created to store details about animals, including a photo:: | ||||||
|  |  | ||||||
|  |     class Animal(Document): | ||||||
|  |         genus = StringField() | ||||||
|  |         family = StringField() | ||||||
|  |         photo = FileField() | ||||||
|  |  | ||||||
|  |     marmot = Animal(genus='Marmota', family='Sciuridae') | ||||||
|  |  | ||||||
|  |     marmot_photo = open('marmot.jpg', 'r')      # Retrieve a photo from disk | ||||||
|  |     marmot.photo = marmot_photo                 # Store photo in the document | ||||||
|  |     marmot.photo.content_type = 'image/jpeg'    # Store metadata | ||||||
|  |  | ||||||
|  |     marmot.save() | ||||||
|  |  | ||||||
|  | Another way of writing to a :class:`~mongoengine.FileField` is to use the | ||||||
|  | :func:`put` method. This allows for metadata to be stored in the same call as | ||||||
|  | the file:: | ||||||
|  |  | ||||||
|  |     marmot.photo.put(marmot_photo, content_type='image/jpeg') | ||||||
|  |  | ||||||
|  |     marmot.save() | ||||||
|  |  | ||||||
|  | Retrieval | ||||||
|  | --------- | ||||||
|  |  | ||||||
|  | So using the :class:`~mongoengine.FileField` is just like using any other | ||||||
|  | field. The file can also be retrieved just as easily:: | ||||||
|  |  | ||||||
|  |     marmot = Animal.objects(genus='Marmota').first() | ||||||
|  |     photo = marmot.photo.read() | ||||||
|  |     content_type = marmot.photo.content_type | ||||||
|  |  | ||||||
|  | Streaming | ||||||
|  | --------- | ||||||
|  |  | ||||||
|  | Streaming data into a :class:`~mongoengine.FileField` is achieved in a | ||||||
|  | slightly different manner.  First, a new file must be created by calling the | ||||||
|  | :func:`new_file` method. Data can then be written using :func:`write`:: | ||||||
|  |  | ||||||
|  |     marmot.photo.new_file() | ||||||
|  |     marmot.photo.write('some_image_data') | ||||||
|  |     marmot.photo.write('some_more_image_data') | ||||||
|  |     marmot.photo.close() | ||||||
|  |  | ||||||
|  |     marmot.photo.save() | ||||||
|  |  | ||||||
|  | Deletion | ||||||
|  | -------- | ||||||
|  |  | ||||||
|  | Deleting stored files is achieved with the :func:`delete` method:: | ||||||
|  |  | ||||||
|  |     marmot.photo.delete() | ||||||
|  |  | ||||||
|  | .. warning:: | ||||||
|  |  | ||||||
|  |     The FileField in a Document actually only stores the ID of a file in a | ||||||
|  |     separate GridFS collection. This means that deleting a document | ||||||
|  |     with a defined FileField does not actually delete the file. You must be | ||||||
|  |     careful to delete any files in a Document as above before deleting the | ||||||
|  |     Document itself. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Replacing files | ||||||
|  | --------------- | ||||||
|  |  | ||||||
|  | Files can be replaced with the :func:`replace` method. This works just like | ||||||
|  | the :func:`put` method so even metadata can (and should) be replaced:: | ||||||
|  |  | ||||||
|  |     another_marmot = open('another_marmot.png', 'r') | ||||||
|  |     marmot.photo.replace(another_marmot, content_type='image/png') | ||||||
| @@ -10,3 +10,5 @@ User Guide | |||||||
|    defining-documents |    defining-documents | ||||||
|    document-instances |    document-instances | ||||||
|    querying |    querying | ||||||
|  |    gridfs | ||||||
|  |    signals | ||||||
|   | |||||||
| @@ -1,31 +1,31 @@ | |||||||
| ====================== | ====================== | ||||||
| Installing MongoEngine | Installing MongoEngine | ||||||
| ====================== | ====================== | ||||||
|  |  | ||||||
| To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_ | To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_ | ||||||
| and ensure it is running in an accessible location. You will also need | and ensure it is running in an accessible location. You will also need | ||||||
| `PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you | `PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you | ||||||
| install MongoEngine using setuptools, then the dependencies will be handled for | install MongoEngine using setuptools, then the dependencies will be handled for | ||||||
| you. | you. | ||||||
|  |  | ||||||
| MongoEngine is available on PyPI, so to use it you can use  | MongoEngine is available on PyPI, so to use it you can use :program:`pip`: | ||||||
| :program:`easy_install`: |  | ||||||
|  |  | ||||||
| .. code-block:: console | .. code-block:: console | ||||||
|  |  | ||||||
|     # easy_install mongoengine |     $ pip install mongoengine | ||||||
|  |  | ||||||
| Alternatively, if you don't have setuptools installed, `download it from PyPi | Alternatively, if you don't have setuptools installed, `download it from PyPi | ||||||
| <http://pypi.python.org/pypi/mongoengine/>`_ and run | <http://pypi.python.org/pypi/mongoengine/>`_ and run | ||||||
|  |  | ||||||
| .. code-block:: console | .. code-block:: console | ||||||
|  |  | ||||||
|     # python setup.py install |     $ python setup.py install | ||||||
|  |  | ||||||
| To use the bleeding-edge version of MongoEngine, you can get the source from | To use the bleeding-edge version of MongoEngine, you can get the source from | ||||||
| `GitHub <http://github.com/hmarr/mongoengine/>`_ and install it as above: | `GitHub <http://github.com/hmarr/mongoengine/>`_ and install it as above: | ||||||
|  |  | ||||||
| .. code-block:: console | .. code-block:: console | ||||||
|  |  | ||||||
|     # git clone git://github.com/hmarr/mongoengine |     $ git clone git://github.com/hmarr/mongoengine | ||||||
|     # cd mongoengine |     $ cd mongoengine | ||||||
|     # python setup.py install |     $ python setup.py install | ||||||
|   | |||||||
| @@ -5,16 +5,24 @@ Querying the database | |||||||
| is used for accessing the objects in the database associated with the class. | is used for accessing the objects in the database associated with the class. | ||||||
| The :attr:`objects` attribute is actually a | The :attr:`objects` attribute is actually a | ||||||
| :class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new | :class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new | ||||||
| a new :class:`~mongoengine.queryset.QuerySet` object on access. The | :class:`~mongoengine.queryset.QuerySet` object on access. The | ||||||
| :class:`~mongoengine.queryset.QuerySet` object may may be iterated over to | :class:`~mongoengine.queryset.QuerySet` object may be iterated over to | ||||||
| fetch documents from the database:: | fetch documents from the database:: | ||||||
|  |  | ||||||
|     # Prints out the names of all the users in the database |     # Prints out the names of all the users in the database | ||||||
|     for user in User.objects: |     for user in User.objects: | ||||||
|         print user.name |         print user.name | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |  | ||||||
|  |    Once the iteration finishes (when :class:`StopIteration` is raised), | ||||||
|  |    :meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the | ||||||
|  |    :class:`~mongoengine.queryset.QuerySet` may be iterated over again. The | ||||||
|  |    results of the first iteration are *not* cached, so the database will be hit | ||||||
|  |    each time the :class:`~mongoengine.queryset.QuerySet` is iterated over. | ||||||
|  |  | ||||||
| Filtering queries | Filtering queries | ||||||
| ----------------- | ================= | ||||||
| The query may be filtered by calling the | The query may be filtered by calling the | ||||||
| :class:`~mongoengine.queryset.QuerySet` object with field lookup keyword | :class:`~mongoengine.queryset.QuerySet` object with field lookup keyword | ||||||
| arguments. The keys in the keyword arguments correspond to fields on the | arguments. The keys in the keyword arguments correspond to fields on the | ||||||
| @@ -32,8 +40,62 @@ syntax:: | |||||||
|     # been written by a user whose 'country' field is set to 'uk' |     # been written by a user whose 'country' field is set to 'uk' | ||||||
|     uk_pages = Page.objects(author__country='uk') |     uk_pages = Page.objects(author__country='uk') | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Query operators | ||||||
|  | =============== | ||||||
|  | Operators other than equality may also be used in queries; just attach the | ||||||
|  | operator name to a key with a double-underscore:: | ||||||
|  |  | ||||||
|  |     # Only find users whose age is 18 or less | ||||||
|  |     young_users = Users.objects(age__lte=18) | ||||||
|  |  | ||||||
|  | Available operators are as follows: | ||||||
|  |  | ||||||
|  | * ``ne`` -- not equal to | ||||||
|  | * ``lt`` -- less than | ||||||
|  | * ``lte`` -- less than or equal to | ||||||
|  | * ``gt`` -- greater than | ||||||
|  | * ``gte`` -- greater than or equal to | ||||||
|  | * ``not`` -- negate a standard check, may be used before other operators (e.g. | ||||||
|  |   ``Q(age__not__mod=5)``) | ||||||
|  | * ``in`` -- value is in list (a list of values should be provided) | ||||||
|  | * ``nin`` -- value is not in list (a list of values should be provided) | ||||||
|  | * ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values | ||||||
|  | * ``all`` -- every item in list of values provided is in array | ||||||
|  | * ``size`` -- the size of the array is | ||||||
|  | * ``exists`` -- value for field exists | ||||||
|  |  | ||||||
|  | The following operators are available as shortcuts to querying with regular | ||||||
|  | expressions: | ||||||
|  |  | ||||||
|  | * ``exact`` -- string field exactly matches value | ||||||
|  | * ``iexact`` -- string field exactly matches value (case insensitive) | ||||||
|  | * ``contains`` -- string field contains value | ||||||
|  | * ``icontains`` -- string field contains value (case insensitive) | ||||||
|  | * ``startswith`` -- string field starts with value | ||||||
|  | * ``istartswith`` -- string field starts with value (case insensitive) | ||||||
|  | * ``endswith`` -- string field ends with value | ||||||
|  | * ``iendswith`` -- string field ends with value (case insensitive) | ||||||
|  | * ``match``  -- performs an $elemMatch so you can match an entire document within an array | ||||||
|  |  | ||||||
|  | There are a few special operators for performing geographical queries that | ||||||
|  | may be used with :class:`~mongoengine.GeoPointField`\ s: | ||||||
|  |  | ||||||
|  | * ``within_distance`` -- provide a list containing a point and a maximum | ||||||
|  |   distance (e.g. [(41.342, -87.653), 5]) | ||||||
|  | * ``within_spherical_distance`` -- Same as above but using the spherical geo model | ||||||
|  |   (e.g. [(41.342, -87.653), 5/earth_radius]) | ||||||
|  | * ``near`` -- order the documents by how close they are to a given point | ||||||
|  | * ``near_sphere`` -- Same as above but using the spherical geo model | ||||||
|  | * ``within_box`` -- filter documents to those within a given bounding box (e.g. | ||||||
|  |   [(35.0, -125.0), (40.0, -100.0)]) | ||||||
|  | * ``within_polygon`` -- filter documents to those within a given polygon (e.g. | ||||||
|  |   [(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]). | ||||||
|  |   .. note:: Requires Mongo Server 2.0 | ||||||
|  |  | ||||||
|  |  | ||||||
| Querying lists | Querying lists | ||||||
| ^^^^^^^^^^^^^^ | -------------- | ||||||
| On most fields, this syntax will look up documents where the field specified | On most fields, this syntax will look up documents where the field specified | ||||||
| matches the given value exactly, but when the field refers to a | matches the given value exactly, but when the field refers to a | ||||||
| :class:`~mongoengine.ListField`, a single item may be provided, in which case | :class:`~mongoengine.ListField`, a single item may be provided, in which case | ||||||
| @@ -46,30 +108,42 @@ lists that contain that item will be matched:: | |||||||
|     # 'tags' list |     # 'tags' list | ||||||
|     Page.objects(tags='coding') |     Page.objects(tags='coding') | ||||||
|  |  | ||||||
| Query operators | It is possible to query by position in a list by using a numerical value as a | ||||||
| --------------- | query operator. So if you wanted to find all pages whose first tag was ``db``, | ||||||
| Operators other than equality may also be used in queries; just attach the | you could use the following query:: | ||||||
| operator name to a key with a double-underscore:: |  | ||||||
|  |  | ||||||
|     # Only find users whose age is 18 or less |     Page.objects(tags__0='db') | ||||||
|     young_users = Users.objects(age__lte=18) |  | ||||||
|  |  | ||||||
| Available operators are as follows: | If you only want to fetch part of a list eg: you want to paginate a list, then | ||||||
|  | the `slice` operator is required:: | ||||||
|  |  | ||||||
| * ``neq`` -- not equal to |     # comments - skip 5, limit 10 | ||||||
| * ``lt`` -- less than |     Page.objects.fields(slice__comments=[5, 10]) | ||||||
| * ``lte`` -- less than or equal to |  | ||||||
| * ``gt`` -- greater than | For updating documents, if you don't know the position in a list, you can use | ||||||
| * ``gte`` -- greater than or equal to | the $ positional operator :: | ||||||
| * ``in`` -- value is in list (a list of values should be provided) |  | ||||||
| * ``nin`` -- value is not in list (a list of values should be provided) |     Post.objects(comments__by="joe").update(**{'inc__comments__$__votes': 1}) | ||||||
| * ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values |  | ||||||
| * ``all`` -- every item in array is in list of values provided | However, this doesn't map well to the syntax so you can also use a capital S instead :: | ||||||
| * ``size`` -- the size of the array is  |  | ||||||
| * ``exists`` -- value for field exists |     Post.objects(comments__by="joe").update(inc__comments__S__votes=1) | ||||||
|  |  | ||||||
|  |     .. note:: Due to a MongoDB limitation, the $ operator only applies to the first matched item in the query. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Raw queries | ||||||
|  | ----------- | ||||||
|  | It is possible to provide a raw PyMongo query as a query parameter, which will | ||||||
|  | be integrated directly into the query. This is done using the ``__raw__`` | ||||||
|  | keyword argument:: | ||||||
|  |  | ||||||
|  |     Page.objects(__raw__={'tags': 'coding'}) | ||||||
|  |  | ||||||
|  | .. versionadded:: 0.4 | ||||||
|  |  | ||||||
| Limiting and skipping results | Limiting and skipping results | ||||||
| ----------------------------- | ============================= | ||||||
| Just as with traditional ORMs, you may limit the number of results returned, or | Just as with traditional ORMs, you may limit the number of results returned, or | ||||||
| skip a number of results in your query. | skip a number of results in your query. | ||||||
| :meth:`~mongoengine.queryset.QuerySet.limit` and | :meth:`~mongoengine.queryset.QuerySet.limit` and | ||||||
| @@ -86,15 +160,110 @@ achieving this is using array-slicing syntax:: | |||||||
|     # 5 users, starting from the 10th user found |     # 5 users, starting from the 10th user found | ||||||
|     users = User.objects[10:15] |     users = User.objects[10:15] | ||||||
|  |  | ||||||
|  | You may also index the query to retrieve a single result. If an item at that | ||||||
|  | index does not exist, an :class:`IndexError` will be raised. A shortcut for | ||||||
|  | retrieving the first result and returning :attr:`None` if no result exists is | ||||||
|  | provided (:meth:`~mongoengine.queryset.QuerySet.first`):: | ||||||
|  |  | ||||||
|  |     >>> # Make sure there are no users | ||||||
|  |     >>> User.drop_collection() | ||||||
|  |     >>> User.objects[0] | ||||||
|  |     IndexError: list index out of range | ||||||
|  |     >>> User.objects.first() == None | ||||||
|  |     True | ||||||
|  |     >>> User(name='Test User').save() | ||||||
|  |     >>> User.objects[0] == User.objects.first() | ||||||
|  |     True | ||||||
|  |  | ||||||
|  | Retrieving unique results | ||||||
|  | ------------------------- | ||||||
|  | To retrieve a result that should be unique in the collection, use | ||||||
|  | :meth:`~mongoengine.queryset.QuerySet.get`. This will raise | ||||||
|  | :class:`~mongoengine.queryset.DoesNotExist` if no document matches the query, | ||||||
|  | and :class:`~mongoengine.queryset.MultipleObjectsReturned` if more than one | ||||||
|  | document matched the query. | ||||||
|  |  | ||||||
|  | A variation of this method exists, | ||||||
|  | :meth:`~mongoengine.queryset.QuerySet.get_or_create`, that will create a new | ||||||
|  | document with the query arguments if no documents match the query. An | ||||||
|  | additional keyword argument, :attr:`defaults` may be provided, which will be | ||||||
|  | used as default values for the new document, in the case that it should need | ||||||
|  | to be created:: | ||||||
|  |  | ||||||
|  |     >>> a, created = User.objects.get_or_create(name='User A', defaults={'age': 30}) | ||||||
|  |     >>> b, created = User.objects.get_or_create(name='User A', defaults={'age': 40}) | ||||||
|  |     >>> a.name == b.name and a.age == b.age | ||||||
|  |     True | ||||||
|  |  | ||||||
|  | Default Document queries | ||||||
|  | ======================== | ||||||
|  | By default, the objects :attr:`~mongoengine.Document.objects` attribute on a | ||||||
|  | document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter | ||||||
|  | the collection -- it returns all objects. This may be changed by defining a | ||||||
|  | method on a document that modifies a queryset. The method should accept two | ||||||
|  | arguments -- :attr:`doc_cls` and :attr:`queryset`. The first argument is the | ||||||
|  | :class:`~mongoengine.Document` class that the method is defined on (in this | ||||||
|  | sense, the method is more like a :func:`classmethod` than a regular method), | ||||||
|  | and the second argument is the initial queryset. The method needs to be | ||||||
|  | decorated with :func:`~mongoengine.queryset.queryset_manager` in order for it | ||||||
|  | to be recognised. :: | ||||||
|  |  | ||||||
|  |     class BlogPost(Document): | ||||||
|  |         title = StringField() | ||||||
|  |         date = DateTimeField() | ||||||
|  |  | ||||||
|  |         @queryset_manager | ||||||
|  |         def objects(doc_cls, queryset): | ||||||
|  |             # This may actually also be done by defining a default ordering for | ||||||
|  |             # the document, but this illustrates the use of manager methods | ||||||
|  |             return queryset.order_by('-date') | ||||||
|  |  | ||||||
|  | You don't need to call your method :attr:`objects` -- you may define as many | ||||||
|  | custom manager methods as you like:: | ||||||
|  |  | ||||||
|  |     class BlogPost(Document): | ||||||
|  |         title = StringField() | ||||||
|  |         published = BooleanField() | ||||||
|  |  | ||||||
|  |         @queryset_manager | ||||||
|  |         def live_posts(doc_cls, queryset): | ||||||
|  |             return queryset.filter(published=True) | ||||||
|  |  | ||||||
|  |     BlogPost(title='test1', published=False).save() | ||||||
|  |     BlogPost(title='test2', published=True).save() | ||||||
|  |     assert len(BlogPost.objects) == 2 | ||||||
|  |     assert len(BlogPost.live_posts()) == 1 | ||||||
|  |  | ||||||
|  | Custom QuerySets | ||||||
|  | ================ | ||||||
|  | Should you want to add custom methods for interacting with or filtering | ||||||
|  | documents, extending the :class:`~mongoengine.queryset.QuerySet` class may be | ||||||
|  | the way to go. To use a custom :class:`~mongoengine.queryset.QuerySet` class on | ||||||
|  | a document, set ``queryset_class`` to the custom class in a | ||||||
|  | :class:`~mongoengine.Document`\ s ``meta`` dictionary:: | ||||||
|  |  | ||||||
|  |     class AwesomerQuerySet(QuerySet): | ||||||
|  |  | ||||||
|  |         def get_awesome(self): | ||||||
|  |             return self.filter(awesome=True) | ||||||
|  |  | ||||||
|  |     class Page(Document): | ||||||
|  |         meta = {'queryset_class': AwesomerQuerySet} | ||||||
|  |  | ||||||
|  |     # To call: | ||||||
|  |     Page.objects.get_awesome() | ||||||
|  |  | ||||||
|  | .. versionadded:: 0.4 | ||||||
|  |  | ||||||
| Aggregation | Aggregation | ||||||
| ----------- | =========== | ||||||
| MongoDB provides some aggregation methods out of the box, but there are not as | MongoDB provides some aggregation methods out of the box, but there are not as | ||||||
| many as you typically get with an RDBMS. MongoEngine provides a wrapper around | many as you typically get with an RDBMS. MongoEngine provides a wrapper around | ||||||
| the built-in methods and provides some of its own, which are implemented as | the built-in methods and provides some of its own, which are implemented as | ||||||
| Javascript code that is executed on the database server. | Javascript code that is executed on the database server. | ||||||
|  |  | ||||||
| Counting results | Counting results | ||||||
| ^^^^^^^^^^^^^^^^ | ---------------- | ||||||
| Just as with limiting and skipping results, there is a method on | Just as with limiting and skipping results, there is a method on | ||||||
| :class:`~mongoengine.queryset.QuerySet` objects -- | :class:`~mongoengine.queryset.QuerySet` objects -- | ||||||
| :meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic | :meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic | ||||||
| @@ -103,13 +272,14 @@ way of achieving this:: | |||||||
|     num_users = len(User.objects) |     num_users = len(User.objects) | ||||||
|  |  | ||||||
| Further aggregation | Further aggregation | ||||||
| ^^^^^^^^^^^^^^^^^^^ | ------------------- | ||||||
| You may sum over the values of a specific field on documents using | You may sum over the values of a specific field on documents using | ||||||
| :meth:`~mongoengine.queryset.QuerySet.sum`:: | :meth:`~mongoengine.queryset.QuerySet.sum`:: | ||||||
|  |  | ||||||
|     yearly_expense = Employee.objects.sum('salary') |     yearly_expense = Employee.objects.sum('salary') | ||||||
|  |  | ||||||
| .. note:: | .. note:: | ||||||
|  |  | ||||||
|    If the field isn't present on a document, that document will be ignored from |    If the field isn't present on a document, that document will be ignored from | ||||||
|    the sum. |    the sum. | ||||||
|  |  | ||||||
| @@ -132,8 +302,71 @@ would be generating "tag-clouds":: | |||||||
|     from operator import itemgetter |     from operator import itemgetter | ||||||
|     top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] |     top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Query efficiency and performance | ||||||
|  | ================================ | ||||||
|  |  | ||||||
|  | There are a couple of methods to improve efficiency when querying, reducing the | ||||||
|  | information returned by the query or by efficient dereferencing. | ||||||
|  |  | ||||||
|  | Retrieving a subset of fields | ||||||
|  | ----------------------------- | ||||||
|  |  | ||||||
|  | Sometimes a subset of fields on a :class:`~mongoengine.Document` is required, | ||||||
|  | and for efficiency only these should be retrieved from the database. This issue | ||||||
|  | is especially important for MongoDB, as fields may often be extremely large | ||||||
|  | (e.g. a :class:`~mongoengine.ListField` of | ||||||
|  | :class:`~mongoengine.EmbeddedDocument`\ s, which represent the comments on a | ||||||
|  | blog post). To select only a subset of fields, use | ||||||
|  | :meth:`~mongoengine.queryset.QuerySet.only`, specifying the fields you want to | ||||||
|  | retrieve as its arguments. Note that if fields that are not downloaded are | ||||||
|  | accessed, their default value (or :attr:`None` if no default value is provided) | ||||||
|  | will be given:: | ||||||
|  |  | ||||||
|  |     >>> class Film(Document): | ||||||
|  |     ...     title = StringField() | ||||||
|  |     ...     year = IntField() | ||||||
|  |     ...     rating = IntField(default=3) | ||||||
|  |     ... | ||||||
|  |     >>> Film(title='The Shawshank Redemption', year=1994, rating=5).save() | ||||||
|  |     >>> f = Film.objects.only('title').first() | ||||||
|  |     >>> f.title | ||||||
|  |     'The Shawshank Redemption' | ||||||
|  |     >>> f.year   # None | ||||||
|  |     >>> f.rating # default value | ||||||
|  |     3 | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |  | ||||||
|  |     The :meth:`~mongoengine.queryset.QuerySet.exclude` is the opposite of | ||||||
|  |     :meth:`~mongoengine.queryset.QuerySet.only` if you want to exclude a field. | ||||||
|  |  | ||||||
|  | If you later need the missing fields, just call | ||||||
|  | :meth:`~mongoengine.Document.reload` on your document. | ||||||
|  |  | ||||||
|  | Getting related data | ||||||
|  | -------------------- | ||||||
|  |  | ||||||
|  | When iterating the results of :class:`~mongoengine.ListField` or | ||||||
|  | :class:`~mongoengine.DictField` we automatically dereference any | ||||||
|  | :class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the | ||||||
|  | number of queries to mongo. | ||||||
|  |  | ||||||
|  | There are times when that efficiency is not enough, documents that have | ||||||
|  | :class:`~mongoengine.ReferenceField` objects or | ||||||
|  | :class:`~mongoengine.GenericReferenceField` objects at the top level are | ||||||
|  | expensive as the number of queries to MongoDB can quickly rise. | ||||||
|  |  | ||||||
|  | To limit the number of queries use | ||||||
|  | :func:`~mongoengine.queryset.QuerySet.select_related` which converts the | ||||||
|  | QuerySet to a list and dereferences as efficiently as possible.  By default | ||||||
|  | :func:`~mongoengine.queryset.QuerySet.select_related` only dereferences any | ||||||
|  | references to the depth of 1 level.  If you have more complicated documents and | ||||||
|  | want to dereference more of the object at once then increasing the :attr:`max_depth` | ||||||
|  | will dereference more levels of the document. | ||||||
|  |  | ||||||
| Advanced queries | Advanced queries | ||||||
| ---------------- | ================ | ||||||
| Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword | Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword | ||||||
| arguments can't fully express the query you want to use -- for example if you | arguments can't fully express the query you want to use -- for example if you | ||||||
| need to combine a number of constraints using *and* and *or*. This is made | need to combine a number of constraints using *and* and *or*. This is made | ||||||
| @@ -142,8 +375,8 @@ A :class:`~mongoengine.queryset.Q` object represents part of a query, and | |||||||
| can be initialised using the same keyword-argument syntax you use to query | can be initialised using the same keyword-argument syntax you use to query | ||||||
| documents. To build a complex query, you may combine | documents. To build a complex query, you may combine | ||||||
| :class:`~mongoengine.queryset.Q` objects using the ``&`` (and) and ``|`` (or) | :class:`~mongoengine.queryset.Q` objects using the ``&`` (and) and ``|`` (or) | ||||||
| operators. To use :class:`~mongoengine.queryset.Q` objects, pass them in | operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the | ||||||
| as positional arguments to :attr:`Document.objects` when you filter it by | first positional argument to :attr:`Document.objects` when you filter it by | ||||||
| calling it with keyword arguments:: | calling it with keyword arguments:: | ||||||
|  |  | ||||||
|     # Get published posts |     # Get published posts | ||||||
| @@ -152,16 +385,10 @@ calling it with keyword arguments:: | |||||||
|     # Get top posts |     # Get top posts | ||||||
|     Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000)) |     Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000)) | ||||||
|  |  | ||||||
| .. warning:: |  | ||||||
|    Only use these advanced queries if absolutely necessary as they will execute |  | ||||||
|    significantly slower than regular queries. This is because they are not |  | ||||||
|    natively supported by MongoDB -- they are compiled to Javascript and sent |  | ||||||
|    to the server for execution. |  | ||||||
|  |  | ||||||
| .. _guide-atomic-updates: | .. _guide-atomic-updates: | ||||||
|  |  | ||||||
| Atomic updates | Atomic updates | ||||||
| -------------- | ============== | ||||||
| Documents may be updated atomically by using the | Documents may be updated atomically by using the | ||||||
| :meth:`~mongoengine.queryset.QuerySet.update_one` and | :meth:`~mongoengine.queryset.QuerySet.update_one` and | ||||||
| :meth:`~mongoengine.queryset.QuerySet.update` methods on a | :meth:`~mongoengine.queryset.QuerySet.update` methods on a | ||||||
| @@ -172,10 +399,13 @@ that you may use with these methods: | |||||||
| * ``unset`` -- delete a particular value (since MongoDB v1.3+) | * ``unset`` -- delete a particular value (since MongoDB v1.3+) | ||||||
| * ``inc`` -- increment a value by a given amount | * ``inc`` -- increment a value by a given amount | ||||||
| * ``dec`` -- decrement a value by a given amount | * ``dec`` -- decrement a value by a given amount | ||||||
|  | * ``pop`` -- remove the last item from a list | ||||||
| * ``push`` -- append a value to a list | * ``push`` -- append a value to a list | ||||||
| * ``push_all`` -- append several values to a list | * ``push_all`` -- append several values to a list | ||||||
|  | * ``pop`` -- remove the first or last element of a list | ||||||
| * ``pull`` -- remove a value from a list | * ``pull`` -- remove a value from a list | ||||||
| * ``pull_all`` -- remove several values from a list | * ``pull_all`` -- remove several values from a list | ||||||
|  | * ``add_to_set`` -- add value to a list only if it's not in the list already | ||||||
|  |  | ||||||
| The syntax for atomic updates is similar to the querying syntax, but the | The syntax for atomic updates is similar to the querying syntax, but the | ||||||
| modifier comes before the field, not after it:: | modifier comes before the field, not after it:: | ||||||
| @@ -194,3 +424,127 @@ modifier comes before the field, not after it:: | |||||||
|     >>> post.reload() |     >>> post.reload() | ||||||
|     >>> post.tags |     >>> post.tags | ||||||
|     ['database', 'nosql'] |     ['database', 'nosql'] | ||||||
|  |  | ||||||
|  | .. note :: | ||||||
|  |  | ||||||
|  |     In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates | ||||||
|  |     on changed documents by tracking changes to that document. | ||||||
|  |  | ||||||
|  | The positional operator allows you to update list items without knowing the | ||||||
|  | index position, therefore making the update a single atomic operation.  As we | ||||||
|  | cannot use the `$` syntax in keyword arguments it has been mapped to `S`:: | ||||||
|  |  | ||||||
|  |     >>> post = BlogPost(title='Test', page_views=0, tags=['database', 'mongo']) | ||||||
|  |     >>> post.save() | ||||||
|  |     >>> BlogPost.objects(id=post.id, tags='mongo').update(set__tags__S='mongodb') | ||||||
|  |     >>> post.reload() | ||||||
|  |     >>> post.tags | ||||||
|  |     ['database', 'mongodb'] | ||||||
|  |  | ||||||
|  | .. note :: | ||||||
|  |     Currently only top level lists are handled, future versions of mongodb / | ||||||
|  |     pymongo plan to support nested positional operators.  See `The $ positional | ||||||
|  |     operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_. | ||||||
|  |  | ||||||
|  | Server-side javascript execution | ||||||
|  | ================================ | ||||||
|  | Javascript functions may be written and sent to the server for execution. The | ||||||
|  | result of this is the return value of the Javascript function. This | ||||||
|  | functionality is accessed through the | ||||||
|  | :meth:`~mongoengine.queryset.QuerySet.exec_js` method on | ||||||
|  | :meth:`~mongoengine.queryset.QuerySet` objects. Pass in a string containing a | ||||||
|  | Javascript function as the first argument. | ||||||
|  |  | ||||||
|  | The remaining positional arguments are names of fields that will be passed into | ||||||
|  | your Javascript function as its arguments. This allows functions to be written | ||||||
|  | that may be executed on any field in a collection (e.g. the | ||||||
|  | :meth:`~mongoengine.queryset.QuerySet.sum` method, which accepts the name of | ||||||
|  | the field to sum over as its argument). Note that field names passed in this | ||||||
|  | manner are automatically translated to the names used on the database (set | ||||||
|  | using the :attr:`name` keyword argument to a field constructor). | ||||||
|  |  | ||||||
|  | Keyword arguments to :meth:`~mongoengine.queryset.QuerySet.exec_js` are | ||||||
|  | combined into an object called :attr:`options`, which is available in the | ||||||
|  | Javascript function. This may be used for defining specific parameters for your | ||||||
|  | function. | ||||||
|  |  | ||||||
|  | Some variables are made available in the scope of the Javascript function: | ||||||
|  |  | ||||||
|  | * ``collection`` -- the name of the collection that corresponds to the | ||||||
|  |   :class:`~mongoengine.Document` class that is being used; this should be | ||||||
|  |   used to get the :class:`Collection` object from :attr:`db` in Javascript | ||||||
|  |   code | ||||||
|  | * ``query`` -- the query that has been generated by the | ||||||
|  |   :class:`~mongoengine.queryset.QuerySet` object; this may be passed into | ||||||
|  |   the :meth:`find` method on a :class:`Collection` object in the Javascript | ||||||
|  |   function | ||||||
|  | * ``options`` -- an object containing the keyword arguments passed into | ||||||
|  |   :meth:`~mongoengine.queryset.QuerySet.exec_js` | ||||||
|  |  | ||||||
|  | The following example demonstrates the intended usage of | ||||||
|  | :meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums | ||||||
|  | over a field on a document (this functionality is already available through | ||||||
|  | :meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of | ||||||
|  | example):: | ||||||
|  |  | ||||||
|  |     def sum_field(document, field_name, include_negatives=True): | ||||||
|  |         code = """ | ||||||
|  |         function(sumField) { | ||||||
|  |             var total = 0.0; | ||||||
|  |             db[collection].find(query).forEach(function(doc) { | ||||||
|  |                 var val = doc[sumField]; | ||||||
|  |                 if (val >= 0.0 || options.includeNegatives) { | ||||||
|  |                     total += val; | ||||||
|  |                 } | ||||||
|  |             }); | ||||||
|  |             return total; | ||||||
|  |         } | ||||||
|  |         """ | ||||||
|  |         options = {'includeNegatives': include_negatives} | ||||||
|  |         return document.objects.exec_js(code, field_name, **options) | ||||||
|  |  | ||||||
|  | As fields in MongoEngine may use different names in the database (set using the | ||||||
|  | :attr:`db_field` keyword argument to a :class:`Field` constructor), a mechanism | ||||||
|  | exists for replacing MongoEngine field names with the database field names in | ||||||
|  | Javascript code. When accessing a field on a collection object, use | ||||||
|  | square-bracket notation, and prefix the MongoEngine field name with a tilde. | ||||||
|  | The field name that follows the tilde will be translated to the name used in | ||||||
|  | the database. Note that when referring to fields on embedded documents, | ||||||
|  | the name of the :class:`~mongoengine.EmbeddedDocumentField`, followed by a dot, | ||||||
|  | should be used before the name of the field on the embedded document. The | ||||||
|  | following example shows how the substitutions are made:: | ||||||
|  |  | ||||||
|  |     class Comment(EmbeddedDocument): | ||||||
|  |         content = StringField(db_field='body') | ||||||
|  |  | ||||||
|  |     class BlogPost(Document): | ||||||
|  |         title = StringField(db_field='doctitle') | ||||||
|  |         comments = ListField(EmbeddedDocumentField(Comment), name='cs') | ||||||
|  |  | ||||||
|  |     # Returns a list of dictionaries. Each dictionary contains a value named | ||||||
|  |     # "document", which corresponds to the "title" field on a BlogPost, and | ||||||
|  |     # "comment", which corresponds to an individual comment. The substitutions | ||||||
|  |     # made are shown in the comments. | ||||||
|  |     BlogPost.objects.exec_js(""" | ||||||
|  |     function() { | ||||||
|  |         var comments = []; | ||||||
|  |         db[collection].find(query).forEach(function(doc) { | ||||||
|  |             // doc[~comments] -> doc["cs"] | ||||||
|  |             var docComments = doc[~comments]; | ||||||
|  |  | ||||||
|  |             for (var i = 0; i < docComments.length; i++) { | ||||||
|  |                 // doc[~comments][i] -> doc["cs"][i] | ||||||
|  |                 var comment = doc[~comments][i]; | ||||||
|  |  | ||||||
|  |                 comments.push({ | ||||||
|  |                     // doc[~title] -> doc["doctitle"] | ||||||
|  |                     'document': doc[~title], | ||||||
|  |  | ||||||
|  |                     // comment[~comments.content] -> comment["body"] | ||||||
|  |                     'comment': comment[~comments.content] | ||||||
|  |                 }); | ||||||
|  |             } | ||||||
|  |         }); | ||||||
|  |         return comments; | ||||||
|  |     } | ||||||
|  |     """) | ||||||
|   | |||||||
							
								
								
									
										53
									
								
								docs/guide/signals.rst
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										53
									
								
								docs/guide/signals.rst
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,53 @@ | |||||||
|  | .. _signals: | ||||||
|  |  | ||||||
|  | Signals | ||||||
|  | ======= | ||||||
|  |  | ||||||
|  | .. versionadded:: 0.5 | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |  | ||||||
|  |   Signal support is provided by the excellent `blinker`_ library and | ||||||
|  |   will gracefully fall back if it is not available. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | The following document signals exist in MongoEngine and are pretty self-explanatory: | ||||||
|  |  | ||||||
|  |   * `mongoengine.signals.pre_init` | ||||||
|  |   * `mongoengine.signals.post_init` | ||||||
|  |   * `mongoengine.signals.pre_save` | ||||||
|  |   * `mongoengine.signals.post_save` | ||||||
|  |   * `mongoengine.signals.pre_delete` | ||||||
|  |   * `mongoengine.signals.post_delete` | ||||||
|  |   * `mongoengine.signals.pre_bulk_insert` | ||||||
|  |   * `mongoengine.signals.post_bulk_insert` | ||||||
|  |  | ||||||
|  | Example usage:: | ||||||
|  |  | ||||||
|  |     from mongoengine import * | ||||||
|  |     from mongoengine import signals | ||||||
|  |  | ||||||
|  |     class Author(Document): | ||||||
|  |         name = StringField() | ||||||
|  |  | ||||||
|  |         def __unicode__(self): | ||||||
|  |             return self.name | ||||||
|  |  | ||||||
|  |         @classmethod | ||||||
|  |         def pre_save(cls, sender, document, **kwargs): | ||||||
|  |             logging.debug("Pre Save: %s" % document.name) | ||||||
|  |  | ||||||
|  |         @classmethod | ||||||
|  |         def post_save(cls, sender, document, **kwargs): | ||||||
|  |             logging.debug("Post Save: %s" % document.name) | ||||||
|  |             if 'created' in kwargs: | ||||||
|  |                 if kwargs['created']: | ||||||
|  |                     logging.debug("Created") | ||||||
|  |                 else: | ||||||
|  |                     logging.debug("Updated") | ||||||
|  |  | ||||||
|  |     signals.pre_save.connect(Author.pre_save, sender=Author) | ||||||
|  |     signals.post_save.connect(Author.post_save, sender=Author) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | .. _blinker: http://pypi.python.org/pypi/blinker | ||||||
| @@ -2,27 +2,66 @@ | |||||||
| MongoEngine User Documentation | MongoEngine User Documentation | ||||||
| ============================== | ============================== | ||||||
|  |  | ||||||
| MongoEngine is an Object-Document Mapper, written in Python for working with  | **MongoEngine** is an Object-Document Mapper, written in Python for working with | ||||||
| MongoDB. To install it, simply run | MongoDB. To install it, simply run | ||||||
|  |  | ||||||
| .. code-block:: console | .. code-block:: console | ||||||
|  |  | ||||||
|     # easy_install mongoengine |     # pip install -U mongoengine | ||||||
|  |  | ||||||
| The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_. | :doc:`tutorial` | ||||||
|  |   Start here for a quick overview. | ||||||
|  |  | ||||||
|  | :doc:`guide/index` | ||||||
|  |   The Full guide to MongoEngine | ||||||
|  |  | ||||||
|  | :doc:`apireference` | ||||||
|  |   The complete API documentation. | ||||||
|  |  | ||||||
|  | :doc:`upgrade` | ||||||
|  |   How to upgrade MongoEngine. | ||||||
|  |  | ||||||
|  | :doc:`django` | ||||||
|  |   Using MongoEngine and Django | ||||||
|  |  | ||||||
|  | Community | ||||||
|  | --------- | ||||||
|  |  | ||||||
|  | To get help with using MongoEngine, use the `MongoEngine Users mailing list | ||||||
|  | <http://groups.google.com/group/mongoengine-users>`_ or come chat on the | ||||||
|  | `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_. | ||||||
|  |  | ||||||
|  | Contributing | ||||||
|  | ------------ | ||||||
|  |  | ||||||
|  | The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ and | ||||||
|  | contributions are always encouraged. Contributions can be as simple as | ||||||
|  | minor tweaks to this documentation. To contribute, fork the project on | ||||||
|  | `GitHub <http://github.com/hmarr/mongoengine>`_ and send a | ||||||
|  | pull request. | ||||||
|  |  | ||||||
|  | Also, you can join the developers' `mailing list | ||||||
|  | <http://groups.google.com/group/mongoengine-dev>`_. | ||||||
|  |  | ||||||
|  | Changes | ||||||
|  | ------- | ||||||
|  | See the :doc:`changelog` for a full list of changes to MongoEngine and | ||||||
|  | :doc:`upgrade` for upgrade information. | ||||||
|  |  | ||||||
| .. toctree:: | .. toctree:: | ||||||
|    :maxdepth: 2 |    :hidden: | ||||||
|  |  | ||||||
|    tutorial |    tutorial | ||||||
|    guide/index |    guide/index | ||||||
|    apireference |    apireference | ||||||
|    django |    django | ||||||
|    changelog |    changelog | ||||||
|  |    upgrade | ||||||
|  |  | ||||||
| Indices and tables | Indices and tables | ||||||
| ================== | ------------------ | ||||||
|  |  | ||||||
| * :ref:`genindex` | * :ref:`genindex` | ||||||
|  | * :ref:`modindex` | ||||||
| * :ref:`search` | * :ref:`search` | ||||||
|  |  | ||||||
|   | |||||||
| @@ -152,6 +152,26 @@ We can then store a list of comment documents in our post document:: | |||||||
|         tags = ListField(StringField(max_length=30)) |         tags = ListField(StringField(max_length=30)) | ||||||
|         comments = ListField(EmbeddedDocumentField(Comment)) |         comments = ListField(EmbeddedDocumentField(Comment)) | ||||||
|  |  | ||||||
|  | Handling deletions of references | ||||||
|  | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ||||||
|  |  | ||||||
|  | The :class:`~mongoengine.ReferenceField` object takes a keyword | ||||||
|  | `reverse_delete_rule` for handling deletion rules if the reference is deleted. | ||||||
|  | To delete all the posts if a user is deleted set the rule:: | ||||||
|  |  | ||||||
|  |     class Post(Document): | ||||||
|  |         title = StringField(max_length=120, required=True) | ||||||
|  |         author = ReferenceField(User, reverse_delete_rule=CASCADE) | ||||||
|  |         tags = ListField(StringField(max_length=30)) | ||||||
|  |         comments = ListField(EmbeddedDocumentField(Comment)) | ||||||
|  |  | ||||||
|  | See :class:`~mongoengine.ReferenceField` for more information. | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |     MapFields and DictFields currently don't support automatic handling of | ||||||
|  |     deleted references | ||||||
|  |  | ||||||
|  |  | ||||||
| Adding data to our Tumblelog | Adding data to our Tumblelog | ||||||
| ============================ | ============================ | ||||||
| Now that we've defined how our documents will be structured, let's start adding | Now that we've defined how our documents will be structured, let's start adding | ||||||
| @@ -250,5 +270,5 @@ the first matched by the query you provide. Aggregation functions may also be | |||||||
| used on :class:`~mongoengine.queryset.QuerySet` objects:: | used on :class:`~mongoengine.queryset.QuerySet` objects:: | ||||||
|  |  | ||||||
|     num_posts = Post.objects(tags='mongodb').count() |     num_posts = Post.objects(tags='mongodb').count() | ||||||
|     print 'Found % posts with tag "mongodb"' % num_posts |     print 'Found %d posts with tag "mongodb"' % num_posts | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										178
									
								
								docs/upgrade.rst
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										178
									
								
								docs/upgrade.rst
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,178 @@ | |||||||
|  | ========= | ||||||
|  | Upgrading | ||||||
|  | ========= | ||||||
|  |  | ||||||
|  | 0.6 to 0.7 | ||||||
|  | ========== | ||||||
|  |  | ||||||
|  | Cascade saves | ||||||
|  | ------------- | ||||||
|  |  | ||||||
|  | Saves will raise a `FutureWarning` if they cascade and cascade hasn't been set | ||||||
|  | to True.  This is because in 0.8 it will default to False.  If you require | ||||||
|  | cascading saves then either set it in the `meta` or pass | ||||||
|  | via `save` eg :: | ||||||
|  |  | ||||||
|  |     # At the class level: | ||||||
|  |     class Person(Document): | ||||||
|  |         meta = {'cascade': True} | ||||||
|  |  | ||||||
|  |     # Or in code: | ||||||
|  |     my_document.save(cascade=True) | ||||||
|  |  | ||||||
|  | .. note :: | ||||||
|  |     Remember: cascading saves **do not** cascade through lists. | ||||||
|  |  | ||||||
|  | ReferenceFields | ||||||
|  | --------------- | ||||||
|  |  | ||||||
|  | ReferenceFields now can store references as ObjectId strings instead of DBRefs. | ||||||
|  | This will become the default in 0.8 and if `dbref` is not set a `FutureWarning` | ||||||
|  | will be raised. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | To explicitly continue to use DBRefs change the `dbref` flag | ||||||
|  | to True :: | ||||||
|  |  | ||||||
|  |    class Person(Document): | ||||||
|  |        groups = ListField(ReferenceField(Group, dbref=True)) | ||||||
|  |  | ||||||
|  | To migrate to using strings instead of DBRefs you will have to manually | ||||||
|  | migrate :: | ||||||
|  |  | ||||||
|  |         # Step 1 - Migrate the model definition | ||||||
|  |         class Group(Document): | ||||||
|  |             author = ReferenceField(User, dbref=False) | ||||||
|  |             members = ListField(ReferenceField(User, dbref=False)) | ||||||
|  |  | ||||||
|  |         # Step 2 - Migrate the data | ||||||
|  |         for g in Group.objects(): | ||||||
|  |             g.author = g.author | ||||||
|  |             g.members = g.members | ||||||
|  |             g.save() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | item_frequencies | ||||||
|  | ---------------- | ||||||
|  |  | ||||||
|  | In the 0.6 series we added support for null / zero / false values in | ||||||
|  | item_frequencies.  A side effect was to return keys in the type they are | ||||||
|  | stored in rather than as string representations.  Your code may need to be | ||||||
|  | updated to handle native types rather than string keys for the results of | ||||||
|  | item frequency queries. | ||||||
|  |  | ||||||
|  | 0.5 to 0.6 | ||||||
|  | ========== | ||||||
|  |  | ||||||
|  | Embedded Documents - if you had a `pk` field you will have to rename it from | ||||||
|  | `_id` to `pk` as pk is no longer a property of Embedded Documents. | ||||||
|  |  | ||||||
|  | Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw | ||||||
|  | an InvalidDocument error as they aren't currently supported. | ||||||
|  |  | ||||||
|  | Document._get_subclasses - Is no longer used and the class method has been | ||||||
|  | removed. | ||||||
|  |  | ||||||
|  | Document.objects.with_id - now raises an InvalidQueryError if used with a | ||||||
|  | filter. | ||||||
|  |  | ||||||
|  | FutureWarning - A future warning has been added to all inherited classes that | ||||||
|  | don't define `allow_inheritance` in their meta. | ||||||
|  |  | ||||||
|  | You may need to update pyMongo to 2.0 for use with Sharding. | ||||||
|  |  | ||||||
|  | 0.4 to 0.5 | ||||||
|  | =========== | ||||||
|  |  | ||||||
|  | There have been the following backwards incompatibilities from 0.4 to 0.5.  The | ||||||
|  | main areas of change are: choices in fields, map_reduce and collection names. | ||||||
|  |  | ||||||
|  | Choice options: | ||||||
|  | --------------- | ||||||
|  |  | ||||||
|  | Are now expected to be an iterable of tuples, with the first element in each | ||||||
|  | tuple being the actual value to be stored. The second element is the | ||||||
|  | human-readable name for the option. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | PyMongo / MongoDB | ||||||
|  | ----------------- | ||||||
|  |  | ||||||
|  | map reduce now requires pymongo 1.11+- The pymongo `merge_output` and | ||||||
|  | `reduce_output` parameters have been deprecated. | ||||||
|  |  | ||||||
|  | More methods now use map_reduce as db.eval is not supported for sharding as | ||||||
|  | such the following have been changed: | ||||||
|  |  | ||||||
|  |     * :meth:`~mongoengine.queryset.QuerySet.sum` | ||||||
|  |     * :meth:`~mongoengine.queryset.QuerySet.average` | ||||||
|  |     * :meth:`~mongoengine.queryset.QuerySet.item_frequencies` | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Default collection naming | ||||||
|  | ------------------------- | ||||||
|  |  | ||||||
|  | Previously it was just lowercase, it's now much more pythonic and readable as | ||||||
|  | it's lowercase and underscores, previously :: | ||||||
|  |  | ||||||
|  |     class MyAceDocument(Document): | ||||||
|  |         pass | ||||||
|  |  | ||||||
|  |     MyAceDocument._meta['collection'] == myacedocument | ||||||
|  |  | ||||||
|  | In 0.5 this will change to :: | ||||||
|  |  | ||||||
|  |     class MyAceDocument(Document): | ||||||
|  |         pass | ||||||
|  |  | ||||||
|  |     MyAceDocument._get_collection_name() == my_ace_document | ||||||
|  |  | ||||||
|  | To upgrade use a Mixin class to set meta like so :: | ||||||
|  |  | ||||||
|  |     class BaseMixin(object): | ||||||
|  |         meta = { | ||||||
|  |             'collection': lambda c: c.__name__.lower() | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |     class MyAceDocument(Document, BaseMixin): | ||||||
|  |         pass | ||||||
|  |  | ||||||
|  |     MyAceDocument._get_collection_name() == "myacedocument" | ||||||
|  |  | ||||||
|  | Alternatively, you can rename your collections eg :: | ||||||
|  |  | ||||||
|  |     from mongoengine.connection import _get_db | ||||||
|  |     from mongoengine.base import _document_registry | ||||||
|  |  | ||||||
|  |     def rename_collections(): | ||||||
|  |         db = _get_db() | ||||||
|  |  | ||||||
|  |         failure = False | ||||||
|  |  | ||||||
|  |         collection_names = [d._get_collection_name() | ||||||
|  |                             for d in _document_registry.values()] | ||||||
|  |  | ||||||
|  |         for new_style_name in collection_names: | ||||||
|  |             if not new_style_name:  # embedded documents don't have collections | ||||||
|  |                 continue | ||||||
|  |             old_style_name = new_style_name.replace('_', '') | ||||||
|  |  | ||||||
|  |             if old_style_name == new_style_name: | ||||||
|  |                 continue  # Nothing to do | ||||||
|  |  | ||||||
|  |             existing = db.collection_names() | ||||||
|  |             if old_style_name in existing: | ||||||
|  |                 if new_style_name in existing: | ||||||
|  |                     failure = True | ||||||
|  |                     print "FAILED to rename: %s to %s (already exists)" % ( | ||||||
|  |                         old_style_name, new_style_name) | ||||||
|  |                 else: | ||||||
|  |                     db[old_style_name].rename(new_style_name) | ||||||
|  |                     print "Renamed:  %s to %s" % (old_style_name, | ||||||
|  |                                                   new_style_name) | ||||||
|  |  | ||||||
|  |         if failure: | ||||||
|  |             print "Upgrading  collection names failed" | ||||||
|  |         else: | ||||||
|  |             print "Upgraded collection names" | ||||||
|  |  | ||||||
| @@ -6,19 +6,18 @@ import connection | |||||||
| from connection import * | from connection import * | ||||||
| import queryset | import queryset | ||||||
| from queryset import * | from queryset import * | ||||||
|  | import signals | ||||||
|  | from signals import * | ||||||
|  |  | ||||||
| __all__ = (document.__all__ + fields.__all__ + connection.__all__ + | __all__ = (document.__all__ + fields.__all__ + connection.__all__ + | ||||||
|            queryset.__all__) |            queryset.__all__ + signals.__all__) | ||||||
|  |  | ||||||
| __author__ = 'Harry Marr' | VERSION = (0, 7, 2) | ||||||
|  |  | ||||||
| VERSION = (0, 2, 2) |  | ||||||
|  |  | ||||||
| def get_version(): | def get_version(): | ||||||
|     version = '%s.%s' % (VERSION[0], VERSION[1]) |     if isinstance(VERSION[-1], basestring): | ||||||
|     if VERSION[2]: |         return '.'.join(map(str, VERSION[:-1])) + VERSION[-1] | ||||||
|         version = '%s.%s' % (version, VERSION[2]) |     return '.'.join(map(str, VERSION)) | ||||||
|     return version |  | ||||||
|  |  | ||||||
| __version__ = get_version() | __version__ = get_version() | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										1429
									
								
								mongoengine/base.py
									
									
									
									
									
								
							
							
						
						
									
										1429
									
								
								mongoengine/base.py
									
									
									
									
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,62 +1,166 @@ | |||||||
| from pymongo import Connection | import pymongo | ||||||
|  | from pymongo import Connection, ReplicaSetConnection, uri_parser | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ['ConnectionError', 'connect'] | __all__ = ['ConnectionError', 'connect', 'register_connection', | ||||||
|  |            'DEFAULT_CONNECTION_NAME'] | ||||||
|  |  | ||||||
|  |  | ||||||
| _connection_settings = { | DEFAULT_CONNECTION_NAME = 'default' | ||||||
|     'host': 'localhost', |  | ||||||
|     'port': 27017, |  | ||||||
|     'pool_size': 1, |  | ||||||
| } |  | ||||||
| _connection = None |  | ||||||
|  |  | ||||||
| _db_name = None |  | ||||||
| _db_username = None |  | ||||||
| _db_password = None |  | ||||||
| _db = None |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ConnectionError(Exception): | class ConnectionError(Exception): | ||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
| def _get_connection(): | _connection_settings = {} | ||||||
|     global _connection | _connections = {} | ||||||
|     # Connect to the database if not already connected | _dbs = {} | ||||||
|     if _connection is None: |  | ||||||
|         try: |  | ||||||
|             _connection = Connection(**_connection_settings) |  | ||||||
|         except: |  | ||||||
|             raise ConnectionError('Cannot connect to the database') |  | ||||||
|     return _connection |  | ||||||
|  |  | ||||||
| def _get_db(): |  | ||||||
|     global _db, _connection |  | ||||||
|     # Connect if not already connected |  | ||||||
|     if _connection is None: |  | ||||||
|         _connection = _get_connection() |  | ||||||
|  |  | ||||||
|     if _db is None: | def register_connection(alias, name, host='localhost', port=27017, | ||||||
|         # _db_name will be None if the user hasn't called connect() |                         is_slave=False, read_preference=False, slaves=None, | ||||||
|         if _db_name is None: |                         username=None, password=None, **kwargs): | ||||||
|             raise ConnectionError('Not connected to the database') |     """Add a connection. | ||||||
|  |  | ||||||
|         # Get DB from current connection and authenticate if necessary |     :param alias: the name that will be used to refer to this connection | ||||||
|         _db = _connection[_db_name] |         throughout MongoEngine | ||||||
|         if _db_username and _db_password: |     :param name: the name of the specific database to use | ||||||
|             _db.authenticate(_db_username, _db_password) |     :param host: the host name of the :program:`mongod` instance to connect to | ||||||
|  |     :param port: the port that the :program:`mongod` instance is running on | ||||||
|  |     :param is_slave: whether the connection can act as a slave ** Deprecated pymongo 2.0.1+ | ||||||
|  |     :param read_preference: The read preference for the collection ** Added pymongo 2.1 | ||||||
|  |     :param slaves: a list of aliases of slave connections; each of these must | ||||||
|  |         be a registered connection that has :attr:`is_slave` set to ``True`` | ||||||
|  |     :param username: username to authenticate with | ||||||
|  |     :param password: password to authenticate with | ||||||
|  |     :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver | ||||||
|  |  | ||||||
|     return _db |  | ||||||
|  |  | ||||||
| def connect(db, username=None, password=None, **kwargs): |  | ||||||
|     """Connect to the database specified by the 'db' argument. Connection  |  | ||||||
|     settings may be provided here as well if the database is not running on |  | ||||||
|     the default port on localhost. If authentication is needed, provide |  | ||||||
|     username and password arguments as well. |  | ||||||
|     """ |     """ | ||||||
|     global _connection_settings, _db_name, _db_username, _db_password |     global _connection_settings | ||||||
|     _connection_settings.update(kwargs) |  | ||||||
|     _db_name = db |     conn_settings = { | ||||||
|     _db_username = username |         'name': name, | ||||||
|     _db_password = password |         'host': host, | ||||||
|  |         'port': port, | ||||||
|  |         'is_slave': is_slave, | ||||||
|  |         'slaves': slaves or [], | ||||||
|  |         'username': username, | ||||||
|  |         'password': password, | ||||||
|  |         'read_preference': read_preference | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     # Handle uri style connections | ||||||
|  |     if "://" in host: | ||||||
|  |         uri_dict = uri_parser.parse_uri(host) | ||||||
|  |         if uri_dict.get('database') is None: | ||||||
|  |             raise ConnectionError("If using URI style connection include "\ | ||||||
|  |                                   "database name in string") | ||||||
|  |         conn_settings.update({ | ||||||
|  |             'host': host, | ||||||
|  |             'name': uri_dict.get('database'), | ||||||
|  |             'username': uri_dict.get('username'), | ||||||
|  |             'password': uri_dict.get('password'), | ||||||
|  |             'read_preference': read_preference, | ||||||
|  |         }) | ||||||
|  |         if "replicaSet" in host: | ||||||
|  |             conn_settings['replicaSet'] = True | ||||||
|  |  | ||||||
|  |     conn_settings.update(kwargs) | ||||||
|  |     _connection_settings[alias] = conn_settings | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def disconnect(alias=DEFAULT_CONNECTION_NAME): | ||||||
|  |     global _connections | ||||||
|  |     global _dbs | ||||||
|  |  | ||||||
|  |     if alias in _connections: | ||||||
|  |         get_connection(alias=alias).disconnect() | ||||||
|  |         del _connections[alias] | ||||||
|  |     if alias in _dbs: | ||||||
|  |         del _dbs[alias] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||||
|  |     global _connections | ||||||
|  |     # Connect to the database if not already connected | ||||||
|  |     if reconnect: | ||||||
|  |         disconnect(alias) | ||||||
|  |  | ||||||
|  |     if alias not in _connections: | ||||||
|  |         if alias not in _connection_settings: | ||||||
|  |             msg = 'Connection with alias "%s" has not been defined' % alias | ||||||
|  |             if alias == DEFAULT_CONNECTION_NAME: | ||||||
|  |                 msg = 'You have not defined a default connection' | ||||||
|  |             raise ConnectionError(msg) | ||||||
|  |         conn_settings = _connection_settings[alias].copy() | ||||||
|  |  | ||||||
|  |         if hasattr(pymongo, 'version_tuple'):  # Support for 2.1+ | ||||||
|  |             conn_settings.pop('name', None) | ||||||
|  |             conn_settings.pop('slaves', None) | ||||||
|  |             conn_settings.pop('is_slave', None) | ||||||
|  |             conn_settings.pop('username', None) | ||||||
|  |             conn_settings.pop('password', None) | ||||||
|  |         else: | ||||||
|  |             # Get all the slave connections | ||||||
|  |             if 'slaves' in conn_settings: | ||||||
|  |                 slaves = [] | ||||||
|  |                 for slave_alias in conn_settings['slaves']: | ||||||
|  |                     slaves.append(get_connection(slave_alias)) | ||||||
|  |                 conn_settings['slaves'] = slaves | ||||||
|  |                 conn_settings.pop('read_preference', None) | ||||||
|  |  | ||||||
|  |         connection_class = Connection | ||||||
|  |         if 'replicaSet' in conn_settings: | ||||||
|  |             conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) | ||||||
|  |             # Discard port since it can't be used on ReplicaSetConnection | ||||||
|  |             conn_settings.pop('port', None) | ||||||
|  |             # Discard replicaSet if not base string | ||||||
|  |             if not isinstance(conn_settings['replicaSet'], basestring): | ||||||
|  |                 conn_settings.pop('replicaSet', None) | ||||||
|  |             connection_class = ReplicaSetConnection | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             _connections[alias] = connection_class(**conn_settings) | ||||||
|  |         except Exception, e: | ||||||
|  |             raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e)) | ||||||
|  |     return _connections[alias] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||||
|  |     global _dbs | ||||||
|  |     if reconnect: | ||||||
|  |         disconnect(alias) | ||||||
|  |  | ||||||
|  |     if alias not in _dbs: | ||||||
|  |         conn = get_connection(alias) | ||||||
|  |         conn_settings = _connection_settings[alias] | ||||||
|  |         _dbs[alias] = conn[conn_settings['name']] | ||||||
|  |         # Authenticate if necessary | ||||||
|  |         if conn_settings['username'] and conn_settings['password']: | ||||||
|  |             _dbs[alias].authenticate(conn_settings['username'], | ||||||
|  |                                      conn_settings['password']) | ||||||
|  |     return _dbs[alias] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs): | ||||||
|  |     """Connect to the database specified by the 'db' argument. | ||||||
|  |  | ||||||
|  |     Connection settings may be provided here as well if the database is not | ||||||
|  |     running on the default port on localhost. If authentication is needed, | ||||||
|  |     provide username and password arguments as well. | ||||||
|  |  | ||||||
|  |     Multiple databases are supported by using aliases.  Provide a separate | ||||||
|  |     `alias` to connect to a different instance of :program:`mongod`. | ||||||
|  |  | ||||||
|  |     .. versionchanged:: 0.6 - added multiple database support. | ||||||
|  |     """ | ||||||
|  |     global _connections | ||||||
|  |     if alias not in _connections: | ||||||
|  |         register_connection(alias, db, **kwargs) | ||||||
|  |  | ||||||
|  |     return get_connection(alias) | ||||||
|  |  | ||||||
|  | # Support old naming convention | ||||||
|  | _get_connection = get_connection | ||||||
|  | _get_db = get_db | ||||||
|   | |||||||
							
								
								
									
										211
									
								
								mongoengine/dereference.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										211
									
								
								mongoengine/dereference.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,211 @@ | |||||||
|  | from bson import DBRef, SON | ||||||
|  |  | ||||||
|  | from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document) | ||||||
|  | from fields import (ReferenceField, ListField, DictField, MapField) | ||||||
|  | from connection import get_db | ||||||
|  | from queryset import QuerySet | ||||||
|  | from document import Document | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DeReference(object): | ||||||
|  |  | ||||||
|  |     def __call__(self, items, max_depth=1, instance=None, name=None): | ||||||
|  |         """ | ||||||
|  |         Cheaply dereferences the items to a set depth. | ||||||
|  |         Also handles the conversion of complex data types. | ||||||
|  |  | ||||||
|  |         :param items: The iterable (dict, list, queryset) to be dereferenced. | ||||||
|  |         :param max_depth: The maximum depth to recurse to | ||||||
|  |         :param instance: The owning instance used for tracking changes by | ||||||
|  |             :class:`~mongoengine.base.ComplexBaseField` | ||||||
|  |         :param name: The name of the field, used for tracking changes by | ||||||
|  |             :class:`~mongoengine.base.ComplexBaseField` | ||||||
|  |         :param get: A boolean determining if being called by __get__ | ||||||
|  |         """ | ||||||
|  |         if items is None or isinstance(items, basestring): | ||||||
|  |             return items | ||||||
|  |  | ||||||
|  |         # cheapest way to convert a queryset to a list | ||||||
|  |         # list(queryset) uses a count() query to determine length | ||||||
|  |         if isinstance(items, QuerySet): | ||||||
|  |             items = [i for i in items] | ||||||
|  |  | ||||||
|  |         self.max_depth = max_depth | ||||||
|  |  | ||||||
|  |         doc_type = None | ||||||
|  |         if instance and instance._fields: | ||||||
|  |             doc_type = instance._fields[name] | ||||||
|  |             if hasattr(doc_type, 'field'): | ||||||
|  |                 doc_type = doc_type.field | ||||||
|  |  | ||||||
|  |             if isinstance(doc_type, ReferenceField): | ||||||
|  |                 field = doc_type | ||||||
|  |                 doc_type = doc_type.document_type | ||||||
|  |                 is_list = not hasattr(items, 'items') | ||||||
|  |  | ||||||
|  |                 if is_list and all([i.__class__ == doc_type for i in items]): | ||||||
|  |                     return items | ||||||
|  |                 elif not is_list and all([i.__class__ == doc_type | ||||||
|  |                                          for i in items.values()]): | ||||||
|  |                     return items | ||||||
|  |                 elif not field.dbref: | ||||||
|  |                     if not hasattr(items, 'items'): | ||||||
|  |                         items = [field.to_python(v) | ||||||
|  |                              if not isinstance(v, (DBRef, Document)) else v | ||||||
|  |                              for v in items] | ||||||
|  |                     else: | ||||||
|  |                         items = dict([ | ||||||
|  |                             (k, field.to_python(v)) | ||||||
|  |                             if not isinstance(v, (DBRef, Document)) else (k, v) | ||||||
|  |                             for k, v in items.iteritems()] | ||||||
|  |                         ) | ||||||
|  |  | ||||||
|  |         self.reference_map = self._find_references(items) | ||||||
|  |         self.object_map = self._fetch_objects(doc_type=doc_type) | ||||||
|  |         return self._attach_objects(items, 0, instance, name) | ||||||
|  |  | ||||||
|  |     def _find_references(self, items, depth=0): | ||||||
|  |         """ | ||||||
|  |         Recursively finds all db references to be dereferenced | ||||||
|  |  | ||||||
|  |         :param items: The iterable (dict, list, queryset) | ||||||
|  |         :param depth: The current depth of recursion | ||||||
|  |         """ | ||||||
|  |         reference_map = {} | ||||||
|  |         if not items or depth >= self.max_depth: | ||||||
|  |             return reference_map | ||||||
|  |  | ||||||
|  |         # Determine the iterator to use | ||||||
|  |         if not hasattr(items, 'items'): | ||||||
|  |             iterator = enumerate(items) | ||||||
|  |         else: | ||||||
|  |             iterator = items.iteritems() | ||||||
|  |  | ||||||
|  |         # Recursively find dbreferences | ||||||
|  |         depth += 1 | ||||||
|  |         for k, item in iterator: | ||||||
|  |             if hasattr(item, '_fields'): | ||||||
|  |                 for field_name, field in item._fields.iteritems(): | ||||||
|  |                     v = item._data.get(field_name, None) | ||||||
|  |                     if isinstance(v, (DBRef)): | ||||||
|  |                         reference_map.setdefault(field.document_type, []).append(v.id) | ||||||
|  |                     elif isinstance(v, (dict, SON)) and '_ref' in v: | ||||||
|  |                         reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id) | ||||||
|  |                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|  |                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None) | ||||||
|  |                         references = self._find_references(v, depth) | ||||||
|  |                         for key, refs in references.iteritems(): | ||||||
|  |                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): | ||||||
|  |                                 key = field_cls | ||||||
|  |                             reference_map.setdefault(key, []).extend(refs) | ||||||
|  |             elif isinstance(item, (DBRef)): | ||||||
|  |                 reference_map.setdefault(item.collection, []).append(item.id) | ||||||
|  |             elif isinstance(item, (dict, SON)) and '_ref' in item: | ||||||
|  |                 reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id) | ||||||
|  |             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: | ||||||
|  |                 references = self._find_references(item, depth - 1) | ||||||
|  |                 for key, refs in references.iteritems(): | ||||||
|  |                     reference_map.setdefault(key, []).extend(refs) | ||||||
|  |  | ||||||
|  |         return reference_map | ||||||
|  |  | ||||||
|  |     def _fetch_objects(self, doc_type=None): | ||||||
|  |         """Fetch all references and convert to their document objects | ||||||
|  |         """ | ||||||
|  |         object_map = {} | ||||||
|  |         for col, dbrefs in self.reference_map.iteritems(): | ||||||
|  |             keys = object_map.keys() | ||||||
|  |             refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys])) | ||||||
|  |             if hasattr(col, 'objects'):  # We have a document class for the refs | ||||||
|  |                 references = col.objects.in_bulk(refs) | ||||||
|  |                 for key, doc in references.iteritems(): | ||||||
|  |                     object_map[key] = doc | ||||||
|  |             else:  # Generic reference: use the refs data to convert to document | ||||||
|  |                 if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,) ): | ||||||
|  |                     references = doc_type._get_db()[col].find({'_id': {'$in': refs}}) | ||||||
|  |                     for ref in references: | ||||||
|  |                         doc = doc_type._from_son(ref) | ||||||
|  |                         object_map[doc.id] = doc | ||||||
|  |                 else: | ||||||
|  |                     references = get_db()[col].find({'_id': {'$in': refs}}) | ||||||
|  |                     for ref in references: | ||||||
|  |                         if '_cls' in ref: | ||||||
|  |                             doc = get_document(ref["_cls"])._from_son(ref) | ||||||
|  |                         elif doc_type is None: | ||||||
|  |                             doc = get_document( | ||||||
|  |                                 ''.join(x.capitalize() | ||||||
|  |                                         for x in col.split('_')))._from_son(ref) | ||||||
|  |                         else: | ||||||
|  |                             doc = doc_type._from_son(ref) | ||||||
|  |                         object_map[doc.id] = doc | ||||||
|  |         return object_map | ||||||
|  |  | ||||||
|  |     def _attach_objects(self, items, depth=0, instance=None, name=None): | ||||||
|  |         """ | ||||||
|  |         Recursively finds all db references to be dereferenced | ||||||
|  |  | ||||||
|  |         :param items: The iterable (dict, list, queryset) | ||||||
|  |         :param depth: The current depth of recursion | ||||||
|  |         :param instance: The owning instance used for tracking changes by | ||||||
|  |             :class:`~mongoengine.base.ComplexBaseField` | ||||||
|  |         :param name: The name of the field, used for tracking changes by | ||||||
|  |             :class:`~mongoengine.base.ComplexBaseField` | ||||||
|  |         """ | ||||||
|  |         if not items: | ||||||
|  |             if isinstance(items, (BaseDict, BaseList)): | ||||||
|  |                 return items | ||||||
|  |  | ||||||
|  |             if instance: | ||||||
|  |                 if isinstance(items, dict): | ||||||
|  |                     return BaseDict(items, instance, name) | ||||||
|  |                 else: | ||||||
|  |                     return BaseList(items, instance, name) | ||||||
|  |  | ||||||
|  |         if isinstance(items, (dict, SON)): | ||||||
|  |             if '_ref' in items: | ||||||
|  |                 return self.object_map.get(items['_ref'].id, items) | ||||||
|  |             elif '_types' in items and '_cls' in items: | ||||||
|  |                 doc = get_document(items['_cls'])._from_son(items) | ||||||
|  |                 doc._data = self._attach_objects(doc._data, depth, doc, name) | ||||||
|  |                 return doc | ||||||
|  |  | ||||||
|  |         if not hasattr(items, 'items'): | ||||||
|  |             is_list = True | ||||||
|  |             iterator = enumerate(items) | ||||||
|  |             data = [] | ||||||
|  |         else: | ||||||
|  |             is_list = False | ||||||
|  |             iterator = items.iteritems() | ||||||
|  |             data = {} | ||||||
|  |  | ||||||
|  |         depth += 1 | ||||||
|  |         for k, v in iterator: | ||||||
|  |             if is_list: | ||||||
|  |                 data.append(v) | ||||||
|  |             else: | ||||||
|  |                 data[k] = v | ||||||
|  |  | ||||||
|  |             if k in self.object_map and not is_list: | ||||||
|  |                 data[k] = self.object_map[k] | ||||||
|  |             elif hasattr(v, '_fields'): | ||||||
|  |                 for field_name, field in v._fields.iteritems(): | ||||||
|  |                     v = data[k]._data.get(field_name, None) | ||||||
|  |                     if isinstance(v, (DBRef)): | ||||||
|  |                         data[k]._data[field_name] = self.object_map.get(v.id, v) | ||||||
|  |                     elif isinstance(v, (dict, SON)) and '_ref' in v: | ||||||
|  |                         data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v) | ||||||
|  |                     elif isinstance(v, dict) and depth <= self.max_depth: | ||||||
|  |                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) | ||||||
|  |                     elif isinstance(v, (list, tuple)) and depth <= self.max_depth: | ||||||
|  |                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) | ||||||
|  |             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|  |                 data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name) | ||||||
|  |             elif hasattr(v, 'id'): | ||||||
|  |                 data[k] = self.object_map.get(v.id, v) | ||||||
|  |  | ||||||
|  |         if instance and name: | ||||||
|  |             if is_list: | ||||||
|  |                 return BaseList(data, instance, name) | ||||||
|  |             return BaseDict(data, instance, name) | ||||||
|  |         depth += 1 | ||||||
|  |         return data | ||||||
| @@ -1,35 +1,79 @@ | |||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from django.utils.hashcompat import md5_constructor, sha_constructor |  | ||||||
| from django.utils.encoding import smart_str |  | ||||||
| from django.contrib.auth.models import AnonymousUser |  | ||||||
|  |  | ||||||
| import datetime | import datetime | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  | from django.utils.encoding import smart_str | ||||||
|  | from django.contrib.auth.models import AnonymousUser | ||||||
|  | from django.utils.translation import ugettext_lazy as _ | ||||||
|  |  | ||||||
|  | try: | ||||||
|  |     from django.contrib.auth.hashers import check_password, make_password | ||||||
|  | except ImportError: | ||||||
|  |     """Handle older versions of Django""" | ||||||
|  |     from django.utils.hashcompat import md5_constructor, sha_constructor | ||||||
|  |  | ||||||
|  |     def get_hexdigest(algorithm, salt, raw_password): | ||||||
|  |         raw_password, salt = smart_str(raw_password), smart_str(salt) | ||||||
|  |         if algorithm == 'md5': | ||||||
|  |             return md5_constructor(salt + raw_password).hexdigest() | ||||||
|  |         elif algorithm == 'sha1': | ||||||
|  |             return sha_constructor(salt + raw_password).hexdigest() | ||||||
|  |         raise ValueError('Got unknown password algorithm type in password') | ||||||
|  |  | ||||||
|  |     def check_password(raw_password, password): | ||||||
|  |         algo, salt, hash = password.split('$') | ||||||
|  |         return hash == get_hexdigest(algo, salt, raw_password) | ||||||
|  |  | ||||||
|  |     def make_password(raw_password): | ||||||
|  |         from random import random | ||||||
|  |         algo = 'sha1' | ||||||
|  |         salt = get_hexdigest(algo, str(random()), str(random()))[:5] | ||||||
|  |         hash = get_hexdigest(algo, salt, raw_password) | ||||||
|  |         return '%s$%s$%s' % (algo, salt, hash) | ||||||
|  |  | ||||||
|  |  | ||||||
| REDIRECT_FIELD_NAME = 'next' | REDIRECT_FIELD_NAME = 'next' | ||||||
|  |  | ||||||
| def get_hexdigest(algorithm, salt, raw_password): |  | ||||||
|     raw_password, salt = smart_str(raw_password), smart_str(salt) |  | ||||||
|     if algorithm == 'md5': |  | ||||||
|         return md5_constructor(salt + raw_password).hexdigest() |  | ||||||
|     elif algorithm == 'sha1': |  | ||||||
|         return sha_constructor(salt + raw_password).hexdigest() |  | ||||||
|     raise ValueError('Got unknown password algorithm type in password') |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class User(Document): | class User(Document): | ||||||
|     """A User document that aims to mirror most of the API specified by Django |     """A User document that aims to mirror most of the API specified by Django | ||||||
|     at http://docs.djangoproject.com/en/dev/topics/auth/#users |     at http://docs.djangoproject.com/en/dev/topics/auth/#users | ||||||
|     """ |     """ | ||||||
|     username = StringField(max_length=30, required=True) |     username = StringField(max_length=30, required=True, | ||||||
|     first_name = StringField(max_length=30) |                            verbose_name=_('username'), | ||||||
|     last_name = StringField(max_length=30) |                            help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters")) | ||||||
|     email = StringField() |  | ||||||
|     password = StringField(max_length=128) |     first_name = StringField(max_length=30, | ||||||
|     is_staff = BooleanField(default=False) |                              verbose_name=_('first name')) | ||||||
|     is_active = BooleanField(default=True) |  | ||||||
|     is_superuser = BooleanField(default=False) |     last_name = StringField(max_length=30, | ||||||
|     last_login = DateTimeField(default=datetime.datetime.now) |                             verbose_name=_('last name')) | ||||||
|  |     email = EmailField(verbose_name=_('e-mail address')) | ||||||
|  |     password = StringField(max_length=128, | ||||||
|  |                            verbose_name=_('password'), | ||||||
|  |                            help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>.")) | ||||||
|  |     is_staff = BooleanField(default=False, | ||||||
|  |                             verbose_name=_('staff status'), | ||||||
|  |                             help_text=_("Designates whether the user can log into this admin site.")) | ||||||
|  |     is_active = BooleanField(default=True, | ||||||
|  |                              verbose_name=_('active'), | ||||||
|  |                              help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts.")) | ||||||
|  |     is_superuser = BooleanField(default=False, | ||||||
|  |                                 verbose_name=_('superuser status'), | ||||||
|  |                                 help_text=_("Designates that this user has all permissions without explicitly assigning them.")) | ||||||
|  |     last_login = DateTimeField(default=datetime.datetime.now, | ||||||
|  |                                verbose_name=_('last login')) | ||||||
|  |     date_joined = DateTimeField(default=datetime.datetime.now, | ||||||
|  |                                 verbose_name=_('date joined')) | ||||||
|  |  | ||||||
|  |     meta = { | ||||||
|  |         'allow_inheritance': True, | ||||||
|  |         'indexes': [ | ||||||
|  |             {'fields': ['username'], 'unique': True} | ||||||
|  |         ] | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     def __unicode__(self): | ||||||
|  |         return self.username | ||||||
|  |  | ||||||
|     def get_full_name(self): |     def get_full_name(self): | ||||||
|         """Returns the users first and last names, separated by a space. |         """Returns the users first and last names, separated by a space. | ||||||
| @@ -48,11 +92,9 @@ class User(Document): | |||||||
|         assigning to :attr:`~mongoengine.django.auth.User.password` as the |         assigning to :attr:`~mongoengine.django.auth.User.password` as the | ||||||
|         password is hashed before storage. |         password is hashed before storage. | ||||||
|         """ |         """ | ||||||
|         from random import random |         self.password = make_password(raw_password) | ||||||
|         algo = 'sha1' |         self.save() | ||||||
|         salt = get_hexdigest(algo, str(random()), str(random()))[:5] |         return self | ||||||
|         hash = get_hexdigest(algo, salt, raw_password) |  | ||||||
|         self.password = '%s$%s$%s' % (algo, salt, hash) |  | ||||||
|  |  | ||||||
|     def check_password(self, raw_password): |     def check_password(self, raw_password): | ||||||
|         """Checks the user's password against a provided password - always use |         """Checks the user's password against a provided password - always use | ||||||
| @@ -60,24 +102,42 @@ class User(Document): | |||||||
|         :attr:`~mongoengine.django.auth.User.password` as the password is |         :attr:`~mongoengine.django.auth.User.password` as the password is | ||||||
|         hashed before storage. |         hashed before storage. | ||||||
|         """ |         """ | ||||||
|         algo, salt, hash = self.password.split('$') |         return check_password(raw_password, self.password) | ||||||
|         return hash == get_hexdigest(algo, salt, raw_password) |  | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def create_user(cls, username, password, email=None): |     def create_user(cls, username, password, email=None): | ||||||
|         """Create (and save) a new user with the given username, password and |         """Create (and save) a new user with the given username, password and | ||||||
|         email address. |         email address. | ||||||
|         """ |         """ | ||||||
|         user = User(username=username, email=email) |         now = datetime.datetime.now() | ||||||
|  |  | ||||||
|  |         # Normalize the address by lowercasing the domain part of the email | ||||||
|  |         # address. | ||||||
|  |         if email is not None: | ||||||
|  |             try: | ||||||
|  |                 email_name, domain_part = email.strip().split('@', 1) | ||||||
|  |             except ValueError: | ||||||
|  |                 pass | ||||||
|  |             else: | ||||||
|  |                 email = '@'.join([email_name, domain_part.lower()]) | ||||||
|  |  | ||||||
|  |         user = cls(username=username, email=email, date_joined=now) | ||||||
|         user.set_password(password) |         user.set_password(password) | ||||||
|         user.save() |         user.save() | ||||||
|         return user |         return user | ||||||
|  |  | ||||||
|  |     def get_and_delete_messages(self): | ||||||
|  |         return [] | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoEngineBackend(object): | class MongoEngineBackend(object): | ||||||
|     """Authenticate using MongoEngine and mongoengine.django.auth.User. |     """Authenticate using MongoEngine and mongoengine.django.auth.User. | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|  |     supports_object_permissions = False | ||||||
|  |     supports_anonymous_user = False | ||||||
|  |     supports_inactive_user = False | ||||||
|  |  | ||||||
|     def authenticate(self, username=None, password=None): |     def authenticate(self, username=None, password=None): | ||||||
|         user = User.objects(username=username).first() |         user = User.objects(username=username).first() | ||||||
|         if user: |         if user: | ||||||
|   | |||||||
| @@ -1,3 +1,6 @@ | |||||||
|  | from datetime import datetime | ||||||
|  |  | ||||||
|  | from django.conf import settings | ||||||
| from django.contrib.sessions.backends.base import SessionBase, CreateError | from django.contrib.sessions.backends.base import SessionBase, CreateError | ||||||
| from django.core.exceptions import SuspiciousOperation | from django.core.exceptions import SuspiciousOperation | ||||||
| from django.utils.encoding import force_unicode | from django.utils.encoding import force_unicode | ||||||
| @@ -5,8 +8,12 @@ from django.utils.encoding import force_unicode | |||||||
| from mongoengine.document import Document | from mongoengine.document import Document | ||||||
| from mongoengine import fields | from mongoengine import fields | ||||||
| from mongoengine.queryset import OperationError | from mongoengine.queryset import OperationError | ||||||
|  | from mongoengine.connection import DEFAULT_CONNECTION_NAME | ||||||
|  |  | ||||||
| from datetime import datetime |  | ||||||
|  | MONGOENGINE_SESSION_DB_ALIAS = getattr( | ||||||
|  |     settings, 'MONGOENGINE_SESSION_DB_ALIAS', | ||||||
|  |     DEFAULT_CONNECTION_NAME) | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoSession(Document): | class MongoSession(Document): | ||||||
| @@ -14,7 +21,9 @@ class MongoSession(Document): | |||||||
|     session_data = fields.StringField() |     session_data = fields.StringField() | ||||||
|     expire_date = fields.DateTimeField() |     expire_date = fields.DateTimeField() | ||||||
|  |  | ||||||
|     meta = {'collection': 'django_session', 'allow_inheritance': False} |     meta = {'collection': 'django_session', | ||||||
|  |             'db_alias': MONGOENGINE_SESSION_DB_ALIAS, | ||||||
|  |             'allow_inheritance': False} | ||||||
|  |  | ||||||
|  |  | ||||||
| class SessionStore(SessionBase): | class SessionStore(SessionBase): | ||||||
| @@ -35,7 +44,7 @@ class SessionStore(SessionBase): | |||||||
|  |  | ||||||
|     def create(self): |     def create(self): | ||||||
|         while True: |         while True: | ||||||
|             self.session_key = self._get_new_session_key() |             self._session_key = self._get_new_session_key() | ||||||
|             try: |             try: | ||||||
|                 self.save(must_create=True) |                 self.save(must_create=True) | ||||||
|             except CreateError: |             except CreateError: | ||||||
| @@ -45,6 +54,8 @@ class SessionStore(SessionBase): | |||||||
|             return |             return | ||||||
|  |  | ||||||
|     def save(self, must_create=False): |     def save(self, must_create=False): | ||||||
|  |         if self.session_key is None: | ||||||
|  |             self._session_key = self._get_new_session_key() | ||||||
|         s = MongoSession(session_key=self.session_key) |         s = MongoSession(session_key=self.session_key) | ||||||
|         s.session_data = self.encode(self._get_session(no_load=must_create)) |         s.session_data = self.encode(self._get_session(no_load=must_create)) | ||||||
|         s.expire_date = self.get_expiry_date() |         s.expire_date = self.get_expiry_date() | ||||||
|   | |||||||
							
								
								
									
										47
									
								
								mongoengine/django/shortcuts.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										47
									
								
								mongoengine/django/shortcuts.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,47 @@ | |||||||
|  | from mongoengine.queryset import QuerySet | ||||||
|  | from mongoengine.base import BaseDocument | ||||||
|  | from mongoengine.base import ValidationError | ||||||
|  |  | ||||||
|  | def _get_queryset(cls): | ||||||
|  |     """Inspired by django.shortcuts.*""" | ||||||
|  |     if isinstance(cls, QuerySet): | ||||||
|  |         return cls | ||||||
|  |     else: | ||||||
|  |         return cls.objects | ||||||
|  |  | ||||||
|  | def get_document_or_404(cls, *args, **kwargs): | ||||||
|  |     """ | ||||||
|  |     Uses get() to return an document, or raises a Http404 exception if the document | ||||||
|  |     does not exist. | ||||||
|  |  | ||||||
|  |     cls may be a Document or QuerySet object. All other passed | ||||||
|  |     arguments and keyword arguments are used in the get() query. | ||||||
|  |  | ||||||
|  |     Note: Like with get(), an MultipleObjectsReturned will be raised if more than one | ||||||
|  |     object is found. | ||||||
|  |  | ||||||
|  |     Inspired by django.shortcuts.* | ||||||
|  |     """ | ||||||
|  |     queryset = _get_queryset(cls) | ||||||
|  |     try: | ||||||
|  |         return queryset.get(*args, **kwargs) | ||||||
|  |     except (queryset._document.DoesNotExist, ValidationError): | ||||||
|  |         from django.http import Http404 | ||||||
|  |         raise Http404('No %s matches the given query.' % queryset._document._class_name) | ||||||
|  |  | ||||||
|  | def get_list_or_404(cls, *args, **kwargs): | ||||||
|  |     """ | ||||||
|  |     Uses filter() to return a list of documents, or raise a Http404 exception if | ||||||
|  |     the list is empty. | ||||||
|  |  | ||||||
|  |     cls may be a Document or QuerySet object. All other passed | ||||||
|  |     arguments and keyword arguments are used in the filter() query. | ||||||
|  |  | ||||||
|  |     Inspired by django.shortcuts.* | ||||||
|  |     """ | ||||||
|  |     queryset = _get_queryset(cls) | ||||||
|  |     obj_list = list(queryset.filter(*args, **kwargs)) | ||||||
|  |     if not obj_list: | ||||||
|  |         from django.http import Http404 | ||||||
|  |         raise Http404('No %s matches the given query.' % queryset._document._class_name) | ||||||
|  |     return obj_list | ||||||
							
								
								
									
										112
									
								
								mongoengine/django/storage.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										112
									
								
								mongoengine/django/storage.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,112 @@ | |||||||
|  | import os | ||||||
|  | import itertools | ||||||
|  | import urlparse | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from django.conf import settings | ||||||
|  | from django.core.files.storage import Storage | ||||||
|  | from django.core.exceptions import ImproperlyConfigured | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class FileDocument(Document): | ||||||
|  |     """A document used to store a single file in GridFS. | ||||||
|  |     """ | ||||||
|  |     file = FileField() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class GridFSStorage(Storage): | ||||||
|  |     """A custom storage backend to store files in GridFS | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     def __init__(self, base_url=None): | ||||||
|  |  | ||||||
|  |         if base_url is None: | ||||||
|  |             base_url = settings.MEDIA_URL | ||||||
|  |         self.base_url = base_url | ||||||
|  |         self.document = FileDocument | ||||||
|  |         self.field = 'file' | ||||||
|  |  | ||||||
|  |     def delete(self, name): | ||||||
|  |         """Deletes the specified file from the storage system. | ||||||
|  |         """ | ||||||
|  |         if self.exists(name): | ||||||
|  |             doc = self.document.objects.first() | ||||||
|  |             field = getattr(doc, self.field) | ||||||
|  |             self._get_doc_with_name(name).delete()  # Delete the FileField | ||||||
|  |             field.delete()                          # Delete the FileDocument | ||||||
|  |  | ||||||
|  |     def exists(self, name): | ||||||
|  |         """Returns True if a file referened by the given name already exists in the | ||||||
|  |         storage system, or False if the name is available for a new file. | ||||||
|  |         """ | ||||||
|  |         doc = self._get_doc_with_name(name) | ||||||
|  |         if doc: | ||||||
|  |             field = getattr(doc, self.field) | ||||||
|  |             return bool(field.name) | ||||||
|  |         else: | ||||||
|  |             return False | ||||||
|  |  | ||||||
|  |     def listdir(self, path=None): | ||||||
|  |         """Lists the contents of the specified path, returning a 2-tuple of lists; | ||||||
|  |         the first item being directories, the second item being files. | ||||||
|  |         """ | ||||||
|  |         def name(doc): | ||||||
|  |             return getattr(doc, self.field).name | ||||||
|  |         docs = self.document.objects | ||||||
|  |         return [], [name(d) for d in docs if name(d)] | ||||||
|  |  | ||||||
|  |     def size(self, name): | ||||||
|  |         """Returns the total size, in bytes, of the file specified by name. | ||||||
|  |         """ | ||||||
|  |         doc = self._get_doc_with_name(name) | ||||||
|  |         if doc: | ||||||
|  |             return getattr(doc, self.field).length | ||||||
|  |         else: | ||||||
|  |             raise ValueError("No such file or directory: '%s'" % name) | ||||||
|  |  | ||||||
|  |     def url(self, name): | ||||||
|  |         """Returns an absolute URL where the file's contents can be accessed | ||||||
|  |         directly by a web browser. | ||||||
|  |         """ | ||||||
|  |         if self.base_url is None: | ||||||
|  |             raise ValueError("This file is not accessible via a URL.") | ||||||
|  |         return urlparse.urljoin(self.base_url, name).replace('\\', '/') | ||||||
|  |  | ||||||
|  |     def _get_doc_with_name(self, name): | ||||||
|  |         """Find the documents in the store with the given name | ||||||
|  |         """ | ||||||
|  |         docs = self.document.objects | ||||||
|  |         doc = [d for d in docs if getattr(d, self.field).name == name] | ||||||
|  |         if doc: | ||||||
|  |             return doc[0] | ||||||
|  |         else: | ||||||
|  |             return None | ||||||
|  |  | ||||||
|  |     def _open(self, name, mode='rb'): | ||||||
|  |         doc = self._get_doc_with_name(name) | ||||||
|  |         if doc: | ||||||
|  |             return getattr(doc, self.field) | ||||||
|  |         else: | ||||||
|  |             raise ValueError("No file found with the name '%s'." % name) | ||||||
|  |  | ||||||
|  |     def get_available_name(self, name): | ||||||
|  |         """Returns a filename that's free on the target storage system, and | ||||||
|  |         available for new content to be written to. | ||||||
|  |         """ | ||||||
|  |         file_root, file_ext = os.path.splitext(name) | ||||||
|  |         # If the filename already exists, add an underscore and a number (before | ||||||
|  |         # the file extension, if one exists) to the filename until the generated | ||||||
|  |         # filename doesn't exist. | ||||||
|  |         count = itertools.count(1) | ||||||
|  |         while self.exists(name): | ||||||
|  |             # file_ext includes the dot. | ||||||
|  |             name = os.path.join("%s_%s%s" % (file_root, count.next(), file_ext)) | ||||||
|  |  | ||||||
|  |         return name | ||||||
|  |  | ||||||
|  |     def _save(self, name, content): | ||||||
|  |         doc = self.document() | ||||||
|  |         getattr(doc, self.field).put(content, filename=name) | ||||||
|  |         doc.save() | ||||||
|  |  | ||||||
|  |         return name | ||||||
							
								
								
									
										39
									
								
								mongoengine/django/tests.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										39
									
								
								mongoengine/django/tests.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,39 @@ | |||||||
|  | #coding: utf-8 | ||||||
|  | from nose.plugins.skip import SkipTest | ||||||
|  |  | ||||||
|  | from mongoengine.python_support import PY3 | ||||||
|  | from mongoengine import connect | ||||||
|  |  | ||||||
|  | try: | ||||||
|  |     from django.test import TestCase | ||||||
|  |     from django.conf import settings | ||||||
|  | except Exception as err: | ||||||
|  |     if PY3: | ||||||
|  |         from unittest import TestCase | ||||||
|  |         # Dummy value so no error | ||||||
|  |         class settings: | ||||||
|  |             MONGO_DATABASE_NAME = 'dummy' | ||||||
|  |     else: | ||||||
|  |         raise err | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class MongoTestCase(TestCase): | ||||||
|  |  | ||||||
|  |     def setUp(self): | ||||||
|  |         if PY3: | ||||||
|  |             raise SkipTest('django does not have Python 3 support') | ||||||
|  |  | ||||||
|  |     """ | ||||||
|  |     TestCase class that clear the collection between the tests | ||||||
|  |     """ | ||||||
|  |     db_name = 'test_%s' % settings.MONGO_DATABASE_NAME | ||||||
|  |     def __init__(self, methodName='runtest'): | ||||||
|  |         self.db = connect(self.db_name).get_db() | ||||||
|  |         super(MongoTestCase, self).__init__(methodName) | ||||||
|  |  | ||||||
|  |     def _post_teardown(self): | ||||||
|  |         super(MongoTestCase, self)._post_teardown() | ||||||
|  |         for collection in self.db.collection_names(): | ||||||
|  |             if collection == 'system.indexes': | ||||||
|  |                 continue | ||||||
|  |             self.db.drop_collection(collection) | ||||||
| @@ -1,12 +1,23 @@ | |||||||
| from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, | import warnings | ||||||
|                   ValidationError) |  | ||||||
| from queryset import OperationError |  | ||||||
| from connection import _get_db |  | ||||||
|  |  | ||||||
| import pymongo | import pymongo | ||||||
|  | import re | ||||||
|  |  | ||||||
|  | from bson.dbref import DBRef | ||||||
|  | from mongoengine import signals, queryset | ||||||
|  |  | ||||||
|  | from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, | ||||||
|  |                   BaseDict, BaseList) | ||||||
|  | from queryset import OperationError, NotUniqueError | ||||||
|  | from connection import get_db, DEFAULT_CONNECTION_NAME | ||||||
|  |  | ||||||
|  | __all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument', | ||||||
|  |            'DynamicEmbeddedDocument', 'OperationError', | ||||||
|  |            'InvalidCollectionError', 'NotUniqueError'] | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ['Document', 'EmbeddedDocument', 'ValidationError', 'OperationError'] | class InvalidCollectionError(Exception): | ||||||
|  |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
| class EmbeddedDocument(BaseDocument): | class EmbeddedDocument(BaseDocument): | ||||||
| @@ -16,8 +27,31 @@ class EmbeddedDocument(BaseDocument): | |||||||
|     :class:`~mongoengine.EmbeddedDocumentField` field type. |     :class:`~mongoengine.EmbeddedDocumentField` field type. | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|  |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|  |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|  |     my_metaclass  = DocumentMetaclass | ||||||
|     __metaclass__ = DocumentMetaclass |     __metaclass__ = DocumentMetaclass | ||||||
|  |  | ||||||
|  |     def __init__(self, *args, **kwargs): | ||||||
|  |         super(EmbeddedDocument, self).__init__(*args, **kwargs) | ||||||
|  |         self._changed_fields = [] | ||||||
|  |  | ||||||
|  |     def __delattr__(self, *args, **kwargs): | ||||||
|  |         """Handle deletions of fields""" | ||||||
|  |         field_name = args[0] | ||||||
|  |         if field_name in self._fields: | ||||||
|  |             default = self._fields[field_name].default | ||||||
|  |             if callable(default): | ||||||
|  |                 default = default() | ||||||
|  |             setattr(self, field_name, default) | ||||||
|  |         else: | ||||||
|  |             super(EmbeddedDocument, self).__delattr__(*args, **kwargs) | ||||||
|  |  | ||||||
|  |     def __eq__(self, other): | ||||||
|  |         if isinstance(other, self.__class__): | ||||||
|  |             return self._data == other._data | ||||||
|  |         return False | ||||||
|  |  | ||||||
|  |  | ||||||
| class Document(BaseDocument): | class Document(BaseDocument): | ||||||
|     """The base class used for defining the structure and properties of |     """The base class used for defining the structure and properties of | ||||||
| @@ -52,11 +86,77 @@ class Document(BaseDocument): | |||||||
|     dictionary. The value should be a list of field names or tuples of field |     dictionary. The value should be a list of field names or tuples of field | ||||||
|     names. Index direction may be specified by prefixing the field names with |     names. Index direction may be specified by prefixing the field names with | ||||||
|     a **+** or **-** sign. |     a **+** or **-** sign. | ||||||
|  |  | ||||||
|  |     Automatic index creation can be disabled by specifying | ||||||
|  |     attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to | ||||||
|  |     False then indexes will not be created by MongoEngine.  This is useful in | ||||||
|  |     production systems where index creation is performed as part of a deployment | ||||||
|  |     system. | ||||||
|  |  | ||||||
|  |     By default, _types will be added to the start of every index (that | ||||||
|  |     doesn't contain a list) if allow_inheritance is True. This can be | ||||||
|  |     disabled by either setting types to False on the specific index or | ||||||
|  |     by setting index_types to False on the meta dictionary for the document. | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|  |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|  |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|  |     my_metaclass  = TopLevelDocumentMetaclass | ||||||
|     __metaclass__ = TopLevelDocumentMetaclass |     __metaclass__ = TopLevelDocumentMetaclass | ||||||
|  |  | ||||||
|     def save(self, safe=True, force_insert=False): |     def pk(): | ||||||
|  |         """Primary key alias | ||||||
|  |         """ | ||||||
|  |         def fget(self): | ||||||
|  |             return getattr(self, self._meta['id_field']) | ||||||
|  |         def fset(self, value): | ||||||
|  |             return setattr(self, self._meta['id_field'], value) | ||||||
|  |         return property(fget, fset) | ||||||
|  |     pk = pk() | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def _get_db(cls): | ||||||
|  |         """Some Model using other db_alias""" | ||||||
|  |         return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME )) | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def _get_collection(cls): | ||||||
|  |         """Returns the collection for the document.""" | ||||||
|  |         if not hasattr(cls, '_collection') or cls._collection is None: | ||||||
|  |             db = cls._get_db() | ||||||
|  |             collection_name = cls._get_collection_name() | ||||||
|  |             # Create collection as a capped collection if specified | ||||||
|  |             if cls._meta['max_size'] or cls._meta['max_documents']: | ||||||
|  |                 # Get max document limit and max byte size from meta | ||||||
|  |                 max_size = cls._meta['max_size'] or 10000000  # 10MB default | ||||||
|  |                 max_documents = cls._meta['max_documents'] | ||||||
|  |  | ||||||
|  |                 if collection_name in db.collection_names(): | ||||||
|  |                     cls._collection = db[collection_name] | ||||||
|  |                     # The collection already exists, check if its capped | ||||||
|  |                     # options match the specified capped options | ||||||
|  |                     options = cls._collection.options() | ||||||
|  |                     if options.get('max') != max_documents or \ | ||||||
|  |                        options.get('size') != max_size: | ||||||
|  |                         msg = (('Cannot create collection "%s" as a capped ' | ||||||
|  |                                'collection as it already exists') | ||||||
|  |                                 % cls._collection) | ||||||
|  |                         raise InvalidCollectionError(msg) | ||||||
|  |                 else: | ||||||
|  |                     # Create the collection as a capped collection | ||||||
|  |                     opts = {'capped': True, 'size': max_size} | ||||||
|  |                     if max_documents: | ||||||
|  |                         opts['max'] = max_documents | ||||||
|  |                     cls._collection = db.create_collection( | ||||||
|  |                         collection_name, **opts | ||||||
|  |                     ) | ||||||
|  |             else: | ||||||
|  |                 cls._collection = db[collection_name] | ||||||
|  |         return cls._collection | ||||||
|  |  | ||||||
|  |     def save(self, safe=True, force_insert=False, validate=True, | ||||||
|  |              write_options=None,  cascade=None, cascade_kwargs=None, | ||||||
|  |              _refs=None): | ||||||
|         """Save the :class:`~mongoengine.Document` to the database. If the |         """Save the :class:`~mongoengine.Document` to the database. If the | ||||||
|         document already exists, it will be updated, otherwise it will be |         document already exists, it will be updated, otherwise it will be | ||||||
|         created. |         created. | ||||||
| @@ -67,22 +167,156 @@ class Document(BaseDocument): | |||||||
|         :param safe: check if the operation succeeded before returning |         :param safe: check if the operation succeeded before returning | ||||||
|         :param force_insert: only try to create a new document, don't allow |         :param force_insert: only try to create a new document, don't allow | ||||||
|             updates of existing documents |             updates of existing documents | ||||||
|  |         :param validate: validates the document; set to ``False`` to skip. | ||||||
|  |         :param write_options: Extra keyword arguments are passed down to | ||||||
|  |             :meth:`~pymongo.collection.Collection.save` OR | ||||||
|  |             :meth:`~pymongo.collection.Collection.insert` | ||||||
|  |             which will be used as options for the resultant | ||||||
|  |             ``getLastError`` command.  For example, | ||||||
|  |             ``save(..., write_options={w: 2, fsync: True}, ...)`` will | ||||||
|  |             wait until at least two servers have recorded the write and | ||||||
|  |             will force an fsync on the primary server. | ||||||
|  |         :param cascade: Sets the flag for cascading saves.  You can set a | ||||||
|  |             default by setting "cascade" in the document __meta__ | ||||||
|  |         :param cascade_kwargs: optional kwargs dictionary to be passed throw | ||||||
|  |             to cascading saves | ||||||
|  |         :param _refs: A list of processed references used in cascading saves | ||||||
|  |  | ||||||
|  |         .. versionchanged:: 0.5 | ||||||
|  |             In existing documents it only saves changed fields using | ||||||
|  |             set / unset.  Saves are cascaded and any | ||||||
|  |             :class:`~bson.dbref.DBRef` objects that have changes are | ||||||
|  |             saved as well. | ||||||
|  |         .. versionchanged:: 0.6 | ||||||
|  |             Cascade saves are optional = defaults to True, if you want | ||||||
|  |             fine grain control then you can turn off using document | ||||||
|  |             meta['cascade'] = False  Also you can pass different kwargs to | ||||||
|  |             the cascade save using cascade_kwargs which overwrites the | ||||||
|  |             existing kwargs with custom values | ||||||
|         """ |         """ | ||||||
|         self.validate() |         signals.pre_save.send(self.__class__, document=self) | ||||||
|  |  | ||||||
|  |         if validate: | ||||||
|  |             self.validate() | ||||||
|  |  | ||||||
|  |         if not write_options: | ||||||
|  |             write_options = {} | ||||||
|  |  | ||||||
|         doc = self.to_mongo() |         doc = self.to_mongo() | ||||||
|  |  | ||||||
|  |         created = force_insert or '_id' not in doc | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             collection = self.__class__.objects._collection |             collection = self.__class__.objects._collection | ||||||
|             if force_insert: |             if created: | ||||||
|                 object_id = collection.insert(doc, safe=safe) |                 if force_insert: | ||||||
|  |                     object_id = collection.insert(doc, safe=safe, | ||||||
|  |                                                   **write_options) | ||||||
|  |                 else: | ||||||
|  |                     object_id = collection.save(doc, safe=safe, | ||||||
|  |                                                 **write_options) | ||||||
|             else: |             else: | ||||||
|                 object_id = collection.save(doc, safe=safe) |                 object_id = doc['_id'] | ||||||
|  |                 updates, removals = self._delta() | ||||||
|  |                 # Need to add shard key to query, or you get an error | ||||||
|  |                 select_dict = {'_id': object_id} | ||||||
|  |                 shard_key = self.__class__._meta.get('shard_key', tuple()) | ||||||
|  |                 for k in shard_key: | ||||||
|  |                     actual_key = self._db_field_map.get(k, k) | ||||||
|  |                     select_dict[actual_key] = doc[actual_key] | ||||||
|  |  | ||||||
|  |                 upsert = self._created | ||||||
|  |                 if updates: | ||||||
|  |                     collection.update(select_dict, {"$set": updates}, | ||||||
|  |                         upsert=upsert, safe=safe, **write_options) | ||||||
|  |                 if removals: | ||||||
|  |                     collection.update(select_dict, {"$unset": removals}, | ||||||
|  |                         upsert=upsert, safe=safe, **write_options) | ||||||
|  |  | ||||||
|  |             warn_cascade = not cascade and 'cascade' not in self._meta | ||||||
|  |             cascade = (self._meta.get('cascade', True) | ||||||
|  |                        if cascade is None else cascade) | ||||||
|  |             if cascade: | ||||||
|  |                 kwargs = { | ||||||
|  |                     "safe": safe, | ||||||
|  |                     "force_insert": force_insert, | ||||||
|  |                     "validate": validate, | ||||||
|  |                     "write_options": write_options, | ||||||
|  |                     "cascade": cascade | ||||||
|  |                 } | ||||||
|  |                 if cascade_kwargs:  # Allow granular control over cascades | ||||||
|  |                     kwargs.update(cascade_kwargs) | ||||||
|  |                 kwargs['_refs'] = _refs | ||||||
|  |                 self.cascade_save(warn_cascade=warn_cascade, **kwargs) | ||||||
|  |  | ||||||
|         except pymongo.errors.OperationFailure, err: |         except pymongo.errors.OperationFailure, err: | ||||||
|             message = 'Could not save document (%s)' |             message = 'Could not save document (%s)' | ||||||
|             if 'duplicate key' in str(err): |             if re.match('^E1100[01] duplicate key', unicode(err)): | ||||||
|                 message = 'Tried to save duplicate unique keys (%s)' |                 # E11000 - duplicate key error index | ||||||
|             raise OperationError(message % str(err)) |                 # E11001 - duplicate key on update | ||||||
|  |                 message = u'Tried to save duplicate unique keys (%s)' | ||||||
|  |                 raise NotUniqueError(message % unicode(err)) | ||||||
|  |             raise OperationError(message % unicode(err)) | ||||||
|         id_field = self._meta['id_field'] |         id_field = self._meta['id_field'] | ||||||
|         self[id_field] = self._fields[id_field].to_python(object_id) |         if id_field not in self._meta.get('shard_key', []): | ||||||
|  |             self[id_field] = self._fields[id_field].to_python(object_id) | ||||||
|  |  | ||||||
|  |         self._changed_fields = [] | ||||||
|  |         self._created = False | ||||||
|  |         signals.post_save.send(self.__class__, document=self, created=created) | ||||||
|  |         return self | ||||||
|  |  | ||||||
|  |     def cascade_save(self, warn_cascade=None, *args, **kwargs): | ||||||
|  |         """Recursively saves any references / | ||||||
|  |            generic references on an objects""" | ||||||
|  |         import fields | ||||||
|  |         _refs = kwargs.get('_refs', []) or [] | ||||||
|  |  | ||||||
|  |         for name, cls in self._fields.items(): | ||||||
|  |             if not isinstance(cls, (fields.ReferenceField, | ||||||
|  |                                     fields.GenericReferenceField)): | ||||||
|  |                 continue | ||||||
|  |  | ||||||
|  |             ref = getattr(self, name) | ||||||
|  |             if not ref or isinstance(ref, DBRef): | ||||||
|  |                 continue | ||||||
|  |  | ||||||
|  |             if not getattr(ref, '_changed_fields', True): | ||||||
|  |                 continue | ||||||
|  |  | ||||||
|  |             ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) | ||||||
|  |             if ref and ref_id not in _refs: | ||||||
|  |                 if warn_cascade: | ||||||
|  |                     msg = ("Cascading saves will default to off in 0.8, " | ||||||
|  |                           "please  explicitly set `.save(cascade=True)`") | ||||||
|  |                     warnings.warn(msg, FutureWarning) | ||||||
|  |                 _refs.append(ref_id) | ||||||
|  |                 kwargs["_refs"] = _refs | ||||||
|  |                 ref.save(**kwargs) | ||||||
|  |                 ref._changed_fields = [] | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def _object_key(self): | ||||||
|  |         """Dict to identify object in collection | ||||||
|  |         """ | ||||||
|  |         select_dict = {'pk': self.pk} | ||||||
|  |         shard_key = self.__class__._meta.get('shard_key', tuple()) | ||||||
|  |         for k in shard_key: | ||||||
|  |             select_dict[k] = getattr(self, k) | ||||||
|  |         return select_dict | ||||||
|  |  | ||||||
|  |     def update(self, **kwargs): | ||||||
|  |         """Performs an update on the :class:`~mongoengine.Document` | ||||||
|  |         A convenience wrapper to :meth:`~mongoengine.QuerySet.update`. | ||||||
|  |  | ||||||
|  |         Raises :class:`OperationError` if called on an object that has not yet | ||||||
|  |         been saved. | ||||||
|  |         """ | ||||||
|  |         if not self.pk: | ||||||
|  |             raise OperationError('attempt to update a document not yet saved') | ||||||
|  |  | ||||||
|  |         # Need to add shard key to query, or you get an error | ||||||
|  |         return self.__class__.objects(**self._object_key).update_one(**kwargs) | ||||||
|  |  | ||||||
|     def delete(self, safe=False): |     def delete(self, safe=False): | ||||||
|         """Delete the :class:`~mongoengine.Document` from the database. This |         """Delete the :class:`~mongoengine.Document` from the database. This | ||||||
| @@ -90,27 +324,171 @@ class Document(BaseDocument): | |||||||
|  |  | ||||||
|         :param safe: check if the operation succeeded before returning |         :param safe: check if the operation succeeded before returning | ||||||
|         """ |         """ | ||||||
|         id_field = self._meta['id_field'] |         signals.pre_delete.send(self.__class__, document=self) | ||||||
|         object_id = self._fields[id_field].to_mongo(self[id_field]) |  | ||||||
|         try: |  | ||||||
|             self.__class__.objects(**{id_field: object_id}).delete(safe=safe) |  | ||||||
|         except pymongo.errors.OperationFailure, err: |  | ||||||
|             raise OperationError('Could not delete document (%s)' % str(err)) |  | ||||||
|  |  | ||||||
|     def reload(self): |         try: | ||||||
|  |             self.__class__.objects(**self._object_key).delete(safe=safe) | ||||||
|  |         except pymongo.errors.OperationFailure, err: | ||||||
|  |             message = u'Could not delete document (%s)' % err.message | ||||||
|  |             raise OperationError(message) | ||||||
|  |  | ||||||
|  |         signals.post_delete.send(self.__class__, document=self) | ||||||
|  |  | ||||||
|  |     def select_related(self, max_depth=1): | ||||||
|  |         """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to | ||||||
|  |         a maximum depth in order to cut down the number queries to mongodb. | ||||||
|  |  | ||||||
|  |         .. versionadded:: 0.5 | ||||||
|  |         """ | ||||||
|  |         import dereference | ||||||
|  |         self._data = dereference.DeReference()(self._data, max_depth) | ||||||
|  |         return self | ||||||
|  |  | ||||||
|  |     def reload(self, max_depth=1): | ||||||
|         """Reloads all attributes from the database. |         """Reloads all attributes from the database. | ||||||
|  |  | ||||||
|         .. versionadded:: 0.1.2 |         .. versionadded:: 0.1.2 | ||||||
|  |         .. versionchanged:: 0.6  Now chainable | ||||||
|         """ |         """ | ||||||
|         id_field = self._meta['id_field'] |         id_field = self._meta['id_field'] | ||||||
|         obj = self.__class__.objects(**{id_field: self[id_field]}).first() |         obj = self.__class__.objects( | ||||||
|  |                 **{id_field: self[id_field]} | ||||||
|  |               ).first().select_related(max_depth=max_depth) | ||||||
|         for field in self._fields: |         for field in self._fields: | ||||||
|             setattr(self, field, obj[field]) |             setattr(self, field, self._reload(field, obj[field])) | ||||||
|  |         if self._dynamic: | ||||||
|  |             for name in self._dynamic_fields.keys(): | ||||||
|  |                 setattr(self, name, self._reload(name, obj._data[name])) | ||||||
|  |         self._changed_fields = obj._changed_fields | ||||||
|  |         return obj | ||||||
|  |  | ||||||
|  |     def _reload(self, key, value): | ||||||
|  |         """Used by :meth:`~mongoengine.Document.reload` to ensure the | ||||||
|  |         correct instance is linked to self. | ||||||
|  |         """ | ||||||
|  |         if isinstance(value, BaseDict): | ||||||
|  |             value = [(k, self._reload(k, v)) for k, v in value.items()] | ||||||
|  |             value = BaseDict(value, self, key) | ||||||
|  |         elif isinstance(value, BaseList): | ||||||
|  |             value = [self._reload(key, v) for v in value] | ||||||
|  |             value = BaseList(value, self, key) | ||||||
|  |         elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)): | ||||||
|  |             value._changed_fields = [] | ||||||
|  |         return value | ||||||
|  |  | ||||||
|  |     def to_dbref(self): | ||||||
|  |         """Returns an instance of :class:`~bson.dbref.DBRef` useful in | ||||||
|  |         `__raw__` queries.""" | ||||||
|  |         if not self.pk: | ||||||
|  |             msg = "Only saved documents can have a valid dbref" | ||||||
|  |             raise OperationError(msg) | ||||||
|  |         return DBRef(self.__class__._get_collection_name(), self.pk) | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def register_delete_rule(cls, document_cls, field_name, rule): | ||||||
|  |         """This method registers the delete rules to apply when removing this | ||||||
|  |         object. | ||||||
|  |         """ | ||||||
|  |         delete_rules = cls._meta.get('delete_rules') or {} | ||||||
|  |         delete_rules[(document_cls, field_name)] = rule | ||||||
|  |         cls._meta['delete_rules'] = delete_rules | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def drop_collection(cls): |     def drop_collection(cls): | ||||||
|         """Drops the entire collection associated with this |         """Drops the entire collection associated with this | ||||||
|         :class:`~mongoengine.Document` type from the database. |         :class:`~mongoengine.Document` type from the database. | ||||||
|         """ |         """ | ||||||
|         db = _get_db() |         db = cls._get_db() | ||||||
|         db.drop_collection(cls._meta['collection']) |         db.drop_collection(cls._get_collection_name()) | ||||||
|  |         queryset.QuerySet._reset_already_indexed(cls) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DynamicDocument(Document): | ||||||
|  |     """A Dynamic Document class allowing flexible, expandable and uncontrolled | ||||||
|  |     schemas.  As a :class:`~mongoengine.Document` subclass, acts in the same | ||||||
|  |     way as an ordinary document but has expando style properties.  Any data | ||||||
|  |     passed or set against the :class:`~mongoengine.DynamicDocument` that is | ||||||
|  |     not a field is automatically converted into a | ||||||
|  |     :class:`~mongoengine.DynamicField` and data can be attributed to that | ||||||
|  |     field. | ||||||
|  |  | ||||||
|  |     .. note:: | ||||||
|  |  | ||||||
|  |         There is one caveat on Dynamic Documents: fields cannot start with `_` | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|  |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|  |     my_metaclass  = TopLevelDocumentMetaclass | ||||||
|  |     __metaclass__ = TopLevelDocumentMetaclass | ||||||
|  |  | ||||||
|  |     _dynamic = True | ||||||
|  |  | ||||||
|  |     def __delattr__(self, *args, **kwargs): | ||||||
|  |         """Deletes the attribute by setting to None and allowing _delta to unset | ||||||
|  |         it""" | ||||||
|  |         field_name = args[0] | ||||||
|  |         if field_name in self._dynamic_fields: | ||||||
|  |             setattr(self, field_name, None) | ||||||
|  |         else: | ||||||
|  |             super(DynamicDocument, self).__delattr__(*args, **kwargs) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DynamicEmbeddedDocument(EmbeddedDocument): | ||||||
|  |     """A Dynamic Embedded Document class allowing flexible, expandable and | ||||||
|  |     uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more | ||||||
|  |     information about dynamic documents. | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|  |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|  |     my_metaclass  = DocumentMetaclass | ||||||
|  |     __metaclass__ = DocumentMetaclass | ||||||
|  |  | ||||||
|  |     _dynamic = True | ||||||
|  |  | ||||||
|  |     def __delattr__(self, *args, **kwargs): | ||||||
|  |         """Deletes the attribute by setting to None and allowing _delta to unset | ||||||
|  |         it""" | ||||||
|  |         field_name = args[0] | ||||||
|  |         setattr(self, field_name, None) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class MapReduceDocument(object): | ||||||
|  |     """A document returned from a map/reduce query. | ||||||
|  |  | ||||||
|  |     :param collection: An instance of :class:`~pymongo.Collection` | ||||||
|  |     :param key: Document/result key, often an instance of | ||||||
|  |                 :class:`~bson.objectid.ObjectId`. If supplied as | ||||||
|  |                 an ``ObjectId`` found in the given ``collection``, | ||||||
|  |                 the object can be accessed via the ``object`` property. | ||||||
|  |     :param value: The result(s) for this key. | ||||||
|  |  | ||||||
|  |     .. versionadded:: 0.3 | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     def __init__(self, document, collection, key, value): | ||||||
|  |         self._document = document | ||||||
|  |         self._collection = collection | ||||||
|  |         self.key = key | ||||||
|  |         self.value = value | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def object(self): | ||||||
|  |         """Lazy-load the object referenced by ``self.key``. ``self.key`` | ||||||
|  |         should be the ``primary_key``. | ||||||
|  |         """ | ||||||
|  |         id_field = self._document()._meta['id_field'] | ||||||
|  |         id_field_type = type(id_field) | ||||||
|  |  | ||||||
|  |         if not isinstance(self.key, id_field_type): | ||||||
|  |             try: | ||||||
|  |                 self.key = id_field_type(self.key) | ||||||
|  |             except: | ||||||
|  |                 raise Exception("Could not cast key as %s" % \ | ||||||
|  |                                 id_field_type.__name__) | ||||||
|  |  | ||||||
|  |         if not hasattr(self, "_key_object"): | ||||||
|  |             self._key_object = self._document.objects.with_id(self.key) | ||||||
|  |             return self._key_object | ||||||
|  |         return self._key_object | ||||||
|   | |||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										61
									
								
								mongoengine/python_support.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										61
									
								
								mongoengine/python_support.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,61 @@ | |||||||
|  | """Helper functions and types to aid with Python 2.5 - 3 support.""" | ||||||
|  |  | ||||||
|  | import sys | ||||||
|  |  | ||||||
|  | PY3 = sys.version_info[0] == 3 | ||||||
|  | PY25 = sys.version_info[:2] == (2, 5) | ||||||
|  | UNICODE_KWARGS = int(''.join([str(x) for x in sys.version_info[:3]])) > 264 | ||||||
|  |  | ||||||
|  | if PY3: | ||||||
|  |     import codecs | ||||||
|  |     from io import BytesIO as StringIO | ||||||
|  |     # return s converted to binary.  b('test') should be equivalent to b'test' | ||||||
|  |     def b(s): | ||||||
|  |         return codecs.latin_1_encode(s)[0] | ||||||
|  |  | ||||||
|  |     bin_type = bytes | ||||||
|  |     txt_type   = str | ||||||
|  | else: | ||||||
|  |     try: | ||||||
|  |         from cStringIO import StringIO | ||||||
|  |     except ImportError: | ||||||
|  |         from StringIO import StringIO | ||||||
|  |  | ||||||
|  |     # Conversion to binary only necessary in Python 3 | ||||||
|  |     def b(s): | ||||||
|  |         return s | ||||||
|  |  | ||||||
|  |     bin_type = str | ||||||
|  |     txt_type = unicode | ||||||
|  |  | ||||||
|  | str_types = (bin_type, txt_type) | ||||||
|  |  | ||||||
|  | if PY25: | ||||||
|  |     def product(*args, **kwds): | ||||||
|  |         pools = map(tuple, args) * kwds.get('repeat', 1) | ||||||
|  |         result = [[]] | ||||||
|  |         for pool in pools: | ||||||
|  |             result = [x + [y] for x in result for y in pool] | ||||||
|  |         for prod in result: | ||||||
|  |             yield tuple(prod) | ||||||
|  |     reduce = reduce | ||||||
|  | else: | ||||||
|  |     from itertools import product | ||||||
|  |     from functools import reduce | ||||||
|  |  | ||||||
|  |  | ||||||
|  | # For use with Python 2.5 | ||||||
|  | # converts all keys from unicode to str for d and all nested dictionaries | ||||||
|  | def to_str_keys_recursive(d): | ||||||
|  |     if isinstance(d, list): | ||||||
|  |         for val in d: | ||||||
|  |             if isinstance(val, (dict, list)): | ||||||
|  |                 to_str_keys_recursive(val) | ||||||
|  |     elif isinstance(d, dict): | ||||||
|  |         for key, val in d.items(): | ||||||
|  |             if isinstance(val, (dict, list)): | ||||||
|  |                 to_str_keys_recursive(val) | ||||||
|  |             if isinstance(key, unicode): | ||||||
|  |                 d[str(key)] = d.pop(key) | ||||||
|  |     else: | ||||||
|  |         raise ValueError("non list/dict parameter not allowed") | ||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										46
									
								
								mongoengine/signals.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										46
									
								
								mongoengine/signals.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,46 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  |  | ||||||
|  | __all__ = ['pre_init', 'post_init', 'pre_save', 'post_save', | ||||||
|  |            'pre_delete', 'post_delete'] | ||||||
|  |  | ||||||
|  | signals_available = False | ||||||
|  | try: | ||||||
|  |     from blinker import Namespace | ||||||
|  |     signals_available = True | ||||||
|  | except ImportError: | ||||||
|  |     class Namespace(object): | ||||||
|  |         def signal(self, name, doc=None): | ||||||
|  |             return _FakeSignal(name, doc) | ||||||
|  |  | ||||||
|  |     class _FakeSignal(object): | ||||||
|  |         """If blinker is unavailable, create a fake class with the same | ||||||
|  |         interface that allows sending of signals but will fail with an | ||||||
|  |         error on anything else.  Instead of doing anything on send, it | ||||||
|  |         will just ignore the arguments and do nothing instead. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         def __init__(self, name, doc=None): | ||||||
|  |             self.name = name | ||||||
|  |             self.__doc__ = doc | ||||||
|  |  | ||||||
|  |         def _fail(self, *args, **kwargs): | ||||||
|  |             raise RuntimeError('signalling support is unavailable ' | ||||||
|  |                                'because the blinker library is ' | ||||||
|  |                                'not installed.') | ||||||
|  |         send = lambda *a, **kw: None | ||||||
|  |         connect = disconnect = has_receivers_for = receivers_for = \ | ||||||
|  |             temporarily_connected_to = _fail | ||||||
|  |         del _fail | ||||||
|  |  | ||||||
|  | # the namespace for code signals.  If you are not mongoengine code, do | ||||||
|  | # not put signals in here.  Create your own namespace instead. | ||||||
|  | _signals = Namespace() | ||||||
|  |  | ||||||
|  | pre_init = _signals.signal('pre_init') | ||||||
|  | post_init = _signals.signal('post_init') | ||||||
|  | pre_save = _signals.signal('pre_save') | ||||||
|  | post_save = _signals.signal('post_save') | ||||||
|  | pre_delete = _signals.signal('pre_delete') | ||||||
|  | post_delete = _signals.signal('post_delete') | ||||||
|  | pre_bulk_insert = _signals.signal('pre_bulk_insert') | ||||||
|  | post_bulk_insert = _signals.signal('post_bulk_insert') | ||||||
							
								
								
									
										59
									
								
								mongoengine/tests.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										59
									
								
								mongoengine/tests.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,59 @@ | |||||||
|  | from mongoengine.connection import get_db | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class query_counter(object): | ||||||
|  |     """ Query_counter contextmanager to get the number of queries. """ | ||||||
|  |  | ||||||
|  |     def __init__(self): | ||||||
|  |         """ Construct the query_counter. """ | ||||||
|  |         self.counter = 0 | ||||||
|  |         self.db = get_db() | ||||||
|  |  | ||||||
|  |     def __enter__(self): | ||||||
|  |         """ On every with block we need to drop the profile collection. """ | ||||||
|  |         self.db.set_profiling_level(0) | ||||||
|  |         self.db.system.profile.drop() | ||||||
|  |         self.db.set_profiling_level(2) | ||||||
|  |         return self | ||||||
|  |  | ||||||
|  |     def __exit__(self, t, value, traceback): | ||||||
|  |         """ Reset the profiling level. """ | ||||||
|  |         self.db.set_profiling_level(0) | ||||||
|  |  | ||||||
|  |     def __eq__(self, value): | ||||||
|  |         """ == Compare querycounter. """ | ||||||
|  |         return value == self._get_count() | ||||||
|  |  | ||||||
|  |     def __ne__(self, value): | ||||||
|  |         """ != Compare querycounter. """ | ||||||
|  |         return not self.__eq__(value) | ||||||
|  |  | ||||||
|  |     def __lt__(self, value): | ||||||
|  |         """ < Compare querycounter. """ | ||||||
|  |         return self._get_count() < value | ||||||
|  |  | ||||||
|  |     def __le__(self, value): | ||||||
|  |         """ <= Compare querycounter. """ | ||||||
|  |         return self._get_count() <= value | ||||||
|  |  | ||||||
|  |     def __gt__(self, value): | ||||||
|  |         """ > Compare querycounter. """ | ||||||
|  |         return self._get_count() > value | ||||||
|  |  | ||||||
|  |     def __ge__(self, value): | ||||||
|  |         """ >= Compare querycounter. """ | ||||||
|  |         return self._get_count() >= value | ||||||
|  |  | ||||||
|  |     def __int__(self): | ||||||
|  |         """ int representation. """ | ||||||
|  |         return self._get_count() | ||||||
|  |  | ||||||
|  |     def __repr__(self): | ||||||
|  |         """ repr query_counter as the number of queries. """ | ||||||
|  |         return u"%s" % self._get_count() | ||||||
|  |  | ||||||
|  |     def _get_count(self): | ||||||
|  |         """ Get the number of queries. """ | ||||||
|  |         count = self.db.system.profile.find().count() - self.counter | ||||||
|  |         self.counter += 1 | ||||||
|  |         return count | ||||||
							
								
								
									
										54
									
								
								python-mongoengine.spec
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										54
									
								
								python-mongoengine.spec
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,54 @@ | |||||||
|  | # sitelib for noarch packages, sitearch for others (remove the unneeded one) | ||||||
|  | %{!?python_sitelib: %global python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")} | ||||||
|  | %{!?python_sitearch: %global python_sitearch %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")} | ||||||
|  |  | ||||||
|  | %define srcname mongoengine | ||||||
|  |  | ||||||
|  | Name:           python-%{srcname} | ||||||
|  | Version:        0.7.2 | ||||||
|  | Release:        1%{?dist} | ||||||
|  | Summary:        A Python Document-Object Mapper for working with MongoDB | ||||||
|  |  | ||||||
|  | Group:          Development/Libraries | ||||||
|  | License:        MIT | ||||||
|  | URL:            https://github.com/MongoEngine/mongoengine | ||||||
|  | Source0:        %{srcname}-%{version}.tar.bz2 | ||||||
|  |  | ||||||
|  | BuildRequires:  python-devel | ||||||
|  | BuildRequires:  python-setuptools | ||||||
|  |  | ||||||
|  | Requires:       mongodb | ||||||
|  | Requires:       pymongo | ||||||
|  | Requires:       python-blinker | ||||||
|  | Requires:       python-imaging | ||||||
|  |  | ||||||
|  |  | ||||||
|  | %description | ||||||
|  | MongoEngine is an ORM-like layer on top of PyMongo. | ||||||
|  |  | ||||||
|  | %prep | ||||||
|  | %setup -q -n %{srcname}-%{version} | ||||||
|  |  | ||||||
|  |  | ||||||
|  | %build | ||||||
|  | # Remove CFLAGS=... for noarch packages (unneeded) | ||||||
|  | CFLAGS="$RPM_OPT_FLAGS" %{__python} setup.py build | ||||||
|  |  | ||||||
|  |  | ||||||
|  | %install | ||||||
|  | rm -rf $RPM_BUILD_ROOT | ||||||
|  | %{__python} setup.py install -O1 --skip-build --root $RPM_BUILD_ROOT | ||||||
|  |  | ||||||
|  | %clean | ||||||
|  | rm -rf $RPM_BUILD_ROOT | ||||||
|  |  | ||||||
|  | %files | ||||||
|  | %defattr(-,root,root,-) | ||||||
|  | %doc docs AUTHORS LICENSE README.rst | ||||||
|  | # For noarch packages: sitelib | ||||||
|  |  %{python_sitelib}/* | ||||||
|  | # For arch-specific packages: sitearch | ||||||
|  | # %{python_sitearch}/* | ||||||
|  |  | ||||||
|  | %changelog | ||||||
|  | * See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html | ||||||
							
								
								
									
										1
									
								
								requirements.txt
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1
									
								
								requirements.txt
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1 @@ | |||||||
|  | pymongo | ||||||
							
								
								
									
										11
									
								
								setup.cfg
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										11
									
								
								setup.cfg
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,11 @@ | |||||||
|  | [nosetests] | ||||||
|  | verbosity = 3 | ||||||
|  | detailed-errors = 1 | ||||||
|  | #with-coverage = 1 | ||||||
|  | #cover-erase = 1 | ||||||
|  | #cover-html = 1 | ||||||
|  | #cover-html-dir = ../htmlcov | ||||||
|  | #cover-package = mongoengine | ||||||
|  | py3where = build | ||||||
|  | where = tests | ||||||
|  | #tests =  test_bugfix.py | ||||||
							
								
								
									
										53
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										53
									
								
								setup.py
									
									
									
									
									
								
							| @@ -1,27 +1,35 @@ | |||||||
| from setuptools import setup, find_packages |  | ||||||
| import os | import os | ||||||
|  | import sys | ||||||
|  | from setuptools import setup, find_packages | ||||||
|  |  | ||||||
| DESCRIPTION = "A Python Document-Object Mapper for working with MongoDB" | # Hack to silence atexit traceback in newer python versions | ||||||
|  | try: | ||||||
|  |     import multiprocessing | ||||||
|  | except ImportError: | ||||||
|  |     pass | ||||||
|  |  | ||||||
|  | DESCRIPTION = """MongoEngine is a Python Object-Document | ||||||
|  | Mapper for working with MongoDB.""" | ||||||
| LONG_DESCRIPTION = None | LONG_DESCRIPTION = None | ||||||
| try: | try: | ||||||
|     LONG_DESCRIPTION = open('README.rst').read() |     LONG_DESCRIPTION = open('README.rst').read() | ||||||
| except: | except: | ||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_version(version_tuple): | def get_version(version_tuple): | ||||||
|     version = '%s.%s' % (version_tuple[0], version_tuple[1]) |     if not isinstance(version_tuple[-1], int): | ||||||
|     if version_tuple[2]: |         return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1] | ||||||
|         version = '%s.%s' % (version, version_tuple[2]) |     return '.'.join(map(str, version_tuple)) | ||||||
|     return version |  | ||||||
|  |  | ||||||
| # Dirty hack to get version number from monogengine/__init__.py - we can't | # Dirty hack to get version number from monogengine/__init__.py - we can't | ||||||
| # import it as it depends on PyMongo and PyMongo isn't installed until this | # import it as it depends on PyMongo and PyMongo isn't installed until this | ||||||
| # file is read | # file is read | ||||||
| init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') | init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') | ||||||
| version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0] | version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0] | ||||||
|  |  | ||||||
| VERSION = get_version(eval(version_line.split('=')[-1])) | VERSION = get_version(eval(version_line.split('=')[-1])) | ||||||
| print VERSION | print(VERSION) | ||||||
|  |  | ||||||
| CLASSIFIERS = [ | CLASSIFIERS = [ | ||||||
|     'Development Status :: 4 - Beta', |     'Development Status :: 4 - Beta', | ||||||
| @@ -29,16 +37,38 @@ CLASSIFIERS = [ | |||||||
|     'License :: OSI Approved :: MIT License', |     'License :: OSI Approved :: MIT License', | ||||||
|     'Operating System :: OS Independent', |     'Operating System :: OS Independent', | ||||||
|     'Programming Language :: Python', |     'Programming Language :: Python', | ||||||
|  |     "Programming Language :: Python :: 2", | ||||||
|  |     "Programming Language :: Python :: 2.5", | ||||||
|  |     "Programming Language :: Python :: 2.6", | ||||||
|  |     "Programming Language :: Python :: 2.7", | ||||||
|  |     "Programming Language :: Python :: 3", | ||||||
|  |     "Programming Language :: Python :: 3.1", | ||||||
|  |     "Programming Language :: Python :: 3.2", | ||||||
|  |     "Programming Language :: Python :: Implementation :: CPython", | ||||||
|     'Topic :: Database', |     'Topic :: Database', | ||||||
|     'Topic :: Software Development :: Libraries :: Python Modules', |     'Topic :: Software Development :: Libraries :: Python Modules', | ||||||
| ] | ] | ||||||
|  |  | ||||||
|  | extra_opts = {} | ||||||
|  | if sys.version_info[0] == 3: | ||||||
|  |     extra_opts['use_2to3'] = True | ||||||
|  |     extra_opts['tests_require'] = ['nose', 'coverage', 'blinker'] | ||||||
|  |     extra_opts['packages'] = find_packages(exclude=('tests',)) | ||||||
|  |     if "test" in sys.argv or "nosetests" in sys.argv: | ||||||
|  |         extra_opts['packages'].append("tests") | ||||||
|  |         extra_opts['package_data'] = {"tests": ["mongoengine.png"]} | ||||||
|  | else: | ||||||
|  |     extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL'] | ||||||
|  |     extra_opts['packages'] = find_packages(exclude=('tests',)) | ||||||
|  |  | ||||||
| setup(name='mongoengine', | setup(name='mongoengine', | ||||||
|       version=VERSION, |       version=VERSION, | ||||||
|       packages=find_packages(), |  | ||||||
|       author='Harry Marr', |       author='Harry Marr', | ||||||
|       author_email='harry.marr@{nospam}gmail.com', |       author_email='harry.marr@{nospam}gmail.com', | ||||||
|       url='http://hmarr.com/mongoengine/', |       maintainer="Ross Lawley", | ||||||
|  |       maintainer_email="ross.lawley@{nospam}gmail.com", | ||||||
|  |       url='http://mongoengine.org/', | ||||||
|  |       download_url='https://github.com/MongoEngine/mongoengine/tarball/master', | ||||||
|       license='MIT', |       license='MIT', | ||||||
|       include_package_data=True, |       include_package_data=True, | ||||||
|       description=DESCRIPTION, |       description=DESCRIPTION, | ||||||
| @@ -46,5 +76,6 @@ setup(name='mongoengine', | |||||||
|       platforms=['any'], |       platforms=['any'], | ||||||
|       classifiers=CLASSIFIERS, |       classifiers=CLASSIFIERS, | ||||||
|       install_requires=['pymongo'], |       install_requires=['pymongo'], | ||||||
|       test_suite='tests', |       test_suite='nose.collector', | ||||||
|  |       **extra_opts | ||||||
| ) | ) | ||||||
|   | |||||||
| @@ -1,541 +0,0 @@ | |||||||
| import unittest |  | ||||||
| import datetime |  | ||||||
| import pymongo |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.connection import _get_db |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DocumentTest(unittest.TestCase): |  | ||||||
|      |  | ||||||
|     def setUp(self): |  | ||||||
|         connect(db='mongoenginetest') |  | ||||||
|         self.db = _get_db() |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             age = IntField() |  | ||||||
|         self.Person = Person |  | ||||||
|  |  | ||||||
|     def test_drop_collection(self): |  | ||||||
|         """Ensure that the collection may be dropped from the database. |  | ||||||
|         """ |  | ||||||
|         self.Person(name='Test').save() |  | ||||||
|  |  | ||||||
|         collection = self.Person._meta['collection'] |  | ||||||
|         self.assertTrue(collection in self.db.collection_names()) |  | ||||||
|  |  | ||||||
|         self.Person.drop_collection() |  | ||||||
|         self.assertFalse(collection in self.db.collection_names()) |  | ||||||
|  |  | ||||||
|     def test_definition(self): |  | ||||||
|         """Ensure that document may be defined using fields. |  | ||||||
|         """ |  | ||||||
|         name_field = StringField() |  | ||||||
|         age_field = IntField() |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = name_field |  | ||||||
|             age = age_field |  | ||||||
|             non_field = True |  | ||||||
|          |  | ||||||
|         self.assertEqual(Person._fields['name'], name_field) |  | ||||||
|         self.assertEqual(Person._fields['age'], age_field) |  | ||||||
|         self.assertFalse('non_field' in Person._fields) |  | ||||||
|         self.assertTrue('id' in Person._fields) |  | ||||||
|         # Test iteration over fields |  | ||||||
|         fields = list(Person()) |  | ||||||
|         self.assertTrue('name' in fields and 'age' in fields) |  | ||||||
|         # Ensure Document isn't treated like an actual document |  | ||||||
|         self.assertFalse(hasattr(Document, '_fields')) |  | ||||||
|  |  | ||||||
|     def test_get_superclasses(self): |  | ||||||
|         """Ensure that the correct list of superclasses is assembled. |  | ||||||
|         """ |  | ||||||
|         class Animal(Document): pass |  | ||||||
|         class Fish(Animal): pass |  | ||||||
|         class Mammal(Animal): pass |  | ||||||
|         class Human(Mammal): pass |  | ||||||
|         class Dog(Mammal): pass |  | ||||||
|  |  | ||||||
|         mammal_superclasses = {'Animal': Animal} |  | ||||||
|         self.assertEqual(Mammal._superclasses, mammal_superclasses) |  | ||||||
|          |  | ||||||
|         dog_superclasses = { |  | ||||||
|             'Animal': Animal, |  | ||||||
|             'Animal.Mammal': Mammal, |  | ||||||
|         } |  | ||||||
|         self.assertEqual(Dog._superclasses, dog_superclasses) |  | ||||||
|  |  | ||||||
|     def test_get_subclasses(self): |  | ||||||
|         """Ensure that the correct list of subclasses is retrieved by the  |  | ||||||
|         _get_subclasses method. |  | ||||||
|         """ |  | ||||||
|         class Animal(Document): pass |  | ||||||
|         class Fish(Animal): pass |  | ||||||
|         class Mammal(Animal): pass |  | ||||||
|         class Human(Mammal): pass |  | ||||||
|         class Dog(Mammal): pass |  | ||||||
|  |  | ||||||
|         mammal_subclasses = { |  | ||||||
|             'Animal.Mammal.Dog': Dog,  |  | ||||||
|             'Animal.Mammal.Human': Human |  | ||||||
|         } |  | ||||||
|         self.assertEqual(Mammal._get_subclasses(), mammal_subclasses) |  | ||||||
|          |  | ||||||
|         animal_subclasses = { |  | ||||||
|             'Animal.Fish': Fish, |  | ||||||
|             'Animal.Mammal': Mammal, |  | ||||||
|             'Animal.Mammal.Dog': Dog,  |  | ||||||
|             'Animal.Mammal.Human': Human |  | ||||||
|         } |  | ||||||
|         self.assertEqual(Animal._get_subclasses(), animal_subclasses) |  | ||||||
|  |  | ||||||
|     def test_polymorphic_queries(self): |  | ||||||
|         """Ensure that the correct subclasses are returned from a query""" |  | ||||||
|         class Animal(Document): pass |  | ||||||
|         class Fish(Animal): pass |  | ||||||
|         class Mammal(Animal): pass |  | ||||||
|         class Human(Mammal): pass |  | ||||||
|         class Dog(Mammal): pass |  | ||||||
|  |  | ||||||
|         Animal().save() |  | ||||||
|         Fish().save() |  | ||||||
|         Mammal().save() |  | ||||||
|         Human().save() |  | ||||||
|         Dog().save() |  | ||||||
|  |  | ||||||
|         classes = [obj.__class__ for obj in Animal.objects] |  | ||||||
|         self.assertEqual(classes, [Animal, Fish, Mammal, Human, Dog]) |  | ||||||
|  |  | ||||||
|         classes = [obj.__class__ for obj in Mammal.objects] |  | ||||||
|         self.assertEqual(classes, [Mammal, Human, Dog]) |  | ||||||
|  |  | ||||||
|         classes = [obj.__class__ for obj in Human.objects] |  | ||||||
|         self.assertEqual(classes, [Human]) |  | ||||||
|  |  | ||||||
|         Animal.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_inheritance(self): |  | ||||||
|         """Ensure that document may inherit fields from a superclass document. |  | ||||||
|         """ |  | ||||||
|         class Employee(self.Person): |  | ||||||
|             salary = IntField() |  | ||||||
|  |  | ||||||
|         self.assertTrue('name' in Employee._fields) |  | ||||||
|         self.assertTrue('salary' in Employee._fields) |  | ||||||
|         self.assertEqual(Employee._meta['collection'],  |  | ||||||
|                          self.Person._meta['collection']) |  | ||||||
|  |  | ||||||
|     def test_allow_inheritance(self): |  | ||||||
|         """Ensure that inheritance may be disabled on simple classes and that |  | ||||||
|         _cls and _types will not be used. |  | ||||||
|         """ |  | ||||||
|         class Animal(Document): |  | ||||||
|             meta = {'allow_inheritance': False} |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         Animal.drop_collection() |  | ||||||
|  |  | ||||||
|         def create_dog_class(): |  | ||||||
|             class Dog(Animal): |  | ||||||
|                 pass |  | ||||||
|         self.assertRaises(ValueError, create_dog_class) |  | ||||||
|          |  | ||||||
|         # Check that _cls etc aren't present on simple documents |  | ||||||
|         dog = Animal(name='dog') |  | ||||||
|         dog.save() |  | ||||||
|         collection = self.db[Animal._meta['collection']] |  | ||||||
|         obj = collection.find_one() |  | ||||||
|         self.assertFalse('_cls' in obj) |  | ||||||
|         self.assertFalse('_types' in obj) |  | ||||||
|  |  | ||||||
|         Animal.drop_collection() |  | ||||||
|  |  | ||||||
|         def create_employee_class(): |  | ||||||
|             class Employee(self.Person): |  | ||||||
|                 meta = {'allow_inheritance': False} |  | ||||||
|         self.assertRaises(ValueError, create_employee_class) |  | ||||||
|  |  | ||||||
|     def test_collection_name(self): |  | ||||||
|         """Ensure that a collection with a specified name may be used. |  | ||||||
|         """ |  | ||||||
|         collection = 'personCollTest' |  | ||||||
|         if collection in self.db.collection_names(): |  | ||||||
|             self.db.drop_collection(collection) |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             meta = {'collection': collection} |  | ||||||
|          |  | ||||||
|         user = Person(name="Test User") |  | ||||||
|         user.save() |  | ||||||
|         self.assertTrue(collection in self.db.collection_names()) |  | ||||||
|  |  | ||||||
|         user_obj = self.db[collection].find_one() |  | ||||||
|         self.assertEqual(user_obj['name'], "Test User") |  | ||||||
|  |  | ||||||
|         user_obj = Person.objects[0] |  | ||||||
|         self.assertEqual(user_obj.name, "Test User") |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|         self.assertFalse(collection in self.db.collection_names()) |  | ||||||
|  |  | ||||||
|     def test_capped_collection(self): |  | ||||||
|         """Ensure that capped collections work properly. |  | ||||||
|         """ |  | ||||||
|         class Log(Document): |  | ||||||
|             date = DateTimeField(default=datetime.datetime.now) |  | ||||||
|             meta = { |  | ||||||
|                 'max_documents': 10, |  | ||||||
|                 'max_size': 90000, |  | ||||||
|             } |  | ||||||
|  |  | ||||||
|         Log.drop_collection() |  | ||||||
|  |  | ||||||
|         # Ensure that the collection handles up to its maximum |  | ||||||
|         for i in range(10): |  | ||||||
|             Log().save() |  | ||||||
|  |  | ||||||
|         self.assertEqual(len(Log.objects), 10) |  | ||||||
|  |  | ||||||
|         # Check that extra documents don't increase the size |  | ||||||
|         Log().save() |  | ||||||
|         self.assertEqual(len(Log.objects), 10) |  | ||||||
|  |  | ||||||
|         options = Log.objects._collection.options() |  | ||||||
|         self.assertEqual(options['capped'], True) |  | ||||||
|         self.assertEqual(options['max'], 10) |  | ||||||
|         self.assertEqual(options['size'], 90000) |  | ||||||
|  |  | ||||||
|         # Check that the document cannot be redefined with different options |  | ||||||
|         def recreate_log_document(): |  | ||||||
|             class Log(Document): |  | ||||||
|                 date = DateTimeField(default=datetime.datetime.now) |  | ||||||
|                 meta = { |  | ||||||
|                     'max_documents': 11, |  | ||||||
|                 } |  | ||||||
|             # Create the collection by accessing Document.objects |  | ||||||
|             Log.objects |  | ||||||
|         self.assertRaises(InvalidCollectionError, recreate_log_document) |  | ||||||
|  |  | ||||||
|         Log.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_indexes(self): |  | ||||||
|         """Ensure that indexes are used when meta[indexes] is specified. |  | ||||||
|         """ |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             date = DateTimeField(name='addDate', default=datetime.datetime.now) |  | ||||||
|             category = StringField() |  | ||||||
|             tags = ListField(StringField()) |  | ||||||
|             meta = { |  | ||||||
|                 'indexes': [ |  | ||||||
|                     '-date',  |  | ||||||
|                     'tags', |  | ||||||
|                     ('category', '-date') |  | ||||||
|                 ], |  | ||||||
|             } |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         info = BlogPost.objects._collection.index_information() |  | ||||||
|         # _id, types, '-date', 'tags', ('cat', 'date') |  | ||||||
|         self.assertEqual(len(info), 5)  |  | ||||||
|  |  | ||||||
|         # Indexes are lazy so use list() to perform query |  | ||||||
|         list(BlogPost.objects) |  | ||||||
|         info = BlogPost.objects._collection.index_information() |  | ||||||
|         self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)]  |  | ||||||
|                         in info.values()) |  | ||||||
|         self.assertTrue([('_types', 1), ('addDate', -1)] in info.values()) |  | ||||||
|         # tags is a list field so it shouldn't have _types in the index |  | ||||||
|         self.assertTrue([('tags', 1)] in info.values()) |  | ||||||
|          |  | ||||||
|         class ExtendedBlogPost(BlogPost): |  | ||||||
|             title = StringField() |  | ||||||
|             meta = {'indexes': ['title']} |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         list(ExtendedBlogPost.objects) |  | ||||||
|         info = ExtendedBlogPost.objects._collection.index_information() |  | ||||||
|         self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)]  |  | ||||||
|                         in info.values()) |  | ||||||
|         self.assertTrue([('_types', 1), ('addDate', -1)] in info.values()) |  | ||||||
|         self.assertTrue([('_types', 1), ('title', 1)] in info.values()) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_unique(self): |  | ||||||
|         """Ensure that uniqueness constraints are applied to fields. |  | ||||||
|         """ |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             title = StringField() |  | ||||||
|             slug = StringField(unique=True) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         post1 = BlogPost(title='test1', slug='test') |  | ||||||
|         post1.save() |  | ||||||
|  |  | ||||||
|         # Two posts with the same slug is not allowed |  | ||||||
|         post2 = BlogPost(title='test2', slug='test') |  | ||||||
|         self.assertRaises(OperationError, post2.save) |  | ||||||
|  |  | ||||||
|         class Date(EmbeddedDocument): |  | ||||||
|             year = IntField(name='yr') |  | ||||||
|  |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             title = StringField() |  | ||||||
|             date = EmbeddedDocumentField(Date) |  | ||||||
|             slug = StringField(unique_with='date.year') |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         post1 = BlogPost(title='test1', date=Date(year=2009), slug='test') |  | ||||||
|         post1.save() |  | ||||||
|  |  | ||||||
|         # day is different so won't raise exception |  | ||||||
|         post2 = BlogPost(title='test2', date=Date(year=2010), slug='test') |  | ||||||
|         post2.save() |  | ||||||
|  |  | ||||||
|         # Now there will be two docs with the same slug and the same day: fail |  | ||||||
|         post3 = BlogPost(title='test3', date=Date(year=2010), slug='test') |  | ||||||
|         self.assertRaises(OperationError, post3.save) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_custom_id_field(self): |  | ||||||
|         """Ensure that documents may be created with custom primary keys. |  | ||||||
|         """ |  | ||||||
|         class User(Document): |  | ||||||
|             username = StringField(primary_key=True) |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         User.drop_collection() |  | ||||||
|  |  | ||||||
|         self.assertEqual(User._fields['username'].name, '_id') |  | ||||||
|         self.assertEqual(User._meta['id_field'], 'username') |  | ||||||
|  |  | ||||||
|         def create_invalid_user(): |  | ||||||
|             User(name='test').save() # no primary key field |  | ||||||
|         self.assertRaises(ValidationError, create_invalid_user) |  | ||||||
|  |  | ||||||
|         def define_invalid_user(): |  | ||||||
|             class EmailUser(User): |  | ||||||
|                 email = StringField(primary_key=True) |  | ||||||
|         self.assertRaises(ValueError, define_invalid_user) |  | ||||||
|          |  | ||||||
|         user = User(username='test', name='test user') |  | ||||||
|         user.save() |  | ||||||
|  |  | ||||||
|         user_obj = User.objects.first() |  | ||||||
|         self.assertEqual(user_obj.id, 'test') |  | ||||||
|  |  | ||||||
|         user_son = User.objects._collection.find_one() |  | ||||||
|         self.assertEqual(user_son['_id'], 'test') |  | ||||||
|         self.assertTrue('username' not in user_son['_id']) |  | ||||||
|          |  | ||||||
|         User.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_creation(self): |  | ||||||
|         """Ensure that document may be created using keyword arguments. |  | ||||||
|         """ |  | ||||||
|         person = self.Person(name="Test User", age=30) |  | ||||||
|         self.assertEqual(person.name, "Test User") |  | ||||||
|         self.assertEqual(person.age, 30) |  | ||||||
|  |  | ||||||
|     def test_reload(self): |  | ||||||
|         """Ensure that attributes may be reloaded. |  | ||||||
|         """ |  | ||||||
|         person = self.Person(name="Test User", age=20) |  | ||||||
|         person.save() |  | ||||||
|  |  | ||||||
|         person_obj = self.Person.objects.first() |  | ||||||
|         person_obj.name = "Mr Test User" |  | ||||||
|         person_obj.age = 21 |  | ||||||
|         person_obj.save() |  | ||||||
|  |  | ||||||
|         self.assertEqual(person.name, "Test User") |  | ||||||
|         self.assertEqual(person.age, 20) |  | ||||||
|  |  | ||||||
|         person.reload() |  | ||||||
|         self.assertEqual(person.name, "Mr Test User") |  | ||||||
|         self.assertEqual(person.age, 21) |  | ||||||
|  |  | ||||||
|     def test_dictionary_access(self): |  | ||||||
|         """Ensure that dictionary-style field access works properly. |  | ||||||
|         """ |  | ||||||
|         person = self.Person(name='Test User', age=30) |  | ||||||
|         self.assertEquals(person['name'], 'Test User') |  | ||||||
|  |  | ||||||
|         self.assertRaises(KeyError, person.__getitem__, 'salary') |  | ||||||
|         self.assertRaises(KeyError, person.__setitem__, 'salary', 50) |  | ||||||
|  |  | ||||||
|         person['name'] = 'Another User' |  | ||||||
|         self.assertEquals(person['name'], 'Another User') |  | ||||||
|  |  | ||||||
|         # Length = length(assigned fields + id) |  | ||||||
|         self.assertEquals(len(person), 3) |  | ||||||
|  |  | ||||||
|         self.assertTrue('age' in person) |  | ||||||
|         person.age = None |  | ||||||
|         self.assertFalse('age' in person) |  | ||||||
|         self.assertFalse('nationality' in person) |  | ||||||
|  |  | ||||||
|     def test_embedded_document(self): |  | ||||||
|         """Ensure that embedded documents are set up correctly. |  | ||||||
|         """ |  | ||||||
|         class Comment(EmbeddedDocument): |  | ||||||
|             content = StringField() |  | ||||||
|          |  | ||||||
|         self.assertTrue('content' in Comment._fields) |  | ||||||
|         self.assertFalse('id' in Comment._fields) |  | ||||||
|         self.assertFalse(hasattr(Comment, '_meta')) |  | ||||||
|      |  | ||||||
|     def test_embedded_document_validation(self): |  | ||||||
|         """Ensure that embedded documents may be validated. |  | ||||||
|         """ |  | ||||||
|         class Comment(EmbeddedDocument): |  | ||||||
|             date = DateTimeField() |  | ||||||
|             content = StringField(required=True) |  | ||||||
|          |  | ||||||
|         comment = Comment() |  | ||||||
|         self.assertRaises(ValidationError, comment.validate) |  | ||||||
|  |  | ||||||
|         comment.content = 'test' |  | ||||||
|         comment.validate() |  | ||||||
|  |  | ||||||
|         comment.date = 4 |  | ||||||
|         self.assertRaises(ValidationError, comment.validate) |  | ||||||
|  |  | ||||||
|         comment.date = datetime.datetime.now() |  | ||||||
|         comment.validate() |  | ||||||
|  |  | ||||||
|     def test_save(self): |  | ||||||
|         """Ensure that a document may be saved in the database. |  | ||||||
|         """ |  | ||||||
|         # Create person object and save it to the database |  | ||||||
|         person = self.Person(name='Test User', age=30) |  | ||||||
|         person.save() |  | ||||||
|         # Ensure that the object is in the database |  | ||||||
|         collection = self.db[self.Person._meta['collection']] |  | ||||||
|         person_obj = collection.find_one({'name': 'Test User'}) |  | ||||||
|         self.assertEqual(person_obj['name'], 'Test User') |  | ||||||
|         self.assertEqual(person_obj['age'], 30) |  | ||||||
|         self.assertEqual(str(person_obj['_id']), person.id) |  | ||||||
|  |  | ||||||
|     def test_delete(self): |  | ||||||
|         """Ensure that document may be deleted using the delete method. |  | ||||||
|         """ |  | ||||||
|         person = self.Person(name="Test User", age=30) |  | ||||||
|         person.save() |  | ||||||
|         self.assertEqual(len(self.Person.objects), 1) |  | ||||||
|         person.delete() |  | ||||||
|         self.assertEqual(len(self.Person.objects), 0) |  | ||||||
|  |  | ||||||
|     def test_save_custom_id(self): |  | ||||||
|         """Ensure that a document may be saved with a custom _id. |  | ||||||
|         """ |  | ||||||
|         # Create person object and save it to the database |  | ||||||
|         person = self.Person(name='Test User', age=30,  |  | ||||||
|                              id='497ce96f395f2f052a494fd4') |  | ||||||
|         person.save() |  | ||||||
|         # Ensure that the object is in the database with the correct _id |  | ||||||
|         collection = self.db[self.Person._meta['collection']] |  | ||||||
|         person_obj = collection.find_one({'name': 'Test User'}) |  | ||||||
|         self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4') |  | ||||||
|  |  | ||||||
|     def test_save_list(self): |  | ||||||
|         """Ensure that a list field may be properly saved. |  | ||||||
|         """ |  | ||||||
|         class Comment(EmbeddedDocument): |  | ||||||
|             content = StringField() |  | ||||||
|  |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             content = StringField() |  | ||||||
|             comments = ListField(EmbeddedDocumentField(Comment)) |  | ||||||
|             tags = ListField(StringField()) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         post = BlogPost(content='Went for a walk today...') |  | ||||||
|         post.tags = tags = ['fun', 'leisure'] |  | ||||||
|         comments = [Comment(content='Good for you'), Comment(content='Yay.')] |  | ||||||
|         post.comments = comments |  | ||||||
|         post.save() |  | ||||||
|  |  | ||||||
|         collection = self.db[BlogPost._meta['collection']] |  | ||||||
|         post_obj = collection.find_one() |  | ||||||
|         self.assertEqual(post_obj['tags'], tags) |  | ||||||
|         for comment_obj, comment in zip(post_obj['comments'], comments): |  | ||||||
|             self.assertEqual(comment_obj['content'], comment['content']) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_save_embedded_document(self): |  | ||||||
|         """Ensure that a document with an embedded document field may be  |  | ||||||
|         saved in the database. |  | ||||||
|         """ |  | ||||||
|         class EmployeeDetails(EmbeddedDocument): |  | ||||||
|             position = StringField() |  | ||||||
|  |  | ||||||
|         class Employee(self.Person): |  | ||||||
|             salary = IntField() |  | ||||||
|             details = EmbeddedDocumentField(EmployeeDetails) |  | ||||||
|  |  | ||||||
|         # Create employee object and save it to the database |  | ||||||
|         employee = Employee(name='Test Employee', age=50, salary=20000) |  | ||||||
|         employee.details = EmployeeDetails(position='Developer') |  | ||||||
|         employee.save() |  | ||||||
|  |  | ||||||
|         # Ensure that the object is in the database |  | ||||||
|         collection = self.db[self.Person._meta['collection']] |  | ||||||
|         employee_obj = collection.find_one({'name': 'Test Employee'}) |  | ||||||
|         self.assertEqual(employee_obj['name'], 'Test Employee') |  | ||||||
|         self.assertEqual(employee_obj['age'], 50) |  | ||||||
|         # Ensure that the 'details' embedded object saved correctly |  | ||||||
|         self.assertEqual(employee_obj['details']['position'], 'Developer') |  | ||||||
|  |  | ||||||
|     def test_save_reference(self): |  | ||||||
|         """Ensure that a document reference field may be saved in the database. |  | ||||||
|         """ |  | ||||||
|          |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             meta = {'collection': 'blogpost_1'} |  | ||||||
|             content = StringField() |  | ||||||
|             author = ReferenceField(self.Person) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         author = self.Person(name='Test User') |  | ||||||
|         author.save() |  | ||||||
|  |  | ||||||
|         post = BlogPost(content='Watched some TV today... how exciting.') |  | ||||||
|         # Should only reference author when saving |  | ||||||
|         post.author = author |  | ||||||
|         post.save() |  | ||||||
|  |  | ||||||
|         post_obj = BlogPost.objects.first() |  | ||||||
|  |  | ||||||
|         # Test laziness |  | ||||||
|         self.assertTrue(isinstance(post_obj._data['author'],  |  | ||||||
|                                    pymongo.dbref.DBRef)) |  | ||||||
|         self.assertTrue(isinstance(post_obj.author, self.Person)) |  | ||||||
|         self.assertEqual(post_obj.author.name, 'Test User') |  | ||||||
|  |  | ||||||
|         # Ensure that the dereferenced object may be changed and saved |  | ||||||
|         post_obj.author.age = 25 |  | ||||||
|         post_obj.author.save() |  | ||||||
|  |  | ||||||
|         author = list(self.Person.objects(name='Test User'))[-1] |  | ||||||
|         self.assertEqual(author.age, 25) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|     def tearDown(self): |  | ||||||
|         self.Person.drop_collection() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     unittest.main() |  | ||||||
							
								
								
									
										264
									
								
								tests/fields.py
									
									
									
									
									
								
							
							
						
						
									
										264
									
								
								tests/fields.py
									
									
									
									
									
								
							| @@ -1,264 +0,0 @@ | |||||||
| import unittest |  | ||||||
| import datetime |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.connection import _get_db |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class FieldTest(unittest.TestCase): |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         connect(db='mongoenginetest') |  | ||||||
|         self.db = _get_db() |  | ||||||
|  |  | ||||||
|     def test_default_values(self): |  | ||||||
|         """Ensure that default field values are used when creating a document. |  | ||||||
|         """ |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             age = IntField(default=30) |  | ||||||
|             userid = StringField(default=lambda: 'test') |  | ||||||
|  |  | ||||||
|         person = Person(name='Test Person') |  | ||||||
|         self.assertEqual(person._data['age'], 30) |  | ||||||
|         self.assertEqual(person._data['userid'], 'test') |  | ||||||
|  |  | ||||||
|     def test_required_values(self): |  | ||||||
|         """Ensure that required field constraints are enforced. |  | ||||||
|         """ |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField(required=True) |  | ||||||
|             age = IntField(required=True) |  | ||||||
|             userid = StringField() |  | ||||||
|  |  | ||||||
|         person = Person(name="Test User") |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|         person = Person(age=30) |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|  |  | ||||||
|     def test_object_id_validation(self): |  | ||||||
|         """Ensure that invalid values cannot be assigned to string fields. |  | ||||||
|         """ |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|          |  | ||||||
|         person = Person(name='Test User') |  | ||||||
|         self.assertEqual(person.id, None) |  | ||||||
|  |  | ||||||
|         person.id = 47 |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|  |  | ||||||
|         person.id = 'abc' |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|  |  | ||||||
|         person.id = '497ce96f395f2f052a494fd4' |  | ||||||
|         person.validate() |  | ||||||
|  |  | ||||||
|     def test_string_validation(self): |  | ||||||
|         """Ensure that invalid values cannot be assigned to string fields. |  | ||||||
|         """ |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField(max_length=20) |  | ||||||
|             userid = StringField(r'[0-9a-z_]+$') |  | ||||||
|  |  | ||||||
|         person = Person(name=34) |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|  |  | ||||||
|         # Test regex validation on userid |  | ||||||
|         person = Person(userid='test.User') |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|  |  | ||||||
|         person.userid = 'test_user' |  | ||||||
|         self.assertEqual(person.userid, 'test_user') |  | ||||||
|         person.validate() |  | ||||||
|  |  | ||||||
|         # Test max length validation on name |  | ||||||
|         person = Person(name='Name that is more than twenty characters') |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|  |  | ||||||
|         person.name = 'Shorter name' |  | ||||||
|         person.validate() |  | ||||||
|  |  | ||||||
|     def test_int_validation(self): |  | ||||||
|         """Ensure that invalid values cannot be assigned to int fields. |  | ||||||
|         """ |  | ||||||
|         class Person(Document): |  | ||||||
|             age = IntField(min_value=0, max_value=110) |  | ||||||
|  |  | ||||||
|         person = Person() |  | ||||||
|         person.age = 50 |  | ||||||
|         person.validate() |  | ||||||
|  |  | ||||||
|         person.age = -1 |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|         person.age = 120 |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|         person.age = 'ten' |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|  |  | ||||||
|     def test_float_validation(self): |  | ||||||
|         """Ensure that invalid values cannot be assigned to float fields. |  | ||||||
|         """ |  | ||||||
|         class Person(Document): |  | ||||||
|             height = FloatField(min_value=0.1, max_value=3.5) |  | ||||||
|  |  | ||||||
|         person = Person() |  | ||||||
|         person.height = 1.89 |  | ||||||
|         person.validate() |  | ||||||
|  |  | ||||||
|         person.height = 2 |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|         person.height = 0.01 |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|         person.height = 4.0 |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|  |  | ||||||
|     def test_boolean_validation(self): |  | ||||||
|         """Ensure that invalid values cannot be assigned to boolean fields. |  | ||||||
|         """ |  | ||||||
|         class Person(Document): |  | ||||||
|             admin = BooleanField() |  | ||||||
|  |  | ||||||
|         person = Person() |  | ||||||
|         person.admin = True |  | ||||||
|         person.validate() |  | ||||||
|  |  | ||||||
|         person.admin = 2 |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|         person.admin = 'Yes' |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|  |  | ||||||
|     def test_datetime_validation(self): |  | ||||||
|         """Ensure that invalid values cannot be assigned to datetime fields. |  | ||||||
|         """ |  | ||||||
|         class LogEntry(Document): |  | ||||||
|             time = DateTimeField() |  | ||||||
|  |  | ||||||
|         log = LogEntry() |  | ||||||
|         log.time = datetime.datetime.now() |  | ||||||
|         log.validate() |  | ||||||
|  |  | ||||||
|         log.time = -1 |  | ||||||
|         self.assertRaises(ValidationError, log.validate) |  | ||||||
|         log.time = '1pm' |  | ||||||
|         self.assertRaises(ValidationError, log.validate) |  | ||||||
|  |  | ||||||
|     def test_list_validation(self): |  | ||||||
|         """Ensure that a list field only accepts lists with valid elements. |  | ||||||
|         """ |  | ||||||
|         class Comment(EmbeddedDocument): |  | ||||||
|             content = StringField() |  | ||||||
|  |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             content = StringField() |  | ||||||
|             comments = ListField(EmbeddedDocumentField(Comment)) |  | ||||||
|             tags = ListField(StringField()) |  | ||||||
|  |  | ||||||
|         post = BlogPost(content='Went for a walk today...') |  | ||||||
|         post.validate() |  | ||||||
|  |  | ||||||
|         post.tags = 'fun' |  | ||||||
|         self.assertRaises(ValidationError, post.validate) |  | ||||||
|         post.tags = [1, 2] |  | ||||||
|         self.assertRaises(ValidationError, post.validate) |  | ||||||
|  |  | ||||||
|         post.tags = ['fun', 'leisure'] |  | ||||||
|         post.validate() |  | ||||||
|         post.tags = ('fun', 'leisure') |  | ||||||
|         post.validate() |  | ||||||
|  |  | ||||||
|         comments = [Comment(content='Good for you'), Comment(content='Yay.')] |  | ||||||
|         post.comments = comments |  | ||||||
|         post.validate() |  | ||||||
|  |  | ||||||
|         post.comments = ['a'] |  | ||||||
|         self.assertRaises(ValidationError, post.validate) |  | ||||||
|         post.comments = 'yay' |  | ||||||
|         self.assertRaises(ValidationError, post.validate) |  | ||||||
|  |  | ||||||
|     def test_embedded_document_validation(self): |  | ||||||
|         """Ensure that invalid embedded documents cannot be assigned to |  | ||||||
|         embedded document fields. |  | ||||||
|         """ |  | ||||||
|         class Comment(EmbeddedDocument): |  | ||||||
|             content = StringField() |  | ||||||
|  |  | ||||||
|         class PersonPreferences(EmbeddedDocument): |  | ||||||
|             food = StringField() |  | ||||||
|             number = IntField() |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             preferences = EmbeddedDocumentField(PersonPreferences) |  | ||||||
|  |  | ||||||
|         person = Person(name='Test User') |  | ||||||
|         person.preferences = 'My Preferences' |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|  |  | ||||||
|         person.preferences = Comment(content='Nice blog post...') |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|  |  | ||||||
|         person.preferences = PersonPreferences(food='Cheese', number=47) |  | ||||||
|         self.assertEqual(person.preferences.food, 'Cheese') |  | ||||||
|         person.validate() |  | ||||||
|  |  | ||||||
|     def test_embedded_document_inheritance(self): |  | ||||||
|         """Ensure that subclasses of embedded documents may be provided to  |  | ||||||
|         EmbeddedDocumentFields of the superclass' type. |  | ||||||
|         """ |  | ||||||
|         class User(EmbeddedDocument): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class PowerUser(User): |  | ||||||
|             power = IntField() |  | ||||||
|  |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             content = StringField() |  | ||||||
|             author = EmbeddedDocumentField(User) |  | ||||||
|          |  | ||||||
|         post = BlogPost(content='What I did today...') |  | ||||||
|         post.author = User(name='Test User') |  | ||||||
|         post.author = PowerUser(name='Test User', power=47) |  | ||||||
|  |  | ||||||
|     def test_reference_validation(self): |  | ||||||
|         """Ensure that invalid docment objects cannot be assigned to reference |  | ||||||
|         fields. |  | ||||||
|         """ |  | ||||||
|         class User(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             content = StringField() |  | ||||||
|             author = ReferenceField(User) |  | ||||||
|  |  | ||||||
|         User.drop_collection() |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument) |  | ||||||
|  |  | ||||||
|         user = User(name='Test User') |  | ||||||
|  |  | ||||||
|         # Ensure that the referenced object must have been saved |  | ||||||
|         post1 = BlogPost(content='Chips and gravy taste good.') |  | ||||||
|         post1.author = user |  | ||||||
|         self.assertRaises(ValidationError, post1.save) |  | ||||||
|  |  | ||||||
|         # Check that an invalid object type cannot be used |  | ||||||
|         post2 = BlogPost(content='Chips and chilli taste good.') |  | ||||||
|         post1.author = post2 |  | ||||||
|         self.assertRaises(ValidationError, post1.validate) |  | ||||||
|  |  | ||||||
|         user.save() |  | ||||||
|         post1.author = user |  | ||||||
|         post1.save() |  | ||||||
|  |  | ||||||
|         post2.save() |  | ||||||
|         post1.author = post2 |  | ||||||
|         self.assertRaises(ValidationError, post1.validate) |  | ||||||
|  |  | ||||||
|         User.drop_collection() |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     unittest.main() |  | ||||||
							
								
								
									
										23
									
								
								tests/fixtures.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										23
									
								
								tests/fixtures.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,23 @@ | |||||||
|  | from datetime import datetime | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class PickleEmbedded(EmbeddedDocument): | ||||||
|  |     date = DateTimeField(default=datetime.now) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class PickleTest(Document): | ||||||
|  |     number = IntField() | ||||||
|  |     string = StringField(choices=(('One', '1'), ('Two', '2'))) | ||||||
|  |     embedded = EmbeddedDocumentField(PickleEmbedded) | ||||||
|  |     lists = ListField(StringField()) | ||||||
|  |     photo = FileField() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Mixin(object): | ||||||
|  |     name = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Base(Document): | ||||||
|  |     meta = {'allow_inheritance': True} | ||||||
							
								
								
									
										
											BIN
										
									
								
								tests/mongoengine.png
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tests/mongoengine.png
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 8.1 KiB | 
| @@ -1,542 +0,0 @@ | |||||||
| import unittest |  | ||||||
| import pymongo |  | ||||||
| from datetime import datetime |  | ||||||
|  |  | ||||||
| from mongoengine.queryset import QuerySet |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class QuerySetTest(unittest.TestCase): |  | ||||||
|      |  | ||||||
|     def setUp(self): |  | ||||||
|         connect(db='mongoenginetest') |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             age = IntField() |  | ||||||
|         self.Person = Person |  | ||||||
|  |  | ||||||
|     def test_initialisation(self): |  | ||||||
|         """Ensure that a QuerySet is correctly initialised by QuerySetManager. |  | ||||||
|         """ |  | ||||||
|         self.assertTrue(isinstance(self.Person.objects, QuerySet)) |  | ||||||
|         self.assertEqual(self.Person.objects._collection.name(),  |  | ||||||
|                          self.Person._meta['collection']) |  | ||||||
|         self.assertTrue(isinstance(self.Person.objects._collection, |  | ||||||
|                                    pymongo.collection.Collection)) |  | ||||||
|  |  | ||||||
|     def test_transform_query(self): |  | ||||||
|         """Ensure that the _transform_query function operates correctly. |  | ||||||
|         """ |  | ||||||
|         self.assertEqual(QuerySet._transform_query(name='test', age=30), |  | ||||||
|                          {'name': 'test', 'age': 30}) |  | ||||||
|         self.assertEqual(QuerySet._transform_query(age__lt=30),  |  | ||||||
|                          {'age': {'$lt': 30}}) |  | ||||||
|         self.assertEqual(QuerySet._transform_query(age__gt=20, age__lt=50), |  | ||||||
|                          {'age': {'$gt': 20, '$lt': 50}}) |  | ||||||
|         self.assertEqual(QuerySet._transform_query(age=20, age__gt=50), |  | ||||||
|                          {'age': 20}) |  | ||||||
|         self.assertEqual(QuerySet._transform_query(friend__age__gte=30),  |  | ||||||
|                          {'friend.age': {'$gte': 30}}) |  | ||||||
|         self.assertEqual(QuerySet._transform_query(name__exists=True),  |  | ||||||
|                          {'name': {'$exists': True}}) |  | ||||||
|  |  | ||||||
|     def test_find(self): |  | ||||||
|         """Ensure that a query returns a valid set of results. |  | ||||||
|         """ |  | ||||||
|         person1 = self.Person(name="User A", age=20) |  | ||||||
|         person1.save() |  | ||||||
|         person2 = self.Person(name="User B", age=30) |  | ||||||
|         person2.save() |  | ||||||
|  |  | ||||||
|         q1 = Q(name='test') |  | ||||||
|         q2 = Q(age__gte=18) |  | ||||||
|  |  | ||||||
|         # Find all people in the collection |  | ||||||
|         people = self.Person.objects |  | ||||||
|         self.assertEqual(len(people), 2) |  | ||||||
|         results = list(people) |  | ||||||
|         self.assertTrue(isinstance(results[0], self.Person)) |  | ||||||
|         self.assertTrue(isinstance(results[0].id, (pymongo.objectid.ObjectId, |  | ||||||
|                                                     str, unicode))) |  | ||||||
|         self.assertEqual(results[0].name, "User A") |  | ||||||
|         self.assertEqual(results[0].age, 20) |  | ||||||
|         self.assertEqual(results[1].name, "User B") |  | ||||||
|         self.assertEqual(results[1].age, 30) |  | ||||||
|  |  | ||||||
|         # Use a query to filter the people found to just person1 |  | ||||||
|         people = self.Person.objects(age=20) |  | ||||||
|         self.assertEqual(len(people), 1) |  | ||||||
|         person = people.next() |  | ||||||
|         self.assertEqual(person.name, "User A") |  | ||||||
|         self.assertEqual(person.age, 20) |  | ||||||
|  |  | ||||||
|         # Test limit |  | ||||||
|         people = list(self.Person.objects.limit(1)) |  | ||||||
|         self.assertEqual(len(people), 1) |  | ||||||
|         self.assertEqual(people[0].name, 'User A') |  | ||||||
|  |  | ||||||
|         # Test skip |  | ||||||
|         people = list(self.Person.objects.skip(1)) |  | ||||||
|         self.assertEqual(len(people), 1) |  | ||||||
|         self.assertEqual(people[0].name, 'User B') |  | ||||||
|  |  | ||||||
|         person3 = self.Person(name="User C", age=40) |  | ||||||
|         person3.save() |  | ||||||
|  |  | ||||||
|         # Test slice limit |  | ||||||
|         people = list(self.Person.objects[:2]) |  | ||||||
|         self.assertEqual(len(people), 2) |  | ||||||
|         self.assertEqual(people[0].name, 'User A') |  | ||||||
|         self.assertEqual(people[1].name, 'User B') |  | ||||||
|  |  | ||||||
|         # Test slice skip |  | ||||||
|         people = list(self.Person.objects[1:]) |  | ||||||
|         self.assertEqual(len(people), 2) |  | ||||||
|         self.assertEqual(people[0].name, 'User B') |  | ||||||
|         self.assertEqual(people[1].name, 'User C') |  | ||||||
|  |  | ||||||
|         # Test slice limit and skip |  | ||||||
|         people = list(self.Person.objects[1:2]) |  | ||||||
|         self.assertEqual(len(people), 1) |  | ||||||
|         self.assertEqual(people[0].name, 'User B') |  | ||||||
|  |  | ||||||
|     def test_find_one(self): |  | ||||||
|         """Ensure that a query using find_one returns a valid result. |  | ||||||
|         """ |  | ||||||
|         person1 = self.Person(name="User A", age=20) |  | ||||||
|         person1.save() |  | ||||||
|         person2 = self.Person(name="User B", age=30) |  | ||||||
|         person2.save() |  | ||||||
|  |  | ||||||
|         # Retrieve the first person from the database |  | ||||||
|         person = self.Person.objects.first() |  | ||||||
|         self.assertTrue(isinstance(person, self.Person)) |  | ||||||
|         self.assertEqual(person.name, "User A") |  | ||||||
|         self.assertEqual(person.age, 20) |  | ||||||
|  |  | ||||||
|         # Use a query to filter the people found to just person2 |  | ||||||
|         person = self.Person.objects(age=30).first() |  | ||||||
|         self.assertEqual(person.name, "User B") |  | ||||||
|  |  | ||||||
|         person = self.Person.objects(age__lt=30).first() |  | ||||||
|         self.assertEqual(person.name, "User A") |  | ||||||
|  |  | ||||||
|         # Use array syntax |  | ||||||
|         person = self.Person.objects[0] |  | ||||||
|         self.assertEqual(person.name, "User A") |  | ||||||
|  |  | ||||||
|         person = self.Person.objects[1] |  | ||||||
|         self.assertEqual(person.name, "User B") |  | ||||||
|  |  | ||||||
|         self.assertRaises(IndexError, self.Person.objects.__getitem__, 2) |  | ||||||
|          |  | ||||||
|         # Find a document using just the object id |  | ||||||
|         person = self.Person.objects.with_id(person1.id) |  | ||||||
|         self.assertEqual(person.name, "User A") |  | ||||||
|  |  | ||||||
|     def test_filter_chaining(self): |  | ||||||
|         """Ensure filters can be chained together. |  | ||||||
|         """ |  | ||||||
|         from datetime import datetime |  | ||||||
|          |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             title = StringField() |  | ||||||
|             is_published = BooleanField() |  | ||||||
|             published_date = DateTimeField() |  | ||||||
|              |  | ||||||
|             @queryset_manager |  | ||||||
|             def published(queryset): |  | ||||||
|                 return queryset(is_published=True) |  | ||||||
|                  |  | ||||||
|         blog_post_1 = BlogPost(title="Blog Post #1",  |  | ||||||
|                                is_published = True, |  | ||||||
|                                published_date=datetime(2010, 1, 5, 0, 0 ,0)) |  | ||||||
|         blog_post_2 = BlogPost(title="Blog Post #2",  |  | ||||||
|                                is_published = True, |  | ||||||
|                                published_date=datetime(2010, 1, 6, 0, 0 ,0)) |  | ||||||
|         blog_post_3 = BlogPost(title="Blog Post #3",  |  | ||||||
|                                is_published = True, |  | ||||||
|                                published_date=datetime(2010, 1, 7, 0, 0 ,0)) |  | ||||||
|  |  | ||||||
|         blog_post_1.save() |  | ||||||
|         blog_post_2.save() |  | ||||||
|         blog_post_3.save() |  | ||||||
|          |  | ||||||
|         # find all published blog posts before 2010-01-07 |  | ||||||
|         published_posts = BlogPost.published() |  | ||||||
|         published_posts = published_posts.filter( |  | ||||||
|             published_date__lt=datetime(2010, 1, 7, 0, 0 ,0)) |  | ||||||
|         self.assertEqual(published_posts.count(), 2) |  | ||||||
|          |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_ordering(self): |  | ||||||
|         """Ensure default ordering is applied and can be overridden. |  | ||||||
|         """ |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             title = StringField() |  | ||||||
|             published_date = DateTimeField() |  | ||||||
|  |  | ||||||
|             meta = { |  | ||||||
|                 'ordering': ['-published_date'] |  | ||||||
|             } |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         blog_post_1 = BlogPost(title="Blog Post #1",  |  | ||||||
|                                published_date=datetime(2010, 1, 5, 0, 0 ,0)) |  | ||||||
|         blog_post_2 = BlogPost(title="Blog Post #2",  |  | ||||||
|                                published_date=datetime(2010, 1, 6, 0, 0 ,0)) |  | ||||||
|         blog_post_3 = BlogPost(title="Blog Post #3",  |  | ||||||
|                                published_date=datetime(2010, 1, 7, 0, 0 ,0)) |  | ||||||
|  |  | ||||||
|         blog_post_1.save() |  | ||||||
|         blog_post_2.save() |  | ||||||
|         blog_post_3.save() |  | ||||||
|          |  | ||||||
|         # get the "first" BlogPost using default ordering |  | ||||||
|         # from BlogPost.meta.ordering |  | ||||||
|         latest_post = BlogPost.objects.first()  |  | ||||||
|         self.assertEqual(latest_post.title, "Blog Post #3") |  | ||||||
|          |  | ||||||
|         # override default ordering, order BlogPosts by "published_date" |  | ||||||
|         first_post = BlogPost.objects.order_by("+published_date").first() |  | ||||||
|         self.assertEqual(first_post.title, "Blog Post #1") |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_find_embedded(self): |  | ||||||
|         """Ensure that an embedded document is properly returned from a query. |  | ||||||
|         """ |  | ||||||
|         class User(EmbeddedDocument): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             content = StringField() |  | ||||||
|             author = EmbeddedDocumentField(User) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         post = BlogPost(content='Had a good coffee today...') |  | ||||||
|         post.author = User(name='Test User') |  | ||||||
|         post.save() |  | ||||||
|  |  | ||||||
|         result = BlogPost.objects.first() |  | ||||||
|         self.assertTrue(isinstance(result.author, User)) |  | ||||||
|         self.assertEqual(result.author.name, 'Test User') |  | ||||||
|          |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_q(self): |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             publish_date = DateTimeField() |  | ||||||
|             published = BooleanField() |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         post1 = BlogPost(publish_date=datetime(2010, 1, 8), published=False) |  | ||||||
|         post1.save() |  | ||||||
|  |  | ||||||
|         post2 = BlogPost(publish_date=datetime(2010, 1, 15), published=True) |  | ||||||
|         post2.save() |  | ||||||
|  |  | ||||||
|         post3 = BlogPost(published=True) |  | ||||||
|         post3.save() |  | ||||||
|  |  | ||||||
|         post4 = BlogPost(publish_date=datetime(2010, 1, 8)) |  | ||||||
|         post4.save() |  | ||||||
|  |  | ||||||
|         post5 = BlogPost(publish_date=datetime(2010, 1, 15)) |  | ||||||
|         post5.save() |  | ||||||
|  |  | ||||||
|         post6 = BlogPost(published=False) |  | ||||||
|         post6.save() |  | ||||||
|  |  | ||||||
|         date = datetime(2010, 1, 10) |  | ||||||
|         q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True)) |  | ||||||
|         posts = [post.id for post in q] |  | ||||||
|  |  | ||||||
|         published_posts = (post1, post2, post3, post4) |  | ||||||
|         self.assertTrue(all(obj.id in posts for obj in published_posts)) |  | ||||||
|  |  | ||||||
|         self.assertFalse(any(obj.id in posts for obj in [post5, post6])) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_delete(self): |  | ||||||
|         """Ensure that documents are properly deleted from the database. |  | ||||||
|         """ |  | ||||||
|         self.Person(name="User A", age=20).save() |  | ||||||
|         self.Person(name="User B", age=30).save() |  | ||||||
|         self.Person(name="User C", age=40).save() |  | ||||||
|  |  | ||||||
|         self.assertEqual(len(self.Person.objects), 3) |  | ||||||
|  |  | ||||||
|         self.Person.objects(age__lt=30).delete() |  | ||||||
|         self.assertEqual(len(self.Person.objects), 2) |  | ||||||
|  |  | ||||||
|         self.Person.objects.delete() |  | ||||||
|         self.assertEqual(len(self.Person.objects), 0) |  | ||||||
|  |  | ||||||
|     def test_update(self): |  | ||||||
|         """Ensure that atomic updates work properly. |  | ||||||
|         """ |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             title = StringField() |  | ||||||
|             hits = IntField() |  | ||||||
|             tags = ListField(StringField()) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         post = BlogPost(name="Test Post", hits=5, tags=['test']) |  | ||||||
|         post.save() |  | ||||||
|  |  | ||||||
|         BlogPost.objects.update(set__hits=10) |  | ||||||
|         post.reload() |  | ||||||
|         self.assertEqual(post.hits, 10) |  | ||||||
|  |  | ||||||
|         BlogPost.objects.update_one(inc__hits=1) |  | ||||||
|         post.reload() |  | ||||||
|         self.assertEqual(post.hits, 11) |  | ||||||
|  |  | ||||||
|         BlogPost.objects.update_one(dec__hits=1) |  | ||||||
|         post.reload() |  | ||||||
|         self.assertEqual(post.hits, 10) |  | ||||||
|  |  | ||||||
|         BlogPost.objects.update(push__tags='mongo') |  | ||||||
|         post.reload() |  | ||||||
|         self.assertTrue('mongo' in post.tags) |  | ||||||
|  |  | ||||||
|         BlogPost.objects.update_one(push_all__tags=['db', 'nosql']) |  | ||||||
|         post.reload() |  | ||||||
|         self.assertTrue('db' in post.tags and 'nosql' in post.tags) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_order_by(self): |  | ||||||
|         """Ensure that QuerySets may be ordered. |  | ||||||
|         """ |  | ||||||
|         self.Person(name="User A", age=20).save() |  | ||||||
|         self.Person(name="User B", age=40).save() |  | ||||||
|         self.Person(name="User C", age=30).save() |  | ||||||
|  |  | ||||||
|         names = [p.name for p in self.Person.objects.order_by('-age')] |  | ||||||
|         self.assertEqual(names, ['User B', 'User C', 'User A']) |  | ||||||
|  |  | ||||||
|         names = [p.name for p in self.Person.objects.order_by('+age')] |  | ||||||
|         self.assertEqual(names, ['User A', 'User C', 'User B']) |  | ||||||
|  |  | ||||||
|         names = [p.name for p in self.Person.objects.order_by('age')] |  | ||||||
|         self.assertEqual(names, ['User A', 'User C', 'User B']) |  | ||||||
|          |  | ||||||
|         ages = [p.age for p in self.Person.objects.order_by('-name')] |  | ||||||
|         self.assertEqual(ages, [30, 40, 20]) |  | ||||||
|  |  | ||||||
|     def test_item_frequencies(self): |  | ||||||
|         """Ensure that item frequencies are properly generated from lists. |  | ||||||
|         """ |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             hits = IntField() |  | ||||||
|             tags = ListField(StringField(), name='blogTags') |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         BlogPost(hits=1, tags=['music', 'film', 'actors']).save() |  | ||||||
|         BlogPost(hits=2, tags=['music']).save() |  | ||||||
|         BlogPost(hits=3, tags=['music', 'actors']).save() |  | ||||||
|  |  | ||||||
|         f = BlogPost.objects.item_frequencies('tags') |  | ||||||
|         f = dict((key, int(val)) for key, val in f.items()) |  | ||||||
|         self.assertEqual(set(['music', 'film', 'actors']), set(f.keys())) |  | ||||||
|         self.assertEqual(f['music'], 3) |  | ||||||
|         self.assertEqual(f['actors'], 2) |  | ||||||
|         self.assertEqual(f['film'], 1) |  | ||||||
|  |  | ||||||
|         # Ensure query is taken into account |  | ||||||
|         f = BlogPost.objects(hits__gt=1).item_frequencies('tags') |  | ||||||
|         f = dict((key, int(val)) for key, val in f.items()) |  | ||||||
|         self.assertEqual(set(['music', 'actors']), set(f.keys())) |  | ||||||
|         self.assertEqual(f['music'], 2) |  | ||||||
|         self.assertEqual(f['actors'], 1) |  | ||||||
|  |  | ||||||
|         # Check that normalization works |  | ||||||
|         f = BlogPost.objects.item_frequencies('tags', normalize=True) |  | ||||||
|         self.assertAlmostEqual(f['music'], 3.0/6.0) |  | ||||||
|         self.assertAlmostEqual(f['actors'], 2.0/6.0) |  | ||||||
|         self.assertAlmostEqual(f['film'], 1.0/6.0) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_average(self): |  | ||||||
|         """Ensure that field can be averaged correctly. |  | ||||||
|         """ |  | ||||||
|         ages = [23, 54, 12, 94, 27] |  | ||||||
|         for i, age in enumerate(ages): |  | ||||||
|             self.Person(name='test%s' % i, age=age).save() |  | ||||||
|  |  | ||||||
|         avg = float(sum(ages)) / len(ages) |  | ||||||
|         self.assertAlmostEqual(int(self.Person.objects.average('age')), avg) |  | ||||||
|  |  | ||||||
|         self.Person(name='ageless person').save() |  | ||||||
|         self.assertEqual(int(self.Person.objects.average('age')), avg) |  | ||||||
|  |  | ||||||
|     def test_sum(self): |  | ||||||
|         """Ensure that field can be summed over correctly. |  | ||||||
|         """ |  | ||||||
|         ages = [23, 54, 12, 94, 27] |  | ||||||
|         for i, age in enumerate(ages): |  | ||||||
|             self.Person(name='test%s' % i, age=age).save() |  | ||||||
|  |  | ||||||
|         self.assertEqual(int(self.Person.objects.sum('age')), sum(ages)) |  | ||||||
|  |  | ||||||
|         self.Person(name='ageless person').save() |  | ||||||
|         self.assertEqual(int(self.Person.objects.sum('age')), sum(ages)) |  | ||||||
|  |  | ||||||
|     def test_custom_manager(self): |  | ||||||
|         """Ensure that custom QuerySetManager instances work as expected. |  | ||||||
|         """ |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             tags = ListField(StringField()) |  | ||||||
|  |  | ||||||
|             @queryset_manager |  | ||||||
|             def music_posts(queryset): |  | ||||||
|                 return queryset(tags='music') |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         post1 = BlogPost(tags=['music', 'film']) |  | ||||||
|         post1.save() |  | ||||||
|         post2 = BlogPost(tags=['music']) |  | ||||||
|         post2.save() |  | ||||||
|         post3 = BlogPost(tags=['film', 'actors']) |  | ||||||
|         post3.save() |  | ||||||
|  |  | ||||||
|         self.assertEqual([p.id for p in BlogPost.objects], |  | ||||||
|                          [post1.id, post2.id, post3.id]) |  | ||||||
|         self.assertEqual([p.id for p in BlogPost.music_posts], |  | ||||||
|                          [post1.id, post2.id]) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_query_field_name(self): |  | ||||||
|         """Ensure that the correct field name is used when querying. |  | ||||||
|         """ |  | ||||||
|         class Comment(EmbeddedDocument): |  | ||||||
|             content = StringField(name='commentContent') |  | ||||||
|  |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             title = StringField(name='postTitle') |  | ||||||
|             comments = ListField(EmbeddedDocumentField(Comment), |  | ||||||
|                                  name='postComments') |  | ||||||
|                                   |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         data = {'title': 'Post 1', 'comments': [Comment(content='test')]} |  | ||||||
|         BlogPost(**data).save() |  | ||||||
|  |  | ||||||
|         self.assertTrue('postTitle' in  |  | ||||||
|                         BlogPost.objects(title=data['title'])._query) |  | ||||||
|         self.assertFalse('title' in  |  | ||||||
|                          BlogPost.objects(title=data['title'])._query) |  | ||||||
|         self.assertEqual(len(BlogPost.objects(title=data['title'])), 1) |  | ||||||
|  |  | ||||||
|         self.assertTrue('postComments.commentContent' in  |  | ||||||
|                         BlogPost.objects(comments__content='test')._query) |  | ||||||
|         self.assertEqual(len(BlogPost.objects(comments__content='test')), 1) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_query_value_conversion(self): |  | ||||||
|         """Ensure that query values are properly converted when necessary. |  | ||||||
|         """ |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             author = ReferenceField(self.Person) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         person = self.Person(name='test', age=30) |  | ||||||
|         person.save() |  | ||||||
|  |  | ||||||
|         post = BlogPost(author=person) |  | ||||||
|         post.save() |  | ||||||
|  |  | ||||||
|         # Test that query may be performed by providing a document as a value |  | ||||||
|         # while using a ReferenceField's name - the document should be  |  | ||||||
|         # converted to an DBRef, which is legal, unlike a Document object |  | ||||||
|         post_obj = BlogPost.objects(author=person).first() |  | ||||||
|         self.assertEqual(post.id, post_obj.id) |  | ||||||
|  |  | ||||||
|         # Test that lists of values work when using the 'in', 'nin' and 'all' |  | ||||||
|         post_obj = BlogPost.objects(author__in=[person]).first() |  | ||||||
|         self.assertEqual(post.id, post_obj.id) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|  |  | ||||||
    def test_types_index(self):
        """Ensure that an index is used when '_types' is being used in a
        query.
        """
        class BlogPost(Document):
            date = DateTimeField()
            meta = {'indexes': ['-date']}

        # Indexes are lazy so use list() to perform query
        list(BlogPost.objects)
        info = BlogPost.objects._collection.index_information()
        # Both the bare _types index and the compound (_types, -date) index
        # declared via meta['indexes'] should have been created.
        self.assertTrue([('_types', 1)] in info.values())
        self.assertTrue([('_types', 1), ('date', -1)] in info.values())

        BlogPost.drop_collection()

        class BlogPost(Document):
            title = StringField()
            meta = {'allow_inheritance': False}

        # _types is not used on objects where allow_inheritance is False
        list(BlogPost.objects)
        info = BlogPost.objects._collection.index_information()
        self.assertFalse([('_types', 1)] in info.values())

        BlogPost.drop_collection()
|  |  | ||||||
    def tearDown(self):
        # Remove every Person document created by the tests in this class.
        self.Person.drop_collection()
|  |  | ||||||
|  |  | ||||||
class QTest(unittest.TestCase):
    """Tests for Q-object combination and Javascript query conversion."""

    def test_or_and(self):
        """Q objects combined with | and & should build bracketed infix
        query lists.
        """
        q1 = Q(name='test')
        q2 = Q(age__gte=18)

        expected = ['(', {'name': 'test'}, '||', {'age__gte': 18}, ')']
        self.assertEqual((q1 | q2).query, expected)

        expected = ['(', {'name': 'test'}, '&&', {'age__gte': 18}, ')']
        self.assertEqual((q1 & q2).query, expected)

        # & binds tighter than |, so the AND pair nests inside the OR.
        expected = ['(', '(', {'name': 'test'}, '&&', {'age__gte': 18}, ')',
                    '||', {'name': 'example'}, ')']
        self.assertEqual((q1 & q2 | Q(name='example')).query, expected)

    def test_item_query_as_js(self):
        """Ensure that the _item_query_as_js utility method works properly."""
        q = Q()
        examples = [
            ({'name': 'test'}, 'this.name == i0f0', {'i0f0': 'test'}),
            ({'age': {'$gt': 18}}, 'this.age > i0f0o0', {'i0f0o0': 18}),
            ({'name': 'test', 'age': {'$gt': 18, '$lte': 65}},
             'this.age <= i0f0o0 && this.age > i0f0o1 && this.name == i0f1',
             {'i0f0o0': 65, 'i0f0o1': 18, 'i0f1': 'test'}),
        ]
        for item, expected_js, expected_scope in examples:
            scope = {}
            self.assertEqual(q._item_query_as_js(item, scope, 0), expected_js)
            self.assertEqual(expected_scope, scope)
|  |  | ||||||
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
							
								
								
									
										96
									
								
								tests/test_all_warnings.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										96
									
								
								tests/test_all_warnings.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,96 @@ | |||||||
|  | import unittest | ||||||
|  | import warnings | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.tests import query_counter | ||||||
|  |  | ||||||
|  |  | ||||||
class TestWarnings(unittest.TestCase):
    """Capture warnings raised by mongoengine and assert on their category
    and message.

    ``warnings.showwarning`` is monkey-patched in setUp so every emitted
    warning is appended to ``self.warning_list`` instead of being printed;
    tearDown restores the original handler.
    """

    def setUp(self):
        # NOTE: the returned connection object is not needed here, only the
        # side effect of connecting (the original bound it to an unused local).
        connect(db='mongoenginetest')
        self.warning_list = []
        self.showwarning_default = warnings.showwarning
        warnings.showwarning = self.append_to_warning_list

    def append_to_warning_list(self, message, category, *args):
        # Collector installed as warnings.showwarning in setUp.
        self.warning_list.append({"message": message,
                                  "category": category})

    def tearDown(self):
        # restore default handling of warnings
        warnings.showwarning = self.showwarning_default

    def test_allow_inheritance_future_warning(self):
        """Add FutureWarning for future allow_inheritance default change.
        """

        class SimpleBase(Document):
            a = IntField()

        class InheritedClass(SimpleBase):
            b = IntField()

        InheritedClass()
        self.assertEqual(len(self.warning_list), 1)
        warning = self.warning_list[0]
        self.assertEqual(FutureWarning, warning["category"])
        self.assertTrue("InheritedClass" in str(warning["message"]))

    def test_dbref_reference_field_future_warning(self):
        """Saving a document holding a reference should warn about the
        upcoming ObjectId default for ReferenceFields.
        """
        class Person(Document):
            name = StringField()
            parent = ReferenceField('self')

        Person.drop_collection()

        p1 = Person()
        p1.parent = None
        p1.save()

        p2 = Person(name="Wilson Jr")
        p2.parent = p1
        p2.save(cascade=False)

        self.assertEqual(len(self.warning_list), 1)
        warning = self.warning_list[0]
        self.assertEqual(FutureWarning, warning["category"])
        self.assertTrue("ReferenceFields will default to using ObjectId"
                        in str(warning["message"]))

    def test_document_save_cascade_future_warning(self):
        """Saving with a modified referenced document should warn that
        cascading saves will default to off in 0.8.
        """
        class Person(Document):
            name = StringField()
            parent = ReferenceField('self')

        Person.drop_collection()

        p1 = Person(name="Wilson Snr")
        p1.parent = None
        p1.save()

        p2 = Person(name="Wilson Jr")
        p2.parent = p1
        p2.parent.name = "Poppa Wilson"
        p2.save()

        self.assertEqual(len(self.warning_list), 1)
        warning = self.warning_list[0]
        self.assertEqual(FutureWarning, warning["category"])
        self.assertTrue("Cascading saves will default to off in 0.8"
                        in str(warning["message"]))

    def test_document_collection_syntax_warning(self):
        """Overriding 'collection' meta on a non-abstract subclass should
        raise a SyntaxWarning and be ignored.
        """
        class NonAbstractBase(Document):
            pass

        class InheritedDocumentFailTest(NonAbstractBase):
            meta = {'collection': 'fail'}

        # Guard against an IndexError masking the real failure when no
        # warning was raised at all.
        self.assertTrue(len(self.warning_list) >= 1)
        warning = self.warning_list[0]
        self.assertEqual(SyntaxWarning, warning["category"])
        self.assertEqual('non_abstract_base',
                         InheritedDocumentFailTest._get_collection_name())
							
								
								
									
										98
									
								
								tests/test_connection.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										98
									
								
								tests/test_connection.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,98 @@ | |||||||
|  | import datetime | ||||||
|  | import pymongo | ||||||
|  | import unittest | ||||||
|  |  | ||||||
|  | import mongoengine.connection | ||||||
|  |  | ||||||
|  | from bson.tz_util import utc | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.connection import get_db, get_connection, ConnectionError | ||||||
|  |  | ||||||
|  |  | ||||||
class ConnectionTest(unittest.TestCase):
    """Exercise mongoengine's connection registry: connect(), aliases,
    register_connection(), URI handling and pymongo kwargs pass-through.
    """

    def tearDown(self):
        # Reset the module-level registries so each test starts from a
        # clean slate (connect() caches settings/connections/dbs globally).
        mongoengine.connection._connection_settings = {}
        mongoengine.connection._connections = {}
        mongoengine.connection._dbs = {}

    def test_connect(self):
        """Ensure that the connect() method works properly.
        """
        connect('mongoenginetest')

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')

        # A second connection registered under an explicit alias.
        connect('mongoenginetest2', alias='testdb')
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

    def test_connect_uri(self):
        """Ensure that the connect() method works properly with URIs.
        """
        # Set up an authenticated user on both the admin db and the test db.
        c = connect(db='mongoenginetest', alias='admin')
        c.admin.system.users.remove({})
        c.mongoenginetest.system.users.remove({})

        c.admin.add_user("admin", "password")
        c.admin.authenticate("admin", "password")
        c.mongoenginetest.add_user("username", "password")

        # Bad credentials in the URI must raise ConnectionError.
        self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')

        connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')

    def test_register_connection(self):
        """Ensure that connections with different aliases may be registered.
        """
        register_connection('testdb', 'mongoenginetest2')

        # register_connection only records settings, so the default alias
        # is still unconfigured and must raise.
        self.assertRaises(ConnectionError, get_connection)
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

        db = get_db('testdb')
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest2')

    def test_connection_kwargs(self):
        """Ensure that connection kwargs get passed to pymongo.
        """
        connect('mongoenginetest', alias='t1', tz_aware=True)
        conn = get_connection('t1')

        self.assertTrue(conn.tz_aware)

        # Default is tz-naive when the kwarg is omitted.
        connect('mongoenginetest2', alias='t2')
        conn = get_connection('t2')
        self.assertFalse(conn.tz_aware)

    def test_datetime(self):
        # A tz-aware datetime should round-trip through a DateTimeField
        # when the connection is created with tz_aware=True.
        connect('mongoenginetest', tz_aware=True)
        d = datetime.datetime(2010, 5, 5, tzinfo=utc)

        class DateDoc(Document):
            the_date = DateTimeField(required=True)

        DateDoc.drop_collection()
        DateDoc(the_date=d).save()

        date_doc = DateDoc.objects.first()
        self.assertEqual(d, date_doc.the_date)
|  |  | ||||||
|  |  | ||||||
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
							
								
								
									
										992
									
								
								tests/test_dereference.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										992
									
								
								tests/test_dereference.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,992 @@ | |||||||
|  | from __future__ import with_statement | ||||||
|  | import unittest | ||||||
|  |  | ||||||
|  | from bson import DBRef | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.connection import get_db | ||||||
|  | from mongoengine.tests import query_counter | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class FieldTest(unittest.TestCase): | ||||||
|  |  | ||||||
    def setUp(self):
        # Connect to the test database and keep a raw pymongo db handle.
        connect(db='mongoenginetest')
        self.db = get_db()
|  |  | ||||||
    def test_list_item_dereference(self):
        """Ensure that DBRef items in ListFields are dereferenced.

        Uses query_counter to verify the number of round-trips: lazy
        dereference costs one extra query; select_related() pre-fetches so
        iterating members adds no further queries.
        """
        class User(Document):
            name = StringField()

        class Group(Document):
            members = ListField(ReferenceField(User))

        User.drop_collection()
        Group.drop_collection()

        # 50 users (xrange is exclusive of the upper bound).
        for i in xrange(1, 51):
            user = User(name='user %s' % i)
            user.save()

        group = Group(members=User.objects)
        group.save()

        group = Group(members=User.objects)
        group.save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            # Touching members triggers exactly one dereference query.
            [m for m in group_obj.members]
            self.assertEqual(q, 2)

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first().select_related()
            self.assertEqual(q, 2)
            # Already dereferenced - no additional query.
            [m for m in group_obj.members]
            self.assertEqual(q, 2)

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)
            group_objs = Group.objects.select_related()
            self.assertEqual(q, 2)
            for group_obj in group_objs:
                [m for m in group_obj.members]
                self.assertEqual(q, 2)

        User.drop_collection()
        Group.drop_collection()
|  |  | ||||||
    def test_list_item_dereference_dref_false(self):
        """Ensure that reference items stored as plain ids (dbref=False) in
        ListFields are dereferenced, with the same query counts as DBRefs.
        """
        class User(Document):
            name = StringField()

        class Group(Document):
            members = ListField(ReferenceField(User, dbref=False))

        User.drop_collection()
        Group.drop_collection()

        for i in xrange(1, 51):
            user = User(name='user %s' % i)
            user.save()

        group = Group(members=User.objects)
        group.save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first()
            self.assertEqual(q, 1)

            # Lazy dereference costs one extra query.
            [m for m in group_obj.members]
            self.assertEqual(q, 2)

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            group_obj = Group.objects.first().select_related()

            self.assertEqual(q, 2)
            # Pre-fetched - no further queries when iterating.
            [m for m in group_obj.members]
            self.assertEqual(q, 2)

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)
            group_objs = Group.objects.select_related()
            self.assertEqual(q, 2)
            for group_obj in group_objs:
                [m for m in group_obj.members]
                self.assertEqual(q, 2)

        User.drop_collection()
        Group.drop_collection()
|  |  | ||||||
|  |     def test_handle_old_style_references(self): | ||||||
|  |         """Ensure that DBRef items in ListFields are dereferenced. | ||||||
|  |         """ | ||||||
|  |         class User(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Group(Document): | ||||||
|  |             members = ListField(ReferenceField(User, dbref=True)) | ||||||
|  |  | ||||||
|  |         User.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |         for i in xrange(1, 26): | ||||||
|  |             user = User(name='user %s' % i) | ||||||
|  |             user.save() | ||||||
|  |  | ||||||
|  |         group = Group(members=User.objects) | ||||||
|  |         group.save() | ||||||
|  |  | ||||||
|  |         group = Group._get_collection().find_one() | ||||||
|  |  | ||||||
|  |         # Update the model to change the reference | ||||||
|  |         class Group(Document): | ||||||
|  |             members = ListField(ReferenceField(User, dbref=False)) | ||||||
|  |  | ||||||
|  |         group = Group.objects.first() | ||||||
|  |         group.members.append(User(name="String!").save()) | ||||||
|  |         group.save() | ||||||
|  |  | ||||||
|  |         group = Group.objects.first() | ||||||
|  |         self.assertEqual(group.members[0].name, 'user 1') | ||||||
|  |         self.assertEqual(group.members[-1].name, 'String!') | ||||||
|  |  | ||||||
    def test_migrate_references(self):
        """Example of migrating ReferenceField storage from DBRef to plain id.
        """

        # Create some sample data
        class User(Document):
            name = StringField()

        class Group(Document):
            author = ReferenceField(User, dbref=True)
            members = ListField(ReferenceField(User, dbref=True))

        User.drop_collection()
        Group.drop_collection()

        user = User(name="Ross").save()
        group = Group(author=user, members=[user]).save()

        # With dbref=True the raw documents hold DBRef values.
        raw_data = Group._get_collection().find_one()
        self.assertTrue(isinstance(raw_data['author'], DBRef))
        self.assertTrue(isinstance(raw_data['members'][0], DBRef))

        # Migrate the model definition
        class Group(Document):
            author = ReferenceField(User, dbref=False)
            members = ListField(ReferenceField(User, dbref=False))

        # Migrate the data: re-assigning each field marks it dirty so the
        # save rewrites it in the new storage format.
        for g in Group.objects():
            g.author = g.author
            g.members = g.members
            g.save()

        group = Group.objects.first()
        self.assertEqual(group.author, user)
        self.assertEqual(group.members, [user])

        # After migration the raw values are plain id strings, not DBRefs.
        raw_data = Group._get_collection().find_one()
        self.assertTrue(isinstance(raw_data['author'], basestring))
        self.assertTrue(isinstance(raw_data['members'][0], basestring))
|  |  | ||||||
    def test_recursive_reference(self):
        """Ensure that ReferenceFields can reference their own documents,
        and that select_related() prefetches self-references in a bounded
        number of queries.
        """
        class Employee(Document):
            name = StringField()
            boss = ReferenceField('self')
            friends = ListField(ReferenceField('self'))

        Employee.drop_collection()

        bill = Employee(name='Bill Lumbergh')
        bill.save()

        michael = Employee(name='Michael Bolton')
        michael.save()

        samir = Employee(name='Samir Nagheenanajar')
        samir.save()

        friends = [michael, samir]
        peter = Employee(name='Peter Gibbons', boss=bill, friends=friends)
        peter.save()

        # Extra employees sharing the same boss/friends, so the queryset
        # select_related case below iterates several documents.
        Employee(name='Funky Gibbon', boss=bill, friends=friends).save()
        Employee(name='Funky Gibbon', boss=bill, friends=friends).save()
        Employee(name='Funky Gibbon', boss=bill, friends=friends).save()

        with query_counter() as q:
            self.assertEqual(q, 0)

            peter = Employee.objects.with_id(peter.id)
            self.assertEqual(q, 1)

            # Each lazy attribute access costs one dereference query.
            peter.boss
            self.assertEqual(q, 2)

            peter.friends
            self.assertEqual(q, 3)

        # Document select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            peter = Employee.objects.with_id(peter.id).select_related()
            self.assertEqual(q, 2)

            # Already dereferenced - no additional queries.
            self.assertEqual(peter.boss, bill)
            self.assertEqual(q, 2)

            self.assertEqual(peter.friends, friends)
            self.assertEqual(q, 2)

        # Queryset select_related
        with query_counter() as q:
            self.assertEqual(q, 0)

            employees = Employee.objects(boss=bill).select_related()
            self.assertEqual(q, 2)

            for employee in employees:
                self.assertEqual(employee.boss, bill)
                self.assertEqual(q, 2)

                self.assertEqual(employee.friends, friends)
                self.assertEqual(q, 2)
|  |  | ||||||
    def test_circular_reference(self):
        """Ensure you can handle circular references (documents referencing
        each other through embedded Relation documents).
        """
        class Person(Document):
            name = StringField()
            relations = ListField(EmbeddedDocumentField('Relation'))

            def __repr__(self):
                return "<Person: %s>" % self.name

        class Relation(EmbeddedDocument):
            name = StringField()
            person = ReferenceField('Person')

        Person.drop_collection()
        mother = Person(name="Mother")
        daughter = Person(name="Daughter")

        mother.save()
        daughter.save()

        daughter_rel = Relation(name="Daughter", person=daughter)
        mother.relations.append(daughter_rel)
        mother.save()

        # NOTE(review): name="Daughter" here points at the mother - looks
        # like a copy-paste slip ("Mother" was probably intended), but the
        # final assertion does not depend on the relation names.
        mother_rel = Relation(name="Daughter", person=mother)
        self_rel = Relation(name="Self", person=daughter)
        daughter.relations.append(mother_rel)
        daughter.relations.append(self_rel)
        daughter.save()

        # Repr of the queryset must not recurse infinitely.
        self.assertEqual("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
|  |  | ||||||
|  |     def test_circular_reference_on_self(self): | ||||||
|  |         """Ensure you can handle circular references | ||||||
|  |         """ | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             relations = ListField(ReferenceField('self')) | ||||||
|  |  | ||||||
|  |             def __repr__(self): | ||||||
|  |                 return "<Person: %s>" % self.name | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |         mother = Person(name="Mother") | ||||||
|  |         daughter = Person(name="Daughter") | ||||||
|  |  | ||||||
|  |         mother.save() | ||||||
|  |         daughter.save() | ||||||
|  |  | ||||||
|  |         mother.relations.append(daughter) | ||||||
|  |         mother.save() | ||||||
|  |  | ||||||
|  |         daughter.relations.append(mother) | ||||||
|  |         daughter.relations.append(daughter) | ||||||
|  |         daughter.save() | ||||||
|  |  | ||||||
|  |         self.assertEqual("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects()) | ||||||
|  |  | ||||||
|  |     def test_circular_tree_reference(self): | ||||||
|  |         """Ensure you can handle circular references with more than one level | ||||||
|  |         """ | ||||||
|  |         class Other(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  |             friends = ListField(ReferenceField('Person')) | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             other = EmbeddedDocumentField(Other, default=lambda: Other()) | ||||||
|  |  | ||||||
|  |             def __repr__(self): | ||||||
|  |                 return "<Person: %s>" % self.name | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |         paul = Person(name="Paul") | ||||||
|  |         paul.save() | ||||||
|  |         maria = Person(name="Maria") | ||||||
|  |         maria.save() | ||||||
|  |         julia = Person(name='Julia') | ||||||
|  |         julia.save() | ||||||
|  |         anna = Person(name='Anna') | ||||||
|  |         anna.save() | ||||||
|  |  | ||||||
|  |         paul.other.friends = [maria, julia, anna] | ||||||
|  |         paul.other.name = "Paul's friends" | ||||||
|  |         paul.save() | ||||||
|  |  | ||||||
|  |         maria.other.friends = [paul, julia, anna] | ||||||
|  |         maria.other.name = "Maria's friends" | ||||||
|  |         maria.save() | ||||||
|  |  | ||||||
|  |         julia.other.friends = [paul, maria, anna] | ||||||
|  |         julia.other.name = "Julia's friends" | ||||||
|  |         julia.save() | ||||||
|  |  | ||||||
|  |         anna.other.friends = [paul, maria, julia] | ||||||
|  |         anna.other.name = "Anna's friends" | ||||||
|  |         anna.save() | ||||||
|  |  | ||||||
|  |         self.assertEqual( | ||||||
|  |             "[<Person: Paul>, <Person: Maria>, <Person: Julia>, <Person: Anna>]", | ||||||
|  |             "%s" % Person.objects() | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def test_generic_reference(self): | ||||||
|  |  | ||||||
|  |         class UserA(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class UserB(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class UserC(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Group(Document): | ||||||
|  |             members = ListField(GenericReferenceField()) | ||||||
|  |  | ||||||
|  |         UserA.drop_collection() | ||||||
|  |         UserB.drop_collection() | ||||||
|  |         UserC.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |         members = [] | ||||||
|  |         for i in xrange(1, 51): | ||||||
|  |             a = UserA(name='User A %s' % i) | ||||||
|  |             a.save() | ||||||
|  |  | ||||||
|  |             b = UserB(name='User B %s' % i) | ||||||
|  |             b.save() | ||||||
|  |  | ||||||
|  |             c = UserC(name='User C %s' % i) | ||||||
|  |             c.save() | ||||||
|  |  | ||||||
|  |             members += [a, b, c] | ||||||
|  |  | ||||||
|  |         group = Group(members=members) | ||||||
|  |         group.save() | ||||||
|  |  | ||||||
|  |         group = Group(members=members) | ||||||
|  |         group.save() | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first() | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             for m in group_obj.members: | ||||||
|  |                 self.assertTrue('User' in m.__class__.__name__) | ||||||
|  |  | ||||||
|  |         # Document select_related | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first().select_related() | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             for m in group_obj.members: | ||||||
|  |                 self.assertTrue('User' in m.__class__.__name__) | ||||||
|  |  | ||||||
|  |         # Queryset select_related | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_objs = Group.objects.select_related() | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             for group_obj in group_objs: | ||||||
|  |                 [m for m in group_obj.members] | ||||||
|  |                 self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |                 [m for m in group_obj.members] | ||||||
|  |                 self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |                 for m in group_obj.members: | ||||||
|  |                     self.assertTrue('User' in m.__class__.__name__) | ||||||
|  |  | ||||||
|  |         UserA.drop_collection() | ||||||
|  |         UserB.drop_collection() | ||||||
|  |         UserC.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_list_field_complex(self): | ||||||
|  |  | ||||||
|  |         class UserA(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class UserB(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class UserC(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Group(Document): | ||||||
|  |             members = ListField() | ||||||
|  |  | ||||||
|  |         UserA.drop_collection() | ||||||
|  |         UserB.drop_collection() | ||||||
|  |         UserC.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |         members = [] | ||||||
|  |         for i in xrange(1, 51): | ||||||
|  |             a = UserA(name='User A %s' % i) | ||||||
|  |             a.save() | ||||||
|  |  | ||||||
|  |             b = UserB(name='User B %s' % i) | ||||||
|  |             b.save() | ||||||
|  |  | ||||||
|  |             c = UserC(name='User C %s' % i) | ||||||
|  |             c.save() | ||||||
|  |  | ||||||
|  |             members += [a, b, c] | ||||||
|  |  | ||||||
|  |         group = Group(members=members) | ||||||
|  |         group.save() | ||||||
|  |  | ||||||
|  |         group = Group(members=members) | ||||||
|  |         group.save() | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first() | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             for m in group_obj.members: | ||||||
|  |                 self.assertTrue('User' in m.__class__.__name__) | ||||||
|  |  | ||||||
|  |         # Document select_related | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first().select_related() | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             for m in group_obj.members: | ||||||
|  |                 self.assertTrue('User' in m.__class__.__name__) | ||||||
|  |  | ||||||
|  |         # Queryset select_related | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_objs = Group.objects.select_related() | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             for group_obj in group_objs: | ||||||
|  |                 [m for m in group_obj.members] | ||||||
|  |                 self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |                 [m for m in group_obj.members] | ||||||
|  |                 self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |                 for m in group_obj.members: | ||||||
|  |                     self.assertTrue('User' in m.__class__.__name__) | ||||||
|  |  | ||||||
|  |         UserA.drop_collection() | ||||||
|  |         UserB.drop_collection() | ||||||
|  |         UserC.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_map_field_reference(self): | ||||||
|  |  | ||||||
|  |         class User(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Group(Document): | ||||||
|  |             members = MapField(ReferenceField(User)) | ||||||
|  |  | ||||||
|  |         User.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |         members = [] | ||||||
|  |         for i in xrange(1, 51): | ||||||
|  |             user = User(name='user %s' % i) | ||||||
|  |             user.save() | ||||||
|  |             members.append(user) | ||||||
|  |  | ||||||
|  |         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||||
|  |         group.save() | ||||||
|  |  | ||||||
|  |         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||||
|  |         group.save() | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first() | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |             for k, m in group_obj.members.iteritems(): | ||||||
|  |                 self.assertTrue(isinstance(m, User)) | ||||||
|  |  | ||||||
|  |         # Document select_related | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first().select_related() | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |             for k, m in group_obj.members.iteritems(): | ||||||
|  |                 self.assertTrue(isinstance(m, User)) | ||||||
|  |  | ||||||
|  |        # Queryset select_related | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_objs = Group.objects.select_related() | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |             for group_obj in group_objs: | ||||||
|  |                 [m for m in group_obj.members] | ||||||
|  |                 self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |                 for k, m in group_obj.members.iteritems(): | ||||||
|  |                     self.assertTrue(isinstance(m, User)) | ||||||
|  |  | ||||||
|  |         User.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_dict_field(self): | ||||||
|  |  | ||||||
|  |         class UserA(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class UserB(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class UserC(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Group(Document): | ||||||
|  |             members = DictField() | ||||||
|  |  | ||||||
|  |         UserA.drop_collection() | ||||||
|  |         UserB.drop_collection() | ||||||
|  |         UserC.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |         members = [] | ||||||
|  |         for i in xrange(1, 51): | ||||||
|  |             a = UserA(name='User A %s' % i) | ||||||
|  |             a.save() | ||||||
|  |  | ||||||
|  |             b = UserB(name='User B %s' % i) | ||||||
|  |             b.save() | ||||||
|  |  | ||||||
|  |             c = UserC(name='User C %s' % i) | ||||||
|  |             c.save() | ||||||
|  |  | ||||||
|  |             members += [a, b, c] | ||||||
|  |  | ||||||
|  |         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||||
|  |         group.save() | ||||||
|  |         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||||
|  |         group.save() | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first() | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             for k, m in group_obj.members.iteritems(): | ||||||
|  |                 self.assertTrue('User' in m.__class__.__name__) | ||||||
|  |  | ||||||
|  |         # Document select_related | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first().select_related() | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             for k, m in group_obj.members.iteritems(): | ||||||
|  |                 self.assertTrue('User' in m.__class__.__name__) | ||||||
|  |  | ||||||
|  |         # Queryset select_related | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_objs = Group.objects.select_related() | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             for group_obj in group_objs: | ||||||
|  |                 [m for m in group_obj.members] | ||||||
|  |                 self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |                 [m for m in group_obj.members] | ||||||
|  |                 self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |                 for k, m in group_obj.members.iteritems(): | ||||||
|  |                     self.assertTrue('User' in m.__class__.__name__) | ||||||
|  |  | ||||||
|  |         Group.objects.delete() | ||||||
|  |         Group().save() | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first() | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |             self.assertEqual(group_obj.members, {}) | ||||||
|  |  | ||||||
|  |         UserA.drop_collection() | ||||||
|  |         UserB.drop_collection() | ||||||
|  |         UserC.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_dict_field_no_field_inheritance(self): | ||||||
|  |  | ||||||
|  |         class UserA(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {'allow_inheritance': False} | ||||||
|  |  | ||||||
|  |         class Group(Document): | ||||||
|  |             members = DictField() | ||||||
|  |  | ||||||
|  |         UserA.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |         members = [] | ||||||
|  |         for i in xrange(1, 51): | ||||||
|  |             a = UserA(name='User A %s' % i) | ||||||
|  |             a.save() | ||||||
|  |  | ||||||
|  |             members += [a] | ||||||
|  |  | ||||||
|  |         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||||
|  |         group.save() | ||||||
|  |  | ||||||
|  |         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||||
|  |         group.save() | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first() | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |             for k, m in group_obj.members.iteritems(): | ||||||
|  |                 self.assertTrue(isinstance(m, UserA)) | ||||||
|  |  | ||||||
|  |         # Document select_related | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first().select_related() | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |             for k, m in group_obj.members.iteritems(): | ||||||
|  |                 self.assertTrue(isinstance(m, UserA)) | ||||||
|  |  | ||||||
|  |         # Queryset select_related | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_objs = Group.objects.select_related() | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |             for group_obj in group_objs: | ||||||
|  |                 [m for m in group_obj.members] | ||||||
|  |                 self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |                 [m for m in group_obj.members] | ||||||
|  |                 self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |                 for k, m in group_obj.members.iteritems(): | ||||||
|  |                     self.assertTrue(isinstance(m, UserA)) | ||||||
|  |  | ||||||
|  |         UserA.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_generic_reference_map_field(self): | ||||||
|  |  | ||||||
|  |         class UserA(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class UserB(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class UserC(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Group(Document): | ||||||
|  |             members = MapField(GenericReferenceField()) | ||||||
|  |  | ||||||
|  |         UserA.drop_collection() | ||||||
|  |         UserB.drop_collection() | ||||||
|  |         UserC.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |         members = [] | ||||||
|  |         for i in xrange(1, 51): | ||||||
|  |             a = UserA(name='User A %s' % i) | ||||||
|  |             a.save() | ||||||
|  |  | ||||||
|  |             b = UserB(name='User B %s' % i) | ||||||
|  |             b.save() | ||||||
|  |  | ||||||
|  |             c = UserC(name='User C %s' % i) | ||||||
|  |             c.save() | ||||||
|  |  | ||||||
|  |             members += [a, b, c] | ||||||
|  |  | ||||||
|  |         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||||
|  |         group.save() | ||||||
|  |         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||||
|  |         group.save() | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first() | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             for k, m in group_obj.members.iteritems(): | ||||||
|  |                 self.assertTrue('User' in m.__class__.__name__) | ||||||
|  |  | ||||||
|  |         # Document select_related | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first().select_related() | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             for k, m in group_obj.members.iteritems(): | ||||||
|  |                 self.assertTrue('User' in m.__class__.__name__) | ||||||
|  |  | ||||||
|  |         # Queryset select_related | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_objs = Group.objects.select_related() | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |             for group_obj in group_objs: | ||||||
|  |                 [m for m in group_obj.members] | ||||||
|  |                 self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |                 [m for m in group_obj.members] | ||||||
|  |                 self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|  |                 for k, m in group_obj.members.iteritems(): | ||||||
|  |                     self.assertTrue('User' in m.__class__.__name__) | ||||||
|  |  | ||||||
|  |         Group.objects.delete() | ||||||
|  |         Group().save() | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first() | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |  | ||||||
|  |         UserA.drop_collection() | ||||||
|  |         UserB.drop_collection() | ||||||
|  |         UserC.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
    def test_multidirectional_lists(self):
        # Generic references pointing both ways (parent <-> children)
        # should survive a save/reload round trip.

        class Asset(Document):
            name = StringField(max_length=250, required=True)
            parent = GenericReferenceField(default=None)
            parents = ListField(GenericReferenceField())
            children = ListField(GenericReferenceField())

        Asset.drop_collection()

        # NOTE(review): 'path' and 'title' are not declared on Asset -
        # presumably accepted/ignored by this mongoengine version; confirm.
        root = Asset(name='', path="/", title="Site Root")
        root.save()

        company = Asset(name='company', title='Company', parent=root, parents=[root])
        company.save()

        # Close the cycle: root points back down at company.
        root.children = [company]
        root.save()

        # After reloading, both directions of the relationship must hold.
        root = root.reload()
        self.assertEqual(root.children, [company])
        self.assertEqual(company.parents, [root])
|  |  | ||||||
|  |     def test_dict_in_dbref_instance(self): | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField(max_length=250, required=True) | ||||||
|  |  | ||||||
|  |         class Room(Document): | ||||||
|  |             number = StringField(max_length=250, required=True) | ||||||
|  |             staffs_with_position = ListField(DictField()) | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |         Room.drop_collection() | ||||||
|  |  | ||||||
|  |         bob = Person.objects.create(name='Bob') | ||||||
|  |         bob.save() | ||||||
|  |         sarah = Person.objects.create(name='Sarah') | ||||||
|  |         sarah.save() | ||||||
|  |  | ||||||
|  |         room_101 = Room.objects.create(number="101") | ||||||
|  |         room_101.staffs_with_position = [ | ||||||
|  |             {'position_key': 'window', 'staff': sarah}, | ||||||
|  |             {'position_key': 'door', 'staff': bob.to_dbref()}] | ||||||
|  |         room_101.save() | ||||||
|  |  | ||||||
|  |         room = Room.objects.first().select_related() | ||||||
|  |         self.assertEqual(room.staffs_with_position[0]['staff'], sarah) | ||||||
|  |         self.assertEqual(room.staffs_with_position[1]['staff'], bob) | ||||||
|  |  | ||||||
|  |     def test_document_reload_no_inheritance(self): | ||||||
|  |         class Foo(Document): | ||||||
|  |             meta = {'allow_inheritance': False} | ||||||
|  |             bar = ReferenceField('Bar') | ||||||
|  |             baz = ReferenceField('Baz') | ||||||
|  |  | ||||||
|  |         class Bar(Document): | ||||||
|  |             meta = {'allow_inheritance': False} | ||||||
|  |             msg = StringField(required=True, default='Blammo!') | ||||||
|  |  | ||||||
|  |         class Baz(Document): | ||||||
|  |             meta = {'allow_inheritance': False} | ||||||
|  |             msg = StringField(required=True, default='Kaboom!') | ||||||
|  |  | ||||||
|  |         Foo.drop_collection() | ||||||
|  |         Bar.drop_collection() | ||||||
|  |         Baz.drop_collection() | ||||||
|  |  | ||||||
|  |         bar = Bar() | ||||||
|  |         bar.save() | ||||||
|  |         baz = Baz() | ||||||
|  |         baz.save() | ||||||
|  |         foo = Foo() | ||||||
|  |         foo.bar = bar | ||||||
|  |         foo.baz = baz | ||||||
|  |         foo.save() | ||||||
|  |         foo.reload() | ||||||
|  |  | ||||||
|  |         self.assertEqual(type(foo.bar), Bar) | ||||||
|  |         self.assertEqual(type(foo.baz), Baz) | ||||||
|  |  | ||||||
|  |     def test_list_lookup_not_checked_in_map(self): | ||||||
|  |         """Ensure we dereference list data correctly | ||||||
|  |         """ | ||||||
|  |         class Comment(Document): | ||||||
|  |             id = IntField(primary_key=True) | ||||||
|  |             text = StringField() | ||||||
|  |  | ||||||
|  |         class Message(Document): | ||||||
|  |             id = IntField(primary_key=True) | ||||||
|  |             comments = ListField(ReferenceField(Comment)) | ||||||
|  |  | ||||||
|  |         Comment.drop_collection() | ||||||
|  |         Message.drop_collection() | ||||||
|  |  | ||||||
|  |         c1 = Comment(id=0, text='zero').save() | ||||||
|  |         c2 = Comment(id=1, text='one').save() | ||||||
|  |         Message(id=1, comments=[c1, c2]).save() | ||||||
|  |  | ||||||
|  |         msg = Message.objects.get(id=1) | ||||||
|  |         self.assertEqual(0, msg.comments[0].id) | ||||||
|  |         self.assertEqual(1, msg.comments[1].id) | ||||||
							
								
								
									
										122
									
								
								tests/test_django.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										122
									
								
								tests/test_django.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,122 @@ | |||||||
from __future__ import with_statement
import unittest
from nose.plugins.skip import SkipTest
from mongoengine.python_support import PY3
from mongoengine import *

# Django (and mongoengine's Django integration) is only importable on
# Python 2 here.  Under Python 3 any import failure is swallowed and
# dummy placeholders are installed; the tests themselves are skipped in
# setUp().  On Python 2 a failure is a genuine error and is re-raised.
try:
    from mongoengine.django.shortcuts import get_document_or_404

    from django.http import Http404
    from django.template import Context, Template
    from django.conf import settings
    from django.core.paginator import Paginator

    # Minimal settings so Django templates can be rendered in tests.
    settings.configure()

    from django.contrib.sessions.tests import SessionTestsMixin
    from mongoengine.django.sessions import SessionStore, MongoSession
except Exception, err:
    if PY3:
        SessionTestsMixin = type  # dummy value so no error
        SessionStore = None  # dummy value so no error
    else:
        raise err
|  |  | ||||||
|  |  | ||||||
|  | class QuerySetTest(unittest.TestCase): | ||||||
|  |  | ||||||
    def setUp(self):
        # Django has no Python 3 support at this point; skip every test.
        if PY3:
            raise SkipTest('django does not have Python 3 support')
        connect(db='mongoenginetest')

        # Simple model shared by all tests in this case.
        class Person(Document):
            name = StringField()
            age = IntField()
        self.Person = Person
|  |  | ||||||
|  |     def test_order_by_in_django_template(self): | ||||||
|  |         """Ensure that QuerySets are properly ordered in Django template. | ||||||
|  |         """ | ||||||
|  |         self.Person.drop_collection() | ||||||
|  |  | ||||||
|  |         self.Person(name="A", age=20).save() | ||||||
|  |         self.Person(name="D", age=10).save() | ||||||
|  |         self.Person(name="B", age=40).save() | ||||||
|  |         self.Person(name="C", age=30).save() | ||||||
|  |  | ||||||
|  |         t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}") | ||||||
|  |  | ||||||
|  |         d = {"ol": self.Person.objects.order_by('-name')} | ||||||
|  |         self.assertEqual(t.render(Context(d)), u'D-10:C-30:B-40:A-20:') | ||||||
|  |         d = {"ol": self.Person.objects.order_by('+name')} | ||||||
|  |         self.assertEqual(t.render(Context(d)), u'A-20:B-40:C-30:D-10:') | ||||||
|  |         d = {"ol": self.Person.objects.order_by('-age')} | ||||||
|  |         self.assertEqual(t.render(Context(d)), u'B-40:C-30:A-20:D-10:') | ||||||
|  |         d = {"ol": self.Person.objects.order_by('+age')} | ||||||
|  |         self.assertEqual(t.render(Context(d)), u'D-10:A-20:C-30:B-40:') | ||||||
|  |  | ||||||
|  |         self.Person.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_q_object_filter_in_template(self): | ||||||
|  |  | ||||||
|  |         self.Person.drop_collection() | ||||||
|  |  | ||||||
|  |         self.Person(name="A", age=20).save() | ||||||
|  |         self.Person(name="D", age=10).save() | ||||||
|  |         self.Person(name="B", age=40).save() | ||||||
|  |         self.Person(name="C", age=30).save() | ||||||
|  |  | ||||||
|  |         t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}") | ||||||
|  |  | ||||||
|  |         d = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))} | ||||||
|  |         self.assertEqual(t.render(Context(d)), 'D-10:C-30:') | ||||||
|  |  | ||||||
|  |         # Check double rendering doesn't throw an error | ||||||
|  |         self.assertEqual(t.render(Context(d)), 'D-10:C-30:') | ||||||
|  |  | ||||||
|  |     def test_get_document_or_404(self): | ||||||
|  |         p = self.Person(name="G404") | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234') | ||||||
|  |         self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk)) | ||||||
|  |  | ||||||
|  |     def test_pagination(self): | ||||||
|  |         """Ensure that Pagination works as expected | ||||||
|  |         """ | ||||||
|  |         class Page(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         Page.drop_collection() | ||||||
|  |  | ||||||
|  |         for i in xrange(1, 11): | ||||||
|  |             Page(name=str(i)).save() | ||||||
|  |  | ||||||
|  |         paginator = Paginator(Page.objects.all(), 2) | ||||||
|  |  | ||||||
|  |         t = Template("{% for i in page.object_list  %}{{ i.name }}:{% endfor %}") | ||||||
|  |         for p in paginator.page_range: | ||||||
|  |             d = {"page": paginator.page(p)} | ||||||
|  |             end = p * 2 | ||||||
|  |             start = end - 1 | ||||||
|  |             self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end)) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
    # Runs Django's stock session test-suite (SessionTestsMixin) against the
    # MongoDB-backed session store.
    backend = SessionStore

    def setUp(self):
        # Django support is Python 2 only; skip before touching Django.
        if PY3:
            raise SkipTest('django does not have Python 3 support')
        connect(db='mongoenginetest')
        # Start every test from an empty session collection.
        MongoSession.drop_collection()
        super(MongoDBSessionTest, self).setUp()

    def test_first_save(self):
        # A freshly created store should persist and retain assigned keys.
        session = SessionStore()
        session['test'] = True
        session.save()
        self.assertTrue('test' in session)
							
								
								
									
										3435
									
								
								tests/test_document.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3435
									
								
								tests/test_document.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										533
									
								
								tests/test_dynamic_document.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										533
									
								
								tests/test_dynamic_document.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,533 @@ | |||||||
|  | import unittest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.connection import get_db | ||||||
|  |  | ||||||
|  |  | ||||||
class DynamicDocTest(unittest.TestCase):
    """Tests for DynamicDocument / DynamicEmbeddedDocument: saving arbitrary
    attributes, querying them, inheritance, and delta (change-set) tracking."""

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        self.Person = Person

    def test_simple_dynamic_document(self):
        """Ensures simple dynamic documents are saved correctly"""

        p = self.Person()
        p.name = "James"
        # 'age' is not declared on Person; set dynamically.
        p.age = 34

        self.assertEqual(p.to_mongo(),
            {"_types": ["Person"], "_cls": "Person",
             "name": "James", "age": 34}
        )

        p.save()

        self.assertEqual(self.Person.objects.first().age, 34)

        # Confirm no changes to self.Person
        self.assertFalse(hasattr(self.Person, 'age'))

    def test_dynamic_document_delta(self):
        """Ensures simple dynamic documents can delta correctly"""
        p = self.Person(name="James", age=34)
        self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {}))

        # Setting then deleting a dynamic field must show up as an unset.
        p.doc = 123
        del(p.doc)
        self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1}))

    def test_change_scope_of_variable(self):
        """Test changing the scope of a dynamic field has no adverse effects"""
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        # Re-fetch and change the field's type from int to dict.
        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEqual(p.misc, {'hello': 'world'})

    def test_delete_dynamic_field(self):
        """Test deleting a dynamic field works"""
        self.Person.drop_collection()
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEqual(p.misc, {'hello': 'world'})
        # Inspect the raw stored document to confirm the key set.
        collection = self.db[self.Person._get_collection_name()]
        obj = collection.find_one()
        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name'])

        # Deleting the attribute and saving must remove it from the DB too.
        del(p.misc)
        p.save()

        p = self.Person.objects.get()
        self.assertFalse(hasattr(p, 'misc'))

        obj = collection.find_one()
        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'name'])

    def test_dynamic_document_queries(self):
        """Ensure we can query dynamic fields"""
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        self.assertEqual(1, self.Person.objects(age=22).count())
        p = self.Person.objects(age=22)
        p = p.get()
        self.assertEqual(22, p.age)

    def test_complex_dynamic_document_queries(self):
        # Query operators on a dynamic field holding mixed str/int values.
        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        p = Person(name="test")
        p.age = "ten"
        p.save()

        p1 = Person(name="test1")
        p1.age = "less then ten and a half"
        p1.save()

        p2 = Person(name="test2")
        p2.age = 10
        p2.save()

        # icontains matches the two string ages; __gte matches the int one.
        self.assertEqual(Person.objects(age__icontains='ten').count(), 2)
        self.assertEqual(Person.objects(age__gte=10).count(), 1)

    def test_complex_data_lookups(self):
        """Ensure you can query dynamic document dynamic fields"""
        p = self.Person()
        p.misc = {'hello': 'world'}
        p.save()

        self.assertEqual(1, self.Person.objects(misc__hello='world').count())

    def test_inheritance(self):
        """Ensure that dynamic document plays nice with inheritance"""
        class Employee(self.Person):
            salary = IntField()

        Employee.drop_collection()

        # Subclass inherits declared fields and shares the parent collection.
        self.assertTrue('name' in Employee._fields)
        self.assertTrue('salary' in Employee._fields)
        self.assertEqual(Employee._get_collection_name(),
                         self.Person._get_collection_name())

        joe_bloggs = Employee()
        joe_bloggs.name = "Joe Bloggs"
        joe_bloggs.salary = 10
        joe_bloggs.age = 20
        joe_bloggs.save()

        # Dynamic-field queries work via both parent and subclass.
        self.assertEqual(1, self.Person.objects(age=20).count())
        self.assertEqual(1, Employee.objects(age=20).count())

        # Fetching through the parent still yields the subclass instance.
        joe_bloggs = self.Person.objects.first()
        self.assertTrue(isinstance(joe_bloggs, Employee))

    def test_embedded_dynamic_document(self):
        """Test dynamic embedded documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
            "embedded_field": {
                "_types": ['Embedded'], "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2, {'hello': 'world'}]
            }
        })
        doc.save()

        # Round-trip: the embedded document and its fields survive reload.
        doc = Doc.objects.first()
        self.assertEqual(doc.embedded_field.__class__, Embedded)
        self.assertEqual(doc.embedded_field.string_field, "hello")
        self.assertEqual(doc.embedded_field.int_field, 1)
        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}])

    def test_complex_embedded_documents(self):
        """Test complex dynamic embedded documents setups"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}

        # A second embedded document nested inside the first one's list.
        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        embedded_1.list_field = ['1', 2, embedded_2]
        doc.embedded_field = embedded_1

        self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
            "embedded_field": {
                "_types": ['Embedded'], "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2,
                    {"_types": ['Embedded'], "_cls": "Embedded",
                    "string_field": "hello",
                    "int_field": 1,
                    "dict_field": {"hello": "world"},
                    "list_field": ['1', 2, {'hello': 'world'}]}
                ]
            }
        })
        doc.save()
        doc = Doc.objects.first()
        self.assertEqual(doc.embedded_field.__class__, Embedded)
        self.assertEqual(doc.embedded_field.string_field, "hello")
        self.assertEqual(doc.embedded_field.int_field, 1)
        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)

        # The nested embedded doc must come back as an Embedded instance too.
        embedded_field = doc.embedded_field.list_field[2]

        self.assertEqual(embedded_field.__class__, Embedded)
        self.assertEqual(embedded_field.string_field, "hello")
        self.assertEqual(embedded_field.int_field, 1)
        self.assertEqual(embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(embedded_field.list_field, ['1', 2, {'hello': 'world'}])

    def test_delta_for_dynamic_documents(self):
        # Changing a dynamic field registers in the delta, pre- and post-fetch.
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p = self.Person.objects(age=22).get()
        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p.save()
        self.assertEqual(1, self.Person.objects(age=24).count())

    def test_delta(self):
        # Delta tracking for each value type; _changed_fields is reset
        # manually between steps to isolate each assignment.

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['string_field'])
        self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['int_field'])
        self.assertEqual(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({}, {'list_field': 1}))

    def test_delta_recursive(self):
        """Testing deltaing works with dynamic documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc._get_changed_fields(), ['embedded_field'])

        embedded_delta = {
            'string_field': 'hello',
            'int_field': 1,
            'dict_field': {'hello': 'world'},
            'list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
        # The parent's delta additionally carries the class markers.
        embedded_delta.update({
            '_types': ['Embedded'],
            '_cls': 'Embedded',
        })
        self.assertEqual(doc._delta(), ({'embedded_field': embedded_delta}, {}))

        doc.save()
        doc.reload()

        # Clearing a nested dict surfaces as a dotted-path unset on the parent.
        doc.embedded_field.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['embedded_field.dict_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1}))

        self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
        doc.save()
        doc.reload()

        doc.embedded_field.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1}))
        self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1}))
        doc.save()
        doc.reload()

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEqual(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                 '_types': ['Embedded'],
                 'string_field': 'hello',
                 'dict_field': {'hello': 'world'},
                 'int_field': 1,
                 'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc.reload()

        # After a reload the nested embedded doc starts with a clean slate.
        self.assertEqual(doc.embedded_field.list_field[2]._changed_fields, [])
        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k])

        # Changing one field of a list element produces a dotted indexed path.
        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field'])
        self.assertEqual(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
        doc.save()
        doc.reload()
        self.assertEqual(doc.embedded_field.list_field[2].string_field, 'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
            '_types': ['Embedded'],
            '_cls': 'Embedded',
            'string_field': 'hello world',
            'int_field': 1,
            'list_field': ['1', 2, {'hello': 'world'}],
            'dict_field': {'hello': 'world'}}]}, {}))
        self.assertEqual(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}
            ]}, {}))
        doc.save()
        doc.reload()
        self.assertEqual(doc.embedded_field.list_field[2].string_field, 'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc.reload()

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc.reload()
        self.assertEqual(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort(key=str)  # str key lets otherwise-uncomparable mixed types sort
        doc.save()
        doc.reload()
        self.assertEqual(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])

        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
        doc.save()
        doc.reload()

        del(doc.embedded_field.list_field[2].list_field)
        self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))

        doc.save()
        doc.reload()

        doc.dict_field = {'embedded': embedded_1}
        doc.save()
        doc.reload()

        # Embedded docs stored inside a plain dict also delta via dotted paths.
        doc.dict_field['embedded'].string_field = 'Hello World'
        self.assertEqual(doc._get_changed_fields(), ['dict_field.embedded.string_field'])
        self.assertEqual(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {}))

    def test_indexes(self):
        """Ensure that indexes are used when meta[indexes] is specified.
        """
        class BlogPost(DynamicDocument):
            meta = {
                'indexes': [
                    '-date',
                    ('category', '-date')
                ],
            }

        BlogPost.drop_collection()

        info = BlogPost.objects._collection.index_information()
        # _id, '-date', ('cat', 'date')
        # NB: there is no index on _types by itself, since
        # the indices on -date and tags will both contain
        # _types as first element in the key
        self.assertEqual(len(info), 3)

        # Indexes are lazy so use list() to perform query
        list(BlogPost.objects)
        info = BlogPost.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('_types', 1), ('category', 1), ('date', -1)]
                        in info)
        self.assertTrue([('_types', 1), ('date', -1)] in info)

    def test_dynamic_and_embedded(self):
        """Ensure embedded documents play nicely"""

        class Address(EmbeddedDocument):
            city = StringField()

        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        # 'address' is not declared on Person; assigned dynamically at save.
        Person(name="Ross", address=Address(city="London")).save()

        # Mutating a field of the dynamic embedded doc persists.
        person = Person.objects.first()
        person.address.city = "Lundenne"
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Lundenne")

        # Replacing the embedded doc wholesale persists too.
        person = Person.objects.first()
        person.address = Address(city="Londinium")
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Londinium")

        person = Person.objects.first()
        person.age = 35
        person.save()
        self.assertEqual(Person.objects.first().age, 35)
|  |         self.assertEqual(Person.objects.first().age, 35) | ||||||
							
								
								
									
										2303
									
								
								tests/test_fields.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										2303
									
								
								tests/test_fields.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										3648
									
								
								tests/test_queryset.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3648
									
								
								tests/test_queryset.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										33
									
								
								tests/test_replicaset_connection.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										33
									
								
								tests/test_replicaset_connection.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,33 @@ | |||||||
|  | import unittest | ||||||
|  |  | ||||||
|  | import pymongo | ||||||
|  | from pymongo import ReadPreference, ReplicaSetConnection | ||||||
|  |  | ||||||
|  | import mongoengine | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.connection import get_db, get_connection, ConnectionError | ||||||
|  |  | ||||||
|  |  | ||||||
class ConnectionTest(unittest.TestCase):
    """Connection tests that require a live replica set.

    The single test is deliberately tolerant: if no replica set named "rs"
    is reachable on localhost:27017, or the driver hands back a plain
    (non-replica-set) connection, the test returns early instead of failing.
    """

    def tearDown(self):
        # Reset mongoengine's module-level connection registries so state
        # from this test cannot leak into other test modules.
        mongoengine.connection._connection_settings = {}
        mongoengine.connection._connections = {}
        mongoengine.connection._dbs = {}

    def test_replicaset_uri_passes_read_preference(self):
        """Requires a replica set called "rs" on port 27017
        """

        try:
            conn = connect(db='mongoenginetest', host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=ReadPreference.SECONDARY_ONLY)
        except ConnectionError:
            # No replica set available in this environment -- skip silently.
            return

        if not isinstance(conn, ReplicaSetConnection):
            # Driver fell back to a regular connection; nothing to assert.
            return

        # The read_preference kwarg must survive the URI-based connect path.
        self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_ONLY)

if __name__ == '__main__':
    unittest.main()
							
								
								
									
										230
									
								
								tests/test_signals.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										230
									
								
								tests/test_signals.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,230 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | import unittest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine import signals | ||||||
|  |  | ||||||
|  | signal_output = [] | ||||||
|  |  | ||||||
|  |  | ||||||
class SignalTests(unittest.TestCase):
    """
    Testing signals before/after saving and deleting.
    """

    def get_signal_output(self, fn, *args, **kwargs):
        # Flush any existing signal output
        # Run *fn* and return only the signal messages it produced.
        global signal_output
        signal_output = []
        fn(*args, **kwargs)
        return signal_output

    def setUp(self):
        # Define two Document classes whose classmethods append a line to the
        # module-level signal_output list for every signal they receive.
        connect(db='mongoenginetest')
        class Author(Document):
            name = StringField()

            def __unicode__(self):
                return self.name

            @classmethod
            def pre_init(cls, sender, document, *args, **kwargs):
                signal_output.append('pre_init signal, %s' % cls.__name__)
                signal_output.append(str(kwargs['values']))

            @classmethod
            def post_init(cls, sender, document, **kwargs):
                signal_output.append('post_init signal, %s' % document)

            @classmethod
            def pre_save(cls, sender, document, **kwargs):
                signal_output.append('pre_save signal, %s' % document)

            @classmethod
            def post_save(cls, sender, document, **kwargs):
                # 'created' distinguishes an insert from an update of an
                # existing document.
                signal_output.append('post_save signal, %s' % document)
                if 'created' in kwargs:
                    if kwargs['created']:
                        signal_output.append('Is created')
                    else:
                        signal_output.append('Is updated')

            @classmethod
            def pre_delete(cls, sender, document, **kwargs):
                signal_output.append('pre_delete signal, %s' % document)

            @classmethod
            def post_delete(cls, sender, document, **kwargs):
                signal_output.append('post_delete signal, %s' % document)

            @classmethod
            def pre_bulk_insert(cls, sender, documents, **kwargs):
                signal_output.append('pre_bulk_insert signal, %s' % documents)

            @classmethod
            def post_bulk_insert(cls, sender, documents, **kwargs):
                # 'loaded' reflects the insert(..., load_bulk=...) flag.
                signal_output.append('post_bulk_insert signal, %s' % documents)
                if kwargs.get('loaded', False):
                    signal_output.append('Is loaded')
                else:
                    signal_output.append('Not loaded')
        self.Author = Author


        class Another(Document):
            name = StringField()

            def __unicode__(self):
                return self.name

            @classmethod
            def pre_init(cls, sender, document, **kwargs):
                signal_output.append('pre_init Another signal, %s' % cls.__name__)
                signal_output.append(str(kwargs['values']))

            @classmethod
            def post_init(cls, sender, document, **kwargs):
                signal_output.append('post_init Another signal, %s' % document)

            @classmethod
            def pre_save(cls, sender, document, **kwargs):
                signal_output.append('pre_save Another signal, %s' % document)

            @classmethod
            def post_save(cls, sender, document, **kwargs):
                signal_output.append('post_save Another signal, %s' % document)
                if 'created' in kwargs:
                    if kwargs['created']:
                        signal_output.append('Is created')
                    else:
                        signal_output.append('Is updated')

            @classmethod
            def pre_delete(cls, sender, document, **kwargs):
                signal_output.append('pre_delete Another signal, %s' % document)

            @classmethod
            def post_delete(cls, sender, document, **kwargs):
                signal_output.append('post_delete Another signal, %s' % document)

        self.Another = Another
        # Save up the number of connected signals so that we can check at the end
        # that all the signals we register get properly unregistered
        self.pre_signals = (
            len(signals.pre_init.receivers),
            len(signals.post_init.receivers),
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
            len(signals.pre_bulk_insert.receivers),
            len(signals.post_bulk_insert.receivers),
        )

        # Author subscribes to all eight signals; sender= scopes each handler
        # to its own class.
        signals.pre_init.connect(Author.pre_init, sender=Author)
        signals.post_init.connect(Author.post_init, sender=Author)
        signals.pre_save.connect(Author.pre_save, sender=Author)
        signals.post_save.connect(Author.post_save, sender=Author)
        signals.pre_delete.connect(Author.pre_delete, sender=Author)
        signals.post_delete.connect(Author.post_delete, sender=Author)
        signals.pre_bulk_insert.connect(Author.pre_bulk_insert, sender=Author)
        signals.post_bulk_insert.connect(Author.post_bulk_insert, sender=Author)

        # Another defines no bulk-insert handlers, so only six connections here.
        signals.pre_init.connect(Another.pre_init, sender=Another)
        signals.post_init.connect(Another.post_init, sender=Another)
        signals.pre_save.connect(Another.pre_save, sender=Another)
        signals.post_save.connect(Another.post_save, sender=Another)
        signals.pre_delete.connect(Another.pre_delete, sender=Another)
        signals.post_delete.connect(Another.post_delete, sender=Another)

    def tearDown(self):
        # Disconnect everything setUp connected (eight for Author, six for
        # Another), then verify receiver counts returned to their baseline.
        signals.pre_init.disconnect(self.Author.pre_init)
        signals.post_init.disconnect(self.Author.post_init)
        signals.post_delete.disconnect(self.Author.post_delete)
        signals.pre_delete.disconnect(self.Author.pre_delete)
        signals.post_save.disconnect(self.Author.post_save)
        signals.pre_save.disconnect(self.Author.pre_save)
        signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert)
        signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert)

        signals.pre_init.disconnect(self.Another.pre_init)
        signals.post_init.disconnect(self.Another.post_init)
        signals.post_delete.disconnect(self.Another.post_delete)
        signals.pre_delete.disconnect(self.Another.pre_delete)
        signals.post_save.disconnect(self.Another.post_save)
        signals.pre_save.disconnect(self.Another.pre_save)

        # Check that all our signals got disconnected properly.
        post_signals = (
            len(signals.pre_init.receivers),
            len(signals.post_init.receivers),
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
            len(signals.pre_bulk_insert.receivers),
            len(signals.post_bulk_insert.receivers),
        )

        self.assertEqual(self.pre_signals, post_signals)

    def test_model_signals(self):
        """ Model saves should throw some signals. """

        def create_author():
            a1 = self.Author(name='Bill Shakespeare')

        def bulk_create_author_with_load():
            a1 = self.Author(name='Bill Shakespeare')
            self.Author.objects.insert([a1], load_bulk=True)

        def bulk_create_author_without_load():
            a1 = self.Author(name='Bill Shakespeare')
            self.Author.objects.insert([a1], load_bulk=False)

        # Constructing a document alone fires only the init signals.
        self.assertEqual(self.get_signal_output(create_author), [
            "pre_init signal, Author",
            "{'name': 'Bill Shakespeare'}",
            "post_init signal, Bill Shakespeare",
        ])

        # First save of a new document reports created=True.
        a1 = self.Author(name='Bill Shakespeare')
        self.assertEqual(self.get_signal_output(a1.save), [
            "pre_save signal, Bill Shakespeare",
            "post_save signal, Bill Shakespeare",
            "Is created"
        ])

        # Saving a reloaded (already-persisted) document reports an update.
        a1.reload()
        a1.name='William Shakespeare'
        self.assertEqual(self.get_signal_output(a1.save), [
            "pre_save signal, William Shakespeare",
            "post_save signal, William Shakespeare",
            "Is updated"
        ])

        self.assertEqual(self.get_signal_output(a1.delete), [
            'pre_delete signal, William Shakespeare',
            'post_delete signal, William Shakespeare',
        ])

        signal_output = self.get_signal_output(bulk_create_author_with_load)

        # The output of this signal is not entirely deterministic. The reloaded
        # object will have an object ID. Hence, we only check part of the output
        self.assertEqual(signal_output[3],
            "pre_bulk_insert signal, [<Author: Bill Shakespeare>]")
        self.assertEqual(signal_output[-2:],
            ["post_bulk_insert signal, [<Author: Bill Shakespeare>]",
             "Is loaded",])

        # Without load_bulk the inserted documents are not re-fetched, so the
        # full signal sequence is deterministic.
        self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [
            "pre_init signal, Author",
            "{'name': 'Bill Shakespeare'}",
            "post_init signal, Bill Shakespeare",
            "pre_bulk_insert signal, [<Author: Bill Shakespeare>]",
            "post_bulk_insert signal, [<Author: Bill Shakespeare>]",
            "Not loaded",
        ])

        self.Author.objects.delete()
		Reference in New Issue
	
	Block a user