# Compare commits

2846 commits

Comparing: `v0.6.1` ... `bagerard-p`
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | d73ca6f90d | ||
|  | e6c2169f76 | ||
|  | 1d17dc4663 | ||
|  | eeac3bd2e6 | ||
|  | 3f5a15d236 | ||
|  | 338c40b5d5 | ||
|  | fc3ccf9606 | ||
|  | 746faceb5c | ||
|  | eb56fb9bda | ||
|  | 161493c0d2 | ||
|  | cb9f329d11 | ||
|  | 03af784ebe | ||
|  | e5f6e4584a | ||
|  | 79f9f223d0 | ||
|  | 0bc18cd6e1 | ||
|  | 30a3c6a5b7 | ||
|  | 90c5d83f84 | ||
|  | d8b8ff6851 | ||
|  | ee664f0c90 | ||
|  | 94a7e813b1 | ||
|  | 8ef7213426 | ||
|  | 2f4464ead5 | ||
|  | 89b93461ac | ||
|  | 9e40f3ae83 | ||
|  | f4962fbc40 | ||
|  | c9d53ca5d5 | ||
|  | 65f50fd713 | ||
|  | bf1d04e399 | ||
|  | 5a8e5e5a40 | ||
|  | f3919dd839 | ||
|  | 9f82a02ddf | ||
|  | 015a36c85f | ||
|  | fbd3388a59 | ||
|  | d8a52d68c5 | ||
|  | 4286708e2e | ||
|  | e362d089e1 | ||
|  | 6b657886a5 | ||
|  | eb16945147 | ||
|  | 38047ca992 | ||
|  | c801e79d4b | ||
|  | 3fca3739de | ||
|  | c218c8bb6c | ||
|  | 0bbc05995a | ||
|  | 3adb67901b | ||
|  | d4350e7da4 | ||
|  | 4665658145 | ||
|  | 0d289fd5a1 | ||
|  | aabc18755c | ||
|  | 1f2a5db016 | ||
|  | ff40f66291 | ||
|  | 7f77084e0e | ||
|  | aca4de728e | ||
|  | 9e7ca43cad | ||
|  | 7116dec74a | ||
|  | a5302b870b | ||
|  | 604e9974b6 | ||
|  | 3e1c83f8fa | ||
|  | e431e27cb2 | ||
|  | 4f188655d0 | ||
|  | 194b0cac88 | ||
|  | 7b4175fc5c | ||
|  | adb5f74ddb | ||
|  | 107a1c34c8 | ||
|  | dc7da5204f | ||
|  | 0301bca176 | ||
|  | 49f9bca23b | ||
|  | 31498bd7dd | ||
|  | 1698f398eb | ||
|  | 4275c2d7b7 | ||
|  | 22bff8566d | ||
|  | d8657be320 | ||
|  | 3db9d58dac | ||
|  | 3fbe9c3cdd | ||
|  | 130e9c519c | ||
|  | 78c9e9745d | ||
|  | 38ebb5abf4 | ||
|  | 9b73be26ab | ||
|  | fd0095b73f | ||
|  | 226049f66a | ||
|  | dc1cf88ca6 | ||
|  | f5f8b730b5 | ||
|  | e8f6b42316 | ||
|  | 49b0d73654 | ||
|  | 394da67cf1 | ||
|  | ef7da36ac6 | ||
|  | 1312100bc7 | ||
|  | 4085bc2152 | ||
|  | f4d7e72426 | ||
|  | ece63ad071 | ||
|  | a9550b8243 | ||
|  | 43724e40b2 | ||
|  | 1bfa40e926 | ||
|  | d493f71c4e | ||
|  | 87f4d1a323 | ||
|  | 0a0e6114f5 | ||
|  | 41d36fa3bf | ||
|  | 707923e3f5 | ||
|  | d9b9581df2 | ||
|  | 463e7c66af | ||
|  | 2be28a22a7 | ||
|  | d73f0bb1af | ||
|  | ce74978b1e | ||
|  | 2b0157aecd | ||
|  | f49baf5d90 | ||
|  | 7cc964c7d8 | ||
|  | bc77322c2f | ||
|  | 8913a74a86 | ||
|  | af35b25d15 | ||
|  | 476b07af6e | ||
|  | e2b9a02531 | ||
|  | 6cc6229066 | ||
|  | 4c62a060f0 | ||
|  | 3d80637fa4 | ||
|  | 68be9fe979 | ||
|  | 547cd4a3ae | ||
|  | ee2d50b2d1 | ||
|  | 15c3ddece8 | ||
|  | beaa9744b7 | ||
|  | 8eb51790b5 | ||
|  | aadc6262ed | ||
|  | 00ae6298d4 | ||
|  | ad0669a326 | ||
|  | 85df76c623 | ||
|  | 87512246cb | ||
|  | a3f9016ae9 | ||
|  | 4e58e9f8d1 | ||
|  | 7c533394fd | ||
|  | 333e014f13 | ||
|  | c0c0efce18 | ||
|  | beabaee345 | ||
|  | c937af3919 | ||
|  | aa4a6ae023 | ||
|  | b57946ec98 | ||
|  | 1e110a2c41 | ||
|  | b234aa48e4 | ||
|  | 8086576677 | ||
|  | 03e34299f0 | ||
|  | 421e3f324f | ||
|  | a0b803959c | ||
|  | ff4d57032a | ||
|  | ba34589065 | ||
|  | a4d11eef46 | ||
|  | fda2e2b47a | ||
|  | d287f480e5 | ||
|  | d85f0e6226 | ||
|  | cfb4943986 | ||
|  | b453a96211 | ||
|  | 81f9b351b3 | ||
|  | 4bca3de42f | ||
|  | 235b1a3679 | ||
|  | 450658d7ac | ||
|  | 8e17e42e26 | ||
|  | 2d6a4c4b90 | ||
|  | 38703acc29 | ||
|  | 095217e797 | ||
|  | 86e965f854 | ||
|  | 57db68dc04 | ||
|  | 72de6d67c7 | ||
|  | b2c3acd025 | ||
|  | 605de59bd0 | ||
|  | e0565ddac5 | ||
|  | 18b68f1b80 | ||
|  | ea88806630 | ||
|  | 412bed0f6d | ||
|  | 53cf26b9af | ||
|  | d738462139 | ||
|  | 2fa48cd9e5 | ||
|  | e64a7a9448 | ||
|  | 9490ad2bf7 | ||
|  | 84f3dce492 | ||
|  | 60c42dddd5 | ||
|  | f93f9406ee | ||
|  | 705c55ce24 | ||
|  | 928770c43a | ||
|  | 59fbd505a0 | ||
|  | 1cc20c9770 | ||
|  | f8f267a880 | ||
|  | 80ea1f6883 | ||
|  | 75ee282a3d | ||
|  | 4edad4601c | ||
|  | 152b51fd33 | ||
|  | 66a0fca4ad | ||
|  | e7c7a66cd1 | ||
|  | b3dbb87c3c | ||
|  | 3d45538998 | ||
|  | 8df9d3fef9 | ||
|  | 99e660c66d | ||
|  | aa02f87b69 | ||
|  | f0d1ee2cb4 | ||
|  | ca4967311d | ||
|  | 65eb6ab611 | ||
|  | 1cb2f7814c | ||
|  | b5485b16e6 | ||
|  | 62c8597a3b | ||
|  | 488604ff2e | ||
|  | bd88a17b8e | ||
|  | 8e892dccfe | ||
|  | c22eb34017 | ||
|  | dcf3edb03e | ||
|  | c85b59d3b5 | ||
|  | 1170de1e8e | ||
|  | 332bd767d4 | ||
|  | 0053b30237 | ||
|  | d44533d956 | ||
|  | 12d8bd5a22 | ||
|  | ae326678ec | ||
|  | 8d31f165c0 | ||
|  | cfd4d6a161 | ||
|  | 329f030a41 | ||
|  | 68dc2925fb | ||
|  | 0d4e61d489 | ||
|  | dc7b96a569 | ||
|  | 50882e5bb0 | ||
|  | 280a73af3b | ||
|  | d8c0631dab | ||
|  | 9166ba91d7 | ||
|  | 6bc4e602bb | ||
|  | 45a7520fc3 | ||
|  | 64c0cace85 | ||
|  | 82af5e4a19 | ||
|  | 7e0ba1b335 | ||
|  | 44b7f792fe | ||
|  | a3e432eb68 | ||
|  | 009f9a2b14 | ||
|  | 2ca905b6e5 | ||
|  | 3b099f936a | ||
|  | 4d6ddb070e | ||
|  | b205314424 | ||
|  | e83132f32c | ||
|  | 1b38309d70 | ||
|  | 6e8196d475 | ||
|  | 90fecc56dd | ||
|  | d3d7f0e670 | ||
|  | 37ffeafeff | ||
|  | abc159b7b9 | ||
|  | 648b28876d | ||
|  | 5b9f2bac87 | ||
|  | 17151f67c2 | ||
|  | 5f14d958ac | ||
|  | bd6c52e025 | ||
|  | cb77bb6b69 | ||
|  | 78b240b740 | ||
|  | 7e30f00178 | ||
|  | 35310dbc73 | ||
|  | af82c07acc | ||
|  | 3f75f30f26 | ||
|  | f7f0e10d4d | ||
|  | 091238a2cf | ||
|  | 0458ef869e | ||
|  | 0bf08db7b9 | ||
|  | d3420918cd | ||
|  | 138e759161 | ||
|  | f1d6ce7d12 | ||
|  | ff749a7a0a | ||
|  | bff78ca8dd | ||
|  | 81647d67a0 | ||
|  | d8924ed892 | ||
|  | 799cdafae6 | ||
|  | bc0c55e49a | ||
|  | c61c6a8525 | ||
|  | 3e764d068c | ||
|  | ac25f4b98b | ||
|  | aa6ff8c84a | ||
|  | 37ca79e9c5 | ||
|  | 6040b4b494 | ||
|  | 51ea3e3c6f | ||
|  | 5a16dda50d | ||
|  | bbfa978861 | ||
|  | 54ca7bf09f | ||
|  | 8bf5370b6c | ||
|  | ecefa05e03 | ||
|  | e013494fb2 | ||
|  | 4853f74dbf | ||
|  | 6f45ee6813 | ||
|  | c60ed32f3a | ||
|  | 178851589d | ||
|  | 5bcc679194 | ||
|  | 1e17b5ac66 | ||
|  | 19f12f3f2f | ||
|  | 71e8d9a490 | ||
|  | e3cd553f82 | ||
|  | b61c8cd104 | ||
|  | 8f288fe458 | ||
|  | 02a920feea | ||
|  | be2c4f2b3c | ||
|  | 7ac74b1c1f | ||
|  | 933cb1d5c7 | ||
|  | 6203e30152 | ||
|  | 7d94af0e31 | ||
|  | 564a2b5f1e | ||
|  | 1dbe7a3163 | ||
|  | 47f8a126ca | ||
|  | 693195f70b | ||
|  | 2267b7e7d7 | ||
|  | a06e605e67 | ||
|  | 47c67ecc99 | ||
|  | 4c4b7cbeae | ||
|  | ddececbfea | ||
|  | 71a6f3d1a4 | ||
|  | e86cf962e9 | ||
|  | 99a58d5c91 | ||
|  | eecbb5ca90 | ||
|  | fbb3bf869c | ||
|  | b887ea9623 | ||
|  | c68e3e1238 | ||
|  | c5080e4030 | ||
|  | 0d01365751 | ||
|  | f4a06ad65d | ||
|  | 05a22d5a54 | ||
|  | 2424ece0c5 | ||
|  | 2d02551d0a | ||
|  | ac416aeeb3 | ||
|  | d09af430e8 | ||
|  | 79454b5eed | ||
|  | 921c1fa412 | ||
|  | 1aba145bc6 | ||
|  | 290d9df3eb | ||
|  | aa76ccdd25 | ||
|  | abe8070c36 | ||
|  | 2d28c258fd | ||
|  | 1338839b52 | ||
|  | 058203a0ec | ||
|  | 8fdf664968 | ||
|  | 50555ec73e | ||
|  | 951a532a9f | ||
|  | e940044603 | ||
|  | babfbb0fcd | ||
|  | bbed312bdd | ||
|  | b593764ded | ||
|  | 483c840fc8 | ||
|  | de80f0ccff | ||
|  | d0b87f7f82 | ||
|  | bf32d3c39a | ||
|  | bc14f2cdaa | ||
|  | 06a21e038a | ||
|  | 4d5eba317e | ||
|  | d37a30e083 | ||
|  | 9170eea784 | ||
|  | 2769967e1e | ||
|  | 609f50d261 | ||
|  | 82f0eb1cbc | ||
|  | b47669403b | ||
|  | 91899acfe5 | ||
|  | ffedd33101 | ||
|  | c9ed930606 | ||
|  | af292b0ec2 | ||
|  | 1ead7f9b2b | ||
|  | 5c91877b69 | ||
|  | e57d834a0d | ||
|  | 0578cdb62e | ||
|  | b661afba01 | ||
|  | b1002dd4f9 | ||
|  | 8e69008699 | ||
|  | f45552f8f8 | ||
|  | a4fe091a51 | ||
|  | 216217e2c6 | ||
|  | 799775b3a7 | ||
|  | ae0384df29 | ||
|  | 8f57279dc7 | ||
|  | e8dbd12f22 | ||
|  | ca230d28b4 | ||
|  | c96065b187 | ||
|  | 2abcf4764d | ||
|  | 6a4c342e45 | ||
|  | bb0b1e88ef | ||
|  | 63c9135184 | ||
|  | 7fac0ef961 | ||
|  | 5a2e268160 | ||
|  | a4e4e8f440 | ||
|  | b62ce947a6 | ||
|  | 9538662262 | ||
|  | 09d7ae4f80 | ||
|  | d7ded366c7 | ||
|  | 09c77973a0 | ||
|  | 22f3c70234 | ||
|  | 6527b1386f | ||
|  | baabf97acd | ||
|  | 97005aca66 | ||
|  | 6e8ea50c19 | ||
|  | 1fcd706e11 | ||
|  | 008bb19b0b | ||
|  | 023acab779 | ||
|  | 68e8584520 | ||
|  | 5d120ebca0 | ||
|  | f91b89f723 | ||
|  | 1181b75e16 | ||
|  | 5f00b4f923 | ||
|  | 4c31193b82 | ||
|  | 17fc9d1886 | ||
|  | d7285d43dd | ||
|  | aa8a991d20 | ||
|  | 40ba51ac43 | ||
|  | d20430a778 | ||
|  | f08f749cd9 | ||
|  | a6c04f4f9a | ||
|  | 15b6c1590f | ||
|  | 4a8985278d | ||
|  | 996618a495 | ||
|  | 1f02d5fbbd | ||
|  | c58b9f00f0 | ||
|  | f131b18cbe | ||
|  | 118a998138 | ||
|  | 7ad6f036e7 | ||
|  | 1d29b824a8 | ||
|  | 3caf2dce28 | ||
|  | 1fc5b954f2 | ||
|  | 31d99c0bd2 | ||
|  | 0ac59c67ea | ||
|  | 8e8c74c621 | ||
|  | f996f3df74 | ||
|  | 9499c97e18 | ||
|  | c1c81fc07b | ||
|  | 072e86a2f0 | ||
|  | 70d6e763b0 | ||
|  | 15f4d4fee6 | ||
|  | 82e28dec43 | ||
|  | b407c0e6c6 | ||
|  | 27ea01ee05 | ||
|  | 7ed5829b2c | ||
|  | 5bf1dd55b1 | ||
|  | 36aebffcc0 | ||
|  | 84c42ed58c | ||
|  | 9634e44343 | ||
|  | 048a045966 | ||
|  | a18c8c0eb4 | ||
|  | 5fb0f46e3f | ||
|  | 962997ed16 | ||
|  | daca0ebc14 | ||
|  | 9ae8fe7c2d | ||
|  | 1907133f99 | ||
|  | 4334955e39 | ||
|  | f00c9dc4d6 | ||
|  | 7d0687ec73 | ||
|  | da3773bfe8 | ||
|  | 6e1c132ee8 | ||
|  | 24ba35d76f | ||
|  | 64b63e9d52 | ||
|  | 7848a82a1c | ||
|  | 6a843cc8b2 | ||
|  | ecdb0785a4 | ||
|  | 9a55caed75 | ||
|  | 2e01eb87db | ||
|  | 597b962ad5 | ||
|  | 7531f533e0 | ||
|  | 6b9d71554e | ||
|  | bb1089e03d | ||
|  | c82f0c937d | ||
|  | 00d2fd685a | ||
|  | f28e1b8c90 | ||
|  | 2b17985a11 | ||
|  | b392e3102e | ||
|  | 58b0b18ddd | ||
|  | 6a9ef319d0 | ||
|  | cf38ef70cb | ||
|  | ac64ade10f | ||
|  | ee85af34d8 | ||
|  | 9d53ad53e5 | ||
|  | 9cdc3ebee6 | ||
|  | 14a5e05d64 | ||
|  | f7b7d0f79e | ||
|  | d98f36ceff | ||
|  | abfabc30c9 | ||
|  | c1aff7a248 | ||
|  | e44f71eeb1 | ||
|  | cb578c84e2 | ||
|  | 565e1dc0ed | ||
|  | b1e28d02f7 | ||
|  | d1467c2f73 | ||
|  | c439150431 | ||
|  | 9bb3dfd639 | ||
|  | 4caa58b9ec | ||
|  | b5213097e8 | ||
|  | 61081651e4 | ||
|  | 4ccfdf051d | ||
|  | 9f2a9d9cda | ||
|  | 827de76345 | ||
|  | fdcaca42ae | ||
|  | 0744892244 | ||
|  | b70ffc69df | ||
|  | 73b12cc32f | ||
|  | ba6a37f315 | ||
|  | 6f8be8c8ac | ||
|  | 68497542b3 | ||
|  | 3d762fed10 | ||
|  | 48b849c031 | ||
|  | 88c4aa2d87 | ||
|  | fb8c0d8fe3 | ||
|  | 1a863725d1 | ||
|  | 7b4245c91c | ||
|  | 9bd0d6b99d | ||
|  | b640c766db | ||
|  | 50ffa8014e | ||
|  | 7ef688b256 | ||
|  | b4fe0b35e4 | ||
|  | a2cbbdf819 | ||
|  | 35b7efe3f4 | ||
|  | 7cea2a768f | ||
|  | 7247b9b68e | ||
|  | dca837b843 | ||
|  | c60c2ee8d0 | ||
|  | 3cdb5b5db2 | ||
|  | b9cc8a4ca9 | ||
|  | 28606e9985 | ||
|  | 5bbe782812 | ||
|  | d65861cdf7 | ||
|  | c8df3fd2a7 | ||
|  | 6cfe6652a3 | ||
|  | 6b711da69d | ||
|  | 9b02867293 | ||
|  | 595cb99b2d | ||
|  | f0a3445250 | ||
|  | 6d353dae1e | ||
|  | 57a38282a9 | ||
|  | db47604865 | ||
|  | 2a121fe202 | ||
|  | 36baff0d7f | ||
|  | 201f3008b1 | ||
|  | f4873fee18 | ||
|  | e02261be6d | ||
|  | 2919e6765c | ||
|  | b8fc4d0079 | ||
|  | 4a46f5f095 | ||
|  | 3484ceabb8 | ||
|  | cab659dce6 | ||
|  | a657f29439 | ||
|  | 4c054bf316 | ||
|  | dc7922c38b | ||
|  | c6c68abfcc | ||
|  | 6aacb0c898 | ||
|  | e7000db491 | ||
|  | fce994ea7f | ||
|  | 6c6446765e | ||
|  | 69a99c70c6 | ||
|  | 56d9f7a8af | ||
|  | 363aefe399 | ||
|  | 7fd4f792ba | ||
|  | 6fbdde63d8 | ||
|  | b04dc90cdf | ||
|  | b525c91bd3 | ||
|  | a32c893078 | ||
|  | 2c6a744848 | ||
|  | 4492874d08 | ||
|  | d3a592e5bf | ||
|  | cab21b1b21 | ||
|  | 1319e422ea | ||
|  | c88ea40b57 | ||
|  | 3194a37fcb | ||
|  | 72ebaa52e9 | ||
|  | 0e00695fc7 | ||
|  | 48a691e722 | ||
|  | cf54d6d6f8 | ||
|  | a03fe234d0 | ||
|  | d88d40cc08 | ||
|  | d3b4af116e | ||
|  | 352b23331b | ||
|  | bdd6041a5c | ||
|  | 1894003f8a | ||
|  | 220513ae42 | ||
|  | fcbabbe357 | ||
|  | 3627969fce | ||
|  | 8807c0dbef | ||
|  | 23cc9f6ff8 | ||
|  | e50799e9c4 | ||
|  | b92c4844eb | ||
|  | c306d42d08 | ||
|  | e31558318e | ||
|  | 78a9420f26 | ||
|  | b47c5b5bfc | ||
|  | 28a312accf | ||
|  | 611094e92e | ||
|  | 2a8579a6a5 | ||
|  | 47577f2f47 | ||
|  | 34e3e45843 | ||
|  | 364dc9ddfb | ||
|  | 23324f0f87 | ||
|  | 17fa9a3b77 | ||
|  | 424b3ca308 | ||
|  | 26e2fc8fd4 | ||
|  | 8e18484898 | ||
|  | 354cfe0f9c | ||
|  | 983474b2bd | ||
|  | 14d861bcbb | ||
|  | f6cd349a16 | ||
|  | 8e1c4dec87 | ||
|  | 18b47e4a73 | ||
|  | 4f157f50ed | ||
|  | f44a2f4857 | ||
|  | c685ace327 | ||
|  | f23b0faf41 | ||
|  | e0e2ca7ccd | ||
|  | 83fe7f7eef | ||
|  | 1feaa8f2e9 | ||
|  | 598d6bf4c5 | ||
|  | 0afd5a40d6 | ||
|  | 26b70e9ed3 | ||
|  | a1a93a4bdd | ||
|  | 4939a7dd7c | ||
|  | 0fa6610fdb | ||
|  | b0148e7860 | ||
|  | 59a06a242d | ||
|  | ffe902605d | ||
|  | 556f7e85fc | ||
|  | 45c86be402 | ||
|  | bf34f413de | ||
|  | 9b022b187f | ||
|  | c3409d64dc | ||
|  | 3c5c3b5026 | ||
|  | f240f00d84 | ||
|  | 68c7764c63 | ||
|  | adfb039ba6 | ||
|  | 89416d9856 | ||
|  | 9b6c972e0f | ||
|  | 55fc04752a | ||
|  | 96f0919633 | ||
|  | 17b140baf4 | ||
|  | 45c2151d0f | ||
|  | 1887f5b7e7 | ||
|  | 708d1c7a32 | ||
|  | acf8c3015a | ||
|  | f83ae5789b | ||
|  | 57ccfcfc1b | ||
|  | dd0fdcfdd4 | ||
|  | 5c805be067 | ||
|  | e423380d7f | ||
|  | 4d8bebc917 | ||
|  | 4314fa883f | ||
|  | d6e39b362b | ||
|  | f89214f9cf | ||
|  | d17cac8210 | ||
|  | aa49283fa9 | ||
|  | e79ea7a2cf | ||
|  | 8a1d280f19 | ||
|  | 6a8eb9562f | ||
|  | 8f76e1e344 | ||
|  | 7b9f084e6b | ||
|  | 5b1693a908 | ||
|  | fd7c00da49 | ||
|  | 7fc5ced3af | ||
|  | a86092fb64 | ||
|  | 003827e916 | ||
|  | b15673c525 | ||
|  | 00363303b1 | ||
|  | 48fbe890f8 | ||
|  | 4179877cc7 | ||
|  | 282b83ac08 | ||
|  | 193656e71b | ||
|  | a25d127f36 | ||
|  | cf9df548ca | ||
|  | f29b93c762 | ||
|  | 032ace40d1 | ||
|  | f74dd1cb3c | ||
|  | 29889d1e35 | ||
|  | d6d19c4229 | ||
|  | ab08e67eaf | ||
|  | 00bf6ac258 | ||
|  | b65478e7d9 | ||
|  | e83b529f1c | ||
|  | 408274152b | ||
|  | 8ff82996fb | ||
|  | d59c4044b7 | ||
|  | 3574e21e4f | ||
|  | 5a091956ef | ||
|  | 14e9c58444 | ||
|  | bfe5b03c69 | ||
|  | f96f7f840e | ||
|  | a3bcf26dce | ||
|  | a7852a89cc | ||
|  | 1b0c761fc0 | ||
|  | 5e4e8d4eda | ||
|  | bd524d2e1e | ||
|  | 60fe919992 | ||
|  | b90063b170 | ||
|  | d9fce49b08 | ||
|  | 5dbee2a270 | ||
|  | 4779106139 | ||
|  | bf2de81873 | ||
|  | 28cdedc9aa | ||
|  | 7e90571404 | ||
|  | 42bbe63927 | ||
|  | 7ddbea697e | ||
|  | b4860de34d | ||
|  | 576f23d5fb | ||
|  | 86548fc7bf | ||
|  | b3b4d992fe | ||
|  | d72daf5f39 | ||
|  | 9ad959a478 | ||
|  | cc00a321da | ||
|  | de74273108 | ||
|  | a7658c7573 | ||
|  | 48a85ee6e0 | ||
|  | 461b789515 | ||
|  | b71ff6fbb8 | ||
|  | 1bcdcce93a | ||
|  | c09bfca634 | ||
|  | 36c5f02bfb | ||
|  | eae6e5d9a1 | ||
|  | 364813dd73 | ||
|  | 1a2b1f283b | ||
|  | a0e5cf4ecc | ||
|  | 820f7b4d93 | ||
|  | 727866f090 | ||
|  | 3d45cdc339 | ||
|  | 02a557aa67 | ||
|  | 6da27e5976 | ||
|  | 19a6e324c4 | ||
|  | 62eadbc174 | ||
|  | ae783d4f45 | ||
|  | 1241a902e3 | ||
|  | fdba648afb | ||
|  | b070e7de07 | ||
|  | d0741946c7 | ||
|  | 080226dd72 | ||
|  | 3cb6a5cfac | ||
|  | 758971e068 | ||
|  | 8739ab9c66 | ||
|  | e8e47c39d7 | ||
|  | 446c101018 | ||
|  | 3654591a1b | ||
|  | 7fb1c9dd35 | ||
|  | 0fffaccdf4 | ||
|  | 5902b241f9 | ||
|  | 784386fddc | ||
|  | d424583cbf | ||
|  | 290b821a3a | ||
|  | a0dfa8d421 | ||
|  | ceb00f6748 | ||
|  | 9bd328e147 | ||
|  | 6fb5c312c3 | ||
|  | 3f9ff7254f | ||
|  | f7a3acfaf4 | ||
|  | e4451ccaf8 | ||
|  | 2adb640821 | ||
|  | 765038274c | ||
|  | 2cbdced974 | ||
|  | fc5d9ae100 | ||
|  | 506168ab83 | ||
|  | 088fd6334b | ||
|  | 94cda90a6e | ||
|  | 78601d90c9 | ||
|  | fa4ac95ecc | ||
|  | dd4d4e23ad | ||
|  | acba86993d | ||
|  | 0fc55451c2 | ||
|  | 5c0bd8a810 | ||
|  | 1aebc95145 | ||
|  | 1d3f20b666 | ||
|  | eb2e106871 | ||
|  | f9a887c8c6 | ||
|  | 67ab810cb2 | ||
|  | 3e0d84383e | ||
|  | d245ea3eaa | ||
|  | 843fc03bf4 | ||
|  | c83c635067 | ||
|  | f605eb14e8 | ||
|  | fd02d77c59 | ||
|  | 0da8fb379d | ||
|  | 257a43298b | ||
|  | a2d3bcd571 | ||
|  | d4142c2cdd | ||
|  | e50d66b303 | ||
|  | 08b6433843 | ||
|  | 8cd536aab5 | ||
|  | 2b495c648f | ||
|  | 06048b6d71 | ||
|  | bb22287336 | ||
|  | a45942a966 | ||
|  | 85d621846d | ||
|  | 534acf8df2 | ||
|  | 5a6d4387ea | ||
|  | 317e844886 | ||
|  | b1f62a2735 | ||
|  | 65e4fea4ef | ||
|  | faca8512c5 | ||
|  | 2121387aa2 | ||
|  | 72c4444a60 | ||
|  | 2d8d2e7e6f | ||
|  | 49bff5d544 | ||
|  | 806a80cef1 | ||
|  | c6f0d5e478 | ||
|  | bf30aba005 | ||
|  | 727778b730 | ||
|  | b081ffce50 | ||
|  | e46779f87b | ||
|  | dabe8c1bb7 | ||
|  | 4042f88bd8 | ||
|  | a0947d0c54 | ||
|  | a34fd9ac89 | ||
|  | aa68322641 | ||
|  | 2d76aebb8e | ||
|  | 7cc1d23bc7 | ||
|  | 0bd2103a8c | ||
|  | 7d8916b6e9 | ||
|  | 8b5df3ca17 | ||
|  | ffdfe99d37 | ||
|  | 7efa67e7e6 | ||
|  | d69808c204 | ||
|  | de360c61dd | ||
|  | 6b04ddfad1 | ||
|  | 0d854ce906 | ||
|  | 38fdf26405 | ||
|  | 6835c15d9b | ||
|  | fa38bfd4e8 | ||
|  | 4d5c6d11ab | ||
|  | 9e80da705a | ||
|  | 9b04391f82 | ||
|  | 8f6c0796e3 | ||
|  | 326fcf4398 | ||
|  | fdda27abd1 | ||
|  | 7e8c62104a | ||
|  | fb213f6e74 | ||
|  | 22e75c1691 | ||
|  | 919f221be9 | ||
|  | da7d64667e | ||
|  | d19c6a1573 | ||
|  | 5cd23039a0 | ||
|  | 19b18d3d0a | ||
|  | 101947da8b | ||
|  | d3c3c23630 | ||
|  | abc14316ea | ||
|  | b66621f9c6 | ||
|  | aa5510531d | ||
|  | 12b846586c | ||
|  | b705f5b743 | ||
|  | 18a5fba42b | ||
|  | b5a3b6f86a | ||
|  | 00f2eda576 | ||
|  | c70d252dc3 | ||
|  | 2f088ce29e | ||
|  | ff408c604b | ||
|  | 6621c318db | ||
|  | 22a8ad2fde | ||
|  | 7674dc9b34 | ||
|  | 9e0ca51c2f | ||
|  | 961629d156 | ||
|  | 2cbebf9c99 | ||
|  | 08a4deca17 | ||
|  | ce9ea7baad | ||
|  | b35efb9f72 | ||
|  | c45dfacb41 | ||
|  | 91152a7977 | ||
|  | 0ce081323f | ||
|  | 79486e3393 | ||
|  | 60758dd76b | ||
|  | e74f659015 | ||
|  | c1c09fa6b4 | ||
|  | 47c7cb9327 | ||
|  | 4d6256e1a1 | ||
|  | 13180d92e3 | ||
|  | 6b38ef3c9f | ||
|  | 4f5b0634ad | ||
|  | ea25972257 | ||
|  | b6168898ec | ||
|  | da33cb54fe | ||
|  | 35d0458228 | ||
|  | e6c0280b40 | ||
|  | 15451ff42b | ||
|  | 9ab856e186 | ||
|  | 6e2db1ced6 | ||
|  | 5c4ce8754e | ||
|  | 416486c370 | ||
|  | 2f075be6f8 | ||
|  | a1494c4c93 | ||
|  | d79ab5ffeb | ||
|  | 01526a7b37 | ||
|  | 091a02f737 | ||
|  | aa4996ef28 | ||
|  | 2f4e2bde6b | ||
|  | e90f6a2fa3 | ||
|  | be8f1b9fdd | ||
|  | ba99190f53 | ||
|  | 70088704e2 | ||
|  | 02733e6e58 | ||
|  | 44732a5dd9 | ||
|  | 5bdd35464b | ||
|  | 1eae97731f | ||
|  | 0325a62f18 | ||
|  | 3a5538813c | ||
|  | 1f1b4b95ce | ||
|  | 8c3ed57ecc | ||
|  | dc8a64fa7d | ||
|  | 0d1e72a764 | ||
|  | 9b3fe09508 | ||
|  | 7c0cfb1da2 | ||
|  | 66429ce331 | ||
|  | bce859569f | ||
|  | 425fb8905b | ||
|  | 4f59c7f77f | ||
|  | 21d1faa793 | ||
|  | b9f3991d03 | ||
|  | c4de879b20 | ||
|  | ee5686e91a | ||
|  | 2a795e9138 | ||
|  | 9a6aa8f8c6 | ||
|  | 3794b181d5 | ||
|  | f09256a24e | ||
|  | 34fca9d6f5 | ||
|  | 433f10ef93 | ||
|  | 9f02f71c52 | ||
|  | 3dcc9bc143 | ||
|  | 7311895894 | ||
|  | a7cab51369 | ||
|  | 437b11af9a | ||
|  | 820b5cbb86 | ||
|  | e6a30f899c | ||
|  | 0bc6507df3 | ||
|  | 71c3c632d7 | ||
|  | 99a5f2cd9d | ||
|  | fb00b79d19 | ||
|  | 7782aa7379 | ||
|  | f3ee4a5dac | ||
|  | a8d6e59a7a | ||
|  | 1d4b1870cf | ||
|  | f63ad2dd69 | ||
|  | 6903eed4e7 | ||
|  | b9e922c658 | ||
|  | 54d8c64ad5 | ||
|  | 2f1fe5468e | ||
|  | 24d15d4274 | ||
|  | 0bc7aa52d8 | ||
|  | e52603b4a7 | ||
|  | 3b88712402 | ||
|  | 33e9ef2106 | ||
|  | 689fe4ed9a | ||
|  | b82d026f39 | ||
|  | 009059def4 | ||
|  | 03ff61d113 | ||
|  | c00914bea2 | ||
|  | 944d1c0a4a | ||
|  | 2cf23e33e3 | ||
|  | e2a0b42d03 | ||
|  | 894e9818ac | ||
|  | de18e256ce | ||
|  | 1a3c70ce1b | ||
|  | bd4a603e16 | ||
|  | 358b80d782 | ||
|  | 824ec42005 | ||
|  | 466935e9a3 | ||
|  | b52d3e3a7b | ||
|  | 888a6da4a5 | ||
|  | 972ac73dd9 | ||
|  | d8b238d5f1 | ||
|  | 63206c3da2 | ||
|  | 5713de8966 | ||
|  | 58f293fef3 | ||
|  | ffbb2c9689 | ||
|  | 9cd3dcdebf | ||
|  | f2fe58c3c5 | ||
|  | b78010aa94 | ||
|  | 49035543b9 | ||
|  | f9ccf635ca | ||
|  | e8ea294964 | ||
|  | 19ef2be88b | ||
|  | 30e8b8186f | ||
|  | 741643af5f | ||
|  | 6aaf9ba470 | ||
|  | 5957dc72eb | ||
|  | e32a9777d7 | ||
|  | 84a8f1eb2b | ||
|  | 6810953014 | ||
|  | 398964945a | ||
|  | 5f43c032f2 | ||
|  | 627cf90de0 | ||
|  | 2bedb36d7f | ||
|  | e93a95d0cb | ||
|  | 3f31666796 | ||
|  | 3fe8031cf3 | ||
|  | b27c7ce11b | ||
|  | ed34c2ca68 | ||
|  | 3ca2e953fb | ||
|  | d8a7328365 | ||
|  | f33cd625bf | ||
|  | 80530bb13c | ||
|  | affc12df4b | ||
|  | 4eedf00025 | ||
|  | e5acbcc0dd | ||
|  | 1b6743ee53 | ||
|  | b5fb82d95d | ||
|  | 193aa4e1f2 | ||
|  | ebd34427c7 | ||
|  | 3d75573889 | ||
|  | c6240ca415 | ||
|  | 2ee8984b44 | ||
|  | b7ec587e5b | ||
|  | 47c58bce2b | ||
|  | 96e95ac533 | ||
|  | b013a065f7 | ||
|  | 74b37d11cf | ||
|  | c6cc013617 | ||
|  | f4e1d80a87 | ||
|  | 91dad4060f | ||
|  | e07cb82c15 | ||
|  | 2770cec187 | ||
|  | 5c3928190a | ||
|  | 9f4b04ea0f | ||
|  | 96d20756ca | ||
|  | b8454c7f5b | ||
|  | c84f703f92 | ||
|  | 57c2e867d8 | ||
|  | 553f496d84 | ||
|  | b1d8aca46a | ||
|  | 8e884fd3ea | ||
|  | 76524b7498 | ||
|  | 65914fb2b2 | ||
|  | a4d0da0085 | ||
|  | c9d496e9a0 | ||
|  | 88a951ba4f | ||
|  | 403ceb19dc | ||
|  | 835d3c3d18 | ||
|  | 3135b456be | ||
|  | 0be6d3661a | ||
|  | 6f5f5b4711 | ||
|  | c6c5f85abb | ||
|  | 7b860f7739 | ||
|  | e28804c03a | ||
|  | 1b9432824b | ||
|  | 3b71a6b5c5 | ||
|  | 7ce8768c19 | ||
|  | 25e0f12976 | ||
|  | f168682a68 | ||
|  | d25058a46d | ||
|  | 4d0c092d9f | ||
|  | 15714ef855 | ||
|  | eb743beaa3 | ||
|  | 0007535a46 | ||
|  | 8391af026c | ||
|  | 800f656dcf | ||
|  | 088c5f49d9 | ||
|  | d8d98b6143 | ||
|  | 02fb3b9315 | ||
|  | 4f87db784e | ||
|  | 7e6287b925 | ||
|  | 999cdfd997 | ||
|  | 8d6cb087c6 | ||
|  | 2b7417c728 | ||
|  | 3c455cf1c1 | ||
|  | 5135185e31 | ||
|  | b461f26e5d | ||
|  | faef5b8570 | ||
|  | 0a20e04c10 | ||
|  | d19bb2308d | ||
|  | d8dd07d9ef | ||
|  | 36c56243cd | ||
|  | 23d06b79a6 | ||
|  | e4c4e923ee | ||
|  | 936d2f1f47 | ||
|  | 07018b5060 | ||
|  | ac90d6ae5c | ||
|  | 2141f2c4c5 | ||
|  | 81870777a9 | ||
|  | 845092dcad | ||
|  | dd473d1e1e | ||
|  | d2869bf4ed | ||
|  | 891a3f4b29 | ||
|  | 6767b50d75 | ||
|  | d9e4b562a9 | ||
|  | fb3243f1bc | ||
|  | 5fe1497c92 | ||
|  | 5446592d44 | ||
|  | 40ed9a53c9 | ||
|  | f7ac8cea90 | ||
|  | 4ef5d1f0cd | ||
|  | 6992615c98 | ||
|  | 43dabb2825 | ||
|  | 05e40e5681 | ||
|  | 2c4536e137 | ||
|  | 3dc81058a0 | ||
|  | bd84667a2b | ||
|  | e5b6a12977 | ||
|  | ca415d5d62 | ||
|  | 99b4fe7278 | ||
|  | 327e164869 | ||
|  | 25bc571f30 | ||
|  | 38c7e8a1d2 | ||
|  | ca282e28e0 | ||
|  | 5ef59c06df | ||
|  | 8f55d385d6 | ||
|  | cd2fc25c19 | ||
|  | 709983eea6 | ||
|  | 40e99b1b80 | ||
|  | 488684d960 | ||
|  | f35034b989 | ||
|  | 9d6f9b1f26 | ||
|  | 6148a608fb | ||
|  | 3fa9e70383 | ||
|  | 16fea6f009 | ||
|  | df9ed835ca | ||
|  | e394c8f0f2 | ||
|  | 21974f7288 | ||
|  | 5ef0170d77 | ||
|  | c21dcf14de | ||
|  | a8d20d4e1e | ||
|  | 8b307485b0 | ||
|  | 4544afe422 | ||
|  | 9d7eba5f70 | ||
|  | be0aee95f2 | ||
|  | 3469ed7ab9 | ||
|  | 1f223aa7e6 | ||
|  | 0a431ead5e | ||
|  | f750796444 | ||
|  | c82bcd882a | ||
|  | 7d0ec33b54 | ||
|  | 43d48b3feb | ||
|  | 2e406d2687 | ||
|  | 3f30808104 | ||
|  | ab10217c86 | ||
|  | 00430491ca | ||
|  | 109202329f | ||
|  | 3b1509f307 | ||
|  | 7ad7b08bed | ||
|  | 4650e5e8fb | ||
|  | af59d4929e | ||
|  | e34100bab4 | ||
|  | d9b3a9fb60 | ||
|  | 39eec59c90 | ||
|  | d651d0d472 | ||
|  | 87a2358a65 | ||
|  | cef4e313e1 | ||
|  | 7cc1a4eba0 | ||
|  | c6cc0133b3 | ||
|  | 7748e68440 | ||
|  | 6c2230a076 | ||
|  | 66b233eaea | ||
|  | fed58f3920 | ||
|  | 815b2be7f7 | ||
|  | f420c9fb7c | ||
|  | 01bdf10b94 | ||
|  | ddedc1ee92 | ||
|  | 9e9703183f | ||
|  | adce9e6220 | ||
|  | c499133bbe | ||
|  | 8f505c2dcc | ||
|  | b320064418 | ||
|  | a643933d16 | ||
|  | 2659ec5887 | ||
|  | 9f8327926d | ||
|  | 7a568dc118 | ||
|  | c946b06be5 | ||
|  | c65fd0e477 | ||
|  | 8f8217e928 | ||
|  | 6c9e1799c7 | ||
|  | decd70eb23 | ||
|  | a20d40618f | ||
|  | b4af8ec751 | ||
|  | feb5eed8a5 | ||
|  | f4fa39c70e | ||
|  | 7b7165f5d8 | ||
|  | 13897db6d3 | ||
|  | c4afdb7198 | ||
|  | 0284975f3f | ||
|  | 269e3d1303 | ||
|  | 8c81f7ece9 | ||
|  | f6e0593774 | ||
|  | 3d80e549cb | ||
|  | acc7448dc5 | ||
|  | 35d3d3de72 | ||
|  | 0372e07eb0 | ||
|  | 00221e3410 | ||
|  | 9c264611cf | ||
|  | 31d7f70e27 | ||
|  | 04e8b83d45 | ||
|  | e87bf71f20 | ||
|  | 2dd70c8d62 | ||
|  | a3886702a3 | ||
|  | 713af133a0 | ||
|  | 057ffffbf2 | ||
|  | a81d6d124b | ||
|  | 23f07fde5e | ||
|  | b42b760393 | ||
|  | bf6f4c48c0 | ||
|  | 6133f04841 | ||
|  | 3c18f79ea4 | ||
|  | 2af8342fea | ||
|  | fc3db7942d | ||
|  | 164e2b2678 | ||
|  | b7b28390df | ||
|  | a6e996d921 | ||
|  | 07e666345d | ||
|  | 007f10d29d | ||
|  | f9284d20ca | ||
|  | 9050869781 | ||
|  | 54975de0f3 | ||
|  | a7aead5138 | ||
|  | 6868f66f24 | ||
|  | 3c0b00e42d | ||
|  | 3327388f1f | ||
|  | 04497aec36 | ||
|  | aa9d596930 | ||
|  | f96e68cd11 | ||
|  | 013227323d | ||
|  | 19cbb442ee | ||
|  | c0e7f341cb | ||
|  | 0a1ba7c434 | ||
|  | b708dabf98 | ||
|  | 899e56e5b8 | ||
|  | f6d3bd8ccb | ||
|  | deb5677a57 | ||
|  | 5c464c3f5a | ||
|  | cceef33fef | ||
|  | ed8174fe36 | ||
|  | 3c8906494f | ||
|  | 6e745e9882 | ||
|  | fb4e9c3772 | ||
|  | 2c282f9550 | ||
|  | d92d41cb05 | ||
|  | 82e7050561 | ||
|  | 44f92d4169 | ||
|  | 2f1fae38dd | ||
|  | 9fe99979fe | ||
|  | 6399de0b51 | ||
|  | 959740a585 | ||
|  | 159b082828 | ||
|  | 8e7c5af16c | ||
|  | c1645ab7a7 | ||
|  | 2ae2bfdde9 | ||
|  | 3fe93968a6 | ||
|  | 79a2d715b0 | ||
|  | 50b271c868 | ||
|  | a57f28ac83 | ||
|  | 3f3747a2fe | ||
|  | d133913c3d | ||
|  | e049cef00a | ||
|  | eb8176971c | ||
|  | 5bbfca45fa | ||
|  | 9b500cd867 | ||
|  | b52cae6575 | ||
|  | 35a0142f9b | ||
|  | d4f6ef4f1b | ||
|  | 11024deaae | ||
|  | 5a038de1d5 | ||
|  | 903982e896 | ||
|  | 6355c404cc | ||
|  | 92b9cb5d43 | ||
|  | 7580383d26 | ||
|  | ba0934e41e | ||
|  | a6a1021521 | ||
|  | 33b4d83c73 | ||
|  | 6cf630c74a | ||
|  | 736fe5b84e | ||
|  | 4241bde6ea | ||
|  | b4ce14d744 | ||
|  | 10832a2ccc | ||
|  | 91aca44f67 | ||
|  | 96cfbb201a | ||
|  | b2bc155701 | ||
|  | a70ef5594d | ||
|  | 6d991586fd | ||
|  | f8890ca841 | ||
|  | 0752c6b24f | ||
|  | 3ffaf2c0e1 | ||
|  | a3e0fbd606 | ||
|  | 9c8ceb6b4e | ||
|  | bebce2c053 | ||
|  | 34c6790762 | ||
|  | a5fb009b62 | ||
|  | 9671ca5ebf | ||
|  | 5334ea393e | ||
|  | 2aaacc02e3 | ||
|  | 222e929b2d | ||
|  | 6f16d35a92 | ||
|  | d7a2ccf5ac | ||
|  | 9ce605221a | ||
|  | 1e930fe950 | ||
|  | 4dc158589c | ||
|  | 4525eb457b | ||
|  | 56a2e07dc2 | ||
|  | 9b7fe9ac31 | ||
|  | c3da07ccf7 | ||
|  | b691a56d51 | ||
|  | 13e0a1b5bb | ||
|  | 646baddce4 | ||
|  | 02f61c323d | ||
|  | 1e3d2df9e7 | ||
|  | e43fae86f1 | ||
|  | c6151e34e0 | ||
|  | 45cb991254 | ||
|  | 839bc99f94 | ||
|  | 0aeb1ca408 | ||
|  | cd76a906f4 | ||
|  | e438491938 | ||
|  | 307b35a5bf | ||
|  | 217c9720ea | ||
|  | 778c7dc5f2 | ||
|  | 4c80154437 | ||
|  | 6bd9529a66 | ||
|  | 33ea2b4844 | ||
|  | 5c807f3dc8 | ||
|  | 9063b559c4 | ||
|  | 40f6df7160 | ||
|  | 95165aa92f | ||
|  | d96fcdb35c | ||
|  | 5efabdcea3 | ||
|  | 2d57dc0565 | ||
|  | 576629f825 | ||
|  | 5badb9d151 | ||
|  | 45dc379d9a | ||
|  | 49c0c9f44c | ||
|  | ef5fa4d062 | ||
|  | 35b66d5d94 | ||
|  | d0b749a43c | ||
|  | bcc4d4e8c6 | ||
|  | 41bff0b293 | ||
|  | dfc7f35ef1 | ||
|  | 0bbbbdde80 | ||
|  | 5fa5284b58 | ||
|  | b7ef82cb67 | ||
|  | 1233780265 | ||
|  | dd095279c8 | ||
|  | 4d5200c50f | ||
|  | 1bcd675ead | ||
|  | 2a3d3de0b2 | ||
|  | b124836f3a | ||
|  | 93ba95971b | ||
|  | 7b193b3745 | ||
|  | 2b647d2405 | ||
|  | 7714cca599 | ||
|  | 42511aa9cf | ||
|  | ace2a2f3d1 | ||
|  | 2062fe7a08 | ||
|  | d4c02c3988 | ||
|  | 4c1496b4a4 | ||
|  | eec876295d | ||
|  | 3093175f54 | ||
|  | dd05c4d34a | ||
|  | 57e3a40321 | ||
|  | 9e70152076 | ||
|  | e1da83a8f6 | ||
|  | 8108198613 | ||
|  | 915849b2ce | ||
|  | 2e96302336 | ||
|  | 051cd744ad | ||
|  | 53fbc165ba | ||
|  | 1862bcf867 | ||
|  | 8909d1d144 | ||
|  | a2f0f20284 | ||
|  | 1951b52aa5 | ||
|  | cd7a9345ec | ||
|  | dba4c33c81 | ||
|  | 153c239c9b | ||
|  | 4034ab4182 | ||
|  | 9c917c3bd3 | ||
|  | cca0222e1d | ||
|  | 682db9b81f | ||
|  | 3e000f9be1 | ||
|  | 548a552638 | ||
|  | 1d5b5b7d15 | ||
|  | 91aa4586e2 | ||
|  | 6d3bc43ef6 | ||
|  | 0f63e26641 | ||
|  | ab2ef69c6a | ||
|  | 621350515e | ||
|  | 03ed5c398a | ||
|  | 65d6f8c018 | ||
|  | 79d0673ae6 | ||
|  | cbd488e19f | ||
|  | 380d869195 | ||
|  | 73893f2a33 | ||
|  | ad81470d35 | ||
|  | fc140d04ef | ||
|  | a0257ed7e7 | ||
|  | 4769487c3b | ||
|  | 29def587ff | ||
|  | f35d0b2b37 | ||
|  | 283e92d55d | ||
|  | c82b26d334 | ||
|  | 2753e02cda | ||
|  | fde733c205 | ||
|  | f730591f2c | ||
|  | 94eac1e79d | ||
|  | 9f2b6d0ec6 | ||
|  | 7d7d0ea001 | ||
|  | 794101691c | ||
|  | a443144a5c | ||
|  | 73f0867061 | ||
|  | f97db93212 | ||
|  | d36708933c | ||
|  | 14f82ea0a9 | ||
|  | c41dd6495d | ||
|  | 1005c99e9c | ||
|  | f4478fc762 | ||
|  | c5ed308ea5 | ||
|  | 3ab5ba6149 | ||
|  | 9b2fde962c | ||
|  | 571a7dc42d | ||
|  | 3421fffa9b | ||
|  | c25619fd63 | ||
|  | 76adb13a64 | ||
|  | 33b1eed361 | ||
|  | c44891a1a8 | ||
|  | f31f52ff1c | ||
|  | 6ad9a56bd9 | ||
|  | a5c2fc4f9d | ||
|  | 0a65006bb4 | ||
|  | 3db896c4e2 | ||
|  | e80322021a | ||
|  | 48316ba60d | ||
|  | c0f1493473 | ||
|  | ccbd128fa2 | ||
|  | 46817caa68 | ||
|  | 775c8624d4 | ||
|  | 36eedc987c | ||
|  | 3b8f31c888 | ||
|  | a34fa74eaa | ||
|  | d6b2d8dcb5 | ||
|  | aab0599280 | ||
|  | dfa8eaf24e | ||
|  | 63d55cb797 | ||
|  | c642eee0d2 | ||
|  | 5f33d298d7 | ||
|  | fc39fd7519 | ||
|  | 7f442f7485 | ||
|  | 0ee3203a5a | ||
|  | 43a5df8780 | ||
|  | 0949df014b | ||
|  | 01f4dd8f97 | ||
|  | 8b7599f5d9 | ||
|  | 9bdc320cf8 | ||
|  | d9c8285806 | ||
|  | 4b8344082f | ||
|  | e5cf76b460 | ||
|  | 422ca87a12 | ||
|  | a512ccca28 | ||
|  | ba215be97c | ||
|  | ca16050681 | ||
|  | 06e4ed1bb4 | ||
|  | d4a8ae5743 | ||
|  | a4f2f811d3 | ||
|  | ebaba95eb3 | ||
|  | 31f7769199 | ||
|  | 7726be94be | ||
|  | f2cbcea6d7 | ||
|  | 5d6a28954b | ||
|  | 319f1deceb | ||
|  | 3f14958741 | ||
|  | 42ba4a5c56 | ||
|  | c804c395ed | ||
|  | 58c8cf1a3a | ||
|  | 76ea8c86b7 | ||
|  | 050378fa72 | ||
|  | 29d858d58c | ||
|  | dc45920afb | ||
|  | 15fcb57e2f | ||
|  | 91ee85152c | ||
|  | aa7bf7af1e | ||
|  | 02c1ba39ad | ||
|  | 8e8d9426df | ||
|  | 57f301815d | ||
|  | dfc9dc713c | ||
|  | 1a0cad7f5f | ||
|  | 3df436f0d8 | ||
|  | d737fca295 | ||
|  | da5a3532d7 | ||
|  | 27111e7b29 | ||
|  | b847bc0aba | ||
|  | 6eb0bc50e2 | ||
|  | 7530f03bf6 | ||
|  | 24a9633edc | ||
|  | 7e1a5ce445 | ||
|  | 2ffdbc7fc0 | ||
|  | 52c7b68cc3 | ||
|  | ddbcc8e84b | ||
|  | 2bfb195ad6 | ||
|  | cd2d9517a0 | ||
|  | 19dc312128 | ||
|  | 175659628d | ||
|  | 8fea2b09be | ||
|  | f77f45b70c | ||
|  | 103a287f11 | ||
|  | d600ade40c | ||
|  | a6a7cba121 | ||
|  | 7fff635a3f | ||
|  | 7a749b88c7 | ||
|  | 1ce6a7f4be | ||
|  | a092910fdd | ||
|  | bb77838b3e | ||
|  | 1001f1bd36 | ||
|  | de0e5583a5 | ||
|  | cbd2a44350 | ||
|  | c888e461ba | ||
|  | d135522087 | ||
|  | ce2b148dd2 | ||
|  | 2d075c4dd6 | ||
|  | bcd1841f71 | ||
|  | 029cf4ad1f | ||
|  | ed7fc86d69 | ||
|  | 82a9e43b6f | ||
|  | 9ae2c731ed | ||
|  | 7d1ba466b4 | ||
|  | 4f1d8678ea | ||
|  | 4bd72ebc63 | ||
|  | e5986e0ae2 | ||
|  | fae39e4bc9 | ||
|  | dbe8357dd5 | ||
|  | 3234f0bdd7 | ||
|  | 47a4d58009 | ||
|  | 4ae60da58d | ||
|  | 47f995bda3 | ||
|  | 42721628eb | ||
|  | f42ab957d4 | ||
|  | ce9d0d7e82 | ||
|  | baf79dda21 | ||
|  | b71a9bc097 | ||
|  | 129632cd6b | ||
|  | aca8899c4d | ||
|  | 5c3d91e65e | ||
|  | 0205d827f1 | ||
|  | 225c31d583 | ||
|  | b18d87ddba | ||
|  | 25298c72bb | ||
|  | 3df3d27533 | ||
|  | cbb0b57018 | ||
|  | 65f205bca8 | ||
|  | 1cc7f80109 | ||
|  | 213a0a18a5 | ||
|  | 1a24d599b3 | ||
|  | d80be60e2b | ||
|  | 0ffe79d76c | ||
|  | db36d0a375 | ||
|  | ff659a0be3 | ||
|  | 8485b12102 | ||
|  | d889cc3c5a | ||
|  | 7bb65fca4e | ||
|  | 8aaa5951ca | ||
|  | d58f3b7520 | ||
|  | e5a636a159 | ||
|  | 51f314e907 | ||
|  | 531fa30b69 | ||
|  | 2b3bb81fae | ||
|  | 80f80cd31f | ||
|  | 79705fbf11 | ||
|  | 191a4e569e | ||
|  | 1cac35be03 | ||
|  | 6d48100f44 | ||
|  | 4627af3e90 | ||
|  | 913952ffe1 | ||
|  | 67bf6afc89 | ||
|  | 06064decd2 | ||
|  | 4cca9f17df | ||
|  | 74a89223c0 | ||
|  | 2954017836 | ||
|  | a03262fc01 | ||
|  | d65ce6fc2c | ||
|  | d27e1eee25 | ||
|  | b1f00bb708 | ||
|  | e0f1e79e6a | ||
|  | d70b7d41e8 | ||
|  | 43af9f3fad | ||
|  | bc53dd6830 | ||
|  | 263616ef01 | ||
|  | 285da0542e | ||
|  | 17f7e2f892 | ||
|  | a29d8f1d68 | ||
|  | 8965172603 | ||
|  | 03c2967337 | ||
|  | 5b154a0da4 | ||
|  | b2c8c326d7 | ||
|  | 96aedaa91f | ||
|  | a22ad1ec32 | ||
|  | a4244defb5 | ||
|  | 57328e55f3 | ||
|  | 87c32aeb40 | ||
|  | 2e01e0c30e | ||
|  | a12b2de74a | ||
|  | 6b01d8f99b | ||
|  | eac4f6062e | ||
|  | 5583cf0a5f | ||
|  | 57d772fa23 | ||
|  | 1bdc3988a9 | ||
|  | 2af55baa9a | ||
|  | 0452eec11d | ||
|  | c4f7db6c04 | ||
|  | 3569529a84 | ||
|  | 70942ac0f6 | ||
|  | dc02e39918 | ||
|  | 73d6bc35ec | ||
|  | b1d558d700 | ||
|  | 897480265f | ||
|  | 73724f5a33 | ||
|  | bdbd495a9e | ||
|  | 1fcf009804 | ||
|  | 914c5752a5 | ||
|  | 201b12a886 | ||
|  | c5f23ad93d | ||
|  | 28d62009a7 | ||
|  | 1a5a436f82 | ||
|  | 1275ac0569 | ||
|  | 5112fb777e | ||
|  | f571a944c9 | ||
|  | bc9aff8c60 | ||
|  | c4c7ab7888 | ||
|  | d9819a990c | ||
|  | aea400e26a | ||
|  | eb4e7735c1 | ||
|  | 4b498ae8cd | ||
|  | 158e2a4ca9 | ||
|  | b011d48d82 | ||
|  | 8ac3e725f8 | ||
|  | 9a4aef0358 | ||
|  | 7d3146234a | ||
|  | 5d2ca6493d | ||
|  | 4752f9aa37 | ||
|  | 025d3a03d6 | ||
|  | aec06183e7 | ||
|  | aa28abd517 | ||
|  | 7430b31697 | ||
|  | 759f72169a | ||
|  | 1f7135be61 | ||
|  | 6942f9c1cf | ||
|  | d9da75d1c0 | ||
|  | 7ab7372be4 | ||
|  | 3503c98857 | ||
|  | 708c3f1e2a | ||
|  | 6f645e8619 | ||
|  | bce7ca7ac4 | ||
|  | 350465c25d | ||
|  | 5b9c70ae22 | ||
|  | 9b30afeca9 | ||
|  | c1b202c119 | ||
|  | 41cfe5d2ca | ||
|  | 05339e184f | ||
|  | 447127d956 | ||
|  | 394334fbea | ||
|  | 9f8cd33d43 | ||
|  | f066e28c35 | ||
|  | b349a449bb | ||
|  | 1c5898d396 | ||
|  | 6802967863 | ||
|  | 0462f18680 | ||
|  | af6699098f | ||
|  | 6b7e7dc124 | ||
|  | 6bae4c6a66 | ||
|  | 46da918dbe | ||
|  | bb7e5f17b5 | ||
|  | b9d03114c2 | ||
|  | 436b1ce176 | ||
|  | 50fb5d83f1 | ||
|  | fda672f806 | ||
|  | 2bf783b04d | ||
|  | 2f72b23a0d | ||
|  | 85336f9777 | ||
|  | 174d964553 | ||
|  | cf8677248e | ||
|  | 1e6a3163af | ||
|  | e008919978 | ||
|  | 4814066c67 | ||
|  | f17f8b48c2 | ||
|  | ab0aec0ac5 | ||
|  | b49a641ba5 | ||
|  | 2f50051426 | ||
|  | 43cc32db40 | ||
|  | b4d6f6b947 | ||
|  | 71ff533623 | ||
|  | e33a5bbef5 | ||
|  | 6c0112c2be | ||
|  | 15bbf26b93 | ||
|  | 87c97efce0 | ||
|  | 6c4aee1479 | ||
|  | 73549a9044 | ||
|  | 30fdd3e184 | ||
|  | c97eb5d63f | ||
|  | 5729c7d5e7 | ||
|  | d77b13efcb | ||
|  | c43faca7b9 | ||
|  | 892ddd5724 | ||
|  | a9de779f33 | ||
|  | 1c2f016ba0 | ||
|  | 7b4d9140af | ||
|  | c1fc87ff4e | ||
|  | cd5ea5d4e0 | ||
|  | 30c01089f5 | ||
|  | 89825a2b21 | ||
|  | a743b75bb4 | ||
|  | f7ebf8dedd | ||
|  | f6220cab3b | ||
|  | 0c5e1c4138 | ||
|  | 03fe431f1a | ||
|  | a8e4554fec | ||
|  | e81b09b9aa | ||
|  | c6e846e0ae | ||
|  | 03dcfb5c4b | ||
|  | 3e54da03e2 | ||
|  | c4b3196917 | ||
|  | 0d81e7933e | ||
|  | b2a2735034 | ||
|  | f865c5de90 | ||
|  | 4159369e8b | ||
|  | 170693cf0b | ||
|  | 4e7b5d4af8 | ||
|  | 67bf789fcf | ||
|  | f5cf616c2f | ||
|  | 7975f19817 | ||
|  | 017602056d | ||
|  | c63f43854b | ||
|  | 5cc71ec2ad | ||
|  | 80e81f8475 | ||
|  | 3685c8e015 | ||
|  | 99e943c365 | ||
|  | 21818e71f5 | ||
|  | bcc6d25e21 | ||
|  | 7b885ee0d3 | ||
|  | c10e808a4f | ||
|  | 54e9be0ed8 | ||
|  | 938cdf316a | ||
|  | 27c33911e6 | ||
|  | e88f8759e7 | ||
|  | f2992e3165 | ||
|  | c71fd1ee3b | ||
|  | fb45b19fdc | ||
|  | c4ea8d4942 | ||
|  | 646aa131ef | ||
|  | 0adb40bf92 | ||
|  | 17d6014bf1 | ||
|  | ff57cd4eaf | ||
|  | 74bd7c3744 | ||
|  | cfbb283f85 | ||
|  | 74a3c4451b | ||
|  | be3643c962 | ||
|  | f4aa546af8 | ||
|  | 67b876a7f4 | ||
|  | 94e177c0ef | ||
|  | 1bd83cc9bc | ||
|  | ecda3f4a7d | ||
|  | 8f972a965d | ||
|  | 0f051fc57c | ||
|  | c3f8925f46 | ||
|  | 5d0cab2052 | ||
|  | 4d7492f682 | ||
|  | fc9d99080f | ||
|  | 47ebac0276 | ||
|  | cb3fca03e9 | ||
|  | abbbd83729 | ||
|  | 1743ab7812 | ||
|  | 324e3972a6 | ||
|  | 1502dda2ab | ||
|  | f31b2c4a79 | ||
|  | 89b9b60e0c | ||
|  | de9ba12779 | ||
|  | 9cc4359c04 | ||
|  | 67eaf120b9 | ||
|  | b8353c4a33 | ||
|  | 7013033ae4 | ||
|  | cb8cd03852 | ||
|  | f63fb62014 | ||
|  | 2e4fb86b86 | ||
|  | 5e776a07dd | ||
|  | 81e637e50e | ||
|  | 0971ad0a80 | ||
|  | 8267ded7ec | ||
|  | 7f36ea55f5 | ||
|  | 72a051f2d3 | ||
|  | 51b197888c | ||
|  | cd63865d31 | ||
|  | 5be5685a09 | ||
|  | 76b2f25d46 | ||
|  | 58607d4a7f | ||
|  | c0a5b16a7f | ||
|  | 3a0c69005b | ||
|  | 5c295fb9e3 | ||
|  | 4ee212e7d5 | ||
|  | 70651ce994 | ||
|  | a778a91106 | ||
|  | cfc31eead3 | ||
|  | da0a1bbe9f | ||
|  | bc66fb33e9 | ||
|  | b1b6493755 | ||
|  | 1d189f239b | ||
|  | 5b90691bcc | ||
|  | d1d5972277 | ||
|  | 2c07d77368 | ||
|  | 642cfbf59a | ||
|  | bb1367cfb9 | ||
|  | 11724aa555 | ||
|  | 4d374712de | ||
|  | eb9003187d | ||
|  | caba444962 | ||
|  | 5b6c8c191f | ||
|  | dd51589f67 | ||
|  | b02a31d4b9 | ||
|  | 0e7878b406 | ||
|  | cae91ce0c5 | ||
|  | 67a65a2aa9 | ||
|  | 364b0a7163 | ||
|  | d6419f2059 | ||
|  | 6f7ad7ef91 | ||
|  | 5ae588833b | ||
|  | a70dbac0e6 | ||
|  | 4d34a02afe | ||
|  | 4db4f45897 | ||
|  | 2d5280fc95 | ||
|  | b8d568761e | ||
|  | 29309dac9a | ||
|  | 7f7745071a | ||
|  | 1914032e35 | ||
|  | f44c8f1205 | ||
|  | fe2ef4e61c | ||
|  | fc3eda55c7 | ||
|  | 8adf1cdd02 | ||
|  | adbbc656d4 | ||
|  | 8e852bce02 | ||
|  | bb461b009f | ||
|  | 03559a3cc4 | ||
|  | 7bb2fe128a | ||
|  | 2312e17a8e | ||
|  | 9835b382da | ||
|  | 1eacc6fbff | ||
|  | 85187239b6 | ||
|  | 819ff2a902 | ||
|  | c744104a18 | ||
|  | c87801f0a9 | ||
|  | 39735594bd | ||
|  | 30964f65e4 | ||
|  | ee0c7fd8bf | ||
|  | dfdecef8e7 | ||
|  | edcdfeb057 | ||
|  | 47f0de9836 | ||
|  | 9ba657797e | ||
|  | 07442a6f84 | ||
|  | 3faf3c84be | ||
|  | abcacc82f3 | ||
|  | 9544b7d968 | ||
|  | babbc8bcd6 | ||
|  | 12809ebc74 | ||
|  | b45a601ad2 | ||
|  | f099dc6a37 | ||
|  | 803caddbd4 | ||
|  | 4d7b988018 | ||
|  | c1f88a4e14 | ||
|  | 5d9ec0b208 | ||
|  | 1877cacf9c | ||
|  | 2f4978cfea | ||
|  | d27a1103fa | ||
|  | b85bb95082 | ||
|  | db7f93cff3 | ||
|  | 85e271098f | ||
|  | 17001e2f74 | ||
|  | c82f4f0d45 | ||
|  | 88247a3af9 | ||
|  | 158578a406 | ||
|  | 19314e7e06 | ||
|  | 8bcbc6d545 | ||
|  | ef55e6d476 | ||
|  | 295ef3dc1d | ||
|  | 9d125c9e79 | ||
|  | 86363986fc | ||
|  | 0a2dbbc58b | ||
|  | 673a966541 | ||
|  | db1e69813b | ||
|  | e60d56f060 | ||
|  | 328e062ae9 | ||
|  | 0523c2ea4b | ||
|  | c5c7378c63 | ||
|  | 9b2080d036 | ||
|  | d4b3649640 | ||
|  | b085993901 | ||
|  | 0d4afad342 | ||
|  | 0da694b845 | ||
|  | 6d5e7d9e81 | ||
|  | bc08bea284 | ||
|  | 0e5a0661e1 | ||
|  | a839bd428f | ||
|  | 0277062693 | ||
|  | 7affa5ab69 | ||
|  | ed22af4e73 | ||
|  | 63ebb6998e | ||
|  | 7914cd47ca | ||
|  | 708dbac70e | ||
|  | 1b62dd5c40 | ||
|  | 4911545843 | ||
|  | c5cc4b7867 | ||
|  | eacb614750 | ||
|  | 341e1e7a6d | ||
|  | a02c820c2d | ||
|  | 2f6890c78a | ||
|  | 516591fe88 | ||
|  | d2941a9110 | ||
|  | f7302f710b | ||
|  | 6a02ac7e80 | ||
|  | d1b86fdef5 | ||
|  | 57ac38ddca | ||
|  | 7a73a92074 | ||
|  | d1b30f4792 | ||
|  | 16dcf78cab | ||
|  | d868cfdeb0 | ||
|  | c074f4d925 | ||
|  | 453024c58d | ||
|  | fe8340617a | ||
|  | b024dd913d | ||
|  | a2a698ab0e | ||
|  | bb56f92213 | ||
|  | 8dcd998945 | ||
|  | bcbbbe4046 | ||
|  | 7200a8cb84 | ||
|  | 6925344807 | ||
|  | 60ceeb0ddd | ||
|  | 06caabf333 | ||
|  | 954131bd51 | ||
|  | 855efe7fe8 | ||
|  | d902a74ab0 | ||
|  | 499e11f730 | ||
|  | 6db59a9c31 | ||
|  | 6465726008 | ||
|  | 3a3b96e0be | ||
|  | 992c91dc0c | ||
|  | 809473c15c | ||
|  | d79a5ec3d6 | ||
|  | 237469ceaf | ||
|  | c28d9135d9 | ||
|  | 48a5679087 | ||
|  | 7c938712f2 | ||
|  | 4df12bebc2 | ||
|  | dfe8987aaa | ||
|  | 02dbe401d8 | ||
|  | c18f8c92e7 | ||
|  | 857cd718df | ||
|  | 11d4f6499a | ||
|  | f2c25b4744 | ||
|  | 27b846717f | ||
|  | 9ed138f896 | ||
|  | 1978dc80eb | ||
|  | fc4b247f4f | ||
|  | ebf7056f4a | ||
|  | eb975d7e13 | ||
|  | a2dd8cb6b9 | ||
|  | 7c254c6136 | ||
|  | c8a33b83f1 | ||
|  | 1145c72b01 | ||
|  | 7fc45fb711 | ||
|  | e146262c38 | ||
|  | 6f808bd06e | ||
|  | 0b6ab49325 | ||
|  | 66d9182e50 | ||
|  | 654cca82a9 | ||
|  | 89785da1c5 | ||
|  | 2f9964e46e | ||
|  | 168ecd67b0 | ||
|  | bcbe740598 | ||
|  | 86c8929d77 | ||
|  | 6738a9433b | ||
|  | 23843ec86e | ||
|  | f4db0da585 | ||
|  | 9ee3b796cd | ||
|  | f57569f553 | ||
|  | fffd0e8990 | ||
|  | 200e52bab5 | ||
|  | a0ef649dd8 | ||
|  | 0dd01bda01 | ||
|  | a707598042 | ||
|  | 8a3171308a | ||
|  | 29c887f30b | ||
|  | 661398d891 | ||
|  | 2cd722d751 | ||
|  | 49f5b4fa5c | ||
|  | 67baf465f4 | ||
|  | ee7666ddea | ||
|  | 02fc41ff1c | ||
|  | d07a9d2ef8 | ||
|  | 3622ebfabd | ||
|  | 70b320633f | ||
|  | f30208f345 | ||
|  | 5bcc454678 | ||
|  | 473110568f | ||
|  | 88ca0f8196 | ||
|  | a171005010 | ||
|  | f56ad2fa58 | ||
|  | c9dc441915 | ||
|  | a0d255369a | ||
|  | 40b0a15b35 | ||
|  | b98b06ff79 | ||
|  | a448c9aebf | ||
|  | b3f462a39d | ||
|  | 7ce34ca019 | ||
|  | 719bb53c3a | ||
|  | 214415969f | ||
|  | 7431b1f123 | ||
|  | d8ffa843a9 | ||
|  | a69db231cc | ||
|  | c17f94422f | ||
|  | b4777f7f4f | ||
|  | a57d9a9303 | ||
|  | 5e70e1bcb2 | ||
|  | 0c43787996 | ||
|  | dc310b99f9 | ||
|  | e98c5e10bc | ||
|  | f1b1090263 | ||
|  | 6efd6faa3f | ||
|  | 1e4d48d371 | ||
|  | 93a2adb3e6 | ||
|  | a66d516777 | ||
|  | 7a97d42338 | ||
|  | b66cdc8fa0 | ||
|  | 67f43b2aad | ||
|  | d143e50238 | ||
|  | e27439be6a | ||
|  | 2ad5ffbda2 | ||
|  | dae9e662a5 | ||
|  | f22737d6a4 | ||
|  | a458d5a176 | ||
|  | d92ed04538 | ||
|  | 80b3df8953 | ||
|  | bcf83ec761 | ||
|  | e44e72bce3 | ||
|  | 35f2781518 | ||
|  | dc5512e403 | ||
|  | 48ef176e28 | ||
|  | 1aa2b86df3 | ||
|  | 73026047e9 | ||
|  | 6c2c33cac8 | ||
|  | d593f7e04b | ||
|  | 6c599ef506 | ||
|  | f48a0b7b7d | ||
|  | d9f538170b | ||
|  | 1785ced655 | ||
|  | e155e1fa86 | ||
|  | e28fab0550 | ||
|  | fb0dd2c1ca | ||
|  | 6e89e736b7 | ||
|  | 634b874c46 | ||
|  | 9d16364394 | ||
|  | daeecef59e | ||
|  | 8131f0a752 | ||
|  | f4ea1ad517 | ||
|  | f34e8a0ff6 | ||
|  | 4209d61b13 | ||
|  | fa83fba637 | ||
|  | af86aee970 | ||
|  | f26f1a526c | ||
|  | 7cb46d0761 | ||
|  | 0cb4070364 | ||
|  | bc008c2597 | ||
|  | a1d142d3a4 | ||
|  | aa00dc1031 | ||
|  | 592c654916 | ||
|  | 5021b10535 | ||
|  | 43d6e64cfa | ||
|  | 8d21e5f3c1 | ||
|  | fbe5df84c0 | ||
|  | caff44c663 | ||
|  | d6edef98c6 | ||
|  | e0d2fab3c3 | ||
|  | 9867e918fa | ||
|  | e6374ab425 | ||
|  | e116bb9227 | ||
|  | f1a1aa54d8 | ||
|  | 574f3c23d3 | ||
|  | c31d6a6898 | ||
|  | 44a2a164c0 | ||
|  | a7ca9950fc | ||
|  | e0dd33e6be | ||
|  | 2e718e1130 | ||
|  | ede9fcfb00 | ||
|  | a3d43b77ca | ||
|  | e2b32b4bb3 | ||
|  | 025c16c95d | ||
|  | 000eff73cc | ||
|  | 254efdde79 | ||
|  | f0d4e76418 | ||
|  | ba7101ff92 | ||
|  | a2457df45e | ||
|  | 305540f0fd | ||
|  | c2928d8a57 | ||
|  | 7451244cd2 | ||
|  | d935b5764a | ||
|  | f3af76e38c | ||
|  | a7631223a3 | ||
|  | 8aae4f0ed0 | ||
|  | 542049f252 | ||
|  | 9f3394dc6d | ||
|  | 06f5dc6ad7 | ||
|  | dc3b09c218 | ||
|  | ad15781d8f | ||
|  | ea53612822 | ||
|  | c3a065dd33 | ||
|  | 5cb2812231 | ||
|  | f8904a5504 | ||
|  | eb1df23e68 | ||
|  | e5648a4af9 | ||
|  | a246154961 | ||
|  | ce44843e27 | ||
|  | 1a54dad643 | ||
|  | 940dfff625 | ||
|  | c2b15183cb | ||
|  | 27e8aa9c68 | ||
|  | e1d8c6516a | ||
|  | eba81e368b | ||
|  | 74a3fd7596 | ||
|  | eeb5a83e98 | ||
|  | d47134bbf1 | ||
|  | ee725354db | ||
|  | 985bfd22de | ||
|  | 0d35e3a3e9 | ||
|  | d94a191656 | ||
|  | 0eafa4acd8 | ||
|  | f27a53653b | ||
|  | 3b60adc8da | ||
|  | 626a3369b5 | ||
|  | 4244e7569b | ||
|  | ef4b32aca7 | ||
|  | dcd23a0b4d | ||
|  | 5447c6e947 | ||
|  | f1b97fbc8b | ||
|  | 4c8dfc3fc2 | ||
|  | ceece5a7e2 | ||
|  | 7e6b035ca2 | ||
|  | fbc46a52af | ||
|  | 8d2e7b4372 | ||
|  | e7da9144f5 | ||
|  | 2128e169f3 | ||
|  | 8410d64daa | ||
|  | b2f78fadd9 | ||
|  | 3656323f25 | ||
|  | 2fe1c20475 | ||
|  | 0fb976a80a | ||
|  | 3cf62de753 | ||
|  | 06119b306d | ||
|  | 0493bbbc76 | ||
|  | 4c9e90732e | ||
|  | 35f084ba76 | ||
|  | f28f336026 | ||
|  | 122d75f677 | ||
|  | 12f6a3f5a3 | ||
|  | 5d44e1d6ca | ||
|  | 04592c876b | ||
|  | c0571beec8 | ||
|  | 1302316eb0 | ||
|  | 18d8008b89 | ||
|  | 4670f09a67 | ||
|  | 159ef12ed7 | ||
|  | 7a760f5640 | ||
|  | 2b6c42a56c | ||
|  | ab4ff99105 | ||
|  | 774895ec8c | ||
|  | c5ce96c391 | ||
|  | b4a98a4000 | ||
|  | 5f0d86f509 | ||
|  | c96a1b00cf | ||
|  | 1eb6436682 | ||
|  | a84e1f17bb | ||
|  | 3ffc9dffc2 | ||
|  | 048c84ab95 | ||
|  | a7470360d2 | ||
|  | 50f1ca91d4 | ||
|  | 0d37e1cd98 | ||
|  | 9aa77bb3c9 | ||
|  | fd11244966 | ||
|  | d060da094f | ||
|  | 306f9c5ffd | ||
|  | 5ef5611682 | ||
|  | ebdd2d730c | ||
|  | 1ddf8b3159 | ||
|  | a6bc870815 | ||
|  | 56cd73823e | ||
|  | 6299015039 | ||
|  | 11b7cfb5ff | ||
|  | 367f49ce1c | ||
|  | 8165131419 | ||
|  | e402157b4d | ||
|  | 967da7944f | ||
|  | 89f1c21f20 | ||
|  | 7e706190a5 | ||
|  | 36a3770673 | ||
|  | bc92f78afb | ||
|  | f7e22d2b8b | ||
|  | 0b1e11ba1f | ||
|  | 10e0b1daec | ||
|  | 731d8fc6be | ||
|  | f6d0b53ae5 | ||
|  | 0efb90deb6 | ||
|  | b16eabd2b6 | ||
|  | f8350409ad | ||
|  | 5b498bd8d6 | ||
|  | 941042d0ba | ||
|  | 9251ce312b | ||
|  | 96a964a183 | ||
|  | 9e513e08ae | ||
|  | 9dfee83e68 | ||
|  | 7cde979736 | ||
|  | 870ff1d4d9 | ||
|  | 52c162a478 | ||
|  | ddd11c7ed2 | ||
|  | 2c119dea47 | ||
|  | ebd1561682 | ||
|  | 3ccc495c75 | ||
|  | 0eda7a5a3c | ||
|  | f2c16452c6 | ||
|  | a2c429a4a5 | ||
|  | 4a71c5b424 | ||
|  | 268dd80cd0 | ||
|  | 3002e79c98 | ||
|  | 5eab348e82 | ||
|  | 1cdbade761 | ||
|  | 8c9afbd278 | ||
|  | cd73654683 | ||
|  | 9654fe0d8d | ||
|  | 3d49c33c6a | ||
|  | e58b3390aa | ||
|  | 92a1f5736b | ||
|  | 00a57f6cea | ||
|  | 1c345edc49 | ||
|  | 7aa1f47378 | ||
|  | 473d5ead7b | ||
|  | 68f760b563 | ||
|  | 9c1cd81adb | ||
|  | 85b81fb12a | ||
|  | 5d7444c115 | ||
|  | b0c1ec04b5 | ||
|  | 5cfd8909a8 | ||
|  | 6e2d2f33de | ||
|  | 5e65d27832 | ||
|  | 36993097b4 | ||
|  | 2447349383 | ||
|  | 7765f272ac | ||
|  | 13d8dfdb5f | ||
|  | 5e94637adc | ||
|  | ac6e793bbe | ||
|  | d0d9c3ea26 | ||
|  | f7bc58a767 | ||
|  | bafdf0381a | ||
|  | 3fc5dc8523 | ||
|  | df4dc3492c | ||
|  | 10731b0fd8 | ||
|  | cb9166aba4 | ||
|  | fe62c3aacb | ||
|  | c60ea40828 | ||
|  | c59ea26845 | ||
|  | 9bd8b3e9a5 | ||
|  | 5271f3b4a0 | ||
|  | 8a7b619b77 | ||
|  | 88f96b0838 | ||
|  | 1e1e48732a | ||
|  | 3537897fc5 | ||
|  | 3653981416 | ||
|  | 94d1e566c0 | ||
|  | a692316293 | ||
|  | e2f3406e89 | ||
|  | 81c7007f80 | ||
|  | e4f38b5665 | ||
|  | 14b6c471cf | ||
|  | 0d0befe23e | ||
|  | efad628a87 | ||
|  | c16e6d74e6 | ||
|  | 80db9e7716 | ||
|  | 7cf2a3e978 | ||
|  | 681b74a41c | ||
|  | d39d10b9fb | ||
|  | dff44ef74e | ||
|  | 485047f20b | ||
|  | 6affbbe865 | ||
|  | e3600ef4de | ||
|  | f0eaec98c7 | ||
|  | 6dcd7006d0 | ||
|  | 5de4812477 | ||
|  | d5b28356bc | ||
|  | 76fddd0db0 | ||
|  | 1108586303 | ||
|  | 3f49923298 | ||
|  | c277be8b6b | ||
|  | 6e083fa6a1 | ||
|  | 073091a06e | ||
|  | 03bfd01862 | ||
|  | 539f01d08e | ||
|  | dcf3c86dce | ||
|  | ec639cd6e9 | ||
|  | 420376d036 | ||
|  | 51e50bf0a9 | ||
|  | c2d77f51bb | ||
|  | b4d87d9128 | ||
|  | 4401a309ee | ||
|  | b562e209d1 | ||
|  | 3a85422e8f | ||
|  | e45397c975 | ||
|  | 1f9ec0c888 | ||
|  | f8ee470e70 | ||
|  | d02de0798f | ||
|  | 6fe074fb13 | ||
|  | 4db339c5f4 | ||
|  | a525764359 | ||
|  | f970d5878a | ||
|  | cc0a2cbc6f | ||
|  | add0b463f5 | ||
|  | d80b1a7749 | ||
|  | 6186691259 | ||
|  | b451cc567d | ||
|  | 757ff31661 | ||
|  | 97a98f0045 | ||
|  | 8f05896bc9 | ||
|  | da7a8939df | ||
|  | b6977a88ea | ||
|  | eafbc7f20d | ||
|  | d92f992c01 | ||
|  | 20a5d9051d | ||
|  | c9a5710554 | ||
|  | f10e946896 | ||
|  | 2f19b22bb2 | ||
|  | d134e11c6d | ||
|  | 63edd16a92 | ||
|  | 37740dc010 | ||
|  | 04b85ddbf2 | ||
|  | 836dc96f67 | ||
|  | 49a7542b14 | ||
|  | a84ffce5a0 | ||
|  | 210b3e5192 | ||
|  | 5f1d5ea056 | ||
|  | 19a7372ff9 | ||
|  | cc5b60b004 | ||
|  | b06f9dbf8d | ||
|  | d9b8ee7895 | ||
|  | e9ff655b0e | ||
|  | d58341d7ae | ||
|  | 669d21a114 | ||
|  | 7e980a16d0 | ||
|  | 47df8deb58 | ||
|  | dd006a502e | ||
|  | 782d48594a | ||
|  | 07d3e52e6a | ||
|  | fc1ce6d39b | ||
|  | 32d5c0c946 | ||
|  | dfabfce01b | ||
|  | 74f3f4eb15 | ||
|  | 20cb0285f0 | ||
|  | faf840f924 | ||
|  | 165bea5bb9 | ||
|  | f7515cfca8 | ||
|  | a762a10dec | ||
|  | a192029901 | ||
|  | 67182713d9 | ||
|  | e9464e32db | ||
|  | 2d6ae16912 | ||
|  | f9cd8b1841 | ||
|  | 41a698b442 | ||
|  | 9f58bc9207 | ||
|  | d36f6e7f24 | ||
|  | eeb672feb9 | ||
|  | 063a162ce0 | ||
|  | 3e4a900279 | ||
|  | 43327ea4e1 | ||
|  | 0d2e84b16b | ||
|  | 3c78757778 | ||
|  | d0245bb5ba | ||
|  | 3477b0107a | ||
|  | 8df9ff90cb | ||
|  | d6b4ca7a98 | ||
|  | 2e18199eb2 | ||
|  | 025e17701b | ||
|  | 156ca44a13 | ||
|  | 39dac7d4db | ||
|  | 9ca632d518 | ||
|  | 4177fc6df2 | ||
|  | d90890c08e | ||
|  | 1ca098c402 | ||
|  | 3208a7f15d | ||
|  | 8eda52e8e0 | ||
|  | 5b161b7445 | ||
|  | 8c1f8e54cd | ||
|  | 03d3c26a99 | ||
|  | 0cbd3663e4 | ||
|  | f182daa85e | ||
|  | de2f774e85 | ||
|  | 9d9a4afee9 | ||
|  | 0ea363c7fc | ||
|  | d7ee47ee25 | ||
|  | eb1b6e34c7 | ||
|  | 621b2b3f72 | ||
|  | 83da08ef7d | ||
|  | 9f551121fb | ||
|  | ba48dfb4bf | ||
|  | ed2ea24b75 | ||
|  | eefbd3f597 | ||
|  | e38bf63be0 | ||
|  | e7ba5eb160 | ||
|  | fff27f9b87 | ||
|  | d58f594c17 | ||
|  | 9797d7a7fb | ||
|  | c8b65317ef | ||
|  | 3a6dc77d36 | ||
|  | 4f70c27b56 | ||
|  | ea46edf50a | ||
|  | e5e88d792e | ||
|  | 6d68ad735c | ||
|  | c44b98a7e1 | ||
|  | 445f9453c4 | ||
|  | 3364e040c8 | ||
|  | 692f00864d | ||
|  | 344dc64df8 | ||
|  | 473425a36a | ||
|  | 3ba58ebaae | ||
|  | 2c7b12c022 | ||
|  | 17eeeb7536 | ||
|  | de5fbfde2c | ||
|  | f5d02e1b10 | ||
|  | e508625935 | ||
|  | 0b177ec4c1 | ||
|  | 87c965edd3 | ||
|  | 72dd9daa23 | ||
|  | a68529fba8 | ||
|  | 06681a453f | ||
|  | 5907dde4a8 | ||
|  | 8e038dd563 | ||
|  | 50905ab459 | ||
|  | 7bb9c7d47f | ||
|  | 5c45eee817 | ||
|  | 0f9e4ef352 | ||
|  | 85173d188b | ||
|  | d9ed33d1b1 | ||
|  | e6ac8cab53 | ||
|  | f890ebd0f4 | ||
|  | e537369d98 | ||
|  | 9bbd8dbe62 | ||
|  | 09a5f5c8f3 | ||
|  | b9e0f52526 | ||
|  | 1cdf71b647 | ||
|  | 3aff461039 | ||
|  | bf74d7537c | ||
|  | 0c2fb6807e | ||
|  | b9c9d127a2 | ||
|  | 286beca6c5 | ||
|  | 3a1521a34e | ||
|  | c5b047d0cd | ||
|  | 485b811bd0 | ||
|  | f335591045 | ||
|  | 1c10f3020b | ||
|  | 3074dad293 | ||
|  | 42f506adc6 | ||
|  | 50b755db0c | ||
|  | 420c3e0073 | ||
|  | 4a57fc33e4 | ||
|  | 25cdf16cc0 | ||
|  | 7f732459a1 | ||
|  | 9cc02d4dbe | ||
|  | c528ac09d6 | ||
|  | 1a131ff120 | ||
|  | accdd82970 | ||
|  | 3e8f02c64b | ||
|  | 3425264077 | ||
|  | 148f8b8a3a | ||
|  | 74343841e4 | ||
|  | 3b3738b36b | ||
|  | b15c3f6a3f | ||
|  | 2459f9b0aa | ||
|  | 6ff1bd9b3c | ||
|  | 1bc2d2ec37 | ||
|  | d7fd6a4628 | ||
|  | 9236f365fa | ||
|  | 90d22c2a28 | ||
|  | c9f6e6b62a | ||
|  | 260d9377f5 | ||
|  | 22d1ce6319 | ||
|  | 6997e02476 | ||
|  | 155d79ff4d | ||
|  | 452cd125fa | ||
|  | e62c35b040 | ||
|  | d5ec3c6a31 | ||
|  | ad983dc279 | ||
|  | bb15bf8d13 | ||
|  | 94adc207ad | ||
|  | 376d1c97ab | ||
|  | 4fe87b40da | ||
|  | b10d76cf4b | ||
|  | 3bdc9a2f09 | ||
|  | 9d52e18659 | ||
|  | f6f7c12f0e | ||
|  | 219b28c97b | ||
|  | 3598fe0fb4 | ||
|  | f9dd051ec9 | ||
|  | 68e4a27aaf | ||
|  | b849c719a8 | ||
|  | 59e7617e82 | ||
|  | b5e868655e | ||
|  | 027b3d36de | ||
|  | 653c4259ee | ||
|  | 9f5ab8149f | ||
|  | 66c6d14f7a | ||
|  | 2c0fc142a3 | ||
|  | 003454573c | ||
|  | aa5a9ff1f4 | ||
|  | 28ef54986d | ||
|  | 0da2dfd191 | ||
|  | 787fc1cd8b | ||
|  | dfdc0d92c3 | ||
|  | f265915aa2 | ||
|  | 4228d06934 | ||
|  | 1a93b9b226 | ||
|  | 363e50abbe | ||
|  | b8d53a6f0d | ||
|  | 4b45c0cd14 | ||
|  | e7c0da38c2 | ||
|  | 8706fbe461 | ||
|  | 9ca96e4e17 | ||
|  | 99fe1da345 | ||
|  | 1986e82783 | ||
|  | 7073b9d395 | ||
|  | f2049e9c18 | ||
|  | f0f1308465 | ||
|  | 7d90aa76ff | ||
|  | 3cc2c617fd | ||
|  | c31488add9 | ||
|  | 3d5b6ae332 | ||
|  | 59826c8cfd | ||
|  | 6f29d12386 | ||
|  | 0a89899ad0 | ||
|  | e4af0e361a | ||
|  | 31ec7907b5 | ||
|  | 12f3f8c694 | ||
|  | 79098e997e | ||
|  | dc1849bad5 | ||
|  | e2d826c412 | ||
|  | e6d796832e | ||
|  | 6f0a6df4f6 | ||
|  | 7a877a00d5 | ||
|  | e8604d100e | ||
|  | 1647441ce8 | ||
|  | 9f8d6b3a00 | ||
|  | 0bfc96e459 | ||
|  | 3425574ddc | ||
|  | 4b2ad25405 | ||
|  | 3ce163b1a0 | ||
|  | 7c1ee28f13 | ||
|  | 2645e43da1 | ||
|  | 59bfe551a3 | ||
|  | 6a31736644 | ||
|  | e2c78047b1 | ||
|  | 6a4351e44f | ||
|  | adb60ef1ac | ||
|  | 3090adac04 | ||
|  | b9253d86cc | ||
|  | ab4d4e6230 | ||
|  | 7cd38c56c6 | ||
|  | 864053615b | ||
|  | db2366f112 | ||
|  | 4defc82192 | ||
|  | 5949970a95 | ||
|  | 0ea4abda81 | ||
|  | 5c6035d636 | ||
|  | a2183e3dcc | ||
|  | 99637151b5 | ||
|  | a8e787c120 | ||
|  | 53339c7c72 | ||
|  | 3534bf7d70 | ||
|  | 1cf3989664 | ||
|  | fd296918da | ||
|  | 8ad1f03dc5 | ||
|  | fe7e17dbd5 | ||
|  | d582394a42 | ||
|  | 02ef0df019 | ||
|  | 0dfd6aa518 | ||
|  | 0b23bc9cf2 | ||
|  | f108c4288e | ||
|  | 9b9696aefd | ||
|  | 576e198ece | ||
|  | 52f85aab18 | ||
|  | ab60fd0490 | ||
|  | d79ae30f31 | ||
|  | f27debe7f9 | ||
|  | 735e043ff6 | ||
|  | 6e7f2b73cf | ||
|  | d645ce9745 | ||
|  | 7c08c140da | ||
|  | 81d402dc17 | ||
|  | 966fa12358 | ||
|  | 87792e1921 | ||
|  | 4c8296acc6 | ||
|  | 9989da07ed | ||
|  | 1c5e6a3425 | ||
|  | eedf908770 | ||
|  | 5c9ef41403 | ||
|  | 0bf2ad5b67 | ||
|  | a0e3f382cd | ||
|  | f09c39b5d7 | ||
|  | 89c67bf259 | ||
|  | ea666d4607 | ||
|  | b8af154439 | ||
|  | f594ece32a | ||
|  | 03beb6852a | ||
|  | ab9e9a3329 | ||
|  | a4b09344af | ||
|  | 8cb8aa392c | ||
|  | 3255519792 | ||
|  | 7e64bb2503 | ||
|  | 86a78402c3 | ||
|  | ba276452fb | ||
|  | 4ffa8d0124 | ||
|  | 4bc5082681 | ||
|  | 0e3c34e1da | ||
|  | 658b3784ae | ||
|  | 0526f577ff | ||
|  | bb1b9bc1d3 | ||
|  | b1eeb77ddc | ||
|  | 999d4a7676 | ||
|  | 1b80193aac | ||
|  | be8d39a48c | ||
|  | a2f3d70f28 | ||
|  | 676a7bf712 | ||
|  | e990a6c70c | ||
|  | 90fa0f6c4a | ||
|  | 22010d7d95 | ||
|  | 66279bd90f | ||
|  | 19da228855 | ||
|  | 9e67941bad | ||
|  | 0454fc74e9 | ||
|  | 2f6b1c7611 | ||
|  | f00bed6058 | ||
|  | 529c522594 | ||
|  | 2bb9493fcf | ||
|  | 839ed8a64a | ||
|  | 500eb920e4 | ||
|  | 017a31ffd0 | ||
|  | 83b961c84d | ||
|  | fa07423ca5 | ||
|  | dd4af2df81 | ||
|  | 44bd8cb85b | ||
|  | 52d80ac23c | ||
|  | 43a5d73e14 | ||
|  | abc764951d | ||
|  | 9cc6164026 | ||
|  | 475488b9f2 | ||
|  | 95b1783834 | ||
|  | 12c8b5c0b9 | ||
|  | f99b7a811b | ||
|  | 0575abab23 | ||
|  | 9eebcf7beb | ||
|  | ed74477150 | ||
|  | 2801b38c75 | ||
|  | dc3fea875e | ||
|  | aab8c2b687 | ||
|  | 3577773af3 | ||
|  | dd023edc0f | ||
|  | 8ac9e6dc19 | ||
|  | f45d4d781d | ||
|  | c95652d6a8 | ||
|  | 97b37f75d3 | ||
|  | 95dae48778 | ||
|  | 73635033bd | ||
|  | c1619d2a62 | ||
|  | b87ef982f6 | ||
|  | 91aa90ad4a | ||
|  | 4b3cea9e78 | ||
|  | 2420b5e937 | ||
|  | f23a976bea | ||
|  | 4226cd08f1 | ||
|  | 7a230f1693 | ||
|  | a43d0d4612 | ||
|  | 78a40a0c70 | ||
|  | 2c69d8f0b0 | ||
|  | 0018c38b83 | ||
|  | 8df81571fc | ||
|  | d1add62a06 | ||
|  | c419f3379a | ||
|  | 69d57209f7 | ||
|  | 7ca81d6fb8 | ||
|  | 8a046bfa5d | ||
|  | 3628a7653c | ||
|  | 48f988acd7 | ||
|  | 6526923345 | ||
|  | 24fd1acce6 | ||
|  | cbb9235dc5 | ||
|  | 19ec2c9bc9 | ||
|  | 6459d4c0b6 | ||
|  | 1304f2721f | ||
|  | 8bde0c0e53 | ||
|  | 598ffd3e5c | ||
|  | 1a4533a9cf | ||
|  | 601f0eb168 | ||
|  | 3070e0bf5d | ||
|  | 83c11a9834 | ||
|  | 5c912b930e | ||
|  | 1b17fb0ae7 | ||
|  | d83e67c121 | ||
|  | ae39ed94c9 | ||
|  | 1e51180d42 | ||
|  | 87ba69d02e | ||
|  | 8879d5560b | ||
|  | c1621ee39c | ||
|  | b0aa98edb4 | ||
|  | a7a2fe0216 | ||
|  | 8e50f5fa3c | ||
|  | 31793520bf | ||
|  | 0b6b0368c5 | ||
|  | d1d30a9280 | ||
|  | 420c6f2d1e | ||
|  | 34f06c4971 | ||
|  | 9cc4bbd49d | ||
|  | f66b312869 | ||
|  | 2405ba8708 | ||
|  | a91b6bff8b | ||
|  | 450dc11a68 | ||
|  | 1ce2f84ce5 | ||
|  | f55b241cfa | ||
|  | 34d08ce8ef | ||
|  | 4f5aa8c43b | ||
|  | 27b375060d | ||
|  | cbfdc401f7 | ||
|  | b58bf3e0ce | ||
|  | 1fff7e9aca | ||
|  | 494b981b13 | ||
|  | dd93995bd0 | ||
|  | b3bb4add9c | ||
|  | d305e71c27 | ||
|  | 0d92baa670 | ||
|  | 7a1b110f62 | ||
|  | db8df057ce | ||
|  | 5d8ffded40 | ||
|  | 07f3e5356d | ||
|  | 1ece62f960 | ||
|  | 056c604dc3 | ||
|  | 2d08eec093 | ||
|  | 614b590551 | ||
|  | 6d90ce250a | ||
|  | ea31846a19 | ||
|  | e6317776c1 | ||
|  | efeaba39a4 | ||
|  | 1a97dfd479 | ||
|  | 9fecf2b303 | ||
|  | 3d0d2f48ad | ||
|  | 581605e0e2 | ||
|  | 45d3a7f6ff | ||
|  | 7ca2ea0766 | ||
|  | 89220c142b | ||
|  | c73ce3d220 | ||
|  | b0f127af4e | ||
|  | 766d54795f | ||
|  | bd41c6eea4 | ||
|  | 2435786713 | ||
|  | 9e7ea64bd2 | ||
|  | 89a6eee6af | ||
|  | 2ec1476e50 | ||
|  | 2d9b581f34 | ||
|  | 5bb63f645b | ||
|  | a856c7cc37 | ||
|  | 26db9d8a9d | ||
|  | 8060179f6d | ||
|  | 77ebd87fed | ||
|  | e4bc92235d | ||
|  | 27a4d83ce8 | ||
|  | ece9b902f8 | ||
|  | 65a2f8a68b | ||
|  | 9c212306b8 | ||
|  | 1fdc7ce6bb | ||
|  | 0b22c140c5 | ||
|  | 944aa45459 | ||
|  | c9842ba13a | ||
|  | 8840680303 | ||
|  | 376b9b1316 | ||
|  | 54bb1cb3d9 | ||
|  | 43468b474e | ||
|  | 28a957c684 | ||
|  | ec5ddbf391 | ||
|  | bab186e195 | ||
|  | bc7e874476 | ||
|  | 97114b5948 | ||
|  | 45e015d71d | ||
|  | 0ff6531953 | ||
|  | ba298c3cfc | ||
|  | 0479bea40b | ||
|  | a536097804 | ||
|  | bbefd0fdf9 | ||
|  | 2aa8b04c21 | ||
|  | aeebdfec51 | ||
|  | debfcdf498 | ||
|  | 5c4b33e8e6 | ||
|  | eb54037b66 | ||
|  | f48af8db3b | ||
|  | 97c5b957dd | ||
|  | 95e7397803 | ||
|  | 43a989978a | ||
|  | 27734a7c26 | ||
|  | dd786d6fc4 | ||
|  | be1c28fc45 | ||
|  | 20e41b3523 | ||
|  | e07ecc5cf8 | ||
|  | 3360b72531 | ||
|  | 233b13d670 | ||
|  | 5bcbb4fdaa | ||
|  | dbe2f5f2b8 | ||
|  | ca8b58d66d | ||
|  | f80f0b416f | ||
|  | d7765511ee | ||
|  | 0240a09056 | ||
|  | ab15c4eec9 | ||
|  | 4ce1ba81a6 | ||
|  | 530440b333 | ||
|  | b80fda36af | ||
|  | 42d24263ef | ||
|  | 1e2797e7ce | ||
|  | f7075766fc | ||
|  | 5647ca70bb | ||
|  | 2b8aa6bafc | ||
|  | 410443471c | ||
|  | 0bb9781b91 | ||
|  | 2769d6d7ca | ||
|  | 120b9433c2 | ||
|  | 605092bd88 | ||
|  | a4a8c94374 | ||
|  | 0e93f6c0db | ||
|  | aa2add39ad | ||
|  | a928047147 | ||
|  | c474ca0f13 | ||
|  | 88dc64653e | ||
|  | 5f4b70f3a9 | ||
|  | 51b429e5b0 | ||
|  | 360624eb6e | ||
|  | d9d2291837 | ||
|  | cbdf816232 | ||
|  | 2d71eb8a18 | ||
|  | 64d2532ce9 | ||
|  | 0376910f33 | ||
|  | 6d503119a1 | ||
|  | bfae93e57e | ||
|  | 49a66ba81a | ||
|  | a1d43fecd9 | ||
|  | d0e42a4798 | ||
|  | 2a34358abc | ||
|  | fd2bb8ea45 | ||
|  | 98e5daa0e0 | ||
|  | ad2e119282 | ||
|  | c20c30d8d1 | ||
|  | 66d215c9c1 | ||
|  | 46e088d379 | ||
|  | bbdd15161a | ||
|  | ea9dc8cfb8 | ||
|  | 6bd2ccc9bf | ||
|  | 56327c6b58 | ||
|  | 712e8a51e4 | ||
|  | 421f324f9e | ||
|  | 8fe4a70299 | ||
|  | 3af6d0dbfd | ||
|  | e2bef076d3 | ||
|  | 1bf9f28f4b | ||
|  | f1e7b97a93 | ||
|  | 8cfe13ad90 | ||
|  | 0f420abc8e | ||
|  | 3b5b715567 | ||
|  | 520051af25 | ||
|  | 7e376b40bb | ||
|  | fd18a48608 | ||
|  | 64860c6287 | ||
|  | 58635b24ba | ||
|  | 3ec9dfc108 | ||
|  | bd1572f11a | ||
|  | 540a0cc59c | ||
|  | 83eb4f6b16 | ||
|  | 95c58bd793 | ||
|  | 65591c7727 | ||
|  | 737cbf5f60 | ||
|  | 4c67cbb4b7 | ||
|  | ed2cc2a60b | ||
|  | 61411bb259 | ||
|  | fcdb0eff8f | ||
|  | 30d9347272 | 
							
								
								
									
										33
									
								
								.github/workflows/main.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										33
									
								
								.github/workflows/main.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,33 @@ | |||||||
|  | # This is a basic workflow to help you get started with Actions | ||||||
|  |  | ||||||
|  | name: CI | ||||||
|  |  | ||||||
|  | # Controls when the action will run. Triggers the workflow on push or pull request | ||||||
|  | # events but only for the master branch | ||||||
|  | on: | ||||||
|  |   push: | ||||||
|  |     branches: [ master ] | ||||||
|  |   pull_request: | ||||||
|  |     branches: [ master ] | ||||||
|  |  | ||||||
|  | # A workflow run is made up of one or more jobs that can run sequentially or in parallel | ||||||
|  | jobs: | ||||||
|  |   # This workflow contains a single job called "build" | ||||||
|  |   build: | ||||||
|  |     # The type of runner that the job will run on | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |  | ||||||
|  |     # Steps represent a sequence of tasks that will be executed as part of the job | ||||||
|  |     steps: | ||||||
|  |       # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it | ||||||
|  |       - uses: actions/checkout@v2 | ||||||
|  |  | ||||||
|  |       # Runs a single command using the runners shell | ||||||
|  |       - name: Run a one-line script | ||||||
|  |         run: echo Hello, world! | ||||||
|  |  | ||||||
|  |       # Runs a set of commands using the runners shell | ||||||
|  |       - name: Run a multi-line script | ||||||
|  |         run: | | ||||||
|  |           echo Add other actions to build, | ||||||
|  |           echo test, and deploy your project. | ||||||
							
								
								
									
										13
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										13
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -1,8 +1,15 @@ | |||||||
| .* |  | ||||||
| !.gitignore | !.gitignore | ||||||
| *~ | *~ | ||||||
| *.py[co] | *.py[co] | ||||||
| .*.sw[po] | .*.sw[po] | ||||||
|  | .cache/ | ||||||
|  | .coverage | ||||||
|  | .coveragerc | ||||||
|  | .env | ||||||
|  | .idea/ | ||||||
|  | .pytest_cache/ | ||||||
|  | .tox/ | ||||||
|  | .eggs/ | ||||||
| *.egg | *.egg | ||||||
| docs/.build | docs/.build | ||||||
| docs/_build | docs/_build | ||||||
| @@ -13,4 +20,6 @@ env/ | |||||||
| .settings | .settings | ||||||
| .project | .project | ||||||
| .pydevproject | .pydevproject | ||||||
| tests/bugfix.py | htmlcov/ | ||||||
|  | venv | ||||||
|  | venv3 | ||||||
|   | |||||||
							
								
								
									
										17
									
								
								.landscape.yml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										17
									
								
								.landscape.yml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,17 @@ | |||||||
|  | pylint: | ||||||
|  |     disable: | ||||||
|  |         # We use this a lot (e.g. via document._meta) | ||||||
|  |         - protected-access | ||||||
|  |  | ||||||
|  |     options: | ||||||
|  |         additional-builtins: | ||||||
|  |             # add long as valid built-ins. | ||||||
|  |             - long | ||||||
|  |  | ||||||
|  | pyflakes: | ||||||
|  |     disable: | ||||||
|  |         # undefined variables are already covered by pylint (and exclude long) | ||||||
|  |         - F821 | ||||||
|  |  | ||||||
|  | ignore-paths: | ||||||
|  |     - benchmark.py | ||||||
							
								
								
									
										12
									
								
								.pre-commit-config.yaml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										12
									
								
								.pre-commit-config.yaml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,12 @@ | |||||||
|  | fail_fast: false | ||||||
|  | repos: | ||||||
|  |     - repo: https://github.com/ambv/black | ||||||
|  |       rev: 19.10b0 | ||||||
|  |       hooks: | ||||||
|  |         - id: black | ||||||
|  |     - repo: https://gitlab.com/pycqa/flake8 | ||||||
|  |       rev: 3.8.0a2 | ||||||
|  |       hooks: | ||||||
|  |         - id: flake8 | ||||||
|  |           additional_dependencies: | ||||||
|  |             - flake8-import-order | ||||||
							
								
								
									
										20
									
								
								.readthedocs.yml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										20
									
								
								.readthedocs.yml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,20 @@ | |||||||
|  | # .readthedocs.yml | ||||||
|  | # Read the Docs configuration file | ||||||
|  | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details | ||||||
|  |  | ||||||
|  | # Required | ||||||
|  | version: 2 | ||||||
|  |  | ||||||
|  | # Build documentation in the docs/ directory with Sphinx | ||||||
|  | sphinx: | ||||||
|  |   configuration: docs/conf.py | ||||||
|  |  | ||||||
|  | # Optionally set the version of Python and requirements required to build your docs | ||||||
|  | python: | ||||||
|  |   version: 3.7 | ||||||
|  |   install: | ||||||
|  |     - requirements: docs/requirements.txt | ||||||
|  |     # docs/conf.py is importing mongoengine | ||||||
|  |     # so mongoengine needs to be installed as well | ||||||
|  |     - method: setuptools | ||||||
|  |       path: . | ||||||
							
								
								
									
										107
									
								
								.travis.yml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										107
									
								
								.travis.yml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,107 @@ | |||||||
|  | # For full coverage, we'd have to test all supported Python, MongoDB, and | ||||||
|  | # PyMongo combinations. However, that would result in an overly long build | ||||||
|  | # with a very large number of jobs, hence we only test a subset of all the | ||||||
|  | # combinations. | ||||||
|  | # * Python3.7, MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, | ||||||
|  | # Other combinations are tested. See below for the details or check the travis jobs | ||||||
|  |  | ||||||
|  | # We should periodically check MongoDB Server versions supported by MongoDB | ||||||
|  | # Inc., add newly released versions to the test matrix, and remove versions | ||||||
|  | # which have reached their End of Life. See: | ||||||
|  | # 1. https://www.mongodb.com/support-policy. | ||||||
|  | # 2. https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#python-driver-compatibility | ||||||
|  | # | ||||||
|  | # Reminder: Update README.rst if you change MongoDB versions we test. | ||||||
|  |  | ||||||
|  | language: python | ||||||
|  | dist: xenial | ||||||
|  | python: | ||||||
|  | - 3.6 | ||||||
|  | - 3.7 | ||||||
|  | - 3.8 | ||||||
|  | - 3.9 | ||||||
|  | - pypy3 | ||||||
|  |  | ||||||
|  | env: | ||||||
|  |   global: | ||||||
|  |     - MONGODB_3_4=3.4.19 | ||||||
|  |     - MONGODB_3_6=3.6.13 | ||||||
|  |     - MONGODB_4_0=4.0.13 | ||||||
|  |  | ||||||
|  |     - PYMONGO_3_4=3.4 | ||||||
|  |     - PYMONGO_3_6=3.6 | ||||||
|  |     - PYMONGO_3_9=3.9 | ||||||
|  |     - PYMONGO_3_11=3.11 | ||||||
|  |  | ||||||
|  |     - MAIN_PYTHON_VERSION=3.7 | ||||||
|  |   matrix: | ||||||
|  |     - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_11} | ||||||
|  |  | ||||||
|  | matrix: | ||||||
|  |   # Finish the build as soon as one job fails | ||||||
|  |   fast_finish: true | ||||||
|  |  | ||||||
|  |   include: | ||||||
|  |   - python: 3.7 | ||||||
|  |     env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6} | ||||||
|  |   - python: 3.7 | ||||||
|  |     env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9} | ||||||
|  |   - python: 3.7 | ||||||
|  |     env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_11} | ||||||
|  |   - python: 3.8 | ||||||
|  |     env: MONGODB=${MONGODB_4_0} PYMONGO=${PYMONGO_3_11} | ||||||
|  |  | ||||||
|  | install: | ||||||
|  |   # Install Mongo | ||||||
|  |   - wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz | ||||||
|  |   - tar xzf mongodb-linux-x86_64-${MONGODB}.tgz | ||||||
|  |   - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version | ||||||
|  |   # Install Python dependencies. | ||||||
|  |   - pip install --upgrade pip | ||||||
|  |   - pip install coveralls | ||||||
|  |   - pip install pre-commit | ||||||
|  |   - pip install tox | ||||||
|  |   # tox dryrun to setup the tox venv (we run a mock test). | ||||||
|  |   - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder" | ||||||
|  |  | ||||||
|  | before_script: | ||||||
|  |   - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data | ||||||
|  |   - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork | ||||||
|  |   # Run pre-commit hooks (black, flake8, etc) on entire codebase | ||||||
|  |   - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then pre-commit run -a; else echo "pre-commit checks only runs on py37"; fi | ||||||
|  |   - mongo --eval 'db.version();'    # Make sure mongo is awake | ||||||
|  |  | ||||||
|  | script: | ||||||
|  |   - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine" | ||||||
|  |  | ||||||
|  | after_success: | ||||||
|  |   - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi | ||||||
|  |  | ||||||
|  | notifications: | ||||||
|  |   irc: irc.freenode.org#mongoengine | ||||||
|  |  | ||||||
|  | # Only run builds on the master branch and GitHub releases (tagged as vX.Y.Z) | ||||||
|  | branches: | ||||||
|  |   only: | ||||||
|  |   - master | ||||||
|  |   - /^v.*$/ | ||||||
|  |  | ||||||
|  | # Whenever a new release is created via GitHub, publish it on PyPI. | ||||||
|  | deploy: | ||||||
|  |   provider: pypi | ||||||
|  |   user: the_drow | ||||||
|  |   password: | ||||||
|  |     secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek= | ||||||
|  |  | ||||||
|  |   # Create a source distribution and a pure python wheel for faster installs. | ||||||
|  |   distributions: "sdist bdist_wheel" | ||||||
|  |  | ||||||
|  |   # Only deploy on tagged commits (aka GitHub releases) and only for the parent | ||||||
|  |   # repo's builds running Python v3.7 along with PyMongo v3.x and MongoDB v3.4. | ||||||
|  |   # We run Travis against many different Python, PyMongo, and MongoDB versions | ||||||
|  |   # and we don't want the deploy to occur multiple times). | ||||||
|  |   on: | ||||||
|  |     tags: true | ||||||
|  |     repo: MongoEngine/mongoengine | ||||||
|  |     condition: ($PYMONGO = ${PYMONGO_3_11}) && ($MONGODB = ${MONGODB_3_4}) | ||||||
|  |     python: 3.7 | ||||||
							
								
								
									
										172
									
								
								AUTHORS
									
									
									
									
									
								
							
							
						
						
									
										172
									
								
								AUTHORS
									
									
									
									
									
								
							| @@ -8,15 +8,14 @@ Florian Schlachter <flori@n-schlachter.de> | |||||||
| Steve Challis <steve@stevechallis.com> | Steve Challis <steve@stevechallis.com> | ||||||
| Wilson Júnior <wilsonpjunior@gmail.com> | Wilson Júnior <wilsonpjunior@gmail.com> | ||||||
| Dan Crosta https://github.com/dcrosta | Dan Crosta https://github.com/dcrosta | ||||||
|  | Laine Herron https://github.com/LaineHerron | ||||||
|  |  | ||||||
| CONTRIBUTORS | CONTRIBUTORS | ||||||
|  |  | ||||||
| Dervived from the git logs, inevitably incomplete but all of whom and others | Derived from the git logs, inevitably incomplete but all of whom and others | ||||||
| have submitted patches, reported bugs and generally helped make MongoEngine | have submitted patches, reported bugs and generally helped make MongoEngine | ||||||
| that much better: | that much better: | ||||||
|  |  | ||||||
|  * Harry Marr |  | ||||||
|  * Ross Lawley |  | ||||||
|  * blackbrrr |  * blackbrrr | ||||||
|  * Florian Schlachter |  * Florian Schlachter | ||||||
|  * Vincent Driessen |  * Vincent Driessen | ||||||
| @@ -24,7 +23,7 @@ that much better: | |||||||
|  * flosch |  * flosch | ||||||
|  * Deepak Thukral |  * Deepak Thukral | ||||||
|  * Colin Howe |  * Colin Howe | ||||||
|  * Wilson Júnior |  * Wilson Júnior (https://github.com/wpjunior) | ||||||
|  * Alistair Roche |  * Alistair Roche | ||||||
|  * Dan Crosta |  * Dan Crosta | ||||||
|  * Viktor Kerkez |  * Viktor Kerkez | ||||||
| @@ -76,7 +75,7 @@ that much better: | |||||||
|  * Adam Parrish |  * Adam Parrish | ||||||
|  * jpfarias |  * jpfarias | ||||||
|  * jonrscott |  * jonrscott | ||||||
|  * Alice Zoë Bevan-McGregor |  * Alice Zoë Bevan-McGregor (https://github.com/amcgregor/) | ||||||
|  * Stephen Young |  * Stephen Young | ||||||
|  * tkloc |  * tkloc | ||||||
|  * aid |  * aid | ||||||
| @@ -97,3 +96,166 @@ that much better: | |||||||
|  * Shalabh Aggarwal |  * Shalabh Aggarwal | ||||||
|  * Chris Williams |  * Chris Williams | ||||||
|  * Robert Kajic |  * Robert Kajic | ||||||
|  |  * Jacob Peddicord | ||||||
|  |  * Nils Hasenbanck | ||||||
|  |  * mostlystatic | ||||||
|  |  * Greg Banks | ||||||
|  |  * swashbuckler | ||||||
|  |  * Adam Reeve | ||||||
|  |  * Anthony Nemitz | ||||||
|  |  * deignacio | ||||||
|  |  * Shaun Duncan | ||||||
|  |  * Meir Kriheli | ||||||
|  |  * Andrey Fedoseev | ||||||
|  |  * aparajita | ||||||
|  |  * Tristan Escalada | ||||||
|  |  * Alexander Koshelev | ||||||
|  |  * Jaime Irurzun | ||||||
|  |  * Alexandre González | ||||||
|  |  * Thomas Steinacher | ||||||
|  |  * Tommi Komulainen | ||||||
|  |  * Peter Landry | ||||||
|  |  * biszkoptwielki | ||||||
|  |  * Anton Kolechkin | ||||||
|  |  * Sergey Nikitin | ||||||
|  |  * psychogenic | ||||||
|  |  * Stefan Wójcik (https://github.com/wojcikstefan) | ||||||
|  |  * dimonb | ||||||
|  |  * Garry Polley | ||||||
|  |  * James Slagle | ||||||
|  |  * Adrian Scott | ||||||
|  |  * Peter Teichman | ||||||
|  |  * Jakub Kot | ||||||
|  |  * Jorge Bastida | ||||||
|  |  * Aleksandr Sorokoumov | ||||||
|  |  * Yohan Graterol | ||||||
|  |  * bool-dev | ||||||
|  |  * Russ Weeks | ||||||
|  |  * Paul Swartz | ||||||
|  |  * Sundar Raman | ||||||
|  |  * Benoit Louy | ||||||
|  |  * Loic Raucy (https://github.com/lraucy) | ||||||
|  |  * hellysmile | ||||||
|  |  * Jaepil Jeong | ||||||
|  |  * Daniil Sharou | ||||||
|  |  * Pete Campton | ||||||
|  |  * Martyn Smith | ||||||
|  |  * Marcelo Anton | ||||||
|  |  * Aleksey Porfirov (https://github.com/lexqt) | ||||||
|  |  * Nicolas Trippar | ||||||
|  |  * Manuel Hermann | ||||||
|  |  * Gustavo Gawryszewski | ||||||
|  |  * Max Countryman | ||||||
|  |  * caitifbrito | ||||||
|  |  * lcya86 刘春洋 | ||||||
|  |  * Martin Alderete (https://github.com/malderete) | ||||||
|  |  * Nick Joyce | ||||||
|  |  * Jared Forsyth | ||||||
|  |  * Kenneth Falck | ||||||
|  |  * Lukasz Balcerzak | ||||||
|  |  * Nicolas Cortot | ||||||
|  |  * Alex (https://github.com/kelsta) | ||||||
|  |  * Jin Zhang | ||||||
|  |  * Daniel Axtens | ||||||
|  |  * Leo-Naeka | ||||||
|  |  * Ryan Witt (https://github.com/ryanwitt) | ||||||
|  |  * Jiequan (https://github.com/Jiequan) | ||||||
|  |  * hensom (https://github.com/hensom) | ||||||
|  |  * zhy0216 (https://github.com/zhy0216) | ||||||
|  |  * istinspring (https://github.com/istinspring) | ||||||
|  |  * Massimo Santini (https://github.com/mapio) | ||||||
|  |  * Nigel McNie (https://github.com/nigelmcnie) | ||||||
|  |  * ygbourhis (https://github.com/ygbourhis) | ||||||
|  |  * Bob Dickinson (https://github.com/BobDickinson) | ||||||
|  |  * Michael Bartnett (https://github.com/michaelbartnett) | ||||||
|  |  * Alon Horev (https://github.com/alonho) | ||||||
|  |  * Kelvin Hammond (https://github.com/kelvinhammond) | ||||||
|  |  * Jatin Chopra (https://github.com/jatin) | ||||||
|  |  * Paul Uithol (https://github.com/PaulUithol) | ||||||
|  |  * Thom Knowles (https://github.com/fleat) | ||||||
|  |  * Paul (https://github.com/squamous) | ||||||
|  |  * Olivier Cortès (https://github.com/Karmak23) | ||||||
|  |  * crazyzubr (https://github.com/crazyzubr) | ||||||
|  |  * FrankSomething (https://github.com/FrankSomething) | ||||||
|  |  * Alexandr Morozov (https://github.com/LK4D4) | ||||||
|  |  * mishudark (https://github.com/mishudark) | ||||||
|  |  * Joe Friedl (https://github.com/grampajoe) | ||||||
|  |  * Daniel Ward (https://github.com/danielward) | ||||||
|  |  * Aniket Deshpande (https://github.com/anicake) | ||||||
|  |  * rfkrocktk (https://github.com/rfkrocktk) | ||||||
|  |  * Gustavo Andrés Angulo (https://github.com/woakas) | ||||||
|  |  * Dmytro Popovych (https://github.com/drudim) | ||||||
|  |  * Tom (https://github.com/tomprimozic) | ||||||
|  |  * j0hnsmith (https://github.com/j0hnsmith) | ||||||
|  |  * Damien Churchill (https://github.com/damoxc) | ||||||
|  |  * Jonathan Simon Prates (https://github.com/jonathansp) | ||||||
|  |  * Thiago Papageorgiou (https://github.com/tmpapageorgiou) | ||||||
|  |  * Omer Katz (https://github.com/thedrow) | ||||||
|  |  * Falcon Dai (https://github.com/falcondai) | ||||||
|  |  * Polyrabbit (https://github.com/polyrabbit) | ||||||
|  |  * Sagiv Malihi (https://github.com/sagivmalihi) | ||||||
|  |  * Dmitry Konishchev (https://github.com/KonishchevDmitry) | ||||||
|  |  * Martyn Smith (https://github.com/martynsmith) | ||||||
|  |  * Andrei Zbikowski (https://github.com/b1naryth1ef) | ||||||
|  |  * Ronald van Rij (https://github.com/ronaldvanrij) | ||||||
|  |  * François Schmidts (https://github.com/jaesivsm) | ||||||
|  |  * Eric Plumb (https://github.com/professorplumb) | ||||||
|  |  * Damien Churchill (https://github.com/damoxc) | ||||||
|  |  * Aleksandr Sorokoumov (https://github.com/Gerrrr) | ||||||
|  |  * Clay McClure (https://github.com/claymation) | ||||||
|  |  * Bruno Rocha (https://github.com/rochacbruno) | ||||||
|  |  * Norberto Leite (https://github.com/nleite) | ||||||
|  |  * Bob Cribbs (https://github.com/bocribbz) | ||||||
|  |  * Jay Shirley (https://github.com/jshirley) | ||||||
|  |  * David Bordeynik (https://github.com/DavidBord) | ||||||
|  |  * Axel Haustant (https://github.com/noirbizarre) | ||||||
|  |  * David Czarnecki (https://github.com/czarneckid) | ||||||
|  |  * Vyacheslav Murashkin (https://github.com/a4tunado) | ||||||
|  |  * André Ericson (https://github.com/aericson) | ||||||
|  |  * Mikhail Moshnogorsky (https://github.com/mikhailmoshnogorsky) | ||||||
|  |  * Diego Berrocal (https://github.com/cestdiego) | ||||||
|  |  * Matthew Ellison (https://github.com/seglberg) | ||||||
|  |  * Jimmy Shen (https://github.com/jimmyshen) | ||||||
|  |  * J. Fernando Sánchez (https://github.com/balkian) | ||||||
|  |  * Michael Chase (https://github.com/rxsegrxup) | ||||||
|  |  * Eremeev Danil (https://github.com/elephanter) | ||||||
|  |  * Catstyle Lee (https://github.com/Catstyle) | ||||||
|  |  * Kiryl Yermakou (https://github.com/rma4ok) | ||||||
|  |  * Matthieu Rigal (https://github.com/MRigal) | ||||||
|  |  * Charanpal Dhanjal (https://github.com/charanpald) | ||||||
|  |  * Emmanuel Leblond (https://github.com/touilleMan) | ||||||
|  |  * Breeze.Kay (https://github.com/9nix00) | ||||||
|  |  * Vicki Donchenko (https://github.com/kivistein) | ||||||
|  |  * Emile Caron (https://github.com/emilecaron) | ||||||
|  |  * Amit Lichtenberg (https://github.com/amitlicht) | ||||||
|  |  * Gang Li (https://github.com/iici-gli) | ||||||
|  |  * Lars Butler (https://github.com/larsbutler) | ||||||
|  |  * George Macon (https://github.com/gmacon) | ||||||
|  |  * Ashley Whetter (https://github.com/AWhetter) | ||||||
|  |  * Paul-Armand Verhaegen (https://github.com/paularmand) | ||||||
|  |  * Steven Rossiter (https://github.com/BeardedSteve) | ||||||
|  |  * Luo Peng (https://github.com/RussellLuo) | ||||||
|  |  * Bryan Bennett (https://github.com/bbenne10) | ||||||
|  |  * Gilb's Gilb's (https://github.com/gilbsgilbs) | ||||||
|  |  * Joshua Nedrud (https://github.com/Neurostack) | ||||||
|  |  * Shu Shen (https://github.com/shushen) | ||||||
|  |  * xiaost7 (https://github.com/xiaost7) | ||||||
|  |  * Victor Varvaryuk | ||||||
|  |  * Stanislav Kaledin (https://github.com/sallyruthstruik) | ||||||
|  |  * Dmitry Yantsen (https://github.com/mrTable) | ||||||
|  |  * Renjianxin (https://github.com/Davidrjx) | ||||||
|  |  * Erdenezul Batmunkh (https://github.com/erdenezul) | ||||||
|  |  * Andy Yankovsky (https://github.com/werat) | ||||||
|  |  * Bastien Gérard (https://github.com/bagerard) | ||||||
|  |  * Trevor Hall (https://github.com/tjhall13) | ||||||
|  |  * Gleb Voropaev (https://github.com/buggyspace) | ||||||
|  |  * Paulo Amaral (https://github.com/pauloAmaral) | ||||||
|  |  * Gaurav Dadhania (https://github.com/GVRV) | ||||||
|  |  * Yurii Andrieiev (https://github.com/yandrieiev) | ||||||
|  |  * Filip Kucharczyk (https://github.com/Pacu2) | ||||||
|  |  * Eric Timmons (https://github.com/daewok) | ||||||
|  |  * Matthew Simpson (https://github.com/mcsimps2) | ||||||
|  |  * Leonardo Domingues (https://github.com/leodmgs) | ||||||
|  |  * Agustin Barto (https://github.com/abarto) | ||||||
|  |  * Stankiewicz Mateusz (https://github.com/mas15) | ||||||
|  |  * Felix Schultheiß (https://github.com/felix-smashdocs) | ||||||
|   | |||||||
							
								
								
									
										105
									
								
								CONTRIBUTING.rst
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										105
									
								
								CONTRIBUTING.rst
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,105 @@ | |||||||
|  | Contributing to MongoEngine | ||||||
|  | =========================== | ||||||
|  |  | ||||||
|  | MongoEngine has a large `community | ||||||
|  | <https://raw.github.com/MongoEngine/mongoengine/master/AUTHORS>`_ and | ||||||
|  | contributions are always encouraged. Contributions can be as simple as | ||||||
|  | minor tweaks to the documentation. Please read these guidelines before | ||||||
|  | sending a pull request. | ||||||
|  |  | ||||||
|  | Bugfixes and New Features | ||||||
|  | ------------------------- | ||||||
|  |  | ||||||
|  | Before starting to write code, look for existing `tickets | ||||||
|  | <https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one | ||||||
|  | <https://github.com/MongoEngine/mongoengine/issues>`_ for your specific | ||||||
|  | issue or feature request. That way you avoid working on something | ||||||
|  | that might not be of interest or that has already been addressed. If in doubt | ||||||
|  | post to the `user group <http://groups.google.com/group/mongoengine-users>`_. | ||||||
|  |  | ||||||
|  | Supported Interpreters | ||||||
|  | ---------------------- | ||||||
|  |  | ||||||
|  | MongoEngine supports CPython 3.5 and newer as well as Pypy3. | ||||||
|  | Language features not supported by all interpreters can not be used. | ||||||
|  |  | ||||||
|  | Python3 codebase | ||||||
|  | ---------------------- | ||||||
|  |  | ||||||
|  | Since 0.20, the codebase is exclusively Python 3. | ||||||
|  |  | ||||||
|  | Earlier versions were exclusively Python2, and were relying on 2to3 to support Python3 installs. | ||||||
|  | Travis runs the tests against the main Python 3.x versions. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Style Guide | ||||||
|  | ----------- | ||||||
|  |  | ||||||
|  | MongoEngine's codebase is formatted with `black <https://github.com/python/black>`_, other tools like | ||||||
|  | flake8 are also used. Those tools will run as part of the CI and will fail in case the code is not formatted properly. | ||||||
|  |  | ||||||
|  | To install all development tools, simply run the following commands: | ||||||
|  |  | ||||||
|  | .. code-block:: console | ||||||
|  |  | ||||||
|  |     $ python -m pip install -r requirements-dev.txt | ||||||
|  |  | ||||||
|  |  | ||||||
|  | You can install `pre-commit <https://pre-commit.com/>`_ into your git hooks, | ||||||
|  | to automatically check and fix any formatting issue before creating a | ||||||
|  | git commit. | ||||||
|  |  | ||||||
|  | To enable ``pre-commit`` simply run: | ||||||
|  |  | ||||||
|  | .. code-block:: console | ||||||
|  |  | ||||||
|  |     $ pre-commit install | ||||||
|  |  | ||||||
|  | See the ``.pre-commit-config.yaml`` configuration file for more information | ||||||
|  | on how it works. | ||||||
|  |  | ||||||
|  | Testing | ||||||
|  | ------- | ||||||
|  |  | ||||||
|  | All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_ | ||||||
|  | and any pull requests are automatically tested. Any pull requests without | ||||||
|  | tests will take longer to be integrated and might be refused. | ||||||
|  |  | ||||||
|  | You may also submit a simple failing test as a pull request if you don't know | ||||||
|  | how to fix it, it will be easier for other people to work on it and it may get | ||||||
|  | fixed faster. | ||||||
|  |  | ||||||
|  | General Guidelines | ||||||
|  | ------------------ | ||||||
|  |  | ||||||
|  | - Avoid backward breaking changes if at all possible. | ||||||
|  | - If you *have* to introduce a breaking change, make it very clear in your | ||||||
|  |   pull request's description. Also, describe how users of this package | ||||||
|  |   should adapt to the breaking change in docs/upgrade.rst. | ||||||
|  | - Write inline documentation for new classes and methods. | ||||||
|  | - Write tests and make sure they pass (make sure you have a mongod | ||||||
|  |   running on the default port, then execute ``python setup.py test`` | ||||||
|  |   from the cmd line to run the test suite). | ||||||
|  | - Ensure tests pass on all supported Python, PyMongo, and MongoDB versions. | ||||||
|  |   You can test various Python and PyMongo versions locally by executing | ||||||
|  |   ``tox``. For different MongoDB versions, you can rely on our automated | ||||||
|  |   Travis tests. | ||||||
|  | - Add enhancements or problematic bug fixes to docs/changelog.rst. | ||||||
|  | - Add yourself to AUTHORS :) | ||||||
|  |  | ||||||
|  | Documentation | ||||||
|  | ------------- | ||||||
|  |  | ||||||
|  | To contribute to the `API documentation | ||||||
|  | <http://docs.mongoengine.org/en/latest/apireference.html>`_ | ||||||
|  | just make your changes to the inline documentation of the appropriate | ||||||
|  | `source code <https://github.com/MongoEngine/mongoengine>`_ or `rst file | ||||||
|  | <https://github.com/MongoEngine/mongoengine/tree/master/docs>`_ in a | ||||||
|  | branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_. | ||||||
|  | You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_ | ||||||
|  | button. | ||||||
|  |  | ||||||
|  | If you want to test your documentation changes locally, you need to install | ||||||
|  | the ``sphinx`` and ``sphinx_rtd_theme`` packages. Once these are installed, | ||||||
|  | go to the ``docs`` directory, run ``make html`` and inspect the updated docs | ||||||
|  | by running ``open _build/html/index.html``. | ||||||
							
								
								
									
										2
									
								
								LICENSE
									
									
									
									
									
								
							
							
						
						
									
										2
									
								
								LICENSE
									
									
									
									
									
								
							| @@ -1,4 +1,4 @@ | |||||||
| Copyright (c) 2009-2010 Harry Marr | Copyright (c) 2009 See AUTHORS | ||||||
|  |  | ||||||
| Permission is hereby granted, free of charge, to any person | Permission is hereby granted, free of charge, to any person | ||||||
| obtaining a copy of this software and associated documentation | obtaining a copy of this software and associated documentation | ||||||
|   | |||||||
							
								
								
									
										126
									
								
								README.rst
									
									
									
									
									
								
							
							
						
						
									
										126
									
								
								README.rst
									
									
									
									
									
								
							| @@ -2,37 +2,84 @@ | |||||||
| MongoEngine | MongoEngine | ||||||
| =========== | =========== | ||||||
| :Info: MongoEngine is an ORM-like layer on top of PyMongo. | :Info: MongoEngine is an ORM-like layer on top of PyMongo. | ||||||
|  | :Repository: https://github.com/MongoEngine/mongoengine | ||||||
| :Author: Harry Marr (http://github.com/hmarr) | :Author: Harry Marr (http://github.com/hmarr) | ||||||
| :Maintainer: Ross Lawley (http://github.com/rozza) | :Maintainer: Stefan Wójcik (http://github.com/wojcikstefan) | ||||||
|  |  | ||||||
|  | .. image:: https://travis-ci.org/MongoEngine/mongoengine.svg?branch=master | ||||||
|  |   :target: https://travis-ci.org/MongoEngine/mongoengine | ||||||
|  |  | ||||||
|  | .. image:: https://coveralls.io/repos/github/MongoEngine/mongoengine/badge.svg?branch=master | ||||||
|  |   :target: https://coveralls.io/github/MongoEngine/mongoengine?branch=master | ||||||
|  |  | ||||||
|  | .. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.svg?style=flat | ||||||
|  |   :target: https://landscape.io/github/MongoEngine/mongoengine/master | ||||||
|  |   :alt: Code Health | ||||||
|  |  | ||||||
|  | .. image:: https://img.shields.io/badge/code%20style-black-000000.svg | ||||||
|  |   :target: https://github.com/ambv/black | ||||||
|  |  | ||||||
| About | About | ||||||
| ===== | ===== | ||||||
| MongoEngine is a Python Object-Document Mapper for working with MongoDB. | MongoEngine is a Python Object-Document Mapper for working with MongoDB. | ||||||
| Documentation available at http://mongoengine-odm.rtfd.org - there is currently | Documentation is available at https://mongoengine-odm.readthedocs.io - there | ||||||
| a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide | is currently a `tutorial <https://mongoengine-odm.readthedocs.io/tutorial.html>`_, | ||||||
| <http://readthedocs.org/docs/mongoengine-odm/en/latest/userguide.html>`_ and an `API reference | a `user guide <https://mongoengine-odm.readthedocs.io/guide/index.html>`_, and | ||||||
| <http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_. | an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_. | ||||||
|  |  | ||||||
|  | Supported MongoDB Versions | ||||||
|  | ========================== | ||||||
|  | MongoEngine is currently tested against MongoDB v3.4, v3.6 and v4.0. Future versions | ||||||
|  | should be supported as well, but aren't actively tested at the moment. Make | ||||||
|  | sure to open an issue or submit a pull request if you experience any problems | ||||||
|  | with MongoDB version > 4.0. | ||||||
|  |  | ||||||
| Installation | Installation | ||||||
| ============ | ============ | ||||||
| If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of | ||||||
| you can use ``easy_install -U mongoengine``. Otherwise, you can download the | `pip <https://pip.pypa.io/>`_. You can then use ``python -m pip install -U mongoengine``. | ||||||
| source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python | You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | ||||||
| setup.py install``. | and thus you can use ``easy_install -U mongoengine``. Another option is | ||||||
|  | `pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine`` | ||||||
|  | to both create the virtual environment and install the package. Otherwise, you can | ||||||
|  | download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and | ||||||
|  | run ``python setup.py install``. | ||||||
|  |  | ||||||
|  | The support for Python2 was dropped with MongoEngine 0.20.0 | ||||||
|  |  | ||||||
| Dependencies | Dependencies | ||||||
| ============ | ============ | ||||||
| - pymongo 1.1+ | All of the dependencies can easily be installed via `python -m pip <https://pip.pypa.io/>`_. | ||||||
| - sphinx (optional - for documentation generation) | At the very least, you'll need these two packages to use MongoEngine: | ||||||
|  |  | ||||||
|  | - pymongo>=3.4 | ||||||
|  |  | ||||||
|  | If you utilize a ``DateTimeField``, you might also use a more flexible date parser: | ||||||
|  |  | ||||||
|  | - dateutil>=2.1.0 | ||||||
|  |  | ||||||
|  | If you need to use an ``ImageField`` or ``ImageGridFsProxy``: | ||||||
|  |  | ||||||
|  | - Pillow>=2.0.0 | ||||||
|  |  | ||||||
|  | If you need to use signals: | ||||||
|  |  | ||||||
|  | - blinker>=1.3 | ||||||
|  |  | ||||||
| Examples | Examples | ||||||
| ======== | ======== | ||||||
| Some simple examples of what MongoEngine code looks like:: | Some simple examples of what MongoEngine code looks like: | ||||||
|  |  | ||||||
|  | .. code :: python | ||||||
|  |  | ||||||
|  |     from mongoengine import * | ||||||
|  |     connect('mydb') | ||||||
|  |  | ||||||
|     class BlogPost(Document): |     class BlogPost(Document): | ||||||
|         title = StringField(required=True, max_length=200) |         title = StringField(required=True, max_length=200) | ||||||
|         posted = DateTimeField(default=datetime.datetime.now) |         posted = DateTimeField(default=datetime.datetime.utcnow) | ||||||
|         tags = ListField(StringField(max_length=50)) |         tags = ListField(StringField(max_length=50)) | ||||||
|  |         meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|     class TextPost(BlogPost): |     class TextPost(BlogPost): | ||||||
|         content = StringField(required=True) |         content = StringField(required=True) | ||||||
| @@ -52,36 +99,52 @@ Some simple examples of what MongoEngine code looks like:: | |||||||
|  |  | ||||||
|     # Iterate over all posts using the BlogPost superclass |     # Iterate over all posts using the BlogPost superclass | ||||||
|     >>> for post in BlogPost.objects: |     >>> for post in BlogPost.objects: | ||||||
|     ...     print '===', post.title, '===' |     ...     print('===', post.title, '===') | ||||||
|     ...     if isinstance(post, TextPost): |     ...     if isinstance(post, TextPost): | ||||||
|     ...         print post.content |     ...         print(post.content) | ||||||
|     ...     elif isinstance(post, LinkPost): |     ...     elif isinstance(post, LinkPost): | ||||||
|     ...         print 'Link:', post.url |     ...         print('Link:', post.url) | ||||||
|     ...     print |  | ||||||
|     ... |     ... | ||||||
|     === Using MongoEngine === |  | ||||||
|     See the tutorial |  | ||||||
|  |  | ||||||
|     === MongoEngine Docs === |     # Count all blog posts and its subtypes | ||||||
|     Link: hmarr.com/mongoengine |     >>> BlogPost.objects.count() | ||||||
|  |  | ||||||
|     >>> len(BlogPost.objects) |  | ||||||
|     2 |     2 | ||||||
|     >>> len(HtmlPost.objects) |     >>> TextPost.objects.count() | ||||||
|     1 |     1 | ||||||
|     >>> len(LinkPost.objects) |     >>> LinkPost.objects.count() | ||||||
|     1 |     1 | ||||||
|  |  | ||||||
|     # Find tagged posts |     # Count tagged posts | ||||||
|     >>> len(BlogPost.objects(tags='mongoengine')) |     >>> BlogPost.objects(tags='mongoengine').count() | ||||||
|     2 |     2 | ||||||
|     >>> len(BlogPost.objects(tags='mongodb')) |     >>> BlogPost.objects(tags='mongodb').count() | ||||||
|     1 |     1 | ||||||
|  |  | ||||||
| Tests | Tests | ||||||
| ===== | ===== | ||||||
| To run the test suite, ensure you are running a local instance of MongoDB on | To run the test suite, ensure you are running a local instance of MongoDB on | ||||||
| the standard port, and run ``python setup.py test``. | the standard port and have ``pytest`` installed. Then, run ``python setup.py test`` | ||||||
|  | or simply ``pytest``. | ||||||
|  |  | ||||||
|  | To run the test suite on every supported Python and PyMongo version, you can | ||||||
|  | use ``tox``. You'll need to make sure you have each supported Python version | ||||||
|  | installed in your environment and then: | ||||||
|  |  | ||||||
|  | .. code-block:: shell | ||||||
|  |  | ||||||
|  |     # Install tox | ||||||
|  |     $ python -m pip install tox | ||||||
|  |     # Run the test suites | ||||||
|  |     $ tox | ||||||
|  |  | ||||||
|  | If you wish to run a subset of tests, use the pytest convention: | ||||||
|  |  | ||||||
|  | .. code-block:: shell | ||||||
|  |  | ||||||
|  |     # Run all the tests in a particular test file | ||||||
|  |     $ pytest tests/fields/test_fields.py | ||||||
|  |     # Run only particular test class in that file | ||||||
|  |     $ pytest tests/fields/test_fields.py::TestField | ||||||
|  |  | ||||||
| Community | Community | ||||||
| ========= | ========= | ||||||
| @@ -89,10 +152,7 @@ Community | |||||||
|   <http://groups.google.com/group/mongoengine-users>`_ |   <http://groups.google.com/group/mongoengine-users>`_ | ||||||
| - `MongoEngine Developers mailing list | - `MongoEngine Developers mailing list | ||||||
|   <http://groups.google.com/group/mongoengine-dev>`_ |   <http://groups.google.com/group/mongoengine-dev>`_ | ||||||
| - `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_ |  | ||||||
|  |  | ||||||
| Contributing | Contributing | ||||||
| ============ | ============ | ||||||
| The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to | We welcome contributions! See the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_ | ||||||
| contribute to the project, fork it on GitHub and send a pull request, all |  | ||||||
| contributions and suggestions are welcome! |  | ||||||
|   | |||||||
							
								
								
									
										182
									
								
								benchmark.py
									
									
									
									
									
								
							
							
						
						
									
										182
									
								
								benchmark.py
									
									
									
									
									
								
							| @@ -1,182 +0,0 @@ | |||||||
| #!/usr/bin/env python |  | ||||||
|  |  | ||||||
| import timeit |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def cprofile_main(): |  | ||||||
|     from pymongo import Connection |  | ||||||
|     connection = Connection() |  | ||||||
|     connection.drop_database('timeit_test') |  | ||||||
|     connection.disconnect() |  | ||||||
|  |  | ||||||
|     from mongoengine import Document, DictField, connect |  | ||||||
|     connect("timeit_test") |  | ||||||
|  |  | ||||||
|     class Noddy(Document): |  | ||||||
|         fields = DictField() |  | ||||||
|  |  | ||||||
|     for i in xrange(1): |  | ||||||
|         noddy = Noddy() |  | ||||||
|         for j in range(20): |  | ||||||
|             noddy.fields["key" + str(j)] = "value " + str(j) |  | ||||||
|         noddy.save() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def main(): |  | ||||||
|     """ |  | ||||||
|     0.4 Performance Figures ... |  | ||||||
|  |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - Pymongo |  | ||||||
|     1.1141769886 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine |  | ||||||
|     2.37724113464 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False |  | ||||||
|     1.92479610443 |  | ||||||
|  |  | ||||||
|     0.5.X |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - Pymongo |  | ||||||
|     1.10552310944 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine |  | ||||||
|     16.5169169903 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False |  | ||||||
|     14.9446101189 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False |  | ||||||
|     14.912801981 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, force=True |  | ||||||
|     14.9617750645 |  | ||||||
|  |  | ||||||
|     Performance |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - Pymongo |  | ||||||
|     1.10072994232 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine |  | ||||||
|     5.27341103554 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False |  | ||||||
|     4.49365401268 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False |  | ||||||
|     4.43459296227 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, force=True |  | ||||||
|     4.40114378929 |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     setup = """ |  | ||||||
| from pymongo import Connection |  | ||||||
| connection = Connection() |  | ||||||
| connection.drop_database('timeit_test') |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| from pymongo import Connection |  | ||||||
| connection = Connection() |  | ||||||
|  |  | ||||||
| db = connection.timeit_test |  | ||||||
| noddy = db.noddy |  | ||||||
|  |  | ||||||
| for i in xrange(10000): |  | ||||||
|     example = {'fields': {}} |  | ||||||
|     for j in range(20): |  | ||||||
|         example['fields']["key"+str(j)] = "value "+str(j) |  | ||||||
|  |  | ||||||
|     noddy.insert(example) |  | ||||||
|  |  | ||||||
| myNoddys = noddy.find() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print "-" * 100 |  | ||||||
|     print """Creating 10000 dictionaries - Pymongo""" |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print t.timeit(1) |  | ||||||
|  |  | ||||||
|     setup = """ |  | ||||||
| from pymongo import Connection |  | ||||||
| connection = Connection() |  | ||||||
| connection.drop_database('timeit_test') |  | ||||||
| connection.disconnect() |  | ||||||
|  |  | ||||||
| from mongoengine import Document, DictField, connect |  | ||||||
| connect("timeit_test") |  | ||||||
|  |  | ||||||
| class Noddy(Document): |  | ||||||
|     fields = DictField() |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in xrange(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save() |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print "-" * 100 |  | ||||||
|     print """Creating 10000 dictionaries - MongoEngine""" |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print t.timeit(1) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in xrange(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(safe=False, validate=False) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print "-" * 100 |  | ||||||
|     print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False""" |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print t.timeit(1) |  | ||||||
|  |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in xrange(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(safe=False, validate=False, cascade=False) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print "-" * 100 |  | ||||||
|     print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False""" |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print t.timeit(1) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in xrange(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(force_insert=True, safe=False, validate=False, cascade=False) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print "-" * 100 |  | ||||||
|     print """Creating 10000 dictionaries - MongoEngine, force=True""" |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print t.timeit(1) |  | ||||||
|  |  | ||||||
| if __name__ == "__main__": |  | ||||||
|     main() |  | ||||||
							
								
								
									
										142
									
								
								benchmarks/test_basic_doc_ops.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										142
									
								
								benchmarks/test_basic_doc_ops.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,142 @@ | |||||||
|  | from timeit import repeat | ||||||
|  |  | ||||||
|  | import mongoengine | ||||||
|  | from mongoengine import ( | ||||||
|  |     BooleanField, | ||||||
|  |     Document, | ||||||
|  |     EmailField, | ||||||
|  |     EmbeddedDocument, | ||||||
|  |     EmbeddedDocumentField, | ||||||
|  |     IntField, | ||||||
|  |     ListField, | ||||||
|  |     StringField, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | mongoengine.connect(db="mongoengine_benchmark_test") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def timeit(f, n=10000): | ||||||
|  |     return min(repeat(f, repeat=3, number=n)) / float(n) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_basic(): | ||||||
|  |     class Book(Document): | ||||||
|  |         name = StringField() | ||||||
|  |         pages = IntField() | ||||||
|  |         tags = ListField(StringField()) | ||||||
|  |         is_published = BooleanField() | ||||||
|  |         author_email = EmailField() | ||||||
|  |  | ||||||
|  |     Book.drop_collection() | ||||||
|  |  | ||||||
|  |     def init_book(): | ||||||
|  |         return Book( | ||||||
|  |             name="Always be closing", | ||||||
|  |             pages=100, | ||||||
|  |             tags=["self-help", "sales"], | ||||||
|  |             is_published=True, | ||||||
|  |             author_email="alec@example.com", | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     print("Doc initialization: %.3fus" % (timeit(init_book, 1000) * 10 ** 6)) | ||||||
|  |  | ||||||
|  |     b = init_book() | ||||||
|  |     print("Doc getattr: %.3fus" % (timeit(lambda: b.name, 10000) * 10 ** 6)) | ||||||
|  |  | ||||||
|  |     print( | ||||||
|  |         "Doc setattr: %.3fus" | ||||||
|  |         % (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10 ** 6)) | ||||||
|  |  | ||||||
|  |     print("Doc validation: %.3fus" % (timeit(b.validate, 1000) * 10 ** 6)) | ||||||
|  |  | ||||||
|  |     def save_book(): | ||||||
|  |         b._mark_as_changed("name") | ||||||
|  |         b._mark_as_changed("tags") | ||||||
|  |         b.save() | ||||||
|  |  | ||||||
|  |     print("Save to database: %.3fus" % (timeit(save_book, 100) * 10 ** 6)) | ||||||
|  |  | ||||||
|  |     son = b.to_mongo() | ||||||
|  |     print( | ||||||
|  |         "Load from SON: %.3fus" % (timeit(lambda: Book._from_son(son), 1000) * 10 ** 6) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     print( | ||||||
|  |         "Load from database: %.3fus" % (timeit(lambda: Book.objects[0], 100) * 10 ** 6) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     def create_and_delete_book(): | ||||||
|  |         b = init_book() | ||||||
|  |         b.save() | ||||||
|  |         b.delete() | ||||||
|  |  | ||||||
|  |     print( | ||||||
|  |         "Init + save to database + delete: %.3fms" | ||||||
|  |         % (timeit(create_and_delete_book, 10) * 10 ** 3) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_big_doc(): | ||||||
|  |     class Contact(EmbeddedDocument): | ||||||
|  |         name = StringField() | ||||||
|  |         title = StringField() | ||||||
|  |         address = StringField() | ||||||
|  |  | ||||||
|  |     class Company(Document): | ||||||
|  |         name = StringField() | ||||||
|  |         contacts = ListField(EmbeddedDocumentField(Contact)) | ||||||
|  |  | ||||||
|  |     Company.drop_collection() | ||||||
|  |  | ||||||
|  |     def init_company(): | ||||||
|  |         return Company( | ||||||
|  |             name="MongoDB, Inc.", | ||||||
|  |             contacts=[ | ||||||
|  |                 Contact(name="Contact %d" % x, title="CEO", address="Address %d" % x) | ||||||
|  |                 for x in range(1000) | ||||||
|  |             ], | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     company = init_company() | ||||||
|  |     print("Big doc to mongo: %.3fms" % (timeit(company.to_mongo, 100) * 10 ** 3)) | ||||||
|  |  | ||||||
|  |     print("Big doc validation: %.3fms" % (timeit(company.validate, 1000) * 10 ** 3)) | ||||||
|  |  | ||||||
|  |     company.save() | ||||||
|  |  | ||||||
|  |     def save_company(): | ||||||
|  |         company._mark_as_changed("name") | ||||||
|  |         company._mark_as_changed("contacts") | ||||||
|  |         company.save() | ||||||
|  |  | ||||||
|  |     print("Save to database: %.3fms" % (timeit(save_company, 100) * 10 ** 3)) | ||||||
|  |  | ||||||
|  |     son = company.to_mongo() | ||||||
|  |     print( | ||||||
|  |         "Load from SON: %.3fms" | ||||||
|  |         % (timeit(lambda: Company._from_son(son), 100) * 10 ** 3) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     print( | ||||||
|  |         "Load from database: %.3fms" | ||||||
|  |         % (timeit(lambda: Company.objects[0], 100) * 10 ** 3) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     def create_and_delete_company(): | ||||||
|  |         c = init_company() | ||||||
|  |         c.save() | ||||||
|  |         c.delete() | ||||||
|  |  | ||||||
|  |     print( | ||||||
|  |         "Init + save to database + delete: %.3fms" | ||||||
|  |         % (timeit(create_and_delete_company, 10) * 10 ** 3) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | if __name__ == "__main__": | ||||||
|  |     test_basic() | ||||||
|  |     print("-" * 100) | ||||||
|  |     test_big_doc() | ||||||
							
								
								
									
										161
									
								
								benchmarks/test_inserts.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										161
									
								
								benchmarks/test_inserts.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,161 @@ | |||||||
|  | import timeit | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def main(): | ||||||
|  |     setup = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  |  | ||||||
|  | connection = MongoClient() | ||||||
|  | connection.drop_database('mongoengine_benchmark_test') | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  |  | ||||||
|  | connection = MongoClient() | ||||||
|  |  | ||||||
|  | db = connection.mongoengine_benchmark_test | ||||||
|  | noddy = db.noddy | ||||||
|  |  | ||||||
|  | for i in range(10000): | ||||||
|  |     example = {'fields': {}} | ||||||
|  |     for j in range(20): | ||||||
|  |         example['fields']["key"+str(j)] = "value "+str(j) | ||||||
|  |  | ||||||
|  |     noddy.insert_one(example) | ||||||
|  |  | ||||||
|  | myNoddys = noddy.find() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("PyMongo: Creating 10000 dictionaries.") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print("{}s".format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | from pymongo import MongoClient, WriteConcern | ||||||
|  | connection = MongoClient() | ||||||
|  |  | ||||||
|  | db = connection.mongoengine_benchmark_test | ||||||
|  | noddy = db.noddy.with_options(write_concern=WriteConcern(w=0)) | ||||||
|  |  | ||||||
|  | for i in range(10000): | ||||||
|  |     example = {'fields': {}} | ||||||
|  |     for j in range(20): | ||||||
|  |         example['fields']["key"+str(j)] = "value "+str(j) | ||||||
|  |  | ||||||
|  |     noddy.insert_one(example) | ||||||
|  |  | ||||||
|  | myNoddys = noddy.find() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).') | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print("{}s".format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     setup = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  |  | ||||||
|  | connection = MongoClient() | ||||||
|  | connection.drop_database('mongoengine_benchmark_test') | ||||||
|  | connection.close() | ||||||
|  |  | ||||||
|  | from mongoengine import Document, DictField, connect | ||||||
|  | connect("mongoengine_benchmark_test") | ||||||
|  |  | ||||||
|  | class Noddy(Document): | ||||||
|  |     fields = DictField() | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save() | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("MongoEngine: Creating 10000 dictionaries.") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print("{}s".format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     fields = {} | ||||||
|  |     for j in range(20): | ||||||
|  |         fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.fields = fields | ||||||
|  |     noddy.save() | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("MongoEngine: Creating 10000 dictionaries (using a single field assignment).") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print("{}s".format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(write_concern={"w": 0}) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).') | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print("{}s".format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(write_concern={"w": 0}, validate=False) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print( | ||||||
|  |         'MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).' | ||||||
|  |     ) | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print("{}s".format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(force_insert=True, write_concern={"w": 0}, validate=False) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print( | ||||||
|  |         'MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).' | ||||||
|  |     ) | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print("{}s".format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | if __name__ == "__main__": | ||||||
|  |     main() | ||||||
							
								
								
									
										229
									
								
								docs/_themes/nature/static/nature.css_t
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										229
									
								
								docs/_themes/nature/static/nature.css_t
									
									
									
									
										vendored
									
									
								
							| @@ -1,229 +0,0 @@ | |||||||
| /** |  | ||||||
|  * Sphinx stylesheet -- default theme |  | ||||||
|  * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |  | ||||||
|  */ |  | ||||||
|   |  | ||||||
| @import url("basic.css"); |  | ||||||
|   |  | ||||||
| /* -- page layout ----------------------------------------------------------- */ |  | ||||||
|   |  | ||||||
| body { |  | ||||||
|     font-family: Arial, sans-serif; |  | ||||||
|     font-size: 100%; |  | ||||||
|     background-color: #111; |  | ||||||
|     color: #555; |  | ||||||
|     margin: 0; |  | ||||||
|     padding: 0; |  | ||||||
| } |  | ||||||
|  |  | ||||||
| div.documentwrapper { |  | ||||||
|     float: left; |  | ||||||
|     width: 100%; |  | ||||||
| } |  | ||||||
|  |  | ||||||
| div.bodywrapper { |  | ||||||
|     margin: 0 0 0 230px; |  | ||||||
| } |  | ||||||
|  |  | ||||||
| hr{ |  | ||||||
|     border: 1px solid #B1B4B6; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.document { |  | ||||||
|     background-color: #eee; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.body { |  | ||||||
|     background-color: #ffffff; |  | ||||||
|     color: #3E4349; |  | ||||||
|     padding: 0 30px 30px 30px; |  | ||||||
|     font-size: 0.8em; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.footer { |  | ||||||
|     color: #555; |  | ||||||
|     width: 100%; |  | ||||||
|     padding: 13px 0; |  | ||||||
|     text-align: center; |  | ||||||
|     font-size: 75%; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.footer a { |  | ||||||
|     color: #444; |  | ||||||
|     text-decoration: underline; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.related { |  | ||||||
|     background-color: #6BA81E; |  | ||||||
|     line-height: 32px; |  | ||||||
|     color: #fff; |  | ||||||
|     text-shadow: 0px 1px 0 #444; |  | ||||||
|     font-size: 0.80em; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.related a { |  | ||||||
|     color: #E2F3CC; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.sphinxsidebar { |  | ||||||
|     font-size: 0.75em; |  | ||||||
|     line-height: 1.5em; |  | ||||||
| } |  | ||||||
|  |  | ||||||
| div.sphinxsidebarwrapper{ |  | ||||||
|     padding: 20px 0; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.sphinxsidebar h3, |  | ||||||
| div.sphinxsidebar h4 { |  | ||||||
|     font-family: Arial, sans-serif; |  | ||||||
|     color: #222; |  | ||||||
|     font-size: 1.2em; |  | ||||||
|     font-weight: normal; |  | ||||||
|     margin: 0; |  | ||||||
|     padding: 5px 10px; |  | ||||||
|     background-color: #ddd; |  | ||||||
|     text-shadow: 1px 1px 0 white |  | ||||||
| } |  | ||||||
|  |  | ||||||
| div.sphinxsidebar h4{ |  | ||||||
|     font-size: 1.1em; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.sphinxsidebar h3 a { |  | ||||||
|     color: #444; |  | ||||||
| } |  | ||||||
|   |  | ||||||
|   |  | ||||||
| div.sphinxsidebar p { |  | ||||||
|     color: #888; |  | ||||||
|     padding: 5px 20px; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.sphinxsidebar p.topless { |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.sphinxsidebar ul { |  | ||||||
|     margin: 10px 20px; |  | ||||||
|     padding: 0; |  | ||||||
|     color: #000; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.sphinxsidebar a { |  | ||||||
|     color: #444; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.sphinxsidebar input { |  | ||||||
|     border: 1px solid #ccc; |  | ||||||
|     font-family: sans-serif; |  | ||||||
|     font-size: 1em; |  | ||||||
| } |  | ||||||
|  |  | ||||||
| div.sphinxsidebar input[type=text]{ |  | ||||||
|     margin-left: 20px; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| /* -- body styles ----------------------------------------------------------- */ |  | ||||||
|   |  | ||||||
| a { |  | ||||||
|     color: #005B81; |  | ||||||
|     text-decoration: none; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| a:hover { |  | ||||||
|     color: #E32E00; |  | ||||||
|     text-decoration: underline; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.body h1, |  | ||||||
| div.body h2, |  | ||||||
| div.body h3, |  | ||||||
| div.body h4, |  | ||||||
| div.body h5, |  | ||||||
| div.body h6 { |  | ||||||
|     font-family: Arial, sans-serif; |  | ||||||
|     background-color: #BED4EB; |  | ||||||
|     font-weight: normal; |  | ||||||
|     color: #212224; |  | ||||||
|     margin: 30px 0px 10px 0px; |  | ||||||
|     padding: 5px 0 5px 10px; |  | ||||||
|     text-shadow: 0px 1px 0 white |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; } |  | ||||||
| div.body h2 { font-size: 150%; background-color: #C8D5E3; } |  | ||||||
| div.body h3 { font-size: 120%; background-color: #D8DEE3; } |  | ||||||
| div.body h4 { font-size: 110%; background-color: #D8DEE3; } |  | ||||||
| div.body h5 { font-size: 100%; background-color: #D8DEE3; } |  | ||||||
| div.body h6 { font-size: 100%; background-color: #D8DEE3; } |  | ||||||
|   |  | ||||||
| a.headerlink { |  | ||||||
|     color: #c60f0f; |  | ||||||
|     font-size: 0.8em; |  | ||||||
|     padding: 0 4px 0 4px; |  | ||||||
|     text-decoration: none; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| a.headerlink:hover { |  | ||||||
|     background-color: #c60f0f; |  | ||||||
|     color: white; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.body p, div.body dd, div.body li { |  | ||||||
|     line-height: 1.5em; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.admonition p.admonition-title + p { |  | ||||||
|     display: inline; |  | ||||||
| } |  | ||||||
|  |  | ||||||
| div.highlight{ |  | ||||||
|     background-color: white; |  | ||||||
| } |  | ||||||
|  |  | ||||||
| div.note { |  | ||||||
|     background-color: #eee; |  | ||||||
|     border: 1px solid #ccc; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.seealso { |  | ||||||
|     background-color: #ffc; |  | ||||||
|     border: 1px solid #ff6; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.topic { |  | ||||||
|     background-color: #eee; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| div.warning { |  | ||||||
|     background-color: #ffe4e4; |  | ||||||
|     border: 1px solid #f66; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| p.admonition-title { |  | ||||||
|     display: inline; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| p.admonition-title:after { |  | ||||||
|     content: ":"; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| pre { |  | ||||||
|     padding: 10px; |  | ||||||
|     background-color: White; |  | ||||||
|     color: #222; |  | ||||||
|     line-height: 1.2em; |  | ||||||
|     border: 1px solid #C6C9CB; |  | ||||||
|     font-size: 1.2em; |  | ||||||
|     margin: 1.5em 0 1.5em 0; |  | ||||||
|     -webkit-box-shadow: 1px 1px 1px #d8d8d8; |  | ||||||
|     -moz-box-shadow: 1px 1px 1px #d8d8d8; |  | ||||||
| } |  | ||||||
|   |  | ||||||
| tt { |  | ||||||
|     background-color: #ecf0f3; |  | ||||||
|     color: #222; |  | ||||||
|     padding: 1px 2px; |  | ||||||
|     font-size: 1.2em; |  | ||||||
|     font-family: monospace; |  | ||||||
| } |  | ||||||
							
								
								
									
										54
									
								
								docs/_themes/nature/static/pygments.css
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										54
									
								
								docs/_themes/nature/static/pygments.css
									
									
									
									
										vendored
									
									
								
							| @@ -1,54 +0,0 @@ | |||||||
| .c { color: #999988; font-style: italic } /* Comment */ |  | ||||||
| .k { font-weight: bold } /* Keyword */ |  | ||||||
| .o { font-weight: bold } /* Operator */ |  | ||||||
| .cm { color: #999988; font-style: italic } /* Comment.Multiline */ |  | ||||||
| .cp { color: #999999; font-weight: bold } /* Comment.preproc */ |  | ||||||
| .c1 { color: #999988; font-style: italic } /* Comment.Single */ |  | ||||||
| .gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */ |  | ||||||
| .ge { font-style: italic } /* Generic.Emph */ |  | ||||||
| .gr { color: #aa0000 } /* Generic.Error */ |  | ||||||
| .gh { color: #999999 } /* Generic.Heading */ |  | ||||||
| .gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */ |  | ||||||
| .go { color: #111 } /* Generic.Output */ |  | ||||||
| .gp { color: #555555 } /* Generic.Prompt */ |  | ||||||
| .gs { font-weight: bold } /* Generic.Strong */ |  | ||||||
| .gu { color: #aaaaaa } /* Generic.Subheading */ |  | ||||||
| .gt { color: #aa0000 } /* Generic.Traceback */ |  | ||||||
| .kc { font-weight: bold } /* Keyword.Constant */ |  | ||||||
| .kd { font-weight: bold } /* Keyword.Declaration */ |  | ||||||
| .kp { font-weight: bold } /* Keyword.Pseudo */ |  | ||||||
| .kr { font-weight: bold } /* Keyword.Reserved */ |  | ||||||
| .kt { color: #445588; font-weight: bold } /* Keyword.Type */ |  | ||||||
| .m { color: #009999 } /* Literal.Number */ |  | ||||||
| .s { color: #bb8844 } /* Literal.String */ |  | ||||||
| .na { color: #008080 } /* Name.Attribute */ |  | ||||||
| .nb { color: #999999 } /* Name.Builtin */ |  | ||||||
| .nc { color: #445588; font-weight: bold } /* Name.Class */ |  | ||||||
| .no { color: #ff99ff } /* Name.Constant */ |  | ||||||
| .ni { color: #800080 } /* Name.Entity */ |  | ||||||
| .ne { color: #990000; font-weight: bold } /* Name.Exception */ |  | ||||||
| .nf { color: #990000; font-weight: bold } /* Name.Function */ |  | ||||||
| .nn { color: #555555 } /* Name.Namespace */ |  | ||||||
| .nt { color: #000080 } /* Name.Tag */ |  | ||||||
| .nv { color: purple } /* Name.Variable */ |  | ||||||
| .ow { font-weight: bold } /* Operator.Word */ |  | ||||||
| .mf { color: #009999 } /* Literal.Number.Float */ |  | ||||||
| .mh { color: #009999 } /* Literal.Number.Hex */ |  | ||||||
| .mi { color: #009999 } /* Literal.Number.Integer */ |  | ||||||
| .mo { color: #009999 } /* Literal.Number.Oct */ |  | ||||||
| .sb { color: #bb8844 } /* Literal.String.Backtick */ |  | ||||||
| .sc { color: #bb8844 } /* Literal.String.Char */ |  | ||||||
| .sd { color: #bb8844 } /* Literal.String.Doc */ |  | ||||||
| .s2 { color: #bb8844 } /* Literal.String.Double */ |  | ||||||
| .se { color: #bb8844 } /* Literal.String.Escape */ |  | ||||||
| .sh { color: #bb8844 } /* Literal.String.Heredoc */ |  | ||||||
| .si { color: #bb8844 } /* Literal.String.Interpol */ |  | ||||||
| .sx { color: #bb8844 } /* Literal.String.Other */ |  | ||||||
| .sr { color: #808000 } /* Literal.String.Regex */ |  | ||||||
| .s1 { color: #bb8844 } /* Literal.String.Single */ |  | ||||||
| .ss { color: #bb8844 } /* Literal.String.Symbol */ |  | ||||||
| .bp { color: #999999 } /* Name.Builtin.Pseudo */ |  | ||||||
| .vc { color: #ff99ff } /* Name.Variable.Class */ |  | ||||||
| .vg { color: #ff99ff } /* Name.Variable.Global */ |  | ||||||
| .vi { color: #ff99ff } /* Name.Variable.Instance */ |  | ||||||
| .il { color: #009999 } /* Literal.Number.Integer.Long */ |  | ||||||
							
								
								
									
										4
									
								
								docs/_themes/nature/theme.conf
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								docs/_themes/nature/theme.conf
									
									
									
									
										vendored
									
									
								
							| @@ -1,4 +0,0 @@ | |||||||
| [theme] |  | ||||||
| inherit = basic |  | ||||||
| stylesheet = nature.css |  | ||||||
| pygments_style = tango |  | ||||||
| @@ -13,6 +13,7 @@ Documents | |||||||
|  |  | ||||||
| .. autoclass:: mongoengine.Document | .. autoclass:: mongoengine.Document | ||||||
|    :members: |    :members: | ||||||
|  |    :inherited-members: | ||||||
|  |  | ||||||
|    .. attribute:: objects |    .. attribute:: objects | ||||||
|  |  | ||||||
| @@ -21,48 +22,113 @@ Documents | |||||||
|  |  | ||||||
| .. autoclass:: mongoengine.EmbeddedDocument | .. autoclass:: mongoengine.EmbeddedDocument | ||||||
|    :members: |    :members: | ||||||
|  |    :inherited-members: | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.DynamicDocument | .. autoclass:: mongoengine.DynamicDocument | ||||||
|    :members: |    :members: | ||||||
|  |    :inherited-members: | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.DynamicEmbeddedDocument | .. autoclass:: mongoengine.DynamicEmbeddedDocument | ||||||
|    :members: |    :members: | ||||||
|  |    :inherited-members: | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.document.MapReduceDocument | .. autoclass:: mongoengine.document.MapReduceDocument | ||||||
|    :members: |    :members: | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.ValidationError | ||||||
|  |   :members: | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.FieldDoesNotExist | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Context Managers | ||||||
|  | ================ | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.context_managers.switch_db | ||||||
|  | .. autoclass:: mongoengine.context_managers.switch_collection | ||||||
|  | .. autoclass:: mongoengine.context_managers.no_dereference | ||||||
|  | .. autoclass:: mongoengine.context_managers.query_counter | ||||||
|  |  | ||||||
| Querying | Querying | ||||||
| ======== | ======== | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.queryset.QuerySet | .. automodule:: mongoengine.queryset | ||||||
|  |     :synopsis: Queryset level operations | ||||||
|  |  | ||||||
|  |     .. autoclass:: mongoengine.queryset.QuerySet | ||||||
|  |       :members: | ||||||
|  |       :inherited-members: | ||||||
|  |  | ||||||
|  |       .. automethod:: QuerySet.__call__ | ||||||
|  |  | ||||||
|  |     .. autoclass:: mongoengine.queryset.QuerySetNoCache | ||||||
|       :members: |       :members: | ||||||
|  |  | ||||||
|    .. automethod:: mongoengine.queryset.QuerySet.__call__ |        .. automethod:: mongoengine.queryset.QuerySetNoCache.__call__ | ||||||
|  |  | ||||||
| .. autofunction:: mongoengine.queryset.queryset_manager |     .. autofunction:: mongoengine.queryset.queryset_manager | ||||||
|  |  | ||||||
| Fields | Fields | ||||||
| ====== | ====== | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.StringField | .. autoclass:: mongoengine.base.fields.BaseField | ||||||
| .. autoclass:: mongoengine.URLField | .. autoclass:: mongoengine.fields.StringField | ||||||
| .. autoclass:: mongoengine.EmailField | .. autoclass:: mongoengine.fields.URLField | ||||||
| .. autoclass:: mongoengine.IntField | .. autoclass:: mongoengine.fields.EmailField | ||||||
| .. autoclass:: mongoengine.FloatField | .. autoclass:: mongoengine.fields.IntField | ||||||
| .. autoclass:: mongoengine.DecimalField | .. autoclass:: mongoengine.fields.LongField | ||||||
| .. autoclass:: mongoengine.DateTimeField | .. autoclass:: mongoengine.fields.FloatField | ||||||
| .. autoclass:: mongoengine.ComplexDateTimeField | .. autoclass:: mongoengine.fields.DecimalField | ||||||
| .. autoclass:: mongoengine.ListField | .. autoclass:: mongoengine.fields.BooleanField | ||||||
| .. autoclass:: mongoengine.SortedListField | .. autoclass:: mongoengine.fields.DateTimeField | ||||||
| .. autoclass:: mongoengine.DictField | .. autoclass:: mongoengine.fields.ComplexDateTimeField | ||||||
| .. autoclass:: mongoengine.MapField | .. autoclass:: mongoengine.fields.EmbeddedDocumentField | ||||||
| .. autoclass:: mongoengine.ObjectIdField | .. autoclass:: mongoengine.fields.GenericEmbeddedDocumentField | ||||||
| .. autoclass:: mongoengine.ReferenceField | .. autoclass:: mongoengine.fields.DynamicField | ||||||
| .. autoclass:: mongoengine.GenericReferenceField | .. autoclass:: mongoengine.fields.ListField | ||||||
| .. autoclass:: mongoengine.EmbeddedDocumentField | .. autoclass:: mongoengine.fields.EmbeddedDocumentListField | ||||||
| .. autoclass:: mongoengine.GenericEmbeddedDocumentField | .. autoclass:: mongoengine.fields.SortedListField | ||||||
| .. autoclass:: mongoengine.BooleanField | .. autoclass:: mongoengine.fields.DictField | ||||||
| .. autoclass:: mongoengine.FileField | .. autoclass:: mongoengine.fields.MapField | ||||||
| .. autoclass:: mongoengine.BinaryField | .. autoclass:: mongoengine.fields.ReferenceField | ||||||
| .. autoclass:: mongoengine.GeoPointField | .. autoclass:: mongoengine.fields.LazyReferenceField | ||||||
| .. autoclass:: mongoengine.SequenceField | .. autoclass:: mongoengine.fields.GenericReferenceField | ||||||
|  | .. autoclass:: mongoengine.fields.GenericLazyReferenceField | ||||||
|  | .. autoclass:: mongoengine.fields.CachedReferenceField | ||||||
|  | .. autoclass:: mongoengine.fields.BinaryField | ||||||
|  | .. autoclass:: mongoengine.fields.FileField | ||||||
|  | .. autoclass:: mongoengine.fields.ImageField | ||||||
|  | .. autoclass:: mongoengine.fields.SequenceField | ||||||
|  | .. autoclass:: mongoengine.fields.ObjectIdField | ||||||
|  | .. autoclass:: mongoengine.fields.UUIDField | ||||||
|  | .. autoclass:: mongoengine.fields.GeoPointField | ||||||
|  | .. autoclass:: mongoengine.fields.PointField | ||||||
|  | .. autoclass:: mongoengine.fields.LineStringField | ||||||
|  | .. autoclass:: mongoengine.fields.PolygonField | ||||||
|  | .. autoclass:: mongoengine.fields.MultiPointField | ||||||
|  | .. autoclass:: mongoengine.fields.MultiLineStringField | ||||||
|  | .. autoclass:: mongoengine.fields.MultiPolygonField | ||||||
|  | .. autoclass:: mongoengine.fields.GridFSError | ||||||
|  | .. autoclass:: mongoengine.fields.GridFSProxy | ||||||
|  | .. autoclass:: mongoengine.fields.ImageGridFsProxy | ||||||
|  | .. autoclass:: mongoengine.fields.ImproperlyConfigured | ||||||
|  |  | ||||||
|  | Embedded Document Querying | ||||||
|  | ========================== | ||||||
|  |  | ||||||
|  | .. versionadded:: 0.9 | ||||||
|  |  | ||||||
|  | Additional queries for Embedded Documents are available when using the | ||||||
|  | :class:`~mongoengine.EmbeddedDocumentListField` to store a list of embedded | ||||||
|  | documents. | ||||||
|  |  | ||||||
|  | A list of embedded documents is returned as a special list with the | ||||||
|  | following methods: | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.base.datastructures.EmbeddedDocumentList | ||||||
|  |     :members: | ||||||
|  |  | ||||||
|  | Misc | ||||||
|  | ==== | ||||||
|  |  | ||||||
|  | .. autofunction:: mongoengine.common._import_class | ||||||
|   | |||||||
| @@ -1,10 +1,815 @@ | |||||||
|  |  | ||||||
| ========= | ========= | ||||||
| Changelog | Changelog | ||||||
| ========= | ========= | ||||||
|  |  | ||||||
| Changes in 0.6.x | Development | ||||||
|  | =========== | ||||||
|  | - (Fill this out as you fix issues and develop your features). | ||||||
|  | - Bug fix in DynamicDocument which is not parsing known fields in constructor like Document do #2412 | ||||||
|  | - When using pymongo >= 3.7, make use of Collection.count_documents instead of Collection.count | ||||||
|  |     and Cursor.count that got deprecated in pymongo >= 3.7. | ||||||
|  |     This should have a negative impact on performance of count see Issue #2219 | ||||||
|  | - Fix a bug that made the queryset drop the read_preference after clone(). | ||||||
|  | - Remove Py3.5 from CI as it reached EOL and add Python 3.9 | ||||||
|  | - Fix the behavior of Doc.objects.limit(0) which should return all documents (similar to mongodb) #2311 | ||||||
|  | - Bug fix in ListField when updating the first item, it was saving the whole list, instead of | ||||||
|  |     just replacing the first item (as it's usually done) #2392 | ||||||
|  | - Add EnumField: ``mongoengine.fields.EnumField`` | ||||||
|  | - Refactoring - Remove useless code related to Document.__only_fields and Queryset.only_fields | ||||||
|  |  | ||||||
|  | Changes in 0.20.0 | ||||||
|  | ================= | ||||||
|  | - ATTENTION: Drop support for Python2 | ||||||
|  | - Add Mongo 4.0 to Travis | ||||||
|  | - Fix error when setting a string as a ComplexDateTimeField #2253 | ||||||
|  | - Bump development Status classifier to Production/Stable #2232 | ||||||
|  | - Improve Queryset.get to avoid confusing MultipleObjectsReturned message in case multiple match are found #630 | ||||||
|  | - Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264 | ||||||
|  | - Add support for the `elemMatch` projection operator in .fields() (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267 | ||||||
|  | - DictField validate failed without default connection (bug introduced in 0.19.0) #2239 | ||||||
|  | - Remove methods that were deprecated years ago: | ||||||
|  |     - name parameter in Field constructor e.g `StringField(name="...")`, was replaced by db_field | ||||||
|  |     - Queryset.slave_okay() was deprecated since pymongo3 | ||||||
|  |     - dropDups was dropped with MongoDB3 | ||||||
|  |     - ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes``, the right method to use is ``Document.ensure_indexes`` | ||||||
|  | - Added pre-commit for development/CI #2212 | ||||||
|  | - Renamed requirements-lint.txt to requirements-dev.txt #2212 | ||||||
|  | - Support for setting ReadConcern #2255 | ||||||
|  |  | ||||||
|  | Changes in 0.19.1 | ||||||
|  | ================= | ||||||
|  | - Tests require Pillow < 7.0.0 as it dropped Python2 support | ||||||
|  | - DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of | ||||||
|  |     pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079 | ||||||
|  |  | ||||||
|  | Changes in 0.19.0 | ||||||
|  | ================= | ||||||
|  | - BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. #2112 | ||||||
|  |     - Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``. | ||||||
|  |     - Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``. | ||||||
|  |     - This change also renames the private ``QuerySet._initial_query`` attribute to ``_cls_query``. | ||||||
|  | - BREAKING CHANGE: Removed the deprecated ``format`` param from ``QuerySet.explain``. #2113 | ||||||
|  | - BREAKING CHANGE: Renamed ``MongoEngineConnectionError`` to ``ConnectionFailure``. #2111 | ||||||
|  |     - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it. | ||||||
|  | - BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103 | ||||||
|  |     - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required. | ||||||
|  | - BREAKING CHANGE: A ``LazyReferenceField`` is now stored in the ``_data`` field of its parent as a ``DBRef``, ``Document``, or ``EmbeddedDocument`` (``ObjectId`` is no longer allowed). #2182 | ||||||
|  | - DEPRECATION: ``Q.empty`` & ``QNode.empty`` are marked as deprecated and will be removed in a next version of MongoEngine. #2210 | ||||||
|  |     - Added ability to check if Q or QNode are empty by parsing them to bool. | ||||||
|  |     - Instead of ``Q(name="John").empty`` use ``not Q(name="John")``. | ||||||
|  | - Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125 | ||||||
|  | - Only set no_cursor_timeout when requested (fixes an incompatibility with MongoDB 4.2) #2148 | ||||||
|  | - ``ListField`` now accepts an optional ``max_length`` parameter. #2110 | ||||||
|  | - Improve error message related to InvalidDocumentError #2180 | ||||||
|  | - Added BulkWriteError to replace NotUniqueError which was misleading in bulk write insert #2152 | ||||||
|  | - Added ability to compare Q and Q operations #2204 | ||||||
|  | - Added ability to use a db alias on query_counter #2194 | ||||||
|  | - Added ability to specify collations for querysets with ``Doc.objects.collation`` #2024 | ||||||
|  | - Fix updates of a list field by negative index #2094 | ||||||
|  | - Switch from nosetest to pytest as test runner #2114 | ||||||
|  | - The codebase is now formatted using ``black``. #2109 | ||||||
|  | - Documentation improvements: | ||||||
|  |     - Documented how `pymongo.monitoring` can be used to log all queries issued by MongoEngine to the driver. | ||||||
|  |  | ||||||
|  | Changes in 0.18.2 | ||||||
|  | ================= | ||||||
|  | - Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the ``SequenceField``. #2097 | ||||||
|  | - Various code clarity and documentation improvements. | ||||||
|  |  | ||||||
|  | Changes in 0.18.1 | ||||||
|  | ================= | ||||||
|  | - Fix a bug introduced in 0.18.0 which was causing ``Document.save`` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082 | ||||||
|  | - Add Python 3.7 to Travis CI. #2058 | ||||||
|  |  | ||||||
|  | Changes in 0.18.0 | ||||||
|  | ================= | ||||||
|  | - Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2. | ||||||
|  | - MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6. #2017 #2066 | ||||||
|  | - Improve performance by avoiding a call to ``to_mongo`` in ``Document.save``. #2049 | ||||||
|  | - Connection/disconnection improvements: | ||||||
|  |     - Expose ``mongoengine.connection.disconnect`` and ``mongoengine.connection.disconnect_all``. | ||||||
|  |     - Fix disconnecting. #566 #1599 #605 #607 #1213 #565 | ||||||
|  |     - Improve documentation of ``connect``/``disconnect``. | ||||||
|  |     - Fix issue when using multiple connections to the same mongo with different credentials. #2047 | ||||||
|  |     - ``connect`` fails immediately when db name contains invalid characters. #2031 #1718 | ||||||
|  | - Fix the default write concern of ``Document.save`` that was overwriting the connection write concern. #568 | ||||||
|  | - Fix querying on ``List(EmbeddedDocument)`` subclasses fields. #1961 #1492 | ||||||
|  | - Fix querying on ``(Generic)EmbeddedDocument`` subclasses fields. #475 | ||||||
|  | - Fix ``QuerySet.aggregate`` so that it takes limit and skip value into account. #2029 | ||||||
|  | - Generate unique indices for ``SortedListField`` and ``EmbeddedDocumentListFields``. #2020 | ||||||
|  | - BREAKING CHANGE: Changed the behavior of a custom field validator (i.e ``validation`` parameter of a ``Field``). It is now expected to raise a ``ValidationError`` instead of returning ``True``/``False``. #2050 | ||||||
|  | - BREAKING CHANGES (associated with connection/disconnection fixes): | ||||||
|  |     - Calling ``connect`` 2 times with the same alias and different parameter will raise an error (should call ``disconnect`` first). | ||||||
|  |     - ``disconnect`` now clears ``mongoengine.connection._connection_settings``. | ||||||
|  |     - ``disconnect`` now clears the cached attribute ``Document._collection``. | ||||||
|  | - BREAKING CHANGE: ``EmbeddedDocument.save`` & ``.reload`` no longer exist. #1552 | ||||||
|  |  | ||||||
|  | Changes in 0.17.0 | ||||||
|  | ================= | ||||||
|  | - POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (``_cls``, ``_id``) when using ``QuerySet.as_pymongo``. #1976 | ||||||
|  | - Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time. #1995 | ||||||
|  | - DEPRECATION: ``EmbeddedDocument.save`` & ``.reload`` are marked as deprecated and will be removed in a next version of MongoEngine. #1552 | ||||||
|  | - Fix ``QuerySet.only`` working improperly after using ``QuerySet.count`` of the same instance of a ``QuerySet``. | ||||||
|  | - Fix ``batch_size`` that was not copied when cloning a ``QuerySet`` object. #2011 | ||||||
|  | - Fix ``InvalidStringData`` error when using ``modify`` on a ``BinaryField``. #1127 | ||||||
|  | - Fix test suite and CI to support MongoDB v3.4. #1445 | ||||||
|  | - Fix reference fields querying the database on each access if value contains orphan DBRefs. | ||||||
|  |  | ||||||
|  | Changes in 0.16.3 | ||||||
|  | ================= | ||||||
|  | - Fix ``$push`` with the ``$position`` operator not working with lists in embedded documents. #1965 | ||||||
|  |  | ||||||
|  | Changes in 0.16.2 | ||||||
|  | ================= | ||||||
|  | - Fix ``Document.save`` that fails when called with ``write_concern=None`` (regression of 0.16.1). #1958 | ||||||
|  |  | ||||||
|  | Changes in 0.16.1 | ||||||
|  | ================= | ||||||
|  | - Fix ``_cls`` that is not set properly in the ``Document`` constructor (regression). #1950 | ||||||
|  | - Fix a bug in the ``_delta`` method - update of a ``ListField`` depends on an unrelated dynamic field update. #1733 | ||||||
|  | - Remove PyMongo's deprecated ``Collection.save`` method and use ``Collection.insert_one`` instead. #1899 | ||||||
|  |  | ||||||
|  | Changes in 0.16.0 | ||||||
|  | ================= | ||||||
|  | - POTENTIAL BREAKING CHANGES: | ||||||
|  |     - ``EmbeddedDocumentField`` will no longer accept references to Document classes in its constructor. #1661 | ||||||
|  |     - Get rid of the ``basecls`` parameter from the ``DictField`` constructor (dead code). #1876 | ||||||
|  |     - Default value of the ``ComplexDateTime`` field is now ``None`` (and no longer the current datetime). #1368 | ||||||
|  | - Fix an unhashable ``TypeError`` when referencing a ``Document`` with a compound key in an ``EmbeddedDocument``. #1685 | ||||||
|  | - Fix a bug where an ``EmbeddedDocument`` with the same id as its parent would not be tracked for changes. #1768 | ||||||
|  | - Fix the fact that a bulk ``QuerySet.insert`` was not setting primary keys of inserted document instances. #1919 | ||||||
|  | - Fix a bug when referencing an abstract class in a ``ReferenceField``. #1920 | ||||||
|  | - Allow modifications to the document made in ``pre_save_post_validation`` to be taken into account. #1202 | ||||||
|  | - Replace MongoDB v2.4 tests in Travis CI with MongoDB v3.2. #1903 | ||||||
|  | - Fix side effects of using ``QuerySet.no_dereference`` on other documents. #1677 | ||||||
|  | - Fix ``TypeError`` when using lazy Django translation objects as translated choices. #1879 | ||||||
|  | - Improve Python 2-3 codebase compatibility. #1889 | ||||||
|  | - Fix support for changing the default value of the ``ComplexDateTime`` field. #1368 | ||||||
|  | - Improve error message in case an ``EmbeddedDocumentListField`` receives an ``EmbeddedDocument`` instance instead of a list. #1877 | ||||||
|  | - Fix the ``inc`` and ``dec`` operators for the ``DecimalField``. #1517 #1320 | ||||||
|  | - Ignore ``killcursors`` queries in ``query_counter`` context manager. #1869 | ||||||
|  | - Fix the fact that ``query_counter`` was modifying the initial profiling level in case it was != 0. #1870 | ||||||
|  | - Repair the ``no_sub_classes`` context manager + fix the fact that it was swallowing exceptions. #1865 | ||||||
|  | - Fix index creation error that was swallowed by ``hasattr`` under Python 2. #1688 | ||||||
|  | - ``QuerySet.limit`` function behaviour: Passing 0 as parameter will return all the documents in the cursor. #1611 | ||||||
|  | - Bulk insert updates the IDs of the input documents instances. #1919 | ||||||
|  | - Fix a harmless bug related to ``GenericReferenceField`` where modifications in the generic-referenced document were tracked in the parent. #1934 | ||||||
|  | - Improve validation of the ``BinaryField``. #273 | ||||||
|  | - Implement lazy regex compiling in Field classes to improve ``import mongoengine`` performance. #1806 | ||||||
|  | - Update ``GridFSProxy.__str__``  so that it would always print both the filename and grid_id. #710 | ||||||
|  | - Add ``__repr__`` to ``Q`` and ``QCombination`` classes. #1843 | ||||||
|  | - Fix bug in the ``BaseList.__iter__`` operator (was occuring when modifying a BaseList while iterating over it). #1676 | ||||||
|  | - Add a ``DateField``. #513 | ||||||
|  | - Various improvements to the documentation. | ||||||
|  | - Various code quality improvements. | ||||||
|  |  | ||||||
|  | Changes in 0.15.3 | ||||||
|  | ================= | ||||||
|  | - ``Queryset.update/update_one`` methods now return an ``UpdateResult`` when ``full_result=True`` is provided and no longer a dict. #1491 | ||||||
|  | - Improve ``LazyReferenceField`` and ``GenericLazyReferenceField`` with nested fields. #1704 | ||||||
|  | - Fix the subfield resolve error in ``generic_emdedded_document`` query. #1651 #1652 | ||||||
|  | - Use each modifier only with ``$position``. #1673 #1675 | ||||||
|  | - Fix validation errors in the ``GenericEmbeddedDocumentField``. #1067 | ||||||
|  | - Update cached fields when a ``fields`` argument is given. #1712 | ||||||
|  | - Add a ``db`` parameter to ``register_connection`` for compatibility with ``connect``. | ||||||
|  | - Use PyMongo v3.x's ``insert_one`` and ``insert_many`` in ``Document.insert``. #1491 | ||||||
|  | - Use PyMongo v3.x's ``update_one`` and ``update_many`` in ``Document.update`` and ``QuerySet.update``. #1491 | ||||||
|  | - Fix how ``reload(fields)`` affects changed fields. #1371 | ||||||
|  | - Fix a bug where the read-only access to the database fails when trying to create indexes. #1338 | ||||||
|  |  | ||||||
|  | Changes in 0.15.0 | ||||||
|  | ================= | ||||||
|  | - Add ``LazyReferenceField`` and ``GenericLazyReferenceField``. #1230 | ||||||
|  |  | ||||||
|  | Changes in 0.14.1 | ||||||
|  | ================= | ||||||
|  | - Remove ``SemiStrictDict`` and start using a regular dict for ``BaseDocument._data``. #1630 | ||||||
|  | - Add support for the ``$position`` param in the ``$push`` operator. #1566 | ||||||
|  | - Fix ``DateTimeField`` interpreting an empty string as today. #1533 | ||||||
|  | - Add a missing ``__ne__`` method to the ``GridFSProxy`` class. #1632 | ||||||
|  | - Fix ``BaseQuerySet._fields_to_db_fields``. #1553 | ||||||
|  |  | ||||||
|  | Changes in 0.14.0 | ||||||
|  | ================= | ||||||
|  | - BREAKING CHANGE: Remove the ``coerce_types`` param from ``QuerySet.as_pymongo``. #1549 | ||||||
|  | - POTENTIAL BREAKING CHANGE: Make ``EmbeddedDocument`` not hashable by default. #1528 | ||||||
|  | - Improve code quality. #1531, #1540, #1541, #1547 | ||||||
|  |  | ||||||
|  | Changes in 0.13.0 | ||||||
|  | ================= | ||||||
|  | - POTENTIAL BREAKING CHANGE: Added Unicode support to the ``EmailField``, see docs/upgrade.rst for details. | ||||||
|  |  | ||||||
|  | Changes in 0.12.0 | ||||||
|  | ================= | ||||||
|  | - POTENTIAL BREAKING CHANGE: Fix ``limit``/``skip``/``hint``/``batch_size`` chaining. #1476 | ||||||
|  | - POTENTIAL BREAKING CHANGE: Change a public ``QuerySet.clone_into`` method to a private ``QuerySet._clone_into``. #1476 | ||||||
|  | - Fix the way ``Document.objects.create`` works with duplicate IDs. #1485 | ||||||
|  | - Fix connecting to a replica set with PyMongo 2.x. #1436 | ||||||
|  | - Fix using sets in field choices. #1481 | ||||||
|  | - Fix deleting items from a ``ListField``. #1318 | ||||||
|  | - Fix an obscure error message when filtering by ``field__in=non_iterable``. #1237 | ||||||
|  | - Fix behavior of a ``dec`` update operator. #1450 | ||||||
|  | - Add a ``rename`` update operator. #1454 | ||||||
|  | - Add validation for the ``db_field`` parameter. #1448 | ||||||
|  | - Fix the error message displayed when querying an ``EmbeddedDocumentField`` by an invalid value. #1440 | ||||||
|  | - Fix the error message displayed when validating Unicode URLs. #1486 | ||||||
|  | - Raise an error when trying to save an abstract document. #1449 | ||||||
|  |  | ||||||
|  | Changes in 0.11.0 | ||||||
|  | ================= | ||||||
|  | - BREAKING CHANGE: Rename ``ConnectionError`` to ``MongoEngineConnectionError`` since the former is a built-in exception name in Python v3.x. #1428 | ||||||
|  | - BREAKING CHANGE: Drop Python v2.6 support. #1428 | ||||||
|  | - BREAKING CHANGE: ``from mongoengine.base import ErrorClass`` won't work anymore for any error from ``mongoengine.errors`` (e.g. ``ValidationError``). Use ``from mongoengine.errors import ErrorClass instead``. #1428 | ||||||
|  | - BREAKING CHANGE: Accessing a broken reference will raise a ``DoesNotExist`` error. In the past it used to return ``None``. #1334 | ||||||
|  | - Fix absent rounding for the ``DecimalField`` when ``force_string`` is set. #1103 | ||||||
|  |  | ||||||
|  | Changes in 0.10.8 | ||||||
|  | ================= | ||||||
|  | - Add support for ``QuerySet.batch_size``. (#1426) | ||||||
|  | - Fix a query set iteration within an iteration. #1427 | ||||||
|  | - Fix an issue where specifying a MongoDB URI host would override more information than it should. #1421 | ||||||
|  | - Add an ability to filter the ``GenericReferenceField`` by an ``ObjectId`` and a ``DBRef``. #1425 | ||||||
|  | - Fix cascading deletes for models with a custom primary key field. #1247 | ||||||
|  | - Add ability to specify an authentication mechanism (e.g. X.509). #1333 | ||||||
|  | - Add support for falsy primary keys (e.g. ``doc.pk = 0``). #1354 | ||||||
|  | - Fix ``QuerySet.sum/average`` for fields w/ an explicit ``db_field``. #1417 | ||||||
|  | - Fix filtering by ``embedded_doc=None``. #1422 | ||||||
|  | - Add support for ``Cursor.comment``. #1420 | ||||||
|  | - Fix ``doc.get_<field>_display`` methods. #1419 | ||||||
|  | - Fix the ``__repr__`` method of the ``StrictDict`` #1424 | ||||||
|  | - Add a deprecation warning for Python v2.6. | ||||||
|  |  | ||||||
|  | Changes in 0.10.7 | ||||||
|  | ================= | ||||||
|  | - Drop Python 3.2 support #1390 | ||||||
|  | - Fix a bug where a dynamic doc has an index inside a dict field. #1278 | ||||||
|  | - Fix: ``ListField`` minus index assignment does not work. #1128 | ||||||
|  | - Fix cascade delete mixing among collections. #1224 | ||||||
|  | - Add ``signal_kwargs`` argument to ``Document.save``, ``Document.delete`` and ``BaseQuerySet.insert`` to be passed to signals calls. #1206 | ||||||
|  | - Raise ``OperationError`` when trying to do a ``drop_collection`` on document with no collection set. | ||||||
|  | - Fix a bug where a count on ``ListField`` of ``EmbeddedDocumentField`` fails. #1187 | ||||||
|  | - Fix ``LongField`` values stored as int32 in Python 3. #1253 | ||||||
|  | - ``MapField`` now handles unicode keys correctly. #1267 | ||||||
|  | - ``ListField`` now handles negative indicies correctly. #1270 | ||||||
|  | - Fix an ``AttributeError`` when initializing an ``EmbeddedDocument`` with positional args. #681 | ||||||
|  | - Fix a ``no_cursor_timeout`` error with PyMongo v3.x. #1304 | ||||||
|  | - Replace map-reduce based ``QuerySet.sum/average`` with aggregation-based implementations. #1336 | ||||||
|  | - Fix support for ``__`` to escape field names that match operators' names in ``update``. #1351 | ||||||
|  | - Fix ``BaseDocument._mark_as_changed``. #1369 | ||||||
|  | - Add support for pickling ``QuerySet`` instances. #1397 | ||||||
|  | - Fix connecting to a list of hosts. #1389 | ||||||
|  | - Fix a bug where accessing broken references wouldn't raise a ``DoesNotExist`` error. #1334 | ||||||
|  | - Fix not being able to specify ``use_db_field=False`` on ``ListField(EmbeddedDocumentField)`` instances. #1218 | ||||||
|  | - Improvements to the dictionary field's docs. #1383 | ||||||
|  |  | ||||||
|  | Changes in 0.10.6 | ||||||
|  | ================= | ||||||
|  | - Add support for mocking MongoEngine based on mongomock. #1151 | ||||||
|  | - Fix not being able to run tests on Windows. #1153 | ||||||
|  | - Allow creation of sparse compound indexes. #1114 | ||||||
|  |  | ||||||
|  | Changes in 0.10.5 | ||||||
|  | ================= | ||||||
|  | - Fix for reloading of strict with special fields. #1156 | ||||||
|  |  | ||||||
|  | Changes in 0.10.4 | ||||||
|  | ================= | ||||||
|  | - ``SaveConditionError`` is now importable from the top level package. #1165 | ||||||
|  | - Add a ``QuerySet.upsert_one`` method. #1157 | ||||||
|  |  | ||||||
|  | Changes in 0.10.3 | ||||||
|  | ================= | ||||||
|  | - Fix ``read_preference`` (it had chaining issues with PyMongo v2.x and it didn't work at all with PyMongo v3.x). #1042 | ||||||
|  |  | ||||||
|  | Changes in 0.10.2 | ||||||
|  | ================= | ||||||
|  | - Allow shard key to point to a field in an embedded document. #551 | ||||||
|  | - Allow arbitrary metadata in fields. #1129 | ||||||
|  | - ReferenceFields now support abstract document types. #837 | ||||||
|  |  | ||||||
|  | Changes in 0.10.1 | ||||||
|  | ================= | ||||||
|  | - Fix infinite recursion with cascade delete rules under specific conditions. #1046 | ||||||
|  | - Fix ``CachedReferenceField`` bug when loading cached docs as ``DBRef`` but failing to save them. #1047 | ||||||
|  | - Fix ignored chained options. #842 | ||||||
|  | - ``Document.save``'s ``save_condition`` error raises a ``SaveConditionError`` exception. #1070 | ||||||
|  | - Fix ``Document.reload`` for the ``DynamicDocument``. #1050 | ||||||
|  | - ``StrictDict`` & ``SemiStrictDict`` are shadowed at init time. #1105 | ||||||
|  | - Fix ``ListField`` negative index assignment not working. #1119 | ||||||
|  | - Remove code that marks a field as changed when the field has a default value but does not exist in the database. #1126 | ||||||
|  | - Remove test dependencies (nose and rednose) from install dependencies. #1079 | ||||||
|  | - Recursively build a query when using the ``elemMatch`` operator. #1130 | ||||||
|  | - Fix instance back references for lists of embedded documents. #1131 | ||||||
|  |  | ||||||
|  | Changes in 0.10.0 | ||||||
|  | ================= | ||||||
|  | - Django support was removed and will be available as a separate extension. #958 | ||||||
|  | - Allow to load undeclared field with meta attribute 'strict': False #957 | ||||||
|  | - Support for PyMongo 3+ #946 | ||||||
|  | - Removed get_or_create() deprecated since 0.8.0. #300 | ||||||
|  | - Improve Document._created status when switch collection and db #1020 | ||||||
|  | - Queryset update doesn't go through field validation #453 | ||||||
|  | - Added support for specifying authentication source as option ``authSource`` in URI. #967 | ||||||
|  | - Fixed mark_as_changed to handle higher/lower level fields changed. #927 | ||||||
|  | - ListField of embedded docs doesn't set the _instance attribute when iterating over it #914 | ||||||
|  | - Support += and *= for ListField #595 | ||||||
|  | - Use sets for populating dbrefs to dereference | ||||||
|  | - Fixed unpickled documents replacing the global field's list. #888 | ||||||
|  | - Fixed storage of microseconds in ComplexDateTimeField and unused separator option. #910 | ||||||
|  | - Don't send a "cls" option to ensureIndex (related to https://jira.mongodb.org/browse/SERVER-769) | ||||||
|  | - Fix for updating sorting in SortedListField. #978 | ||||||
|  | - Added __ support to escape field name in fields lookup keywords that match operators names #949 | ||||||
|  | - Fix for issue where FileField deletion did not free space in GridFS. | ||||||
|  | - No_dereference() not respected on embedded docs containing reference. #517 | ||||||
|  | - Document save raise an exception if save_condition fails #1005 | ||||||
|  | - Fixes some internal _id handling issue. #961 | ||||||
|  | - Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652 | ||||||
|  | - Capped collection multiple of 256. #1011 | ||||||
|  | - Added ``BaseQuerySet.aggregate_sum`` and ``BaseQuerySet.aggregate_average`` methods. | ||||||
|  | - Fix for delete with write_concern {'w': 0}. #1008 | ||||||
|  | - Allow dynamic lookup for more than two parts. #882 | ||||||
|  | - Added support for min_distance on geo queries. #831 | ||||||
|  | - Allow to add custom metadata to fields #705 | ||||||
|  |  | ||||||
|  | Changes in 0.9.0 | ||||||
|  | ================ | ||||||
|  | - Update FileField when creating a new file #714 | ||||||
|  | - Added ``EmbeddedDocumentListField`` for Lists of Embedded Documents. #826 | ||||||
|  | - ComplexDateTimeField should fall back to None when null=True #864 | ||||||
|  | - Request Support for $min, $max Field update operators #863 | ||||||
|  | - ``BaseDict`` does not follow ``setdefault`` #866 | ||||||
|  | - Add support for $type operator #766 | ||||||
|  | - Fix tests for pymongo 2.8+ #877 | ||||||
|  | - No module named 'django.utils.importlib' (Django dev) #872 | ||||||
|  | - Field Choices Now Accept Subclasses of Documents | ||||||
|  | - Ensure Indexes before Each Save #812 | ||||||
|  | - Generate Unique Indices for Lists of EmbeddedDocuments #358 | ||||||
|  | - Sparse fields #515 | ||||||
|  | - write_concern not in params of Collection#remove #801 | ||||||
|  | - Better BaseDocument equality check when not saved #798 | ||||||
|  | - OperationError: Shard Keys are immutable. Tried to update id even though the document is not yet saved #771 | ||||||
|  | - with_limit_and_skip for count should default like in pymongo #759 | ||||||
|  | - Fix storing value of precision attribute in DecimalField #787 | ||||||
|  | - Set attribute to None does not work (at least for fields with default values) #734 | ||||||
|  | - Querying by a field defined in a subclass raises InvalidQueryError #744 | ||||||
|  | - Add Support For MongoDB 2.6.X's maxTimeMS #778 | ||||||
|  | - abstract shouldn't be inherited in EmbeddedDocument #789 | ||||||
|  | - Allow specifying the '_cls' as a field for indexes #397 | ||||||
|  | - Stop ensure_indexes running on a secondaries unless connection is through mongos #746 | ||||||
|  | - Not overriding default values when loading a subset of fields #399 | ||||||
|  | - Saving document doesn't create new fields in existing collection #620 | ||||||
|  | - Added ``Queryset.aggregate`` wrapper to aggregation framework #703 | ||||||
|  | - Added support to show original model fields on to_json calls instead of db_field #697 | ||||||
|  | - Added Queryset.search_text to Text indexes searches #700 | ||||||
|  | - Fixed tests for Django 1.7 #696 | ||||||
|  | - Follow ReferenceFields in EmbeddedDocuments with select_related #690 | ||||||
|  | - Added preliminary support for text indexes #680 | ||||||
|  | - Added ``elemMatch`` operator as well - ``match`` is too obscure #653 | ||||||
|  | - Added support for progressive JPEG #486 #548 | ||||||
|  | - Allow strings to be used in index creation #675 | ||||||
|  | - Fixed EmbeddedDoc weakref proxy issue #592 | ||||||
|  | - Fixed nested reference field distinct error #583 | ||||||
|  | - Fixed change tracking on nested MapFields #539 | ||||||
|  | - Dynamic fields in embedded documents now visible to queryset.only() / qs.exclude() #425 #507 | ||||||
|  | - Add authentication_source option to register_connection #178 #464 #573 #580 #590 | ||||||
|  | - Implemented equality between Documents and DBRefs #597 | ||||||
|  | - Fixed ReferenceField inside nested ListFields dereferencing problem #368 | ||||||
|  | - Added the ability to reload specific document fields #100 | ||||||
|  | - Added db_alias support and fixes for custom map/reduce output #586 | ||||||
|  | - post_save signal now has access to delta information about field changes #594 #589 | ||||||
|  | - Don't query with $orderby for qs.get() #600 | ||||||
|  | - Fix id shard key save issue #636 | ||||||
|  | - Fixes issue with recursive embedded document errors #557 | ||||||
|  | - Fix clear_changed_fields() clearing unsaved documents bug #602 | ||||||
|  | - Removing support for Django 1.4.x, pymongo 2.5.x, pymongo 2.6.x. | ||||||
|  | - Removing support for Python < 2.6.6 | ||||||
|  | - Fixed $maxDistance location for geoJSON $near queries with MongoDB 2.6+ #664 | ||||||
|  | - QuerySet.modify() and Document.modify() methods to provide find_and_modify() like behaviour #677 #773 | ||||||
|  | - Added support for the using() method on a queryset #676 | ||||||
|  | - PYPY support #673 | ||||||
|  | - Connection pooling #674 | ||||||
|  | - Avoid to open all documents from cursors in an if stmt #655 | ||||||
|  | - Ability to clear the ordering #657 | ||||||
|  | - Raise NotUniqueError in Document.update() on pymongo.errors.DuplicateKeyError #626 | ||||||
|  | - Slots - memory improvements #625 | ||||||
|  | - Fixed incorrectly split a query key when it ends with "_" #619 | ||||||
|  | - Geo docs updates #613 | ||||||
|  | - Workaround a dateutil bug #608 | ||||||
|  | - Conditional save for atomic-style operations #511 | ||||||
|  | - Allow dynamic dictionary-style field access #559 | ||||||
|  | - Increase email field length to accommodate new TLDs #726 | ||||||
|  | - index_cls is ignored when deciding to set _cls as index prefix #733 | ||||||
|  | - Make 'db' argument to connection optional #737 | ||||||
|  | - Allow atomic update for the entire ``DictField`` #742 | ||||||
|  | - Added MultiPointField, MultiLineField, MultiPolygonField | ||||||
|  | - Fix multiple connections aliases being rewritten #748 | ||||||
|  | - Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791 | ||||||
|  | - Make ``in_bulk()`` respect ``no_dereference()`` #775 | ||||||
|  | - Handle None from model __str__; Fixes #753 #754 | ||||||
|  | - _get_changed_fields fix for embedded documents with id field. #925 | ||||||
|  |  | ||||||
|  | Changes in 0.8.7 | ||||||
|  | ================ | ||||||
|  | - Calling reload on deleted / nonexistent documents raises DoesNotExist (#538) | ||||||
|  | - Stop ensure_indexes running on a secondaries (#555) | ||||||
|  | - Fix circular import issue with django auth (#531) (#545) | ||||||
|  |  | ||||||
|  | Changes in 0.8.6 | ||||||
|  | ================ | ||||||
|  | - Fix django auth import (#531) | ||||||
|  |  | ||||||
|  | Changes in 0.8.5 | ||||||
|  | ================ | ||||||
|  | - Fix multi level nested fields getting marked as changed (#523) | ||||||
|  | - Django 1.6 login fix (#522) (#527) | ||||||
|  | - Django 1.6 session fix (#509) | ||||||
|  | - EmbeddedDocument._instance is now set when setting the attribute (#506) | ||||||
|  | - Fixed EmbeddedDocument with ReferenceField equality issue (#502) | ||||||
|  | - Fixed GenericReferenceField serialization order (#499) | ||||||
|  | - Fixed count and none bug (#498) | ||||||
|  | - Fixed bug with .only() and DictField with digit keys (#496) | ||||||
|  | - Added user_permissions to Django User object (#491, #492) | ||||||
|  | - Fix updating Geo Location fields (#488) | ||||||
|  | - Fix handling invalid dict field value (#485) | ||||||
|  | - Added app_label to MongoUser (#484) | ||||||
|  | - Use defaults when host and port are passed as None (#483) | ||||||
|  | - Fixed distinct casting issue with ListField of EmbeddedDocuments (#470) | ||||||
|  | - Fixed Django 1.6 sessions (#454, #480) | ||||||
|  |  | ||||||
|  | Changes in 0.8.4 | ||||||
|  | ================ | ||||||
|  | - Remove database name necessity in uri connection schema (#452) | ||||||
|  | - Fixed "$pull" semantics for nested ListFields (#447) | ||||||
|  | - Allow fields to be named the same as query operators (#445) | ||||||
|  | - Updated field filter logic - can now exclude subclass fields (#443) | ||||||
|  | - Fixed dereference issue with embedded listfield referencefields (#439) | ||||||
|  | - Fixed slice when using inheritance causing fields to be excluded (#437) | ||||||
|  | - Fixed ._get_db() attribute after a Document.switch_db() (#441) | ||||||
|  | - Dynamic Fields store and recompose Embedded Documents / Documents correctly (#449) | ||||||
|  | - Handle dynamic fieldnames that look like digits (#434) | ||||||
|  | - Added get_user_document and improve mongo_auth module (#423) | ||||||
|  | - Added str representation of GridFSProxy (#424) | ||||||
|  | - Update transform to handle docs erroneously passed to unset (#416) | ||||||
|  | - Fixed indexing - turn off _cls (#414) | ||||||
|  | - Fixed dereference threading issue in ComplexField.__get__ (#412) | ||||||
|  | - Fixed QuerySetNoCache.count() caching (#410) | ||||||
|  | - Don't follow references in _get_changed_fields (#422, #417) | ||||||
|  | - Allow args and kwargs to be passed through to_json (#420) | ||||||
|  |  | ||||||
|  | Changes in 0.8.3 | ||||||
|  | ================ | ||||||
|  | - Fixed EmbeddedDocuments with ``id`` also storing ``_id`` (#402) | ||||||
|  | - Added get_proxy_object helper to filefields (#391) | ||||||
|  | - Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365) | ||||||
|  | - Fixed sum and average mapreduce dot notation support (#375, #376, #393) | ||||||
|  | - Fixed as_pymongo to return the id (#386) | ||||||
|  | - Document.select_related() now respects ``db_alias`` (#377) | ||||||
|  | - Reload uses shard_key if applicable (#384) | ||||||
|  | - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) | ||||||
|  | - Fixed pickling dynamic documents ``_dynamic_fields`` (#387) | ||||||
|  | - Fixed ListField setslice and delslice dirty tracking (#390) | ||||||
|  | - Added Django 1.5 PY3 support (#392) | ||||||
|  | - Added match ($elemMatch) support for EmbeddedDocuments (#379) | ||||||
|  | - Fixed weakref being valid after reload (#374) | ||||||
|  | - Fixed queryset.get() respecting no_dereference (#373) | ||||||
|  | - Added full_result kwarg to update (#380) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Changes in 0.8.2 | ||||||
|  | ================ | ||||||
|  | - Added compare_indexes helper (#361) | ||||||
|  | - Fixed cascading saves which weren't turned off as planned (#291) | ||||||
|  | - Fixed Datastructures so instances are a Document or EmbeddedDocument (#363) | ||||||
|  | - Improved cascading saves write performance (#361) | ||||||
|  | - Fixed ambiguity and differing behaviour regarding field defaults (#349) | ||||||
|  | - ImageFields now include PIL error messages if invalid error (#353) | ||||||
|  | - Added lock when calling doc.Delete() for when signals have no sender (#350) | ||||||
|  | - Reload forces read preference to be PRIMARY (#355) | ||||||
|  | - Querysets are now less restrictive when querying duplicate fields (#332, #333) | ||||||
|  | - FileField now honouring db_alias (#341) | ||||||
|  | - Removed customised __set__ change tracking in ComplexBaseField (#344) | ||||||
|  | - Removed unused var in _get_changed_fields (#347) | ||||||
|  | - Added pre_save_post_validation signal (#345) | ||||||
|  | - DateTimeField now auto converts valid datetime isostrings into dates (#343) | ||||||
|  | - DateTimeField now uses dateutil for parsing if available (#343) | ||||||
|  | - Fixed Doc.objects(read_preference=X) not setting read preference (#352) | ||||||
|  | - Django session ttl index expiry fixed (#329) | ||||||
|  | - Fixed pickle.loads (#342) | ||||||
|  | - Documentation fixes | ||||||
|  |  | ||||||
|  | Changes in 0.8.1 | ||||||
|  | ================ | ||||||
|  | - Fixed Python 2.6 django auth importlib issue (#326) | ||||||
|  | - Fixed pickle unsaved document regression (#327) | ||||||
|  |  | ||||||
|  | Changes in 0.8.0 | ||||||
|  | ================ | ||||||
|  | - Fixed querying ReferenceField custom_id (#317) | ||||||
|  | - Fixed pickle issues with collections (#316) | ||||||
|  | - Added ``get_next_value`` preview for SequenceFields (#319) | ||||||
|  | - Added no_sub_classes context manager and queryset helper (#312) | ||||||
|  | - Querysets now utilises a local cache | ||||||
|  | - Changed __len__ behaviour in the queryset (#247, #311) | ||||||
|  | - Fixed querying string versions of ObjectIds issue with ReferenceField (#307) | ||||||
|  | - Added $setOnInsert support for upserts (#308) | ||||||
|  | - Upserts now possible with just query parameters (#309) | ||||||
|  | - Upserting is the only way to ensure docs are saved correctly (#306) | ||||||
|  | - Fixed register_delete_rule inheritance issue | ||||||
|  | - Fix cloning of sliced querysets (#303) | ||||||
|  | - Fixed update_one write concern (#302) | ||||||
|  | - Updated minimum requirement for pymongo to 2.5 | ||||||
|  | - Add support for new geojson fields, indexes and queries (#299) | ||||||
|  | - If values cant be compared mark as changed (#287) | ||||||
|  | - Ensure as_pymongo() and to_json honour only() and exclude() (#293) | ||||||
|  | - Document serialization uses field order to ensure a strict order is set (#296) | ||||||
|  | - DecimalField now stores as float not string (#289) | ||||||
|  | - UUIDField now stores as a binary by default (#292) | ||||||
|  | - Added Custom User Model for Django 1.5 (#285) | ||||||
|  | - Cascading saves now default to off (#291) | ||||||
|  | - ReferenceField now store ObjectId's by default rather than DBRef (#290) | ||||||
|  | - Added ImageField support for inline replacements (#86) | ||||||
|  | - Added SequenceField.set_next_value(value) helper (#159) | ||||||
|  | - Updated .only() behaviour - now like exclude it is chainable (#202) | ||||||
|  | - Added with_limit_and_skip support to count() (#235) | ||||||
|  | - Objects queryset manager now inherited (#256) | ||||||
|  | - Updated connection to use MongoClient (#262, #274) | ||||||
|  | - Fixed db_alias and inherited Documents (#143) | ||||||
|  | - Documentation update for document errors (#124) | ||||||
|  | - Deprecated ``get_or_create`` (#35) | ||||||
|  | - Updated inheritable objects created by upsert now contain _cls (#118) | ||||||
|  | - Added support for creating documents with embedded documents in a single operation (#6) | ||||||
|  | - Added to_json and from_json to Document (#1) | ||||||
|  | - Added to_json and from_json to QuerySet (#131) | ||||||
|  | - Updated index creation now tied to Document class (#102) | ||||||
|  | - Added none() to queryset (#127) | ||||||
|  | - Updated SequenceFields to allow post processing of the calculated counter value (#141) | ||||||
|  | - Added clean method to documents for pre validation data cleaning (#60) | ||||||
|  | - Added support setting for read preference at a query level (#157) | ||||||
|  | - Added _instance to EmbeddedDocuments pointing to the parent (#139) | ||||||
|  | - Inheritance is off by default (#122) | ||||||
|  | - Remove _types and just use _cls for inheritance (#148) | ||||||
|  | - Only allow QNode instances to be passed as query objects (#199) | ||||||
|  | - Dynamic fields are now validated on save (#153) (#154) | ||||||
|  | - Added support for multiple slices and made slicing chainable. (#170) (#190) (#191) | ||||||
|  | - Fixed GridFSProxy __getattr__ behaviour (#196) | ||||||
|  | - Fix Django timezone support (#151) | ||||||
|  | - Simplified Q objects, removed QueryTreeTransformerVisitor (#98) (#171) | ||||||
|  | - FileFields now copyable (#198) | ||||||
|  | - Querysets now return clones and are no longer edit in place (#56) | ||||||
|  | - Added support for $maxDistance (#179) | ||||||
|  | - Uses getlasterror to test created on updated saves (#163) | ||||||
|  | - Fixed inheritance and unique index creation (#140) | ||||||
|  | - Fixed reverse delete rule with inheritance (#197) | ||||||
|  | - Fixed validation for GenericReferences which haven't been dereferenced | ||||||
|  | - Added switch_db context manager (#106) | ||||||
|  | - Added switch_db method to document instances (#106) | ||||||
|  | - Added no_dereference context manager (#82) (#61) | ||||||
|  | - Added switch_collection context manager (#220) | ||||||
|  | - Added switch_collection method to document instances (#220) | ||||||
|  | - Added support for compound primary keys (#149) (#121) | ||||||
|  | - Fixed overriding objects with custom manager (#58) | ||||||
|  | - Added no_dereference method for querysets (#82) (#61) | ||||||
|  | - Undefined data should not override instance methods (#49) | ||||||
|  | - Added Django Group and Permission (#142) | ||||||
|  | - Added Doc class and pk to Validation messages (#69) | ||||||
|  | - Fixed Documents deleted via a queryset don't call any signals (#105) | ||||||
|  | - Added the "get_decoded" method to the MongoSession class (#216) | ||||||
|  | - Fixed invalid choices error bubbling (#214) | ||||||
|  | - Updated Save so it calls $set and $unset in a single operation (#211) | ||||||
|  | - Fixed inner queryset looping (#204) | ||||||
|  |  | ||||||
|  | Changes in 0.7.10 | ||||||
|  | ================= | ||||||
|  | - Fix UnicodeEncodeError for dbref (#278) | ||||||
|  | - Allow construction using positional parameters (#268) | ||||||
|  | - Updated EmailField length to support long domains (#243) | ||||||
|  | - Added 64-bit integer support (#251) | ||||||
|  | - Added Django sessions TTL support (#224) | ||||||
|  | - Fixed issue with numerical keys in MapField(EmbeddedDocumentField()) (#240) | ||||||
|  | - Fixed clearing _changed_fields for complex nested embedded documents (#237, #239, #242) | ||||||
|  | - Added "id" back to _data dictionary (#255) | ||||||
|  | - Only mark a field as changed if the value has changed (#258) | ||||||
|  | - Explicitly check for Document instances when dereferencing (#261) | ||||||
|  | - Fixed order_by chaining issue (#265) | ||||||
|  | - Added dereference support for tuples (#250) | ||||||
|  | - Resolve field name to db field name when using distinct (#260, #264, #269) | ||||||
|  | - Added kwargs to doc.save to help interop with django (#223, #270) | ||||||
|  | - Fixed cloning querysets in PY3 | ||||||
|  | - Int fields no longer unset in save when changed to 0 (#272) | ||||||
|  | - Fixed ReferenceField query chaining bug fixed (#254) | ||||||
|  |  | ||||||
|  | Changes in 0.7.9 | ||||||
|  | ================ | ||||||
|  | - Better fix handling for old style _types | ||||||
|  | - Embedded SequenceFields follow collection naming convention | ||||||
|  |  | ||||||
|  | Changes in 0.7.8 | ||||||
|  | ================ | ||||||
|  | - Fix sequence fields in embedded documents (#166) | ||||||
|  | - Fix query chaining with .order_by() (#176) | ||||||
|  | - Added optional encoding and collection config for Django sessions (#180, #181, #183) | ||||||
|  | - Fixed EmailField so can add extra validation (#173, #174, #187) | ||||||
|  | - Fixed bulk inserts can now handle custom pk's (#192) | ||||||
|  | - Added as_pymongo method to return raw or cast results from pymongo (#193) | ||||||
|  |  | ||||||
|  | Changes in 0.7.7 | ||||||
|  | ================ | ||||||
|  | - Fix handling for old style _types | ||||||
|  |  | ||||||
|  | Changes in 0.7.6 | ||||||
|  | ================ | ||||||
|  | - Unicode fix for repr (#133) | ||||||
|  | - Allow updates with match operators (#144) | ||||||
|  | - Updated URLField - can now override the regex (#136) | ||||||
|  | - Allow Django AuthenticationBackends to work with Django user (hmarr/mongoengine#573) | ||||||
|  | - Fixed reload issue with ReferenceField where dbref=False (#138) | ||||||
|  |  | ||||||
|  | Changes in 0.7.5 | ||||||
|  | ================ | ||||||
|  | - ReferenceFields with dbref=False use ObjectId instead of strings (#134) | ||||||
|  |   See ticket for upgrade notes (#134) | ||||||
|  |  | ||||||
|  | Changes in 0.7.4 | ||||||
|  | ================ | ||||||
|  | - Fixed index inheritance issues - firmed up testcases (#123) (#125) | ||||||
|  |  | ||||||
|  | Changes in 0.7.3 | ||||||
|  | ================ | ||||||
|  | - Reverted EmbeddedDocuments meta handling - now can turn off inheritance (#119) | ||||||
|  |  | ||||||
|  | Changes in 0.7.2 | ||||||
|  | ================ | ||||||
|  | - Update index spec generation so its not destructive (#113) | ||||||
|  |  | ||||||
|  | Changes in 0.7.1 | ||||||
|  | ================ | ||||||
|  | - Fixed index spec inheritance (#111) | ||||||
|  |  | ||||||
|  | Changes in 0.7.0 | ||||||
|  | ================ | ||||||
|  | - Updated queryset.delete so you can use with skip / limit (#107) | ||||||
|  | - Updated index creation allows kwargs to be passed through refs (#104) | ||||||
|  | - Fixed Q object merge edge case (#109) | ||||||
|  | - Fixed reloading on sharded documents (hmarr/mongoengine#569) | ||||||
|  | - Added NotUniqueError for duplicate keys (#62) | ||||||
|  | - Added custom collection / sequence naming for SequenceFields (#92) | ||||||
|  | - Fixed UnboundLocalError in composite index with pk field (#88) | ||||||
|  | - Updated ReferenceField's to optionally store ObjectId strings | ||||||
|  |   this will become the default in 0.8 (#89) | ||||||
|  | - Added FutureWarning - save will default to ``cascade=False`` in 0.8 | ||||||
|  | - Added example of indexing embedded document fields (#75) | ||||||
|  | - Fixed ImageField resizing when forcing size (#80) | ||||||
|  | - Add flexibility for fields handling bad data (#78) | ||||||
|  | - Embedded Documents no longer handle meta definitions | ||||||
|  | - Use weakref proxies in base lists / dicts (#74) | ||||||
|  | - Improved queryset filtering (hmarr/mongoengine#554) | ||||||
|  | - Fixed Dynamic Documents and Embedded Documents (hmarr/mongoengine#561) | ||||||
|  | - Fixed abstract classes and shard keys (#64) | ||||||
|  | - Fixed Python 2.5 support | ||||||
|  | - Added Python 3 support (thanks to Laine Heron) | ||||||
|  |  | ||||||
|  | Changes in 0.6.20 | ||||||
|  | ================= | ||||||
|  | - Added support for distinct and db_alias (#59) | ||||||
|  | - Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554) | ||||||
|  | - Fixed BinaryField lookup re (#48) | ||||||
|  |  | ||||||
|  | Changes in 0.6.19 | ||||||
|  | ================= | ||||||
|  |  | ||||||
|  | - Added Binary support to UUID (#47) | ||||||
|  | - Fixed MapField lookup for fields without declared lookups (#46) | ||||||
|  | - Fixed BinaryField python value issue (#48) | ||||||
|  | - Fixed SequenceField non numeric value lookup (#41) | ||||||
|  | - Fixed queryset manager issue (#52) | ||||||
|  | - Fixed FileField comparison (hmarr/mongoengine#547) | ||||||
|  |  | ||||||
|  | Changes in 0.6.18 | ||||||
|  | ================= | ||||||
|  | - Fixed recursion loading bug in _get_changed_fields | ||||||
|  |  | ||||||
|  | Changes in 0.6.17 | ||||||
|  | ================= | ||||||
|  | - Fixed issue with custom queryset manager expecting explicit variable names | ||||||
|  |  | ||||||
|  | Changes in 0.6.16 | ||||||
|  | ================= | ||||||
|  | - Fixed issue where db_alias wasn't inherited | ||||||
|  |  | ||||||
|  | Changes in 0.6.15 | ||||||
|  | ================= | ||||||
|  | - Updated validation error messages | ||||||
|  | - Added support for null / zero / false values in item_frequencies | ||||||
|  | - Fixed cascade save edge case | ||||||
|  | - Fixed geo index creation through reference fields | ||||||
|  | - Added support for args / kwargs when using @queryset_manager | ||||||
|  | - Deref list custom id fix | ||||||
|  |  | ||||||
|  | Changes in 0.6.14 | ||||||
|  | ================= | ||||||
|  | - Fixed error dict with nested validation | ||||||
|  | - Fixed Int/Float fields and not equals None | ||||||
|  | - Exclude tests from installation | ||||||
|  | - Allow tuples for index meta | ||||||
|  | - Fixed use of str in instance checks | ||||||
|  | - Fixed unicode support in transform update | ||||||
|  | - Added support for add_to_set and each | ||||||
|  |  | ||||||
|  | Changes in 0.6.13 | ||||||
|  | ================= | ||||||
|  | - Fixed EmbeddedDocument db_field validation issue | ||||||
|  | - Fixed StringField unicode issue | ||||||
|  | - Fixes __repr__ modifying the cursor | ||||||
|  |  | ||||||
|  | Changes in 0.6.12 | ||||||
|  | ================= | ||||||
|  | - Fixes scalar lookups for primary_key | ||||||
|  | - Fixes error with _delta handling DBRefs | ||||||
|  |  | ||||||
|  | Changes in 0.6.11 | ||||||
|  | ================= | ||||||
|  | - Fixed inconsistency handling None values field attrs | ||||||
|  | - Fixed map_field embedded db_field issue | ||||||
|  | - Fixed .save() _delta issue with DbRefs | ||||||
|  | - Fixed Django TestCase | ||||||
|  | - Added cmp to Embedded Document | ||||||
|  | - Added PULL reverse_delete_rule | ||||||
|  | - Fixed CASCADE delete bug | ||||||
|  | - Fixed db_field data load error | ||||||
|  | - Fixed recursive save with FileField | ||||||
|  |  | ||||||
|  | Changes in 0.6.10 | ||||||
|  | ================= | ||||||
|  | - Fixed basedict / baselist to return super(..) | ||||||
|  | - Promoted BaseDynamicField to DynamicField | ||||||
|  |  | ||||||
|  | Changes in 0.6.9 | ||||||
|  | ================ | ||||||
|  | - Fixed sparse indexes on inherited docs | ||||||
|  | - Removed FileField auto deletion, needs more work maybe 0.7 | ||||||
|  |  | ||||||
|  | Changes in 0.6.8 | ||||||
|  | ================ | ||||||
|  | - Fixed FileField losing reference when no default set | ||||||
|  | - Removed possible race condition from FileField (grid_file) | ||||||
|  | - Added assignment to save, can now do: ``b = MyDoc(**kwargs).save()`` | ||||||
|  | - Added support for pull operations on nested EmbeddedDocuments | ||||||
|  | - Added support for choices with GenericReferenceFields | ||||||
|  | - Added support for choices with GenericEmbeddedDocumentFields | ||||||
|  | - Fixed Django 1.4 sessions first save data loss | ||||||
|  | - FileField now automatically delete files on .delete() | ||||||
|  | - Fix for GenericReference to_mongo method | ||||||
|  | - Fixed connection regression | ||||||
|  | - Updated Django User document, now allows inheritance | ||||||
|  |  | ||||||
|  | Changes in 0.6.7 | ||||||
|  | ================ | ||||||
|  | - Fixed indexing on '_id' or 'pk' or 'id' | ||||||
|  | - Invalid data from the DB now raises a InvalidDocumentError | ||||||
|  | - Cleaned up the Validation Error - docs and code | ||||||
|  | - Added meta ``auto_create_index`` so you can disable index creation | ||||||
|  | - Added write concern options to inserts | ||||||
|  | - Fixed typo in meta for index options | ||||||
|  | - Bug fix Read preference now passed correctly | ||||||
|  | - Added support for File like objects for GridFS | ||||||
|  | - Fix for #473 - Dereferencing abstracts | ||||||
|  |  | ||||||
|  | Changes in 0.6.6 | ||||||
|  | ================ | ||||||
|  | - Django 1.4 fixed (finally) | ||||||
|  | - Added tests for Django | ||||||
|  |  | ||||||
|  | Changes in 0.6.5 | ||||||
|  | ================ | ||||||
|  | - More Django updates | ||||||
|  |  | ||||||
|  | Changes in 0.6.4 | ||||||
| ================ | ================ | ||||||
|  |  | ||||||
|  | - Refactored connection / fixed replicasetconnection | ||||||
|  | - Bug fix for unknown connection alias error message | ||||||
|  | - Sessions support Django 1.3 and Django 1.4 | ||||||
|  | - Minor fix for ReferenceField | ||||||
|  |  | ||||||
|  | Changes in 0.6.3 | ||||||
|  | ================ | ||||||
|  | - Updated sessions for Django 1.4 | ||||||
|  | - Bug fix for updates where listfields contain embedded documents | ||||||
|  | - Bug fix for collection naming and mixins | ||||||
|  |  | ||||||
|  | Changes in 0.6.2 | ||||||
|  | ================ | ||||||
|  | - Updated documentation for ReplicaSet connections | ||||||
|  | - Hack round _types issue with SERVER-5247 - querying other arrays may also cause problems. | ||||||
|  |  | ||||||
|  | Changes in 0.6.1 | ||||||
|  | ================ | ||||||
|  | - Fix for replicaSet connections | ||||||
|  |  | ||||||
|  | Changes in 0.6 | ||||||
|  | ============== | ||||||
| - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7 | - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7 | ||||||
| - Added support for covered indexes when inheritance is off | - Added support for covered indexes when inheritance is off | ||||||
| - No longer always upsert on save for items with a '_id' | - No longer always upsert on save for items with a '_id' | ||||||
| @@ -91,8 +896,8 @@ Changes in v0.5 | |||||||
| - Updated default collection naming convention | - Updated default collection naming convention | ||||||
| - Added Document Mixin support | - Added Document Mixin support | ||||||
| - Fixed queryet __repr__ mid iteration | - Fixed queryet __repr__ mid iteration | ||||||
| - Added hint() support, so cantell Mongo the proper index to use for the query | - Added hint() support, so can tell Mongo the proper index to use for the query | ||||||
| - Fixed issue with inconsitent setting of _cls breaking inherited referencing | - Fixed issue with inconsistent setting of _cls breaking inherited referencing | ||||||
| - Added help_text and verbose_name to fields to help with some form libs | - Added help_text and verbose_name to fields to help with some form libs | ||||||
| - Updated item_frequencies to handle embedded document lookups | - Updated item_frequencies to handle embedded document lookups | ||||||
| - Added delta tracking now only sets / unsets explicitly changed fields | - Added delta tracking now only sets / unsets explicitly changed fields | ||||||
| @@ -229,7 +1034,6 @@ Changes in v0.1.3 | |||||||
|   querying takes place |   querying takes place | ||||||
| - A few minor bugfixes | - A few minor bugfixes | ||||||
|  |  | ||||||
|  |  | ||||||
| Changes in v0.1.2 | Changes in v0.1.2 | ||||||
| ================= | ================= | ||||||
| - Query values may be processed before before being used in queries | - Query values may be processed before before being used in queries | ||||||
| @@ -238,7 +1042,6 @@ Changes in v0.1.2 | |||||||
| - Added ``BooleanField`` | - Added ``BooleanField`` | ||||||
| - Added ``Document.reload()`` method | - Added ``Document.reload()`` method | ||||||
|  |  | ||||||
|  |  | ||||||
| Changes in v0.1.1 | Changes in v0.1.1 | ||||||
| ================= | ================= | ||||||
| - Documents may now use capped collections | - Documents may now use capped collections | ||||||
|   | |||||||
| @@ -1,66 +1,77 @@ | |||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  |  | ||||||
| connect('tumblelog') | connect("tumblelog") | ||||||
|  |  | ||||||
|  |  | ||||||
| class Comment(EmbeddedDocument): | class Comment(EmbeddedDocument): | ||||||
|     content = StringField() |     content = StringField() | ||||||
|     name = StringField(max_length=120) |     name = StringField(max_length=120) | ||||||
|  |  | ||||||
|  |  | ||||||
| class User(Document): | class User(Document): | ||||||
|     email = StringField(required=True) |     email = StringField(required=True) | ||||||
|     first_name = StringField(max_length=50) |     first_name = StringField(max_length=50) | ||||||
|     last_name = StringField(max_length=50) |     last_name = StringField(max_length=50) | ||||||
|  |  | ||||||
|  |  | ||||||
| class Post(Document): | class Post(Document): | ||||||
|     title = StringField(max_length=120, required=True) |     title = StringField(max_length=120, required=True) | ||||||
|     author = ReferenceField(User) |     author = ReferenceField(User) | ||||||
|     tags = ListField(StringField(max_length=30)) |     tags = ListField(StringField(max_length=30)) | ||||||
|     comments = ListField(EmbeddedDocumentField(Comment)) |     comments = ListField(EmbeddedDocumentField(Comment)) | ||||||
|  |  | ||||||
|  |     # bugfix | ||||||
|  |     meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |  | ||||||
| class TextPost(Post): | class TextPost(Post): | ||||||
|     content = StringField() |     content = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
| class ImagePost(Post): | class ImagePost(Post): | ||||||
|     image_path = StringField() |     image_path = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
| class LinkPost(Post): | class LinkPost(Post): | ||||||
|     link_url = StringField() |     link_url = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
| Post.drop_collection() | Post.drop_collection() | ||||||
|  |  | ||||||
| john = User(email='jdoe@example.com', first_name='John', last_name='Doe') | john = User(email="jdoe@example.com", first_name="John", last_name="Doe") | ||||||
| john.save() | john.save() | ||||||
|  |  | ||||||
| post1 = TextPost(title='Fun with MongoEngine', author=john) | post1 = TextPost(title="Fun with MongoEngine", author=john) | ||||||
| post1.content = 'Took a look at MongoEngine today, looks pretty cool.' | post1.content = "Took a look at MongoEngine today, looks pretty cool." | ||||||
| post1.tags = ['mongodb', 'mongoengine'] | post1.tags = ["mongodb", "mongoengine"] | ||||||
| post1.save() | post1.save() | ||||||
|  |  | ||||||
| post2 = LinkPost(title='MongoEngine Documentation', author=john) | post2 = LinkPost(title="MongoEngine Documentation", author=john) | ||||||
| post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs' | post2.link_url = "http://tractiondigital.com/labs/mongoengine/docs" | ||||||
| post2.tags = ['mongoengine'] | post2.tags = ["mongoengine"] | ||||||
| post2.save() | post2.save() | ||||||
|  |  | ||||||
| print 'ALL POSTS' | print("ALL POSTS") | ||||||
| print | print() | ||||||
| for post in Post.objects: | for post in Post.objects: | ||||||
|     print post.title |     print(post.title) | ||||||
|     print '=' * len(post.title) |     # print '=' * post.title.count() | ||||||
|  |     print("=" * 20) | ||||||
|  |  | ||||||
|     if isinstance(post, TextPost): |     if isinstance(post, TextPost): | ||||||
|         print post.content |         print(post.content) | ||||||
|  |  | ||||||
|     if isinstance(post, LinkPost): |     if isinstance(post, LinkPost): | ||||||
|         print 'Link:', post.link_url |         print("Link:", post.link_url) | ||||||
|  |  | ||||||
|     print |     print() | ||||||
| print | print() | ||||||
|  |  | ||||||
| print 'POSTS TAGGED \'MONGODB\'' | print("POSTS TAGGED 'MONGODB'") | ||||||
| print | print() | ||||||
| for post in Post.objects(tags='mongodb'): | for post in Post.objects(tags="mongodb"): | ||||||
|     print post.title |     print(post.title) | ||||||
| print | print() | ||||||
|  |  | ||||||
| num_posts = Post.objects(tags='mongodb').count() | num_posts = Post.objects(tags="mongodb").count() | ||||||
| print 'Found %d posts with tag "mongodb"' % num_posts | print('Found %d posts with tag "mongodb"' % num_posts) | ||||||
|   | |||||||
							
								
								
									
										106
									
								
								docs/conf.py
									
									
									
									
									
								
							
							
						
						
									
										106
									
								
								docs/conf.py
									
									
									
									
									
								
							| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| # | # | ||||||
| # MongoEngine documentation build configuration file, created by | # MongoEngine documentation build configuration file, created by | ||||||
| # sphinx-quickstart on Sun Nov 22 18:14:13 2009. | # sphinx-quickstart on Sun Nov 22 18:14:13 2009. | ||||||
| @@ -11,40 +10,44 @@ | |||||||
| # All configuration values have a default; values that are commented out | # All configuration values have a default; values that are commented out | ||||||
| # serve to show the default. | # serve to show the default. | ||||||
|  |  | ||||||
| import sys, os | import os | ||||||
|  | import sys | ||||||
|  |  | ||||||
|  | import sphinx_rtd_theme | ||||||
|  |  | ||||||
|  | import mongoengine | ||||||
|  |  | ||||||
| # If extensions (or modules to document with autodoc) are in another directory, | # If extensions (or modules to document with autodoc) are in another directory, | ||||||
| # add these directories to sys.path here. If the directory is relative to the | # add these directories to sys.path here. If the directory is relative to the | ||||||
| # documentation root, use os.path.abspath to make it absolute, like shown here. | # documentation root, use os.path.abspath to make it absolute, like shown here. | ||||||
| sys.path.append(os.path.abspath('..')) | sys.path.insert(0, os.path.abspath("..")) | ||||||
|  |  | ||||||
| # -- General configuration ----------------------------------------------------- | # -- General configuration ----------------------------------------------------- | ||||||
|  |  | ||||||
| # Add any Sphinx extension module names here, as strings. They can be extensions | # Add any Sphinx extension module names here, as strings. They can be extensions | ||||||
| # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. | ||||||
| extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo'] | extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo"] | ||||||
|  |  | ||||||
| # Add any paths that contain templates here, relative to this directory. | # Add any paths that contain templates here, relative to this directory. | ||||||
| templates_path = ['_templates'] | templates_path = ["_templates"] | ||||||
|  |  | ||||||
| # The suffix of source filenames. | # The suffix of source filenames. | ||||||
| source_suffix = '.rst' | source_suffix = ".rst" | ||||||
|  |  | ||||||
| # The encoding of source files. | # The encoding of source files. | ||||||
| #source_encoding = 'utf-8' | # source_encoding = 'utf-8' | ||||||
|  |  | ||||||
| # The master toctree document. | # The master toctree document. | ||||||
| master_doc = 'index' | master_doc = "index" | ||||||
|  |  | ||||||
| # General information about the project. | # General information about the project. | ||||||
| project = u'MongoEngine' | project = u"MongoEngine" | ||||||
| copyright = u'2009-2012, MongoEngine Authors' | copyright = u"2009, MongoEngine Authors" | ||||||
|  |  | ||||||
| # The version info for the project you're documenting, acts as replacement for | # The version info for the project you're documenting, acts as replacement for | ||||||
| # |version| and |release|, also used in various other places throughout the | # |version| and |release|, also used in various other places throughout the | ||||||
| # built documents. | # built documents. | ||||||
| # | # | ||||||
| import mongoengine |  | ||||||
| # The short X.Y version. | # The short X.Y version. | ||||||
| version = mongoengine.get_version() | version = mongoengine.get_version() | ||||||
| # The full version, including alpha/beta/rc tags. | # The full version, including alpha/beta/rc tags. | ||||||
| @@ -52,144 +55,149 @@ release = mongoengine.get_version() | |||||||
|  |  | ||||||
| # The language for content autogenerated by Sphinx. Refer to documentation | # The language for content autogenerated by Sphinx. Refer to documentation | ||||||
| # for a list of supported languages. | # for a list of supported languages. | ||||||
| #language = None | # language = None | ||||||
|  |  | ||||||
| # There are two options for replacing |today|: either, you set today to some | # There are two options for replacing |today|: either, you set today to some | ||||||
| # non-false value, then it is used: | # non-false value, then it is used: | ||||||
| #today = '' | # today = '' | ||||||
| # Else, today_fmt is used as the format for a strftime call. | # Else, today_fmt is used as the format for a strftime call. | ||||||
| #today_fmt = '%B %d, %Y' | # today_fmt = '%B %d, %Y' | ||||||
|  |  | ||||||
| # List of documents that shouldn't be included in the build. | # List of documents that shouldn't be included in the build. | ||||||
| #unused_docs = [] | # unused_docs = [] | ||||||
|  |  | ||||||
| # List of directories, relative to source directory, that shouldn't be searched | # List of directories, relative to source directory, that shouldn't be searched | ||||||
| # for source files. | # for source files. | ||||||
| exclude_trees = ['_build'] | exclude_trees = ["_build"] | ||||||
|  |  | ||||||
| # The reST default role (used for this markup: `text`) to use for all documents. | # The reST default role (used for this markup: `text`) to use for all documents. | ||||||
| #default_role = None | # default_role = None | ||||||
|  |  | ||||||
| # If true, '()' will be appended to :func: etc. cross-reference text. | # If true, '()' will be appended to :func: etc. cross-reference text. | ||||||
| #add_function_parentheses = True | # add_function_parentheses = True | ||||||
|  |  | ||||||
| # If true, the current module name will be prepended to all description | # If true, the current module name will be prepended to all description | ||||||
| # unit titles (such as .. function::). | # unit titles (such as .. function::). | ||||||
| #add_module_names = True | # add_module_names = True | ||||||
|  |  | ||||||
| # If true, sectionauthor and moduleauthor directives will be shown in the | # If true, sectionauthor and moduleauthor directives will be shown in the | ||||||
| # output. They are ignored by default. | # output. They are ignored by default. | ||||||
| #show_authors = False | # show_authors = False | ||||||
|  |  | ||||||
| # The name of the Pygments (syntax highlighting) style to use. | # The name of the Pygments (syntax highlighting) style to use. | ||||||
| pygments_style = 'sphinx' | pygments_style = "sphinx" | ||||||
|  |  | ||||||
| # A list of ignored prefixes for module index sorting. | # A list of ignored prefixes for module index sorting. | ||||||
| #modindex_common_prefix = [] | # modindex_common_prefix = [] | ||||||
|  |  | ||||||
|  |  | ||||||
| # -- Options for HTML output --------------------------------------------------- | # -- Options for HTML output --------------------------------------------------- | ||||||
|  |  | ||||||
| # The theme to use for HTML and HTML Help pages.  Major themes that come with | # The theme to use for HTML and HTML Help pages.  Major themes that come with | ||||||
| # Sphinx are currently 'default' and 'sphinxdoc'. | # Sphinx are currently 'default' and 'sphinxdoc'. | ||||||
| html_theme = 'nature' | html_theme = "sphinx_rtd_theme" | ||||||
|  |  | ||||||
| # Theme options are theme-specific and customize the look and feel of a theme | # Theme options are theme-specific and customize the look and feel of a theme | ||||||
| # further.  For a list of options available for each theme, see the | # further.  For a list of options available for each theme, see the | ||||||
| # documentation. | # documentation. | ||||||
| #html_theme_options = {} | html_theme_options = {"canonical_url": "http://docs.mongoengine.org/en/latest/"} | ||||||
|  |  | ||||||
| # Add any paths that contain custom themes here, relative to this directory. | # Add any paths that contain custom themes here, relative to this directory. | ||||||
| html_theme_path = ['_themes'] | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] | ||||||
|  |  | ||||||
| # The name for this set of Sphinx documents.  If None, it defaults to | # The name for this set of Sphinx documents.  If None, it defaults to | ||||||
| # "<project> v<release> documentation". | # "<project> v<release> documentation". | ||||||
| #html_title = None | # html_title = None | ||||||
|  |  | ||||||
| # A shorter title for the navigation bar.  Default is the same as html_title. | # A shorter title for the navigation bar.  Default is the same as html_title. | ||||||
| #html_short_title = None | # html_short_title = None | ||||||
|  |  | ||||||
| # The name of an image file (relative to this directory) to place at the top | # The name of an image file (relative to this directory) to place at the top | ||||||
| # of the sidebar. | # of the sidebar. | ||||||
| #html_logo = None | # html_logo = None | ||||||
|  |  | ||||||
| # The name of an image file (within the static path) to use as favicon of the | # The name of an image file (within the static path) to use as favicon of the | ||||||
| # docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32 | # docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32 | ||||||
| # pixels large. | # pixels large. | ||||||
| #html_favicon = None | html_favicon = "favicon.ico" | ||||||
|  |  | ||||||
| # Add any paths that contain custom static files (such as style sheets) here, | # Add any paths that contain custom static files (such as style sheets) here, | ||||||
| # relative to this directory. They are copied after the builtin static files, | # relative to this directory. They are copied after the builtin static files, | ||||||
| # so a file named "default.css" will overwrite the builtin "default.css". | # so a file named "default.css" will overwrite the builtin "default.css". | ||||||
| #html_static_path = ['_static'] | # html_static_path = ['_static'] | ||||||
|  |  | ||||||
| # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, | ||||||
| # using the given strftime format. | # using the given strftime format. | ||||||
| #html_last_updated_fmt = '%b %d, %Y' | # html_last_updated_fmt = '%b %d, %Y' | ||||||
|  |  | ||||||
| # If true, SmartyPants will be used to convert quotes and dashes to | # If true, SmartyPants will be used to convert quotes and dashes to | ||||||
| # typographically correct entities. | # typographically correct entities. | ||||||
| html_use_smartypants = True | html_use_smartypants = True | ||||||
|  |  | ||||||
| # Custom sidebar templates, maps document names to template names. | # Custom sidebar templates, maps document names to template names. | ||||||
| #html_sidebars = {} | html_sidebars = { | ||||||
|  |     "index": ["globaltoc.html", "searchbox.html"], | ||||||
|  |     "**": ["localtoc.html", "relations.html", "searchbox.html"], | ||||||
|  | } | ||||||
|  |  | ||||||
|  |  | ||||||
| # Additional templates that should be rendered to pages, maps page names to | # Additional templates that should be rendered to pages, maps page names to | ||||||
| # template names. | # template names. | ||||||
| #html_additional_pages = {} | # html_additional_pages = {} | ||||||
|  |  | ||||||
| # If false, no module index is generated. | # If false, no module index is generated. | ||||||
| #html_use_modindex = True | # html_use_modindex = True | ||||||
|  |  | ||||||
| # If false, no index is generated. | # If false, no index is generated. | ||||||
| #html_use_index = True | # html_use_index = True | ||||||
|  |  | ||||||
| # If true, the index is split into individual pages for each letter. | # If true, the index is split into individual pages for each letter. | ||||||
| #html_split_index = False | # html_split_index = False | ||||||
|  |  | ||||||
| # If true, links to the reST sources are added to the pages. | # If true, links to the reST sources are added to the pages. | ||||||
| #html_show_sourcelink = True | # html_show_sourcelink = True | ||||||
|  |  | ||||||
| # If true, an OpenSearch description file will be output, and all pages will | # If true, an OpenSearch description file will be output, and all pages will | ||||||
| # contain a <link> tag referring to it.  The value of this option must be the | # contain a <link> tag referring to it.  The value of this option must be the | ||||||
| # base URL from which the finished HTML is served. | # base URL from which the finished HTML is served. | ||||||
| #html_use_opensearch = '' | # html_use_opensearch = '' | ||||||
|  |  | ||||||
| # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). | # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). | ||||||
| #html_file_suffix = '' | # html_file_suffix = '' | ||||||
|  |  | ||||||
| # Output file base name for HTML help builder. | # Output file base name for HTML help builder. | ||||||
| htmlhelp_basename = 'MongoEnginedoc' | htmlhelp_basename = "MongoEnginedoc" | ||||||
|  |  | ||||||
|  |  | ||||||
| # -- Options for LaTeX output -------------------------------------------------- | # -- Options for LaTeX output -------------------------------------------------- | ||||||
|  |  | ||||||
| # The paper size ('letter' or 'a4'). | # The paper size ('letter' or 'a4'). | ||||||
| latex_paper_size = 'a4' | latex_paper_size = "a4" | ||||||
|  |  | ||||||
| # The font size ('10pt', '11pt' or '12pt'). | # The font size ('10pt', '11pt' or '12pt'). | ||||||
| #latex_font_size = '10pt' | # latex_font_size = '10pt' | ||||||
|  |  | ||||||
| # Grouping the document tree into LaTeX files. List of tuples | # Grouping the document tree into LaTeX files. List of tuples | ||||||
| # (source start file, target name, title, author, documentclass [howto/manual]). | # (source start file, target name, title, author, documentclass [howto/manual]). | ||||||
| latex_documents = [ | latex_documents = [ | ||||||
|   ('index', 'MongoEngine.tex', u'MongoEngine Documentation', |     ("index", "MongoEngine.tex", "MongoEngine Documentation", "Ross Lawley", "manual") | ||||||
|    u'Harry Marr', 'manual'), |  | ||||||
| ] | ] | ||||||
|  |  | ||||||
| # The name of an image file (relative to this directory) to place at the top of | # The name of an image file (relative to this directory) to place at the top of | ||||||
| # the title page. | # the title page. | ||||||
| #latex_logo = None | # latex_logo = None | ||||||
|  |  | ||||||
| # For "manual" documents, if this is true, then toplevel headings are parts, | # For "manual" documents, if this is true, then toplevel headings are parts, | ||||||
| # not chapters. | # not chapters. | ||||||
| #latex_use_parts = False | # latex_use_parts = False | ||||||
|  |  | ||||||
| # Additional stuff for the LaTeX preamble. | # Additional stuff for the LaTeX preamble. | ||||||
| #latex_preamble = '' | # latex_preamble = '' | ||||||
|  |  | ||||||
| # Documents to append as an appendix to all manuals. | # Documents to append as an appendix to all manuals. | ||||||
| #latex_appendices = [] | # latex_appendices = [] | ||||||
|  |  | ||||||
| # If false, no module index is generated. | # If false, no module index is generated. | ||||||
| #latex_use_modindex = True | # latex_use_modindex = True | ||||||
|  |  | ||||||
|  | autoclass_content = "both" | ||||||
|   | |||||||
| @@ -1,88 +1,19 @@ | |||||||
| ============================= |  | ||||||
| Using MongoEngine with Django |  | ||||||
| ============================= |  | ||||||
|  |  | ||||||
| Connecting |  | ||||||
| ========== |  | ||||||
| In your **settings.py** file, ignore the standard database settings (unless you |  | ||||||
| also plan to use the ORM in your project), and instead call  |  | ||||||
| :func:`~mongoengine.connect` somewhere in the settings module. |  | ||||||
|  |  | ||||||
| Authentication |  | ||||||
| ============== | ============== | ||||||
| MongoEngine includes a Django authentication backend, which uses MongoDB. The | Django Support | ||||||
| :class:`~mongoengine.django.auth.User` model is a MongoEngine  | ============== | ||||||
| :class:`~mongoengine.Document`, but implements most of the methods and  |  | ||||||
| attributes that the standard Django :class:`User` model does - so the two are |  | ||||||
| moderately compatible. Using this backend will allow you to store users in  |  | ||||||
| MongoDB but still use many of the Django authentication infrastucture (such as |  | ||||||
| the :func:`login_required` decorator and the :func:`authenticate` function). To |  | ||||||
| enable the MongoEngine auth backend, add the following to you **settings.py** |  | ||||||
| file:: |  | ||||||
|  |  | ||||||
|     AUTHENTICATION_BACKENDS = ( | .. note:: Django support has been split from the main MongoEngine | ||||||
|         'mongoengine.django.auth.MongoEngineBackend', |     repository. The *legacy* Django extension may be found bundled with the | ||||||
|     ) |     0.9 release of MongoEngine. | ||||||
|  |  | ||||||
| The :mod:`~mongoengine.django.auth` module also contains a  |  | ||||||
| :func:`~mongoengine.django.auth.get_user` helper function, that takes a user's |  | ||||||
| :attr:`id` and returns a :class:`~mongoengine.django.auth.User` object. |  | ||||||
|  |  | ||||||
| .. versionadded:: 0.1.3 |  | ||||||
|  |  | ||||||
| Sessions | Help Wanted! | ||||||
| ======== | ------------ | ||||||
| Django allows the use of different backend stores for its sessions. MongoEngine |  | ||||||
| provides a MongoDB-based session backend for Django, which allows you to use |  | ||||||
| sessions in you Django application with just MongoDB. To enable the MongoEngine |  | ||||||
| session backend, ensure that your settings module has |  | ||||||
| ``'django.contrib.sessions.middleware.SessionMiddleware'`` in the |  | ||||||
| ``MIDDLEWARE_CLASSES`` field  and ``'django.contrib.sessions'`` in your |  | ||||||
| ``INSTALLED_APPS``. From there, all you need to do is add the following line |  | ||||||
| into you settings module:: |  | ||||||
|  |  | ||||||
|     SESSION_ENGINE = 'mongoengine.django.sessions' | The MongoEngine team is looking for help contributing and maintaining a new | ||||||
|  | Django extension for MongoEngine! If you have Django experience and would like | ||||||
| .. versionadded:: 0.2.1 | to help contribute to the project, please get in touch on the | ||||||
|  | `mailing list <http://groups.google.com/group/mongoengine-users>`_ or by | ||||||
| Storage | simply contributing on | ||||||
| ======= | `GitHub <https://github.com/MongoEngine/django-mongoengine>`_. | ||||||
| With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`, |  | ||||||
| it is useful to have a Django file storage backend that wraps this. The new |  | ||||||
| storage module is called :class:`~mongoengine.django.storage.GridFSStorage`.  |  | ||||||
| Using it is very similar to using the default FileSystemStorage.:: |  | ||||||
|      |  | ||||||
|     from mongoengine.django.storage import GridFSStorage |  | ||||||
|     fs = GridFSStorage() |  | ||||||
|  |  | ||||||
|     filename = fs.save('hello.txt', 'Hello, World!') |  | ||||||
|  |  | ||||||
| All of the `Django Storage API methods |  | ||||||
| <http://docs.djangoproject.com/en/dev/ref/files/storage/>`_ have been |  | ||||||
| implemented except :func:`path`. If the filename provided already exists, an |  | ||||||
| underscore and a number (before # the file extension, if one exists) will be |  | ||||||
| appended to the filename until the generated filename doesn't exist. The |  | ||||||
| :func:`save` method will return the new filename.:: |  | ||||||
|  |  | ||||||
|     >>> fs.exists('hello.txt') |  | ||||||
|     True |  | ||||||
|     >>> fs.open('hello.txt').read() |  | ||||||
|     'Hello, World!' |  | ||||||
|     >>> fs.size('hello.txt') |  | ||||||
|     13 |  | ||||||
|     >>> fs.url('hello.txt') |  | ||||||
|     'http://your_media_url/hello.txt' |  | ||||||
|     >>> fs.open('hello.txt').name |  | ||||||
|     'hello.txt' |  | ||||||
|     >>> fs.listdir() |  | ||||||
|     ([], [u'hello.txt']) |  | ||||||
|  |  | ||||||
| All files will be saved and retrieved in GridFS via the :class::`FileDocument` |  | ||||||
| document, allowing easy access to the files without the GridFSStorage |  | ||||||
| backend.:: |  | ||||||
|  |  | ||||||
|     >>> from mongoengine.django.storage import FileDocument |  | ||||||
|     >>> FileDocument.objects() |  | ||||||
|     [<FileDocument: FileDocument object>] |  | ||||||
|  |  | ||||||
| .. versionadded:: 0.4 |  | ||||||
|   | |||||||
							
								
								
									
										12
									
								
								docs/faq.rst
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										12
									
								
								docs/faq.rst
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,12 @@ | |||||||
|  | ========================== | ||||||
|  | Frequently Asked Questions | ||||||
|  | ========================== | ||||||
|  |  | ||||||
|  | Does MongoEngine support asynchronous drivers (Motor, TxMongo)? | ||||||
|  | --------------------------------------------------------------- | ||||||
|  |  | ||||||
|  | No, MongoEngine is exclusively based on PyMongo and isn't designed to support other driver. | ||||||
|  | If this is a requirement for your project, check the alternative:  `uMongo`_ and `MotorEngine`_. | ||||||
|  |  | ||||||
|  | .. _uMongo: https://umongo.readthedocs.io/ | ||||||
|  | .. _MotorEngine: https://motorengine.readthedocs.io/ | ||||||
| @@ -4,57 +4,176 @@ | |||||||
| Connecting to MongoDB | Connecting to MongoDB | ||||||
| ===================== | ===================== | ||||||
|  |  | ||||||
| To connect to a running instance of :program:`mongod`, use the | Connections in MongoEngine are registered globally and are identified with aliases. | ||||||
| :func:`~mongoengine.connect` function. The first argument is the name of the | If no `alias` is provided during the connection, it will use "default" as alias. | ||||||
| database to connect to. If the database does not exist, it will be created. If |  | ||||||
| the database requires authentication, :attr:`username` and :attr:`password` | To connect to a running instance of :program:`mongod`, use the :func:`~mongoengine.connect` | ||||||
| arguments may be provided:: | function. The first argument is the name of the database to connect to:: | ||||||
|  |  | ||||||
|     from mongoengine import connect |     from mongoengine import connect | ||||||
|     connect('project1', username='webapp', password='pwd123') |     connect('project1') | ||||||
|  |  | ||||||
| By default, MongoEngine assumes that the :program:`mongod` instance is running | By default, MongoEngine assumes that the :program:`mongod` instance is running | ||||||
| on **localhost** on port **27017**. If MongoDB is running elsewhere, you may | on **localhost** on port **27017**. If MongoDB is running elsewhere, you should | ||||||
| provide :attr:`host` and :attr:`port` arguments to | provide the :attr:`host` and :attr:`port` arguments to | ||||||
| :func:`~mongoengine.connect`:: | :func:`~mongoengine.connect`:: | ||||||
|  |  | ||||||
|     connect('project1', host='192.168.1.35', port=12345) |     connect('project1', host='192.168.1.35', port=12345) | ||||||
|  |  | ||||||
| Uri style connections are also supported as long as you include the database | If the database requires authentication, :attr:`username`, :attr:`password` | ||||||
| name - just supply the uri as the :attr:`host` to | and :attr:`authentication_source` arguments should be provided:: | ||||||
|  |  | ||||||
|  |     connect('project1', username='webapp', password='pwd123', authentication_source='admin') | ||||||
|  |  | ||||||
|  | URI style connections are also supported -- just supply the URI as | ||||||
|  | the :attr:`host` to | ||||||
| :func:`~mongoengine.connect`:: | :func:`~mongoengine.connect`:: | ||||||
|  |  | ||||||
|     connect('project1', host='mongodb://localhost/database_name') |     connect('project1', host='mongodb://localhost/database_name') | ||||||
|  |  | ||||||
|  | .. note:: URI containing SRV records (e.g mongodb+srv://server.example.com/) can be used as well as the :attr:`host` | ||||||
|  |  | ||||||
|  | .. note:: Database, username and password from URI string overrides | ||||||
|  |     corresponding parameters in :func:`~mongoengine.connect`: :: | ||||||
|  |  | ||||||
|  |         connect( | ||||||
|  |             db='test', | ||||||
|  |             username='user', | ||||||
|  |             password='12345', | ||||||
|  |             host='mongodb://admin:qwerty@localhost/production' | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     will establish connection to ``production`` database using | ||||||
|  |     ``admin`` username and ``qwerty`` password. | ||||||
|  |  | ||||||
|  | .. note:: Calling :func:`~mongoengine.connect` without any argument will establish | ||||||
|  |     a connection to the "test" database by default | ||||||
|  |  | ||||||
|  | Replica Sets | ||||||
|  | ============ | ||||||
|  |  | ||||||
|  | MongoEngine supports connecting to replica sets:: | ||||||
|  |  | ||||||
|  |     from mongoengine import connect | ||||||
|  |  | ||||||
|  |     # Regular connect | ||||||
|  |     connect('dbname', replicaset='rs-name') | ||||||
|  |  | ||||||
|  |     # MongoDB URI-style connect | ||||||
|  |     connect(host='mongodb://localhost/dbname?replicaSet=rs-name') | ||||||
|  |  | ||||||
|  | Read preferences are supported through the connection or via individual | ||||||
|  | queries by passing the read_preference :: | ||||||
|  |  | ||||||
|  |     Bar.objects().read_preference(ReadPreference.PRIMARY) | ||||||
|  |     Bar.objects(read_preference=ReadPreference.PRIMARY) | ||||||
|  |  | ||||||
| Multiple Databases | Multiple Databases | ||||||
| ================== | ================== | ||||||
|  |  | ||||||
| Multiple database support was added in MongoEngine 0.6. To use multiple | To use multiple databases you can use :func:`~mongoengine.connect` and provide | ||||||
| databases you can use :func:`~mongoengine.connect` and provide an `alias` name | an `alias` name for the connection - if no `alias` is provided then "default" | ||||||
| for the connection - if no `alias` is provided then "default" is used. | is used. | ||||||
|  |  | ||||||
| In the background this uses :func:`~mongoengine.register_connection` to | In the background this uses :func:`~mongoengine.register_connection` to | ||||||
| store the data and you can register all aliases up front if required. | store the data and you can register all aliases up front if required. | ||||||
|  |  | ||||||
| Individual documents can also support multiple databases by providing a | Documents defined in different database | ||||||
| `db_alias` in their meta data.  This allows :class:`~pymongo.dbref.DBRef` objects | --------------------------------------- | ||||||
| to point across databases and collections.  Below is an example schema, using | Individual documents can be attached to different databases by providing a | ||||||
| 3 different databases to store data:: | `db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` | ||||||
|  | objects to point across databases and collections. Below is an example schema, | ||||||
|  | using 3 different databases to store data:: | ||||||
|  |  | ||||||
|  |         connect(alias='user-db-alias', db='user-db') | ||||||
|  |         connect(alias='book-db-alias', db='book-db') | ||||||
|  |         connect(alias='users-books-db-alias', db='users-books-db') | ||||||
|  |  | ||||||
|         class User(Document): |         class User(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|             meta = {"db_alias": "user-db"} |             meta = {'db_alias': 'user-db-alias'} | ||||||
|  |  | ||||||
|         class Book(Document): |         class Book(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|             meta = {"db_alias": "book-db"} |             meta = {'db_alias': 'book-db-alias'} | ||||||
|  |  | ||||||
|         class AuthorBooks(Document): |         class AuthorBooks(Document): | ||||||
|             author = ReferenceField(User) |             author = ReferenceField(User) | ||||||
|             book = ReferenceField(Book) |             book = ReferenceField(Book) | ||||||
|  |  | ||||||
|             meta = {"db_alias": "users-books-db"} |             meta = {'db_alias': 'users-books-db-alias'} | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Disconnecting an existing connection | ||||||
|  | ------------------------------------ | ||||||
|  | The function :func:`~mongoengine.disconnect` can be used to | ||||||
|  | disconnect a particular connection. This can be used to change a | ||||||
|  | connection globally:: | ||||||
|  |  | ||||||
|  |         from mongoengine import connect, disconnect | ||||||
|  |         connect('a_db', alias='db1') | ||||||
|  |  | ||||||
|  |         class User(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {'db_alias': 'db1'} | ||||||
|  |  | ||||||
|  |         disconnect(alias='db1') | ||||||
|  |  | ||||||
|  |         connect('another_db', alias='db1') | ||||||
|  |  | ||||||
|  | .. note:: Calling :func:`~mongoengine.disconnect` without any argument | ||||||
|  |     will disconnect the "default" connection | ||||||
|  |  | ||||||
|  | .. note:: Since connections get registered globally, it is important | ||||||
|  |     to use the `disconnect` function from MongoEngine and not the | ||||||
|  |     `disconnect()` method of an existing connection (pymongo.MongoClient) | ||||||
|  |  | ||||||
|  | .. note:: :class:`~mongoengine.Document` classes cache the pymongo collection; | ||||||
|  |     using `disconnect` ensures that it gets cleaned up as well | ||||||
|  |  | ||||||
|  | Context Managers | ||||||
|  | ================ | ||||||
|  | Sometimes you may want to switch the database or collection to query against. | ||||||
|  | For example, archiving older data into a separate database for performance | ||||||
|  | reasons or writing functions that dynamically choose collections to write | ||||||
|  | a document to. | ||||||
|  |  | ||||||
|  | Switch Database | ||||||
|  | --------------- | ||||||
|  | The :class:`~mongoengine.context_managers.switch_db` context manager allows | ||||||
|  | you to change the database alias for a given class allowing quick and easy | ||||||
|  | access to the same User document across databases:: | ||||||
|  |  | ||||||
|  |     from mongoengine.context_managers import switch_db | ||||||
|  |  | ||||||
|  |     class User(Document): | ||||||
|  |         name = StringField() | ||||||
|  |  | ||||||
|  |         meta = {'db_alias': 'user-db'} | ||||||
|  |  | ||||||
|  |     with switch_db(User, 'archive-user-db') as User: | ||||||
|  |         User(name='Ross').save()  # Saves the 'archive-user-db' | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Switch Collection | ||||||
|  | ----------------- | ||||||
|  | The :func:`~mongoengine.context_managers.switch_collection` context manager | ||||||
|  | allows you to change the collection for a given class allowing quick and easy | ||||||
|  | access to the same Group document across collections:: | ||||||
|  |  | ||||||
|  |         from mongoengine.context_managers import switch_collection | ||||||
|  |  | ||||||
|  |         class Group(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         Group(name='test').save()  # Saves in the default db | ||||||
|  |  | ||||||
|  |         with switch_collection(Group, 'group2000') as Group: | ||||||
|  |             Group(name='hello Group 2000 collection!').save()  # Saves in group2000 collection | ||||||
|  |  | ||||||
|  |  | ||||||
|  | .. note:: Make sure any aliases have been registered with | ||||||
|  |     :func:`~mongoengine.register_connection` or :func:`~mongoengine.connect` | ||||||
|  |     before using the context manager. | ||||||
|   | |||||||
| @@ -4,7 +4,7 @@ Defining documents | |||||||
| In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When | In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When | ||||||
| working with relational databases, rows are stored in **tables**, which have a | working with relational databases, rows are stored in **tables**, which have a | ||||||
| strict **schema** that the rows follow. MongoDB stores documents in | strict **schema** that the rows follow. MongoDB stores documents in | ||||||
| **collections** rather than tables - the principle difference is that no schema | **collections** rather than tables --- the principal difference is that no schema | ||||||
| is enforced at a database level. | is enforced at a database level. | ||||||
|  |  | ||||||
| Defining a document's schema | Defining a document's schema | ||||||
| @@ -22,11 +22,14 @@ objects** as class attributes to the document class:: | |||||||
|  |  | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
|         title = StringField(max_length=200, required=True) |         title = StringField(max_length=200, required=True) | ||||||
|         date_modified = DateTimeField(default=datetime.datetime.now) |         date_modified = DateTimeField(default=datetime.datetime.utcnow) | ||||||
|  |  | ||||||
|  | As BSON (the binary format for storing data in MongoDB) is order dependent, | ||||||
|  | documents are serialized based on their field order. | ||||||
|  |  | ||||||
| Dynamic document schemas | Dynamic document schemas | ||||||
| ======================== | ======================== | ||||||
| One of the benefits of MongoDb is dynamic schemas for a collection, whilst data | One of the benefits of MongoDB is dynamic schemas for a collection, whilst data | ||||||
| should be planned and organised (after all explicit is better than implicit!) | should be planned and organised (after all explicit is better than implicit!) | ||||||
| there are scenarios where having dynamic / expando style documents is desirable. | there are scenarios where having dynamic / expando style documents is desirable. | ||||||
|  |  | ||||||
| @@ -47,10 +50,11 @@ be saved :: | |||||||
|     >>> Page.objects(tags='mongoengine').count() |     >>> Page.objects(tags='mongoengine').count() | ||||||
|     >>> 1 |     >>> 1 | ||||||
|  |  | ||||||
| ..note:: | .. note:: | ||||||
|  |  | ||||||
|    There is one caveat on Dynamic Documents: fields cannot start with `_` |    There is one caveat on Dynamic Documents: fields cannot start with `_` | ||||||
|  |  | ||||||
|  | Dynamic fields are stored in creation order *after* any declared fields. | ||||||
|  |  | ||||||
| Fields | Fields | ||||||
| ====== | ====== | ||||||
| @@ -62,28 +66,42 @@ not provided. Default values may optionally be a callable, which will be called | |||||||
| to retrieve the value (such as in the above example). The field types available | to retrieve the value (such as in the above example). The field types available | ||||||
| are as follows: | are as follows: | ||||||
|  |  | ||||||
| * :class:`~mongoengine.StringField` | * :class:`~mongoengine.fields.BinaryField` | ||||||
| * :class:`~mongoengine.URLField` | * :class:`~mongoengine.fields.BooleanField` | ||||||
| * :class:`~mongoengine.EmailField` | * :class:`~mongoengine.fields.ComplexDateTimeField` | ||||||
| * :class:`~mongoengine.IntField` | * :class:`~mongoengine.fields.DateTimeField` | ||||||
| * :class:`~mongoengine.FloatField` | * :class:`~mongoengine.fields.DecimalField` | ||||||
| * :class:`~mongoengine.DecimalField` | * :class:`~mongoengine.fields.DictField` | ||||||
| * :class:`~mongoengine.DateTimeField` | * :class:`~mongoengine.fields.DynamicField` | ||||||
| * :class:`~mongoengine.ComplexDateTimeField` | * :class:`~mongoengine.fields.EmailField` | ||||||
| * :class:`~mongoengine.ListField` | * :class:`~mongoengine.fields.EmbeddedDocumentField` | ||||||
| * :class:`~mongoengine.SortedListField` | * :class:`~mongoengine.fields.EmbeddedDocumentListField` | ||||||
| * :class:`~mongoengine.DictField` | * :class:`~mongoengine.fields.EnumField` | ||||||
| * :class:`~mongoengine.MapField` | * :class:`~mongoengine.fields.FileField` | ||||||
| * :class:`~mongoengine.ObjectIdField` | * :class:`~mongoengine.fields.FloatField` | ||||||
| * :class:`~mongoengine.ReferenceField` | * :class:`~mongoengine.fields.GenericEmbeddedDocumentField` | ||||||
| * :class:`~mongoengine.GenericReferenceField` | * :class:`~mongoengine.fields.GenericReferenceField` | ||||||
| * :class:`~mongoengine.EmbeddedDocumentField` | * :class:`~mongoengine.fields.GenericLazyReferenceField` | ||||||
| * :class:`~mongoengine.GenericEmbeddedDocumentField` | * :class:`~mongoengine.fields.GeoPointField` | ||||||
| * :class:`~mongoengine.BooleanField` | * :class:`~mongoengine.fields.ImageField` | ||||||
| * :class:`~mongoengine.FileField` | * :class:`~mongoengine.fields.IntField` | ||||||
| * :class:`~mongoengine.BinaryField` | * :class:`~mongoengine.fields.ListField` | ||||||
| * :class:`~mongoengine.GeoPointField` | * :class:`~mongoengine.fields.LongField` | ||||||
| * :class:`~mongoengine.SequenceField` | * :class:`~mongoengine.fields.MapField` | ||||||
|  | * :class:`~mongoengine.fields.ObjectIdField` | ||||||
|  | * :class:`~mongoengine.fields.ReferenceField` | ||||||
|  | * :class:`~mongoengine.fields.LazyReferenceField` | ||||||
|  | * :class:`~mongoengine.fields.SequenceField` | ||||||
|  | * :class:`~mongoengine.fields.SortedListField` | ||||||
|  | * :class:`~mongoengine.fields.StringField` | ||||||
|  | * :class:`~mongoengine.fields.URLField` | ||||||
|  | * :class:`~mongoengine.fields.UUIDField` | ||||||
|  | * :class:`~mongoengine.fields.PointField` | ||||||
|  | * :class:`~mongoengine.fields.LineStringField` | ||||||
|  | * :class:`~mongoengine.fields.PolygonField` | ||||||
|  | * :class:`~mongoengine.fields.MultiPointField` | ||||||
|  | * :class:`~mongoengine.fields.MultiLineStringField` | ||||||
|  | * :class:`~mongoengine.fields.MultiPolygonField` | ||||||
|  |  | ||||||
| Field arguments | Field arguments | ||||||
| --------------- | --------------- | ||||||
| @@ -93,21 +111,18 @@ arguments can be set on all fields: | |||||||
| :attr:`db_field` (Default: None) | :attr:`db_field` (Default: None) | ||||||
|     The MongoDB field name. |     The MongoDB field name. | ||||||
|  |  | ||||||
| :attr:`name` (Default: None) |  | ||||||
|     The mongoengine field name. |  | ||||||
|  |  | ||||||
| :attr:`required` (Default: False) | :attr:`required` (Default: False) | ||||||
|     If set to True and the field is not set on the document instance, a |     If set to True and the field is not set on the document instance, a | ||||||
|     :class:`~mongoengine.base.ValidationError` will be raised when the document is |     :class:`~mongoengine.ValidationError` will be raised when the document is | ||||||
|     validated. |     validated. | ||||||
|  |  | ||||||
| :attr:`default` (Default: None) | :attr:`default` (Default: None) | ||||||
|     A value to use when no value is set for this field. |     A value to use when no value is set for this field. | ||||||
|  |  | ||||||
|     The definion of default parameters follow `the general rules on Python |     The definition of default parameters follow `the general rules on Python | ||||||
|     <http://docs.python.org/reference/compound_stmts.html#function-definitions>`__, |     <http://docs.python.org/reference/compound_stmts.html#function-definitions>`__, | ||||||
|     which means that some care should be taken when dealing with default mutable objects |     which means that some care should be taken when dealing with default mutable objects | ||||||
|     (like in :class:`~mongoengine.ListField` or :class:`~mongoengine.DictField`):: |     (like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`):: | ||||||
|  |  | ||||||
|         class ExampleFirst(Document): |         class ExampleFirst(Document): | ||||||
|             # Default an empty list |             # Default an empty list | ||||||
| @@ -122,6 +137,7 @@ arguments can be set on all fields: | |||||||
|             # instead to just an object |             # instead to just an object | ||||||
|             values = ListField(IntField(), default=[1,2,3]) |             values = ListField(IntField(), default=[1,2,3]) | ||||||
|  |  | ||||||
|  |     .. note:: Unsetting a field with a default value will revert back to the default. | ||||||
|  |  | ||||||
| :attr:`unique` (Default: False) | :attr:`unique` (Default: False) | ||||||
|     When True, no documents in the collection will have the same value for this |     When True, no documents in the collection will have the same value for this | ||||||
| @@ -132,13 +148,16 @@ arguments can be set on all fields: | |||||||
|     field, will not have two documents in the collection with the same value. |     field, will not have two documents in the collection with the same value. | ||||||
|  |  | ||||||
| :attr:`primary_key` (Default: False) | :attr:`primary_key` (Default: False) | ||||||
|     When True, use this field as a primary key for the collection. |     When True, use this field as a primary key for the collection.  `DictField` | ||||||
|  |     and `EmbeddedDocuments` both support being the primary key for a document. | ||||||
|  |  | ||||||
|  |     .. note:: If set, this field is also accessible through the `pk` field. | ||||||
|  |  | ||||||
| :attr:`choices` (Default: None) | :attr:`choices` (Default: None) | ||||||
|     An iterable (e.g. a list or tuple) of choices to which the value of this |     An iterable (e.g. list, tuple or set) of choices to which the value of this | ||||||
|     field should be limited. |     field should be limited. | ||||||
|  |  | ||||||
|     Can be either be a nested tuples of value (stored in mongo) and a |     Can either be nested tuples of value (stored in mongo) and a | ||||||
|     human readable key :: |     human readable key :: | ||||||
|  |  | ||||||
|         SIZE = (('S', 'Small'), |         SIZE = (('S', 'Small'), | ||||||
| @@ -158,18 +177,33 @@ arguments can be set on all fields: | |||||||
|         class Shirt(Document): |         class Shirt(Document): | ||||||
|             size = StringField(max_length=3, choices=SIZE) |             size = StringField(max_length=3, choices=SIZE) | ||||||
|  |  | ||||||
| :attr:`help_text` (Default: None) | :attr:`validation` (Optional) | ||||||
|     Optional help text to output with the field - used by form libraries |     A callable to validate the value of the field. | ||||||
|  |     The callable takes the value as parameter and should raise a ValidationError | ||||||
|  |     if validation fails | ||||||
|  |  | ||||||
| :attr:`verbose_name` (Default: None) |     e.g :: | ||||||
|     Optional human-readable name for the field - used by form libraries |  | ||||||
|  |         def _not_empty(val): | ||||||
|  |             if not val: | ||||||
|  |                 raise ValidationError('value can not be empty') | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField(validation=_not_empty) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | :attr:`**kwargs` (Optional) | ||||||
|  |     You can supply additional metadata as arbitrary additional keyword | ||||||
|  |     arguments.  You can not override existing attributes, however.  Common | ||||||
|  |     choices include `help_text` and `verbose_name`, commonly used by form and | ||||||
|  |     widget libraries. | ||||||
|  |  | ||||||
|  |  | ||||||
| List fields | List fields | ||||||
| ----------- | ----------- | ||||||
| MongoDB allows the storage of lists of items. To add a list of items to a | MongoDB allows storing lists of items. To add a list of items to a | ||||||
| :class:`~mongoengine.Document`, use the :class:`~mongoengine.ListField` field | :class:`~mongoengine.Document`, use the :class:`~mongoengine.fields.ListField` field | ||||||
| type. :class:`~mongoengine.ListField` takes another field object as its first | type. :class:`~mongoengine.fields.ListField` takes another field object as its first | ||||||
| argument, which specifies which type elements may be stored within the list:: | argument, which specifies which type elements may be stored within the list:: | ||||||
|  |  | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
| @@ -187,7 +221,7 @@ inherit from :class:`~mongoengine.EmbeddedDocument` rather than | |||||||
|         content = StringField() |         content = StringField() | ||||||
|  |  | ||||||
| To embed the document within another document, use the | To embed the document within another document, use the | ||||||
| :class:`~mongoengine.EmbeddedDocumentField` field type, providing the embedded | :class:`~mongoengine.fields.EmbeddedDocumentField` field type, providing the embedded | ||||||
| document class as the first argument:: | document class as the first argument:: | ||||||
|  |  | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
| @@ -199,17 +233,17 @@ document class as the first argument:: | |||||||
|  |  | ||||||
| Dictionary Fields | Dictionary Fields | ||||||
| ----------------- | ----------------- | ||||||
| Often, an embedded document may be used instead of a dictionary -- generally | Often, an embedded document may be used instead of a dictionary -- generally | ||||||
| this is recommended as dictionaries don't support validation or custom field | embedded documents are recommended as dictionaries don't support validation | ||||||
| types. However, sometimes you will not know the structure of what you want to | or custom field types. However, sometimes you will not know the structure of what you want to | ||||||
| store; in this situation a :class:`~mongoengine.DictField` is appropriate:: | store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate:: | ||||||
|  |  | ||||||
|     class SurveyResponse(Document): |     class SurveyResponse(Document): | ||||||
|         date = DateTimeField() |         date = DateTimeField() | ||||||
|         user = ReferenceField(User) |         user = ReferenceField(User) | ||||||
|         answers = DictField() |         answers = DictField() | ||||||
|  |  | ||||||
|     survey_response = SurveyResponse(date=datetime.now(), user=request.user) |     survey_response = SurveyResponse(date=datetime.utcnow(), user=request.user) | ||||||
|     response_form = ResponseForm(request.POST) |     response_form = ResponseForm(request.POST) | ||||||
|     survey_response.answers = response_form.cleaned_data() |     survey_response.answers = response_form.cleaned_data() | ||||||
|     survey_response.save() |     survey_response.save() | ||||||
| @@ -220,7 +254,7 @@ other objects, so are the most flexible field type available. | |||||||
| Reference fields | Reference fields | ||||||
| ---------------- | ---------------- | ||||||
| References may be stored to other documents in the database using the | References may be stored to other documents in the database using the | ||||||
| :class:`~mongoengine.ReferenceField`. Pass in another document class as the | :class:`~mongoengine.fields.ReferenceField`. Pass in another document class as the | ||||||
| first argument to the constructor, then simply assign document objects to the | first argument to the constructor, then simply assign document objects to the | ||||||
| field:: | field:: | ||||||
|  |  | ||||||
| @@ -241,9 +275,9 @@ field:: | |||||||
| The :class:`User` object is automatically turned into a reference behind the | The :class:`User` object is automatically turned into a reference behind the | ||||||
| scenes, and dereferenced when the :class:`Page` object is retrieved. | scenes, and dereferenced when the :class:`Page` object is retrieved. | ||||||
|  |  | ||||||
| To add a :class:`~mongoengine.ReferenceField` that references the document | To add a :class:`~mongoengine.fields.ReferenceField` that references the document | ||||||
| being defined, use the string ``'self'`` in place of the document class as the | being defined, use the string ``'self'`` in place of the document class as the | ||||||
| argument to :class:`~mongoengine.ReferenceField`'s constructor. To reference a | argument to :class:`~mongoengine.fields.ReferenceField`'s constructor. To reference a | ||||||
| document that has not yet been defined, use the name of the undefined document | document that has not yet been defined, use the name of the undefined document | ||||||
| as the constructor's argument:: | as the constructor's argument:: | ||||||
|  |  | ||||||
| @@ -256,6 +290,41 @@ as the constructor's argument:: | |||||||
|         content = StringField() |         content = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | .. _one-to-many-with-listfields: | ||||||
|  |  | ||||||
|  | One to Many with ListFields | ||||||
|  | ''''''''''''''''''''''''''' | ||||||
|  |  | ||||||
|  | If you are implementing a one to many relationship via a list of references, | ||||||
|  | then the references are stored as DBRefs and to query you need to pass an | ||||||
|  | instance of the object to the query:: | ||||||
|  |  | ||||||
|  |     class User(Document): | ||||||
|  |         name = StringField() | ||||||
|  |  | ||||||
|  |     class Page(Document): | ||||||
|  |         content = StringField() | ||||||
|  |         authors = ListField(ReferenceField(User)) | ||||||
|  |  | ||||||
|  |     bob = User(name="Bob Jones").save() | ||||||
|  |     john = User(name="John Smith").save() | ||||||
|  |  | ||||||
|  |     Page(content="Test Page", authors=[bob, john]).save() | ||||||
|  |     Page(content="Another Page", authors=[john]).save() | ||||||
|  |  | ||||||
|  |     # Find all pages Bob authored | ||||||
|  |     Page.objects(authors__in=[bob]) | ||||||
|  |  | ||||||
|  |     # Find all pages that both Bob and John have authored | ||||||
|  |     Page.objects(authors__all=[bob, john]) | ||||||
|  |  | ||||||
|  |     # Remove Bob from the authors for a page. | ||||||
|  |     Page.objects(id='...').update_one(pull__authors=bob) | ||||||
|  |  | ||||||
|  |     # Add John to the authors for a page. | ||||||
|  |     Page.objects(id='...').update_one(push__authors=john) | ||||||
|  |  | ||||||
|  |  | ||||||
| Dealing with deletion of referred documents | Dealing with deletion of referred documents | ||||||
| ''''''''''''''''''''''''''''''''''''''''''' | ''''''''''''''''''''''''''''''''''''''''''' | ||||||
| By default, MongoDB doesn't check the integrity of your data, so deleting | By default, MongoDB doesn't check the integrity of your data, so deleting | ||||||
| @@ -266,12 +335,12 @@ reference with a delete rule specification.  A delete rule is specified by | |||||||
| supplying the :attr:`reverse_delete_rule` attributes on the | supplying the :attr:`reverse_delete_rule` attributes on the | ||||||
| :class:`ReferenceField` definition, like this:: | :class:`ReferenceField` definition, like this:: | ||||||
|  |  | ||||||
|     class Employee(Document): |     class ProfilePage(Document): | ||||||
|         ... |         ... | ||||||
|         profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY) |         employee = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE) | ||||||
|  |  | ||||||
| The declaration in this example means that when an :class:`Employee` object is | The declaration in this example means that when an :class:`Employee` object is | ||||||
| removed, the :class:`ProfilePage` that belongs to that employee is removed as | removed, the :class:`ProfilePage` that references that employee is removed as | ||||||
| well.  If a whole batch of employees is removed, all profile pages that are | well.  If a whole batch of employees is removed, all profile pages that are | ||||||
| linked are removed as well. | linked are removed as well. | ||||||
|  |  | ||||||
| @@ -284,11 +353,15 @@ Its value can take any of the following constants: | |||||||
|   Deletion is denied if there still exist references to the object being |   Deletion is denied if there still exist references to the object being | ||||||
|   deleted. |   deleted. | ||||||
| :const:`mongoengine.NULLIFY` | :const:`mongoengine.NULLIFY` | ||||||
|   Any object's fields still referring to the object being deleted are removed |   Any object's fields still referring to the object being deleted are set to None | ||||||
|   (using MongoDB's "unset" operation), effectively nullifying the relationship. |   (using MongoDB's "unset" operation), effectively nullifying the relationship. | ||||||
| :const:`mongoengine.CASCADE` | :const:`mongoengine.CASCADE` | ||||||
|   Any object containing fields that are refererring to the object being deleted |   Any object containing fields that are referring to the object being deleted | ||||||
|   are deleted first. |   are deleted first. | ||||||
|  | :const:`mongoengine.PULL` | ||||||
|  |   Removes the reference to the object (using MongoDB's "pull" operation) | ||||||
|  |   from any object's fields of | ||||||
|  |   :class:`~mongoengine.fields.ListField` (:class:`~mongoengine.fields.ReferenceField`). | ||||||
|  |  | ||||||
|  |  | ||||||
| .. warning:: | .. warning:: | ||||||
| @@ -307,11 +380,10 @@ Its value can take any of the following constants: | |||||||
|    In Django, be sure to put all apps that have such delete rule declarations in |    In Django, be sure to put all apps that have such delete rule declarations in | ||||||
|    their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. |    their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. | ||||||
|  |  | ||||||
|  |  | ||||||
| Generic reference fields | Generic reference fields | ||||||
| '''''''''''''''''''''''' | '''''''''''''''''''''''' | ||||||
| A second kind of reference field also exists, | A second kind of reference field also exists, | ||||||
| :class:`~mongoengine.GenericReferenceField`. This allows you to reference any | :class:`~mongoengine.fields.GenericReferenceField`. This allows you to reference any | ||||||
| kind of :class:`~mongoengine.Document`, and hence doesn't take a | kind of :class:`~mongoengine.Document`, and hence doesn't take a | ||||||
| :class:`~mongoengine.Document` subclass as a constructor argument:: | :class:`~mongoengine.Document` subclass as a constructor argument:: | ||||||
|  |  | ||||||
| @@ -335,18 +407,18 @@ kind of :class:`~mongoengine.Document`, and hence doesn't take a | |||||||
|  |  | ||||||
| .. note:: | .. note:: | ||||||
|  |  | ||||||
|    Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less |    Using :class:`~mongoengine.fields.GenericReferenceField`\ s is slightly less | ||||||
|    efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if |    efficient than the standard :class:`~mongoengine.fields.ReferenceField`\ s, so if | ||||||
|    you will only be referencing one document type, prefer the standard |    you will only be referencing one document type, prefer the standard | ||||||
|    :class:`~mongoengine.ReferenceField`. |    :class:`~mongoengine.fields.ReferenceField`. | ||||||
|  |  | ||||||
| Uniqueness constraints | Uniqueness constraints | ||||||
| ---------------------- | ---------------------- | ||||||
| MongoEngine allows you to specify that a field should be unique across a | MongoEngine allows you to specify that a field should be unique across a | ||||||
| collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's | collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's | ||||||
| constructor. If you try to save a document that has the same value for a unique | constructor. If you try to save a document that has the same value for a unique | ||||||
| field as a document that is already in the database, a | field as a document that is already in the database, a | ||||||
| :class:`~mongoengine.OperationError` will be raised. You may also specify | :class:`~mongoengine.NotUniqueError` will be raised. You may also specify | ||||||
| multi-field uniqueness constraints by using :attr:`unique_with`, which may be | multi-field uniqueness constraints by using :attr:`unique_with`, which may be | ||||||
| either a single field name, or a list or tuple of field names:: | either a single field name, or a list or tuple of field names:: | ||||||
|  |  | ||||||
| @@ -355,25 +427,12 @@ either a single field name, or a list or tuple of field names:: | |||||||
|         first_name = StringField() |         first_name = StringField() | ||||||
|         last_name = StringField(unique_with='first_name') |         last_name = StringField(unique_with='first_name') | ||||||
|  |  | ||||||
| Skipping Document validation on save |  | ||||||
| ------------------------------------ |  | ||||||
| You can also skip the whole document validation process by setting |  | ||||||
``validate=False`` when calling the :meth:`~mongoengine.document.Document.save` |  | ||||||
| method:: |  | ||||||
|  |  | ||||||
|     class Recipient(Document): |  | ||||||
|         name = StringField() |  | ||||||
|         email = EmailField() |  | ||||||
|  |  | ||||||
|     recipient = Recipient(name='admin', email='root@localhost') |  | ||||||
|     recipient.save()               # will raise a ValidationError while |  | ||||||
|     recipient.save(validate=False) # won't |  | ||||||
|  |  | ||||||
| Document collections | Document collections | ||||||
| ==================== | ==================== | ||||||
| Document classes that inherit **directly** from :class:`~mongoengine.Document` | Document classes that inherit **directly** from :class:`~mongoengine.Document` | ||||||
| will have their own **collection** in the database. The name of the collection | will have their own **collection** in the database. The name of the collection | ||||||
| is by default the name of the class, coverted to lowercase (so in the example | is by default the name of the class, converted to lowercase (so in the example | ||||||
| above, the collection would be called `page`). If you need to change the name | above, the collection would be called `page`). If you need to change the name | ||||||
| of the collection (e.g. to use MongoEngine with an existing database), then | of the collection (e.g. to use MongoEngine with an existing database), then | ||||||
| create a class dictionary attribute called :attr:`meta` on your document, and | create a class dictionary attribute called :attr:`meta` on your document, and | ||||||
| @@ -390,8 +449,10 @@ A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying | |||||||
| :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary. | :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary. | ||||||
| :attr:`max_documents` is the maximum number of documents that is allowed to be | :attr:`max_documents` is the maximum number of documents that is allowed to be | ||||||
| stored in the collection, and :attr:`max_size` is the maximum size of the | stored in the collection, and :attr:`max_size` is the maximum size of the | ||||||
| collection in bytes. If :attr:`max_size` is not specified and | collection in bytes. :attr:`max_size` is rounded up to the next multiple of 256 | ||||||
| :attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB). | by MongoDB internally and mongoengine before. Use also a multiple of 256 to | ||||||
|  | avoid confusions. If :attr:`max_size` is not specified and | ||||||
|  | :attr:`max_documents` is, :attr:`max_size` defaults to 10485760 bytes (10MB). | ||||||
| The following example shows a :class:`Log` document that will be limited to | The following example shows a :class:`Log` document that will be limited to | ||||||
| 1000 entries and 2MB of disk space:: | 1000 entries and 2MB of disk space:: | ||||||
|  |  | ||||||
| @@ -399,53 +460,159 @@ The following example shows a :class:`Log` document that will be limited to | |||||||
|         ip_address = StringField() |         ip_address = StringField() | ||||||
|         meta = {'max_documents': 1000, 'max_size': 2000000} |         meta = {'max_documents': 1000, 'max_size': 2000000} | ||||||
|  |  | ||||||
|  | .. _defining-indexes: | ||||||
|  |  | ||||||
| Indexes | Indexes | ||||||
| ======= | ======= | ||||||
|  |  | ||||||
| You can specify indexes on collections to make querying faster. This is done | You can specify indexes on collections to make querying faster. This is done | ||||||
| by creating a list of index specifications called :attr:`indexes` in the | by creating a list of index specifications called :attr:`indexes` in the | ||||||
| :attr:`~mongoengine.Document.meta` dictionary, where an index specification may | :attr:`~mongoengine.Document.meta` dictionary, where an index specification may | ||||||
| either be a single field name, a tuple containing multiple field names, or a | either be a single field name, a tuple containing multiple field names, or a | ||||||
| dictionary containing a full index definition. A direction may be specified on | dictionary containing a full index definition. | ||||||
| fields by prefixing the field name with a **+** or a **-** sign. Note that |  | ||||||
| direction only matters on multi-field indexes. :: | A direction may be specified on fields by prefixing the field name with a | ||||||
|  | **+** (for ascending) or a **-** sign (for descending). Note that direction | ||||||
|  | only matters on multi-field indexes. Text indexes may be specified by prefixing | ||||||
|  | the field name with a **$**. Hashed indexes may be specified by prefixing | ||||||
|  | the field name with a **#**:: | ||||||
|  |  | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
|  |         category = IntField() | ||||||
|         title = StringField() |         title = StringField() | ||||||
|         rating = StringField() |         rating = StringField() | ||||||
|  |         created = DateTimeField() | ||||||
|         meta = { |         meta = { | ||||||
|             'indexes': ['title', ('title', '-rating')] |             'indexes': [ | ||||||
|  |                 'title', | ||||||
|  |                 '$title',  # text index | ||||||
|  |                 '#title',  # hashed index | ||||||
|  |                 ('title', '-rating'), | ||||||
|  |                 ('category', '_cls'), | ||||||
|  |                 { | ||||||
|  |                     'fields': ['created'], | ||||||
|  |                     'expireAfterSeconds': 3600 | ||||||
|  |                 } | ||||||
|  |             ] | ||||||
|         } |         } | ||||||
|  |  | ||||||
| If a dictionary is passed then the following options are available: | If a dictionary is passed then additional options become available. Valid options include, | ||||||
|  | but are not limited to: | ||||||
|  |  | ||||||
|  |  | ||||||
| :attr:`fields` (Default: None) | :attr:`fields` (Default: None) | ||||||
|     The fields to index. Specified in the same format as described above. |     The fields to index. Specified in the same format as described above. | ||||||
|  |  | ||||||
| :attr:`types` (Default: True) | :attr:`cls` (Default: True) | ||||||
|     Whether the index should have the :attr:`_types` field added automatically |     If you have polymorphic models that inherit and have | ||||||
|     to the start of the index. |     :attr:`allow_inheritance` turned on, you can configure whether the index | ||||||
|  |     should have the :attr:`_cls` field added automatically to the start of the | ||||||
|  |     index. | ||||||
|  |  | ||||||
| :attr:`sparse` (Default: False) | :attr:`sparse` (Default: False) | ||||||
|     Whether the index should be sparse. |     Whether the index should be sparse. | ||||||
|  |  | ||||||
| :attr:`unique` (Default: False) | :attr:`unique` (Default: False) | ||||||
|     Whether the index should be sparse. |     Whether the index should be unique. | ||||||
|  |  | ||||||
| .. warning:: | :attr:`expireAfterSeconds` (Optional) | ||||||
|  |     Allows you to automatically expire data from a collection by setting the | ||||||
|  |     time in seconds to expire the field. | ||||||
|  |  | ||||||
|  | :attr:`name` (Optional) | ||||||
|  |     Allows you to specify a name for the index | ||||||
|  |  | ||||||
|  | :attr:`collation` (Optional) | ||||||
|  |     Allows to create case insensitive indexes (MongoDB v3.4+ only) | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |  | ||||||
|  |     Additional options are forwarded as ``**kwargs`` to pymongo's create_index method. | ||||||
|  |     Inheritance adds extra field indices; see :ref:`document-inheritance`. | ||||||
|  |  | ||||||
|  | Global index default options | ||||||
|  | ---------------------------- | ||||||
|  |  | ||||||
|  | There are a few top level defaults for all indexes that can be set:: | ||||||
|  |  | ||||||
|  |     class Page(Document): | ||||||
|  |         title = StringField() | ||||||
|  |         rating = StringField() | ||||||
|  |         meta = { | ||||||
|  |             'index_opts': {}, | ||||||
|  |             'index_background': True, | ||||||
|  |             'index_cls': False, | ||||||
|  |             'auto_create_index': True, | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |  | ||||||
|    Inheritance adds extra indices. | :attr:`index_opts` (Optional) | ||||||
|    If don't need inheritance for a document turn inheritance off - see :ref:`document-inheritance`. |     Set any default index options - see the `full options list <https://docs.mongodb.com/manual/reference/method/db.collection.createIndex/#db.collection.createIndex>`_ | ||||||
|  |  | ||||||
|  | :attr:`index_background` (Optional) | ||||||
|  |     Set the default value for if an index should be indexed in the background | ||||||
|  |  | ||||||
|  | :attr:`index_cls` (Optional) | ||||||
|  |     A way to turn off a specific index for _cls. | ||||||
|  |  | ||||||
|  | :attr:`auto_create_index` (Optional) | ||||||
|  |     When this is True (default), MongoEngine will ensure that the correct | ||||||
|  |     indexes exist in MongoDB each time a command is run. This can be disabled | ||||||
|  |     in systems where indexes are managed separately. Disabling this will improve | ||||||
|  |     performance. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Compound Indexes and Indexing sub documents | ||||||
|  | ------------------------------------------- | ||||||
|  |  | ||||||
|  | Compound indexes can be created by adding the Embedded field or dictionary | ||||||
|  | field name to the index definition. | ||||||
|  |  | ||||||
|  | Sometimes it's more efficient to index parts of Embedded / dictionary fields, | ||||||
|  | in this case use 'dot' notation to identify the value to index eg: `rank.title` | ||||||
|  |  | ||||||
|  | .. _geospatial-indexes: | ||||||
|  |  | ||||||
| Geospatial indexes | Geospatial indexes | ||||||
| --------------------------- | ------------------ | ||||||
|  |  | ||||||
|  | The best geo index for mongodb is the new "2dsphere", which has an improved | ||||||
|  | spherical model and provides better performance and more options when querying. | ||||||
|  | The following fields will explicitly add a "2dsphere" index: | ||||||
|  |  | ||||||
|  |     - :class:`~mongoengine.fields.PointField` | ||||||
|  |     - :class:`~mongoengine.fields.LineStringField` | ||||||
|  |     - :class:`~mongoengine.fields.PolygonField` | ||||||
|  |     - :class:`~mongoengine.fields.MultiPointField` | ||||||
|  |     - :class:`~mongoengine.fields.MultiLineStringField` | ||||||
|  |     - :class:`~mongoengine.fields.MultiPolygonField` | ||||||
|  |  | ||||||
|  | As "2dsphere" indexes can be part of a compound index, you may not want the | ||||||
|  | automatic index but would prefer a compound index.  In this example we turn off | ||||||
|  | auto indexing and explicitly declare a compound index on ``location`` and ``datetime``:: | ||||||
|  |  | ||||||
|  |     class Log(Document): | ||||||
|  |         location = PointField(auto_index=False) | ||||||
|  |         datetime = DateTimeField() | ||||||
|  |  | ||||||
|  |         meta = { | ||||||
|  |             'indexes': [[("location", "2dsphere"), ("datetime", 1)]] | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Pre MongoDB 2.4 Geo | ||||||
|  | ''''''''''''''''''' | ||||||
|  |  | ||||||
|  | .. note:: For MongoDB < 2.4 this is still current, however the new 2dsphere | ||||||
|  |     index is a big improvement over the previous 2D model - so upgrading is | ||||||
|  |     advised. | ||||||
|  |  | ||||||
| Geospatial indexes will be automatically created for all | Geospatial indexes will be automatically created for all | ||||||
| :class:`~mongoengine.GeoPointField`\ s | :class:`~mongoengine.fields.GeoPointField`\ s | ||||||
|  |  | ||||||
| It is also possible to explicitly define geospatial indexes. This is | It is also possible to explicitly define geospatial indexes. This is | ||||||
| useful if you need to define a geospatial index on a subfield of a | useful if you need to define a geospatial index on a subfield of a | ||||||
| :class:`~mongoengine.DictField` or a custom field that contains a | :class:`~mongoengine.fields.DictField` or a custom field that contains a | ||||||
| point. To create a geospatial index you must prefix the field with the | point. To create a geospatial index you must prefix the field with the | ||||||
| ***** sign. :: | ***** sign. :: | ||||||
|  |  | ||||||
| @@ -457,6 +624,35 @@ point. To create a geospatial index you must prefix the field with the | |||||||
|             ], |             ], | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  | Time To Live indexes | ||||||
|  | -------------------- | ||||||
|  |  | ||||||
|  | A special index type that allows you to automatically expire data from a | ||||||
|  | collection after a given period. See the official | ||||||
|  | `ttl <http://docs.mongodb.org/manual/tutorial/expire-data/#expire-data-from-collections-by-setting-ttl>`_ | ||||||
|  | documentation for more information.  A common use case might be session data:: | ||||||
|  |  | ||||||
|  |     class Session(Document): | ||||||
|  |         created = DateTimeField(default=datetime.utcnow) | ||||||
|  |         meta = { | ||||||
|  |             'indexes': [ | ||||||
|  |                 {'fields': ['created'], 'expireAfterSeconds': 3600} | ||||||
|  |             ] | ||||||
|  |         } | ||||||
|  |  | ||||||
|  | .. warning:: TTL indexes happen on the MongoDB server and not in the application | ||||||
|  |     code, therefore no signals will be fired on document deletion. | ||||||
|  |     If you need signals to be fired on deletion, then you must handle the | ||||||
|  |     deletion of Documents in your application code. | ||||||
|  |  | ||||||
|  | Comparing Indexes | ||||||
|  | ----------------- | ||||||
|  |  | ||||||
|  | Use :func:`mongoengine.Document.compare_indexes` to compare actual indexes in | ||||||
|  | the database to those that your document definitions define.  This is useful | ||||||
|  | for maintenance purposes and ensuring you have the correct indexes for your | ||||||
|  | schema. | ||||||
|  |  | ||||||
| Ordering | Ordering | ||||||
| ======== | ======== | ||||||
| A default ordering can be specified for your | A default ordering can be specified for your | ||||||
| @@ -500,12 +696,17 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. :: | |||||||
| Shard keys | Shard keys | ||||||
| ========== | ========== | ||||||
|  |  | ||||||
| If your collection is sharded, then you need to specify the shard key as a tuple, | If your collection is sharded by multiple keys, then you can improve shard | ||||||
| using the :attr:`shard_key` attribute of :attr:`-mongoengine.Document.meta`. | routing (and thus the performance of your application) by specifying the shard | ||||||
| This ensures that the shard key is sent with the query when calling the | key, using the :attr:`shard_key` attribute of | ||||||
| :meth:`~mongoengine.document.Document.save` or | :attr:`~mongoengine.Document.meta`. The shard key should be defined as a tuple. | ||||||
| :meth:`~mongoengine.document.Document.update` method on an existing |  | ||||||
| :class:`-mongoengine.Document` instance:: | This ensures that the full shard key is sent with the query when calling | ||||||
|  | methods such as :meth:`~mongoengine.document.Document.save`, | ||||||
|  | :meth:`~mongoengine.document.Document.update`, | ||||||
|  | :meth:`~mongoengine.document.Document.modify`, or | ||||||
|  | :meth:`~mongoengine.document.Document.delete` on an existing | ||||||
|  | :class:`~mongoengine.Document` instance:: | ||||||
|  |  | ||||||
|     class LogEntry(Document): |     class LogEntry(Document): | ||||||
|         machine = StringField() |         machine = StringField() | ||||||
| @@ -514,7 +715,8 @@ This ensures that the shard key is sent with the query when calling the | |||||||
|         data = StringField() |         data = StringField() | ||||||
|  |  | ||||||
|         meta = { |         meta = { | ||||||
|             'shard_key': ('machine', 'timestamp',) |             'shard_key': ('machine', 'timestamp'), | ||||||
|  |             'indexes': ('machine', 'timestamp'), | ||||||
|         } |         } | ||||||
|  |  | ||||||
| .. _document-inheritance: | .. _document-inheritance: | ||||||
| @@ -524,10 +726,12 @@ Document inheritance | |||||||
|  |  | ||||||
| To create a specialised type of a :class:`~mongoengine.Document` you have | To create a specialised type of a :class:`~mongoengine.Document` you have | ||||||
| defined, you may subclass it and add any extra fields or methods you may need. | defined, you may subclass it and add any extra fields or methods you may need. | ||||||
| As this is new class is not a direct subclass of | As this new class is not a direct subclass of | ||||||
| :class:`~mongoengine.Document`, it will not be stored in its own collection; it | :class:`~mongoengine.Document`, it will not be stored in its own collection; it | ||||||
| will use the same collection as its superclass uses. This allows for more | will use the same collection as its superclass uses. This allows for more | ||||||
| convenient and efficient retrieval of related documents:: | convenient and efficient retrieval of related documents -- all you need do is | ||||||
|  | set :attr:`allow_inheritance` to True in the :attr:`meta` data for a | ||||||
|  | document.:: | ||||||
|  |  | ||||||
|     # Stored in a collection named 'page' |     # Stored in a collection named 'page' | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
| @@ -539,25 +743,71 @@ convenient and efficient retrieval of related documents:: | |||||||
|     class DatedPage(Page): |     class DatedPage(Page): | ||||||
|         date = DateTimeField() |         date = DateTimeField() | ||||||
|  |  | ||||||
| .. note:: From 0.7 onwards you must declare `allow_inheritance` in the document meta. | .. note:: From 0.8 onwards :attr:`allow_inheritance` defaults | ||||||
|  |           to False, meaning you must set it to True to use inheritance. | ||||||
|  |  | ||||||
|  |           Setting :attr:`allow_inheritance` to True should also be used in | ||||||
|  |           :class:`~mongoengine.EmbeddedDocument` class in case you need to subclass it | ||||||
|  |  | ||||||
|  | When it comes to querying using :attr:`.objects()`, querying `Page.objects()` will query | ||||||
|  | both `Page` and `DatedPage` whereas querying `DatedPage` will only query the `DatedPage` documents. | ||||||
|  | Behind the scenes, MongoEngine deals with inheritance by adding a :attr:`_cls` attribute that contains | ||||||
|  | the class name in every document. When a document is loaded, MongoEngine checks | ||||||
|  | its :attr:`_cls` attribute and uses that class to construct the instance:: | ||||||
|  |  | ||||||
|  |     Page(title='a funky title').save() | ||||||
|  |     DatedPage(title='another title', date=datetime.utcnow()).save() | ||||||
|  |  | ||||||
|  |     print(Page.objects().count())         # 2 | ||||||
|  |     print(DatedPage.objects().count())    # 1 | ||||||
|  |  | ||||||
|  |     # print documents in their native form | ||||||
|  |     # we remove 'id' to avoid polluting the output with unnecessary detail | ||||||
|  |     qs = Page.objects.exclude('id').as_pymongo() | ||||||
|  |     print(list(qs)) | ||||||
|  |     # [ | ||||||
|  |     #   {'_cls': u 'Page', 'title': 'a funky title'}, | ||||||
|  |     #   {'_cls': u 'Page.DatedPage', 'title': u 'another title', 'date': datetime.datetime(2019, 12, 13, 20, 16, 59, 993000)} | ||||||
|  |     # ] | ||||||
|  |  | ||||||
| Working with existing data | Working with existing data | ||||||
| -------------------------- | -------------------------- | ||||||
| To enable correct retrieval of documents involved in this kind of heirarchy, | As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and | ||||||
| two extra attributes are stored on each document in the database: :attr:`_cls` | easily get working with existing data.  Just define the document to match | ||||||
| and :attr:`_types`. These are hidden from the user through the MongoEngine | the expected schema in your database :: | ||||||
| interface, but may not be present if you are trying to use MongoEngine with |  | ||||||
| an existing database. For this reason, you may disable this inheritance |  | ||||||
| mechansim, removing the dependency of :attr:`_cls` and :attr:`_types`, enabling |  | ||||||
| you to work with existing databases. To disable inheritance on a document |  | ||||||
| class, set :attr:`allow_inheritance` to ``False`` in the :attr:`meta` |  | ||||||
| dictionary:: |  | ||||||
|  |  | ||||||
|     # Will work with data in an existing collection named 'cmsPage' |     # Will work with data in an existing collection named 'cmsPage' | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
|         title = StringField(max_length=200, required=True) |         title = StringField(max_length=200, required=True) | ||||||
|         meta = { |         meta = { | ||||||
|             'collection': 'cmsPage', |             'collection': 'cmsPage' | ||||||
|             'allow_inheritance': False, |  | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  | If you have wildly varying schemas then using a | ||||||
|  | :class:`~mongoengine.DynamicDocument` might be more appropriate, instead of | ||||||
|  | defining all possible field types. | ||||||
|  |  | ||||||
|  | If you use :class:`~mongoengine.Document` and the database contains data that | ||||||
|  | isn't defined then that data will be stored in the `document._data` dictionary. | ||||||
|  |  | ||||||
|  | Abstract classes | ||||||
|  | ================ | ||||||
|  |  | ||||||
|  | If you want to add some extra functionality to a group of Document classes but | ||||||
|  | you don't need or want the overhead of inheritance you can use the | ||||||
|  | :attr:`abstract` attribute of :attr:`~mongoengine.Document.meta`. | ||||||
|  | This won't turn on :ref:`document-inheritance` but will allow you to keep your | ||||||
|  | code DRY:: | ||||||
|  |  | ||||||
|  |         class BaseDocument(Document): | ||||||
|  |             meta = { | ||||||
|  |                 'abstract': True, | ||||||
|  |             } | ||||||
|  |             def check_permissions(self): | ||||||
|  |                 ... | ||||||
|  |  | ||||||
|  |         class User(BaseDocument): | ||||||
|  |            ... | ||||||
|  |  | ||||||
|  | Now the User class will have access to the inherited `check_permissions` method | ||||||
|  | and won't store any of the extra `_cls` information. | ||||||
|   | |||||||
| @@ -2,7 +2,7 @@ | |||||||
| Documents instances | Documents instances | ||||||
| =================== | =================== | ||||||
| To create a new document object, create an instance of the relevant document | To create a new document object, create an instance of the relevant document | ||||||
| class, providing values for its fields as its constructor keyword arguments. | class, providing values for its fields as constructor keyword arguments. | ||||||
| You may provide values for any of the fields on the document:: | You may provide values for any of the fields on the document:: | ||||||
|  |  | ||||||
|     >>> page = Page(title="Test Page") |     >>> page = Page(title="Test Page") | ||||||
| @@ -30,21 +30,25 @@ already exist, then any changes will be updated atomically.  For example:: | |||||||
|  |  | ||||||
| .. note:: | .. note:: | ||||||
|  |  | ||||||
|     Changes to documents are tracked and on the whole perform `set` operations. |     Changes to documents are tracked and on the whole perform ``set`` operations. | ||||||
|  |  | ||||||
|     * ``list_field.pop(0)`` - *sets* the resulting list |     * ``list_field.push(0)`` --- *sets* the resulting list | ||||||
|     * ``del(list_field)``   - *unsets* whole list |     * ``del(list_field)``   --- *unsets* whole list | ||||||
|  |  | ||||||
|  |     With lists it's preferable to use ``Doc.update(push__list_field=0)`` as | ||||||
|  |     this stops the whole list being updated --- stopping any race conditions. | ||||||
|  |  | ||||||
| .. seealso:: | .. seealso:: | ||||||
|     :ref:`guide-atomic-updates` |     :ref:`guide-atomic-updates` | ||||||
|  |  | ||||||
| Cascading Saves | Cascading Saves | ||||||
| --------------- | --------------- | ||||||
| If your document contains :class:`~mongoengine.ReferenceField` or | If your document contains :class:`~mongoengine.fields.ReferenceField` or | ||||||
| :class:`~mongoengine.GenericReferenceField` objects, then by default the | :class:`~mongoengine.fields.GenericReferenceField` objects, then by default the | ||||||
| :meth:`~mongoengine.Document.save` method will automatically save any changes to | :meth:`~mongoengine.Document.save` method will not save any changes to | ||||||
| those objects as well.  If this is not desired passing :attr:`cascade` as False | those objects.  If you want all references to be saved also, noting each | ||||||
| to the save method turns this feature off. | save is a separate query, then passing :attr:`cascade` as True | ||||||
|  | to the save method will cascade any saves. | ||||||
|  |  | ||||||
| Deleting documents | Deleting documents | ||||||
| ------------------ | ------------------ | ||||||
| @@ -81,15 +85,16 @@ you may still use :attr:`id` to access the primary key if you want:: | |||||||
|     >>> bob.id == bob.email == 'bob@example.com' |     >>> bob.id == bob.email == 'bob@example.com' | ||||||
|     True |     True | ||||||
|  |  | ||||||
| You can also access the document's "primary key" using the :attr:`pk` field; in | You can also access the document's "primary key" using the :attr:`pk` field, | ||||||
| is an alias to :attr:`id`:: | it's an alias to :attr:`id`:: | ||||||
|  |  | ||||||
|     >>> page = Page(title="Another Test Page") |     >>> page = Page(title="Another Test Page") | ||||||
|     >>> page.save() |     >>> page.save() | ||||||
|     >>> page.id == page.pk |     >>> page.id == page.pk | ||||||
|  |     True | ||||||
|  |  | ||||||
| .. note:: | .. note:: | ||||||
|  |  | ||||||
|    If you define your own primary key field, the field implicitly becomes |    If you define your own primary key field, the field implicitly becomes | ||||||
|    required, so a :class:`ValidationError` will be thrown if you don't provide |    required, so a :class:`~mongoengine.ValidationError` will be thrown if | ||||||
|    it. |    you don't provide it. | ||||||
|   | |||||||
| @@ -7,47 +7,52 @@ GridFS | |||||||
| Writing | Writing | ||||||
| ------- | ------- | ||||||
|  |  | ||||||
| GridFS support comes in the form of the :class:`~mongoengine.FileField` field | GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field | ||||||
| object. This field acts as a file-like object and provides a couple of | object. This field acts as a file-like object and provides a couple of | ||||||
| different ways of inserting and retrieving data. Arbitrary metadata such as | different ways of inserting and retrieving data. Arbitrary metadata such as | ||||||
| content type can also be stored alongside the files. In the following example, | content type can also be stored alongside the files. The object returned when accessing a | ||||||
| a document is created to store details about animals, including a photo:: | FileField is a proxy to `Pymongo's GridFS <https://api.mongodb.com/python/current/examples/gridfs.html#gridfs-example>`_ | ||||||
|  | In the following example, a document is created to store details about animals, including a photo:: | ||||||
|  |  | ||||||
|     class Animal(Document): |     class Animal(Document): | ||||||
|         genus = StringField() |         genus = StringField() | ||||||
|         family = StringField() |         family = StringField() | ||||||
|         photo = FileField() |         photo = FileField() | ||||||
|  |  | ||||||
|     marmot = Animal('Marmota', 'Sciuridae') |     marmot = Animal(genus='Marmota', family='Sciuridae') | ||||||
|  |  | ||||||
|     marmot_photo = open('marmot.jpg', 'r')      # Retrieve a photo from disk |  | ||||||
|     marmot.photo = marmot_photo                 # Store photo in the document |  | ||||||
|     marmot.photo.content_type = 'image/jpeg'    # Store metadata |  | ||||||
|  |  | ||||||
|     marmot.save() |  | ||||||
|  |  | ||||||
| Another way of writing to a :class:`~mongoengine.FileField` is to use the |  | ||||||
| :func:`put` method. This allows for metadata to be stored in the same call as |  | ||||||
| the file:: |  | ||||||
|  |  | ||||||
|     marmot.photo.put(marmot_photo, content_type='image/jpeg') |  | ||||||
|  |  | ||||||
|  |     with open('marmot.jpg', 'rb') as fd: | ||||||
|  |         marmot.photo.put(fd, content_type = 'image/jpeg') | ||||||
|     marmot.save() |     marmot.save() | ||||||
|  |  | ||||||
| Retrieval | Retrieval | ||||||
| --------- | --------- | ||||||
|  |  | ||||||
| So using the :class:`~mongoengine.FileField` is just like using any other | So using the :class:`~mongoengine.fields.FileField` is just like using any other | ||||||
| field. The file can also be retrieved just as easily:: | field. The file can also be retrieved just as easily:: | ||||||
|  |  | ||||||
|     marmot = Animal.objects(genus='Marmota').first() |     marmot = Animal.objects(genus='Marmota').first() | ||||||
|     photo = marmot.photo.read() |     photo = marmot.photo.read() | ||||||
|     content_type = marmot.photo.content_type |     content_type = marmot.photo.content_type | ||||||
|  |  | ||||||
|  | .. note:: If you need to read() the content of a file multiple times, you'll need to "rewind" | ||||||
|  |     the file-like object using `seek`:: | ||||||
|  |  | ||||||
|  |         marmot = Animal.objects(genus='Marmota').first() | ||||||
|  |         content1 = marmot.photo.read() | ||||||
|  |         assert content1 != "" | ||||||
|  |  | ||||||
|  |         content2 = marmot.photo.read()    # will be empty | ||||||
|  |         assert content2 == "" | ||||||
|  |  | ||||||
|  |         marmot.photo.seek(0)              # rewind the file by setting the current position of the cursor in the file to 0 | ||||||
|  |         content3 = marmot.photo.read() | ||||||
|  |         assert content3 == content1 | ||||||
|  |  | ||||||
| Streaming | Streaming | ||||||
| --------- | --------- | ||||||
|  |  | ||||||
| Streaming data into a :class:`~mongoengine.FileField` is achieved in a | Streaming data into a :class:`~mongoengine.fields.FileField` is achieved in a | ||||||
| slightly different manner.  First, a new file must be created by calling the | slightly different manner.  First, a new file must be created by calling the | ||||||
| :func:`new_file` method. Data can then be written using :func:`write`:: | :func:`new_file` method. Data can then be written using :func:`write`:: | ||||||
|  |  | ||||||
| @@ -56,16 +61,17 @@ slightly different manner.  First, a new file must be created by calling the | |||||||
|     marmot.photo.write('some_more_image_data') |     marmot.photo.write('some_more_image_data') | ||||||
|     marmot.photo.close() |     marmot.photo.close() | ||||||
|  |  | ||||||
|     marmot.photo.save() |     marmot.save() | ||||||
|  |  | ||||||
| Deletion | Deletion | ||||||
| -------- | -------- | ||||||
|  |  | ||||||
| Deleting stored files is achieved with the :func:`delete` method:: | Deleting stored files is achieved with the :func:`delete` method:: | ||||||
|  |  | ||||||
|     marmot.photo.delete() |     marmot.photo.delete()    # Deletes the GridFS document | ||||||
|  |     marmot.save()            # Saves the GridFS reference (being None) contained in the marmot instance | ||||||
|  |  | ||||||
| .. note:: | .. warning:: | ||||||
|  |  | ||||||
|     The FileField in a Document actually only stores the ID of a file in a |     The FileField in a Document actually only stores the ID of a file in a | ||||||
|     separate GridFS collection. This means that deleting a document |     separate GridFS collection. This means that deleting a document | ||||||
| @@ -80,5 +86,6 @@ Replacing files | |||||||
| Files can be replaced with the :func:`replace` method. This works just like | Files can be replaced with the :func:`replace` method. This works just like | ||||||
| the :func:`put` method so even metadata can (and should) be replaced:: | the :func:`put` method so even metadata can (and should) be replaced:: | ||||||
|  |  | ||||||
|     another_marmot = open('another_marmot.png', 'r') |     another_marmot = open('another_marmot.png', 'rb') | ||||||
|     marmot.photo.replace(another_marmot, content_type='image/png') |     marmot.photo.replace(another_marmot, content_type='image/png')  # Replaces the GridFS document | ||||||
|  |     marmot.save()                                                   # Replaces the GridFS reference contained in marmot instance | ||||||
|   | |||||||
| @@ -10,5 +10,9 @@ User Guide | |||||||
|    defining-documents |    defining-documents | ||||||
|    document-instances |    document-instances | ||||||
|    querying |    querying | ||||||
|  |    validation | ||||||
|    gridfs |    gridfs | ||||||
|    signals |    signals | ||||||
|  |    text-indexes | ||||||
|  |    logging-monitoring | ||||||
|  |    mongomock | ||||||
|   | |||||||
| @@ -2,17 +2,17 @@ | |||||||
| Installing MongoEngine | Installing MongoEngine | ||||||
| ====================== | ====================== | ||||||
|  |  | ||||||
| To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_ | To use MongoEngine, you will need to download `MongoDB <http://mongodb.com/>`_ | ||||||
| and ensure it is running in an accessible location. You will also need | and ensure it is running in an accessible location. You will also need | ||||||
| `PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you | `PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you | ||||||
| install MongoEngine using setuptools, then the dependencies will be handled for | install MongoEngine using setuptools, then the dependencies will be handled for | ||||||
| you. | you. | ||||||
|  |  | ||||||
| MongoEngine is available on PyPI, so to use it you can use :program:`pip`: | MongoEngine is available on PyPI, so you can use :program:`pip`: | ||||||
|  |  | ||||||
| .. code-block:: console | .. code-block:: console | ||||||
|  |  | ||||||
|     $ pip install mongoengine |     $ python -m pip install mongoengine | ||||||
|  |  | ||||||
| Alternatively, if you don't have setuptools installed, `download it from PyPi | Alternatively, if you don't have setuptools installed, `download it from PyPi | ||||||
| <http://pypi.python.org/pypi/mongoengine/>`_ and run | <http://pypi.python.org/pypi/mongoengine/>`_ and run | ||||||
| @@ -22,10 +22,10 @@ Alternatively, if you don't have setuptools installed, `download it from PyPi | |||||||
|     $ python setup.py install |     $ python setup.py install | ||||||
|  |  | ||||||
| To use the bleeding-edge version of MongoEngine, you can get the source from | To use the bleeding-edge version of MongoEngine, you can get the source from | ||||||
| `GitHub <http://github.com/hmarr/mongoengine/>`_ and install it as above: | `GitHub <http://github.com/mongoengine/mongoengine/>`_ and install it as above: | ||||||
|  |  | ||||||
| .. code-block:: console | .. code-block:: console | ||||||
|  |  | ||||||
|     $ git clone git://github.com/hmarr/mongoengine |     $ git clone git://github.com/mongoengine/mongoengine | ||||||
|     $ cd mongoengine |     $ cd mongoengine | ||||||
|     $ python setup.py install |     $ python setup.py install | ||||||
|   | |||||||
							
								
								
									
										80
									
								
								docs/guide/logging-monitoring.rst
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										80
									
								
								docs/guide/logging-monitoring.rst
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,80 @@ | |||||||
|  | ================== | ||||||
|  | Logging/Monitoring | ||||||
|  | ================== | ||||||
|  |  | ||||||
|  | It is possible to use `pymongo.monitoring <https://api.mongodb.com/python/current/api/pymongo/monitoring.html>`_ to monitor | ||||||
|  | the driver events (e.g: queries, connections, etc). This can be handy if you want to monitor the queries issued by | ||||||
|  | MongoEngine to the driver. | ||||||
|  |  | ||||||
|  | To use `pymongo.monitoring` with MongoEngine, you need to make sure that you are registering the listeners | ||||||
|  | **before** establishing the database connection (i.e. calling `connect`): | ||||||
|  |  | ||||||
|  | The following snippet provides a basic logging of all command events: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     import logging | ||||||
|  |     from pymongo import monitoring | ||||||
|  |     from mongoengine import * | ||||||
|  |  | ||||||
|  |     log = logging.getLogger() | ||||||
|  |     log.setLevel(logging.DEBUG) | ||||||
|  |     logging.basicConfig(level=logging.DEBUG) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     class CommandLogger(monitoring.CommandListener): | ||||||
|  |  | ||||||
|  |         def started(self, event): | ||||||
|  |             log.debug("Command {0.command_name} with request id " | ||||||
|  |                      "{0.request_id} started on server " | ||||||
|  |                      "{0.connection_id}".format(event)) | ||||||
|  |  | ||||||
|  |         def succeeded(self, event): | ||||||
|  |             log.debug("Command {0.command_name} with request id " | ||||||
|  |                      "{0.request_id} on server {0.connection_id} " | ||||||
|  |                      "succeeded in {0.duration_micros} " | ||||||
|  |                      "microseconds".format(event)) | ||||||
|  |  | ||||||
|  |         def failed(self, event): | ||||||
|  |             log.debug("Command {0.command_name} with request id " | ||||||
|  |                      "{0.request_id} on server {0.connection_id} " | ||||||
|  |                      "failed in {0.duration_micros} " | ||||||
|  |                      "microseconds".format(event)) | ||||||
|  |  | ||||||
|  |     monitoring.register(CommandLogger()) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     class Jedi(Document): | ||||||
|  |         name = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     connect() | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     log.info('GO!') | ||||||
|  |  | ||||||
|  |     log.info('Saving an item through MongoEngine...') | ||||||
|  |     Jedi(name='Obi-Wan Kenobii').save() | ||||||
|  |  | ||||||
|  |     log.info('Querying through MongoEngine...') | ||||||
|  |     obiwan = Jedi.objects.first() | ||||||
|  |  | ||||||
|  |     log.info('Updating through MongoEngine...') | ||||||
|  |     obiwan.name = 'Obi-Wan Kenobi' | ||||||
|  |     obiwan.save() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Executing this prints the following output:: | ||||||
|  |  | ||||||
|  |     INFO:root:GO! | ||||||
|  |     INFO:root:Saving an item through MongoEngine... | ||||||
|  |     DEBUG:root:Command insert with request id 1681692777 started on server ('localhost', 27017) | ||||||
|  |     DEBUG:root:Command insert with request id 1681692777 on server ('localhost', 27017) succeeded in 562 microseconds | ||||||
|  |     INFO:root:Querying through MongoEngine... | ||||||
|  |     DEBUG:root:Command find with request id 1714636915 started on server ('localhost', 27017) | ||||||
|  |     DEBUG:root:Command find with request id 1714636915 on server ('localhost', 27017) succeeded in 341 microseconds | ||||||
|  |     INFO:root:Updating through MongoEngine... | ||||||
|  |     DEBUG:root:Command update with request id 1957747793 started on server ('localhost', 27017) | ||||||
|  |     DEBUG:root:Command update with request id 1957747793 on server ('localhost', 27017) succeeded in 455 microseconds | ||||||
|  |  | ||||||
|  | More details can of course be obtained by checking the `event` argument from the `CommandListener`. | ||||||
							
								
								
									
										48
									
								
								docs/guide/mongomock.rst
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										48
									
								
								docs/guide/mongomock.rst
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,48 @@ | |||||||
|  | ============================== | ||||||
|  | Use mongomock for testing | ||||||
|  | ============================== | ||||||
|  |  | ||||||
|  | `mongomock <https://github.com/vmalloc/mongomock/>`_ is a package to do just | ||||||
|  | what the name implies, mocking a mongo database. | ||||||
|  |  | ||||||
|  | To use with mongoengine, simply specify mongomock when connecting with | ||||||
|  | mongoengine: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     connect('mongoenginetest', host='mongomock://localhost') | ||||||
|  |     conn = get_connection() | ||||||
|  |  | ||||||
|  | or with an alias: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     connect('mongoenginetest', host='mongomock://localhost', alias='testdb') | ||||||
|  |     conn = get_connection('testdb') | ||||||
|  |  | ||||||
|  | Example of test file: | ||||||
|  | --------------------- | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     import unittest | ||||||
|  |     from mongoengine import connect, disconnect | ||||||
|  |  | ||||||
|  |     class Person(Document): | ||||||
|  |         name = StringField() | ||||||
|  |  | ||||||
|  |     class TestPerson(unittest.TestCase): | ||||||
|  |  | ||||||
|  |         @classmethod | ||||||
|  |         def setUpClass(cls): | ||||||
|  |             connect('mongoenginetest', host='mongomock://localhost') | ||||||
|  |  | ||||||
|  |         @classmethod | ||||||
|  |         def tearDownClass(cls): | ||||||
|  |            disconnect() | ||||||
|  |  | ||||||
|  |         def test_thing(self): | ||||||
|  |             pers = Person(name='John') | ||||||
|  |             pers.save() | ||||||
|  |  | ||||||
|  |             fresh_pers = Person.objects().first() | ||||||
|  |             assert fresh_pers.name ==  'John' | ||||||
| @@ -15,11 +15,10 @@ fetch documents from the database:: | |||||||
|  |  | ||||||
| .. note:: | .. note:: | ||||||
|  |  | ||||||
|    Once the iteration finishes (when :class:`StopIteration` is raised), |     As of MongoEngine 0.8 the querysets utilise a local cache.  So iterating | ||||||
|    :meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the |     it multiple times will only cause a single query.  If this is not the | ||||||
|    :class:`~mongoengine.queryset.QuerySet` may be iterated over again. The |     desired behaviour you can call :class:`~mongoengine.QuerySet.no_cache` | ||||||
|    results of the first iteration are *not* cached, so the database will be hit |     (version **0.8.3+**) to return a non-caching queryset. | ||||||
|    each time the :class:`~mongoengine.queryset.QuerySet` is iterated over. |  | ||||||
|  |  | ||||||
| Filtering queries | Filtering queries | ||||||
| ================= | ================= | ||||||
| @@ -40,10 +39,18 @@ syntax:: | |||||||
|     # been written by a user whose 'country' field is set to 'uk' |     # been written by a user whose 'country' field is set to 'uk' | ||||||
|     uk_pages = Page.objects(author__country='uk') |     uk_pages = Page.objects(author__country='uk') | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |  | ||||||
|  |    (version **0.9.1+**) if your field name is the same as a MongoDB operator name (for | ||||||
|  |    example ``type``, ``lte``, ``lt``...) and you want to place it at the end of the | ||||||
|  |    lookup keyword, mongoengine automatically prepends $ to it. To avoid this, use __ at | ||||||
|  |    the end of your lookup keyword. For example, if your field name is ``type`` and you | ||||||
|  |    want to query by this field, you must use ``.objects(user__type__="admin")`` instead of | ||||||
|  |    ``.objects(user__type="admin")`` | ||||||
|  |  | ||||||
| Query operators | Query operators | ||||||
| =============== | =============== | ||||||
| Operators other than equality may also be used in queries; just attach the | Operators other than equality may also be used in queries --- just attach the | ||||||
| operator name to a key with a double-underscore:: | operator name to a key with a double-underscore:: | ||||||
|  |  | ||||||
|     # Only find users whose age is 18 or less |     # Only find users whose age is 18 or less | ||||||
| @@ -57,7 +64,7 @@ Available operators are as follows: | |||||||
| * ``gt`` -- greater than | * ``gt`` -- greater than | ||||||
| * ``gte`` -- greater than or equal to | * ``gte`` -- greater than or equal to | ||||||
| * ``not`` -- negate a standard check, may be used before other operators (e.g. | * ``not`` -- negate a standard check, may be used before other operators (e.g. | ||||||
|   ``Q(age__not__mod=5)``) |   ``Q(age__not__mod=(5, 0))``) | ||||||
| * ``in`` -- value is in list (a list of values should be provided) | * ``in`` -- value is in list (a list of values should be provided) | ||||||
| * ``nin`` -- value is not in list (a list of values should be provided) | * ``nin`` -- value is not in list (a list of values should be provided) | ||||||
| * ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values | * ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values | ||||||
| @@ -65,6 +72,9 @@ Available operators are as follows: | |||||||
| * ``size`` -- the size of the array is | * ``size`` -- the size of the array is | ||||||
| * ``exists`` -- value for field exists | * ``exists`` -- value for field exists | ||||||
|  |  | ||||||
|  | String queries | ||||||
|  | -------------- | ||||||
|  |  | ||||||
| The following operators are available as shortcuts to querying with regular | The following operators are available as shortcuts to querying with regular | ||||||
| expressions: | expressions: | ||||||
|  |  | ||||||
| @@ -78,12 +88,75 @@ expressions: | |||||||
| * ``iendswith`` -- string field ends with value (case insensitive) | * ``iendswith`` -- string field ends with value (case insensitive) | ||||||
| * ``match``  -- performs an $elemMatch so you can match an entire document within an array | * ``match``  -- performs an $elemMatch so you can match an entire document within an array | ||||||
|  |  | ||||||
| There are a few special operators for performing geographical queries, that |  | ||||||
| may be used with :class:`~mongoengine.GeoPointField`\ s: | Geo queries | ||||||
|  | ----------- | ||||||
|  |  | ||||||
|  | There are a few special operators for performing geographical queries. | ||||||
|  | The following were added in MongoEngine 0.8 for | ||||||
|  | :class:`~mongoengine.fields.PointField`, | ||||||
|  | :class:`~mongoengine.fields.LineStringField` and | ||||||
|  | :class:`~mongoengine.fields.PolygonField`: | ||||||
|  |  | ||||||
|  | * ``geo_within`` -- check if a geometry is within a polygon. For ease of use | ||||||
|  |   it accepts either a geojson geometry or just the polygon coordinates eg:: | ||||||
|  |  | ||||||
|  |         loc.objects(point__geo_within=[[[40, 5], [40, 6], [41, 6], [40, 5]]]) | ||||||
|  |         loc.objects(point__geo_within={"type": "Polygon", | ||||||
|  |                                  "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) | ||||||
|  |  | ||||||
|  | * ``geo_within_box`` -- simplified geo_within searching with a box eg:: | ||||||
|  |  | ||||||
|  |         loc.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)]) | ||||||
|  |         loc.objects(point__geo_within_box=[<bottom left coordinates>, <upper right coordinates>]) | ||||||
|  |  | ||||||
|  | * ``geo_within_polygon`` -- simplified geo_within searching within a simple polygon eg:: | ||||||
|  |  | ||||||
|  |         loc.objects(point__geo_within_polygon=[[40, 5], [40, 6], [41, 6], [40, 5]]) | ||||||
|  |         loc.objects(point__geo_within_polygon=[ [ <x1> , <y1> ] , | ||||||
|  |                                                 [ <x2> , <y2> ] , | ||||||
|  |                                                 [ <x3> , <y3> ] ]) | ||||||
|  |  | ||||||
|  | * ``geo_within_center`` -- simplified geo_within the flat circle radius of a point eg:: | ||||||
|  |  | ||||||
|  |         loc.objects(point__geo_within_center=[(-125.0, 35.0), 1]) | ||||||
|  |         loc.objects(point__geo_within_center=[ [ <x>, <y> ] , <radius> ]) | ||||||
|  |  | ||||||
|  | * ``geo_within_sphere`` -- simplified geo_within the spherical circle radius of a point eg:: | ||||||
|  |  | ||||||
|  |         loc.objects(point__geo_within_sphere=[(-125.0, 35.0), 1]) | ||||||
|  |         loc.objects(point__geo_within_sphere=[ [ <x>, <y> ] , <radius> ]) | ||||||
|  |  | ||||||
|  | * ``geo_intersects`` -- selects all locations that intersect with a geometry eg:: | ||||||
|  |  | ||||||
|  |         # Inferred from provided points lists: | ||||||
|  |         loc.objects(poly__geo_intersects=[40, 6]) | ||||||
|  |         loc.objects(poly__geo_intersects=[[40, 5], [40, 6]]) | ||||||
|  |         loc.objects(poly__geo_intersects=[[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]) | ||||||
|  |  | ||||||
|  |         # With geoJson style objects | ||||||
|  |         loc.objects(poly__geo_intersects={"type": "Point", "coordinates": [40, 6]}) | ||||||
|  |         loc.objects(poly__geo_intersects={"type": "LineString", | ||||||
|  |                                           "coordinates": [[40, 5], [40, 6]]}) | ||||||
|  |         loc.objects(poly__geo_intersects={"type": "Polygon", | ||||||
|  |                                           "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}) | ||||||
|  |  | ||||||
|  | * ``near`` -- find all the locations near a given point:: | ||||||
|  |  | ||||||
|  |         loc.objects(point__near=[40, 5]) | ||||||
|  |         loc.objects(point__near={"type": "Point", "coordinates": [40, 5]}) | ||||||
|  |  | ||||||
|  |   You can also set the maximum and/or the minimum distance in meters as well:: | ||||||
|  |  | ||||||
|  |         loc.objects(point__near=[40, 5], point__max_distance=1000) | ||||||
|  |         loc.objects(point__near=[40, 5], point__min_distance=100) | ||||||
|  |  | ||||||
|  | The older 2D indexes are still supported with the | ||||||
|  | :class:`~mongoengine.fields.GeoPointField`: | ||||||
|  |  | ||||||
| * ``within_distance`` -- provide a list containing a point and a maximum | * ``within_distance`` -- provide a list containing a point and a maximum | ||||||
|   distance (e.g. [(41.342, -87.653), 5]) |   distance (e.g. [(41.342, -87.653), 5]) | ||||||
| * ``within_spherical_distance`` -- Same as above but using the spherical geo model | * ``within_spherical_distance`` -- same as above but using the spherical geo model | ||||||
|   (e.g. [(41.342, -87.653), 5/earth_radius]) |   (e.g. [(41.342, -87.653), 5/earth_radius]) | ||||||
| * ``near`` -- order the documents by how close they are to a given point | * ``near`` -- order the documents by how close they are to a given point | ||||||
| * ``near_sphere`` -- Same as above but using the spherical geo model | * ``near_sphere`` -- Same as above but using the spherical geo model | ||||||
| @@ -91,14 +164,19 @@ may used with :class:`~mongoengine.GeoPointField`\ s: | |||||||
|   [(35.0, -125.0), (40.0, -100.0)]) |   [(35.0, -125.0), (40.0, -100.0)]) | ||||||
| * ``within_polygon`` -- filter documents to those within a given polygon (e.g. | * ``within_polygon`` -- filter documents to those within a given polygon (e.g. | ||||||
|   [(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]). |   [(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]). | ||||||
|  |  | ||||||
|   .. note:: Requires Mongo Server 2.0 |   .. note:: Requires Mongo Server 2.0 | ||||||
|  |  | ||||||
|  | * ``max_distance`` -- can be added to your location queries to set a maximum | ||||||
|  |   distance. | ||||||
|  | * ``min_distance`` -- can be added to your location queries to set a minimum | ||||||
|  |   distance. | ||||||
|  |  | ||||||
| Querying lists | Querying lists | ||||||
| -------------- | -------------- | ||||||
| On most fields, this syntax will look up documents where the field specified | On most fields, this syntax will look up documents where the field specified | ||||||
| matches the given value exactly, but when the field refers to a | matches the given value exactly, but when the field refers to a | ||||||
| :class:`~mongoengine.ListField`, a single item may be provided, in which case | :class:`~mongoengine.fields.ListField`, a single item may be provided, in which case | ||||||
| lists that contain that item will be matched:: | lists that contain that item will be matched:: | ||||||
|  |  | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
| @@ -129,12 +207,14 @@ However, this doesn't map well to the syntax so you can also use a capital S ins | |||||||
|  |  | ||||||
|     Post.objects(comments__by="joe").update(inc__comments__S__votes=1) |     Post.objects(comments__by="joe").update(inc__comments__S__votes=1) | ||||||
|  |  | ||||||
|     .. note:: Due to Mongo currently the $ operator only applies to the first matched item in the query. | .. note:: | ||||||
|  |     Due to :program:`Mongo`, currently the $ operator only applies to the | ||||||
|  |     first matched item in the query. | ||||||
|  |  | ||||||
|  |  | ||||||
| Raw queries | Raw queries | ||||||
| ----------- | ----------- | ||||||
| It is possible to provide a raw PyMongo query as a query parameter, which will | It is possible to provide a raw :mod:`PyMongo` query as a query parameter, which will | ||||||
| be integrated directly into the query. This is done using the ``__raw__`` | be integrated directly into the query. This is done using the ``__raw__`` | ||||||
| keyword argument:: | keyword argument:: | ||||||
|  |  | ||||||
| @@ -142,14 +222,26 @@ keyword argument:: | |||||||
|  |  | ||||||
| .. versionadded:: 0.4 | .. versionadded:: 0.4 | ||||||
|  |  | ||||||
|  | Sorting/Ordering results | ||||||
|  | ======================== | ||||||
|  | It is possible to order the results by 1 or more keys using :meth:`~mongoengine.queryset.QuerySet.order_by`. | ||||||
|  | The order may be specified by prefixing each of the keys with "+" or "-". Ascending order is assumed if there's no prefix:: | ||||||
|  |  | ||||||
|  |     # Order by ascending date | ||||||
|  |     blogs = BlogPost.objects().order_by('date')    # equivalent to .order_by('+date') | ||||||
|  |  | ||||||
|  |     # Order by ascending date first, then descending title | ||||||
|  |     blogs = BlogPost.objects().order_by('+date', '-title') | ||||||
|  |  | ||||||
|  |  | ||||||
| Limiting and skipping results | Limiting and skipping results | ||||||
| ============================= | ============================= | ||||||
| Just as with traditional ORMs, you may limit the number of results returned, or | Just as with traditional ORMs, you may limit the number of results returned or | ||||||
| skip a number of results in your query. | skip a number of results in your query. | ||||||
| :meth:`~mongoengine.queryset.QuerySet.limit` and | :meth:`~mongoengine.queryset.QuerySet.limit` and | ||||||
| :meth:`~mongoengine.queryset.QuerySet.skip` methods are available on | :meth:`~mongoengine.queryset.QuerySet.skip` methods are available on | ||||||
| :class:`~mongoengine.queryset.QuerySet` objects, but the prefered syntax for | :class:`~mongoengine.queryset.QuerySet` objects, but the `array-slicing` syntax | ||||||
| achieving this is using array-slicing syntax:: | is preferred for achieving this:: | ||||||
|  |  | ||||||
|     # Only the first 5 people |     # Only the first 5 people | ||||||
|     users = User.objects[:5] |     users = User.objects[:5] | ||||||
| @@ -157,7 +249,7 @@ achieving this is using array-slicing syntax:: | |||||||
|     # All except for the first 5 people |     # All except for the first 5 people | ||||||
|     users = User.objects[5:] |     users = User.objects[5:] | ||||||
|  |  | ||||||
|     # 5 users, starting from the 10th user found |     # 5 users, starting from the 11th user found | ||||||
|     users = User.objects[10:15] |     users = User.objects[10:15] | ||||||
|  |  | ||||||
| You may also index the query to retrieve a single result. If an item at that | You may also index the query to retrieve a single result. If an item at that | ||||||
| @@ -179,25 +271,21 @@ Retrieving unique results | |||||||
| ------------------------- | ------------------------- | ||||||
| To retrieve a result that should be unique in the collection, use | To retrieve a result that should be unique in the collection, use | ||||||
| :meth:`~mongoengine.queryset.QuerySet.get`. This will raise | :meth:`~mongoengine.queryset.QuerySet.get`. This will raise | ||||||
| :class:`~mongoengine.queryset.DoesNotExist` if no document matches the query, | :class:`~mongoengine.queryset.DoesNotExist` if | ||||||
| and :class:`~mongoengine.queryset.MultipleObjectsReturned` if more than one | no document matches the query, and | ||||||
| document matched the query. | :class:`~mongoengine.queryset.MultipleObjectsReturned` | ||||||
|  | if more than one document matched the query.  These exceptions are merged into | ||||||
|  | your document definitions, e.g. `MyDoc.DoesNotExist` | ||||||
|  |  | ||||||
| A variation of this method exists, | A variation of this method, get_or_create() existed, but it was unsafe. It | ||||||
| :meth:`~mongoengine.queryset.Queryset.get_or_create`, that will create a new | could not be made safe, because there are no transactions in mongoDB. Other | ||||||
| document with the query arguments if no documents match the query. An | approaches should be investigated, to ensure you don't accidentally duplicate | ||||||
| additional keyword argument, :attr:`defaults` may be provided, which will be | data when using something similar to this method. Therefore it was deprecated | ||||||
| used as default values for the new document, in the case that it should need | in 0.8 and removed in 0.10. | ||||||
| to be created:: |  | ||||||
|  |  | ||||||
|     >>> a, created = User.objects.get_or_create(name='User A', defaults={'age': 30}) |  | ||||||
|     >>> b, created = User.objects.get_or_create(name='User A', defaults={'age': 40}) |  | ||||||
|     >>> a.name == b.name and a.age == b.age |  | ||||||
|     True |  | ||||||
|  |  | ||||||
| Default Document queries | Default Document queries | ||||||
| ======================== | ======================== | ||||||
| By default, the objects :attr:`~mongoengine.Document.objects` attribute on a | By default, the objects :attr:`~Document.objects` attribute on a | ||||||
| document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter | document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter | ||||||
| the collection -- it returns all objects. This may be changed by defining a | the collection -- it returns all objects. This may be changed by defining a | ||||||
| method on a document that modifies a queryset. The method should accept two | method on a document that modifies a queryset. The method should accept two | ||||||
| @@ -232,7 +320,7 @@ custom manager methods as you like:: | |||||||
|     BlogPost(title='test1', published=False).save() |     BlogPost(title='test1', published=False).save() | ||||||
|     BlogPost(title='test2', published=True).save() |     BlogPost(title='test2', published=True).save() | ||||||
|     assert len(BlogPost.objects) == 2 |     assert len(BlogPost.objects) == 2 | ||||||
|     assert len(BlogPost.live_posts) == 1 |     assert len(BlogPost.live_posts()) == 1 | ||||||
|  |  | ||||||
| Custom QuerySets | Custom QuerySets | ||||||
| ================ | ================ | ||||||
| @@ -240,14 +328,19 @@ Should you want to add custom methods for interacting with or filtering | |||||||
| documents, extending the :class:`~mongoengine.queryset.QuerySet` class may be | documents, extending the :class:`~mongoengine.queryset.QuerySet` class may be | ||||||
| the way to go. To use a custom :class:`~mongoengine.queryset.QuerySet` class on | the way to go. To use a custom :class:`~mongoengine.queryset.QuerySet` class on | ||||||
| a document, set ``queryset_class`` to the custom class in a | a document, set ``queryset_class`` to the custom class in a | ||||||
| :class:`~mongoengine.Document`\ s ``meta`` dictionary:: | :class:`~mongoengine.Document`'s ``meta`` dictionary:: | ||||||
|  |  | ||||||
|     class AwesomerQuerySet(QuerySet): |     class AwesomerQuerySet(QuerySet): | ||||||
|         pass |  | ||||||
|  |         def get_awesome(self): | ||||||
|  |             return self.filter(awesome=True) | ||||||
|  |  | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
|         meta = {'queryset_class': AwesomerQuerySet} |         meta = {'queryset_class': AwesomerQuerySet} | ||||||
|  |  | ||||||
|  |     # To call: | ||||||
|  |     Page.objects.get_awesome() | ||||||
|  |  | ||||||
| .. versionadded:: 0.4 | .. versionadded:: 0.4 | ||||||
|  |  | ||||||
| Aggregation | Aggregation | ||||||
| @@ -259,12 +352,19 @@ Javascript code that is executed on the database server. | |||||||
|  |  | ||||||
| Counting results | Counting results | ||||||
| ---------------- | ---------------- | ||||||
| Just as with limiting and skipping results, there is a method on | Just as with limiting and skipping results, there is a method on a | ||||||
| :class:`~mongoengine.queryset.QuerySet` objects -- | :class:`~mongoengine.queryset.QuerySet` object -- | ||||||
| :meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic | :meth:`~mongoengine.queryset.QuerySet.count`:: | ||||||
| way of achieving this:: |  | ||||||
|  |  | ||||||
|     num_users = len(User.objects) |     num_users = User.objects.count() | ||||||
|  |  | ||||||
|  | You could technically use ``len(User.objects)`` to get the same result, but it | ||||||
|  | would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`. | ||||||
|  | When you execute a server-side count query, you let MongoDB do the heavy | ||||||
|  | lifting and you receive a single integer over the wire. Meanwhile, ``len()`` | ||||||
|  | retrieves all the results, places them in a local cache, and finally counts | ||||||
|  | them. If we compare the performance of the two operations, ``len()`` is much slower | ||||||
|  | than :meth:`~mongoengine.queryset.QuerySet.count`. | ||||||
|  |  | ||||||
| Further aggregation | Further aggregation | ||||||
| ------------------- | ------------------- | ||||||
| @@ -298,6 +398,25 @@ would be generating "tag-clouds":: | |||||||
|     top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] |     top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | MongoDB aggregation API | ||||||
|  | ----------------------- | ||||||
|  | If you need to run aggregation pipelines, MongoEngine provides an entry point to `Pymongo's aggregation framework <https://api.mongodb.com/python/current/examples/aggregation.html#aggregation-framework>`_ | ||||||
|  | through :meth:`~mongoengine.queryset.QuerySet.aggregate`. Check out Pymongo's documentation for the syntax and pipeline. | ||||||
|  | An example of its use would be:: | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         Person(name='John').save() | ||||||
|  |         Person(name='Bob').save() | ||||||
|  |  | ||||||
|  |         pipeline = [ | ||||||
|  |             {"$sort" : {"name" : -1}}, | ||||||
|  |             {"$project": {"_id": 0, "name": {"$toUpper": "$name"}}} | ||||||
|  |             ] | ||||||
|  |         data = Person.objects().aggregate(pipeline) | ||||||
|  |         assert data == [{'name': 'BOB'}, {'name': 'JOHN'}] | ||||||
|  |  | ||||||
| Query efficiency and performance | Query efficiency and performance | ||||||
| ================================ | ================================ | ||||||
|  |  | ||||||
| @@ -310,7 +429,7 @@ Retrieving a subset of fields | |||||||
| Sometimes a subset of fields on a :class:`~mongoengine.Document` is required, | Sometimes a subset of fields on a :class:`~mongoengine.Document` is required, | ||||||
| and for efficiency only these should be retrieved from the database. This issue | and for efficiency only these should be retrieved from the database. This issue | ||||||
| is especially important for MongoDB, as fields may often be extremely large | is especially important for MongoDB, as fields may often be extremely large | ||||||
| (e.g. a :class:`~mongoengine.ListField` of | (e.g. a :class:`~mongoengine.fields.ListField` of | ||||||
| :class:`~mongoengine.EmbeddedDocument`\ s, which represent the comments on a | :class:`~mongoengine.EmbeddedDocument`\ s, which represent the comments on a | ||||||
| blog post). To select only a subset of fields, use | blog post). To select only a subset of fields, use | ||||||
| :meth:`~mongoengine.queryset.QuerySet.only`, specifying the fields you want to | :meth:`~mongoengine.queryset.QuerySet.only`, specifying the fields you want to | ||||||
| @@ -342,14 +461,14 @@ If you later need the missing fields, just call | |||||||
| Getting related data | Getting related data | ||||||
| -------------------- | -------------------- | ||||||
|  |  | ||||||
| When iterating the results of :class:`~mongoengine.ListField` or | When iterating the results of :class:`~mongoengine.fields.ListField` or | ||||||
| :class:`~mongoengine.DictField` we automatically dereference any | :class:`~mongoengine.fields.DictField` we automatically dereference any | ||||||
| :class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the | :class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the | ||||||
| number of queries to mongo. | number of queries to mongo. | ||||||
|  |  | ||||||
| There are times when that efficiency is not enough, documents that have | There are times when that efficiency is not enough, documents that have | ||||||
| :class:`~mongoengine.ReferenceField` objects or | :class:`~mongoengine.fields.ReferenceField` objects or | ||||||
| :class:`~mongoengine.GenericReferenceField` objects at the top level are | :class:`~mongoengine.fields.GenericReferenceField` objects at the top level are | ||||||
| expensive as the number of queries to MongoDB can quickly rise. | expensive as the number of queries to MongoDB can quickly rise. | ||||||
|  |  | ||||||
| To limit the number of queries use | To limit the number of queries use | ||||||
| @@ -360,8 +479,30 @@ references to the depth of 1 level.  If you have more complicated documents and | |||||||
| want to dereference more of the object at once then increasing the :attr:`max_depth` | want to dereference more of the object at once then increasing the :attr:`max_depth` | ||||||
| will dereference more levels of the document. | will dereference more levels of the document. | ||||||
|  |  | ||||||
|  | Turning off dereferencing | ||||||
|  | ------------------------- | ||||||
|  |  | ||||||
|  | Sometimes for performance reasons you don't want to automatically dereference | ||||||
|  | data. To turn off dereferencing of the results of a query use | ||||||
|  | :func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so:: | ||||||
|  |  | ||||||
|  |     post = Post.objects.no_dereference().first() | ||||||
|  |     assert(isinstance(post.author, DBRef)) | ||||||
|  |  | ||||||
|  | You can also turn off all dereferencing for a fixed period by using the | ||||||
|  | :class:`~mongoengine.context_managers.no_dereference` context manager:: | ||||||
|  |  | ||||||
|  |     with no_dereference(Post) as Post: | ||||||
|  |         post = Post.objects.first() | ||||||
|  |         assert(isinstance(post.author, DBRef)) | ||||||
|  |  | ||||||
|  |     # Outside the context manager dereferencing occurs. | ||||||
|  |     assert(isinstance(post.author, User)) | ||||||
|  |  | ||||||
|  |  | ||||||
| Advanced queries | Advanced queries | ||||||
| ================ | ================ | ||||||
|  |  | ||||||
| Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword | Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword | ||||||
| arguments can't fully express the query you want to use -- for example if you | arguments can't fully express the query you want to use -- for example if you | ||||||
| need to combine a number of constraints using *and* and *or*. This is made | need to combine a number of constraints using *and* and *or*. This is made | ||||||
| @@ -374,34 +515,46 @@ operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the | |||||||
| first positional argument to :attr:`Document.objects` when you filter it by | first positional argument to :attr:`Document.objects` when you filter it by | ||||||
| calling it with keyword arguments:: | calling it with keyword arguments:: | ||||||
|  |  | ||||||
|  |     from mongoengine.queryset.visitor import Q | ||||||
|  |  | ||||||
|     # Get published posts |     # Get published posts | ||||||
|     Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now())) |     Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now())) | ||||||
|  |  | ||||||
|     # Get top posts |     # Get top posts | ||||||
|     Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000)) |     Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000)) | ||||||
|  |  | ||||||
|  | .. warning:: You have to use bitwise operators.  You cannot use ``or``, ``and`` | ||||||
|  |     to combine queries as ``Q(a=a) or Q(b=b)`` is not the same as | ||||||
|  |     ``Q(a=a) | Q(b=b)``. As ``Q(a=a)`` equates to true ``Q(a=a) or Q(b=b)`` is | ||||||
|  |     the same as ``Q(a=a)``. | ||||||
|  |  | ||||||
| .. _guide-atomic-updates: | .. _guide-atomic-updates: | ||||||
|  |  | ||||||
| Atomic updates | Atomic updates | ||||||
| ============== | ============== | ||||||
| Documents may be updated atomically by using the | Documents may be updated atomically by using the | ||||||
| :meth:`~mongoengine.queryset.QuerySet.update_one` and | :meth:`~mongoengine.queryset.QuerySet.update_one`, | ||||||
| :meth:`~mongoengine.queryset.QuerySet.update` methods on a | :meth:`~mongoengine.queryset.QuerySet.update` and | ||||||
| :meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers" | :meth:`~mongoengine.queryset.QuerySet.modify` methods on a | ||||||
| that you may use with these methods: | :class:`~mongoengine.queryset.QuerySet` or | ||||||
|  | :meth:`~mongoengine.Document.modify` and | ||||||
|  | :meth:`~mongoengine.Document.save` (with :attr:`save_condition` argument) on a | ||||||
|  | :class:`~mongoengine.Document`. | ||||||
|  | There are several different "modifiers" that you may use with these methods: | ||||||
|  |  | ||||||
| * ``set`` -- set a particular value | * ``set`` -- set a particular value | ||||||
| * ``unset`` -- delete a particular value (since MongoDB v1.3+) | * ``unset`` -- delete a particular value (since MongoDB v1.3) | ||||||
| * ``inc`` -- increment a value by a given amount | * ``inc`` -- increment a value by a given amount | ||||||
| * ``dec`` -- decrement a value by a given amount | * ``dec`` -- decrement a value by a given amount | ||||||
| * ``pop`` -- remove the last item from a list |  | ||||||
| * ``push`` -- append a value to a list | * ``push`` -- append a value to a list | ||||||
| * ``push_all`` -- append several values to a list | * ``push_all`` -- append several values to a list | ||||||
| * ``pop`` -- remove the first or last element of a list | * ``pop`` -- remove the first or last element of a list `depending on the value`_ | ||||||
| * ``pull`` -- remove a value from a list | * ``pull`` -- remove a value from a list | ||||||
| * ``pull_all`` -- remove several values from a list | * ``pull_all`` -- remove several values from a list | ||||||
| * ``add_to_set`` -- add value to a list only if it's not in the list already | * ``add_to_set`` -- add value to a list only if it's not in the list already | ||||||
|  |  | ||||||
|  | .. _depending on the value: http://docs.mongodb.org/manual/reference/operator/update/pop/ | ||||||
|  |  | ||||||
| The syntax for atomic updates is similar to the querying syntax, but the | The syntax for atomic updates is similar to the querying syntax, but the | ||||||
| modifier comes before the field, not after it:: | modifier comes before the field, not after it:: | ||||||
|  |  | ||||||
| @@ -420,7 +573,14 @@ modifier comes before the field, not after it:: | |||||||
|     >>> post.tags |     >>> post.tags | ||||||
|     ['database', 'nosql'] |     ['database', 'nosql'] | ||||||
|  |  | ||||||
| .. note :: | .. note:: | ||||||
|  |  | ||||||
|  |     If no modifier operator is specified the default will be ``$set``. So the following sentences are identical:: | ||||||
|  |  | ||||||
|  |         >>> BlogPost.objects(id=post.id).update(title='Example Post') | ||||||
|  |         >>> BlogPost.objects(id=post.id).update(set__title='Example Post') | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |  | ||||||
|     In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates |     In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates | ||||||
|     on changed documents by tracking changes to that document. |     on changed documents by tracking changes to that document. | ||||||
| @@ -436,10 +596,20 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`:: | |||||||
|     >>> post.tags |     >>> post.tags | ||||||
|     ['database', 'mongodb'] |     ['database', 'mongodb'] | ||||||
|  |  | ||||||
| .. note :: | From MongoDB version 2.6, the push operator supports the $position value, which | ||||||
|  | allows pushing values at a given index:: | ||||||
|  |  | ||||||
|  |     >>> post = BlogPost(title="Test", tags=["mongo"]) | ||||||
|  |     >>> post.save() | ||||||
|  |     >>> post.update(push__tags__0=["database", "code"]) | ||||||
|  |     >>> post.reload() | ||||||
|  |     >>> post.tags | ||||||
|  |     ['database', 'code', 'mongo'] | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|     Currently only top level lists are handled, future versions of mongodb / |     Currently only top level lists are handled, future versions of mongodb / | ||||||
|     pymongo plan to support nested positional operators.  See `The $ positional |     pymongo plan to support nested positional operators.  See `The $ positional | ||||||
|     operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_. |     operator <https://docs.mongodb.com/manual/tutorial/update-documents/#Updating-The%24positionaloperator>`_. | ||||||
|  |  | ||||||
| Server-side javascript execution | Server-side javascript execution | ||||||
| ================================ | ================================ | ||||||
| @@ -478,7 +648,7 @@ Some variables are made available in the scope of the Javascript function: | |||||||
|  |  | ||||||
| The following example demonstrates the intended usage of | The following example demonstrates the intended usage of | ||||||
| :meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums | :meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums | ||||||
| over a field on a document (this functionality is already available throught | over a field on a document (this functionality is already available through | ||||||
| :meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of | :meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of | ||||||
| example):: | example):: | ||||||
|  |  | ||||||
| @@ -505,7 +675,7 @@ Javascript code. When accessing a field on a collection object, use | |||||||
| square-bracket notation, and prefix the MongoEngine field name with a tilde. | square-bracket notation, and prefix the MongoEngine field name with a tilde. | ||||||
| The field name that follows the tilde will be translated to the name used in | The field name that follows the tilde will be translated to the name used in | ||||||
| the database. Note that when referring to fields on embedded documents, | the database. Note that when referring to fields on embedded documents, | ||||||
| the name of the :class:`~mongoengine.EmbeddedDocumentField`, followed by a dot, | the name of the :class:`~mongoengine.fields.EmbeddedDocumentField`, followed by a dot, | ||||||
| should be used before the name of the field on the embedded document. The | should be used before the name of the field on the embedded document. The | ||||||
| following example shows how the substitutions are made:: | following example shows how the substitutions are made:: | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,40 +1,102 @@ | |||||||
| .. _signals: | .. _signals: | ||||||
|  |  | ||||||
|  | ======= | ||||||
| Signals | Signals | ||||||
| ======= | ======= | ||||||
|  |  | ||||||
| .. versionadded:: 0.5 | .. versionadded:: 0.5 | ||||||
|  |  | ||||||
| Signal support is provided by the excellent `blinker`_ library and | .. note:: | ||||||
| will gracefully fall back if it is not available. |  | ||||||
|  |  | ||||||
|  |   Signal support is provided by the excellent `blinker`_ library. If you wish | ||||||
|  |   to enable signal support this library must be installed, though it is not | ||||||
|  |   required for MongoEngine to function. | ||||||
|  |  | ||||||
| The following document signals exist in MongoEngine and are pretty self-explanatory: | Overview | ||||||
|  | -------- | ||||||
|  |  | ||||||
|   * `mongoengine.signals.pre_init` | Signals are found within the `mongoengine.signals` module.  Unless | ||||||
|   * `mongoengine.signals.post_init` | specified signals receive no additional arguments beyond the `sender` class and | ||||||
|   * `mongoengine.signals.pre_save` | `document` instance.  Post-signals are only called if there were no exceptions | ||||||
|   * `mongoengine.signals.post_save` | raised during the processing of their related function. | ||||||
|   * `mongoengine.signals.pre_delete` |  | ||||||
|   * `mongoengine.signals.post_delete` |  | ||||||
|   * `mongoengine.signals.pre_bulk_insert` |  | ||||||
|   * `mongoengine.signals.post_bulk_insert` |  | ||||||
|  |  | ||||||
| Example usage:: | Available signals include: | ||||||
|  |  | ||||||
|  | `pre_init` | ||||||
|  |   Called during the creation of a new :class:`~mongoengine.Document` or | ||||||
|  |   :class:`~mongoengine.EmbeddedDocument` instance, after the constructor | ||||||
|  |   arguments have been collected but before any additional processing has been | ||||||
|  |   done to them.  (I.e. assignment of default values.)  Handlers for this signal | ||||||
|  |   are passed the dictionary of arguments using the `values` keyword argument | ||||||
|  |   and may modify this dictionary prior to returning. | ||||||
|  |  | ||||||
|  | `post_init` | ||||||
|  |   Called after all processing of a new :class:`~mongoengine.Document` or | ||||||
|  |   :class:`~mongoengine.EmbeddedDocument` instance has been completed. | ||||||
|  |  | ||||||
|  | `pre_save` | ||||||
|  |   Called within :meth:`~mongoengine.Document.save` prior to performing | ||||||
|  |   any actions. | ||||||
|  |  | ||||||
|  | `pre_save_post_validation` | ||||||
|  |   Called within :meth:`~mongoengine.Document.save` after validation | ||||||
|  |   has taken place but before saving. | ||||||
|  |  | ||||||
|  | `post_save` | ||||||
|  |   Called within :meth:`~mongoengine.Document.save` after most actions | ||||||
|  |   (validation, insert/update, and cascades, but not clearing dirty flags) have | ||||||
|  |   completed successfully.  Passed the additional boolean keyword argument | ||||||
|  |   `created` to indicate if the save was an insert or an update. | ||||||
|  |  | ||||||
|  | `pre_delete` | ||||||
|  |   Called within :meth:`~mongoengine.Document.delete` prior to | ||||||
|  |   attempting the delete operation. | ||||||
|  |  | ||||||
|  | `post_delete` | ||||||
|  |   Called within :meth:`~mongoengine.Document.delete` upon successful | ||||||
|  |   deletion of the record. | ||||||
|  |  | ||||||
|  | `pre_bulk_insert` | ||||||
|  |   Called after validation of the documents to insert, but prior to any data | ||||||
|  |   being written. In this case, the `document` argument is replaced by a | ||||||
|  |   `documents` argument representing the list of documents being inserted. | ||||||
|  |  | ||||||
|  | `post_bulk_insert` | ||||||
|  |   Called after a successful bulk insert operation.  As per `pre_bulk_insert`, | ||||||
|  |   the `document` argument is omitted and replaced with a `documents` argument. | ||||||
|  |   An additional boolean argument, `loaded`, identifies the contents of | ||||||
|  |   `documents` as either :class:`~mongoengine.Document` instances when `True` or | ||||||
|  |   simply a list of primary key values for the inserted records if `False`. | ||||||
|  |  | ||||||
|  | Attaching Events | ||||||
|  | ---------------- | ||||||
|  |  | ||||||
|  | After writing a handler function like the following:: | ||||||
|  |  | ||||||
|  |     import logging | ||||||
|  |     from datetime import datetime | ||||||
|  |  | ||||||
|     from mongoengine import * |     from mongoengine import * | ||||||
|     from mongoengine import signals |     from mongoengine import signals | ||||||
|  |  | ||||||
|  |     def update_modified(sender, document): | ||||||
|  |         document.modified = datetime.utcnow() | ||||||
|  |  | ||||||
|  | You attach the event handler to your :class:`~mongoengine.Document` or | ||||||
|  | :class:`~mongoengine.EmbeddedDocument` subclass:: | ||||||
|  |  | ||||||
|  |     class Record(Document): | ||||||
|  |         modified = DateTimeField() | ||||||
|  |  | ||||||
|  |     signals.pre_save.connect(update_modified) | ||||||
|  |  | ||||||
|  | While this is not the most elaborate document model, it does demonstrate the | ||||||
|  | concepts involved.  As a more complete demonstration you can also define your | ||||||
|  | handlers within your subclass:: | ||||||
|  |  | ||||||
|     class Author(Document): |     class Author(Document): | ||||||
|         name = StringField() |         name = StringField() | ||||||
|  |  | ||||||
|         def __unicode__(self): |  | ||||||
|             return self.name |  | ||||||
|  |  | ||||||
|         @classmethod |         @classmethod | ||||||
|         def pre_save(cls, sender, document, **kwargs): |         def pre_save(cls, sender, document, **kwargs): | ||||||
|             logging.debug("Pre Save: %s" % document.name) |             logging.debug("Pre Save: %s" % document.name) | ||||||
| @@ -51,5 +113,37 @@ Example usage:: | |||||||
|     signals.pre_save.connect(Author.pre_save, sender=Author) |     signals.pre_save.connect(Author.pre_save, sender=Author) | ||||||
|     signals.post_save.connect(Author.post_save, sender=Author) |     signals.post_save.connect(Author.post_save, sender=Author) | ||||||
|  |  | ||||||
|  | .. warning:: | ||||||
|  |  | ||||||
|  |     Note that EmbeddedDocument only supports pre/post_init signals. pre/post_save, etc should be attached to Document's class only. Attaching pre_save to an EmbeddedDocument is ignored silently. | ||||||
|  |  | ||||||
|  | Finally, you can also use this small decorator to quickly create a number of | ||||||
|  | signals and attach them to your :class:`~mongoengine.Document` or | ||||||
|  | :class:`~mongoengine.EmbeddedDocument` subclasses as class decorators:: | ||||||
|  |  | ||||||
|  |     def handler(event): | ||||||
|  |         """Signal decorator to allow use of callback functions as class decorators.""" | ||||||
|  |  | ||||||
|  |         def decorator(fn): | ||||||
|  |             def apply(cls): | ||||||
|  |                 event.connect(fn, sender=cls) | ||||||
|  |                 return cls | ||||||
|  |  | ||||||
|  |             fn.apply = apply | ||||||
|  |             return fn | ||||||
|  |  | ||||||
|  |         return decorator | ||||||
|  |  | ||||||
|  | Using the first example of updating a modification time the code is now much | ||||||
|  | cleaner looking while still allowing manual execution of the callback:: | ||||||
|  |  | ||||||
|  |     @handler(signals.pre_save) | ||||||
|  |     def update_modified(sender, document): | ||||||
|  |         document.modified = datetime.utcnow() | ||||||
|  |  | ||||||
|  |     @update_modified.apply | ||||||
|  |     class Record(Document): | ||||||
|  |         modified = DateTimeField() | ||||||
|  |  | ||||||
|  |  | ||||||
| .. _blinker: http://pypi.python.org/pypi/blinker | .. _blinker: http://pypi.python.org/pypi/blinker | ||||||
|   | |||||||
							
								
								
									
										51
									
								
								docs/guide/text-indexes.rst
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										51
									
								
								docs/guide/text-indexes.rst
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,51 @@ | |||||||
|  | =========== | ||||||
|  | Text Search | ||||||
|  | =========== | ||||||
|  |  | ||||||
|  | MongoDB version 2.4 and later supports searching documents using text indexes. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Defining a Document with text index | ||||||
|  | =================================== | ||||||
|  | Use the *$* prefix to set a text index. Consider the following declaration:: | ||||||
|  |  | ||||||
|  |   class News(Document): | ||||||
|  |       title = StringField() | ||||||
|  |       content = StringField() | ||||||
|  |       is_active = BooleanField() | ||||||
|  |  | ||||||
|  |       meta = {'indexes': [ | ||||||
|  |           {'fields': ['$title', "$content"], | ||||||
|  |            'default_language': 'english', | ||||||
|  |            'weights': {'title': 10, 'content': 2} | ||||||
|  |           } | ||||||
|  |       ]} | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Querying | ||||||
|  | ======== | ||||||
|  |  | ||||||
|  | Saving a document:: | ||||||
|  |  | ||||||
|  |   News(title="Using mongodb text search", | ||||||
|  |        content="Testing text search").save() | ||||||
|  |  | ||||||
|  |   News(title="MongoEngine 0.9 released", | ||||||
|  |        content="Various improvements").save() | ||||||
|  |  | ||||||
|  | Next, start a text search using :attr:`QuerySet.search_text` method:: | ||||||
|  |  | ||||||
|  |   document = News.objects.search_text('testing').first() | ||||||
|  |   document.title # may be: "Using mongodb text search" | ||||||
|  |  | ||||||
|  |   document = News.objects.search_text('released').first() | ||||||
|  |   document.title # may be: "MongoEngine 0.9 released" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Ordering by text score | ||||||
|  | ====================== | ||||||
|  |  | ||||||
|  | :: | ||||||
|  |  | ||||||
|  |   objects = News.objects.search_text('mongo').order_by('$text_score') | ||||||
							
								
								
									
										123
									
								
								docs/guide/validation.rst
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										123
									
								
								docs/guide/validation.rst
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,123 @@ | |||||||
|  | ==================== | ||||||
|  | Document Validation | ||||||
|  | ==================== | ||||||
|  |  | ||||||
|  | By design, MongoEngine strictly validates the documents right before they are inserted in MongoDB | ||||||
|  | and makes sure they are consistent with the fields defined in your models. | ||||||
|  |  | ||||||
|  | MongoEngine makes the assumption that the documents that exist in the DB are compliant with the schema. | ||||||
|  | This means that Mongoengine will not validate a document when an object is loaded from the DB into an instance | ||||||
|  | of your model but this operation may fail under some circumstances (e.g. if there is a field in | ||||||
|  | the document fetched from the database that is not defined in your model). | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Built-in validation | ||||||
|  | =================== | ||||||
|  |  | ||||||
|  | Mongoengine provides different fields that encapsulate the corresponding validation | ||||||
|  | out of the box. Validation runs when calling `.validate()` or `.save()` | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     from mongoengine import Document, EmailField | ||||||
|  |  | ||||||
|  |     class User(Document): | ||||||
|  |         email = EmailField() | ||||||
|  |         age = IntField(min_value=0, max_value=99) | ||||||
|  |  | ||||||
|  |     user = User(email='invalid@', age=24) | ||||||
|  |     user.validate()     # raises ValidationError (Invalid email address: ['email']) | ||||||
|  |     user.save()         # raises ValidationError (Invalid email address: ['email']) | ||||||
|  |  | ||||||
|  |     user2 = User(email='john.doe@garbage.com', age=1000) | ||||||
|  |     user2.save()        # raises ValidationError (Integer value is too large: ['age']) | ||||||
|  |  | ||||||
|  | Custom validation | ||||||
|  | ================= | ||||||
|  |  | ||||||
|  | The following feature can be used to customize the validation: | ||||||
|  |  | ||||||
|  | * Field `validation` parameter | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     def not_john_doe(name): | ||||||
|  |         if name == 'John Doe': | ||||||
|  |             raise ValidationError("John Doe is not a valid name") | ||||||
|  |  | ||||||
|  |     class Person(Document): | ||||||
|  |         full_name = StringField(validation=not_john_doe) | ||||||
|  |  | ||||||
|  |     Person(full_name='Billy Doe').save() | ||||||
|  |     Person(full_name='John Doe').save()  # raises ValidationError (John Doe is not a valid name) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | * Document `clean` method | ||||||
|  |  | ||||||
|  | This method is called as part of :meth:`~mongoengine.document.Document.save` and should be used to provide | ||||||
|  | custom model validation and/or to modify some of the field values prior to validation. | ||||||
|  | For instance, you could use it to automatically provide a value for a field, or to do validation | ||||||
|  | that requires access to more than a single field. | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     class Essay(Document): | ||||||
|  |         status = StringField(choices=('Published', 'Draft'), required=True) | ||||||
|  |         pub_date = DateTimeField() | ||||||
|  |  | ||||||
|  |         def clean(self): | ||||||
|  |             # Validate that only published essays have a `pub_date` | ||||||
|  |             if self.status == 'Draft' and self.pub_date is not None: | ||||||
|  |                 raise ValidationError('Draft entries should not have a publication date.') | ||||||
|  |             # Set the pub_date for published items if not set. | ||||||
|  |             if self.status == 'Published' and self.pub_date is None: | ||||||
|  |                 self.pub_date = datetime.now() | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |     Cleaning is only called if validation is turned on and when calling | ||||||
|  |     :meth:`~mongoengine.Document.save`. | ||||||
|  |  | ||||||
|  | * Adding custom Field classes | ||||||
|  |  | ||||||
|  | We recommend as much as possible to use fields provided by MongoEngine. However, it is also possible | ||||||
|  | to subclass a Field and encapsulate some validation by overriding the `validate` method | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     class AgeField(IntField): | ||||||
|  |  | ||||||
|  |         def validate(self, value): | ||||||
|  |             super(AgeField, self).validate(value)     # let IntField.validate run first | ||||||
|  |             if value == 60: | ||||||
|  |                 self.error('60 is not allowed') | ||||||
|  |  | ||||||
|  |     class Person(Document): | ||||||
|  |         age = AgeField(min_value=0, max_value=99) | ||||||
|  |  | ||||||
|  |     Person(age=20).save()   # passes | ||||||
|  |     Person(age=1000).save() # raises ValidationError (Integer value is too large: ['age']) | ||||||
|  |     Person(age=60).save()   # raises ValidationError (Person:None) (60 is not allowed: ['age']) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |  | ||||||
|  |    When overriding `validate`, use `self.error("your-custom-error")` instead of raising ValidationError explicitly, | ||||||
|  |    it will provide a better context with the error message | ||||||
|  |  | ||||||
|  | Skipping validation | ||||||
|  | ==================== | ||||||
|  |  | ||||||
|  | Although discouraged, as it allows violating field constraints, if for some reason you need to disable | ||||||
|  | the validation and cleaning of a document when you call :meth:`~mongoengine.document.Document.save`, you can use `.save(validate=False)`. | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     class Person(Document): | ||||||
|  |         age = IntField(max_value=100) | ||||||
|  |  | ||||||
|  |     Person(age=1000).save()    # raises ValidationError (Integer value is too large) | ||||||
|  |  | ||||||
|  |     Person(age=1000).save(validate=False) | ||||||
|  |     person = Person.objects.first() | ||||||
|  |     assert person.age == 1000 | ||||||
|  |  | ||||||
| @@ -7,56 +7,83 @@ MongoDB. To install it, simply run | |||||||
|  |  | ||||||
| .. code-block:: console | .. code-block:: console | ||||||
|  |  | ||||||
|     # pip install -U mongoengine |     $ python -m pip install -U mongoengine | ||||||
|  |  | ||||||
| :doc:`tutorial` | :doc:`tutorial` | ||||||
|   Start here for a quick overview. |   A quick tutorial building a tumblelog to get you up and running with | ||||||
|  |   MongoEngine. | ||||||
|  |  | ||||||
| :doc:`guide/index` | :doc:`guide/index` | ||||||
|   The Full guide to MongoEngine |   The Full guide to MongoEngine --- from modeling documents to storing files, | ||||||
|  |   from querying for data to firing signals and *everything* between. | ||||||
|  |  | ||||||
| :doc:`apireference` | :doc:`apireference` | ||||||
|   The complete API documentation. |   The complete API documentation --- the innards of documents, querysets and fields. | ||||||
|  |  | ||||||
| :doc:`upgrade` | :doc:`upgrade` | ||||||
|   How to upgrade MongoEngine. |   How to upgrade MongoEngine. | ||||||
|  |  | ||||||
|  | :doc:`faq` | ||||||
|  |   Frequently Asked Questions | ||||||
|  |  | ||||||
| :doc:`django` | :doc:`django` | ||||||
|   Using MongoEngine and Django |   Using MongoEngine and Django | ||||||
|  |  | ||||||
|  | MongoDB and driver support | ||||||
|  | -------------------------- | ||||||
|  |  | ||||||
|  | MongoEngine is based on the PyMongo driver and tested against multiple versions of MongoDB. | ||||||
|  | For further details, please refer to the `readme <https://github.com/MongoEngine/mongoengine#mongoengine>`_. | ||||||
|  |  | ||||||
| Community | Community | ||||||
| --------- | --------- | ||||||
|  |  | ||||||
| To get help with using MongoEngine, use the `MongoEngine Users mailing list | To get help with using MongoEngine, use the `MongoEngine Users mailing list | ||||||
| <http://groups.google.com/group/mongoengine-users>`_ or come chat on the | <http://groups.google.com/group/mongoengine-users>`_ or the ever popular | ||||||
| `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_. | `stackoverflow <http://www.stackoverflow.com>`_. | ||||||
|  |  | ||||||
| Contributing | Contributing | ||||||
| ------------ | ------------ | ||||||
|  |  | ||||||
| The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ and | **Yes please!**  We are always looking for contributions, additions and improvements. | ||||||
| contributions are always encouraged. Contributions can be as simple as |  | ||||||
| minor tweaks to this documentation. To contribute, fork the project on |  | ||||||
| `GitHub <http://github.com/hmarr/mongoengine>`_ and send a |  | ||||||
| pull request. |  | ||||||
|  |  | ||||||
| Also, you can join the developers' `mailing list | The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ | ||||||
| <http://groups.google.com/group/mongoengine-dev>`_. | and contributions are always encouraged. Contributions can be as simple as | ||||||
|  | minor tweaks to this documentation, the website or the core. | ||||||
|  |  | ||||||
|  | To contribute, fork the project on | ||||||
|  | `GitHub <http://github.com/MongoEngine/mongoengine>`_ and send a | ||||||
|  | pull request. | ||||||
|  |  | ||||||
| Changes | Changes | ||||||
| ------- | ------- | ||||||
|  |  | ||||||
| See the :doc:`changelog` for a full list of changes to MongoEngine and | See the :doc:`changelog` for a full list of changes to MongoEngine and | ||||||
| :doc:`upgrade` for upgrade information. | :doc:`upgrade` for upgrade information. | ||||||
|  |  | ||||||
|  | .. note::  Always read and test the `upgrade <upgrade>`_ documentation before | ||||||
|  |     putting updates live in production **;)** | ||||||
|  |  | ||||||
|  | Offline Reading | ||||||
|  | --------------- | ||||||
|  |  | ||||||
|  | Download the docs in `pdf <https://media.readthedocs.org/pdf/mongoengine-odm/latest/mongoengine-odm.pdf>`_ | ||||||
|  | or `epub <https://media.readthedocs.org/epub/mongoengine-odm/latest/mongoengine-odm.epub>`_ | ||||||
|  | formats for offline reading. | ||||||
|  |  | ||||||
|  |  | ||||||
| .. toctree:: | .. toctree:: | ||||||
|  |     :maxdepth: 1 | ||||||
|  |     :numbered: | ||||||
|     :hidden: |     :hidden: | ||||||
|  |  | ||||||
|     tutorial |     tutorial | ||||||
|     guide/index |     guide/index | ||||||
|     apireference |     apireference | ||||||
|    django |  | ||||||
|     changelog |     changelog | ||||||
|     upgrade |     upgrade | ||||||
|  |     faq | ||||||
|  |     django | ||||||
|  |  | ||||||
| Indices and tables | Indices and tables | ||||||
| ------------------ | ------------------ | ||||||
| @@ -64,4 +91,3 @@ Indices and tables | |||||||
| * :ref:`genindex` | * :ref:`genindex` | ||||||
| * :ref:`modindex` | * :ref:`modindex` | ||||||
| * :ref:`search` | * :ref:`search` | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										3
									
								
								docs/requirements.txt
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								docs/requirements.txt
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | |||||||
|  | pymongo>=3.11 | ||||||
|  | Sphinx==3.2.1 | ||||||
|  | sphinx-rtd-theme==0.5.0 | ||||||
| @@ -1,68 +1,78 @@ | |||||||
| ======== | ======== | ||||||
| Tutorial | Tutorial | ||||||
| ======== | ======== | ||||||
|  |  | ||||||
| This tutorial introduces **MongoEngine** by means of example --- we will walk | This tutorial introduces **MongoEngine** by means of example --- we will walk | ||||||
| through how to create a simple **Tumblelog** application. A Tumblelog is a type | through how to create a simple **Tumblelog** application. A tumblelog is a | ||||||
| of blog where posts are not constrained to being conventional text-based posts. | blog that supports mixed media content, including text, images, links, video, | ||||||
| As well as text-based entries, users may post images, links, videos, etc. For | audio, etc. For simplicity's sake, we'll stick to text, image, and link | ||||||
| simplicity's sake, we'll stick to text, image and link entries in our | entries. As the purpose of this tutorial is to introduce MongoEngine, we'll | ||||||
| application. As the purpose of this tutorial is to introduce MongoEngine, we'll |  | ||||||
| focus on the data-modelling side of the application, leaving out a user | focus on the data-modelling side of the application, leaving out a user | ||||||
| interface. | interface. | ||||||
|  |  | ||||||
| Getting started | Getting started | ||||||
| =============== | =============== | ||||||
|  |  | ||||||
| Before we start, make sure that a copy of MongoDB is running in an accessible | Before we start, make sure that a copy of MongoDB is running in an accessible | ||||||
| location --- running it locally will be easier, but if that is not an option | location --- running it locally will be easier, but if that is not an option | ||||||
| then it may be run on a remote server. | then it may be run on a remote server. If you haven't installed MongoEngine, | ||||||
|  | simply use pip to install it like so:: | ||||||
|  |  | ||||||
|  |     $ python -m pip install mongoengine | ||||||
|  |  | ||||||
| Before we can start using MongoEngine, we need to tell it how to connect to our | Before we can start using MongoEngine, we need to tell it how to connect to our | ||||||
| instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect` | instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect` | ||||||
| function. The only argument we need to provide is the name of the MongoDB | function. If running locally, the only argument we need to provide is the name | ||||||
| database to use:: | of the MongoDB database to use:: | ||||||
|  |  | ||||||
|     from mongoengine import * |     from mongoengine import * | ||||||
|  |  | ||||||
|     connect('tumblelog') |     connect('tumblelog') | ||||||
|  |  | ||||||
| For more information about connecting to MongoDB see :ref:`guide-connecting`. | There are lots of options for connecting to MongoDB, for more information about | ||||||
|  | them see the :ref:`guide-connecting` guide. | ||||||
|  |  | ||||||
| Defining our documents | Defining our documents | ||||||
| ====================== | ====================== | ||||||
|  |  | ||||||
| MongoDB is *schemaless*, which means that no schema is enforced by the database | MongoDB is *schemaless*, which means that no schema is enforced by the database | ||||||
| --- we may add and remove fields however we want and MongoDB won't complain. | --- we may add and remove fields however we want and MongoDB won't complain. | ||||||
| This makes life a lot easier in many regards, especially when there is a change | This makes life a lot easier in many regards, especially when there is a change | ||||||
| to the data model. However, defining schemata for our documents can help to | to the data model. However, defining schemas for our documents can help to iron | ||||||
| iron out bugs involving incorrect types or missing fields, and also allow us to | out bugs involving incorrect types or missing fields, and also allow us to | ||||||
| define utility methods on our documents in the same way that traditional | define utility methods on our documents in the same way that traditional | ||||||
| :abbr:`ORMs (Object-Relational Mappers)` do. | :abbr:`ORMs (Object-Relational Mappers)` do. | ||||||
|  |  | ||||||
| In our Tumblelog application we need to store several different types of | In our Tumblelog application we need to store several different types of | ||||||
| information. We will need to have a collection of **users**, so that we may | information. We will need to have a collection of **users**, so that we may | ||||||
| link posts to an individual. We also need to store our different types | link posts to an individual. We also need to store our different types of | ||||||
| **posts** (text, image and link) in the database. To aid navigation of our | **posts** (eg: text, image and link) in the database. To aid navigation of our | ||||||
| Tumblelog, posts may have **tags** associated with them, so that the list of | Tumblelog, posts may have **tags** associated with them, so that the list of | ||||||
| posts shown to the user may be limited to posts that have been assigned a | posts shown to the user may be limited to posts that have been assigned a | ||||||
| specified tag.  Finally, it would be nice if **comments** could be added to | specific tag. Finally, it would be nice if **comments** could be added to | ||||||
| posts. We'll start with **users**, as the others are slightly more involved. | posts. We'll start with **users**, as the other document models are slightly | ||||||
|  | more involved. | ||||||
|  |  | ||||||
| Users | Users | ||||||
| ----- | ----- | ||||||
|  |  | ||||||
| Just as if we were using a relational database with an ORM, we need to define | Just as if we were using a relational database with an ORM, we need to define | ||||||
| which fields a :class:`User` may have, and what their types will be:: | which fields a :class:`User` may have, and what types of data they might store:: | ||||||
|  |  | ||||||
|     class User(Document): |     class User(Document): | ||||||
|         email = StringField(required=True) |         email = StringField(required=True) | ||||||
|         first_name = StringField(max_length=50) |         first_name = StringField(max_length=50) | ||||||
|         last_name = StringField(max_length=50) |         last_name = StringField(max_length=50) | ||||||
|  |  | ||||||
| This looks similar to how a the structure of a table would be defined in a | This looks similar to how the structure of a table would be defined in a | ||||||
| regular ORM. The key difference is that this schema will never be passed on to | regular ORM. The key difference is that this schema will never be passed on to | ||||||
| MongoDB --- this will only be enforced at the application level. Also, the User | MongoDB --- this will only be enforced at the application level, making future | ||||||
| documents will be stored in a MongoDB *collection* rather than a table. | changes easy to manage. Also, the User documents will be stored in a | ||||||
|  | MongoDB *collection* rather than a table. | ||||||
|  |  | ||||||
| Posts, Comments and Tags | Posts, Comments and Tags | ||||||
| ------------------------ | ------------------------ | ||||||
|  |  | ||||||
| Now we'll think about how to store the rest of the information. If we were | Now we'll think about how to store the rest of the information. If we were | ||||||
| using a relational database, we would most likely have a table of **posts**, a | using a relational database, we would most likely have a table of **posts**, a | ||||||
| table of **comments** and a table of **tags**.  To associate the comments with | table of **comments** and a table of **tags**.  To associate the comments with | ||||||
| @@ -75,21 +85,25 @@ of them stand out as particularly intuitive solutions. | |||||||
|  |  | ||||||
| Posts | Posts | ||||||
| ^^^^^ | ^^^^^ | ||||||
| But MongoDB *isn't* a relational database, so we're not going to do it that |  | ||||||
|  | Happily MongoDB *isn't* a relational database, so we're not going to do it that | ||||||
| way. As it turns out, we can use MongoDB's schemaless nature to provide us with | way. As it turns out, we can use MongoDB's schemaless nature to provide us with | ||||||
| a much nicer solution. We will store all of the posts in *one collection* --- | a much nicer solution. We will store all of the posts in *one collection* and | ||||||
| each post type will just have the fields it needs. If we later want to add | each post type will only store the fields it needs. If we later want to add | ||||||
| video posts, we don't have to modify the collection at all, we just *start | video posts, we don't have to modify the collection at all, we just *start | ||||||
| using* the new fields we need to support video posts. This fits with the | using* the new fields we need to support video posts. This fits with the | ||||||
| Object-Oriented principle of *inheritance* nicely. We can think of | Object-Oriented principle of *inheritance* nicely. We can think of | ||||||
| :class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and | :class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and | ||||||
| :class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports | :class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports | ||||||
| this kind of modelling out of the box:: | this kind of modeling out of the box --- all you need do is turn on inheritance | ||||||
|  | by setting :attr:`allow_inheritance` to True in the :attr:`meta`:: | ||||||
|  |  | ||||||
|     class Post(Document): |     class Post(Document): | ||||||
|         title = StringField(max_length=120, required=True) |         title = StringField(max_length=120, required=True) | ||||||
|         author = ReferenceField(User) |         author = ReferenceField(User) | ||||||
|  |  | ||||||
|  |         meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|     class TextPost(Post): |     class TextPost(Post): | ||||||
|         content = StringField() |         content = StringField() | ||||||
|  |  | ||||||
| @@ -100,20 +114,21 @@ this kind of modelling out of the box:: | |||||||
|         link_url = StringField() |         link_url = StringField() | ||||||
|  |  | ||||||
| We are storing a reference to the author of the posts using a | We are storing a reference to the author of the posts using a | ||||||
| :class:`~mongoengine.ReferenceField` object. These are similar to foreign key | :class:`~mongoengine.fields.ReferenceField` object. These are similar to foreign key | ||||||
| fields in traditional ORMs, and are automatically translated into references | fields in traditional ORMs, and are automatically translated into references | ||||||
| when they are saved, and dereferenced when they are loaded. | when they are saved, and dereferenced when they are loaded. | ||||||
|  |  | ||||||
| Tags | Tags | ||||||
| ^^^^ | ^^^^ | ||||||
|  |  | ||||||
| Now that we have our Post models figured out, how will we attach tags to them? | Now that we have our Post models figured out, how will we attach tags to them? | ||||||
| MongoDB allows us to store lists of items natively, so rather than having a | MongoDB allows us to store lists of items natively, so rather than having a | ||||||
| link table, we can just store a list of tags in each post. So, for both | link table, we can just store a list of tags in each post. So, for both | ||||||
| efficiency and simplicity's sake, we'll store the tags as strings directly | efficiency and simplicity's sake, we'll store the tags as strings directly | ||||||
| within the post, rather than storing references to tags in a separate | within the post, rather than storing references to tags in a separate | ||||||
| collection. Especially as tags are generally very short (often even shorter | collection. Especially as tags are generally very short (often even shorter | ||||||
| than a document's id), this denormalisation won't impact very strongly on the | than a document's id), this denormalization won't impact the size of the | ||||||
| size of our database. So let's take a look that the code our modified | database very strongly. Let's take a look at the code of our modified | ||||||
| :class:`Post` class:: | :class:`Post` class:: | ||||||
|  |  | ||||||
|     class Post(Document): |     class Post(Document): | ||||||
| @@ -121,21 +136,24 @@ size of our database. So let's take a look that the code our modified | |||||||
|         author = ReferenceField(User) |         author = ReferenceField(User) | ||||||
|         tags = ListField(StringField(max_length=30)) |         tags = ListField(StringField(max_length=30)) | ||||||
|  |  | ||||||
| The :class:`~mongoengine.ListField` object that is used to define a Post's tags | The :class:`~mongoengine.fields.ListField` object that is used to define a Post's tags | ||||||
| takes a field object as its first argument --- this means that you can have | takes a field object as its first argument --- this means that you can have | ||||||
| lists of any type of field (including lists). Note that we don't need to | lists of any type of field (including lists). | ||||||
| modify the specialised post types as they all inherit from :class:`Post`. |  | ||||||
|  | .. note:: We don't need to modify the specialized post types as they all | ||||||
|  |     inherit from :class:`Post`. | ||||||
|  |  | ||||||
| Comments | Comments | ||||||
| ^^^^^^^^ | ^^^^^^^^ | ||||||
|  |  | ||||||
| A comment is typically associated with *one* post. In a relational database, to | A comment is typically associated with *one* post. In a relational database, to | ||||||
| display a post with its comments, we would have to retrieve the post from the | display a post with its comments, we would have to retrieve the post from the | ||||||
| database, then query the database again for the comments associated with the | database and then query the database again for the comments associated with the | ||||||
| post. This works, but there is no real reason to be storing the comments | post. This works, but there is no real reason to be storing the comments | ||||||
| separately from their associated posts, other than to work around the | separately from their associated posts, other than to work around the | ||||||
| relational model. Using MongoDB we can store the comments as a list of | relational model. Using MongoDB we can store the comments as a list of | ||||||
| *embedded documents* directly on a post document. An embedded document should | *embedded documents* directly on a post document. An embedded document should | ||||||
| be treated no differently that a regular document; it just doesn't have its own | be treated no differently than a regular document; it just doesn't have its own | ||||||
| collection in the database. Using MongoEngine, we can define the structure of | collection in the database. Using MongoEngine, we can define the structure of | ||||||
| embedded documents, along with utility methods, in exactly the same way we do | embedded documents, along with utility methods, in exactly the same way we do | ||||||
| with regular documents:: | with regular documents:: | ||||||
| @@ -155,7 +173,7 @@ We can then store a list of comment documents in our post document:: | |||||||
| Handling deletions of references | Handling deletions of references | ||||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ||||||
|  |  | ||||||
| The :class:`~mongoengine.ReferenceField` object takes a keyword | The :class:`~mongoengine.fields.ReferenceField` object takes a keyword | ||||||
| `reverse_delete_rule` for handling deletion rules if the reference is deleted. | `reverse_delete_rule` for handling deletion rules if the reference is deleted. | ||||||
| To delete all the posts if a user is deleted set the rule:: | To delete all the posts if a user is deleted set the rule:: | ||||||
|  |  | ||||||
| @@ -165,9 +183,9 @@ To delete all the posts if a user is deleted set the rule:: | |||||||
|         tags = ListField(StringField(max_length=30)) |         tags = ListField(StringField(max_length=30)) | ||||||
|         comments = ListField(EmbeddedDocumentField(Comment)) |         comments = ListField(EmbeddedDocumentField(Comment)) | ||||||
|  |  | ||||||
| See :class:`~mongoengine.ReferenceField` for more information. | See :class:`~mongoengine.fields.ReferenceField` for more information. | ||||||
|  |  | ||||||
| ..note:: | .. note:: | ||||||
|     MapFields and DictFields currently don't support automatic handling of |     MapFields and DictFields currently don't support automatic handling of | ||||||
|     deleted references |     deleted references | ||||||
|  |  | ||||||
| @@ -178,33 +196,37 @@ Now that we've defined how our documents will be structured, let's start adding | |||||||
| some documents to the database. Firstly, we'll need to create a :class:`User` | some documents to the database. Firstly, we'll need to create a :class:`User` | ||||||
| object:: | object:: | ||||||
|  |  | ||||||
|     john = User(email='jdoe@example.com', first_name='John', last_name='Doe') |     ross = User(email='ross@example.com', first_name='Ross', last_name='Lawley').save() | ||||||
|     john.save() |  | ||||||
|  |  | ||||||
| Note that we could have also defined our user using attribute syntax:: | .. note:: | ||||||
|  |     We could have also defined our user using attribute syntax:: | ||||||
|  |  | ||||||
|     john = User(email='jdoe@example.com') |         ross = User(email='ross@example.com') | ||||||
|     john.first_name = 'John' |         ross.first_name = 'Ross' | ||||||
|     john.last_name = 'Doe' |         ross.last_name = 'Lawley' | ||||||
|     john.save() |         ross.save() | ||||||
|  |  | ||||||
| Now that we've got our user in the database, let's add a couple of posts:: | Assign another user to a variable called ``john``, just like we did above with | ||||||
|  | ``ross``. | ||||||
|  |  | ||||||
|  | Now that we've got our users in the database, let's add a couple of posts:: | ||||||
|  |  | ||||||
|     post1 = TextPost(title='Fun with MongoEngine', author=john) |     post1 = TextPost(title='Fun with MongoEngine', author=john) | ||||||
|     post1.content = 'Took a look at MongoEngine today, looks pretty cool.' |     post1.content = 'Took a look at MongoEngine today, looks pretty cool.' | ||||||
|     post1.tags = ['mongodb', 'mongoengine'] |     post1.tags = ['mongodb', 'mongoengine'] | ||||||
|     post1.save() |     post1.save() | ||||||
|  |  | ||||||
|     post2 = LinkPost(title='MongoEngine Documentation', author=john) |     post2 = LinkPost(title='MongoEngine Documentation', author=ross) | ||||||
|     post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs' |     post2.link_url = 'http://docs.mongoengine.com/' | ||||||
|     post2.tags = ['mongoengine'] |     post2.tags = ['mongoengine'] | ||||||
|     post2.save() |     post2.save() | ||||||
|  |  | ||||||
| Note that if you change a field on a object that has already been saved, then | .. note:: If you change a field on an object that has already been saved and | ||||||
| call :meth:`save` again, the document will be updated. |     then call :meth:`save` again, the document will be updated. | ||||||
|  |  | ||||||
| Accessing our data | Accessing our data | ||||||
| ================== | ================== | ||||||
|  |  | ||||||
| So now we've got a couple of posts in our database, how do we display them? | So now we've got a couple of posts in our database, how do we display them? | ||||||
| Each document class (i.e. any class that inherits either directly or indirectly | Each document class (i.e. any class that inherits either directly or indirectly | ||||||
| from :class:`~mongoengine.Document`) has an :attr:`objects` attribute, which is | from :class:`~mongoengine.Document`) has an :attr:`objects` attribute, which is | ||||||
| @@ -212,16 +234,17 @@ used to access the documents in the database collection associated with that | |||||||
| class. So let's see how we can get our posts' titles:: | class. So let's see how we can get our posts' titles:: | ||||||
|  |  | ||||||
|     for post in Post.objects: |     for post in Post.objects: | ||||||
|         print post.title |         print(post.title) | ||||||
|  |  | ||||||
| Retrieving type-specific information | Retrieving type-specific information | ||||||
| ------------------------------------ | ------------------------------------ | ||||||
| This will print the titles of our posts, one on each line. But What if we want |  | ||||||
|  | This will print the titles of our posts, one on each line. But what if we want | ||||||
| to access the type-specific data (link_url, content, etc.)? One way is simply | to access the type-specific data (link_url, content, etc.)? One way is simply | ||||||
| to use the :attr:`objects` attribute of a subclass of :class:`Post`:: | to use the :attr:`objects` attribute of a subclass of :class:`Post`:: | ||||||
|  |  | ||||||
|     for post in TextPost.objects: |     for post in TextPost.objects: | ||||||
|         print post.content |         print(post.content) | ||||||
|  |  | ||||||
| Using TextPost's :attr:`objects` attribute only returns documents that were | Using TextPost's :attr:`objects` attribute only returns documents that were | ||||||
| created using :class:`TextPost`. Actually, there is a more general rule here: | created using :class:`TextPost`. Actually, there is a more general rule here: | ||||||
| @@ -238,22 +261,21 @@ instances of :class:`Post` --- they were instances of the subclass of | |||||||
| practice:: | practice:: | ||||||
|  |  | ||||||
|     for post in Post.objects: |     for post in Post.objects: | ||||||
|         print post.title |         print(post.title) | ||||||
|         print '=' * len(post.title) |         print('=' * len(post.title)) | ||||||
|  |  | ||||||
|         if isinstance(post, TextPost): |         if isinstance(post, TextPost): | ||||||
|             print post.content |             print(post.content) | ||||||
|  |  | ||||||
|         if isinstance(post, LinkPost): |         if isinstance(post, LinkPost): | ||||||
|             print 'Link:', post.link_url |             print('Link: {}'.format(post.link_url)) | ||||||
|  |  | ||||||
|         print |  | ||||||
|  |  | ||||||
| This would print the title of each post, followed by the content if it was a | This would print the title of each post, followed by the content if it was a | ||||||
| text post, and "Link: <url>" if it was a link post. | text post, and "Link: <url>" if it was a link post. | ||||||
|  |  | ||||||
| Searching our posts by tag | Searching our posts by tag | ||||||
| -------------------------- | -------------------------- | ||||||
|  |  | ||||||
| The :attr:`objects` attribute of a :class:`~mongoengine.Document` is actually a | The :attr:`objects` attribute of a :class:`~mongoengine.Document` is actually a | ||||||
| :class:`~mongoengine.queryset.QuerySet` object. This lazily queries the | :class:`~mongoengine.queryset.QuerySet` object. This lazily queries the | ||||||
| database only when you need the data. It may also be filtered to narrow down | database only when you need the data. It may also be filtered to narrow down | ||||||
| @@ -261,7 +283,7 @@ your query.  Let's adjust our query so that only posts with the tag "mongodb" | |||||||
| are returned:: | are returned:: | ||||||
|  |  | ||||||
|     for post in Post.objects(tags='mongodb'): |     for post in Post.objects(tags='mongodb'): | ||||||
|         print post.title |         print(post.title) | ||||||
|  |  | ||||||
| There are also methods available on :class:`~mongoengine.queryset.QuerySet` | There are also methods available on :class:`~mongoengine.queryset.QuerySet` | ||||||
| objects that allow different results to be returned, for example, calling | objects that allow different results to be returned, for example, calling | ||||||
| @@ -270,5 +292,11 @@ the first matched by the query you provide. Aggregation functions may also be | |||||||
| used on :class:`~mongoengine.queryset.QuerySet` objects:: | used on :class:`~mongoengine.queryset.QuerySet` objects:: | ||||||
|  |  | ||||||
|     num_posts = Post.objects(tags='mongodb').count() |     num_posts = Post.objects(tags='mongodb').count() | ||||||
|     print 'Found %d posts with tag "mongodb"' % num_posts |     print('Found {} posts with tag "mongodb"'.format(num_posts)) | ||||||
|  |  | ||||||
|  | Learning more about MongoEngine | ||||||
|  | ------------------------------- | ||||||
|  |  | ||||||
|  | If you got this far you've made a great start, so well done! The next step on | ||||||
|  | your MongoEngine journey is the `full user guide <guide/index.html>`_, where | ||||||
|  | you can learn in-depth about how to use MongoEngine and MongoDB. | ||||||
|   | |||||||
							
								
								
									
										557
									
								
								docs/upgrade.rst
									
									
									
									
									
								
							
							
						
						
									
										557
									
								
								docs/upgrade.rst
									
									
									
									
									
								
							| @@ -1,31 +1,538 @@ | |||||||
| ========= | ######### | ||||||
| Upgrading | Upgrading | ||||||
| ========= | ######### | ||||||
|  |  | ||||||
| 0.5 to 0.6 | Development | ||||||
|  | *********** | ||||||
|  | (Fill this out whenever you introduce breaking changes to MongoEngine) | ||||||
|  |  | ||||||
|  | URLField's constructor no longer takes `verify_exists` | ||||||
|  |  | ||||||
|  | 0.15.0 | ||||||
|  | ****** | ||||||
|  |  | ||||||
|  | 0.14.0 | ||||||
|  | ****** | ||||||
|  | This release includes a few bug fixes and a significant code cleanup. The most | ||||||
|  | important change is that `QuerySet.as_pymongo` no longer supports a | ||||||
|  | `coerce_types` mode. If you used it in the past, a) please let us know of your | ||||||
|  | use case, b) you'll need to override `as_pymongo` to get the desired outcome. | ||||||
|  |  | ||||||
|  | This release also makes the EmbeddedDocument not hashable by default. If you | ||||||
|  | use embedded documents in sets or dictionaries, you might have to override | ||||||
|  | `__hash__` and implement a hashing logic specific to your use case. See #1528 | ||||||
|  | for the reason behind this change. | ||||||
|  |  | ||||||
|  | 0.13.0 | ||||||
|  | ****** | ||||||
|  | This release adds Unicode support to the `EmailField` and changes its | ||||||
|  | structure significantly. Previously, email addresses containing Unicode | ||||||
|  | characters didn't work at all. Starting with v0.13.0, domains with Unicode | ||||||
|  | characters are supported out of the box, meaning some emails that previously | ||||||
|  | didn't pass validation now do. Make sure the rest of your application can | ||||||
|  | accept such email addresses. Additionally, if you subclassed the `EmailField` | ||||||
|  | in your application and overrode `EmailField.EMAIL_REGEX`, you will have to | ||||||
|  | adjust your code to override `EmailField.USER_REGEX`, `EmailField.DOMAIN_REGEX`, | ||||||
|  | and potentially `EmailField.UTF8_USER_REGEX`. | ||||||
|  |  | ||||||
|  | 0.12.0 | ||||||
|  | ****** | ||||||
|  | This release includes various fixes for the `BaseQuerySet` methods and how they | ||||||
|  | are chained together. Since version 0.10.1 applying limit/skip/hint/batch_size | ||||||
|  | to an already-existing queryset wouldn't modify the underlying PyMongo cursor. | ||||||
|  | This has been fixed now, so you'll need to make sure that your code didn't rely | ||||||
|  | on the broken implementation. | ||||||
|  |  | ||||||
|  | Additionally, a public `BaseQuerySet.clone_into` has been renamed to a private | ||||||
|  | `_clone_into`. If you directly used that method in your code, you'll need to | ||||||
|  | rename its occurrences. | ||||||
|  |  | ||||||
|  | 0.11.0 | ||||||
|  | ****** | ||||||
|  | This release includes a major overhaul of MongoEngine's code quality and | ||||||
|  | introduces a few breaking changes. It also touches many different parts of | ||||||
|  | the package and although all the changes have been tested and scrutinized, | ||||||
|  | you're encouraged to thoroughly test the upgrade. | ||||||
|  |  | ||||||
|  | First breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`. | ||||||
|  | If you import or catch this exception, you'll need to rename it in your code. | ||||||
|  |  | ||||||
|  | Second breaking change drops Python v2.6 support. If you run MongoEngine on | ||||||
|  | that Python version, you'll need to upgrade it first. | ||||||
|  |  | ||||||
|  | Third breaking change drops an old backward compatibility measure where | ||||||
|  | `from mongoengine.base import ErrorClass` would work on top of | ||||||
|  | `from mongoengine.errors import ErrorClass` (where `ErrorClass` is e.g. | ||||||
|  | `ValidationError`). If you import any exceptions from `mongoengine.base`, | ||||||
|  | change it to `mongoengine.errors`. | ||||||
|  |  | ||||||
|  | 0.10.8 | ||||||
|  | ****** | ||||||
|  | This version fixed an issue where specifying a MongoDB URI host would override | ||||||
|  | more information than it should. These changes are minor, but they still | ||||||
|  | subtly modify the connection logic and thus you're encouraged to test your | ||||||
|  | MongoDB connection before shipping v0.10.8 in production. | ||||||
|  |  | ||||||
|  | 0.10.7 | ||||||
|  | ****** | ||||||
|  |  | ||||||
|  | `QuerySet.aggregate_sum` and `QuerySet.aggregate_average` are dropped. Use | ||||||
|  | `QuerySet.sum` and `QuerySet.average` instead which use the aggregation framework | ||||||
|  | by default from now on. | ||||||
|  |  | ||||||
|  | 0.9.0 | ||||||
|  | ***** | ||||||
|  |  | ||||||
|  | The 0.8.7 package on pypi was corrupted.  If upgrading from 0.8.7 to 0.9.0 please follow: :: | ||||||
|  |  | ||||||
|  |     python -m pip uninstall pymongo | ||||||
|  |     python -m pip uninstall mongoengine | ||||||
|  |     python -m pip install pymongo==2.8 | ||||||
|  |     python -m pip install mongoengine | ||||||
|  |  | ||||||
|  | 0.8.7 | ||||||
|  | ***** | ||||||
|  |  | ||||||
|  | Calling reload on deleted / nonexistent documents now raises a DoesNotExist | ||||||
|  | exception. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | 0.8.2 to 0.8.3 | ||||||
|  | ************** | ||||||
|  |  | ||||||
|  | Minor change that may impact users: | ||||||
|  |  | ||||||
|  | DynamicDocument fields are now stored in creation order after any declared | ||||||
|  | fields.  Previously they were stored alphabetically. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | 0.7 to 0.8 | ||||||
|  | ********** | ||||||
|  |  | ||||||
|  | There have been numerous backwards breaking changes in 0.8.  The reasons for | ||||||
|  | these are to ensure that MongoEngine has sane defaults going forward and that it | ||||||
|  | performs the best it can out of the box.  Where possible there have been | ||||||
|  | FutureWarnings to help get you ready for the change, but that hasn't been | ||||||
|  | possible for the whole of the release. | ||||||
|  |  | ||||||
|  | .. warning:: Breaking changes - test upgrading on a test system before putting | ||||||
|  |     live. There may be multiple manual steps in migrating and these are best honed | ||||||
|  |     on a staging / test system. | ||||||
|  |  | ||||||
|  | Python and PyMongo | ||||||
|  | ================== | ||||||
|  |  | ||||||
|  | MongoEngine requires python 2.6 (or above) and pymongo 2.5 (or above) | ||||||
|  |  | ||||||
|  | Data Model | ||||||
| ========== | ========== | ||||||
|  |  | ||||||
| Embedded Documents - if you had a `pk` field you will have to rename it from `_id` | Inheritance | ||||||
| to `pk` as pk is no longer a property of Embedded Documents. | ----------- | ||||||
|  |  | ||||||
|  | The inheritance model has changed, we no longer need to store an array of | ||||||
|  | :attr:`types` with the model we can just use the classname in :attr:`_cls`. | ||||||
|  | This means that you will have to update your indexes for each of your | ||||||
|  | inherited classes like so: :: | ||||||
|  |  | ||||||
|  |     # 1. Declaration of the class | ||||||
|  |     class Animal(Document): | ||||||
|  |         name = StringField() | ||||||
|  |         meta = { | ||||||
|  |             'allow_inheritance': True, | ||||||
|  |             'indexes': ['name'] | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |     # 2. Remove _types | ||||||
|  |     collection = Animal._get_collection() | ||||||
|  |     collection.update({}, {"$unset": {"_types": 1}}, multi=True) | ||||||
|  |  | ||||||
|  |     # 3. Confirm extra data is removed | ||||||
|  |     count = collection.find({'_types': {"$exists": True}}).count() | ||||||
|  |     assert count == 0 | ||||||
|  |  | ||||||
|  |     # 4. Remove indexes | ||||||
|  |     info = collection.index_information() | ||||||
|  |     indexes_to_drop = [key for key, value in info.items() | ||||||
|  |                        if '_types' in dict(value['key'])] | ||||||
|  |     for index in indexes_to_drop: | ||||||
|  |         collection.drop_index(index) | ||||||
|  |  | ||||||
|  |     # 5. Recreate indexes | ||||||
|  |     Animal.ensure_indexes() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Document Definition | ||||||
|  | ------------------- | ||||||
|  |  | ||||||
|  | The default for inheritance has changed - it is now off by default and | ||||||
|  | :attr:`_cls` will not be stored automatically with the class.  So if you extend | ||||||
|  | your :class:`~mongoengine.Document` or :class:`~mongoengine.EmbeddedDocuments` | ||||||
|  | you will need to declare :attr:`allow_inheritance` in the meta data like so: :: | ||||||
|  |  | ||||||
|  |     class Animal(Document): | ||||||
|  |         name = StringField() | ||||||
|  |  | ||||||
|  |         meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|  | Previously, if you had data in the database that wasn't defined in the Document | ||||||
|  | definition, it would set it as an attribute on the document.  This is no longer | ||||||
|  | the case and the data is set only in the ``document._data`` dictionary: :: | ||||||
|  |  | ||||||
|  |     >>> from mongoengine import * | ||||||
|  |     >>> class Animal(Document): | ||||||
|  |     ...    name = StringField() | ||||||
|  |     ... | ||||||
|  |     >>> cat = Animal(name="kit", size="small") | ||||||
|  |  | ||||||
|  |     # 0.7 | ||||||
|  |     >>> cat.size | ||||||
|  |     u'small' | ||||||
|  |  | ||||||
|  |     # 0.8 | ||||||
|  |     >>> cat.size | ||||||
|  |     Traceback (most recent call last): | ||||||
|  |       File "<stdin>", line 1, in <module> | ||||||
|  |     AttributeError: 'Animal' object has no attribute 'size' | ||||||
|  |  | ||||||
|  | The Document class has introduced a reserved function `clean()`, which will be | ||||||
|  | called before saving the document. If your document class happens to have a method | ||||||
|  | with the same name, please try to rename it. | ||||||
|  |  | ||||||
|  |     def clean(self): | ||||||
|  |         pass | ||||||
|  |  | ||||||
|  | ReferenceField | ||||||
|  | -------------- | ||||||
|  |  | ||||||
|  | ReferenceFields now store ObjectIds by default - this is more efficient than | ||||||
|  | DBRefs as we already know what Document types they reference:: | ||||||
|  |  | ||||||
|  |     # Old code | ||||||
|  |     class Animal(Document): | ||||||
|  |         name = ReferenceField('self') | ||||||
|  |  | ||||||
|  |     # New code to keep dbrefs | ||||||
|  |     class Animal(Document): | ||||||
|  |         name = ReferenceField('self', dbref=True) | ||||||
|  |  | ||||||
|  | To migrate all the references you need to touch each object and mark it as dirty | ||||||
|  | eg:: | ||||||
|  |  | ||||||
|  |     # Doc definition | ||||||
|  |     class Person(Document): | ||||||
|  |         name = StringField() | ||||||
|  |         parent = ReferenceField('self') | ||||||
|  |         friends = ListField(ReferenceField('self')) | ||||||
|  |  | ||||||
|  |     # Mark all ReferenceFields as dirty and save | ||||||
|  |     for p in Person.objects: | ||||||
|  |         p._mark_as_changed('parent') | ||||||
|  |         p._mark_as_changed('friends') | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  | `An example test migration for ReferenceFields is available on github | ||||||
|  | <https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/refrencefield_dbref_to_object_id.py>`_. | ||||||
|  |  | ||||||
|  | .. Note:: Internally mongoengine handles ReferenceFields the same, so they are | ||||||
|  |    converted to DBRef on loading and ObjectIds or DBRefs depending on settings | ||||||
|  |    on storage. | ||||||
|  |  | ||||||
|  | UUIDField | ||||||
|  | --------- | ||||||
|  |  | ||||||
|  | UUIDFields now default to storing binary values:: | ||||||
|  |  | ||||||
|  |     # Old code | ||||||
|  |     class Animal(Document): | ||||||
|  |         uuid = UUIDField() | ||||||
|  |  | ||||||
|  |     # New code | ||||||
|  |     class Animal(Document): | ||||||
|  |         uuid = UUIDField(binary=False) | ||||||
|  |  | ||||||
|  | To migrate all the uuids you need to touch each object and mark it as dirty | ||||||
|  | eg:: | ||||||
|  |  | ||||||
|  |     # Doc definition | ||||||
|  |     class Animal(Document): | ||||||
|  |         uuid = UUIDField() | ||||||
|  |  | ||||||
|  |     # Mark all UUIDFields as dirty and save | ||||||
|  |     for a in Animal.objects: | ||||||
|  |         a._mark_as_changed('uuid') | ||||||
|  |         a.save() | ||||||
|  |  | ||||||
|  | `An example test migration for UUIDFields is available on github | ||||||
|  | <https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/uuidfield_to_binary.py>`_. | ||||||
|  |  | ||||||
|  | DecimalField | ||||||
|  | ------------ | ||||||
|  |  | ||||||
|  | DecimalFields now store floats - previously it was storing strings and that | ||||||
|  | made it impossible to do comparisons when querying correctly.:: | ||||||
|  |  | ||||||
|  |     # Old code | ||||||
|  |     class Person(Document): | ||||||
|  |         balance = DecimalField() | ||||||
|  |  | ||||||
|  |     # New code | ||||||
|  |     class Person(Document): | ||||||
|  |         balance = DecimalField(force_string=True) | ||||||
|  |  | ||||||
|  | To migrate all the DecimalFields you need to touch each object and mark it as dirty | ||||||
|  | eg:: | ||||||
|  |  | ||||||
|  |     # Doc definition | ||||||
|  |     class Person(Document): | ||||||
|  |         balance = DecimalField() | ||||||
|  |  | ||||||
|  |     # Mark all DecimalField's as dirty and save | ||||||
|  |     for p in Person.objects: | ||||||
|  |         p._mark_as_changed('balance') | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  | .. note:: DecimalFields have also been improved with the addition of precision | ||||||
|  |     and rounding.  See :class:`~mongoengine.fields.DecimalField` for more information. | ||||||
|  |  | ||||||
|  | `An example test migration for DecimalFields is available on github | ||||||
|  | <https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/decimalfield_as_float.py>`_. | ||||||
|  |  | ||||||
|  | Cascading Saves | ||||||
|  | --------------- | ||||||
|  | To improve performance document saves will no longer automatically cascade. | ||||||
|  | Any changes to a Document's references will either have to be saved manually or | ||||||
|  | you will have to explicitly tell it to cascade on save:: | ||||||
|  |  | ||||||
|  |     # At the class level: | ||||||
|  |     class Person(Document): | ||||||
|  |         meta = {'cascade': True} | ||||||
|  |  | ||||||
|  |     # Or on save: | ||||||
|  |     my_document.save(cascade=True) | ||||||
|  |  | ||||||
|  | Storage | ||||||
|  | ------- | ||||||
|  |  | ||||||
|  | Document and Embedded Documents are now serialized based on declared field order. | ||||||
|  | Previously, the data was passed to mongodb as a dictionary and which meant that | ||||||
|  | order wasn't guaranteed - so things like ``$addToSet`` operations on | ||||||
|  | :class:`~mongoengine.EmbeddedDocument` could potentially fail in unexpected | ||||||
|  | ways. | ||||||
|  |  | ||||||
|  | If this impacts you, you may want to rewrite the objects using the | ||||||
|  | ``doc.mark_as_dirty('field')`` pattern described above.  If you are using a | ||||||
|  | compound primary key then you will need to ensure the order is fixed and match | ||||||
|  | your EmbeddedDocument to that order. | ||||||
|  |  | ||||||
|  | Querysets | ||||||
|  | ========= | ||||||
|  |  | ||||||
|  | Attack of the clones | ||||||
|  | -------------------- | ||||||
|  |  | ||||||
|  | Querysets now return clones and should no longer be considered editable in | ||||||
|  | place.  This brings us in line with how Django's querysets work and removes a | ||||||
|  | long running gotcha.  If you edit your querysets inplace you will have to | ||||||
|  | update your code like so: :: | ||||||
|  |  | ||||||
|  |     # Old code: | ||||||
|  |     mammals = Animal.objects(type="mammal") | ||||||
|  |     mammals.filter(order="Carnivora")       # Returns a cloned queryset that isn't assigned to anything - so this will break in 0.8 | ||||||
|  |     [m for m in mammals]                    # This will return all mammals in 0.8 as the 2nd filter returned a new queryset | ||||||
|  |  | ||||||
|  |     # Update example a) assign queryset after a change: | ||||||
|  |     mammals = Animal.objects(type="mammal") | ||||||
|  |     carnivores = mammals.filter(order="Carnivora") # Reassign the new queryset so filter can be applied | ||||||
|  |     [m for m in carnivores]                        # This will return all carnivores | ||||||
|  |  | ||||||
|  |     # Update example b) chain the queryset: | ||||||
|  |     mammals = Animal.objects(type="mammal").filter(order="Carnivora")  # The final queryset is assigned to mammals | ||||||
|  |     [m for m in mammals]                                               # This will return all carnivores | ||||||
|  |  | ||||||
|  | Len iterates the queryset | ||||||
|  | ------------------------- | ||||||
|  |  | ||||||
|  | If you ever did `len(queryset)` it previously did a `count()` under the covers, | ||||||
|  | this caused some unusual issues.  As `len(queryset)` is most often used by | ||||||
|  | `list(queryset)` we now cache the queryset results and use that for the length. | ||||||
|  |  | ||||||
|  | This isn't as performant as a `count()` and if you aren't iterating the | ||||||
|  | queryset you should upgrade to use count:: | ||||||
|  |  | ||||||
|  |     # Old code | ||||||
|  |     len(Animal.objects(type="mammal")) | ||||||
|  |  | ||||||
|  |     # New code | ||||||
|  |     Animal.objects(type="mammal").count() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | .only() now inline with .exclude() | ||||||
|  | ---------------------------------- | ||||||
|  |  | ||||||
|  | The behaviour of `.only()` was highly ambiguous, now it works in mirror fashion | ||||||
|  | to `.exclude()`.  Chaining `.only()` calls will increase the fields required:: | ||||||
|  |  | ||||||
|  |     # Old code | ||||||
|  |     Animal.objects().only(['type', 'name']).only('name', 'order')  # Would have returned just `name` | ||||||
|  |  | ||||||
|  |     # New code | ||||||
|  |     Animal.objects().only('name') | ||||||
|  |  | ||||||
|  |     # Note: | ||||||
|  |     Animal.objects().only(['name']).only('order')  # Now returns `name` *and* `order` | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Client | ||||||
|  | ====== | ||||||
|  | PyMongo 2.4 came with a new connection client; MongoClient_ and started the | ||||||
|  | deprecation of the old :class:`~pymongo.connection.Connection`. MongoEngine | ||||||
|  | now uses the latest `MongoClient` for connections.  By default operations were | ||||||
|  | `safe` but if you turned them off or used the connection directly this will | ||||||
|  | impact your queries. | ||||||
|  |  | ||||||
|  | Querysets | ||||||
|  | --------- | ||||||
|  |  | ||||||
|  | Safe | ||||||
|  | ^^^^ | ||||||
|  |  | ||||||
|  | `safe` has been deprecated in the new MongoClient connection.  Please use | ||||||
|  | `write_concern` instead.  As `safe` always defaulted to `True`, normally no code | ||||||
|  | change is required. To disable confirmation of the write just pass `{"w": 0}` | ||||||
|  | eg: :: | ||||||
|  |  | ||||||
|  |    # Old | ||||||
|  |    Animal(name="Dinasour").save(safe=False) | ||||||
|  |  | ||||||
|  |    # new code: | ||||||
|  |    Animal(name="Dinasour").save(write_concern={"w": 0}) | ||||||
|  |  | ||||||
|  | Write Concern | ||||||
|  | ^^^^^^^^^^^^^ | ||||||
|  |  | ||||||
|  | `write_options` has been replaced with `write_concern` to bring it inline with | ||||||
|  | pymongo. To upgrade simply rename any instances where you used the `write_option` | ||||||
|  | keyword  to `write_concern` like so:: | ||||||
|  |  | ||||||
|  |    # Old code: | ||||||
|  |    Animal(name="Dinasour").save(write_options={"w": 2}) | ||||||
|  |  | ||||||
|  |    # new code: | ||||||
|  |    Animal(name="Dinasour").save(write_concern={"w": 2}) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Indexes | ||||||
|  | ======= | ||||||
|  |  | ||||||
|  | Index methods are no longer tied to querysets but rather to the document class. | ||||||
|  | Although `QuerySet._ensure_indexes` and `QuerySet.ensure_index` still exist. | ||||||
|  | They should be replaced with :func:`~mongoengine.Document.ensure_indexes` / | ||||||
|  | :func:`~mongoengine.Document.ensure_index`. | ||||||
|  |  | ||||||
|  | SequenceFields | ||||||
|  | ============== | ||||||
|  |  | ||||||
|  | :class:`~mongoengine.fields.SequenceField` now inherits from `BaseField` to | ||||||
|  | allow flexible storage of the calculated value.  As such MIN and MAX settings | ||||||
|  | are no longer handled. | ||||||
|  |  | ||||||
|  | .. _MongoClient: http://blog.mongodb.org/post/36666163412/introducing-mongoclient | ||||||
|  |  | ||||||
|  | 0.6 to 0.7 | ||||||
|  | ********** | ||||||
|  |  | ||||||
|  | Cascade saves | ||||||
|  | ============= | ||||||
|  |  | ||||||
|  | Saves will raise a `FutureWarning` if they cascade and cascade hasn't been set | ||||||
|  | to True.  This is because in 0.8 it will default to False.  If you require | ||||||
|  | cascading saves then either set it in the `meta` or pass | ||||||
|  | via `save` eg :: | ||||||
|  |  | ||||||
|  |     # At the class level: | ||||||
|  |     class Person(Document): | ||||||
|  |         meta = {'cascade': True} | ||||||
|  |  | ||||||
|  |     # Or in code: | ||||||
|  |     my_document.save(cascade=True) | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |     Remember: cascading saves **do not** cascade through lists. | ||||||
|  |  | ||||||
|  | ReferenceFields | ||||||
|  | =============== | ||||||
|  |  | ||||||
|  | ReferenceFields now can store references as ObjectId strings instead of DBRefs. | ||||||
|  | This will become the default in 0.8 and if `dbref` is not set a `FutureWarning` | ||||||
|  | will be raised. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | To explicitly continue to use DBRefs change the `dbref` flag | ||||||
|  | to True :: | ||||||
|  |  | ||||||
|  |    class Person(Document): | ||||||
|  |        groups = ListField(ReferenceField(Group, dbref=True)) | ||||||
|  |  | ||||||
|  | To migrate to using strings instead of DBRefs you will have to manually | ||||||
|  | migrate :: | ||||||
|  |  | ||||||
|  |         # Step 1 - Migrate the model definition | ||||||
|  |         class Group(Document): | ||||||
|  |             author = ReferenceField(User, dbref=False) | ||||||
|  |             members = ListField(ReferenceField(User, dbref=False)) | ||||||
|  |  | ||||||
|  |         # Step 2 - Migrate the data | ||||||
|  |         for g in Group.objects(): | ||||||
|  |             g.author = g.author | ||||||
|  |             g.members = g.members | ||||||
|  |             g.save() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | item_frequencies | ||||||
|  | ================ | ||||||
|  |  | ||||||
|  | In the 0.6 series we added support for null / zero / false values in | ||||||
|  | item_frequencies.  A side effect was to return keys in the value they are | ||||||
|  | stored in rather than as string representations.  Your code may need to be | ||||||
|  | updated to handle native types rather than strings keys for the results of | ||||||
|  | item frequency queries. | ||||||
|  |  | ||||||
|  | BinaryFields | ||||||
|  | ============ | ||||||
|  |  | ||||||
|  | Binary fields have been updated so that they are native binary types.  If you | ||||||
|  | previously were doing `str` comparisons with binary field values you will have | ||||||
|  | to update and wrap the value in a `str`. | ||||||
|  |  | ||||||
|  | 0.5 to 0.6 | ||||||
|  | ********** | ||||||
|  |  | ||||||
|  | Embedded Documents - if you had a `pk` field you will have to rename it from | ||||||
|  | `_id` to `pk` as pk is no longer a property of Embedded Documents. | ||||||
|  |  | ||||||
| Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw | Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw | ||||||
| an InvalidDocument error as they aren't currently supported. | an InvalidDocument error as they aren't currently supported. | ||||||
|  |  | ||||||
| Document._get_subclasses - Is no longer used and the class method has been removed. | Document._get_subclasses - Is no longer used and the class method has been | ||||||
|  | removed. | ||||||
|  |  | ||||||
| Document.objects.with_id - now raises an InvalidQueryError if used with a filter. | Document.objects.with_id - now raises an InvalidQueryError if used with a | ||||||
|  | filter. | ||||||
|  |  | ||||||
| FutureWarning - A future warning has been added to all inherited classes that | FutureWarning - A future warning has been added to all inherited classes that | ||||||
| don't define `allow_inheritance` in their meta. | don't define :attr:`allow_inheritance` in their meta. | ||||||
|  |  | ||||||
|  | You may need to update pyMongo to 2.0 for use with Sharding. | ||||||
|  |  | ||||||
| 0.4 to 0.5 | 0.4 to 0.5 | ||||||
| =========== | ********** | ||||||
|  |  | ||||||
| There have been the following backwards incompatibilities from 0.4 to 0.5.  The | There have been the following backwards incompatibilities from 0.4 to 0.5.  The | ||||||
| main areas of changed are: choices in fields, map_reduce and collection names. | main areas of change are: choices in fields, map_reduce and collection names. | ||||||
|  |  | ||||||
| Choice options: | Choice options: | ||||||
| --------------- | =============== | ||||||
|  |  | ||||||
| Are now expected to be an iterable of tuples, with the first element in each | Are now expected to be an iterable of tuples, with the first element in each | ||||||
| tuple being the actual value to be stored. The second element is the | tuple being the actual value to be stored. The second element is the | ||||||
| @@ -33,13 +540,13 @@ human-readable name for the option. | |||||||
|  |  | ||||||
|  |  | ||||||
| PyMongo / MongoDB | PyMongo / MongoDB | ||||||
| ----------------- | ================= | ||||||
|  |  | ||||||
| map reduce now requires pymongo 1.11+- The pymongo merge_output and reduce_output | map reduce now requires pymongo 1.11+- The pymongo `merge_output` and | ||||||
| parameters, have been depreciated. | `reduce_output` parameters have been deprecated. | ||||||
|  |  | ||||||
| More methods now use map_reduce as db.eval is not supported for sharding as such | More methods now use map_reduce as db.eval is not supported for sharding as | ||||||
| the following have been changed: | such the following have been changed: | ||||||
|  |  | ||||||
|     * :meth:`~mongoengine.queryset.QuerySet.sum` |     * :meth:`~mongoengine.queryset.QuerySet.sum` | ||||||
|     * :meth:`~mongoengine.queryset.QuerySet.average` |     * :meth:`~mongoengine.queryset.QuerySet.average` | ||||||
| @@ -47,10 +554,10 @@ the following have been changed: | |||||||
|  |  | ||||||
|  |  | ||||||
| Default collection naming | Default collection naming | ||||||
| ------------------------- | ========================= | ||||||
|  |  | ||||||
| Previously it was just lowercase, its now much more pythonic and readable as its | Previously it was just lowercase, it's now much more pythonic and readable as | ||||||
| lowercase and underscores, previously :: | it's lowercase and underscores, previously :: | ||||||
|  |  | ||||||
|     class MyAceDocument(Document): |     class MyAceDocument(Document): | ||||||
|         pass |         pass | ||||||
| @@ -74,7 +581,7 @@ To upgrade use a Mixin class to set meta like so :: | |||||||
|     class MyAceDocument(Document, BaseMixin): |     class MyAceDocument(Document, BaseMixin): | ||||||
|         pass |         pass | ||||||
|  |  | ||||||
|     MyAceDocument._get_collection_name() == myacedocument |     MyAceDocument._get_collection_name() == "myacedocument" | ||||||
|  |  | ||||||
| Alternatively, you can rename your collections eg :: | Alternatively, you can rename your collections eg :: | ||||||
|  |  | ||||||
| @@ -86,7 +593,8 @@ Alternatively, you can rename your collections eg :: | |||||||
|  |  | ||||||
|         failure = False |         failure = False | ||||||
|  |  | ||||||
|         collection_names = [d._get_collection_name() for d in _document_registry.values()] |         collection_names = [d._get_collection_name() | ||||||
|  |                             for d in _document_registry.values()] | ||||||
|  |  | ||||||
|         for new_style_name in collection_names: |         for new_style_name in collection_names: | ||||||
|             if not new_style_name:  # embedded documents don't have collections |             if not new_style_name:  # embedded documents don't have collections | ||||||
| @@ -104,10 +612,17 @@ Alternatively, you can rename your collections eg :: | |||||||
|                         old_style_name, new_style_name) |                         old_style_name, new_style_name) | ||||||
|                 else: |                 else: | ||||||
|                     db[old_style_name].rename(new_style_name) |                     db[old_style_name].rename(new_style_name) | ||||||
|                     print "Renamed:  %s to %s" % (old_style_name, new_style_name) |                     print "Renamed:  %s to %s" % (old_style_name, | ||||||
|  |                                                   new_style_name) | ||||||
|  |  | ||||||
|         if failure: |         if failure: | ||||||
|             print "Upgrading  collection names failed" |             print "Upgrading  collection names failed" | ||||||
|         else: |         else: | ||||||
|             print "Upgraded collection names" |             print "Upgraded collection names" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | mongodb 1.8 > 2.0 + | ||||||
|  | =================== | ||||||
|  |  | ||||||
|  | It's been reported that indexes may need to be recreated in the newer index format. | ||||||
|  | To do this drop indexes and call ``ensure_indexes`` on each model. | ||||||
|   | |||||||
| @@ -1,24 +1,42 @@ | |||||||
| import document | # Import submodules so that we can expose their __all__ | ||||||
| from document import * | from mongoengine import connection | ||||||
| import fields | from mongoengine import document | ||||||
| from fields import * | from mongoengine import errors | ||||||
| import connection | from mongoengine import fields | ||||||
| from connection import * | from mongoengine import queryset | ||||||
| import queryset | from mongoengine import signals | ||||||
| from queryset import * |  | ||||||
| import signals |  | ||||||
| from signals import * |  | ||||||
|  |  | ||||||
| __all__ = (document.__all__ + fields.__all__ + connection.__all__ + | # Import everything from each submodule so that it can be accessed via | ||||||
|            queryset.__all__ + signals.__all__) | # mongoengine, e.g. instead of `from mongoengine.connection import connect`, | ||||||
|  | # users can simply use `from mongoengine import connect`, or even | ||||||
|  | # `from mongoengine import *` and then `connect('testdb')`. | ||||||
|  | from mongoengine.connection import * | ||||||
|  | from mongoengine.document import * | ||||||
|  | from mongoengine.errors import * | ||||||
|  | from mongoengine.fields import * | ||||||
|  | from mongoengine.queryset import * | ||||||
|  | from mongoengine.signals import * | ||||||
|  |  | ||||||
| VERSION = (0, 6, 1) |  | ||||||
|  | __all__ = ( | ||||||
|  |     list(document.__all__) | ||||||
|  |     + list(fields.__all__) | ||||||
|  |     + list(connection.__all__) | ||||||
|  |     + list(queryset.__all__) | ||||||
|  |     + list(signals.__all__) | ||||||
|  |     + list(errors.__all__) | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | VERSION = (0, 20, 0) | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_version(): | def get_version(): | ||||||
|     version = '%s.%s' % (VERSION[0], VERSION[1]) |     """Return the VERSION as a string. | ||||||
|     if VERSION[2]: |  | ||||||
|         version = '%s.%s' % (version, VERSION[2]) |     For example, if `VERSION == (0, 10, 7)`, return '0.10.7'. | ||||||
|     return version |     """ | ||||||
|  |     return ".".join(map(str, VERSION)) | ||||||
|  |  | ||||||
|  |  | ||||||
| __version__ = get_version() | __version__ = get_version() | ||||||
|   | |||||||
							
								
								
									
										1325
									
								
								mongoengine/base.py
									
									
									
									
									
								
							
							
						
						
									
										1325
									
								
								mongoengine/base.py
									
									
									
									
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										33
									
								
								mongoengine/base/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										33
									
								
								mongoengine/base/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,33 @@ | |||||||
|  | # Base module is split into several files for convenience. Files inside of | ||||||
|  | # this module should import from a specific submodule (e.g. | ||||||
|  | # `from mongoengine.base.document import BaseDocument`), but all of the | ||||||
|  | # other modules should import directly from the top-level module (e.g. | ||||||
|  | # `from mongoengine.base import BaseDocument`). This approach is cleaner and | ||||||
|  | # also helps with cyclical import errors. | ||||||
|  | from mongoengine.base.common import * | ||||||
|  | from mongoengine.base.datastructures import * | ||||||
|  | from mongoengine.base.document import * | ||||||
|  | from mongoengine.base.fields import * | ||||||
|  | from mongoengine.base.metaclasses import * | ||||||
|  |  | ||||||
|  | __all__ = ( | ||||||
|  |     # common | ||||||
|  |     "UPDATE_OPERATORS", | ||||||
|  |     "_document_registry", | ||||||
|  |     "get_document", | ||||||
|  |     # datastructures | ||||||
|  |     "BaseDict", | ||||||
|  |     "BaseList", | ||||||
|  |     "EmbeddedDocumentList", | ||||||
|  |     "LazyReference", | ||||||
|  |     # document | ||||||
|  |     "BaseDocument", | ||||||
|  |     # fields | ||||||
|  |     "BaseField", | ||||||
|  |     "ComplexBaseField", | ||||||
|  |     "ObjectIdField", | ||||||
|  |     "GeoJsonBaseField", | ||||||
|  |     # metaclasses | ||||||
|  |     "DocumentMetaclass", | ||||||
|  |     "TopLevelDocumentMetaclass", | ||||||
|  | ) | ||||||
							
								
								
									
										62
									
								
								mongoengine/base/common.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										62
									
								
								mongoengine/base/common.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,62 @@ | |||||||
|  | from mongoengine.errors import NotRegistered | ||||||
|  |  | ||||||
|  | __all__ = ("UPDATE_OPERATORS", "get_document", "_document_registry") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | UPDATE_OPERATORS = { | ||||||
|  |     "set", | ||||||
|  |     "unset", | ||||||
|  |     "inc", | ||||||
|  |     "dec", | ||||||
|  |     "mul", | ||||||
|  |     "pop", | ||||||
|  |     "push", | ||||||
|  |     "push_all", | ||||||
|  |     "pull", | ||||||
|  |     "pull_all", | ||||||
|  |     "add_to_set", | ||||||
|  |     "set_on_insert", | ||||||
|  |     "min", | ||||||
|  |     "max", | ||||||
|  |     "rename", | ||||||
|  | } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | _document_registry = {} | ||||||
|  |  | ||||||
|  |  | ||||||
def get_document(name):
    """Look up a Document class previously registered under ``name``.

    When the exact key is absent, fall back to matching on the trailing
    (old-style) class name, but only if that match is unambiguous.
    Raises ``NotRegistered`` when nothing matches.
    """
    doc = _document_registry.get(name, None)
    if doc:
        return doc

    # Possible old style name: match on the last dotted component only.
    single_end = name.split(".")[-1]
    compound_end = ".%s" % single_end
    candidates = [
        k for k in _document_registry if k.endswith(compound_end) or k == single_end
    ]
    # Accept the fallback only when exactly one registered name matches.
    if len(candidates) == 1:
        doc = _document_registry.get(candidates.pop(), None)
    if doc:
        return doc

    raise NotRegistered(
        """
            `%s` has not been registered in the document registry.
            Importing the document class automatically registers it, has it
            been imported?
        """.strip()
        % name
    )
|  |  | ||||||
|  |  | ||||||
def _get_documents_by_db(connection_alias, default_connection_alias):
    """Get all registered Documents class attached to a given database"""
    matching = []
    for doc_cls in _document_registry.values():
        # A document without an explicit db_alias belongs to the default one.
        doc_alias = doc_cls._meta.get("db_alias", default_connection_alias)
        if doc_alias == connection_alias:
            matching.append(doc_cls)
    return matching
							
								
								
									
										475
									
								
								mongoengine/base/datastructures.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										475
									
								
								mongoengine/base/datastructures.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,475 @@ | |||||||
|  | import weakref | ||||||
|  |  | ||||||
|  | from bson import DBRef | ||||||
|  |  | ||||||
|  | from mongoengine.common import _import_class | ||||||
|  | from mongoengine.errors import DoesNotExist, MultipleObjectsReturned | ||||||
|  |  | ||||||
|  | __all__ = ( | ||||||
|  |     "BaseDict", | ||||||
|  |     "StrictDict", | ||||||
|  |     "BaseList", | ||||||
|  |     "EmbeddedDocumentList", | ||||||
|  |     "LazyReference", | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
def mark_as_changed_wrapper(parent_method):
    """Wrap ``parent_method`` so every call also flags the container dirty.

    After the wrapped method runs, ``self._mark_as_changed()`` is invoked so
    the owning document records this container as changed.
    """

    def notify_after(self, *args, **kwargs):
        # super() is unavailable in a decorator, so call the captured parent
        # method explicitly, then report the mutation.
        outcome = parent_method(self, *args, **kwargs)
        self._mark_as_changed()
        return outcome

    return notify_after
|  |  | ||||||
|  |  | ||||||
def mark_key_as_changed_wrapper(parent_method):
    """Wrap ``parent_method`` so every call flags the given key as dirty.

    After the wrapped method runs, ``self._mark_as_changed(key)`` is invoked
    with the first positional argument, i.e. the key that was mutated.
    """

    def notify_key_after(self, key, *args, **kwargs):
        # super() is unavailable in a decorator, so call the captured parent
        # method explicitly, then report which key changed.
        outcome = parent_method(self, key, *args, **kwargs)
        self._mark_as_changed(key)
        return outcome

    return notify_key_after
|  |  | ||||||
|  |  | ||||||
class BaseDict(dict):
    """A special dict so we can watch any changes."""

    _dereferenced = False  # set by the dereferencing machinery once resolved
    _instance = None  # weakref proxy to the owning document, if any
    _name = None  # dotted name of this dict within the owning document

    def __init__(self, dict_items, instance, name):
        BaseDocument = _import_class("BaseDocument")

        # Hold only a weak proxy so this container does not keep the owning
        # document alive (and to avoid reference cycles).
        if isinstance(instance, BaseDocument):
            self._instance = weakref.proxy(instance)
        self._name = name
        super().__init__(dict_items)

    def get(self, key, default=None):
        # get does not use __getitem__ by default so we must override it as well
        try:
            return self.__getitem__(key)
        except KeyError:
            return default

    def __getitem__(self, key):
        value = super().__getitem__(key)

        EmbeddedDocument = _import_class("EmbeddedDocument")
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = self._instance
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            # Lazily wrap plain dicts so nested mutations are tracked too,
            # and persist the wrapper back into this dict.
            value = BaseDict(value, None, "{}.{}".format(self._name, key))
            super().__setitem__(key, value)
            value._instance = self._instance
        elif isinstance(value, list) and not isinstance(value, BaseList):
            # Same lazy wrapping for plain lists.
            value = BaseList(value, None, "{}.{}".format(self._name, key))
            super().__setitem__(key, value)
            value._instance = self._instance
        return value

    def __getstate__(self):
        # NOTE(review): assigns ``self.instance`` (no underscore) rather than
        # ``self._instance`` -- looks like a typo; preserved as-is here.
        self.instance = None
        self._dereferenced = False
        return self

    def __setstate__(self, state):
        # Rebinding the local name ``self`` has no effect on the unpickled
        # object; restoration relies on __getstate__ returning ``self``.
        self = state
        return self

    # Every mutating dict method is wrapped so the owning document is
    # marked dirty (with the changed key where one is available).
    __setitem__ = mark_key_as_changed_wrapper(dict.__setitem__)
    __delattr__ = mark_key_as_changed_wrapper(dict.__delattr__)
    __delitem__ = mark_key_as_changed_wrapper(dict.__delitem__)
    pop = mark_as_changed_wrapper(dict.pop)
    clear = mark_as_changed_wrapper(dict.clear)
    update = mark_as_changed_wrapper(dict.update)
    popitem = mark_as_changed_wrapper(dict.popitem)
    setdefault = mark_as_changed_wrapper(dict.setdefault)

    def _mark_as_changed(self, key=None):
        # Propagate the change notification to the owning document, using the
        # dotted path to the changed key when one is given.
        if hasattr(self._instance, "_mark_as_changed"):
            if key:
                self._instance._mark_as_changed("{}.{}".format(self._name, key))
            else:
                self._instance._mark_as_changed(self._name)
|  |  | ||||||
|  |  | ||||||
class BaseList(list):
    """A special list so we can watch any changes."""

    _dereferenced = False  # set by the dereferencing machinery once resolved
    _instance = None  # weakref proxy to the owning document, if any
    _name = None  # dotted name of this list within the owning document

    def __init__(self, list_items, instance, name):
        BaseDocument = _import_class("BaseDocument")

        # Hold only a weak proxy so this container does not keep the owning
        # document alive (and to avoid reference cycles).
        if isinstance(instance, BaseDocument):
            self._instance = weakref.proxy(instance)
        self._name = name
        super().__init__(list_items)

    def __getitem__(self, key):
        # change index to positive value because MongoDB does not support negative one
        if isinstance(key, int) and key < 0:
            key = len(self) + key
        value = super().__getitem__(key)

        if isinstance(key, slice):
            # When receiving a slice operator, we don't convert the structure and bind
            # to parent's instance. This is buggy for now but would require more work to be handled properly
            return value

        EmbeddedDocument = _import_class("EmbeddedDocument")
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = self._instance
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            # Replace dict by BaseDict
            value = BaseDict(value, None, "{}.{}".format(self._name, key))
            super().__setitem__(key, value)
            value._instance = self._instance
        elif isinstance(value, list) and not isinstance(value, BaseList):
            # Replace list by BaseList
            value = BaseList(value, None, "{}.{}".format(self._name, key))
            super().__setitem__(key, value)
            value._instance = self._instance
        return value

    def __iter__(self):
        # Plain iteration over the underlying list; note this bypasses the
        # wrapping performed in __getitem__.
        yield from super().__iter__()

    def __getstate__(self):
        # NOTE(review): assigns ``self.instance`` (no underscore) rather than
        # ``self._instance`` -- looks like a typo; preserved as-is here.
        self.instance = None
        self._dereferenced = False
        return self

    def __setstate__(self, state):
        # Rebinding the local name ``self`` has no effect on the unpickled
        # object; restoration relies on __getstate__ returning ``self``.
        self = state
        return self

    def __setitem__(self, key, value):
        changed_key = key
        if isinstance(key, slice):
            # In case of slice, we don't bother to identify the exact elements being updated
            # instead, we simply marks the whole list as changed
            changed_key = None

        result = super().__setitem__(key, value)
        self._mark_as_changed(changed_key)
        return result

    # Every mutating list method is wrapped so the owning document is
    # marked dirty.
    append = mark_as_changed_wrapper(list.append)
    extend = mark_as_changed_wrapper(list.extend)
    insert = mark_as_changed_wrapper(list.insert)
    pop = mark_as_changed_wrapper(list.pop)
    remove = mark_as_changed_wrapper(list.remove)
    reverse = mark_as_changed_wrapper(list.reverse)
    sort = mark_as_changed_wrapper(list.sort)
    __delitem__ = mark_as_changed_wrapper(list.__delitem__)
    __iadd__ = mark_as_changed_wrapper(list.__iadd__)
    __imul__ = mark_as_changed_wrapper(list.__imul__)

    def _mark_as_changed(self, key=None):
        # Propagate the change notification to the owning document. ``key`` is
        # normalized with ``% len(self)`` so negative indices map to their
        # positive form. NOTE(review): this raises ZeroDivisionError on an
        # empty list -- presumably callers only pass keys that were valid
        # indices; confirm before relying on it.
        if hasattr(self._instance, "_mark_as_changed"):
            if key is not None:
                self._instance._mark_as_changed(
                    "{}.{}".format(self._name, key % len(self))
                )
            else:
                self._instance._mark_as_changed(self._name)
|  |  | ||||||
|  |  | ||||||
class EmbeddedDocumentList(BaseList):
    """A ``BaseList`` of embedded documents exposing a small queryset-like
    API (``filter``/``exclude``/``get``/``first``/``create``/``save``/
    ``delete``/``update``).
    """

    def __init__(self, list_items, instance, name):
        super().__init__(list_items, instance, name)
        # Unlike BaseList (which keeps a weak proxy), keep a direct reference
        # to the owner -- it is needed by create()/save()/delete() below.
        self._instance = instance

    @classmethod
    def __match_all(cls, embedded_doc, kwargs):
        """Return True if a given embedded doc matches all the filter
        kwargs. If it doesn't return False.
        """
        for key, expected_value in kwargs.items():
            doc_val = getattr(embedded_doc, key)
            # Accept either an exact value match or a string-coerced match.
            if doc_val != expected_value and str(doc_val) != expected_value:
                return False
        return True

    @classmethod
    def __only_matches(cls, embedded_docs, kwargs):
        """Return embedded docs that match the filter kwargs."""
        if not kwargs:
            return embedded_docs
        return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)]

    def filter(self, **kwargs):
        """
        Filters the list by only including embedded documents with the
        given keyword arguments.

        This method only supports simple comparison (e.g. .filter(name='John Doe'))
        and does not support operators like __gte, __lte, __icontains like queryset.filter does

        :param kwargs: The keyword arguments corresponding to the fields to
         filter on. *Multiple arguments are treated as if they are ANDed
         together.*
        :return: A new ``EmbeddedDocumentList`` containing the matching
         embedded documents.

        Raises ``AttributeError`` if a given keyword is not a valid field for
        the embedded document class.
        """
        values = self.__only_matches(self, kwargs)
        return EmbeddedDocumentList(values, self._instance, self._name)

    def exclude(self, **kwargs):
        """
        Filters the list by excluding embedded documents with the given
        keyword arguments.

        :param kwargs: The keyword arguments corresponding to the fields to
         exclude on. *Multiple arguments are treated as if they are ANDed
         together.*
        :return: A new ``EmbeddedDocumentList`` containing the non-matching
         embedded documents.

        Raises ``AttributeError`` if a given keyword is not a valid field for
        the embedded document class.
        """
        exclude = self.__only_matches(self, kwargs)
        values = [item for item in self if item not in exclude]
        return EmbeddedDocumentList(values, self._instance, self._name)

    def count(self):
        """
        The number of embedded documents in the list.

        :return: The length of the list, equivalent to the result of ``len()``.
        """
        return len(self)

    def get(self, **kwargs):
        """
        Retrieves an embedded document determined by the given keyword
        arguments.

        :param kwargs: The keyword arguments corresponding to the fields to
         search on. *Multiple arguments are treated as if they are ANDed
         together.*
        :return: The embedded document matched by the given keyword arguments.

        Raises ``DoesNotExist`` if the arguments used to query an embedded
        document returns no results. ``MultipleObjectsReturned`` if more
        than one result is returned.
        """
        values = self.__only_matches(self, kwargs)
        if len(values) == 0:
            raise DoesNotExist("%s matching query does not exist." % self._name)
        elif len(values) > 1:
            raise MultipleObjectsReturned(
                "%d items returned, instead of 1" % len(values)
            )

        return values[0]

    def first(self):
        """Return the first embedded document in the list, or ``None``
        if empty.
        """
        if len(self) > 0:
            return self[0]

    def create(self, **values):
        """
        Creates a new instance of the EmbeddedDocument and appends it to this EmbeddedDocumentList.

        .. note::
            the instance of the EmbeddedDocument is not automatically saved to the database.
            You still need to call .save() on the parent Document.

        :param values: A dictionary of values for the embedded document.
        :return: The new embedded document instance.
        """
        name = self._name
        # Resolve the embedded document class from the owner's field schema.
        EmbeddedClass = self._instance._fields[name].field.document_type_obj
        self._instance[self._name].append(EmbeddedClass(**values))

        return self._instance[self._name][-1]

    def save(self, *args, **kwargs):
        """
        Saves the ancestor document.

        :param args: Arguments passed up to the ancestor Document's save
         method.
        :param kwargs: Keyword arguments passed up to the ancestor Document's
         save method.
        """
        self._instance.save(*args, **kwargs)

    def delete(self):
        """
        Deletes the embedded documents from the database.

        .. note::
            The embedded document changes are not automatically saved
            to the database after calling this method.

        :return: The number of entries deleted.
        """
        values = list(self)
        for item in values:
            self._instance[self._name].remove(item)

        return len(values)

    def update(self, **update):
        """
        Updates the embedded documents with the given replacement values. This
        function does not support mongoDB update operators such as ``inc__``.

        .. note::
            The embedded document changes are not automatically saved
            to the database after calling this method.

        :param update: A dictionary of update values to apply to each
         embedded document.
        :return: The number of entries updated.
        """
        if len(update) == 0:
            return 0
        values = list(self)
        for item in values:
            for k, v in update.items():
                setattr(item, k, v)

        return len(values)
|  |  | ||||||
|  |  | ||||||
class StrictDict:
    """A memory-efficient dict-like object restricted to a fixed key set.

    Concrete subclasses are produced by :meth:`create`, which builds a class
    whose ``__slots__`` allow exactly the given keys.  Keys that would shadow
    the mapping API (``get``, ``pop``, ...) are stored under an internal
    ``_reserved_`` prefix; the mapping methods translate transparently.
    """

    __slots__ = ()
    # Keys that collide with attributes of this class and therefore must be
    # mangled before being stored as instance attributes.
    _special_fields = {"get", "pop", "iteritems", "items", "keys", "create"}
    # Cache of generated subclasses, keyed by the frozenset of mangled keys.
    _classes = {}

    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            # Route through __setitem__ so special field names are mangled
            # consistently (bug fix: a plain setattr raised AttributeError
            # for special keys such as ``get``).
            self[k] = v

    def _to_attr(self, key):
        """Return the attribute name under which ``key`` is stored."""
        return "_reserved_" + key if key in self._special_fields else key

    def __getitem__(self, key):
        try:
            return getattr(self, self._to_attr(key))
        except AttributeError:
            raise KeyError(key)

    def __setitem__(self, key, value):
        return setattr(self, self._to_attr(key), value)

    def __contains__(self, key):
        # Bug fix: previously tested ``hasattr(self, key)`` without mangling,
        # so e.g. ``"get" in d`` was always True (it found the method).
        return hasattr(self, self._to_attr(key))

    def get(self, key, default=None):
        """Return ``self[key]``, or ``default`` when the key is unset."""
        try:
            return self[key]
        except KeyError:
            return default

    def pop(self, key, default=None):
        """Remove ``key`` and return its value, or ``default`` when unset."""
        v = self.get(key, default)
        try:
            # Bug fix: delete the mangled attribute so special keys
            # (e.g. ``pop``) are actually removed.
            delattr(self, self._to_attr(key))
        except AttributeError:
            pass
        return v

    def iteritems(self):
        for key in self:
            yield key, self[key]

    def items(self):
        # NOTE(review): keys are reported in their stored (possibly mangled)
        # ``_reserved_*`` form, as in the original implementation; lookups on
        # those names still resolve because mangled names are not special.
        return [(k, self[k]) for k in iter(self)]

    def iterkeys(self):
        return iter(self)

    def keys(self):
        return list(iter(self))

    def __iter__(self):
        # Only yield slots that are actually set on this instance.
        return (key for key in self.__slots__ if hasattr(self, key))

    def __len__(self):
        return len(list(self.items()))

    def __eq__(self, other):
        return list(self.items()) == list(other.items())

    def __ne__(self, other):
        return not (self == other)

    @classmethod
    def create(cls, allowed_keys):
        """Build (or fetch from cache) a StrictDict subclass whose
        ``__slots__`` permit exactly ``allowed_keys``.
        """
        mangled = tuple(
            ("_reserved_" + k if k in cls._special_fields else k) for k in allowed_keys
        )
        cache_key = frozenset(mangled)
        if cache_key not in cls._classes:

            class SpecificStrictDict(cls):
                __slots__ = mangled

                def __repr__(self):
                    return "{%s}" % ", ".join(
                        '"{!s}": {!r}'.format(k, v) for k, v in self.items()
                    )

            cls._classes[cache_key] = SpecificStrictDict
        return cls._classes[cache_key]
|  |  | ||||||
|  |  | ||||||
class LazyReference(DBRef):
    """A ``DBRef`` subclass that can lazily fetch the referenced document.

    With ``passthrough=True``, item and attribute access is proxied to the
    fetched document (triggering a fetch on first use); otherwise such access
    raises ``KeyError`` / ``AttributeError``.
    """

    __slots__ = ("_cached_doc", "passthrough", "document_type")

    def fetch(self, force=False):
        """Fetch (and cache) the referenced document.

        :param force: when True, refetch even if a cached copy exists.
        :raises DoesNotExist: if the referenced document cannot be found.
        """
        if not self._cached_doc or force:
            self._cached_doc = self.document_type.objects.get(pk=self.pk)
            if not self._cached_doc:
                raise DoesNotExist("Trying to dereference unknown document %s" % (self))
        return self._cached_doc

    @property
    def pk(self):
        # Alias for the underlying DBRef id.
        return self.id

    def __init__(self, document_type, pk, cached_doc=None, passthrough=False):
        self.document_type = document_type
        self._cached_doc = cached_doc
        self.passthrough = passthrough
        # Initialize the DBRef with the referenced collection name and pk.
        super().__init__(self.document_type._get_collection_name(), pk)

    def __getitem__(self, name):
        if not self.passthrough:
            raise KeyError()
        document = self.fetch()
        return document[name]

    def __getattr__(self, name):
        # Read ``passthrough`` via object.__getattribute__ to avoid
        # re-entering __getattr__ recursively.
        if not object.__getattribute__(self, "passthrough"):
            raise AttributeError()
        document = self.fetch()
        try:
            return document[name]
        except KeyError:
            raise AttributeError()

    def __repr__(self):
        return "<LazyReference({}, {!r})>".format(self.document_type, self.pk)
							
								
								
									
										1198
									
								
								mongoengine/base/document.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1198
									
								
								mongoengine/base/document.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										682
									
								
								mongoengine/base/fields.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										682
									
								
								mongoengine/base/fields.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,682 @@ | |||||||
|  | import operator | ||||||
|  | import warnings | ||||||
|  | import weakref | ||||||
|  |  | ||||||
|  | from bson import DBRef, ObjectId, SON | ||||||
|  | import pymongo | ||||||
|  |  | ||||||
|  | from mongoengine.base.common import UPDATE_OPERATORS | ||||||
|  | from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList | ||||||
|  | from mongoengine.common import _import_class | ||||||
|  | from mongoengine.errors import DeprecatedError, ValidationError | ||||||
|  |  | ||||||
|  | __all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class BaseField: | ||||||
|  |     """A base class for fields in a MongoDB document. Instances of this class | ||||||
|  |     may be added to subclasses of `Document` to define a document's schema. | ||||||
|  |  | ||||||
|  |     .. versionchanged:: 0.5 - added verbose and help text | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     name = None | ||||||
|  |     _geo_index = False | ||||||
|  |     _auto_gen = False  # Call `generate` to generate a value | ||||||
|  |     _auto_dereference = True | ||||||
|  |  | ||||||
|  |     # These track each time a Field instance is created. Used to retain order. | ||||||
|  |     # The auto_creation_counter is used for fields that MongoEngine implicitly | ||||||
|  |     # creates, creation_counter is used for all user-specified fields. | ||||||
|  |     creation_counter = 0 | ||||||
|  |     auto_creation_counter = -1 | ||||||
|  |  | ||||||
|  |     def __init__( | ||||||
|  |         self, | ||||||
|  |         db_field=None, | ||||||
|  |         required=False, | ||||||
|  |         default=None, | ||||||
|  |         unique=False, | ||||||
|  |         unique_with=None, | ||||||
|  |         primary_key=False, | ||||||
|  |         validation=None, | ||||||
|  |         choices=None, | ||||||
|  |         null=False, | ||||||
|  |         sparse=False, | ||||||
|  |         **kwargs | ||||||
|  |     ): | ||||||
|  |         """ | ||||||
|  |         :param db_field: The database field to store this field in | ||||||
|  |             (defaults to the name of the field) | ||||||
|  |         :param required: If the field is required. Whether it has to have a | ||||||
|  |             value or not. Defaults to False. | ||||||
|  |         :param default: (optional) The default value for this field if no value | ||||||
|  |             has been set (or if the value has been unset).  It can be a | ||||||
|  |             callable. | ||||||
|  |         :param unique: Is the field value unique or not.  Defaults to False. | ||||||
|  |         :param unique_with: (optional) The other field this field should be | ||||||
|  |             unique with. | ||||||
|  |         :param primary_key: Mark this field as the primary key. Defaults to False. | ||||||
|  |         :param validation: (optional) A callable to validate the value of the | ||||||
|  |             field.  The callable takes the value as parameter and should raise | ||||||
|  |             a ValidationError if validation fails | ||||||
|  |         :param choices: (optional) The valid choices | ||||||
|  |         :param null: (optional) If the field value can be null. If no and there is a default value | ||||||
|  |             then the default value is set | ||||||
|  |         :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False` | ||||||
|  |             means that uniqueness won't be enforced for `None` values | ||||||
|  |         :param **kwargs: (optional) Arbitrary indirection-free metadata for | ||||||
|  |             this field can be supplied as additional keyword arguments and | ||||||
|  |             accessed as attributes of the field. Must not conflict with any | ||||||
|  |             existing attributes. Common metadata includes `verbose_name` and | ||||||
|  |             `help_text`. | ||||||
|  |         """ | ||||||
|  |         self.db_field = db_field if not primary_key else "_id" | ||||||
|  |  | ||||||
|  |         self.required = required or primary_key | ||||||
|  |         self.default = default | ||||||
|  |         self.unique = bool(unique or unique_with) | ||||||
|  |         self.unique_with = unique_with | ||||||
|  |         self.primary_key = primary_key | ||||||
|  |         self.validation = validation | ||||||
|  |         self.choices = choices | ||||||
|  |         self.null = null | ||||||
|  |         self.sparse = sparse | ||||||
|  |         self._owner_document = None | ||||||
|  |  | ||||||
|  |         # Make sure db_field is a string (if it's explicitly defined). | ||||||
|  |         if self.db_field is not None and not isinstance(self.db_field, str): | ||||||
|  |             raise TypeError("db_field should be a string.") | ||||||
|  |  | ||||||
|  |         # Make sure db_field doesn't contain any forbidden characters. | ||||||
|  |         if isinstance(self.db_field, str) and ( | ||||||
|  |             "." in self.db_field | ||||||
|  |             or "\0" in self.db_field | ||||||
|  |             or self.db_field.startswith("$") | ||||||
|  |         ): | ||||||
|  |             raise ValueError( | ||||||
|  |                 'field names cannot contain dots (".") or null characters ' | ||||||
|  |                 '("\\0"), and they must not start with a dollar sign ("$").' | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |         # Detect and report conflicts between metadata and base properties. | ||||||
|  |         conflicts = set(dir(self)) & set(kwargs) | ||||||
|  |         if conflicts: | ||||||
|  |             raise TypeError( | ||||||
|  |                 "%s already has attribute(s): %s" | ||||||
|  |                 % (self.__class__.__name__, ", ".join(conflicts)) | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |         # Assign metadata to the instance | ||||||
|  |         # This efficient method is available because no __slots__ are defined. | ||||||
|  |         self.__dict__.update(kwargs) | ||||||
|  |  | ||||||
|  |         # Adjust the appropriate creation counter, and save our local copy. | ||||||
|  |         if self.db_field == "_id": | ||||||
|  |             self.creation_counter = BaseField.auto_creation_counter | ||||||
|  |             BaseField.auto_creation_counter -= 1 | ||||||
|  |         else: | ||||||
|  |             self.creation_counter = BaseField.creation_counter | ||||||
|  |             BaseField.creation_counter += 1 | ||||||
|  |  | ||||||
|  |     def __get__(self, instance, owner): | ||||||
|  |         """Descriptor for retrieving a value from a field in a document. | ||||||
|  |         """ | ||||||
|  |         if instance is None: | ||||||
|  |             # Document class being used rather than a document object | ||||||
|  |             return self | ||||||
|  |  | ||||||
|  |         # Get value from document instance if available | ||||||
|  |         return instance._data.get(self.name) | ||||||
|  |  | ||||||
|  |     def __set__(self, instance, value): | ||||||
|  |         """Descriptor for assigning a value to a field in a document.""" | ||||||
|  |         # If setting to None and there is a default value provided for this | ||||||
|  |         # field, then set the value to the default value. | ||||||
|  |         if value is None: | ||||||
|  |             if self.null: | ||||||
|  |                 value = None | ||||||
|  |             elif self.default is not None: | ||||||
|  |                 value = self.default | ||||||
|  |                 if callable(value): | ||||||
|  |                     value = value() | ||||||
|  |  | ||||||
|  |         if instance._initialised: | ||||||
|  |             try: | ||||||
|  |                 value_has_changed = ( | ||||||
|  |                     self.name not in instance._data | ||||||
|  |                     or instance._data[self.name] != value | ||||||
|  |                 ) | ||||||
|  |                 if value_has_changed: | ||||||
|  |                     instance._mark_as_changed(self.name) | ||||||
|  |             except Exception: | ||||||
|  |                 # Some values can't be compared and throw an error when we | ||||||
|  |                 # attempt to do so (e.g. tz-naive and tz-aware datetimes). | ||||||
|  |                 # Mark the field as changed in such cases. | ||||||
|  |                 instance._mark_as_changed(self.name) | ||||||
|  |  | ||||||
|  |         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||||
|  |         if isinstance(value, EmbeddedDocument): | ||||||
|  |             value._instance = weakref.proxy(instance) | ||||||
|  |         elif isinstance(value, (list, tuple)): | ||||||
|  |             for v in value: | ||||||
|  |                 if isinstance(v, EmbeddedDocument): | ||||||
|  |                     v._instance = weakref.proxy(instance) | ||||||
|  |  | ||||||
|  |         instance._data[self.name] = value | ||||||
|  |  | ||||||
|  |     def error(self, message="", errors=None, field_name=None): | ||||||
|  |         """Raise a ValidationError.""" | ||||||
|  |         field_name = field_name if field_name else self.name | ||||||
|  |         raise ValidationError(message, errors=errors, field_name=field_name) | ||||||
|  |  | ||||||
|  |     def to_python(self, value): | ||||||
|  |         """Convert a MongoDB-compatible type to a Python type.""" | ||||||
|  |         return value | ||||||
|  |  | ||||||
|  |     def to_mongo(self, value): | ||||||
|  |         """Convert a Python type to a MongoDB-compatible type.""" | ||||||
|  |         return self.to_python(value) | ||||||
|  |  | ||||||
|  |     def _to_mongo_safe_call(self, value, use_db_field=True, fields=None): | ||||||
|  |         """Helper method to call to_mongo with proper inputs.""" | ||||||
|  |         f_inputs = self.to_mongo.__code__.co_varnames | ||||||
|  |         ex_vars = {} | ||||||
|  |         if "fields" in f_inputs: | ||||||
|  |             ex_vars["fields"] = fields | ||||||
|  |  | ||||||
|  |         if "use_db_field" in f_inputs: | ||||||
|  |             ex_vars["use_db_field"] = use_db_field | ||||||
|  |  | ||||||
|  |         return self.to_mongo(value, **ex_vars) | ||||||
|  |  | ||||||
|  |     def prepare_query_value(self, op, value): | ||||||
|  |         """Prepare a value that is being used in a query for PyMongo.""" | ||||||
|  |         if op in UPDATE_OPERATORS: | ||||||
|  |             self.validate(value) | ||||||
|  |         return value | ||||||
|  |  | ||||||
|  |     def validate(self, value, clean=True): | ||||||
|  |         """Perform validation on a value.""" | ||||||
|  |         pass | ||||||
|  |  | ||||||
|  |     def _validate_choices(self, value): | ||||||
|  |         Document = _import_class("Document") | ||||||
|  |         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||||
|  |  | ||||||
|  |         choice_list = self.choices | ||||||
|  |         if isinstance(next(iter(choice_list)), (list, tuple)): | ||||||
|  |             # next(iter) is useful for sets | ||||||
|  |             choice_list = [k for k, _ in choice_list] | ||||||
|  |  | ||||||
|  |         # Choices which are other types of Documents | ||||||
|  |         if isinstance(value, (Document, EmbeddedDocument)): | ||||||
|  |             if not any(isinstance(value, c) for c in choice_list): | ||||||
|  |                 self.error("Value must be an instance of %s" % (choice_list)) | ||||||
|  |         # Choices which are types other than Documents | ||||||
|  |         else: | ||||||
|  |             values = value if isinstance(value, (list, tuple)) else [value] | ||||||
|  |             if len(set(values) - set(choice_list)): | ||||||
|  |                 self.error("Value must be one of %s" % str(choice_list)) | ||||||
|  |  | ||||||
    def _validate(self, value, **kwargs):
        """Run the generic validation pipeline for this field.

        Order: the ``choices`` constraint, then the user-supplied
        ``validation`` callable, then the field-specific :meth:`validate`.
        """
        # Check the Choices Constraint
        if self.choices:
            self._validate_choices(value)

        # check validation argument
        if self.validation is not None:
            if callable(self.validation):
                try:
                    # breaking change of 0.18
                    # Get rid of True/False-type return for the validation method
                    # in favor of having validation raising a ValidationError
                    ret = self.validation(value)
                    if ret is not None:
                        # NOTE(review): raised inside the try block — this is
                        # only reported to the caller if DeprecatedError is
                        # not a ValidationError subclass; confirm hierarchy.
                        raise DeprecatedError(
                            "validation argument for `%s` must not return anything, "
                            "it should raise a ValidationError if validation fails"
                            % self.name
                        )
                except ValidationError as ex:
                    # Re-raise through self.error so the field name is attached.
                    self.error(str(ex))
            else:
                raise ValueError(
                    'validation argument for `"%s"` must be a ' "callable." % self.name
                )

        self.validate(value, **kwargs)
|  |  | ||||||
    @property
    def owner_document(self):
        """The Document class this field is attached to."""
        return self._owner_document
|  |  | ||||||
    def _set_owner_document(self, owner_document):
        # Hook overridden by subclasses (e.g. ComplexBaseField) to propagate
        # ownership to wrapped item fields.
        self._owner_document = owner_document
|  |  | ||||||
    @owner_document.setter
    def owner_document(self, owner_document):
        # Delegate through _set_owner_document so subclasses can customise
        # what happens when the owner is assigned.
        self._set_owner_document(owner_document)
|  |  | ||||||
|  |  | ||||||
class ComplexBaseField(BaseField):
    """Handles complex fields, such as lists / dictionaries.

    Allows for nesting of embedded documents inside complex types.
    Handles the lazy dereferencing of a queryset by lazily dereferencing all
    items in a list / dict rather than one at a time.

    .. versionadded:: 0.5
    """

    # The item field wrapped by this complex field (e.g. a ListField's item
    # type); None means items are stored untyped.
    field = None

    def __get__(self, instance, owner):
        """Descriptor to automatically dereference references."""
        if instance is None:
            # Document class being used rather than a document object
            return self

        ReferenceField = _import_class("ReferenceField")
        GenericReferenceField = _import_class("GenericReferenceField")
        EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField")

        auto_dereference = instance._fields[self.name]._auto_dereference

        # Only dereference untyped containers or containers of (generic)
        # references — other item types cannot hold DBRefs.
        dereference = auto_dereference and (
            self.field is None
            or isinstance(self.field, (GenericReferenceField, ReferenceField))
        )

        _dereference = _import_class("DeReference")()

        # First pass: dereference the raw stored container in place. Guarded
        # by the _dereferenced flag so it runs at most once per container.
        if (
            instance._initialised
            and dereference
            and instance._data.get(self.name)
            and not getattr(instance._data[self.name], "_dereferenced", False)
        ):
            instance._data[self.name] = _dereference(
                instance._data.get(self.name),
                max_depth=1,
                instance=instance,
                name=self.name,
            )
            if hasattr(instance._data[self.name], "_dereferenced"):
                instance._data[self.name]._dereferenced = True

        value = super().__get__(instance, owner)

        # Convert lists / values so we can watch for any changes on them
        if isinstance(value, (list, tuple)):
            if issubclass(type(self), EmbeddedDocumentListField) and not isinstance(
                value, EmbeddedDocumentList
            ):
                value = EmbeddedDocumentList(value, instance, self.name)
            elif not isinstance(value, BaseList):
                value = BaseList(value, instance, self.name)
            instance._data[self.name] = value
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            value = BaseDict(value, instance, self.name)
            instance._data[self.name] = value

        # Second pass: the wrapping above may have produced a fresh
        # BaseList/BaseDict whose items still contain DBRefs.
        if (
            auto_dereference
            and instance._initialised
            and isinstance(value, (BaseList, BaseDict))
            and not value._dereferenced
        ):
            value = _dereference(value, max_depth=1, instance=instance, name=self.name)
            value._dereferenced = True
            instance._data[self.name] = value

        return value

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type.

        Handles strings, documents, and arbitrary iterables/mappings by
        recursing into their items.
        """
        if isinstance(value, str):
            return value

        if hasattr(value, "to_python"):
            return value.to_python()

        BaseDocument = _import_class("BaseDocument")
        if isinstance(value, BaseDocument):
            # Something is wrong, return the value as it is
            return value

        # Normalise any iterable into a dict keyed by index so lists and
        # dicts share the conversion loop below.
        is_list = False
        if not hasattr(value, "items"):
            try:
                is_list = True
                value = {idx: v for idx, v in enumerate(value)}
            except TypeError:  # Not iterable return the value
                return value

        if self.field:
            # NOTE(review): this mutates the shared item-field instance's
            # _auto_dereference — confirm this is intended, as it leaks
            # state across documents using the same field object.
            self.field._auto_dereference = self._auto_dereference
            value_dict = {
                key: self.field.to_python(item) for key, item in value.items()
            }
        else:
            Document = _import_class("Document")
            value_dict = {}
            for k, v in value.items():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        self.error(
                            "You can only reference documents once they"
                            " have been saved to the database"
                        )
                    collection = v._get_collection_name()
                    value_dict[k] = DBRef(collection, v.pk)
                elif hasattr(v, "to_python"):
                    value_dict[k] = v.to_python()
                else:
                    # Recurse for nested containers.
                    value_dict[k] = self.to_python(v)

        if is_list:  # Convert back to a list
            return [
                v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0))
            ]
        return value_dict

    def to_mongo(self, value, use_db_field=True, fields=None):
        """Convert a Python type to a MongoDB-compatible type."""
        Document = _import_class("Document")
        EmbeddedDocument = _import_class("EmbeddedDocument")
        GenericReferenceField = _import_class("GenericReferenceField")

        if isinstance(value, str):
            return value

        if hasattr(value, "to_mongo"):
            if isinstance(value, Document):
                # Top-level documents are stored as generic references.
                return GenericReferenceField().to_mongo(value)
            cls = value.__class__
            val = value.to_mongo(use_db_field, fields)
            # If it's a document that is not inherited add _cls
            if isinstance(value, EmbeddedDocument):
                val["_cls"] = cls.__name__
            return val

        # Normalise any iterable into a dict keyed by index so lists and
        # dicts share the conversion loop below.
        is_list = False
        if not hasattr(value, "items"):
            try:
                is_list = True
                value = {k: v for k, v in enumerate(value)}
            except TypeError:  # Not iterable return the value
                return value

        if self.field:
            value_dict = {
                key: self.field._to_mongo_safe_call(item, use_db_field, fields)
                for key, item in value.items()
            }
        else:
            value_dict = {}
            for k, v in value.items():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        self.error(
                            "You can only reference documents once they"
                            " have been saved to the database"
                        )

                    # If its a document that is not inheritable it won't have
                    # any _cls data so make it a generic reference allows
                    # us to dereference
                    meta = getattr(v, "_meta", {})
                    allow_inheritance = meta.get("allow_inheritance")
                    # NOTE(review): `not self.field` is always True in this
                    # branch (we are in the `else` of `if self.field`).
                    if not allow_inheritance and not self.field:
                        value_dict[k] = GenericReferenceField().to_mongo(v)
                    else:
                        collection = v._get_collection_name()
                        value_dict[k] = DBRef(collection, v.pk)
                elif hasattr(v, "to_mongo"):
                    cls = v.__class__
                    val = v.to_mongo(use_db_field, fields)
                    # If it's a document that is not inherited add _cls
                    if isinstance(v, (Document, EmbeddedDocument)):
                        val["_cls"] = cls.__name__
                    value_dict[k] = val
                else:
                    # Recurse for nested containers.
                    value_dict[k] = self.to_mongo(v, use_db_field, fields)

        if is_list:  # Convert back to a list
            return [
                v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0))
            ]
        return value_dict

    def validate(self, value):
        """If field is provided ensure the value is valid."""
        errors = {}
        if self.field:
            # Validate each item, collecting per-key errors instead of
            # stopping at the first failure.
            if hasattr(value, "items"):
                sequence = value.items()
            else:
                sequence = enumerate(value)
            for k, v in sequence:
                try:
                    self.field._validate(v)
                except ValidationError as error:
                    errors[k] = error.errors or error
                except (ValueError, AssertionError) as error:
                    errors[k] = error

            if errors:
                field_class = self.field.__class__.__name__
                self.error(
                    "Invalid {} item ({})".format(field_class, value), errors=errors
                )
        # Don't allow empty values if required
        if self.required and not value:
            self.error("Field is required and cannot be empty")

    def prepare_query_value(self, op, value):
        """Convert the value to its MongoDB form for use in a query."""
        return self.to_mongo(value)

    def lookup_member(self, member_name):
        """Delegate member lookup to the wrapped item field, if any."""
        if self.field:
            return self.field.lookup_member(member_name)
        return None

    def _set_owner_document(self, owner_document):
        # Propagate ownership to the wrapped item field as well.
        if self.field:
            self.field.owner_document = owner_document
        self._owner_document = owner_document
|  |  | ||||||
|  |  | ||||||
class ObjectIdField(BaseField):
    """A field wrapper around MongoDB's ObjectIds."""

    def to_python(self, value):
        """Best-effort coercion to ObjectId; on failure, return the value
        unchanged."""
        if isinstance(value, ObjectId):
            return value
        try:
            return ObjectId(value)
        except Exception:
            return value

    def to_mongo(self, value):
        """Coerce to ObjectId for storage; raise a ValidationError when the
        value cannot be converted."""
        if isinstance(value, ObjectId):
            return value
        try:
            return ObjectId(str(value))
        except Exception as e:
            self.error(str(e))

    def prepare_query_value(self, op, value):
        # Queries compare against stored ObjectIds, so reuse the storage
        # conversion.
        return self.to_mongo(value)

    def validate(self, value):
        """Check that the value is convertible to an ObjectId."""
        try:
            ObjectId(str(value))
        except Exception:
            self.error("Invalid ObjectID")
|  |  | ||||||
|  |  | ||||||
class GeoJsonBaseField(BaseField):
    """A geo json field storing a geojson style object.

    Subclasses set ``_type`` to the GeoJSON geometry name; validation is
    dispatched to the matching ``_validate_<type>`` helper.

    .. versionadded:: 0.8
    """

    _geo_index = pymongo.GEOSPHERE
    _type = "GeoBase"

    def __init__(self, auto_index=True, *args, **kwargs):
        """
        :param bool auto_index: Automatically create a '2dsphere' index.\
            Defaults to `True`.
        """
        self._name = self._type + "Field"
        if not auto_index:
            # Shadow the class attribute on the instance to disable indexing.
            self._geo_index = False
        super().__init__(*args, **kwargs)

    def validate(self, value):
        """Validate the GeoJson object based on its type."""
        if isinstance(value, dict):
            if set(value.keys()) != {"type", "coordinates"}:
                self.error(
                    "%s can only accept a valid GeoJson dictionary"
                    " or lists of (x, y)" % self._name
                )
                return
            if value["type"] != self._type:
                self.error('{} type must be "{}"'.format(self._name, self._type))
            # Recurse on the bare coordinates payload.
            return self.validate(value["coordinates"])

        if not isinstance(value, (list, tuple)):
            self.error("%s can only accept lists of [x, y]" % self._name)
            return

        # Dispatch to the concrete validator for this geometry type.
        checker = getattr(self, "_validate_%s" % self._type.lower())
        message = checker(value)
        if message:
            self.error(message)

    def _validate_polygon(self, value, top_level=True):
        """Validate a polygon: a list of closed linestrings."""
        if not isinstance(value, (list, tuple)):
            return "Polygons must contain list of linestrings"

        # Quick and dirty structural probe: must nest three levels deep.
        try:
            value[0][0][0]
        except (TypeError, IndexError):
            return "Invalid Polygon must contain at least one valid linestring"

        problems = []
        for ring in value:
            problem = self._validate_linestring(ring, False)
            if not problem and ring[0] != ring[-1]:
                problem = "LineStrings must start and end at the same point"
            if problem and problem not in problems:
                problems.append(problem)
        if problems:
            joined = ", ".join(problems)
            return ("Invalid Polygon:\n%s" % joined) if top_level else joined

    def _validate_linestring(self, value, top_level=True):
        """Validate a linestring: a list of coordinate pairs."""
        if not isinstance(value, (list, tuple)):
            return "LineStrings must contain list of coordinate pairs"

        # Quick and dirty structural probe: must nest two levels deep.
        try:
            value[0][0]
        except (TypeError, IndexError):
            return "Invalid LineString must contain at least one valid point"

        problems = []
        for point in value:
            problem = self._validate_point(point)
            if problem and problem not in problems:
                problems.append(problem)
        if problems:
            joined = ", ".join(problems)
            return ("Invalid LineString:\n%s" % joined) if top_level else joined

    def _validate_point(self, value):
        """Validate each set of coords"""
        if not isinstance(value, (list, tuple)):
            return "Points must be a list of coordinate pairs"
        if len(value) != 2:
            return "Value (%s) must be a two-dimensional point" % repr(value)
        if not all(isinstance(coord, (float, int)) for coord in value):
            return "Both values (%s) in point must be float or int" % repr(value)

    def _validate_multipoint(self, value):
        """Validate a multipoint: a list of points."""
        if not isinstance(value, (list, tuple)):
            return "MultiPoint must be a list of Point"

        # Quick and dirty structural probe: must nest two levels deep.
        try:
            value[0][0]
        except (TypeError, IndexError):
            return "Invalid MultiPoint must contain at least one valid point"

        problems = []
        for point in value:
            problem = self._validate_point(point)
            if problem and problem not in problems:
                problems.append(problem)
        if problems:
            return ", ".join(problems)

    def _validate_multilinestring(self, value, top_level=True):
        """Validate a multilinestring: a list of linestrings."""
        if not isinstance(value, (list, tuple)):
            return "MultiLineString must be a list of LineString"

        # Quick and dirty structural probe: must nest three levels deep.
        try:
            value[0][0][0]
        except (TypeError, IndexError):
            return "Invalid MultiLineString must contain at least one valid linestring"

        problems = []
        for linestring in value:
            problem = self._validate_linestring(linestring, False)
            if problem and problem not in problems:
                problems.append(problem)
        if problems:
            joined = ", ".join(problems)
            return ("Invalid MultiLineString:\n%s" % joined) if top_level else joined

    def _validate_multipolygon(self, value):
        """Validate a multipolygon: a list of polygons."""
        if not isinstance(value, (list, tuple)):
            return "MultiPolygon must be a list of Polygon"

        # Quick and dirty structural probe: must nest four levels deep.
        try:
            value[0][0][0][0]
        except (TypeError, IndexError):
            return "Invalid MultiPolygon must contain at least one valid Polygon"

        problems = []
        for polygon in value:
            problem = self._validate_polygon(polygon, False)
            if problem and problem not in problems:
                problems.append(problem)
        if problems:
            return "Invalid MultiPolygon:\n%s" % ", ".join(problems)

    def to_mongo(self, value):
        """Wrap raw coordinates into a GeoJSON SON document; dicts are
        assumed to already be in GeoJSON form and pass through unchanged."""
        if isinstance(value, dict):
            return value
        return SON([("type", self._type), ("coordinates", value)])
							
								
								
									
										466
									
								
								mongoengine/base/metaclasses.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										466
									
								
								mongoengine/base/metaclasses.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,466 @@ | |||||||
|  | import itertools | ||||||
|  | import warnings | ||||||
|  |  | ||||||
|  | from mongoengine.base.common import _document_registry | ||||||
|  | from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField | ||||||
|  | from mongoengine.common import _import_class | ||||||
|  | from mongoengine.errors import InvalidDocumentError | ||||||
|  | from mongoengine.queryset import ( | ||||||
|  |     DO_NOTHING, | ||||||
|  |     DoesNotExist, | ||||||
|  |     MultipleObjectsReturned, | ||||||
|  |     QuerySetManager, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | __all__ = ("DocumentMetaclass", "TopLevelDocumentMetaclass") | ||||||
|  |  | ||||||
|  |  | ||||||
class DocumentMetaclass(type):
    """Metaclass for all documents.

    Responsibilities (in order, inside ``__new__``):

    * merge ``meta``/``_meta`` dicts inherited from the flattened bases;
    * collect every ``BaseField`` declared on the class or its mixins and
      build ``_fields``, ``_db_field_map``, ``_reverse_db_field_map`` and
      ``_fields_ordered``;
    * record the class hierarchy (``_class_name``, ``_superclasses``,
      ``_subclasses``) and register the class in ``_document_registry``;
    * wire up reverse-delete rules and ``CachedReferenceField`` listeners.
    """

    # TODO lower complexity of this method
    def __new__(mcs, name, bases, attrs):
        flattened_bases = mcs._get_bases(bases)
        super_new = super().__new__

        # If a base class just call super (abstract framework classes set
        # "my_metaclass" themselves; user subclasses do not).
        metaclass = attrs.get("my_metaclass")
        if metaclass and issubclass(metaclass, DocumentMetaclass):
            return super_new(mcs, name, bases, attrs)

        attrs["_is_document"] = attrs.get("_is_document", False)
        attrs["_cached_reference_fields"] = []

        # EmbeddedDocuments could have meta data for inheritance
        if "meta" in attrs:
            attrs["_meta"] = attrs.pop("meta")

        # EmbeddedDocuments should inherit meta data from their bases when
        # they don't declare any of their own.
        if "_meta" not in attrs:
            meta = MetaDict()
            for base in flattened_bases[::-1]:
                # Add any mixin metadata from plain objects
                if hasattr(base, "meta"):
                    meta.merge(base.meta)
                elif hasattr(base, "_meta"):
                    meta.merge(base._meta)
            attrs["_meta"] = meta
            attrs["_meta"][
                "abstract"
            ] = False  # issue #789: EmbeddedDocument shouldn't inherit abstract

        # If allow_inheritance is True, add a "_cls" string field to the attrs
        if attrs["_meta"].get("allow_inheritance"):
            StringField = _import_class("StringField")
            attrs["_cls"] = StringField()

        # Handle document Fields

        # Merge all fields from subclasses (reversed so nearer bases win)
        doc_fields = {}
        for base in flattened_bases[::-1]:
            if hasattr(base, "_fields"):
                doc_fields.update(base._fields)

            # Standard object mixin - merge in any Fields declared on a
            # plain (non-document) base class.
            if not hasattr(base, "_meta"):
                base_fields = {}
                for attr_name, attr_value in base.__dict__.items():
                    if not isinstance(attr_value, BaseField):
                        continue
                    attr_value.name = attr_name
                    if not attr_value.db_field:
                        attr_value.db_field = attr_name
                    base_fields[attr_name] = attr_value

                doc_fields.update(base_fields)

        # Discover any document fields declared on this class itself
        field_names = {}
        for attr_name, attr_value in attrs.items():
            if not isinstance(attr_value, BaseField):
                continue
            attr_value.name = attr_name
            if not attr_value.db_field:
                attr_value.db_field = attr_name
            doc_fields[attr_name] = attr_value

            # Count names to ensure no db_field redefinitions
            field_names[attr_value.db_field] = (
                field_names.get(attr_value.db_field, 0) + 1
            )

        # Ensure no duplicate db_fields
        duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
        if duplicate_db_fields:
            msg = "Multiple db_fields defined for: %s " % ", ".join(duplicate_db_fields)
            raise InvalidDocumentError(msg)

        # Set _fields and db_field maps
        attrs["_fields"] = doc_fields
        attrs["_db_field_map"] = {
            k: getattr(v, "db_field", k) for k, v in doc_fields.items()
        }
        attrs["_reverse_db_field_map"] = {
            v: k for k, v in attrs["_db_field_map"].items()
        }

        # Field order follows each field's creation_counter, i.e. the order
        # the fields were instantiated in the class body.
        attrs["_fields_ordered"] = tuple(
            i[1]
            for i in sorted((v.creation_counter, v.name) for v in doc_fields.values())
        )

        #
        # Set document hierarchy
        #
        superclasses = ()
        class_name = [name]
        for base in flattened_bases:
            if not getattr(base, "_is_base_cls", True) and not getattr(
                base, "_meta", {}
            ).get("abstract", True):
                # Collate hierarchy for _cls and _subclasses
                class_name.append(base.__name__)

            if hasattr(base, "_meta"):
                # Warn if allow_inheritance isn't set and prevent
                # inheritance of classes where inheritance is set to False
                allow_inheritance = base._meta.get("allow_inheritance")
                if not allow_inheritance and not base._meta.get("abstract"):
                    raise ValueError(
                        "Document %s may not be subclassed. "
                        'To enable inheritance, use the "allow_inheritance" meta attribute.'
                        % base.__name__
                    )

        # Get superclasses from last base superclass
        document_bases = [b for b in flattened_bases if hasattr(b, "_class_name")]
        if document_bases:
            superclasses = document_bases[0]._superclasses
            superclasses += (document_bases[0]._class_name,)

        # _cls is the dotted path of class names, outermost base first.
        _cls = ".".join(reversed(class_name))
        attrs["_class_name"] = _cls
        attrs["_superclasses"] = superclasses
        attrs["_subclasses"] = (_cls,)
        attrs["_types"] = attrs["_subclasses"]  # TODO deprecate _types

        # Create the new_class
        new_class = super_new(mcs, name, bases, attrs)

        # Propagate this class up into every document base's _subclasses.
        for base in document_bases:
            if _cls not in base._subclasses:
                base._subclasses += (_cls,)
            base._types = base._subclasses  # TODO deprecate _types

        (
            Document,
            EmbeddedDocument,
            DictField,
            CachedReferenceField,
        ) = mcs._import_classes()

        if issubclass(new_class, Document):
            new_class._collection = None

        # Add class to the _document_registry
        _document_registry[new_class._class_name] = new_class

        # Handle delete rules
        for field in new_class._fields.values():
            f = field
            if f.owner_document is None:
                f.owner_document = new_class
            delete_rule = getattr(f, "reverse_delete_rule", DO_NOTHING)
            if isinstance(f, CachedReferenceField):

                if issubclass(new_class, EmbeddedDocument):
                    raise InvalidDocumentError(
                        "CachedReferenceFields is not allowed in EmbeddedDocuments"
                    )

                if f.auto_sync:
                    f.start_listener()

                f.document_type._cached_reference_fields.append(f)

            if isinstance(f, ComplexBaseField) and hasattr(f, "field"):
                # For container fields the delete rule lives on the inner
                # field; DictField cannot carry one at all.
                delete_rule = getattr(f.field, "reverse_delete_rule", DO_NOTHING)
                if isinstance(f, DictField) and delete_rule != DO_NOTHING:
                    msg = (
                        "Reverse delete rules are not supported "
                        "for %s (field: %s)" % (field.__class__.__name__, field.name)
                    )
                    raise InvalidDocumentError(msg)

                f = field.field

            if delete_rule != DO_NOTHING:
                if issubclass(new_class, EmbeddedDocument):
                    msg = (
                        "Reverse delete rules are not supported for "
                        "EmbeddedDocuments (field: %s)" % field.name
                    )
                    raise InvalidDocumentError(msg)
                f.document_type.register_delete_rule(new_class, field.name, delete_rule)

            # Field names must not shadow Document's own attributes.
            if (
                field.name
                and hasattr(Document, field.name)
                and EmbeddedDocument not in new_class.mro()
            ):
                msg = "%s is a document method and not a valid field name" % field.name
                raise InvalidDocumentError(msg)

        return new_class

    @classmethod
    def _get_bases(mcs, bases):
        """Return *bases* flattened (full MRO-ish walk) and de-duplicated,
        wrapped in a :class:`BasesTuple` so the work is done only once."""
        if isinstance(bases, BasesTuple):
            return bases
        seen = []
        bases = mcs.__get_bases(bases)
        # seen.append(b) returns None (falsy), so each base is yielded on
        # first sight only, preserving encounter order.
        unique_bases = (b for b in bases if not (b in seen or seen.append(b)))
        return BasesTuple(unique_bases)

    @classmethod
    def __get_bases(mcs, bases):
        # Depth-first walk over all ancestors, skipping ``object``.
        for base in bases:
            if base is object:
                continue
            yield base
            yield from mcs.__get_bases(base.__bases__)

    @classmethod
    def _import_classes(mcs):
        """Lazily import the document/field classes needed by ``__new__``
        (avoids circular imports at module load time)."""
        Document = _import_class("Document")
        EmbeddedDocument = _import_class("EmbeddedDocument")
        DictField = _import_class("DictField")
        CachedReferenceField = _import_class("CachedReferenceField")
        return Document, EmbeddedDocument, DictField, CachedReferenceField
|  |  | ||||||
|  |  | ||||||
class TopLevelDocumentMetaclass(DocumentMetaclass):
    """Metaclass for top-level documents (i.e. documents that have their own
    collection in the database).

    Extends :class:`DocumentMetaclass` with collection naming, meta-dict
    merging across the hierarchy, index specs, the default ``objects``
    manager, primary-key handling and per-class ``DoesNotExist`` /
    ``MultipleObjectsReturned`` exception types.
    """

    def __new__(mcs, name, bases, attrs):
        flattened_bases = mcs._get_bases(bases)
        super_new = super().__new__

        # Set default _meta data if base class, otherwise get user defined meta
        if attrs.get("my_metaclass") == TopLevelDocumentMetaclass:
            # defaults
            attrs["_meta"] = {
                "abstract": True,
                "max_documents": None,
                "max_size": None,
                "ordering": [],  # default ordering applied at runtime
                "indexes": [],  # indexes to be ensured at runtime
                "id_field": None,
                "index_background": False,
                "index_opts": None,
                "delete_rules": None,
                # allow_inheritance can be True, False, and None. True means
                # "allow inheritance", False means "don't allow inheritance",
                # None means "do whatever your parent does, or don't allow
                # inheritance if you're a top-level class".
                "allow_inheritance": None,
            }
            attrs["_is_base_cls"] = True
            attrs["_meta"].update(attrs.get("meta", {}))
        else:
            attrs["_meta"] = attrs.get("meta", {})
            # Explicitly set abstract to false unless set
            attrs["_meta"]["abstract"] = attrs["_meta"].get("abstract", False)
            attrs["_is_base_cls"] = False

        # Set flag marking as document class - as opposed to an object mixin
        attrs["_is_document"] = True

        # Ensure queryset_class is inherited from a custom manager, if any
        if "objects" in attrs:
            manager = attrs["objects"]
            if hasattr(manager, "queryset_class"):
                attrs["_meta"]["queryset_class"] = manager.queryset_class

        # Clean up top level meta (it has been copied into _meta above)
        if "meta" in attrs:
            del attrs["meta"]

        # Find the parent document class
        parent_doc_cls = [
            b for b in flattened_bases if b.__class__ == TopLevelDocumentMetaclass
        ]
        parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0]

        # Prevent classes setting collection different to their parents
        # If parent wasn't an abstract class
        if (
            parent_doc_cls
            and "collection" in attrs.get("_meta", {})
            and not parent_doc_cls._meta.get("abstract", True)
        ):
            msg = "Trying to set a collection on a subclass (%s)" % name
            warnings.warn(msg, SyntaxWarning)
            del attrs["_meta"]["collection"]

        # Ensure abstract documents have abstract bases; abstract classes
        # short-circuit here and skip collection/index/pk handling entirely.
        if attrs.get("_is_base_cls") or attrs["_meta"].get("abstract"):
            if parent_doc_cls and not parent_doc_cls._meta.get("abstract", False):
                msg = "Abstract document cannot have non-abstract base"
                raise ValueError(msg)
            return super_new(mcs, name, bases, attrs)

        # Merge base class metas.
        # Uses a special MetaDict that handles various merging rules
        meta = MetaDict()
        for base in flattened_bases[::-1]:
            # Add any mixin metadata from plain objects
            if hasattr(base, "meta"):
                meta.merge(base.meta)
            elif hasattr(base, "_meta"):
                meta.merge(base._meta)

            # Set collection in the meta if its callable
            if getattr(base, "_is_document", False) and not base._meta.get("abstract"):
                collection = meta.get("collection", None)
                if callable(collection):
                    meta["collection"] = collection(base)

        meta.merge(attrs.get("_meta", {}))  # Top level meta wins last

        # Only simple classes (i.e. direct subclasses of Document) may set
        # allow_inheritance to False. If the base Document allows inheritance,
        # none of its subclasses can override allow_inheritance to False.
        simple_class = all(
            [b._meta.get("abstract") for b in flattened_bases if hasattr(b, "_meta")]
        )
        if (
            not simple_class
            and meta["allow_inheritance"] is False
            and not meta["abstract"]
        ):
            raise ValueError(
                "Only direct subclasses of Document may set "
                '"allow_inheritance" to False'
            )

        # Set default collection name: CamelCase class name -> snake_case
        if "collection" not in meta:
            meta["collection"] = (
                "".join("_%s" % c if c.isupper() else c for c in name)
                .strip("_")
                .lower()
            )
        attrs["_meta"] = meta

        # Call super and get the new class
        new_class = super_new(mcs, name, bases, attrs)

        meta = new_class._meta

        # Set index specifications
        meta["index_specs"] = new_class._build_index_specs(meta["indexes"])

        # If collection is a callable - call it and set the value
        collection = meta.get("collection")
        if callable(collection):
            new_class._meta["collection"] = collection(new_class)

        # Provide a default queryset unless exists or one has been set
        if "objects" not in dir(new_class):
            new_class.objects = QuerySetManager()

        # Validate the fields and set primary key if needed
        for field_name, field in new_class._fields.items():
            if field.primary_key:
                # Ensure only one primary key is set
                current_pk = new_class._meta.get("id_field")
                if current_pk and current_pk != field_name:
                    raise ValueError("Cannot override primary key field")

                # Set primary key
                if not current_pk:
                    new_class._meta["id_field"] = field_name
                    new_class.id = field

        # If the document doesn't explicitly define a primary key field, create
        # one. Make it an ObjectIdField and give it a non-clashing name ("id"
        # by default, but can be different if that one's taken).
        if not new_class._meta.get("id_field"):
            id_name, id_db_name = mcs.get_auto_id_names(new_class)
            new_class._meta["id_field"] = id_name
            new_class._fields[id_name] = ObjectIdField(db_field=id_db_name)
            new_class._fields[id_name].name = id_name
            new_class.id = new_class._fields[id_name]
            new_class._db_field_map[id_name] = id_db_name
            new_class._reverse_db_field_map[id_db_name] = id_name

            # Prepend the ID field to _fields_ordered (so that it's *always*
            # the first field).
            new_class._fields_ordered = (id_name,) + new_class._fields_ordered

        # Merge in exceptions with parent hierarchy.
        exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
        module = attrs.get("__module__")
        for exc in exceptions_to_merge:
            name = exc.__name__
            parents = tuple(
                getattr(base, name) for base in flattened_bases if hasattr(base, name)
            ) or (exc,)

            # Create a new exception and set it as an attribute on the new
            # class.
            exception = type(name, parents, {"__module__": module})
            setattr(new_class, name, exception)

        return new_class

    @classmethod
    def get_auto_id_names(mcs, new_class):
        """Find a name for the automatic ID field for the given new class.

        Return a two-element tuple where the first item is the field name (i.e.
        the attribute name on the object) and the second element is the DB
        field name (i.e. the name of the key stored in MongoDB).

        Defaults to ('id', '_id'), or generates a non-clashing name in the form
        of ('auto_id_X', '_auto_id_X') if the default name is already taken.
        """
        id_name, id_db_name = ("id", "_id")
        existing_fields = {field_name for field_name in new_class._fields}
        existing_db_fields = {v.db_field for v in new_class._fields.values()}
        if id_name not in existing_fields and id_db_name not in existing_db_fields:
            return id_name, id_db_name

        # Try auto_id_0, auto_id_1, ... until a free pair is found.
        id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0)
        for i in itertools.count():
            id_name = "{}_{}".format(id_basename, i)
            id_db_name = "{}_{}".format(id_db_basename, i)
            if id_name not in existing_fields and id_db_name not in existing_db_fields:
                return id_name, id_db_name
|  |  | ||||||
|  |  | ||||||
class MetaDict(dict):
    """Dictionary used for document ``meta`` options.

    ``merge`` overwrites most keys, but keys listed in ``_merge_options``
    (currently just ``indexes``) are concatenated so that index
    definitions accumulate across the class hierarchy.
    """

    _merge_options = ("indexes",)

    def merge(self, new_options):
        """Merge *new_options* into this dict in place."""
        for key, value in new_options.items():
            if key in self._merge_options:
                value = self.get(key, []) + value
            self[key] = value
|  |  | ||||||
|  |  | ||||||
class BasesTuple(tuple):
    """Marker tuple subclass flagging an already-flattened bases tuple.

    ``DocumentMetaclass._get_bases`` wraps its result in this type so the
    (recursive) flattening work is done at most once per class creation.
    """
							
								
								
									
										22
									
								
								mongoengine/base/utils.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										22
									
								
								mongoengine/base/utils.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,22 @@ | |||||||
|  | import re | ||||||
|  |  | ||||||
|  |  | ||||||
class LazyRegexCompiler:
    """Data descriptor that compiles a regular expression lazily.

    The pattern is only handed to ``re.compile`` the first time the
    attribute is read; the compiled object is then cached on the
    descriptor and shared by every instance of the owning class.
    """

    def __init__(self, pattern, flags=0):
        self._pattern = pattern
        self._flags = flags
        self._compiled_regex = None

    @property
    def compiled_regex(self):
        # Compile on first access and memoize.
        compiled = self._compiled_regex
        if compiled is None:
            compiled = re.compile(self._pattern, self._flags)
            self._compiled_regex = compiled
        return compiled

    def __get__(self, instance, owner):
        # Class-level and instance-level access both yield the compiled
        # pattern object.
        return self.compiled_regex

    def __set__(self, instance, value):
        # Defining __set__ makes this a data descriptor, preventing the
        # attribute from being shadowed on instances.
        raise AttributeError("Can not set attribute LazyRegexCompiler")
							
								
								
									
										64
									
								
								mongoengine/common.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										64
									
								
								mongoengine/common.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,64 @@ | |||||||
|  | _class_registry_cache = {} | ||||||
|  | _field_list_cache = [] | ||||||
|  |  | ||||||
|  |  | ||||||
def _import_class(cls_name):
    """Import and cache a mongoengine class by name.

    Mongoengine's circular-import constraints force many inline imports;
    this helper centralises them. The first request for a class imports
    its whole family (documents, fields or dereference helpers) into
    ``_class_registry_cache``; later requests are plain dict lookups.

    :param cls_name: name of the class to retrieve (e.g. ``"Document"``)
    :raises ValueError: if *cls_name* is not a known mongoengine class
    """
    # Fast path: already imported on a previous call.
    if cls_name in _class_registry_cache:
        return _class_registry_cache.get(cls_name)

    doc_classes = (
        "Document",
        "DynamicEmbeddedDocument",
        "EmbeddedDocument",
        "MapReduceDocument",
    )

    # Populate the field-name cache once, from both field modules.
    if not _field_list_cache:
        from mongoengine.fields import __all__ as fields

        _field_list_cache.extend(fields)
        from mongoengine.base.fields import __all__ as fields

        _field_list_cache.extend(fields)

    field_classes = _field_list_cache

    deref_classes = ("DeReference",)

    # Pick the module that owns cls_name, and the whole family of names
    # to cache alongside it.
    if cls_name == "BaseDocument":
        from mongoengine.base import document as module

        import_classes = ["BaseDocument"]
    elif cls_name in doc_classes:
        from mongoengine import document as module

        import_classes = doc_classes
    elif cls_name in field_classes:
        from mongoengine import fields as module

        import_classes = field_classes
    elif cls_name in deref_classes:
        from mongoengine import dereference as module

        import_classes = deref_classes
    else:
        raise ValueError("No import set for: %s" % cls_name)

    # Cache the whole family so siblings are free on later calls.
    for sibling in import_classes:
        _class_registry_cache[sibling] = getattr(module, sibling)

    return _class_registry_cache.get(cls_name)
| @@ -1,140 +1,377 @@ | |||||||
| import pymongo | from pymongo import MongoClient, ReadPreference, uri_parser | ||||||
| from pymongo import Connection, ReplicaSetConnection, uri_parser | from pymongo.database import _check_name | ||||||
|  |  | ||||||
|  | __all__ = [ | ||||||
|  |     "DEFAULT_CONNECTION_NAME", | ||||||
|  |     "DEFAULT_DATABASE_NAME", | ||||||
|  |     "ConnectionFailure", | ||||||
|  |     "connect", | ||||||
|  |     "disconnect", | ||||||
|  |     "disconnect_all", | ||||||
|  |     "get_connection", | ||||||
|  |     "get_db", | ||||||
|  |     "register_connection", | ||||||
|  | ] | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ['ConnectionError', 'connect', 'register_connection', | DEFAULT_CONNECTION_NAME = "default" | ||||||
|            'DEFAULT_CONNECTION_NAME'] | DEFAULT_DATABASE_NAME = "test" | ||||||
|  | DEFAULT_HOST = "localhost" | ||||||
|  | DEFAULT_PORT = 27017 | ||||||
| DEFAULT_CONNECTION_NAME = 'default' |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ConnectionError(Exception): |  | ||||||
|     pass |  | ||||||
|  |  | ||||||
|  |  | ||||||
| _connection_settings = {} | _connection_settings = {} | ||||||
| _connections = {} | _connections = {} | ||||||
| _dbs = {} | _dbs = {} | ||||||
|  |  | ||||||
|  | READ_PREFERENCE = ReadPreference.PRIMARY | ||||||
|  |  | ||||||
| def register_connection(alias, name, host='localhost', port=27017, |  | ||||||
|                         is_slave=False, read_preference=False, slaves=None, |  | ||||||
|                         username=None, password=None, **kwargs): |  | ||||||
|     """Add a connection. |  | ||||||
|  |  | ||||||
|     :param alias: the name that will be used to refer to this connection |  | ||||||
|         throughout MongoEngine |  | ||||||
|     :param name: the name of the specific database to use |  | ||||||
|     :param host: the host name of the :program:`mongod` instance to connect to |  | ||||||
|     :param port: the port that the :program:`mongod` instance is running on |  | ||||||
|     :param is_slave: whether the connection can act as a slave ** Depreciated pymongo 2.0.1+ |  | ||||||
|     :param read_preference: The read preference for the collection ** Added pymongo 2.1 |  | ||||||
|     :param slaves: a list of aliases of slave connections; each of these must |  | ||||||
|         be a registered connection that has :attr:`is_slave` set to ``True`` |  | ||||||
|     :param username: username to authenticate with |  | ||||||
|     :param password: password to authenticate with |  | ||||||
|     :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver |  | ||||||
|  |  | ||||||
class ConnectionFailure(Exception):
    """Error raised when the database connection can't be established or
    when a connection with a requested alias can't be retrieved.
    """
|     if "://" in host: |  | ||||||
|         uri_dict = uri_parser.parse_uri(host) |  | ||||||
|         if uri_dict.get('database') is None: |  | ||||||
|             raise ConnectionError("If using URI style connection include "\ |  | ||||||
|                                   "database name in string") |  | ||||||
|         _connection_settings[alias] = { |  | ||||||
|             'host': host, |  | ||||||
|             'name': uri_dict.get('database'), |  | ||||||
|             'username': uri_dict.get('username'), |  | ||||||
|             'password': uri_dict.get('password') |  | ||||||
|         } |  | ||||||
|         _connection_settings[alias].update(kwargs) |  | ||||||
|         return |  | ||||||
|  |  | ||||||
|     _connection_settings[alias] = { |  | ||||||
|         'name': name, | def _check_db_name(name): | ||||||
|         'host': host, |     """Check if a database name is valid. | ||||||
|         'port': port, |     This functionality is copied from pymongo Database class constructor. | ||||||
|         'is_slave': is_slave, |     """ | ||||||
|         'slaves': slaves or [], |     if not isinstance(name, str): | ||||||
|         'username': username, |         raise TypeError("name must be an instance of %s" % str) | ||||||
|         'password': password, |     elif name != "$external": | ||||||
|         'read_preference': read_preference |         _check_name(name) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def _get_connection_settings( | ||||||
|  |     db=None, | ||||||
|  |     name=None, | ||||||
|  |     host=None, | ||||||
|  |     port=None, | ||||||
|  |     read_preference=READ_PREFERENCE, | ||||||
|  |     username=None, | ||||||
|  |     password=None, | ||||||
|  |     authentication_source=None, | ||||||
|  |     authentication_mechanism=None, | ||||||
|  |     **kwargs | ||||||
|  | ): | ||||||
|  |     """Get the connection settings as a dict | ||||||
|  |  | ||||||
|  |     : param db: the name of the database to use, for compatibility with connect | ||||||
|  |     : param name: the name of the specific database to use | ||||||
|  |     : param host: the host name of the: program: `mongod` instance to connect to | ||||||
|  |     : param port: the port that the: program: `mongod` instance is running on | ||||||
|  |     : param read_preference: The read preference for the collection | ||||||
|  |     : param username: username to authenticate with | ||||||
|  |     : param password: password to authenticate with | ||||||
|  |     : param authentication_source: database to authenticate against | ||||||
|  |     : param authentication_mechanism: database authentication mechanisms. | ||||||
|  |         By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, | ||||||
|  |         MONGODB-CR (MongoDB Challenge Response protocol) for older servers. | ||||||
|  |     : param is_mock: explicitly use mongomock for this connection | ||||||
|  |         (can also be done by using `mongomock: // ` as db host prefix) | ||||||
|  |     : param kwargs: ad-hoc parameters to be passed into the pymongo driver, | ||||||
|  |         for example maxpoolsize, tz_aware, etc. See the documentation | ||||||
|  |         for pymongo's `MongoClient` for a full list. | ||||||
|  |  | ||||||
|  |     .. versionchanged:: 0.10.6 - added mongomock support | ||||||
|  |     """ | ||||||
|  |     conn_settings = { | ||||||
|  |         "name": name or db or DEFAULT_DATABASE_NAME, | ||||||
|  |         "host": host or DEFAULT_HOST, | ||||||
|  |         "port": port or DEFAULT_PORT, | ||||||
|  |         "read_preference": read_preference, | ||||||
|  |         "username": username, | ||||||
|  |         "password": password, | ||||||
|  |         "authentication_source": authentication_source, | ||||||
|  |         "authentication_mechanism": authentication_mechanism, | ||||||
|     } |     } | ||||||
|     _connection_settings[alias].update(kwargs) |  | ||||||
|  |     _check_db_name(conn_settings["name"]) | ||||||
|  |     conn_host = conn_settings["host"] | ||||||
|  |  | ||||||
|  |     # Host can be a list or a string, so if string, force to a list. | ||||||
|  |     if isinstance(conn_host, str): | ||||||
|  |         conn_host = [conn_host] | ||||||
|  |  | ||||||
|  |     resolved_hosts = [] | ||||||
|  |     for entity in conn_host: | ||||||
|  |  | ||||||
|  |         # Handle Mongomock | ||||||
|  |         if entity.startswith("mongomock://"): | ||||||
|  |             conn_settings["is_mock"] = True | ||||||
|  |             # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://` | ||||||
|  |             new_entity = entity.replace("mongomock://", "mongodb://", 1) | ||||||
|  |             resolved_hosts.append(new_entity) | ||||||
|  |  | ||||||
|  |             uri_dict = uri_parser.parse_uri(new_entity) | ||||||
|  |  | ||||||
|  |             database = uri_dict.get("database") | ||||||
|  |             if database: | ||||||
|  |                 conn_settings["name"] = database | ||||||
|  |  | ||||||
|  |         # Handle URI style connections, only updating connection params which | ||||||
|  |         # were explicitly specified in the URI. | ||||||
|  |         elif "://" in entity: | ||||||
|  |             uri_dict = uri_parser.parse_uri(entity) | ||||||
|  |             resolved_hosts.append(entity) | ||||||
|  |  | ||||||
|  |             database = uri_dict.get("database") | ||||||
|  |             if database: | ||||||
|  |                 conn_settings["name"] = database | ||||||
|  |  | ||||||
|  |             for param in ("read_preference", "username", "password"): | ||||||
|  |                 if uri_dict.get(param): | ||||||
|  |                     conn_settings[param] = uri_dict[param] | ||||||
|  |  | ||||||
|  |             uri_options = uri_dict["options"] | ||||||
|  |             if "replicaset" in uri_options: | ||||||
|  |                 conn_settings["replicaSet"] = uri_options["replicaset"] | ||||||
|  |             if "authsource" in uri_options: | ||||||
|  |                 conn_settings["authentication_source"] = uri_options["authsource"] | ||||||
|  |             if "authmechanism" in uri_options: | ||||||
|  |                 conn_settings["authentication_mechanism"] = uri_options["authmechanism"] | ||||||
|  |             if "readpreference" in uri_options: | ||||||
|  |                 read_preferences = ( | ||||||
|  |                     ReadPreference.NEAREST, | ||||||
|  |                     ReadPreference.PRIMARY, | ||||||
|  |                     ReadPreference.PRIMARY_PREFERRED, | ||||||
|  |                     ReadPreference.SECONDARY, | ||||||
|  |                     ReadPreference.SECONDARY_PREFERRED, | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |                 # Starting with PyMongo v3.5, the "readpreference" option is | ||||||
|  |                 # returned as a string (e.g. "secondaryPreferred") and not an | ||||||
|  |                 # int (e.g. 3). | ||||||
|  |                 # TODO simplify the code below once we drop support for | ||||||
|  |                 # PyMongo v3.4. | ||||||
|  |                 read_pf_mode = uri_options["readpreference"] | ||||||
|  |                 if isinstance(read_pf_mode, str): | ||||||
|  |                     read_pf_mode = read_pf_mode.lower() | ||||||
|  |                 for preference in read_preferences: | ||||||
|  |                     if ( | ||||||
|  |                         preference.name.lower() == read_pf_mode | ||||||
|  |                         or preference.mode == read_pf_mode | ||||||
|  |                     ): | ||||||
|  |                         conn_settings["read_preference"] = preference | ||||||
|  |                         break | ||||||
|  |         else: | ||||||
|  |             resolved_hosts.append(entity) | ||||||
|  |     conn_settings["host"] = resolved_hosts | ||||||
|  |  | ||||||
|  |     # Deprecated parameters that should not be passed on | ||||||
|  |     kwargs.pop("slaves", None) | ||||||
|  |     kwargs.pop("is_slave", None) | ||||||
|  |  | ||||||
|  |     conn_settings.update(kwargs) | ||||||
|  |     return conn_settings | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def register_connection( | ||||||
|  |     alias, | ||||||
|  |     db=None, | ||||||
|  |     name=None, | ||||||
|  |     host=None, | ||||||
|  |     port=None, | ||||||
|  |     read_preference=READ_PREFERENCE, | ||||||
|  |     username=None, | ||||||
|  |     password=None, | ||||||
|  |     authentication_source=None, | ||||||
|  |     authentication_mechanism=None, | ||||||
|  |     **kwargs | ||||||
|  | ): | ||||||
|  |     """Register the connection settings. | ||||||
|  |  | ||||||
|  |     : param alias: the name that will be used to refer to this connection | ||||||
|  |         throughout MongoEngine | ||||||
|  |     : param db: the name of the database to use, for compatibility with connect | ||||||
|  |     : param name: the name of the specific database to use | ||||||
|  |     : param host: the host name of the: program: `mongod` instance to connect to | ||||||
|  |     : param port: the port that the: program: `mongod` instance is running on | ||||||
|  |     : param read_preference: The read preference for the collection | ||||||
|  |     : param username: username to authenticate with | ||||||
|  |     : param password: password to authenticate with | ||||||
|  |     : param authentication_source: database to authenticate against | ||||||
|  |     : param authentication_mechanism: database authentication mechanisms. | ||||||
|  |         By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, | ||||||
|  |         MONGODB-CR (MongoDB Challenge Response protocol) for older servers. | ||||||
|  |     : param is_mock: explicitly use mongomock for this connection | ||||||
|  |         (can also be done by using `mongomock: // ` as db host prefix) | ||||||
|  |     : param kwargs: ad-hoc parameters to be passed into the pymongo driver, | ||||||
|  |         for example maxpoolsize, tz_aware, etc. See the documentation | ||||||
|  |         for pymongo's `MongoClient` for a full list. | ||||||
|  |  | ||||||
|  |     .. versionchanged:: 0.10.6 - added mongomock support | ||||||
|  |     """ | ||||||
|  |     conn_settings = _get_connection_settings( | ||||||
|  |         db=db, | ||||||
|  |         name=name, | ||||||
|  |         host=host, | ||||||
|  |         port=port, | ||||||
|  |         read_preference=read_preference, | ||||||
|  |         username=username, | ||||||
|  |         password=password, | ||||||
|  |         authentication_source=authentication_source, | ||||||
|  |         authentication_mechanism=authentication_mechanism, | ||||||
|  |         **kwargs | ||||||
|  |     ) | ||||||
|  |     _connection_settings[alias] = conn_settings | ||||||
|  |  | ||||||
|  |  | ||||||
| def disconnect(alias=DEFAULT_CONNECTION_NAME): | def disconnect(alias=DEFAULT_CONNECTION_NAME): | ||||||
|     global _connections |     """Close the connection with a given alias.""" | ||||||
|     global _dbs |     from mongoengine.base.common import _get_documents_by_db | ||||||
|  |     from mongoengine import Document | ||||||
|  |  | ||||||
|     if alias in _connections: |     if alias in _connections: | ||||||
|         get_connection(alias=alias).disconnect() |         get_connection(alias=alias).close() | ||||||
|         del _connections[alias] |         del _connections[alias] | ||||||
|  |  | ||||||
|     if alias in _dbs: |     if alias in _dbs: | ||||||
|  |         # Detach all cached collections in Documents | ||||||
|  |         for doc_cls in _get_documents_by_db(alias, DEFAULT_CONNECTION_NAME): | ||||||
|  |             if issubclass(doc_cls, Document):  # Skip EmbeddedDocument | ||||||
|  |                 doc_cls._disconnect() | ||||||
|  |  | ||||||
|         del _dbs[alias] |         del _dbs[alias] | ||||||
|  |  | ||||||
|  |     if alias in _connection_settings: | ||||||
|  |         del _connection_settings[alias] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def disconnect_all(): | ||||||
|  |     """Close all registered database.""" | ||||||
|  |     for alias in list(_connections.keys()): | ||||||
|  |         disconnect(alias) | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||||
|     global _connections |     """Return a connection with a given alias.""" | ||||||
|  |  | ||||||
|     # Connect to the database if not already connected |     # Connect to the database if not already connected | ||||||
|     if reconnect: |     if reconnect: | ||||||
|         disconnect(alias) |         disconnect(alias) | ||||||
|  |  | ||||||
|     if alias not in _connections: |     # If the requested alias already exists in the _connections list, return | ||||||
|  |     # it immediately. | ||||||
|  |     if alias in _connections: | ||||||
|  |         return _connections[alias] | ||||||
|  |  | ||||||
|  |     # Validate that the requested alias exists in the _connection_settings. | ||||||
|  |     # Raise ConnectionFailure if it doesn't. | ||||||
|     if alias not in _connection_settings: |     if alias not in _connection_settings: | ||||||
|             msg = 'Connection with alias "%s" has not been defined' |  | ||||||
|         if alias == DEFAULT_CONNECTION_NAME: |         if alias == DEFAULT_CONNECTION_NAME: | ||||||
|                 msg = 'You have not defined a default connection' |             msg = "You have not defined a default connection" | ||||||
|             raise ConnectionError(msg) |  | ||||||
|         conn_settings = _connection_settings[alias].copy() |  | ||||||
|  |  | ||||||
|         if hasattr(pymongo, 'version_tuple'):  # Support for 2.1+ |  | ||||||
|             conn_settings.pop('name', None) |  | ||||||
|             conn_settings.pop('slaves', None) |  | ||||||
|             conn_settings.pop('is_slave', None) |  | ||||||
|             conn_settings.pop('username', None) |  | ||||||
|             conn_settings.pop('password', None) |  | ||||||
|         else: |         else: | ||||||
|             # Get all the slave connections |             msg = 'Connection with alias "%s" has not been defined' % alias | ||||||
|             if 'slaves' in conn_settings: |         raise ConnectionFailure(msg) | ||||||
|                 slaves = [] |  | ||||||
|                 for slave_alias in conn_settings['slaves']: |  | ||||||
|                     slaves.append(get_connection(slave_alias)) |  | ||||||
|                 conn_settings['slaves'] = slaves |  | ||||||
|                 conn_settings.pop('read_preference') |  | ||||||
|  |  | ||||||
|         connection_class = Connection |     def _clean_settings(settings_dict): | ||||||
|         if 'replicaSet' in conn_settings: |         irrelevant_fields_set = { | ||||||
|             conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) |             "name", | ||||||
|             connection_class = ReplicaSetConnection |             "username", | ||||||
|  |             "password", | ||||||
|  |             "authentication_source", | ||||||
|  |             "authentication_mechanism", | ||||||
|  |         } | ||||||
|  |         return { | ||||||
|  |             k: v for k, v in settings_dict.items() if k not in irrelevant_fields_set | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |     raw_conn_settings = _connection_settings[alias].copy() | ||||||
|  |  | ||||||
|  |     # Retrieve a copy of the connection settings associated with the requested | ||||||
|  |     # alias and remove the database name and authentication info (we don't | ||||||
|  |     # care about them at this point). | ||||||
|  |     conn_settings = _clean_settings(raw_conn_settings) | ||||||
|  |  | ||||||
|  |     # Determine if we should use PyMongo's or mongomock's MongoClient. | ||||||
|  |     is_mock = conn_settings.pop("is_mock", False) | ||||||
|  |     if is_mock: | ||||||
|         try: |         try: | ||||||
|             _connections[alias] = connection_class(**conn_settings) |             import mongomock | ||||||
|         except Exception, e: |         except ImportError: | ||||||
|             raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e)) |             raise RuntimeError("You need mongomock installed to mock MongoEngine.") | ||||||
|  |         connection_class = mongomock.MongoClient | ||||||
|  |     else: | ||||||
|  |         connection_class = MongoClient | ||||||
|  |  | ||||||
|  |     # Re-use existing connection if one is suitable. | ||||||
|  |     existing_connection = _find_existing_connection(raw_conn_settings) | ||||||
|  |     if existing_connection: | ||||||
|  |         connection = existing_connection | ||||||
|  |     else: | ||||||
|  |         connection = _create_connection( | ||||||
|  |             alias=alias, connection_class=connection_class, **conn_settings | ||||||
|  |         ) | ||||||
|  |     _connections[alias] = connection | ||||||
|     return _connections[alias] |     return _connections[alias] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def _create_connection(alias, connection_class, **connection_settings): | ||||||
|  |     """ | ||||||
|  |     Create the new connection for this alias. Raise | ||||||
|  |     ConnectionFailure if it can't be established. | ||||||
|  |     """ | ||||||
|  |     try: | ||||||
|  |         return connection_class(**connection_settings) | ||||||
|  |     except Exception as e: | ||||||
|  |         raise ConnectionFailure("Cannot connect to database {} :\n{}".format(alias, e)) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def _find_existing_connection(connection_settings): | ||||||
|  |     """ | ||||||
|  |     Check if an existing connection could be reused | ||||||
|  |  | ||||||
|  |     Iterate over all of the connection settings and if an existing connection | ||||||
|  |     with the same parameters is suitable, return it | ||||||
|  |  | ||||||
|  |     :param connection_settings: the settings of the new connection | ||||||
|  |     :return: An existing connection or None | ||||||
|  |     """ | ||||||
|  |     connection_settings_bis = ( | ||||||
|  |         (db_alias, settings.copy()) | ||||||
|  |         for db_alias, settings in _connection_settings.items() | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     def _clean_settings(settings_dict): | ||||||
|  |         # Only remove the name but it's important to | ||||||
|  |         # keep the username/password/authentication_source/authentication_mechanism | ||||||
|  |         # to identify if the connection could be shared (cfr https://github.com/MongoEngine/mongoengine/issues/2047) | ||||||
|  |         return {k: v for k, v in settings_dict.items() if k != "name"} | ||||||
|  |  | ||||||
|  |     cleaned_conn_settings = _clean_settings(connection_settings) | ||||||
|  |     for db_alias, connection_settings in connection_settings_bis: | ||||||
|  |         db_conn_settings = _clean_settings(connection_settings) | ||||||
|  |         if cleaned_conn_settings == db_conn_settings and _connections.get(db_alias): | ||||||
|  |             return _connections[db_alias] | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||||
|     global _dbs |  | ||||||
|     if reconnect: |     if reconnect: | ||||||
|         disconnect(alias) |         disconnect(alias) | ||||||
|  |  | ||||||
|     if alias not in _dbs: |     if alias not in _dbs: | ||||||
|         conn = get_connection(alias) |         conn = get_connection(alias) | ||||||
|         conn_settings = _connection_settings[alias] |         conn_settings = _connection_settings[alias] | ||||||
|         _dbs[alias] = conn[conn_settings['name']] |         db = conn[conn_settings["name"]] | ||||||
|  |         auth_kwargs = {"source": conn_settings["authentication_source"]} | ||||||
|  |         if conn_settings["authentication_mechanism"] is not None: | ||||||
|  |             auth_kwargs["mechanism"] = conn_settings["authentication_mechanism"] | ||||||
|         # Authenticate if necessary |         # Authenticate if necessary | ||||||
|         if conn_settings['username'] and conn_settings['password']: |         if conn_settings["username"] and ( | ||||||
|             _dbs[alias].authenticate(conn_settings['username'], |             conn_settings["password"] | ||||||
|                                      conn_settings['password']) |             or conn_settings["authentication_mechanism"] == "MONGODB-X509" | ||||||
|  |         ): | ||||||
|  |             db.authenticate( | ||||||
|  |                 conn_settings["username"], conn_settings["password"], **auth_kwargs | ||||||
|  |             ) | ||||||
|  |         _dbs[alias] = db | ||||||
|     return _dbs[alias] |     return _dbs[alias] | ||||||
|  |  | ||||||
|  |  | ||||||
| def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs): | def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): | ||||||
|     """Connect to the database specified by the 'db' argument. |     """Connect to the database specified by the 'db' argument. | ||||||
|  |  | ||||||
|     Connection settings may be provided here as well if the database is not |     Connection settings may be provided here as well if the database is not | ||||||
| @@ -142,16 +379,32 @@ def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs): | |||||||
|     provide username and password arguments as well. |     provide username and password arguments as well. | ||||||
|  |  | ||||||
|     Multiple databases are supported by using aliases. Provide a separate |     Multiple databases are supported by using aliases. Provide a separate | ||||||
|     `alias` to connect to a different instance of :program:`mongod`. |     `alias` to connect to a different instance of: program: `mongod`. | ||||||
|  |  | ||||||
|  |     In order to replace a connection identified by a given alias, you'll | ||||||
|  |     need to call ``disconnect`` first | ||||||
|  |  | ||||||
|  |     See the docstring for `register_connection` for more details about all | ||||||
|  |     supported kwargs. | ||||||
|  |  | ||||||
|     .. versionchanged:: 0.6 - added multiple database support. |     .. versionchanged:: 0.6 - added multiple database support. | ||||||
|     """ |     """ | ||||||
|     global _connections |     if alias in _connections: | ||||||
|     if alias not in _connections: |         prev_conn_setting = _connection_settings[alias] | ||||||
|  |         new_conn_settings = _get_connection_settings(db, **kwargs) | ||||||
|  |  | ||||||
|  |         if new_conn_settings != prev_conn_setting: | ||||||
|  |             err_msg = ( | ||||||
|  |                 "A different connection with alias `{}` was already " | ||||||
|  |                 "registered. Use disconnect() first" | ||||||
|  |             ).format(alias) | ||||||
|  |             raise ConnectionFailure(err_msg) | ||||||
|  |     else: | ||||||
|         register_connection(alias, db, **kwargs) |         register_connection(alias, db, **kwargs) | ||||||
|  |  | ||||||
|     return get_connection(alias) |     return get_connection(alias) | ||||||
|  |  | ||||||
|  |  | ||||||
| # Support old naming convention | # Support old naming convention | ||||||
| _get_connection = get_connection | _get_connection = get_connection | ||||||
| _get_db = get_db | _get_db = get_db | ||||||
|   | |||||||
							
								
								
									
										278
									
								
								mongoengine/context_managers.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										278
									
								
								mongoengine/context_managers.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,278 @@ | |||||||
|  | from contextlib import contextmanager | ||||||
|  |  | ||||||
|  | from pymongo.read_concern import ReadConcern | ||||||
|  | from pymongo.write_concern import WriteConcern | ||||||
|  |  | ||||||
|  | from mongoengine.common import _import_class | ||||||
|  | from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||||
|  | from mongoengine.pymongo_support import count_documents | ||||||
|  |  | ||||||
|  | __all__ = ( | ||||||
|  |     "switch_db", | ||||||
|  |     "switch_collection", | ||||||
|  |     "no_dereference", | ||||||
|  |     "no_sub_classes", | ||||||
|  |     "query_counter", | ||||||
|  |     "set_write_concern", | ||||||
|  |     "set_read_write_concern", | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class switch_db: | ||||||
|  |     """switch_db alias context manager. | ||||||
|  |  | ||||||
|  |     Example :: | ||||||
|  |  | ||||||
|  |         # Register connections | ||||||
|  |         register_connection('default', 'mongoenginetest') | ||||||
|  |         register_connection('testdb-1', 'mongoenginetest2') | ||||||
|  |  | ||||||
|  |         class Group(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         Group(name='test').save()  # Saves in the default db | ||||||
|  |  | ||||||
|  |         with switch_db(Group, 'testdb-1') as Group: | ||||||
|  |             Group(name='hello testdb!').save()  # Saves in testdb-1 | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     def __init__(self, cls, db_alias): | ||||||
|  |         """Construct the switch_db context manager | ||||||
|  |  | ||||||
|  |         :param cls: the class to change the registered db | ||||||
|  |         :param db_alias: the name of the specific database to use | ||||||
|  |         """ | ||||||
|  |         self.cls = cls | ||||||
|  |         self.collection = cls._get_collection() | ||||||
|  |         self.db_alias = db_alias | ||||||
|  |         self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME) | ||||||
|  |  | ||||||
|  |     def __enter__(self): | ||||||
|  |         """Change the db_alias and clear the cached collection.""" | ||||||
|  |         self.cls._meta["db_alias"] = self.db_alias | ||||||
|  |         self.cls._collection = None | ||||||
|  |         return self.cls | ||||||
|  |  | ||||||
|  |     def __exit__(self, t, value, traceback): | ||||||
|  |         """Reset the db_alias and collection.""" | ||||||
|  |         self.cls._meta["db_alias"] = self.ori_db_alias | ||||||
|  |         self.cls._collection = self.collection | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class switch_collection: | ||||||
|  |     """switch_collection alias context manager. | ||||||
|  |  | ||||||
|  |     Example :: | ||||||
|  |  | ||||||
|  |         class Group(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         Group(name='test').save()  # Saves in the default db | ||||||
|  |  | ||||||
|  |         with switch_collection(Group, 'group1') as Group: | ||||||
|  |             Group(name='hello testdb!').save()  # Saves in group1 collection | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     def __init__(self, cls, collection_name): | ||||||
|  |         """Construct the switch_collection context manager. | ||||||
|  |  | ||||||
|  |         :param cls: the class to change the registered db | ||||||
|  |         :param collection_name: the name of the collection to use | ||||||
|  |         """ | ||||||
|  |         self.cls = cls | ||||||
|  |         self.ori_collection = cls._get_collection() | ||||||
|  |         self.ori_get_collection_name = cls._get_collection_name | ||||||
|  |         self.collection_name = collection_name | ||||||
|  |  | ||||||
|  |     def __enter__(self): | ||||||
|  |         """Change the _get_collection_name and clear the cached collection.""" | ||||||
|  |  | ||||||
|  |         @classmethod | ||||||
|  |         def _get_collection_name(cls): | ||||||
|  |             return self.collection_name | ||||||
|  |  | ||||||
|  |         self.cls._get_collection_name = _get_collection_name | ||||||
|  |         self.cls._collection = None | ||||||
|  |         return self.cls | ||||||
|  |  | ||||||
|  |     def __exit__(self, t, value, traceback): | ||||||
|  |         """Reset the collection.""" | ||||||
|  |         self.cls._collection = self.ori_collection | ||||||
|  |         self.cls._get_collection_name = self.ori_get_collection_name | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class no_dereference: | ||||||
|  |     """no_dereference context manager. | ||||||
|  |  | ||||||
|  |     Turns off all dereferencing in Documents for the duration of the context | ||||||
|  |     manager:: | ||||||
|  |  | ||||||
|  |         with no_dereference(Group) as Group: | ||||||
|  |             Group.objects.find() | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     def __init__(self, cls): | ||||||
|  |         """Construct the no_dereference context manager. | ||||||
|  |  | ||||||
|  |         :param cls: the class to turn dereferencing off on | ||||||
|  |         """ | ||||||
|  |         self.cls = cls | ||||||
|  |  | ||||||
|  |         ReferenceField = _import_class("ReferenceField") | ||||||
|  |         GenericReferenceField = _import_class("GenericReferenceField") | ||||||
|  |         ComplexBaseField = _import_class("ComplexBaseField") | ||||||
|  |  | ||||||
|  |         self.deref_fields = [ | ||||||
|  |             k | ||||||
|  |             for k, v in self.cls._fields.items() | ||||||
|  |             if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField)) | ||||||
|  |         ] | ||||||
|  |  | ||||||
|  |     def __enter__(self): | ||||||
|  |         """Change the objects default and _auto_dereference values.""" | ||||||
|  |         for field in self.deref_fields: | ||||||
|  |             self.cls._fields[field]._auto_dereference = False | ||||||
|  |         return self.cls | ||||||
|  |  | ||||||
|  |     def __exit__(self, t, value, traceback): | ||||||
|  |         """Reset the default and _auto_dereference values.""" | ||||||
|  |         for field in self.deref_fields: | ||||||
|  |             self.cls._fields[field]._auto_dereference = True | ||||||
|  |         return self.cls | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class no_sub_classes: | ||||||
|  |     """no_sub_classes context manager. | ||||||
|  |  | ||||||
|  |     Only returns instances of this class and no sub (inherited) classes:: | ||||||
|  |  | ||||||
|  |         with no_sub_classes(Group) as Group: | ||||||
|  |             Group.objects.find() | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     def __init__(self, cls): | ||||||
|  |         """Construct the no_sub_classes context manager. | ||||||
|  |  | ||||||
|  |         :param cls: the class to turn querying sub classes on | ||||||
|  |         """ | ||||||
|  |         self.cls = cls | ||||||
|  |         self.cls_initial_subclasses = None | ||||||
|  |  | ||||||
|  |     def __enter__(self): | ||||||
|  |         """Change the objects default and _auto_dereference values.""" | ||||||
|  |         self.cls_initial_subclasses = self.cls._subclasses | ||||||
|  |         self.cls._subclasses = (self.cls._class_name,) | ||||||
|  |         return self.cls | ||||||
|  |  | ||||||
|  |     def __exit__(self, t, value, traceback): | ||||||
|  |         """Reset the default and _auto_dereference values.""" | ||||||
|  |         self.cls._subclasses = self.cls_initial_subclasses | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class query_counter: | ||||||
|  |     """Query_counter context manager to get the number of queries. | ||||||
|  |     This works by updating the `profiling_level` of the database so that all queries get logged, | ||||||
|  |     resetting the db.system.profile collection at the beginning of the context and counting the new entries. | ||||||
|  |  | ||||||
|  |     This was designed for debugging purpose. In fact it is a global counter so queries issued by other threads/processes | ||||||
|  |     can interfere with it | ||||||
|  |  | ||||||
|  |     Be aware that: | ||||||
|  |     - Iterating over large amount of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of | ||||||
|  |         documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches) | ||||||
|  |     - Some queries are ignored by default by the counter (killcursors, db.system.indexes) | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     def __init__(self, alias=DEFAULT_CONNECTION_NAME): | ||||||
|  |         """Construct the query_counter | ||||||
|  |         """ | ||||||
|  |         self.db = get_db(alias=alias) | ||||||
|  |         self.initial_profiling_level = None | ||||||
|  |         self._ctx_query_counter = 0  # number of queries issued by the context | ||||||
|  |  | ||||||
|  |         self._ignored_query = { | ||||||
|  |             "ns": {"$ne": "%s.system.indexes" % self.db.name}, | ||||||
|  |             "op": {"$ne": "killcursors"},  # MONGODB < 3.2 | ||||||
|  |             "command.killCursors": {"$exists": False},  # MONGODB >= 3.2 | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |     def _turn_on_profiling(self): | ||||||
|  |         self.initial_profiling_level = self.db.profiling_level() | ||||||
|  |         self.db.set_profiling_level(0) | ||||||
|  |         self.db.system.profile.drop() | ||||||
|  |         self.db.set_profiling_level(2) | ||||||
|  |  | ||||||
|  |     def _resets_profiling(self): | ||||||
|  |         self.db.set_profiling_level(self.initial_profiling_level) | ||||||
|  |  | ||||||
|  |     def __enter__(self): | ||||||
|  |         self._turn_on_profiling() | ||||||
|  |         return self | ||||||
|  |  | ||||||
|  |     def __exit__(self, t, value, traceback): | ||||||
|  |         self._resets_profiling() | ||||||
|  |  | ||||||
|  |     def __eq__(self, value): | ||||||
|  |         counter = self._get_count() | ||||||
|  |         return value == counter | ||||||
|  |  | ||||||
|  |     def __ne__(self, value): | ||||||
|  |         return not self.__eq__(value) | ||||||
|  |  | ||||||
|  |     def __lt__(self, value): | ||||||
|  |         return self._get_count() < value | ||||||
|  |  | ||||||
|  |     def __le__(self, value): | ||||||
|  |         return self._get_count() <= value | ||||||
|  |  | ||||||
|  |     def __gt__(self, value): | ||||||
|  |         return self._get_count() > value | ||||||
|  |  | ||||||
|  |     def __ge__(self, value): | ||||||
|  |         return self._get_count() >= value | ||||||
|  |  | ||||||
|  |     def __int__(self): | ||||||
|  |         return self._get_count() | ||||||
|  |  | ||||||
|  |     def __repr__(self): | ||||||
|  |         """repr query_counter as the number of queries.""" | ||||||
|  |         return "%s" % self._get_count() | ||||||
|  |  | ||||||
|  |     def _get_count(self): | ||||||
|  |         """Get the number of queries by counting the current number of entries in db.system.profile | ||||||
|  |         and substracting the queries issued by this context. In fact everytime this is called, 1 query is | ||||||
|  |         issued so we need to balance that | ||||||
|  |         """ | ||||||
|  |         count = ( | ||||||
|  |             count_documents(self.db.system.profile, self._ignored_query) | ||||||
|  |             - self._ctx_query_counter | ||||||
|  |         ) | ||||||
|  |         self._ctx_query_counter += ( | ||||||
|  |             1  # Account for the query we just issued to gather the information | ||||||
|  |         ) | ||||||
|  |         return count | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @contextmanager | ||||||
|  | def set_write_concern(collection, write_concerns): | ||||||
|  |     combined_concerns = dict(collection.write_concern.document.items()) | ||||||
|  |     combined_concerns.update(write_concerns) | ||||||
|  |     yield collection.with_options(write_concern=WriteConcern(**combined_concerns)) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @contextmanager | ||||||
|  | def set_read_write_concern(collection, write_concerns, read_concerns): | ||||||
|  |     combined_write_concerns = dict(collection.write_concern.document.items()) | ||||||
|  |  | ||||||
|  |     if write_concerns is not None: | ||||||
|  |         combined_write_concerns.update(write_concerns) | ||||||
|  |  | ||||||
|  |     combined_read_concerns = dict(collection.read_concern.document.items()) | ||||||
|  |  | ||||||
|  |     if read_concerns is not None: | ||||||
|  |         combined_read_concerns.update(read_concerns) | ||||||
|  |  | ||||||
|  |     yield collection.with_options( | ||||||
|  |         write_concern=WriteConcern(**combined_write_concerns), | ||||||
|  |         read_concern=ReadConcern(**combined_read_concerns), | ||||||
|  |     ) | ||||||
| @@ -1,18 +1,24 @@ | |||||||
| from bson import DBRef, SON | from bson import DBRef, SON | ||||||
|  |  | ||||||
| from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document) | from mongoengine.base import ( | ||||||
| from fields import (ReferenceField, ListField, DictField, MapField) |     BaseDict, | ||||||
| from connection import get_db |     BaseList, | ||||||
| from queryset import QuerySet |     EmbeddedDocumentList, | ||||||
| from document import Document |     TopLevelDocumentMetaclass, | ||||||
|  |     get_document, | ||||||
|  | ) | ||||||
|  | from mongoengine.base.datastructures import LazyReference | ||||||
|  | from mongoengine.connection import get_db | ||||||
|  | from mongoengine.document import Document, EmbeddedDocument | ||||||
|  | from mongoengine.fields import DictField, ListField, MapField, ReferenceField | ||||||
|  | from mongoengine.queryset import QuerySet | ||||||
|  |  | ||||||
|  |  | ||||||
| class DeReference(object): | class DeReference: | ||||||
|  |  | ||||||
|     def __call__(self, items, max_depth=1, instance=None, name=None): |     def __call__(self, items, max_depth=1, instance=None, name=None): | ||||||
|         """ |         """ | ||||||
|         Cheaply dereferences the items to a set depth. |         Cheaply dereferences the items to a set depth. | ||||||
|         Also handles the convertion of complex data types. |         Also handles the conversion of complex data types. | ||||||
|  |  | ||||||
|         :param items: The iterable (dict, list, queryset) to be dereferenced. |         :param items: The iterable (dict, list, queryset) to be dereferenced. | ||||||
|         :param max_depth: The maximum depth to recurse to |         :param max_depth: The maximum depth to recurse to | ||||||
| @@ -22,7 +28,7 @@ class DeReference(object): | |||||||
|             :class:`~mongoengine.base.ComplexBaseField` |             :class:`~mongoengine.base.ComplexBaseField` | ||||||
|         :param get: A boolean determining if being called by __get__ |         :param get: A boolean determining if being called by __get__ | ||||||
|         """ |         """ | ||||||
|         if items is None or isinstance(items, basestring): |         if items is None or isinstance(items, str): | ||||||
|             return items |             return items | ||||||
|  |  | ||||||
|         # cheapest way to convert a queryset to a list |         # cheapest way to convert a queryset to a list | ||||||
| @@ -31,15 +37,61 @@ class DeReference(object): | |||||||
|             items = [i for i in items] |             items = [i for i in items] | ||||||
|  |  | ||||||
|         self.max_depth = max_depth |         self.max_depth = max_depth | ||||||
|  |  | ||||||
|         doc_type = None |         doc_type = None | ||||||
|         if instance and instance._fields: |  | ||||||
|             doc_type = instance._fields[name].field |         if instance and isinstance( | ||||||
|  |             instance, (Document, EmbeddedDocument, TopLevelDocumentMetaclass) | ||||||
|  |         ): | ||||||
|  |             doc_type = instance._fields.get(name) | ||||||
|  |             while hasattr(doc_type, "field"): | ||||||
|  |                 doc_type = doc_type.field | ||||||
|  |  | ||||||
|             if isinstance(doc_type, ReferenceField): |             if isinstance(doc_type, ReferenceField): | ||||||
|  |                 field = doc_type | ||||||
|                 doc_type = doc_type.document_type |                 doc_type = doc_type.document_type | ||||||
|                 if all([i.__class__ == doc_type for i in items]): |                 is_list = not hasattr(items, "items") | ||||||
|  |  | ||||||
|  |                 if is_list and all([i.__class__ == doc_type for i in items]): | ||||||
|                     return items |                     return items | ||||||
|  |                 elif not is_list and all( | ||||||
|  |                     [i.__class__ == doc_type for i in items.values()] | ||||||
|  |                 ): | ||||||
|  |                     return items | ||||||
|  |                 elif not field.dbref: | ||||||
|  |                     # We must turn the ObjectIds into DBRefs | ||||||
|  |  | ||||||
|  |                     # Recursively dig into the sub items of a list/dict | ||||||
|  |                     # to turn the ObjectIds into DBRefs | ||||||
|  |                     def _get_items_from_list(items): | ||||||
|  |                         new_items = [] | ||||||
|  |                         for v in items: | ||||||
|  |                             value = v | ||||||
|  |                             if isinstance(v, dict): | ||||||
|  |                                 value = _get_items_from_dict(v) | ||||||
|  |                             elif isinstance(v, list): | ||||||
|  |                                 value = _get_items_from_list(v) | ||||||
|  |                             elif not isinstance(v, (DBRef, Document)): | ||||||
|  |                                 value = field.to_python(v) | ||||||
|  |                             new_items.append(value) | ||||||
|  |                         return new_items | ||||||
|  |  | ||||||
|  |                     def _get_items_from_dict(items): | ||||||
|  |                         new_items = {} | ||||||
|  |                         for k, v in items.items(): | ||||||
|  |                             value = v | ||||||
|  |                             if isinstance(v, list): | ||||||
|  |                                 value = _get_items_from_list(v) | ||||||
|  |                             elif isinstance(v, dict): | ||||||
|  |                                 value = _get_items_from_dict(v) | ||||||
|  |                             elif not isinstance(v, (DBRef, Document)): | ||||||
|  |                                 value = field.to_python(v) | ||||||
|  |                             new_items[k] = value | ||||||
|  |                         return new_items | ||||||
|  |  | ||||||
|  |                     if not hasattr(items, "items"): | ||||||
|  |                         items = _get_items_from_list(items) | ||||||
|  |                     else: | ||||||
|  |                         items = _get_items_from_dict(items) | ||||||
|  |  | ||||||
|         self.reference_map = self._find_references(items) |         self.reference_map = self._find_references(items) | ||||||
|         self.object_map = self._fetch_objects(doc_type=doc_type) |         self.object_map = self._fetch_objects(doc_type=doc_type) | ||||||
| @@ -57,36 +109,50 @@ class DeReference(object): | |||||||
|             return reference_map |             return reference_map | ||||||
|  |  | ||||||
|         # Determine the iterator to use |         # Determine the iterator to use | ||||||
|         if not hasattr(items, 'items'): |         if isinstance(items, dict): | ||||||
|             iterator = enumerate(items) |             iterator = items.values() | ||||||
|         else: |         else: | ||||||
|             iterator = items.iteritems() |             iterator = items | ||||||
|  |  | ||||||
|         # Recursively find dbreferences |         # Recursively find dbreferences | ||||||
|         depth += 1 |         depth += 1 | ||||||
|         for k, item in iterator: |         for item in iterator: | ||||||
|             if hasattr(item, '_fields'): |             if isinstance(item, (Document, EmbeddedDocument)): | ||||||
|                 for field_name, field in item._fields.iteritems(): |                 for field_name, field in item._fields.items(): | ||||||
|                     v = item._data.get(field_name, None) |                     v = item._data.get(field_name, None) | ||||||
|                     if isinstance(v, (DBRef)): |                     if isinstance(v, LazyReference): | ||||||
|                         reference_map.setdefault(field.document_type, []).append(v.id) |                         # LazyReference inherits DBRef but should not be dereferenced here ! | ||||||
|                     elif isinstance(v, (dict, SON)) and '_ref' in v: |                         continue | ||||||
|                         reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id) |                     elif isinstance(v, DBRef): | ||||||
|  |                         reference_map.setdefault(field.document_type, set()).add(v.id) | ||||||
|  |                     elif isinstance(v, (dict, SON)) and "_ref" in v: | ||||||
|  |                         reference_map.setdefault(get_document(v["_cls"]), set()).add( | ||||||
|  |                             v["_ref"].id | ||||||
|  |                         ) | ||||||
|                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: |                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None) |                         field_cls = getattr( | ||||||
|  |                             getattr(field, "field", None), "document_type", None | ||||||
|  |                         ) | ||||||
|                         references = self._find_references(v, depth) |                         references = self._find_references(v, depth) | ||||||
|                         for key, refs in references.iteritems(): |                         for key, refs in references.items(): | ||||||
|                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): |                             if isinstance( | ||||||
|  |                                 field_cls, (Document, TopLevelDocumentMetaclass) | ||||||
|  |                             ): | ||||||
|                                 key = field_cls |                                 key = field_cls | ||||||
|                             reference_map.setdefault(key, []).extend(refs) |                             reference_map.setdefault(key, set()).update(refs) | ||||||
|             elif isinstance(item, (DBRef)): |             elif isinstance(item, LazyReference): | ||||||
|                 reference_map.setdefault(item.collection, []).append(item.id) |                 # LazyReference inherits DBRef but should not be dereferenced here ! | ||||||
|             elif isinstance(item, (dict, SON)) and '_ref' in item: |                 continue | ||||||
|                 reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id) |             elif isinstance(item, DBRef): | ||||||
|  |                 reference_map.setdefault(item.collection, set()).add(item.id) | ||||||
|  |             elif isinstance(item, (dict, SON)) and "_ref" in item: | ||||||
|  |                 reference_map.setdefault(get_document(item["_cls"]), set()).add( | ||||||
|  |                     item["_ref"].id | ||||||
|  |                 ) | ||||||
|             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: |             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: | ||||||
|                 references = self._find_references(item, depth - 1) |                 references = self._find_references(item, depth - 1) | ||||||
|                 for key, refs in references.iteritems(): |                 for key, refs in references.items(): | ||||||
|                     reference_map.setdefault(key, []).extend(refs) |                     reference_map.setdefault(key, set()).update(refs) | ||||||
|  |  | ||||||
|         return reference_map |         return reference_map | ||||||
|  |  | ||||||
| @@ -94,27 +160,47 @@ class DeReference(object): | |||||||
|         """Fetch all references and convert to their document objects |         """Fetch all references and convert to their document objects | ||||||
|         """ |         """ | ||||||
|         object_map = {} |         object_map = {} | ||||||
|         for col, dbrefs in self.reference_map.iteritems(): |         for collection, dbrefs in self.reference_map.items(): | ||||||
|             keys = object_map.keys() |  | ||||||
|             refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys])) |             # we use getattr instead of hasattr because hasattr swallows any exception under python2 | ||||||
|             if hasattr(col, 'objects'):  # We have a document class for the refs |             # so it could hide nasty things without raising exceptions (cfr bug #1688)) | ||||||
|                 references = col.objects.in_bulk(refs) |             ref_document_cls_exists = getattr(collection, "objects", None) is not None | ||||||
|                 for key, doc in references.iteritems(): |  | ||||||
|                     object_map[key] = doc |             if ref_document_cls_exists: | ||||||
|  |                 col_name = collection._get_collection_name() | ||||||
|  |                 refs = [ | ||||||
|  |                     dbref for dbref in dbrefs if (col_name, dbref) not in object_map | ||||||
|  |                 ] | ||||||
|  |                 references = collection.objects.in_bulk(refs) | ||||||
|  |                 for key, doc in references.items(): | ||||||
|  |                     object_map[(col_name, key)] = doc | ||||||
|             else:  # Generic reference: use the refs data to convert to document |             else:  # Generic reference: use the refs data to convert to document | ||||||
|                 if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,) ): |                 if isinstance(doc_type, (ListField, DictField, MapField)): | ||||||
|                     references = doc_type._get_db()[col].find({'_id': {'$in': refs}}) |                     continue | ||||||
|  |  | ||||||
|  |                 refs = [ | ||||||
|  |                     dbref for dbref in dbrefs if (collection, dbref) not in object_map | ||||||
|  |                 ] | ||||||
|  |  | ||||||
|  |                 if doc_type: | ||||||
|  |                     references = doc_type._get_db()[collection].find( | ||||||
|  |                         {"_id": {"$in": refs}} | ||||||
|  |                     ) | ||||||
|                     for ref in references: |                     for ref in references: | ||||||
|                         doc = doc_type._from_son(ref) |                         doc = doc_type._from_son(ref) | ||||||
|                         object_map[doc.id] = doc |                         object_map[(collection, doc.id)] = doc | ||||||
|                 else: |                 else: | ||||||
|                     references = get_db()[col].find({'_id': {'$in': refs}}) |                     references = get_db()[collection].find({"_id": {"$in": refs}}) | ||||||
|                     for ref in references: |                     for ref in references: | ||||||
|                         if '_cls' in ref: |                         if "_cls" in ref: | ||||||
|                             doc = get_document(ref["_cls"])._from_son(ref) |                             doc = get_document(ref["_cls"])._from_son(ref) | ||||||
|  |                         elif doc_type is None: | ||||||
|  |                             doc = get_document( | ||||||
|  |                                 "".join(x.capitalize() for x in collection.split("_")) | ||||||
|  |                             )._from_son(ref) | ||||||
|                         else: |                         else: | ||||||
|                             doc = doc_type._from_son(ref) |                             doc = doc_type._from_son(ref) | ||||||
|                         object_map[doc.id] = doc |                         object_map[(collection, doc.id)] = doc | ||||||
|         return object_map |         return object_map | ||||||
|  |  | ||||||
|     def _attach_objects(self, items, depth=0, instance=None, name=None): |     def _attach_objects(self, items, depth=0, instance=None, name=None): | ||||||
| @@ -139,20 +225,30 @@ class DeReference(object): | |||||||
|                     return BaseList(items, instance, name) |                     return BaseList(items, instance, name) | ||||||
|  |  | ||||||
|         if isinstance(items, (dict, SON)): |         if isinstance(items, (dict, SON)): | ||||||
|             if '_ref' in items: |             if "_ref" in items: | ||||||
|                 return self.object_map.get(items['_ref'].id, items) |                 return self.object_map.get( | ||||||
|             elif '_types' in items and '_cls' in items: |                     (items["_ref"].collection, items["_ref"].id), items | ||||||
|                 doc = get_document(items['_cls'])._from_son(items) |                 ) | ||||||
|                 doc._data = self._attach_objects(doc._data, depth, doc, name) |             elif "_cls" in items: | ||||||
|  |                 doc = get_document(items["_cls"])._from_son(items) | ||||||
|  |                 _cls = doc._data.pop("_cls", None) | ||||||
|  |                 del items["_cls"] | ||||||
|  |                 doc._data = self._attach_objects(doc._data, depth, doc, None) | ||||||
|  |                 if _cls is not None: | ||||||
|  |                     doc._data["_cls"] = _cls | ||||||
|                 return doc |                 return doc | ||||||
|  |  | ||||||
|         if not hasattr(items, 'items'): |         if not hasattr(items, "items"): | ||||||
|             is_list = True |             is_list = True | ||||||
|  |             list_type = BaseList | ||||||
|  |             if isinstance(items, EmbeddedDocumentList): | ||||||
|  |                 list_type = EmbeddedDocumentList | ||||||
|  |             as_tuple = isinstance(items, tuple) | ||||||
|             iterator = enumerate(items) |             iterator = enumerate(items) | ||||||
|             data = [] |             data = [] | ||||||
|         else: |         else: | ||||||
|             is_list = False |             is_list = False | ||||||
|             iterator = items.iteritems() |             iterator = items.items() | ||||||
|             data = {} |             data = {} | ||||||
|  |  | ||||||
|         depth += 1 |         depth += 1 | ||||||
| @@ -162,27 +258,35 @@ class DeReference(object): | |||||||
|             else: |             else: | ||||||
|                 data[k] = v |                 data[k] = v | ||||||
|  |  | ||||||
|             if k in self.object_map: |             if k in self.object_map and not is_list: | ||||||
|                 data[k] = self.object_map[k] |                 data[k] = self.object_map[k] | ||||||
|             elif hasattr(v, '_fields'): |             elif isinstance(v, (Document, EmbeddedDocument)): | ||||||
|                 for field_name, field in v._fields.iteritems(): |                 for field_name in v._fields: | ||||||
|                     v = data[k]._data.get(field_name, None) |                     v = data[k]._data.get(field_name, None) | ||||||
|                     if isinstance(v, (DBRef)): |                     if isinstance(v, DBRef): | ||||||
|                         data[k]._data[field_name] = self.object_map.get(v.id, v) |                         data[k]._data[field_name] = self.object_map.get( | ||||||
|                     elif isinstance(v, (dict, SON)) and '_ref' in v: |                             (v.collection, v.id), v | ||||||
|                         data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v) |                         ) | ||||||
|                     elif isinstance(v, dict) and depth <= self.max_depth: |                     elif isinstance(v, (dict, SON)) and "_ref" in v: | ||||||
|                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) |                         data[k]._data[field_name] = self.object_map.get( | ||||||
|                     elif isinstance(v, (list, tuple)) and depth <= self.max_depth: |                             (v["_ref"].collection, v["_ref"].id), v | ||||||
|                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) |                         ) | ||||||
|                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: |                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|                 data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name) |                         item_name = "{}.{}.{}".format(name, k, field_name) | ||||||
|             elif hasattr(v, 'id'): |                         data[k]._data[field_name] = self._attach_objects( | ||||||
|                 data[k] = self.object_map.get(v.id, v) |                             v, depth, instance=instance, name=item_name | ||||||
|  |                         ) | ||||||
|  |             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|  |                 item_name = "{}.{}".format(name, k) if name else name | ||||||
|  |                 data[k] = self._attach_objects( | ||||||
|  |                     v, depth - 1, instance=instance, name=item_name | ||||||
|  |                 ) | ||||||
|  |             elif isinstance(v, DBRef) and hasattr(v, "id"): | ||||||
|  |                 data[k] = self.object_map.get((v.collection, v.id), v) | ||||||
|  |  | ||||||
|         if instance and name: |         if instance and name: | ||||||
|             if is_list: |             if is_list: | ||||||
|                 return BaseList(data, instance, name) |                 return tuple(data) if as_tuple else list_type(data, instance, name) | ||||||
|             return BaseDict(data, instance, name) |             return BaseDict(data, instance, name) | ||||||
|         depth += 1 |         depth += 1 | ||||||
|         return data |         return data | ||||||
|   | |||||||
| @@ -1,147 +0,0 @@ | |||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from django.utils.hashcompat import md5_constructor, sha_constructor |  | ||||||
| from django.utils.encoding import smart_str |  | ||||||
| from django.contrib.auth.models import AnonymousUser |  | ||||||
| from django.utils.translation import ugettext_lazy as _ |  | ||||||
|  |  | ||||||
| import datetime |  | ||||||
|  |  | ||||||
| REDIRECT_FIELD_NAME = 'next' |  | ||||||
|  |  | ||||||
| def get_hexdigest(algorithm, salt, raw_password): |  | ||||||
|     raw_password, salt = smart_str(raw_password), smart_str(salt) |  | ||||||
|     if algorithm == 'md5': |  | ||||||
|         return md5_constructor(salt + raw_password).hexdigest() |  | ||||||
|     elif algorithm == 'sha1': |  | ||||||
|         return sha_constructor(salt + raw_password).hexdigest() |  | ||||||
|     raise ValueError('Got unknown password algorithm type in password') |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class User(Document): |  | ||||||
|     """A User document that aims to mirror most of the API specified by Django |  | ||||||
|     at http://docs.djangoproject.com/en/dev/topics/auth/#users |  | ||||||
|     """ |  | ||||||
|     username = StringField(max_length=30, required=True, |  | ||||||
|                            verbose_name=_('username'), |  | ||||||
|                            help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters")) |  | ||||||
|  |  | ||||||
|     first_name = StringField(max_length=30, |  | ||||||
|                              verbose_name=_('first name')) |  | ||||||
|  |  | ||||||
|     last_name = StringField(max_length=30, |  | ||||||
|                             verbose_name=_('last name')) |  | ||||||
|     email = EmailField(verbose_name=_('e-mail address')) |  | ||||||
|     password = StringField(max_length=128, |  | ||||||
|                            verbose_name=_('password'), |  | ||||||
|                            help_text=_("Use '[algo]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>.")) |  | ||||||
|     is_staff = BooleanField(default=False, |  | ||||||
|                             verbose_name=_('staff status'), |  | ||||||
|                             help_text=_("Designates whether the user can log into this admin site.")) |  | ||||||
|     is_active = BooleanField(default=True, |  | ||||||
|                              verbose_name=_('active'), |  | ||||||
|                              help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts.")) |  | ||||||
|     is_superuser = BooleanField(default=False, |  | ||||||
|                                 verbose_name=_('superuser status'), |  | ||||||
|                                 help_text=_("Designates that this user has all permissions without explicitly assigning them.")) |  | ||||||
|     last_login = DateTimeField(default=datetime.datetime.now, |  | ||||||
|                                verbose_name=_('last login')) |  | ||||||
|     date_joined = DateTimeField(default=datetime.datetime.now, |  | ||||||
|                                 verbose_name=_('date joined')) |  | ||||||
|  |  | ||||||
|     meta = { |  | ||||||
|         'indexes': [ |  | ||||||
|             {'fields': ['username'], 'unique': True} |  | ||||||
|         ] |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     def __unicode__(self): |  | ||||||
|         return self.username |  | ||||||
|  |  | ||||||
|     def get_full_name(self): |  | ||||||
|         """Returns the users first and last names, separated by a space. |  | ||||||
|         """ |  | ||||||
|         full_name = u'%s %s' % (self.first_name or '', self.last_name or '') |  | ||||||
|         return full_name.strip() |  | ||||||
|  |  | ||||||
|     def is_anonymous(self): |  | ||||||
|         return False |  | ||||||
|  |  | ||||||
|     def is_authenticated(self): |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|     def set_password(self, raw_password): |  | ||||||
|         """Sets the user's password - always use this rather than directly |  | ||||||
|         assigning to :attr:`~mongoengine.django.auth.User.password` as the |  | ||||||
|         password is hashed before storage. |  | ||||||
|         """ |  | ||||||
|         from random import random |  | ||||||
|         algo = 'sha1' |  | ||||||
|         salt = get_hexdigest(algo, str(random()), str(random()))[:5] |  | ||||||
|         hash = get_hexdigest(algo, salt, raw_password) |  | ||||||
|         self.password = '%s$%s$%s' % (algo, salt, hash) |  | ||||||
|         self.save() |  | ||||||
|         return self |  | ||||||
|  |  | ||||||
|     def check_password(self, raw_password): |  | ||||||
|         """Checks the user's password against a provided password - always use |  | ||||||
|         this rather than directly comparing to |  | ||||||
|         :attr:`~mongoengine.django.auth.User.password` as the password is |  | ||||||
|         hashed before storage. |  | ||||||
|         """ |  | ||||||
|         algo, salt, hash = self.password.split('$') |  | ||||||
|         return hash == get_hexdigest(algo, salt, raw_password) |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def create_user(cls, username, password, email=None): |  | ||||||
|         """Create (and save) a new user with the given username, password and |  | ||||||
|         email address. |  | ||||||
|         """ |  | ||||||
|         now = datetime.datetime.now() |  | ||||||
|  |  | ||||||
|         # Normalize the address by lowercasing the domain part of the email |  | ||||||
|         # address. |  | ||||||
|         if email is not None: |  | ||||||
|             try: |  | ||||||
|                 email_name, domain_part = email.strip().split('@', 1) |  | ||||||
|             except ValueError: |  | ||||||
|                 pass |  | ||||||
|             else: |  | ||||||
|                 email = '@'.join([email_name, domain_part.lower()]) |  | ||||||
|  |  | ||||||
|         user = cls(username=username, email=email, date_joined=now) |  | ||||||
|         user.set_password(password) |  | ||||||
|         user.save() |  | ||||||
|         return user |  | ||||||
|  |  | ||||||
    def get_and_delete_messages(self):
        """Django messages API stub: this backend stores no messages, so an
        empty list is always returned."""
        return []
|  |  | ||||||
|  |  | ||||||
class MongoEngineBackend(object):
    """Authenticate using MongoEngine and mongoengine.django.auth.User.
    """

    # Capability flags inspected by Django's auth framework.
    supports_object_permissions = False
    supports_anonymous_user = False
    supports_inactive_user = False

    def authenticate(self, username=None, password=None):
        """Return the matching user when the credentials check out, else None."""
        user = User.objects(username=username).first()
        if user and password and user.check_password(password):
            return user
        return None

    def get_user(self, user_id):
        """Look a user up by primary key; None when absent."""
        return User.objects.with_id(user_id)
|  |  | ||||||
|  |  | ||||||
def get_user(userid):
    """Returns a User object from an id (User.id). Django's equivalent takes
    request, but taking an id instead leaves it up to the developer to store
    the id in any way they want (session, signed cookie, etc.)

    Falls back to AnonymousUser for a falsy id or an unknown user.
    """
    if not userid:
        return AnonymousUser()
    user = MongoEngineBackend().get_user(userid)
    return user if user else AnonymousUser()
| @@ -1,69 +0,0 @@ | |||||||
| from django.contrib.sessions.backends.base import SessionBase, CreateError |  | ||||||
| from django.core.exceptions import SuspiciousOperation |  | ||||||
| from django.utils.encoding import force_unicode |  | ||||||
|  |  | ||||||
| from mongoengine.document import Document |  | ||||||
| from mongoengine import fields |  | ||||||
| from mongoengine.queryset import OperationError |  | ||||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME |  | ||||||
| from django.conf import settings |  | ||||||
| from datetime import datetime |  | ||||||
|  |  | ||||||
| MONGOENGINE_SESSION_DB_ALIAS = getattr( |  | ||||||
|     settings, 'MONGOENGINE_SESSION_DB_ALIAS', |  | ||||||
|     DEFAULT_CONNECTION_NAME) |  | ||||||
|  |  | ||||||
class MongoSession(Document):
    """Backing document for Django sessions stored in MongoDB."""
    # Session key doubles as the primary key; capped at 40 chars
    # (Django's session keys fit this - TODO confirm against the key
    # generator in use).
    session_key = fields.StringField(primary_key=True, max_length=40)
    # Encoded session payload as produced by SessionBase.encode.
    session_data = fields.StringField()
    # Sessions past this date are ignored by SessionStore.load.
    expire_date = fields.DateTimeField()

    meta = {'collection': 'django_session',
            'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
            'allow_inheritance': False}
|  |  | ||||||
|  |  | ||||||
class SessionStore(SessionBase):
    """A MongoEngine-based session store for Django.
    """

    def load(self):
        """Return the decoded session dict; start a fresh empty session when
        no unexpired record exists or the stored data fails to decode."""
        try:
            record = MongoSession.objects(session_key=self.session_key,
                                          expire_date__gt=datetime.now())[0]
            return self.decode(force_unicode(record.session_data))
        except (IndexError, SuspiciousOperation):
            self.create()
            return {}

    def exists(self, session_key):
        """True when a session record with this key is stored."""
        match = MongoSession.objects(session_key=session_key).first()
        return bool(match)

    def create(self):
        """Allocate a brand-new session key, retrying on key collisions."""
        while True:
            self.session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
                # Key already taken - draw another one.
                continue
            else:
                self.modified = True
                self._session_cache = {}
                return

    def save(self, must_create=False):
        """Persist the session; with must_create, insert-only semantics are
        enforced and a collision surfaces as CreateError."""
        record = MongoSession(session_key=self.session_key)
        record.session_data = self.encode(self._get_session(no_load=must_create))
        record.expire_date = self.get_expiry_date()
        try:
            record.save(force_insert=must_create, safe=True)
        except OperationError:
            if must_create:
                raise CreateError
            raise

    def delete(self, session_key=None):
        """Remove the record for session_key (default: this session's key)."""
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        MongoSession.objects(session_key=session_key).delete()
| @@ -1,46 +0,0 @@ | |||||||
| from django.http import Http404 |  | ||||||
| from mongoengine.queryset import QuerySet |  | ||||||
| from mongoengine.base import BaseDocument |  | ||||||
| from mongoengine.base import ValidationError |  | ||||||
|  |  | ||||||
def _get_queryset(cls):
    """Inspired by django.shortcuts.*

    Accept either a Document class or an existing QuerySet and return a
    QuerySet either way.
    """
    return cls if isinstance(cls, QuerySet) else cls.objects
|  |  | ||||||
def get_document_or_404(cls, *args, **kwargs):
    """Return the single document matching the query, or raise Http404.

    cls may be a Document or QuerySet object. All other passed arguments
    and keyword arguments are forwarded to get().

    Note: Like with get(), a MultipleObjectsReturned will be raised if more
    than one object is found.

    Inspired by django.shortcuts.*
    """
    qs = _get_queryset(cls)
    doc_cls = qs._document
    try:
        return qs.get(*args, **kwargs)
    except (doc_cls.DoesNotExist, ValidationError):
        raise Http404('No %s matches the given query.' % doc_cls._class_name)
|  |  | ||||||
def get_list_or_404(cls, *args, **kwargs):
    """Return a non-empty list of matching documents, or raise Http404.

    cls may be a Document or QuerySet object. All other passed arguments
    and keyword arguments are forwarded to filter().

    Inspired by django.shortcuts.*
    """
    qs = _get_queryset(cls)
    documents = list(qs.filter(*args, **kwargs))
    if documents:
        return documents
    raise Http404('No %s matches the given query.' % qs._document._class_name)
| @@ -1,112 +0,0 @@ | |||||||
| import os |  | ||||||
| import itertools |  | ||||||
| import urlparse |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from django.conf import settings |  | ||||||
| from django.core.files.storage import Storage |  | ||||||
| from django.core.exceptions import ImproperlyConfigured |  | ||||||
|  |  | ||||||
|  |  | ||||||
class FileDocument(Document):
    """A document used to store a single file in GridFS.
    """
    # GridFS-backed content; GridFSStorage reads/writes through this field.
    file = FileField()
|  |  | ||||||
|  |  | ||||||
class GridFSStorage(Storage):
    """A custom storage backend to store files in GridFS
    """

    def __init__(self, base_url=None):
        """Files are wrapped in FileDocument instances; ``self.field`` names
        the FileField attribute that holds the GridFS content. URLs are
        joined onto *base_url* (default: ``settings.MEDIA_URL``)."""
        if base_url is None:
            base_url = settings.MEDIA_URL
        self.base_url = base_url
        self.document = FileDocument
        self.field = 'file'

    def delete(self, name):
        """Deletes the specified file from the storage system.
        """
        if self.exists(name):
            # Bug fix: the old code deleted the GridFS content of
            # `objects.first()` - an arbitrary document - rather than the
            # one matching `name`.
            doc = self._get_doc_with_name(name)
            getattr(doc, self.field).delete()   # Delete the GridFS file
            doc.delete()                        # Delete the FileDocument

    def exists(self, name):
        """Returns True if a file referenced by the given name already exists
        in the storage system, or False if the name is available for a new
        file.
        """
        doc = self._get_doc_with_name(name)
        if doc is None:
            return False
        return bool(getattr(doc, self.field).name)

    def listdir(self, path=None):
        """Lists the contents of the specified path, returning a 2-tuple of
        lists; the first item being directories, the second item being files.

        GridFS has no directories, so the first list is always empty.
        """
        def name(doc):
            return getattr(doc, self.field).name
        return [], [name(d) for d in self.document.objects if name(d)]

    def size(self, name):
        """Returns the total size, in bytes, of the file specified by name.

        :raises ValueError: if no file with that name exists.
        """
        doc = self._get_doc_with_name(name)
        if doc is None:
            raise ValueError("No such file or directory: '%s'" % name)
        return getattr(doc, self.field).length

    def url(self, name):
        """Returns an absolute URL where the file's contents can be accessed
        directly by a web browser.
        """
        if self.base_url is None:
            raise ValueError("This file is not accessible via a URL.")
        return urlparse.urljoin(self.base_url, name).replace('\\', '/')

    def _get_doc_with_name(self, name):
        """Find the document in the store with the given name, or None.

        NOTE(review): scans every document client-side - fine for small
        stores, O(n) per lookup otherwise.
        """
        for doc in self.document.objects:
            if getattr(doc, self.field).name == name:
                return doc
        return None

    def _open(self, name, mode='rb'):
        """Return the stored file field for `name` (`mode` is part of the
        Storage API but unused here)."""
        doc = self._get_doc_with_name(name)
        if doc is None:
            raise ValueError("No file found with the name '%s'." % name)
        return getattr(doc, self.field)

    def get_available_name(self, name):
        """Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        """
        file_root, file_ext = os.path.splitext(name)
        # If the filename already exists, add an underscore and a number
        # (before the file extension, if one exists) to the filename until
        # the generated filename doesn't exist.
        count = itertools.count(1)
        while self.exists(name):
            # next(count) works on both Python 2 and 3, unlike the old
            # count.next(); file_ext includes the dot.
            name = "%s_%s%s" % (file_root, next(count), file_ext)
        return name

    def _save(self, name, content):
        """Write `content` into a new FileDocument under `name`."""
        doc = self.document()
        getattr(doc, self.field).put(content, filename=name)
        doc.save()
        return name
| @@ -1,21 +0,0 @@ | |||||||
| #coding: utf-8 |  | ||||||
| from django.test import TestCase |  | ||||||
| from django.conf import settings |  | ||||||
|  |  | ||||||
| from mongoengine import connect |  | ||||||
|  |  | ||||||
class MongoTestCase(TestCase):
    """
    TestCase class that clear the collection between the tests
    """

    db_name = 'test_%s' % settings.MONGO_DATABASE_NAME

    def __init__(self, methodName='runTest'):
        # Bug fix: unittest's default test-method name is 'runTest'
        # (capital T); the previous 'runtest' made no-argument
        # construction fail the method lookup.
        self.db = connect(self.db_name)
        super(MongoTestCase, self).__init__(methodName)

    def _post_teardown(self):
        """Drop every non-system collection after each test."""
        super(MongoTestCase, self)._post_teardown()
        for collection in self.db.collection_names():
            if collection == 'system.indexes':
                continue
            self.db.drop_collection(collection)
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										162
									
								
								mongoengine/errors.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										162
									
								
								mongoengine/errors.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,162 @@ | |||||||
|  | from collections import defaultdict | ||||||
|  |  | ||||||
|  |  | ||||||
|  | __all__ = ( | ||||||
|  |     "NotRegistered", | ||||||
|  |     "InvalidDocumentError", | ||||||
|  |     "LookUpError", | ||||||
|  |     "DoesNotExist", | ||||||
|  |     "MultipleObjectsReturned", | ||||||
|  |     "InvalidQueryError", | ||||||
|  |     "OperationError", | ||||||
|  |     "NotUniqueError", | ||||||
|  |     "BulkWriteError", | ||||||
|  |     "FieldDoesNotExist", | ||||||
|  |     "ValidationError", | ||||||
|  |     "SaveConditionError", | ||||||
|  |     "DeprecatedError", | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
class NotRegistered(Exception):
    """Raised when a document class that was never registered is looked up."""
    pass
|  |  | ||||||
|  |  | ||||||
class InvalidDocumentError(Exception):
    """Raised for structurally invalid document definitions or data."""
    pass
|  |  | ||||||
|  |  | ||||||
class LookUpError(AttributeError):
    """Raised when a field/attribute lookup fails (an AttributeError)."""
    pass
|  |  | ||||||
|  |  | ||||||
class DoesNotExist(Exception):
    """Raised when a query expected to match a document matches none."""
    pass
|  |  | ||||||
|  |  | ||||||
class MultipleObjectsReturned(Exception):
    """Raised when a single-object query matches more than one document."""
    pass
|  |  | ||||||
|  |  | ||||||
class InvalidQueryError(Exception):
    """Raised when a query cannot be translated/executed as written."""
    pass
|  |  | ||||||
|  |  | ||||||
class OperationError(Exception):
    """Base error for failed database operations (writes, deletes, etc.)."""
    pass
|  |  | ||||||
|  |  | ||||||
class NotUniqueError(OperationError):
    """OperationError raised on unique-constraint violations."""
    pass
|  |  | ||||||
|  |  | ||||||
class BulkWriteError(OperationError):
    """OperationError raised when a bulk write operation fails."""
    pass
|  |  | ||||||
|  |  | ||||||
class SaveConditionError(OperationError):
    """OperationError raised when a conditional save's condition is not met."""
    pass
|  |  | ||||||
|  |  | ||||||
|  | class FieldDoesNotExist(Exception): | ||||||
|  |     """Raised when trying to set a field | ||||||
|  |     not declared in a :class:`~mongoengine.Document` | ||||||
|  |     or an :class:`~mongoengine.EmbeddedDocument`. | ||||||
|  |  | ||||||
|  |     To avoid this behavior on data loading, | ||||||
|  |     you should set the :attr:`strict` to ``False`` | ||||||
|  |     in the :attr:`meta` dictionary. | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |  | ||||||
class ValidationError(AssertionError):
    """Validation exception.

    May represent an error validating a field or a
    document containing fields with validation errors.

    :ivar errors: A dictionary of errors for fields within this
        document or list, or None if the error is for an
        individual field.
    """

    # Class-level defaults; instances overwrite these in __init__.
    errors = {}
    field_name = None
    _message = None  # backing store for the `message` property

    def __init__(self, message="", **kwargs):
        super().__init__(message)
        self.errors = kwargs.get("errors", {})
        self.field_name = kwargs.get("field_name")
        self.message = message

    def __str__(self):
        return str(self.message)

    def __repr__(self):
        return "{}({},)".format(self.__class__.__name__, self.message)

    def __getattribute__(self, name):
        # Intercepts ALL attribute access so that reading `message` appends
        # the formatted per-field errors on the fly. NOTE(review): the
        # `"%s" % message` branch under field_name looks like a no-op
        # formatting pass - confirm intent before changing.
        message = super().__getattribute__(name)
        if name == "message":
            if self.field_name:
                message = "%s" % message
            if self.errors:
                message = "{}({})".format(message, self._format_errors())
        return message

    def _get_message(self):
        return self._message

    def _set_message(self, message):
        self._message = message

    # Property so that `message` writes land on `_message` while reads go
    # through __getattribute__ above.
    message = property(_get_message, _set_message)

    def to_dict(self):
        """Returns a dictionary of all errors within a document

        Keys are field names or list indices and values are the
        validation error messages, or a nested dictionary of
        errors for an embedded document or list.
        """

        def build_dict(source):
            # Recursively flatten nested ValidationErrors into plain dicts
            # of strings.
            errors_dict = {}
            if isinstance(source, dict):
                for field_name, error in source.items():
                    errors_dict[field_name] = build_dict(error)
            elif isinstance(source, ValidationError) and source.errors:
                return build_dict(source.errors)
            else:
                return str(source)

            return errors_dict

        if not self.errors:
            return {}

        return build_dict(self.errors)

    def _format_errors(self):
        """Returns a string listing all errors within a document"""

        def generate_key(value, prefix=""):
            # Collapse nested lists/dicts of errors into a dotted string key.
            if isinstance(value, list):
                value = " ".join([generate_key(k) for k in value])
            elif isinstance(value, dict):
                value = " ".join([generate_key(v, k) for k, v in value.items()])

            results = "{}.{}".format(prefix, value) if prefix else value
            return results

        # Group field names by their rendered error message.
        error_dict = defaultdict(list)
        for k, v in self.to_dict().items():
            error_dict[generate_key(v)].append(k)
        return " ".join(["{}: {}".format(k, v) for k, v in error_dict.items()])
|  |  | ||||||
|  |  | ||||||
|  | class DeprecatedError(Exception): | ||||||
|  |     """Raise when a user uses a feature that has been Deprecated""" | ||||||
|  |  | ||||||
|  |     pass | ||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										19
									
								
								mongoengine/mongodb_support.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										19
									
								
								mongoengine/mongodb_support.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,19 @@ | |||||||
|  | """ | ||||||
|  | Helper functions, constants, and types to aid with MongoDB version support | ||||||
|  | """ | ||||||
|  | from mongoengine.connection import get_connection | ||||||
|  |  | ||||||
|  |  | ||||||
|  | # Constant that can be used to compare the version retrieved with | ||||||
|  | # get_mongodb_version() | ||||||
|  | MONGODB_34 = (3, 4) | ||||||
|  | MONGODB_36 = (3, 6) | ||||||
|  |  | ||||||
|  |  | ||||||
def get_mongodb_version():
    """Return the version of the default connected mongoDB (first 2 digits)

    :return: tuple(int, int)
    """
    version_array = get_connection().server_info()["versionArray"]
    # Only major.minor are meaningful for feature gating, e.g. (3, 2).
    return tuple(version_array[:2])
							
								
								
									
										61
									
								
								mongoengine/pymongo_support.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										61
									
								
								mongoengine/pymongo_support.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,61 @@ | |||||||
|  | """ | ||||||
|  | Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support. | ||||||
|  | """ | ||||||
|  | import pymongo | ||||||
|  | from pymongo.errors import OperationFailure | ||||||
|  |  | ||||||
|  | _PYMONGO_37 = (3, 7) | ||||||
|  |  | ||||||
|  | PYMONGO_VERSION = tuple(pymongo.version_tuple[:2]) | ||||||
|  |  | ||||||
|  | IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37 | ||||||
|  |  | ||||||
|  |  | ||||||
def count_documents(
    collection, filter, skip=None, limit=None, hint=None, collation=None
):
    """Pymongo>3.7 deprecates count in favour of count_documents
    """
    # Pymongo raises an OperationFailure when count is called with limit=0.
    if limit == 0:
        return 0

    kwargs = {}
    for option, value in (("skip", skip), ("limit", limit), ("collation", collation)):
        if value is not None:
            kwargs[option] = value
    # -1 historically meant "no hint"; treat it like None.
    if hint not in (-1, None):
        kwargs["hint"] = hint

    # count_documents appeared in pymongo 3.7
    if IS_PYMONGO_GTE_37:
        try:
            return collection.count_documents(filter=filter, **kwargs)
        except OperationFailure:
            # Some operators that worked with .count no longer work with
            # count_documents (i.e $geoNear, $near, and $nearSphere), so
            # fall back to the deprecated Cursor.count. Keeping this should
            # be reevaluated the day pymongo removes .count entirely.
            pass

    cursor = collection.find(filter)
    for option, option_value in kwargs.items():
        cursor = getattr(cursor, option)(option_value)
    with_limit_and_skip = "skip" in kwargs or "limit" in kwargs
    return cursor.count(with_limit_and_skip=with_limit_and_skip)
|  |  | ||||||
|  |  | ||||||
def list_collection_names(db, include_system_collections=False):
    """Pymongo>3.7 deprecates collection_names in favour of list_collection_names"""
    if IS_PYMONGO_GTE_37:
        names = db.list_collection_names()
    else:
        names = db.collection_names()

    if include_system_collections:
        return names
    # Hide MongoDB's internal "system.*" collections by default.
    return [name for name in names if not name.startswith("system.")]
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										28
									
								
								mongoengine/queryset/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										28
									
								
								mongoengine/queryset/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,28 @@ | |||||||
|  | from mongoengine.errors import * | ||||||
|  | from mongoengine.queryset.field_list import * | ||||||
|  | from mongoengine.queryset.manager import * | ||||||
|  | from mongoengine.queryset.queryset import * | ||||||
|  | from mongoengine.queryset.transform import * | ||||||
|  | from mongoengine.queryset.visitor import * | ||||||
|  |  | ||||||
|  | # Expose just the public subset of all imported objects and constants. | ||||||
|  | __all__ = ( | ||||||
|  |     "QuerySet", | ||||||
|  |     "QuerySetNoCache", | ||||||
|  |     "Q", | ||||||
|  |     "queryset_manager", | ||||||
|  |     "QuerySetManager", | ||||||
|  |     "QueryFieldList", | ||||||
|  |     "DO_NOTHING", | ||||||
|  |     "NULLIFY", | ||||||
|  |     "CASCADE", | ||||||
|  |     "DENY", | ||||||
|  |     "PULL", | ||||||
|  |     # Errors that might be related to a queryset, mostly here for backward | ||||||
|  |     # compatibility | ||||||
|  |     "DoesNotExist", | ||||||
|  |     "InvalidQueryError", | ||||||
|  |     "MultipleObjectsReturned", | ||||||
|  |     "NotUniqueError", | ||||||
|  |     "OperationError", | ||||||
|  | ) | ||||||
							
								
								
									
										2001
									
								
								mongoengine/queryset/base.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										2001
									
								
								mongoengine/queryset/base.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										88
									
								
								mongoengine/queryset/field_list.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										88
									
								
								mongoengine/queryset/field_list.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,88 @@ | |||||||
|  | __all__ = ("QueryFieldList",) | ||||||
|  |  | ||||||
|  |  | ||||||
class QueryFieldList:
    """Object that handles combinations of .only() and .exclude() calls"""

    # Projection modes, matching MongoDB's 1/0 projection values.
    ONLY = 1
    EXCLUDE = 0

    def __init__(
        self, fields=None, value=ONLY, always_include=None, _only_called=False
    ):
        """The QueryFieldList builder

        :param fields: A list of fields used in `.only()` or `.exclude()`
        :param value: How to handle the fields; either `ONLY` or `EXCLUDE`
        :param always_include: Any fields to always_include eg `_cls`
        :param _only_called: Has `.only()` been called?  If so its a set of fields
           otherwise it performs a union.
        """
        self.value = value
        self.fields = set(fields or [])
        self.always_include = set(always_include or [])
        self._id = None  # explicit "_id" projection, tracked separately
        self._only_called = _only_called
        self.slice = {}  # field name -> sub-projection dict (e.g. $slice)

    def __add__(self, f):
        # NOTE: mutates and returns self. Branch order matters - each case
        # below merges the incoming QueryFieldList differently depending on
        # the current and incoming ONLY/EXCLUDE modes.
        if isinstance(f.value, dict):
            # A dict value carries a sub-projection spec, not a plain 1/0.
            for field in f.fields:
                self.slice[field] = f.value
            if not self.fields:
                self.fields = f.fields
        elif not self.fields:
            # First restriction call: adopt the incoming state wholesale.
            self.fields = f.fields
            self.value = f.value
            self.slice = {}
        elif self.value is self.ONLY and f.value is self.ONLY:
            self._clean_slice()
            if self._only_called:
                # only() after only(): union of the requested fields.
                self.fields = self.fields.union(f.fields)
            else:
                self.fields = f.fields
        elif self.value is self.EXCLUDE and f.value is self.EXCLUDE:
            # exclude() after exclude(): union of the excluded fields.
            self.fields = self.fields.union(f.fields)
            self._clean_slice()
        elif self.value is self.ONLY and f.value is self.EXCLUDE:
            # exclude() narrows a previous only().
            self.fields -= f.fields
            self._clean_slice()
        elif self.value is self.EXCLUDE and f.value is self.ONLY:
            # only() after exclude(): keep only the non-excluded requests.
            self.value = self.ONLY
            self.fields = f.fields - self.fields
            self._clean_slice()

        if "_id" in f.fields:
            self._id = f.value

        if self.always_include:
            if self.value is self.ONLY and self.fields:
                if sorted(self.slice.keys()) != sorted(self.fields):
                    self.fields = self.fields.union(self.always_include)
            else:
                self.fields -= self.always_include

        if getattr(f, "_only_called", False):
            self._only_called = True
        return self

    def __bool__(self):
        # Truthy once any field restriction has been recorded.
        return bool(self.fields)

    def as_dict(self):
        # Render the accumulated state as a MongoDB projection document.
        field_list = {field: self.value for field in self.fields}
        if self.slice:
            field_list.update(self.slice)
        if self._id is not None:
            field_list["_id"] = self._id
        return field_list

    def reset(self):
        # Return to the default "include everything" state.
        self.fields = set()
        self.slice = {}
        self.value = self.ONLY

    def _clean_slice(self):
        # Drop sub-projections for fields no longer in the projection set.
        if self.slice:
            for field in set(self.slice.keys()) - self.fields:
                del self.slice[field]
							
								
								
									
										57
									
								
								mongoengine/queryset/manager.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										57
									
								
								mongoengine/queryset/manager.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,57 @@ | |||||||
|  | from functools import partial | ||||||
|  | from mongoengine.queryset.queryset import QuerySet | ||||||
|  |  | ||||||
|  | __all__ = ("queryset_manager", "QuerySetManager") | ||||||
|  |  | ||||||
|  |  | ||||||
class QuerySetManager:
    """Descriptor that builds a QuerySet when ``Document.objects`` is read.

    Subclass this (or use the :func:`queryset_manager` decorator) to attach
    custom queryset behaviour to a document.  A custom manager function must
    accept a :class:`~mongoengine.Document` class as its first argument and a
    :class:`~mongoengine.queryset.QuerySet` as its second, and should return a
    QuerySet -- typically the one it was given, modified as needed.
    """

    get_queryset = None
    default = QuerySet

    def __init__(self, queryset_func=None):
        # Only shadow the class-level attribute when a callable was supplied.
        if queryset_func:
            self.get_queryset = queryset_func

    def __get__(self, instance, owner):
        """Return a fresh QuerySet for class access; return the manager
        itself when accessed through a document instance.
        """
        if instance is not None:
            # Accessed on a document object rather than the document class.
            return self

        # `owner` is the Document class that holds this manager.
        qs_cls = owner._meta.get("queryset_class", self.default)
        qs = qs_cls(owner, owner._get_collection())

        if not self.get_queryset:
            return qs

        n_args = self.get_queryset.__code__.co_argcount
        if n_args == 1:
            return self.get_queryset(qs)
        if n_args == 2:
            return self.get_queryset(owner, qs)
        # More than two parameters: defer the call, pre-binding what we have.
        return partial(self.get_queryset, owner, qs)
|  |  | ||||||
|  |  | ||||||
def queryset_manager(func):
    """Decorator turning *func* into a custom QuerySet manager.

    The decorated function must accept a :class:`~mongoengine.Document`
    class as its first argument and a
    :class:`~mongoengine.queryset.QuerySet` as its second.  It should return
    a QuerySet -- usually the one it received, modified in some way.
    """
    return QuerySetManager(func)
							
								
								
									
										199
									
								
								mongoengine/queryset/queryset.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										199
									
								
								mongoengine/queryset/queryset.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,199 @@ | |||||||
|  | from mongoengine.errors import OperationError | ||||||
|  | from mongoengine.queryset.base import ( | ||||||
|  |     BaseQuerySet, | ||||||
|  |     CASCADE, | ||||||
|  |     DENY, | ||||||
|  |     DO_NOTHING, | ||||||
|  |     NULLIFY, | ||||||
|  |     PULL, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | __all__ = ( | ||||||
|  |     "QuerySet", | ||||||
|  |     "QuerySetNoCache", | ||||||
|  |     "DO_NOTHING", | ||||||
|  |     "NULLIFY", | ||||||
|  |     "CASCADE", | ||||||
|  |     "DENY", | ||||||
|  |     "PULL", | ||||||
|  | ) | ||||||
|  |  | ||||||
# The maximum number of items to display in a QuerySet.__repr__
REPR_OUTPUT_SIZE = 20
# How many documents QuerySet pulls from the cursor per cache-fill batch.
ITER_CHUNK_SIZE = 100
|  |  | ||||||
|  |  | ||||||
class QuerySet(BaseQuerySet):
    """The default queryset, that builds queries and handles a set of results
    returned from a query.

    Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as
    the results.
    """

    # True while the underlying cursor may still hold unread documents.
    _has_more = True
    # Lazily computed cache of len(self); see __len__ and count().
    _len = None
    # List of documents materialised from the cursor so far; None until
    # first populated by _iter_results/_populate_cache.
    _result_cache = None

    def __iter__(self):
        """Iteration utilises a results cache which iterates the cursor
        in batches of ``ITER_CHUNK_SIZE``.

        If ``self._has_more`` the cursor hasn't been exhausted so cache then
        batch. Otherwise iterate the result_cache.
        """
        self._iter = True

        if self._has_more:
            return self._iter_results()

        # iterating over the cache.
        return iter(self._result_cache)

    def __len__(self):
        """Since __len__ is called quite frequently (for example, as part of
        list(qs)), we populate the result cache and cache the length.
        """
        if self._len is not None:
            return self._len

        # Populate the result cache with *all* of the docs in the cursor
        if self._has_more:
            list(self._iter_results())

        # Cache the length of the complete result cache and return it
        self._len = len(self._result_cache)
        return self._len

    def __repr__(self):
        """Provide a string representation of the QuerySet"""
        if self._iter:
            return ".. queryset mid-iteration .."

        # Fetch one item more than we display so we can tell whether the
        # output was truncated.
        self._populate_cache()
        data = self._result_cache[: REPR_OUTPUT_SIZE + 1]
        if len(data) > REPR_OUTPUT_SIZE:
            data[-1] = "...(remaining elements truncated)..."
        return repr(data)

    def _iter_results(self):
        """A generator for iterating over the result cache.

        Also populates the cache if there are more possible results to
        yield. Yields documents already in the cache first, then keeps
        refilling the cache from the cursor until it is exhausted.
        """
        if self._result_cache is None:
            self._result_cache = []

        pos = 0
        while True:

            # For all positions lower than the length of the current result
            # cache, serve the docs straight from the cache w/o hitting the
            # database.
            # XXX it's VERY important to compute the len within the `while`
            # condition because the result cache might expand mid-iteration
            # (e.g. if we call len(qs) inside a loop that iterates over the
            # queryset). Fortunately len(list) is O(1) in Python, so this
            # doesn't cause performance issues.
            while pos < len(self._result_cache):
                yield self._result_cache[pos]
                pos += 1

            # return if we already established there were no more
            # docs in the db cursor.
            if not self._has_more:
                return

            # Otherwise, populate more of the cache and repeat.
            if len(self._result_cache) <= pos:
                self._populate_cache()

    def _populate_cache(self):
        """
        Populates the result cache with ``ITER_CHUNK_SIZE`` more entries
        (until the cursor is exhausted).
        """
        if self._result_cache is None:
            self._result_cache = []

        # Skip populating the cache if we already established there are no
        # more docs to pull from the database.
        if not self._has_more:
            return

        # Pull in ITER_CHUNK_SIZE docs from the database and store them in
        # the result cache.
        try:
            for _ in range(ITER_CHUNK_SIZE):
                self._result_cache.append(next(self))
        except StopIteration:
            # Getting this exception means there are no more docs in the
            # db cursor. Set _has_more to False so that we can use that
            # information in other places.
            self._has_more = False

    def count(self, with_limit_and_skip=False):
        """Count the selected elements in the query.

        :param with_limit_and_skip (optional): take any :meth:`limit` or
            :meth:`skip` that has been applied to this cursor into account when
            getting the count
        """
        if with_limit_and_skip is False:
            return super().count(with_limit_and_skip)

        if self._len is None:
            # cache the length
            self._len = super().count(with_limit_and_skip)

        return self._len

    def no_cache(self):
        """Convert to a non-caching queryset

        :raises OperationError: if results have already been cached.

        .. versionadded:: 0.8.3 Convert to non caching queryset
        """
        if self._result_cache is not None:
            raise OperationError("QuerySet already cached")

        return self._clone_into(QuerySetNoCache(self._document, self._collection))
|  |  | ||||||
|  |  | ||||||
class QuerySetNoCache(BaseQuerySet):
    """A QuerySet variant that never caches its results."""

    def cache(self):
        """Convert to a caching queryset

        .. versionadded:: 0.8.3 Convert to caching queryset
        """
        return self._clone_into(QuerySet(self._document, self._collection))

    def __repr__(self):
        """Build the representation without permanently consuming the cursor.

        .. versionchanged:: 0.6.13 Now doesnt modify the cursor
        """
        if self._iter:
            return ".. queryset mid-iteration .."

        # Pull one item more than we display so truncation can be detected.
        shown = []
        while len(shown) <= REPR_OUTPUT_SIZE:
            try:
                shown.append(next(self))
            except StopIteration:
                break

        if len(shown) > REPR_OUTPUT_SIZE:
            shown[-1] = "...(remaining elements truncated)..."

        # Leave the cursor where we found it.
        self.rewind()
        return repr(shown)

    def __iter__(self):
        # If a previous iteration is in flight, iterate a clone instead of
        # disturbing it.
        qs = self.clone() if self._iter else self
        qs.rewind()
        return qs
							
								
								
									
										502
									
								
								mongoengine/queryset/transform.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										502
									
								
								mongoengine/queryset/transform.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,502 @@ | |||||||
|  | from collections import defaultdict | ||||||
|  |  | ||||||
|  | from bson import ObjectId, SON | ||||||
|  | from bson.dbref import DBRef | ||||||
|  | import pymongo | ||||||
|  |  | ||||||
|  | from mongoengine.base import UPDATE_OPERATORS | ||||||
|  | from mongoengine.common import _import_class | ||||||
|  | from mongoengine.errors import InvalidQueryError | ||||||
|  |  | ||||||
|  | __all__ = ("query", "update", "STRING_OPERATORS") | ||||||
|  |  | ||||||
# Operators comparing a field with a value; these map to the $-prefixed
# MongoDB query operators of the same name.
COMPARISON_OPERATORS = (
    "ne",
    "gt",
    "gte",
    "lt",
    "lte",
    "in",
    "nin",
    "mod",
    "all",
    "size",
    "exists",
    "not",
    "elemMatch",
    "type",
)
# Geospatial lookup operators accepted in Django-style queries.
GEO_OPERATORS = (
    "within_distance",
    "within_spherical_distance",
    "within_box",
    "within_polygon",
    "near",
    "near_sphere",
    "max_distance",
    "min_distance",
    "geo_within",
    "geo_within_box",
    "geo_within_polygon",
    "geo_within_center",
    "geo_within_sphere",
    "geo_intersects",
)
# Substring/prefix/suffix matching operators (case-sensitive and
# case-insensitive variants).
STRING_OPERATORS = (
    "contains",
    "icontains",
    "startswith",
    "istartswith",
    "endswith",
    "iendswith",
    "exact",
    "iexact",
)
# Operators handled specially by this module rather than mapped to a $-op.
CUSTOM_OPERATORS = ("match",)
# Every keyword recognised as a query operator in ``field__op`` lookups.
MATCH_OPERATORS = (
    COMPARISON_OPERATORS + GEO_OPERATORS + STRING_OPERATORS + CUSTOM_OPERATORS
)
|  |  | ||||||
|  |  | ||||||
|  | # TODO make this less complex | ||||||
# TODO make this less complex
def query(_doc_cls=None, **kwargs):
    """Transform a query from Django-style format to Mongo format.

    :param _doc_cls: optional Document class used to resolve field names
        to their db_field equivalents and to coerce query values.
    :param kwargs: Django-style lookups (``field__subfield__op=value``).
        The special key ``__raw__`` is merged into the result untouched.
    :return: a dict (containing SON where operator ordering matters,
        e.g. ``$near`` with ``$maxDistance``) suitable for PyMongo.
    :raises InvalidQueryError: if a field lookup cannot be resolved.
    :raises NotImplementedError: for recognised-but-unimplemented custom
        operators.
    """
    mongo_query = {}
    merge_query = defaultdict(list)
    for key, value in sorted(kwargs.items()):
        if key == "__raw__":
            mongo_query.update(value)
            continue

        parts = key.rsplit("__")
        # Remember positional (numeric) path segments so they can be
        # re-inserted after field-name resolution strips them out.
        indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
        parts = [part for part in parts if not part.isdigit()]
        # Check for an operator and transform to mongo-style if there is
        op = None
        if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
            op = parts.pop()

        # Allow to escape operator-like field name by __
        if len(parts) > 1 and parts[-1] == "":
            parts.pop()

        negate = False
        if len(parts) > 1 and parts[-1] == "not":
            parts.pop()
            negate = True

        if _doc_cls:
            # Switch field names to proper names [set in Field(name='foo')]
            try:
                fields = _doc_cls._lookup_field(parts)
            except Exception as e:
                raise InvalidQueryError(e)
            parts = []

            CachedReferenceField = _import_class("CachedReferenceField")
            GenericReferenceField = _import_class("GenericReferenceField")

            cleaned_fields = []
            for field in fields:
                append_field = True
                if isinstance(field, str):
                    parts.append(field)
                    append_field = False
                # is last and CachedReferenceField
                elif isinstance(field, CachedReferenceField) and fields[-1] == field:
                    parts.append("%s._id" % field.db_field)
                else:
                    parts.append(field.db_field)

                if append_field:
                    cleaned_fields.append(field)

            # Convert value to proper value
            field = cleaned_fields[-1]

            singular_ops = [None, "ne", "gt", "gte", "lt", "lte", "not"]
            singular_ops += STRING_OPERATORS
            if op in singular_ops:
                value = field.prepare_query_value(op, value)

                if isinstance(field, CachedReferenceField) and value:
                    value = value["_id"]

            elif op in ("in", "nin", "all", "near") and not isinstance(value, dict):
                # Raise an error if the in/nin/all/near param is not iterable.
                value = _prepare_query_for_iterable(field, op, value)

            # If we're querying a GenericReferenceField, we need to alter the
            # key depending on the value:
            # * If the value is a DBRef, the key should be "field_name._ref".
            # * If the value is an ObjectId, the key should be "field_name._ref.$id".
            if isinstance(field, GenericReferenceField):
                if isinstance(value, DBRef):
                    parts[-1] += "._ref"
                elif isinstance(value, ObjectId):
                    parts[-1] += "._ref.$id"

        # if op and op not in COMPARISON_OPERATORS:
        if op:
            if op in GEO_OPERATORS:
                value = _geo_operator(field, op, value)
            elif op in ("match", "elemMatch"):
                ListField = _import_class("ListField")
                EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
                if (
                    isinstance(value, dict)
                    and isinstance(field, ListField)
                    and isinstance(field.field, EmbeddedDocumentField)
                ):
                    value = query(field.field.document_type, **value)
                else:
                    value = field.prepare_query_value(op, value)
                value = {"$elemMatch": value}
            elif op in CUSTOM_OPERATORS:
                # BUGFIX: the exception used to be constructed but never
                # raised, silently ignoring unsupported custom operators.
                raise NotImplementedError(
                    'Custom method "%s" has not been implemented' % op
                )
            elif op not in STRING_OPERATORS:
                value = {"$" + op: value}

        if negate:
            value = {"$not": value}

        # Restore the numeric path segments stripped earlier.
        for i, part in indices:
            parts.insert(i, part)

        key = ".".join(parts)

        if key not in mongo_query:
            mongo_query[key] = value
        else:
            if isinstance(mongo_query[key], dict) and isinstance(value, dict):
                mongo_query[key].update(value)
                # $max/minDistance needs to come last - convert to SON
                value_dict = mongo_query[key]
                if ("$maxDistance" in value_dict or "$minDistance" in value_dict) and (
                    "$near" in value_dict or "$nearSphere" in value_dict
                ):
                    value_son = SON()
                    for k, v in value_dict.items():
                        if k == "$maxDistance" or k == "$minDistance":
                            continue
                        value_son[k] = v
                    # Required for MongoDB >= 2.6, may fail when combining
                    # PyMongo 3+ and MongoDB < 2.6
                    near_embedded = False
                    for near_op in ("$near", "$nearSphere"):
                        if isinstance(value_dict.get(near_op), dict):
                            value_son[near_op] = SON(value_son[near_op])
                            if "$maxDistance" in value_dict:
                                value_son[near_op]["$maxDistance"] = value_dict[
                                    "$maxDistance"
                                ]
                            if "$minDistance" in value_dict:
                                value_son[near_op]["$minDistance"] = value_dict[
                                    "$minDistance"
                                ]
                            near_embedded = True

                    if not near_embedded:
                        if "$maxDistance" in value_dict:
                            value_son["$maxDistance"] = value_dict["$maxDistance"]
                        if "$minDistance" in value_dict:
                            value_son["$minDistance"] = value_dict["$minDistance"]
                    mongo_query[key] = value_son
            else:
                # Store for manually merging later
                merge_query[key].append(value)

    # The queryset has been filter in such a way we must manually merge
    for k, v in merge_query.items():
        merge_query[k].append(mongo_query[k])
        del mongo_query[k]
        if isinstance(v, list):
            value = [{k: val} for val in v]
            if "$and" in mongo_query.keys():
                mongo_query["$and"].extend(value)
            else:
                mongo_query["$and"] = value

    return mongo_query
|  |  | ||||||
|  |  | ||||||
|  | def update(_doc_cls=None, **update): | ||||||
|  |     """Transform an update spec from Django-style format to Mongo | ||||||
|  |     format. | ||||||
|  |     """ | ||||||
|  |     mongo_update = {} | ||||||
|  |  | ||||||
|  |     for key, value in update.items(): | ||||||
|  |         if key == "__raw__": | ||||||
|  |             mongo_update.update(value) | ||||||
|  |             continue | ||||||
|  |  | ||||||
|  |         parts = key.split("__") | ||||||
|  |  | ||||||
|  |         # if there is no operator, default to 'set' | ||||||
|  |         if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: | ||||||
|  |             parts.insert(0, "set") | ||||||
|  |  | ||||||
|  |         # Check for an operator and transform to mongo-style if there is | ||||||
|  |         op = None | ||||||
|  |         if parts[0] in UPDATE_OPERATORS: | ||||||
|  |             op = parts.pop(0) | ||||||
|  |             # Convert Pythonic names to Mongo equivalents | ||||||
|  |             operator_map = { | ||||||
|  |                 "push_all": "pushAll", | ||||||
|  |                 "pull_all": "pullAll", | ||||||
|  |                 "dec": "inc", | ||||||
|  |                 "add_to_set": "addToSet", | ||||||
|  |                 "set_on_insert": "setOnInsert", | ||||||
|  |             } | ||||||
|  |             if op == "dec": | ||||||
|  |                 # Support decrement by flipping a positive value's sign | ||||||
|  |                 # and using 'inc' | ||||||
|  |                 value = -value | ||||||
|  |             # If the operator doesn't found from operator map, the op value | ||||||
|  |             # will stay unchanged | ||||||
|  |             op = operator_map.get(op, op) | ||||||
|  |  | ||||||
|  |         match = None | ||||||
|  |         if parts[-1] in COMPARISON_OPERATORS: | ||||||
|  |             match = parts.pop() | ||||||
|  |  | ||||||
|  |         # Allow to escape operator-like field name by __ | ||||||
|  |         if len(parts) > 1 and parts[-1] == "": | ||||||
|  |             parts.pop() | ||||||
|  |  | ||||||
|  |         if _doc_cls: | ||||||
|  |             # Switch field names to proper names [set in Field(name='foo')] | ||||||
|  |             try: | ||||||
|  |                 fields = _doc_cls._lookup_field(parts) | ||||||
|  |             except Exception as e: | ||||||
|  |                 raise InvalidQueryError(e) | ||||||
|  |             parts = [] | ||||||
|  |  | ||||||
|  |             cleaned_fields = [] | ||||||
|  |             appended_sub_field = False | ||||||
|  |             for field in fields: | ||||||
|  |                 append_field = True | ||||||
|  |                 if isinstance(field, str): | ||||||
|  |                     # Convert the S operator to $ | ||||||
|  |                     if field == "S": | ||||||
|  |                         field = "$" | ||||||
|  |                     parts.append(field) | ||||||
|  |                     append_field = False | ||||||
|  |                 else: | ||||||
|  |                     parts.append(field.db_field) | ||||||
|  |                 if append_field: | ||||||
|  |                     appended_sub_field = False | ||||||
|  |                     cleaned_fields.append(field) | ||||||
|  |                     if hasattr(field, "field"): | ||||||
|  |                         cleaned_fields.append(field.field) | ||||||
|  |                         appended_sub_field = True | ||||||
|  |  | ||||||
|  |             # Convert value to proper value | ||||||
|  |             if appended_sub_field: | ||||||
|  |                 field = cleaned_fields[-2] | ||||||
|  |             else: | ||||||
|  |                 field = cleaned_fields[-1] | ||||||
|  |  | ||||||
|  |             GeoJsonBaseField = _import_class("GeoJsonBaseField") | ||||||
|  |             if isinstance(field, GeoJsonBaseField): | ||||||
|  |                 value = field.to_mongo(value) | ||||||
|  |  | ||||||
|  |             if op == "pull": | ||||||
|  |                 if field.required or value is not None: | ||||||
|  |                     if match in ("in", "nin") and not isinstance(value, dict): | ||||||
|  |                         value = _prepare_query_for_iterable(field, op, value) | ||||||
|  |                     else: | ||||||
|  |                         value = field.prepare_query_value(op, value) | ||||||
|  |             elif op == "push" and isinstance(value, (list, tuple, set)): | ||||||
|  |                 value = [field.prepare_query_value(op, v) for v in value] | ||||||
|  |             elif op in (None, "set", "push"): | ||||||
|  |                 if field.required or value is not None: | ||||||
|  |                     value = field.prepare_query_value(op, value) | ||||||
|  |             elif op in ("pushAll", "pullAll"): | ||||||
|  |                 value = [field.prepare_query_value(op, v) for v in value] | ||||||
|  |             elif op in ("addToSet", "setOnInsert"): | ||||||
|  |                 if isinstance(value, (list, tuple, set)): | ||||||
|  |                     value = [field.prepare_query_value(op, v) for v in value] | ||||||
|  |                 elif field.required or value is not None: | ||||||
|  |                     value = field.prepare_query_value(op, value) | ||||||
|  |             elif op == "unset": | ||||||
|  |                 value = 1 | ||||||
|  |             elif op == "inc": | ||||||
|  |                 value = field.prepare_query_value(op, value) | ||||||
|  |  | ||||||
|  |         if match: | ||||||
|  |             match = "$" + match | ||||||
|  |             value = {match: value} | ||||||
|  |  | ||||||
|  |         key = ".".join(parts) | ||||||
|  |  | ||||||
|  |         if "pull" in op and "." in key: | ||||||
|  |             # Dot operators don't work on pull operations | ||||||
|  |             # unless they point to a list field | ||||||
|  |             # Otherwise it uses nested dict syntax | ||||||
|  |             if op == "pullAll": | ||||||
|  |                 raise InvalidQueryError( | ||||||
|  |                     "pullAll operations only support a single field depth" | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |             # Look for the last list field and use dot notation until there | ||||||
|  |             field_classes = [c.__class__ for c in cleaned_fields] | ||||||
|  |             field_classes.reverse() | ||||||
|  |             ListField = _import_class("ListField") | ||||||
|  |             EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField") | ||||||
|  |             if ListField in field_classes or EmbeddedDocumentListField in field_classes: | ||||||
|  |                 # Join all fields via dot notation to the last ListField or EmbeddedDocumentListField | ||||||
|  |                 # Then process as normal | ||||||
|  |                 if ListField in field_classes: | ||||||
|  |                     _check_field = ListField | ||||||
|  |                 else: | ||||||
|  |                     _check_field = EmbeddedDocumentListField | ||||||
|  |  | ||||||
|  |                 last_listField = len(cleaned_fields) - field_classes.index(_check_field) | ||||||
|  |                 key = ".".join(parts[:last_listField]) | ||||||
|  |                 parts = parts[last_listField:] | ||||||
|  |                 parts.insert(0, key) | ||||||
|  |  | ||||||
|  |             parts.reverse() | ||||||
|  |             for key in parts: | ||||||
|  |                 value = {key: value} | ||||||
|  |         elif op == "addToSet" and isinstance(value, list): | ||||||
|  |             value = {key: {"$each": value}} | ||||||
|  |         elif op in ("push", "pushAll"): | ||||||
|  |             if parts[-1].isdigit(): | ||||||
|  |                 key = ".".join(parts[0:-1]) | ||||||
|  |                 position = int(parts[-1]) | ||||||
|  |                 # $position expects an iterable. If pushing a single value, | ||||||
|  |                 # wrap it in a list. | ||||||
|  |                 if not isinstance(value, (set, tuple, list)): | ||||||
|  |                     value = [value] | ||||||
|  |                 value = {key: {"$each": value, "$position": position}} | ||||||
|  |             else: | ||||||
|  |                 if op == "pushAll": | ||||||
|  |                     op = "push"  # convert to non-deprecated keyword | ||||||
|  |                     if not isinstance(value, (set, tuple, list)): | ||||||
|  |                         value = [value] | ||||||
|  |                     value = {key: {"$each": value}} | ||||||
|  |                 else: | ||||||
|  |                     value = {key: value} | ||||||
|  |         else: | ||||||
|  |             value = {key: value} | ||||||
|  |         key = "$" + op | ||||||
|  |         if key not in mongo_update: | ||||||
|  |             mongo_update[key] = value | ||||||
|  |         elif key in mongo_update and isinstance(mongo_update[key], dict): | ||||||
|  |             mongo_update[key].update(value) | ||||||
|  |  | ||||||
|  |     return mongo_update | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def _geo_operator(field, op, value): | ||||||
|  |     """Helper to return the query for a given geo query.""" | ||||||
|  |     if op == "max_distance": | ||||||
|  |         value = {"$maxDistance": value} | ||||||
|  |     elif op == "min_distance": | ||||||
|  |         value = {"$minDistance": value} | ||||||
|  |     elif field._geo_index == pymongo.GEO2D: | ||||||
|  |         if op == "within_distance": | ||||||
|  |             value = {"$within": {"$center": value}} | ||||||
|  |         elif op == "within_spherical_distance": | ||||||
|  |             value = {"$within": {"$centerSphere": value}} | ||||||
|  |         elif op == "within_polygon": | ||||||
|  |             value = {"$within": {"$polygon": value}} | ||||||
|  |         elif op == "near": | ||||||
|  |             value = {"$near": value} | ||||||
|  |         elif op == "near_sphere": | ||||||
|  |             value = {"$nearSphere": value} | ||||||
|  |         elif op == "within_box": | ||||||
|  |             value = {"$within": {"$box": value}} | ||||||
|  |         else: | ||||||
|  |             raise NotImplementedError( | ||||||
|  |                 'Geo method "%s" has not been ' "implemented for a GeoPointField" % op | ||||||
|  |             ) | ||||||
|  |     else: | ||||||
|  |         if op == "geo_within": | ||||||
|  |             value = {"$geoWithin": _infer_geometry(value)} | ||||||
|  |         elif op == "geo_within_box": | ||||||
|  |             value = {"$geoWithin": {"$box": value}} | ||||||
|  |         elif op == "geo_within_polygon": | ||||||
|  |             value = {"$geoWithin": {"$polygon": value}} | ||||||
|  |         elif op == "geo_within_center": | ||||||
|  |             value = {"$geoWithin": {"$center": value}} | ||||||
|  |         elif op == "geo_within_sphere": | ||||||
|  |             value = {"$geoWithin": {"$centerSphere": value}} | ||||||
|  |         elif op == "geo_intersects": | ||||||
|  |             value = {"$geoIntersects": _infer_geometry(value)} | ||||||
|  |         elif op == "near": | ||||||
|  |             value = {"$near": _infer_geometry(value)} | ||||||
|  |         else: | ||||||
|  |             raise NotImplementedError( | ||||||
|  |                 'Geo method "{}" has not been implemented for a {} '.format( | ||||||
|  |                     op, field._name | ||||||
|  |                 ) | ||||||
|  |             ) | ||||||
|  |     return value | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def _infer_geometry(value): | ||||||
|  |     """Helper method that tries to infer the $geometry shape for a | ||||||
|  |     given value. | ||||||
|  |     """ | ||||||
|  |     if isinstance(value, dict): | ||||||
|  |         if "$geometry" in value: | ||||||
|  |             return value | ||||||
|  |         elif "coordinates" in value and "type" in value: | ||||||
|  |             return {"$geometry": value} | ||||||
|  |         raise InvalidQueryError( | ||||||
|  |             "Invalid $geometry dictionary should have type and coordinates keys" | ||||||
|  |         ) | ||||||
|  |     elif isinstance(value, (list, set)): | ||||||
|  |         # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon? | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             value[0][0][0] | ||||||
|  |             return {"$geometry": {"type": "Polygon", "coordinates": value}} | ||||||
|  |         except (TypeError, IndexError): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             value[0][0] | ||||||
|  |             return {"$geometry": {"type": "LineString", "coordinates": value}} | ||||||
|  |         except (TypeError, IndexError): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             value[0] | ||||||
|  |             return {"$geometry": {"type": "Point", "coordinates": value}} | ||||||
|  |         except (TypeError, IndexError): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |     raise InvalidQueryError( | ||||||
|  |         "Invalid $geometry data. Can be either a " | ||||||
|  |         "dictionary or (nested) lists of coordinate(s)" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
def _prepare_query_for_iterable(field, op, value):
    """Run ``field.prepare_query_value`` over every member of *value*.

    Used by the multi-value operators (``in``, ``nin``, ``all``, ``near``),
    which expect an iterable of values rather than a single value.
    """
    # A BaseDocument is iterable, but passing one to a multi-value operator
    # is almost certainly a caller mistake, so reject it explicitly.
    BaseDocument = _import_class("BaseDocument")
    if isinstance(value, BaseDocument):
        raise TypeError(
            "When using the `in`, `nin`, `all`, or "
            "`near`-operators you can't use a "
            "`Document`, you must wrap your object "
            "in a list (object -> [object])."
        )

    if not hasattr(value, "__iter__"):
        raise TypeError(
            "The `in`, `nin`, `all`, or "
            "`near`-operators must be applied to an "
            "iterable (e.g. a list)."
        )

    return [field.prepare_query_value(op, item) for item in value]
							
								
								
									
										193
									
								
								mongoengine/queryset/visitor.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										193
									
								
								mongoengine/queryset/visitor.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,193 @@ | |||||||
|  | import copy | ||||||
|  | import warnings | ||||||
|  |  | ||||||
|  | from mongoengine.errors import InvalidQueryError | ||||||
|  | from mongoengine.queryset import transform | ||||||
|  |  | ||||||
|  | __all__ = ("Q", "QNode") | ||||||
|  |  | ||||||
|  |  | ||||||
def warn_empty_is_deprecated():
    """Emit a DeprecationWarning for uses of the legacy ``empty`` property.

    ``stacklevel=2`` attributes the warning to the caller's call site rather
    than to this helper.
    """
    warnings.warn(
        "'empty' property is deprecated in favour of using 'not bool(filter)'",
        DeprecationWarning,
        stacklevel=2,
    )
|  |  | ||||||
|  |  | ||||||
class QNodeVisitor:
    """Base visitor class for visiting Q-object nodes in a query tree.

    The default implementations are identity functions; subclasses override
    the hooks they care about.
    """

    def visit_query(self, query):
        """Called by (New)Q objects."""
        return query

    def visit_combination(self, combination):
        """Called by QCombination objects."""
        return combination
|  |  | ||||||
|  |  | ||||||
class DuplicateQueryConditionsError(InvalidQueryError):
    """Raised when the same query operation targets a single field more than
    once, meaning an AND combination cannot be merged into one Q object.
    """

    pass
|  |  | ||||||
|  |  | ||||||
class SimplificationVisitor(QNodeVisitor):
    """Simplifies query trees by combining unnecessary 'and' connection nodes
    into a single Q-object.
    """

    def visit_combination(self, combination):
        # Only AND groups composed purely of plain Q leaves can be merged.
        if combination.operation != combination.AND:
            return combination
        if not all(isinstance(child, Q) for child in combination.children):
            return combination
        try:
            merged = self._query_conjunction(
                [child.query for child in combination.children]
            )
        except DuplicateQueryConditionsError:
            # Conflicting conditions on the same field - cannot be simplified.
            return combination
        return Q(**merged)

    def _query_conjunction(self, queries):
        """Merge query dicts - effectively ANDing them together."""
        seen_ops = set()
        combined = {}
        for query in queries:
            keys = set(query)
            # The same operation may not be applied twice to a single field.
            if keys & seen_ops:
                raise DuplicateQueryConditionsError()
            seen_ops |= keys
            # Deep-copy so later mutation of the merged dict cannot leak back
            # into the original Q objects.
            combined.update(copy.deepcopy(query))
        return combined
|  |  | ||||||
|  |  | ||||||
class QueryCompilerVisitor(QNodeVisitor):
    """Compiles the nodes in a query tree to a PyMongo-compatible query
    dictionary.
    """

    def __init__(self, document):
        # Document class whose fields drive the query transformation.
        self.document = document

    def visit_combination(self, combination):
        # OR combinations map to $or; anything else (i.e. AND) maps to $and.
        mongo_op = "$or" if combination.operation == combination.OR else "$and"
        return {mongo_op: combination.children}

    def visit_query(self, query):
        return transform.query(self.document, **query.query)
|  |  | ||||||
|  |  | ||||||
class QNode:
    """Base class for nodes in query trees."""

    # Logical connectives used by QCombination.
    AND = 0
    OR = 1

    def to_query(self, document):
        """Simplify this tree, then compile it into a PyMongo query dict."""
        simplified = self.accept(SimplificationVisitor())
        compiled = simplified.accept(QueryCompilerVisitor(document))
        return compiled

    def accept(self, visitor):
        # Subclasses must dispatch to the appropriate visitor hook.
        raise NotImplementedError

    def _combine(self, other, operation):
        """Combine this node with another node into a QCombination
        object.
        """
        # An empty operand contributes nothing - the result is the other side.
        if not bool(other):
            return self
        if not bool(self):
            return other
        return QCombination(operation, [self, other])

    def __or__(self, other):
        return self._combine(other, self.OR)

    def __and__(self, other):
        return self._combine(other, self.AND)

    @property
    def empty(self):
        warn_empty_is_deprecated()
        return False
|  |  | ||||||
|  |  | ||||||
class QCombination(QNode):
    """Represents the combination of several conditions by a given
    logical operator.
    """

    def __init__(self, operation, children):
        self.operation = operation
        self.children = []
        for child in children:
            # Sub-combinations using the same operation are flattened into
            # this node's child list instead of being nested.
            if isinstance(child, QCombination) and child.operation == operation:
                self.children.extend(child.children)
            else:
                self.children.append(child)

    def __repr__(self):
        joiner = " & " if self.operation is self.AND else " | "
        return "(%s)" % joiner.join(repr(child) for child in self.children)

    def __bool__(self):
        return bool(self.children)

    def __eq__(self, other):
        if self.__class__ != other.__class__:
            return False
        return self.operation == other.operation and self.children == other.children

    def accept(self, visitor):
        # Visit QNode children first, replacing each with the visitor's
        # result, then visit this combination itself.
        for index, child in enumerate(self.children):
            if isinstance(child, QNode):
                self.children[index] = child.accept(visitor)
        return visitor.visit_combination(self)

    @property
    def empty(self):
        warn_empty_is_deprecated()
        return not bool(self.children)
|  |  | ||||||
|  |  | ||||||
class Q(QNode):
    """A simple query object, used in a query tree to build up more complex
    query structures.
    """

    def __init__(self, **query):
        # The keyword arguments form the raw query dict for this leaf node.
        self.query = query

    def accept(self, visitor):
        return visitor.visit_query(self)

    def __bool__(self):
        return bool(self.query)

    def __eq__(self, other):
        return other.__class__ == self.__class__ and other.query == self.query

    def __repr__(self):
        return "Q(**%s)" % repr(self.query)

    @property
    def empty(self):
        warn_empty_is_deprecated()
        return not bool(self.query)
| @@ -1,18 +1,25 @@ | |||||||
| # -*- coding: utf-8 -*- | __all__ = ( | ||||||
|  |     "pre_init", | ||||||
| __all__ = ['pre_init', 'post_init', 'pre_save', 'post_save', |     "post_init", | ||||||
|            'pre_delete', 'post_delete'] |     "pre_save", | ||||||
|  |     "pre_save_post_validation", | ||||||
|  |     "post_save", | ||||||
|  |     "pre_delete", | ||||||
|  |     "post_delete", | ||||||
|  | ) | ||||||
|  |  | ||||||
| signals_available = False | signals_available = False | ||||||
| try: | try: | ||||||
|     from blinker import Namespace |     from blinker import Namespace | ||||||
|  |  | ||||||
|     signals_available = True |     signals_available = True | ||||||
| except ImportError: | except ImportError: | ||||||
|     class Namespace(object): |  | ||||||
|  |     class Namespace: | ||||||
|         def signal(self, name, doc=None): |         def signal(self, name, doc=None): | ||||||
|             return _FakeSignal(name, doc) |             return _FakeSignal(name, doc) | ||||||
|  |  | ||||||
|     class _FakeSignal(object): |     class _FakeSignal: | ||||||
|         """If blinker is unavailable, create a fake class with the same |         """If blinker is unavailable, create a fake class with the same | ||||||
|         interface that allows sending of signals but will fail with an |         interface that allows sending of signals but will fail with an | ||||||
|         error on anything else.  Instead of doing anything on send, it |         error on anything else.  Instead of doing anything on send, it | ||||||
| @@ -24,23 +31,29 @@ except ImportError: | |||||||
|             self.__doc__ = doc |             self.__doc__ = doc | ||||||
|  |  | ||||||
|         def _fail(self, *args, **kwargs): |         def _fail(self, *args, **kwargs): | ||||||
|             raise RuntimeError('signalling support is unavailable ' |             raise RuntimeError( | ||||||
|                                'because the blinker library is ' |                 "signalling support is unavailable " | ||||||
|                                'not installed.') |                 "because the blinker library is " | ||||||
|         send = lambda *a, **kw: None |                 "not installed." | ||||||
|         connect = disconnect = has_receivers_for = receivers_for = \ |             ) | ||||||
|             temporarily_connected_to = _fail |  | ||||||
|  |         send = lambda *a, **kw: None  # noqa | ||||||
|  |         connect = ( | ||||||
|  |             disconnect | ||||||
|  |         ) = has_receivers_for = receivers_for = temporarily_connected_to = _fail | ||||||
|         del _fail |         del _fail | ||||||
|  |  | ||||||
|  |  | ||||||
| # the namespace for code signals.  If you are not mongoengine code, do | # the namespace for code signals.  If you are not mongoengine code, do | ||||||
| # not put signals in here.  Create your own namespace instead. | # not put signals in here.  Create your own namespace instead. | ||||||
| _signals = Namespace() | _signals = Namespace() | ||||||
|  |  | ||||||
| pre_init = _signals.signal('pre_init') | pre_init = _signals.signal("pre_init") | ||||||
| post_init = _signals.signal('post_init') | post_init = _signals.signal("post_init") | ||||||
| pre_save = _signals.signal('pre_save') | pre_save = _signals.signal("pre_save") | ||||||
| post_save = _signals.signal('post_save') | pre_save_post_validation = _signals.signal("pre_save_post_validation") | ||||||
| pre_delete = _signals.signal('pre_delete') | post_save = _signals.signal("post_save") | ||||||
| post_delete = _signals.signal('post_delete') | pre_delete = _signals.signal("pre_delete") | ||||||
| pre_bulk_insert = _signals.signal('pre_bulk_insert') | post_delete = _signals.signal("post_delete") | ||||||
| post_bulk_insert = _signals.signal('post_bulk_insert') | pre_bulk_insert = _signals.signal("pre_bulk_insert") | ||||||
|  | post_bulk_insert = _signals.signal("post_bulk_insert") | ||||||
|   | |||||||
| @@ -1,59 +0,0 @@ | |||||||
| from mongoengine.connection import get_db |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class query_counter(object): |  | ||||||
|     """ Query_counter contextmanager to get the number of queries. """ |  | ||||||
|  |  | ||||||
|     def __init__(self): |  | ||||||
|         """ Construct the query_counter. """ |  | ||||||
|         self.counter = 0 |  | ||||||
|         self.db = get_db() |  | ||||||
|  |  | ||||||
|     def __enter__(self): |  | ||||||
|         """ On every with block we need to drop the profile collection. """ |  | ||||||
|         self.db.set_profiling_level(0) |  | ||||||
|         self.db.system.profile.drop() |  | ||||||
|         self.db.set_profiling_level(2) |  | ||||||
|         return self |  | ||||||
|  |  | ||||||
|     def __exit__(self, t, value, traceback): |  | ||||||
|         """ Reset the profiling level. """ |  | ||||||
|         self.db.set_profiling_level(0) |  | ||||||
|  |  | ||||||
|     def __eq__(self, value): |  | ||||||
|         """ == Compare querycounter. """ |  | ||||||
|         return value == self._get_count() |  | ||||||
|  |  | ||||||
|     def __ne__(self, value): |  | ||||||
|         """ != Compare querycounter. """ |  | ||||||
|         return not self.__eq__(value) |  | ||||||
|  |  | ||||||
|     def __lt__(self, value): |  | ||||||
|         """ < Compare querycounter. """ |  | ||||||
|         return self._get_count() < value |  | ||||||
|  |  | ||||||
|     def __le__(self, value): |  | ||||||
|         """ <= Compare querycounter. """ |  | ||||||
|         return self._get_count() <= value |  | ||||||
|  |  | ||||||
|     def __gt__(self, value): |  | ||||||
|         """ > Compare querycounter. """ |  | ||||||
|         return self._get_count() > value |  | ||||||
|  |  | ||||||
|     def __ge__(self, value): |  | ||||||
|         """ >= Compare querycounter. """ |  | ||||||
|         return self._get_count() >= value |  | ||||||
|  |  | ||||||
|     def __int__(self): |  | ||||||
|         """ int representation. """ |  | ||||||
|         return self._get_count() |  | ||||||
|  |  | ||||||
|     def __repr__(self): |  | ||||||
|         """ repr query_counter as the number of queries. """ |  | ||||||
|         return u"%s" % self._get_count() |  | ||||||
|  |  | ||||||
|     def _get_count(self): |  | ||||||
|         """ Get the number of queries. """ |  | ||||||
|         count = self.db.system.profile.find().count() - self.counter |  | ||||||
|         self.counter += 1 |  | ||||||
|         return count |  | ||||||
| @@ -5,7 +5,7 @@ | |||||||
| %define srcname mongoengine | %define srcname mongoengine | ||||||
|  |  | ||||||
| Name:           python-%{srcname} | Name:           python-%{srcname} | ||||||
| Version:        0.6.1 | Version:        0.8.7 | ||||||
| Release:        1%{?dist} | Release:        1%{?dist} | ||||||
| Summary:        A Python Document-Object Mapper for working with MongoDB | Summary:        A Python Document-Object Mapper for working with MongoDB | ||||||
|  |  | ||||||
| @@ -51,12 +51,4 @@ rm -rf $RPM_BUILD_ROOT | |||||||
| # %{python_sitearch}/* | # %{python_sitearch}/* | ||||||
|  |  | ||||||
| %changelog | %changelog | ||||||
| * Mon Mar 05 2012 Ross Lawley <ross.lawley@gmail.com> 0.6 | * See: http://docs.mongoengine.org/en/latest/changelog.html | ||||||
| - 0.6 released |  | ||||||
| * Thu Oct 27 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.3-1 |  | ||||||
| - Update to latest dev version |  | ||||||
| - Add PIL dependency for ImageField |  | ||||||
| * Wed Oct 12 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.2-1 |  | ||||||
| - Update version |  | ||||||
| * Fri Sep 23 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.0-1 |  | ||||||
| - Initial version |  | ||||||
|   | |||||||
							
								
								
									
										8
									
								
								requirements-dev.txt
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										8
									
								
								requirements-dev.txt
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,8 @@ | |||||||
|  | black | ||||||
|  | flake8 | ||||||
|  | flake8-import-order | ||||||
|  | pre-commit | ||||||
|  | pytest | ||||||
|  | ipdb | ||||||
|  | ipython | ||||||
|  | tox | ||||||
							
								
								
									
										10
									
								
								setup.cfg
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										10
									
								
								setup.cfg
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,10 @@ | |||||||
|  | [flake8] | ||||||
|  | ignore=E501,F401,F403,F405,I201,I202,W504, W605, W503 | ||||||
|  | exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests | ||||||
|  | max-complexity=47 | ||||||
|  | application-import-names=mongoengine,tests | ||||||
|  |  | ||||||
|  | [tool:pytest] | ||||||
|  | # Limits the discovery to tests directory | ||||||
|  | # avoids that it runs for instance the benchmark | ||||||
|  | testpaths = tests | ||||||
							
								
								
									
										158
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										158
									
								
								setup.py
									
									
									
									
									
								
							| @@ -1,53 +1,147 @@ | |||||||
| from setuptools import setup, find_packages |  | ||||||
| import os | import os | ||||||
|  | import sys | ||||||
|  |  | ||||||
| DESCRIPTION = "A Python Document-Object Mapper for working with MongoDB" | from pkg_resources import normalize_path | ||||||
|  | from setuptools import find_packages, setup | ||||||
|  | from setuptools.command.test import test as TestCommand | ||||||
|  |  | ||||||
| LONG_DESCRIPTION = None | # Hack to silence atexit traceback in newer python versions | ||||||
| try: | try: | ||||||
|     LONG_DESCRIPTION = open('README.rst').read() |     import multiprocessing | ||||||
| except: | except ImportError: | ||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  | DESCRIPTION = "MongoEngine is a Python Object-Document Mapper for working with MongoDB." | ||||||
|  |  | ||||||
|  | try: | ||||||
|  |     with open("README.rst") as fin: | ||||||
|  |         LONG_DESCRIPTION = fin.read() | ||||||
|  | except Exception: | ||||||
|  |     LONG_DESCRIPTION = None | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_version(version_tuple): | def get_version(version_tuple): | ||||||
|     version = '%s.%s' % (version_tuple[0], version_tuple[1]) |     """Return the version tuple as a string, e.g. for (0, 10, 7), | ||||||
|     if version_tuple[2]: |     return '0.10.7'. | ||||||
|         version = '%s.%s' % (version, version_tuple[2]) |     """ | ||||||
|     return version |     return ".".join(map(str, version_tuple)) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class PyTest(TestCommand): | ||||||
|  |     """Will force pytest to search for tests inside the build directory | ||||||
|  |     for 2to3 converted code (used by tox), instead of the current directory. | ||||||
|  |     Required as long as we need 2to3 | ||||||
|  |  | ||||||
|  |     Known Limitation: https://tox.readthedocs.io/en/latest/example/pytest.html#known-issues-and-limitations | ||||||
|  |     Source: https://www.hackzine.org/python-testing-with-pytest-and-2to3-plus-tox-and-travis-ci.html | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     # https://pytest.readthedocs.io/en/2.7.3/goodpractises.html#integration-with-setuptools-test-commands | ||||||
|  |     # Allows to provide pytest command argument through the test runner command `python setup.py test` | ||||||
|  |     # e.g: `python setup.py test -a "-k=test"` | ||||||
|  |     # This only works for 1 argument though | ||||||
|  |     user_options = [("pytest-args=", "a", "Arguments to pass to py.test")] | ||||||
|  |  | ||||||
|  |     def initialize_options(self): | ||||||
|  |         TestCommand.initialize_options(self) | ||||||
|  |         self.pytest_args = "" | ||||||
|  |  | ||||||
|  |     def finalize_options(self): | ||||||
|  |         TestCommand.finalize_options(self) | ||||||
|  |         self.test_args = ["tests"] | ||||||
|  |         self.test_suite = True | ||||||
|  |  | ||||||
|  |     def run_tests(self): | ||||||
|  |         # import here, cause outside the eggs aren't loaded | ||||||
|  |         from pkg_resources import _namespace_packages | ||||||
|  |         import pytest | ||||||
|  |  | ||||||
|  |         # Purge modules under test from sys.modules. The test loader will | ||||||
|  |         # re-import them from the build location. Required when 2to3 is used | ||||||
|  |         # with namespace packages. | ||||||
|  |         if sys.version_info >= (3,) and getattr(self.distribution, "use_2to3", False): | ||||||
|  |             module = self.test_args[-1].split(".")[0] | ||||||
|  |             if module in _namespace_packages: | ||||||
|  |                 del_modules = [] | ||||||
|  |                 if module in sys.modules: | ||||||
|  |                     del_modules.append(module) | ||||||
|  |                 module += "." | ||||||
|  |                 for name in sys.modules: | ||||||
|  |                     if name.startswith(module): | ||||||
|  |                         del_modules.append(name) | ||||||
|  |                 map(sys.modules.__delitem__, del_modules) | ||||||
|  |  | ||||||
|  |             # Run on the build directory for 2to3-built code | ||||||
|  |             # This will prevent the old 2.x code from being found | ||||||
|  |             # by py.test discovery mechanism, that apparently | ||||||
|  |             # ignores sys.path.. | ||||||
|  |             ei_cmd = self.get_finalized_command("egg_info") | ||||||
|  |             self.test_args = [normalize_path(ei_cmd.egg_base)] | ||||||
|  |  | ||||||
|  |         cmd_args = self.test_args + ([self.pytest_args] if self.pytest_args else []) | ||||||
|  |         errno = pytest.main(cmd_args) | ||||||
|  |         sys.exit(errno) | ||||||
|  |  | ||||||
|  |  | ||||||
| # Dirty hack to get version number from monogengine/__init__.py - we can't | # Dirty hack to get version number from monogengine/__init__.py - we can't | ||||||
| # import it as it depends on PyMongo and PyMongo isn't installed until this | # import it as it depends on PyMongo and PyMongo isn't installed until this | ||||||
| # file is read | # file is read | ||||||
| init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') | init = os.path.join(os.path.dirname(__file__), "mongoengine", "__init__.py") | ||||||
| version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0] | version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0] | ||||||
| VERSION = get_version(eval(version_line.split('=')[-1])) |  | ||||||
| print VERSION | VERSION = get_version(eval(version_line.split("=")[-1])) | ||||||
|  |  | ||||||
| CLASSIFIERS = [ | CLASSIFIERS = [ | ||||||
|     'Development Status :: 4 - Beta', |     "Development Status :: 5 - Production/Stable", | ||||||
|     'Intended Audience :: Developers', |     "Intended Audience :: Developers", | ||||||
|     'License :: OSI Approved :: MIT License', |     "License :: OSI Approved :: MIT License", | ||||||
|     'Operating System :: OS Independent', |     "Operating System :: OS Independent", | ||||||
|     'Programming Language :: Python', |     "Programming Language :: Python", | ||||||
|     'Topic :: Database', |     "Programming Language :: Python :: 3", | ||||||
|     'Topic :: Software Development :: Libraries :: Python Modules', |     "Programming Language :: Python :: 3.5", | ||||||
|  |     "Programming Language :: Python :: 3.6", | ||||||
|  |     "Programming Language :: Python :: 3.7", | ||||||
|  |     "Programming Language :: Python :: 3.8", | ||||||
|  |     "Programming Language :: Python :: Implementation :: CPython", | ||||||
|  |     "Programming Language :: Python :: Implementation :: PyPy", | ||||||
|  |     "Topic :: Database", | ||||||
|  |     "Topic :: Software Development :: Libraries :: Python Modules", | ||||||
| ] | ] | ||||||
|  |  | ||||||
| setup(name='mongoengine', | extra_opts = { | ||||||
|  |     "packages": find_packages(exclude=["tests", "tests.*"]), | ||||||
|  |     "tests_require": [ | ||||||
|  |         "pytest<5.0", | ||||||
|  |         "pytest-cov", | ||||||
|  |         "coverage<5.0",  # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls | ||||||
|  |         "blinker", | ||||||
|  |         "Pillow>=2.0.0, <7.0.0",  # 7.0.0 dropped Python2 support | ||||||
|  |     ], | ||||||
|  | } | ||||||
|  |  | ||||||
|  | if "test" in sys.argv: | ||||||
|  |     extra_opts["packages"] = find_packages() | ||||||
|  |     extra_opts["package_data"] = { | ||||||
|  |         "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"] | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | setup( | ||||||
|  |     name="mongoengine", | ||||||
|     version=VERSION, |     version=VERSION, | ||||||
|       packages=find_packages(), |     author="Harry Marr", | ||||||
|       author='Harry Marr', |     author_email="harry.marr@gmail.com", | ||||||
|       author_email='harry.marr@{nospam}gmail.com', |     maintainer="Stefan Wojcik", | ||||||
|       maintainer="Ross Lawley", |     maintainer_email="wojcikstefan@gmail.com", | ||||||
|       maintainer_email="ross.lawley@{nospam}gmail.com", |     url="http://mongoengine.org/", | ||||||
|       url='http://mongoengine.org/', |     download_url="https://github.com/MongoEngine/mongoengine/tarball/master", | ||||||
|       license='MIT', |     license="MIT", | ||||||
|     include_package_data=True, |     include_package_data=True, | ||||||
|     description=DESCRIPTION, |     description=DESCRIPTION, | ||||||
|     long_description=LONG_DESCRIPTION, |     long_description=LONG_DESCRIPTION, | ||||||
|       platforms=['any'], |     platforms=["any"], | ||||||
|     classifiers=CLASSIFIERS, |     classifiers=CLASSIFIERS, | ||||||
|       install_requires=['pymongo'], |     python_requires=">=3.5", | ||||||
|       test_suite='tests', |     install_requires=["pymongo>=3.4, <4.0"], | ||||||
|       tests_require=['blinker', 'django>=1.3', 'PIL'] |     cmdclass={"test": PyTest}, | ||||||
|  |     **extra_opts | ||||||
| ) | ) | ||||||
|   | |||||||
							
								
								
									
										35
									
								
								tests/all_warnings/test_warnings.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										35
									
								
								tests/all_warnings/test_warnings.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,35 @@ | |||||||
|  | """ | ||||||
|  | This test has been put into a module.  This is because it tests warnings that | ||||||
|  | only get triggered on first hit.  This way we can ensure its imported into the | ||||||
|  | top level and called first by the test suite. | ||||||
|  | """ | ||||||
|  | import unittest | ||||||
|  | import warnings | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestAllWarnings(unittest.TestCase): | ||||||
|  |     def setUp(self): | ||||||
|  |         connect(db="mongoenginetest") | ||||||
|  |         self.warning_list = [] | ||||||
|  |         self.showwarning_default = warnings.showwarning | ||||||
|  |         warnings.showwarning = self.append_to_warning_list | ||||||
|  |  | ||||||
|  |     def append_to_warning_list(self, message, category, *args): | ||||||
|  |         self.warning_list.append({"message": message, "category": category}) | ||||||
|  |  | ||||||
|  |     def tearDown(self): | ||||||
|  |         # restore default handling of warnings | ||||||
|  |         warnings.showwarning = self.showwarning_default | ||||||
|  |  | ||||||
|  |     def test_document_collection_syntax_warning(self): | ||||||
|  |         class NonAbstractBase(Document): | ||||||
|  |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |         class InheritedDocumentFailTest(NonAbstractBase): | ||||||
|  |             meta = {"collection": "fail"} | ||||||
|  |  | ||||||
|  |         warning = self.warning_list[0] | ||||||
|  |         assert SyntaxWarning == warning["category"] | ||||||
|  |         assert "non_abstract_base" == InheritedDocumentFailTest._get_collection_name() | ||||||
| @@ -1,70 +0,0 @@ | |||||||
| import unittest |  | ||||||
| import pymongo |  | ||||||
|  |  | ||||||
| import mongoengine.connection |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.connection import get_db, get_connection, ConnectionError |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ConnectionTest(unittest.TestCase): |  | ||||||
|  |  | ||||||
|     def tearDown(self): |  | ||||||
|         mongoengine.connection._connection_settings = {} |  | ||||||
|         mongoengine.connection._connections = {} |  | ||||||
|         mongoengine.connection._dbs = {} |  | ||||||
|  |  | ||||||
|     def test_connect(self): |  | ||||||
|         """Ensure that the connect() method works properly. |  | ||||||
|         """ |  | ||||||
|         connect('mongoenginetest') |  | ||||||
|  |  | ||||||
|         conn = get_connection() |  | ||||||
|         self.assertTrue(isinstance(conn, pymongo.connection.Connection)) |  | ||||||
|  |  | ||||||
|         db = get_db() |  | ||||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) |  | ||||||
|         self.assertEqual(db.name, 'mongoenginetest') |  | ||||||
|  |  | ||||||
|         connect('mongoenginetest2', alias='testdb') |  | ||||||
|         conn = get_connection('testdb') |  | ||||||
|         self.assertTrue(isinstance(conn, pymongo.connection.Connection)) |  | ||||||
|  |  | ||||||
|     def test_connect_uri(self): |  | ||||||
|         """Ensure that the connect() method works properly with uri's |  | ||||||
|         """ |  | ||||||
|         c = connect(db='mongoenginetest', alias='admin') |  | ||||||
|         c.admin.system.users.remove({}) |  | ||||||
|         c.mongoenginetest.system.users.remove({}) |  | ||||||
|  |  | ||||||
|         c.admin.add_user("admin", "password") |  | ||||||
|         c.admin.authenticate("admin", "password") |  | ||||||
|         c.mongoenginetest.add_user("username", "password") |  | ||||||
|  |  | ||||||
|         self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost') |  | ||||||
|  |  | ||||||
|         connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest') |  | ||||||
|  |  | ||||||
|         conn = get_connection() |  | ||||||
|         self.assertTrue(isinstance(conn, pymongo.connection.Connection)) |  | ||||||
|  |  | ||||||
|         db = get_db() |  | ||||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) |  | ||||||
|         self.assertEqual(db.name, 'mongoenginetest') |  | ||||||
|  |  | ||||||
|     def test_register_connection(self): |  | ||||||
|         """Ensure that connections with different aliases may be registered. |  | ||||||
|         """ |  | ||||||
|         register_connection('testdb', 'mongoenginetest2') |  | ||||||
|  |  | ||||||
|         self.assertRaises(ConnectionError, get_connection) |  | ||||||
|         conn = get_connection('testdb') |  | ||||||
|         self.assertTrue(isinstance(conn, pymongo.connection.Connection)) |  | ||||||
|  |  | ||||||
|         db = get_db('testdb') |  | ||||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) |  | ||||||
|         self.assertEqual(db.name, 'mongoenginetest2') |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     unittest.main() |  | ||||||
| @@ -1,812 +0,0 @@ | |||||||
| import unittest |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.connection import get_db |  | ||||||
| from mongoengine.tests import query_counter |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class FieldTest(unittest.TestCase): |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         connect(db='mongoenginetest') |  | ||||||
|         self.db = get_db() |  | ||||||
|  |  | ||||||
|     def test_list_item_dereference(self): |  | ||||||
|         """Ensure that DBRef items in ListFields are dereferenced. |  | ||||||
|         """ |  | ||||||
|         class User(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class Group(Document): |  | ||||||
|             members = ListField(ReferenceField(User)) |  | ||||||
|  |  | ||||||
|         User.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|         for i in xrange(1, 51): |  | ||||||
|             user = User(name='user %s' % i) |  | ||||||
|             user.save() |  | ||||||
|  |  | ||||||
|         group = Group(members=User.objects) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         group = Group(members=User.objects) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first().select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|         # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|             group_objs = Group.objects.select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|             for group_obj in group_objs: |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|         User.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_recursive_reference(self): |  | ||||||
|         """Ensure that ReferenceFields can reference their own documents. |  | ||||||
|         """ |  | ||||||
|         class Employee(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             boss = ReferenceField('self') |  | ||||||
|             friends = ListField(ReferenceField('self')) |  | ||||||
|  |  | ||||||
|         Employee.drop_collection() |  | ||||||
|  |  | ||||||
|         bill = Employee(name='Bill Lumbergh') |  | ||||||
|         bill.save() |  | ||||||
|  |  | ||||||
|         michael = Employee(name='Michael Bolton') |  | ||||||
|         michael.save() |  | ||||||
|  |  | ||||||
|         samir = Employee(name='Samir Nagheenanajar') |  | ||||||
|         samir.save() |  | ||||||
|  |  | ||||||
|         friends = [michael, samir] |  | ||||||
|         peter = Employee(name='Peter Gibbons', boss=bill, friends=friends) |  | ||||||
|         peter.save() |  | ||||||
|  |  | ||||||
|         Employee(name='Funky Gibbon', boss=bill, friends=friends).save() |  | ||||||
|         Employee(name='Funky Gibbon', boss=bill, friends=friends).save() |  | ||||||
|         Employee(name='Funky Gibbon', boss=bill, friends=friends).save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             peter = Employee.objects.with_id(peter.id) |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             peter.boss |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             peter.friends |  | ||||||
|             self.assertEqual(q, 3) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             peter = Employee.objects.with_id(peter.id).select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             self.assertEquals(peter.boss, bill) |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             self.assertEquals(peter.friends, friends) |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|         # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             employees = Employee.objects(boss=bill).select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             for employee in employees: |  | ||||||
|                 self.assertEquals(employee.boss, bill) |  | ||||||
|                 self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|                 self.assertEquals(employee.friends, friends) |  | ||||||
|                 self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|     def test_circular_reference(self): |  | ||||||
|         """Ensure you can handle circular references |  | ||||||
|         """ |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             relations = ListField(EmbeddedDocumentField('Relation')) |  | ||||||
|  |  | ||||||
|             def __repr__(self): |  | ||||||
|                 return "<Person: %s>" % self.name |  | ||||||
|  |  | ||||||
|         class Relation(EmbeddedDocument): |  | ||||||
|             name = StringField() |  | ||||||
|             person = ReferenceField('Person') |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|         mother = Person(name="Mother") |  | ||||||
|         daughter = Person(name="Daughter") |  | ||||||
|  |  | ||||||
|         mother.save() |  | ||||||
|         daughter.save() |  | ||||||
|  |  | ||||||
|         daughter_rel = Relation(name="Daughter", person=daughter) |  | ||||||
|         mother.relations.append(daughter_rel) |  | ||||||
|         mother.save() |  | ||||||
|  |  | ||||||
|         mother_rel = Relation(name="Daughter", person=mother) |  | ||||||
|         self_rel = Relation(name="Self", person=daughter) |  | ||||||
|         daughter.relations.append(mother_rel) |  | ||||||
|         daughter.relations.append(self_rel) |  | ||||||
|         daughter.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects()) |  | ||||||
|  |  | ||||||
|     def test_circular_reference_on_self(self): |  | ||||||
|         """Ensure you can handle circular references |  | ||||||
|         """ |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             relations = ListField(ReferenceField('self')) |  | ||||||
|  |  | ||||||
|             def __repr__(self): |  | ||||||
|                 return "<Person: %s>" % self.name |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|         mother = Person(name="Mother") |  | ||||||
|         daughter = Person(name="Daughter") |  | ||||||
|  |  | ||||||
|         mother.save() |  | ||||||
|         daughter.save() |  | ||||||
|  |  | ||||||
|         mother.relations.append(daughter) |  | ||||||
|         mother.save() |  | ||||||
|  |  | ||||||
|         daughter.relations.append(mother) |  | ||||||
|         daughter.relations.append(daughter) |  | ||||||
|         daughter.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects()) |  | ||||||
|  |  | ||||||
|     def test_circular_tree_reference(self): |  | ||||||
|         """Ensure you can handle circular references with more than one level |  | ||||||
|         """ |  | ||||||
|         class Other(EmbeddedDocument): |  | ||||||
|             name = StringField() |  | ||||||
|             friends = ListField(ReferenceField('Person')) |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             other = EmbeddedDocumentField(Other, default=lambda: Other()) |  | ||||||
|  |  | ||||||
|             def __repr__(self): |  | ||||||
|                 return "<Person: %s>" % self.name |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|         paul = Person(name="Paul") |  | ||||||
|         paul.save() |  | ||||||
|         maria = Person(name="Maria") |  | ||||||
|         maria.save() |  | ||||||
|         julia = Person(name='Julia') |  | ||||||
|         julia.save() |  | ||||||
|         anna = Person(name='Anna') |  | ||||||
|         anna.save() |  | ||||||
|  |  | ||||||
|         paul.other.friends = [maria, julia, anna] |  | ||||||
|         paul.other.name = "Paul's friends" |  | ||||||
|         paul.save() |  | ||||||
|  |  | ||||||
|         maria.other.friends = [paul, julia, anna] |  | ||||||
|         maria.other.name = "Maria's friends" |  | ||||||
|         maria.save() |  | ||||||
|  |  | ||||||
|         julia.other.friends = [paul, maria, anna] |  | ||||||
|         julia.other.name = "Julia's friends" |  | ||||||
|         julia.save() |  | ||||||
|  |  | ||||||
|         anna.other.friends = [paul, maria, julia] |  | ||||||
|         anna.other.name = "Anna's friends" |  | ||||||
|         anna.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals( |  | ||||||
|             "[<Person: Paul>, <Person: Maria>, <Person: Julia>, <Person: Anna>]", |  | ||||||
|             "%s" % Person.objects() |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_generic_reference(self): |  | ||||||
|  |  | ||||||
|         class UserA(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserB(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserC(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class Group(Document): |  | ||||||
|             members = ListField(GenericReferenceField()) |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|         members = [] |  | ||||||
|         for i in xrange(1, 51): |  | ||||||
|             a = UserA(name='User A %s' % i) |  | ||||||
|             a.save() |  | ||||||
|  |  | ||||||
|             b = UserB(name='User B %s' % i) |  | ||||||
|             b.save() |  | ||||||
|  |  | ||||||
|             c = UserC(name='User C %s' % i) |  | ||||||
|             c.save() |  | ||||||
|  |  | ||||||
|             members += [a, b, c] |  | ||||||
|  |  | ||||||
|         group = Group(members=members) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         group = Group(members=members) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for m in group_obj.members: |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first().select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for m in group_obj.members: |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_objs = Group.objects.select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for group_obj in group_objs: |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 for m in group_obj.members: |  | ||||||
|                     self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_list_field_complex(self): |  | ||||||
|  |  | ||||||
|         class UserA(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserB(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserC(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class Group(Document): |  | ||||||
|             members = ListField() |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|         members = [] |  | ||||||
|         for i in xrange(1, 51): |  | ||||||
|             a = UserA(name='User A %s' % i) |  | ||||||
|             a.save() |  | ||||||
|  |  | ||||||
|             b = UserB(name='User B %s' % i) |  | ||||||
|             b.save() |  | ||||||
|  |  | ||||||
|             c = UserC(name='User C %s' % i) |  | ||||||
|             c.save() |  | ||||||
|  |  | ||||||
|             members += [a, b, c] |  | ||||||
|  |  | ||||||
|         group = Group(members=members) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         group = Group(members=members) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for m in group_obj.members: |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first().select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for m in group_obj.members: |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_objs = Group.objects.select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for group_obj in group_objs: |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 for m in group_obj.members: |  | ||||||
|                     self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_map_field_reference(self): |  | ||||||
|  |  | ||||||
|         class User(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class Group(Document): |  | ||||||
|             members = MapField(ReferenceField(User)) |  | ||||||
|  |  | ||||||
|         User.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|         members = [] |  | ||||||
|         for i in xrange(1, 51): |  | ||||||
|             user = User(name='user %s' % i) |  | ||||||
|             user.save() |  | ||||||
|             members.append(user) |  | ||||||
|  |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue(isinstance(m, User)) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first().select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue(isinstance(m, User)) |  | ||||||
|  |  | ||||||
|        # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_objs = Group.objects.select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             for group_obj in group_objs: |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|                 for k, m in group_obj.members.iteritems(): |  | ||||||
|                     self.assertTrue(isinstance(m, User)) |  | ||||||
|  |  | ||||||
|         User.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_dict_field(self): |  | ||||||
|  |  | ||||||
|         class UserA(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserB(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserC(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class Group(Document): |  | ||||||
|             members = DictField() |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|         members = [] |  | ||||||
|         for i in xrange(1, 51): |  | ||||||
|             a = UserA(name='User A %s' % i) |  | ||||||
|             a.save() |  | ||||||
|  |  | ||||||
|             b = UserB(name='User B %s' % i) |  | ||||||
|             b.save() |  | ||||||
|  |  | ||||||
|             c = UserC(name='User C %s' % i) |  | ||||||
|             c.save() |  | ||||||
|  |  | ||||||
|             members += [a, b, c] |  | ||||||
|  |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first().select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_objs = Group.objects.select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for group_obj in group_objs: |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 for k, m in group_obj.members.iteritems(): |  | ||||||
|                     self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         Group.objects.delete() |  | ||||||
|         Group().save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|             self.assertEqual(group_obj.members, {}) |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_dict_field_no_field_inheritance(self): |  | ||||||
|  |  | ||||||
|         class UserA(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             meta = {'allow_inheritance': False} |  | ||||||
|  |  | ||||||
|         class Group(Document): |  | ||||||
|             members = DictField() |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|         members = [] |  | ||||||
|         for i in xrange(1, 51): |  | ||||||
|             a = UserA(name='User A %s' % i) |  | ||||||
|             a.save() |  | ||||||
|  |  | ||||||
|             members += [a] |  | ||||||
|  |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue(isinstance(m, UserA)) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first().select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue(isinstance(m, UserA)) |  | ||||||
|  |  | ||||||
|         # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_objs = Group.objects.select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             for group_obj in group_objs: |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|                 for k, m in group_obj.members.iteritems(): |  | ||||||
|                     self.assertTrue(isinstance(m, UserA)) |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_generic_reference_map_field(self): |  | ||||||
|  |  | ||||||
|         class UserA(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserB(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserC(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class Group(Document): |  | ||||||
|             members = MapField(GenericReferenceField()) |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|         members = [] |  | ||||||
|         for i in xrange(1, 51): |  | ||||||
|             a = UserA(name='User A %s' % i) |  | ||||||
|             a.save() |  | ||||||
|  |  | ||||||
|             b = UserB(name='User B %s' % i) |  | ||||||
|             b.save() |  | ||||||
|  |  | ||||||
|             c = UserC(name='User C %s' % i) |  | ||||||
|             c.save() |  | ||||||
|  |  | ||||||
|             members += [a, b, c] |  | ||||||
|  |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first().select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_objs = Group.objects.select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for group_obj in group_objs: |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 for k, m in group_obj.members.iteritems(): |  | ||||||
|                     self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         Group.objects.delete() |  | ||||||
|         Group().save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_multidirectional_lists(self): |  | ||||||
|  |  | ||||||
|         class Asset(Document): |  | ||||||
|             name = StringField(max_length=250, required=True) |  | ||||||
|             parent = GenericReferenceField(default=None) |  | ||||||
|             parents = ListField(GenericReferenceField()) |  | ||||||
|             children = ListField(GenericReferenceField()) |  | ||||||
|  |  | ||||||
|         Asset.drop_collection() |  | ||||||
|  |  | ||||||
|         root = Asset(name='', path="/", title="Site Root") |  | ||||||
|         root.save() |  | ||||||
|  |  | ||||||
|         company = Asset(name='company', title='Company', parent=root, parents=[root]) |  | ||||||
|         company.save() |  | ||||||
|  |  | ||||||
|         root.children = [company] |  | ||||||
|         root.save() |  | ||||||
|  |  | ||||||
|         root = root.reload() |  | ||||||
|         self.assertEquals(root.children, [company]) |  | ||||||
|         self.assertEquals(company.parents, [root]) |  | ||||||
|  |  | ||||||
|     def test_dict_in_dbref_instance(self): |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField(max_length=250, required=True) |  | ||||||
|  |  | ||||||
|         class Room(Document): |  | ||||||
|             number = StringField(max_length=250, required=True) |  | ||||||
|             staffs_with_position = ListField(DictField()) |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|         Room.drop_collection() |  | ||||||
|  |  | ||||||
|         bob = Person.objects.create(name='Bob') |  | ||||||
|         bob.save() |  | ||||||
|         sarah = Person.objects.create(name='Sarah') |  | ||||||
|         sarah.save() |  | ||||||
|  |  | ||||||
|         room_101 = Room.objects.create(number="101") |  | ||||||
|         room_101.staffs_with_position = [ |  | ||||||
|             {'position_key': 'window', 'staff': sarah}, |  | ||||||
|             {'position_key': 'door', 'staff': bob.to_dbref()}] |  | ||||||
|         room_101.save() |  | ||||||
|  |  | ||||||
|         room = Room.objects.first().select_related() |  | ||||||
|         self.assertEquals(room.staffs_with_position[0]['staff'], sarah) |  | ||||||
|         self.assertEquals(room.staffs_with_position[1]['staff'], bob) |  | ||||||
| @@ -1,90 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
|  |  | ||||||
| import unittest |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.django.shortcuts import get_document_or_404 |  | ||||||
|  |  | ||||||
| from django.http import Http404 |  | ||||||
| from django.template import Context, Template |  | ||||||
| from django.conf import settings |  | ||||||
| from django.core.paginator import Paginator |  | ||||||
|  |  | ||||||
| settings.configure() |  | ||||||
|  |  | ||||||
class QuerySetTest(unittest.TestCase):
    """Django integration: querysets rendered in templates, the
    get_document_or_404 shortcut, and Django's Paginator."""

    def setUp(self):
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()

        self.Person = Person

    def _create_people(self):
        # Four people, deliberately out of order on both fields.
        self.Person.drop_collection()
        for name, age in [("A", 20), ("D", 10), ("B", 40), ("C", 30)]:
            self.Person(name=name, age=age).save()

    def test_order_by_in_django_template(self):
        """Ensure that QuerySets are properly ordered in Django template.
        """
        self._create_people()

        t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")

        expectations = [
            ('-name', u'D-10:C-30:B-40:A-20:'),
            ('+name', u'A-20:B-40:C-30:D-10:'),
            ('-age', u'B-40:C-30:A-20:D-10:'),
            ('+age', u'D-10:A-20:C-30:B-40:'),
        ]
        for ordering, expected in expectations:
            context = Context({"ol": self.Person.objects.order_by(ordering)})
            self.assertEqual(t.render(context), expected)

        self.Person.drop_collection()

    def test_q_object_filter_in_template(self):
        """Q-object filtered querysets render, and re-render, correctly."""
        self._create_people()

        t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")

        d = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))}
        # Rendering twice must not throw (the queryset is re-iterable).
        for _ in range(2):
            self.assertEqual(t.render(Context(d)), 'D-10:C-30:')

    def test_get_document_or_404(self):
        """404 is raised for an unknown pk; the document is returned for a
        known one."""
        person = self.Person(name="G404")
        person.save()

        self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234')
        self.assertEqual(person, get_document_or_404(self.Person, pk=person.pk))

    def test_pagination(self):
        """Ensure that Pagination works as expected
        """
        class Page(Document):
            name = StringField()

        Page.drop_collection()

        for i in xrange(1, 11):
            Page(name=str(i)).save()

        paginator = Paginator(Page.objects.all(), 2)

        t = Template("{% for i in page.object_list  %}{{ i.name }}:{% endfor %}")
        for number in paginator.page_range:
            rendered = t.render(Context({"page": paginator.page(number)}))
            last = number * 2
            self.assertEqual(rendered, u'%d:%d:' % (last - 1, last))
							
								
								
									
										2842
									
								
								tests/document.py
									
									
									
									
									
								
							
							
						
						
									
										2842
									
								
								tests/document.py
									
									
									
									
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										0
									
								
								tests/document/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										0
									
								
								tests/document/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
								
								
									
										356
									
								
								tests/document/test_class_methods.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										356
									
								
								tests/document/test_class_methods.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,356 @@ | |||||||
|  | import unittest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.connection import get_db | ||||||
|  | from mongoengine.pymongo_support import list_collection_names | ||||||
|  | from mongoengine.queryset import NULLIFY, PULL | ||||||
|  |  | ||||||
|  |  | ||||||
class TestClassMethods(unittest.TestCase):
    """Class-level Document API: collection naming/access, index creation
    and comparison, and delete-rule registration."""

    def setUp(self):
        # Fresh connection per test; self.db is the raw pymongo database.
        connect(db="mongoenginetest")
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            # Plain class attribute - must NOT be picked up as a field.
            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        # Drop every collection so tests stay independent of each other.
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def test_definition(self):
        """Ensure that document may be defined using fields.
        """
        # _cls comes from allow_inheritance, id is the implicit primary key;
        # the ``non_field`` class attribute must be absent.
        assert ["_cls", "age", "id", "name"] == sorted(self.Person._fields.keys())
        assert ["IntField", "ObjectIdField", "StringField", "StringField"] == sorted(
            [x.__class__.__name__ for x in self.Person._fields.values()]
        )

    def test_get_db(self):
        """Ensure that get_db returns the expected db.
        """
        db = self.Person._get_db()
        assert self.db == db

    def test_get_collection_name(self):
        """Ensure that get_collection_name returns the expected collection
        name.
        """
        collection_name = "person"
        assert collection_name == self.Person._get_collection_name()

    def test_get_collection(self):
        """Ensure that get_collection returns the expected collection.
        """
        collection_name = "person"
        collection = self.Person._get_collection()
        assert self.db[collection_name] == collection

    def test_drop_collection(self):
        """Ensure that the collection may be dropped from the database.
        """
        collection_name = "person"
        # Saving lazily creates the collection; dropping removes it.
        self.Person(name="Test").save()
        assert collection_name in list_collection_names(self.db)

        self.Person.drop_collection()
        assert collection_name not in list_collection_names(self.db)

    def test_register_delete_rule(self):
        """Ensure that register delete rule adds a delete rule to the document
        meta.
        """

        class Job(Document):
            employee = ReferenceField(self.Person)

        assert self.Person._meta.get("delete_rules") is None

        self.Person.register_delete_rule(Job, "employee", NULLIFY)
        assert self.Person._meta["delete_rules"] == {(Job, "employee"): NULLIFY}

    def test_compare_indexes(self):
        """ Ensure that the indexes are properly created and that
        compare_indexes identifies the missing/extra indexes
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()
            tags = StringField()

            meta = {"indexes": [("author", "title")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        # An index created outside of meta shows up as "extra" ...
        BlogPost.ensure_index(["author", "description"])
        assert BlogPost.compare_indexes() == {
            "missing": [],
            "extra": [[("author", 1), ("description", 1)]],
        }

        # ... and dropping it restores the clean state.
        BlogPost._get_collection().drop_index("author_1_description_1")
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        # Dropping a meta-declared index makes it "missing".
        BlogPost._get_collection().drop_index("author_1_title_1")
        assert BlogPost.compare_indexes() == {
            "missing": [[("author", 1), ("title", 1)]],
            "extra": [],
        }

    def test_compare_indexes_inheritance(self):
        """ Ensure that the indexes are properly created and that
        compare_indexes identifies the missing/extra indexes for subclassed
        documents (_cls included)
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {"indexes": [("author", "tags")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        # Subclass indexes are automatically prefixed with _cls.
        BlogPostWithTags.ensure_index(["author", "tag_list"])
        assert BlogPost.compare_indexes() == {
            "missing": [],
            "extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]],
        }

        BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tag_list_1")
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tags_1")
        assert BlogPost.compare_indexes() == {
            "missing": [[("_cls", 1), ("author", 1), ("tags", 1)]],
            "extra": [],
        }

    def test_compare_indexes_multiple_subclasses(self):
        """ Ensure that compare_indexes behaves correctly if called from a
        class, which base class has multiple subclasses
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {"indexes": [("author", "tags")]}

        class BlogPostWithCustomField(BlogPost):
            custom = DictField()

            meta = {"indexes": [("author", "custom")]}

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        BlogPostWithCustomField.ensure_indexes()

        # Sibling subclasses must not see each other's indexes as extra/missing.
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}
        assert BlogPostWithTags.compare_indexes() == {"missing": [], "extra": []}
        assert BlogPostWithCustomField.compare_indexes() == {"missing": [], "extra": []}

    def test_compare_indexes_for_text_indexes(self):
        """ Ensure that compare_indexes behaves correctly for text indexes """

        class Doc(Document):
            a = StringField()
            b = StringField()
            meta = {
                "indexes": [
                    {
                        # $-prefix declares a text index on the field.
                        "fields": ["$a", "$b"],
                        "default_language": "english",
                        "weights": {"a": 10, "b": 2},
                    }
                ]
            }

        Doc.drop_collection()
        Doc.ensure_indexes()
        actual = Doc.compare_indexes()
        expected = {"missing": [], "extra": []}
        assert actual == expected

    def test_list_indexes_inheritance(self):
        """ ensure that all of the indexes are listed regardless of the super-
        or sub-class that we call it from
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()

            meta = {"indexes": [("author", "tags")]}

        class BlogPostWithTagsAndExtraText(BlogPostWithTags):
            extra_text = StringField()

            meta = {"indexes": [("author", "tags", "extra_text")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        BlogPostWithTagsAndExtraText.ensure_indexes()

        # Same (shared-collection) index list from every level of the hierarchy.
        assert BlogPost.list_indexes() == BlogPostWithTags.list_indexes()
        assert BlogPost.list_indexes() == BlogPostWithTagsAndExtraText.list_indexes()
        assert BlogPost.list_indexes() == [
            [("_cls", 1), ("author", 1), ("tags", 1)],
            [("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)],
            [(u"_id", 1)],
            [("_cls", 1)],
        ]

    def test_register_delete_rule_inherited(self):
        """A reverse_delete_rule declared on a base class is registered for
        the base class and every subclass."""

        class Vaccine(Document):
            name = StringField(required=True)

            meta = {"indexes": ["name"]}

        class Animal(Document):
            family = StringField(required=True)
            vaccine_made = ListField(
                ReferenceField("Vaccine", reverse_delete_rule=PULL)
            )

            meta = {"allow_inheritance": True, "indexes": ["family"]}

        class Cat(Animal):
            name = StringField(required=True)

        assert Vaccine._meta["delete_rules"][(Animal, "vaccine_made")] == PULL
        assert Vaccine._meta["delete_rules"][(Cat, "vaccine_made")] == PULL

    def test_collection_naming(self):
        """Ensure that a collection with a specified name may be used.
        """

        # Default: snake_cased class name.
        class DefaultNamingTest(Document):
            pass

        assert "default_naming_test" == DefaultNamingTest._get_collection_name()

        # Explicit string override.
        class CustomNamingTest(Document):
            meta = {"collection": "pimp_my_collection"}

        assert "pimp_my_collection" == CustomNamingTest._get_collection_name()

        # Callable override (receives the class).
        class DynamicNamingTest(Document):
            meta = {"collection": lambda c: "DYNAMO"}

        assert "DYNAMO" == DynamicNamingTest._get_collection_name()

        # Use Abstract class to handle backwards compatibility
        class BaseDocument(Document):
            meta = {"abstract": True, "collection": lambda c: c.__name__.lower()}

        class OldNamingConvention(BaseDocument):
            pass

        assert "oldnamingconvention" == OldNamingConvention._get_collection_name()

        class InheritedAbstractNamingTest(BaseDocument):
            meta = {"collection": "wibble"}

        assert "wibble" == InheritedAbstractNamingTest._get_collection_name()

        # Mixin tests
        class BaseMixin(object):
            meta = {"collection": lambda c: c.__name__.lower()}

        class OldMixinNamingConvention(Document, BaseMixin):
            pass

        assert (
            "oldmixinnamingconvention"
            == OldMixinNamingConvention._get_collection_name()
        )

        class BaseMixin(object):
            meta = {"collection": lambda c: c.__name__.lower()}

        class BaseDocument(Document, BaseMixin):
            meta = {"allow_inheritance": True}

        class MyDocument(BaseDocument):
            pass

        # Inheriting (non-abstract) documents share the base's collection.
        assert "basedocument" == MyDocument._get_collection_name()

    def test_custom_collection_name_operations(self):
        """Ensure that a collection with a specified name is used as expected.
        """
        collection_name = "personCollTest"

        class Person(Document):
            name = StringField()
            meta = {"collection": collection_name}

        Person(name="Test User").save()
        assert collection_name in list_collection_names(self.db)

        # Both raw pymongo access and the queryset see the same document.
        user_obj = self.db[collection_name].find_one()
        assert user_obj["name"] == "Test User"

        user_obj = Person.objects[0]
        assert user_obj.name == "Test User"

        Person.drop_collection()
        assert collection_name not in list_collection_names(self.db)

    def test_collection_name_and_primary(self):
        """Ensure that a collection with a specified name may be used.
        """

        class Person(Document):
            # The name doubles as _id via primary_key=True.
            name = StringField(primary_key=True)
            meta = {"collection": "app"}

        Person(name="Test User").save()

        user_obj = Person.objects.first()
        assert user_obj.name == "Test User"

        Person.drop_collection()
|  |  | ||||||
# Allow running this test module directly with the stdlib unittest runner.
if __name__ == "__main__":
    unittest.main()
							
								
								
									
										953
									
								
								tests/document/test_delta.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										953
									
								
								tests/document/test_delta.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,953 @@ | |||||||
|  | import unittest | ||||||
|  |  | ||||||
|  | from bson import SON | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.pymongo_support import list_collection_names | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestDelta(MongoDBTestCase): | ||||||
    def setUp(self):
        """Create the base Person document class shared by the delta tests."""
        super(TestDelta, self).setUp()

        class Person(Document):
            name = StringField()
            age = IntField()

            # Plain class attribute (not a field) — delta tracking must ignore it.
            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person
|  |  | ||||||
|  |     def tearDown(self): | ||||||
|  |         for collection in list_collection_names(self.db): | ||||||
|  |             self.db.drop_collection(collection) | ||||||
|  |  | ||||||
|  |     def test_delta(self): | ||||||
|  |         self.delta(Document) | ||||||
|  |         self.delta(DynamicDocument) | ||||||
|  |  | ||||||
    @staticmethod
    def delta(DocClass):
        """Verify _get_changed_fields/_delta for simple top-level fields.

        ``DocClass`` is either Document or DynamicDocument so both
        implementations are checked against the same expectations.
        """
        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # A freshly loaded document reports no pending changes.
        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        doc.string_field = "hello"
        assert doc._get_changed_fields() == ["string_field"]
        assert doc._delta() == ({"string_field": "hello"}, {})

        doc._changed_fields = []
        doc.int_field = 1
        assert doc._get_changed_fields() == ["int_field"]
        assert doc._delta() == ({"int_field": 1}, {})

        doc._changed_fields = []
        dict_value = {"hello": "world", "ping": "pong"}
        doc.dict_field = dict_value
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({"dict_field": dict_value}, {})

        doc._changed_fields = []
        list_value = ["1", 2, {"hello": "world"}]
        doc.list_field = list_value
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({"list_field": list_value}, {})

        # Test unsetting — assigning an empty container yields an unset delta.
        doc._changed_fields = []
        doc.dict_field = {}
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({}, {"dict_field": 1})

        doc._changed_fields = []
        doc.list_field = []
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({}, {"list_field": 1})
|  |  | ||||||
|  |     def test_delta_recursive(self): | ||||||
|  |         self.delta_recursive(Document, EmbeddedDocument) | ||||||
|  |         self.delta_recursive(DynamicDocument, EmbeddedDocument) | ||||||
|  |         self.delta_recursive(Document, DynamicEmbeddedDocument) | ||||||
|  |         self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument) | ||||||
|  |  | ||||||
    def delta_recursive(self, DocClass, EmbeddedClass):
        """Verify delta tracking through an embedded document field.

        Parameterised over (Dynamic)Document x (Dynamic)EmbeddedDocument so
        every combination is held to the same expectations.
        """
        class Embedded(EmbeddedClass):
            id = StringField()
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()
            embedded_field = EmbeddedDocumentField(Embedded)

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # A freshly loaded document reports no pending changes.
        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        embedded_1 = Embedded()
        embedded_1.id = "010101"
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {"hello": "world"}
        embedded_1.list_field = ["1", 2, {"hello": "world"}]
        doc.embedded_field = embedded_1

        # Assigning a whole embedded doc marks only the top-level field.
        assert doc._get_changed_fields() == ["embedded_field"]

        embedded_delta = {
            "id": "010101",
            "string_field": "hello",
            "int_field": 1,
            "dict_field": {"hello": "world"},
            "list_field": ["1", 2, {"hello": "world"}],
        }
        assert doc.embedded_field._delta() == (embedded_delta, {})
        assert doc._delta() == ({"embedded_field": embedded_delta}, {})

        doc.save()
        doc = doc.reload(10)

        # Emptying a nested dict produces a dotted unset-style delta key.
        doc.embedded_field.dict_field = {}
        assert doc._get_changed_fields() == ["embedded_field.dict_field"]
        assert doc.embedded_field._delta() == ({}, {"dict_field": 1})
        assert doc._delta() == ({}, {"embedded_field.dict_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.dict_field == {}

        # Emptying a nested list behaves the same way.
        doc.embedded_field.list_field = []
        assert doc._get_changed_fields() == ["embedded_field.list_field"]
        assert doc.embedded_field._delta() == ({}, {"list_field": 1})
        assert doc._delta() == ({}, {"embedded_field.list_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field == []

        embedded_2 = Embedded()
        embedded_2.string_field = "hello"
        embedded_2.int_field = 1
        embedded_2.dict_field = {"hello": "world"}
        embedded_2.list_field = ["1", 2, {"hello": "world"}]

        # An embedded doc stored inside a list serialises with its _cls marker.
        doc.embedded_field.list_field = ["1", 2, embedded_2]
        assert doc._get_changed_fields() == ["embedded_field.list_field"]

        assert doc.embedded_field._delta() == (
            {
                "list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "string_field": "hello",
                        "dict_field": {"hello": "world"},
                        "int_field": 1,
                        "list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )

        assert doc._delta() == (
            {
                "embedded_field.list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "string_field": "hello",
                        "dict_field": {"hello": "world"},
                        "int_field": 1,
                        "list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)

        assert doc.embedded_field.list_field[0] == "1"
        assert doc.embedded_field.list_field[1] == 2
        for k in doc.embedded_field.list_field[2]._fields:
            assert doc.embedded_field.list_field[2][k] == embedded_2[k]

        # Changing one field of a list item yields a positional delta key.
        doc.embedded_field.list_field[2].string_field = "world"
        assert doc._get_changed_fields() == ["embedded_field.list_field.2.string_field"]
        assert doc.embedded_field._delta() == (
            {"list_field.2.string_field": "world"},
            {},
        )
        assert doc._delta() == (
            {"embedded_field.list_field.2.string_field": "world"},
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "world"

        # Test multiple assignments — re-assigning the item collapses the
        # delta to the whole list element.
        doc.embedded_field.list_field[2].string_field = "hello world"
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        assert doc._get_changed_fields() == ["embedded_field.list_field.2"]
        assert doc.embedded_field._delta() == (
            {
                "list_field.2": {
                    "_cls": "Embedded",
                    "string_field": "hello world",
                    "int_field": 1,
                    "list_field": ["1", 2, {"hello": "world"}],
                    "dict_field": {"hello": "world"},
                }
            },
            {},
        )
        assert doc._delta() == (
            {
                "embedded_field.list_field.2": {
                    "_cls": "Embedded",
                    "string_field": "hello world",
                    "int_field": 1,
                    "list_field": ["1", 2, {"hello": "world"}],
                    "dict_field": {"hello": "world"},
                }
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "hello world"

        # Test list native methods — mutations mark the whole nested list.
        doc.embedded_field.list_field[2].list_field.pop(0)
        assert doc._delta() == (
            {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}]},
            {},
        )
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        assert doc._delta() == (
            {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}, 1]},
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]

        # Deleting a dict key inside a nested list yields an unset delta.
        del doc.embedded_field.list_field[2].list_field[2]["hello"]
        assert doc._delta() == (
            {},
            {"embedded_field.list_field.2.list_field.2.hello": 1},
        )
        doc.save()
        doc = doc.reload(10)

        del doc.embedded_field.list_field[2].list_field
        assert doc._delta() == ({}, {"embedded_field.list_field.2.list_field": 1})

        doc.save()
        doc = doc.reload(10)

        doc.dict_field["Embedded"] = embedded_1
        doc.save()
        doc = doc.reload(10)

        # Embedded docs stored as dict values track changes by key path.
        doc.dict_field["Embedded"].string_field = "Hello World"
        assert doc._get_changed_fields() == ["dict_field.Embedded.string_field"]
        assert doc._delta() == ({"dict_field.Embedded.string_field": "Hello World"}, {})
|  |  | ||||||
|  |     def test_circular_reference_deltas(self): | ||||||
|  |         self.circular_reference_deltas(Document, Document) | ||||||
|  |         self.circular_reference_deltas(Document, DynamicDocument) | ||||||
|  |         self.circular_reference_deltas(DynamicDocument, Document) | ||||||
|  |         self.circular_reference_deltas(DynamicDocument, DynamicDocument) | ||||||
|  |  | ||||||
    def circular_reference_deltas(self, DocClass1, DocClass2):
        """Ensure two documents that reference each other save and reload."""
        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField("Organization"))

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField("Person")

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        organization = Organization(name="company").save()

        # Wire up the circular references after both documents have ids.
        person.owns.append(organization)
        organization.owner = person

        person.save()
        organization.save()

        p = Person.objects[0].select_related()
        o = Organization.objects.first()
        assert p.owns[0] == o
        assert o.owner == p
|  |  | ||||||
|  |     def test_circular_reference_deltas_2(self): | ||||||
|  |         self.circular_reference_deltas_2(Document, Document) | ||||||
|  |         self.circular_reference_deltas_2(Document, DynamicDocument) | ||||||
|  |         self.circular_reference_deltas_2(DynamicDocument, Document) | ||||||
|  |         self.circular_reference_deltas_2(DynamicDocument, DynamicDocument) | ||||||
|  |  | ||||||
    def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True):
        """Ensure a three-way circular reference graph saves and reloads.

        Returns the (person, organization, employee) documents so callers
        can run further checks on them.
        """
        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField("Organization", dbref=dbref))
            employer = ReferenceField("Organization", dbref=dbref)

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField("Person", dbref=dbref)
            employees = ListField(ReferenceField("Person", dbref=dbref))

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        employee = Person(name="employee").save()
        organization = Organization(name="company").save()

        # Wire up the circular references once all documents have ids.
        person.owns.append(organization)
        organization.owner = person

        organization.employees.append(employee)
        employee.employer = organization

        person.save()
        organization.save()
        employee.save()

        p = Person.objects.get(name="owner")
        e = Person.objects.get(name="employee")
        o = Organization.objects.first()

        assert p.owns[0] == o
        assert o.owner == p
        assert e.employer == o

        return person, organization, employee
|  |  | ||||||
|  |     def test_delta_db_field(self): | ||||||
|  |         self.delta_db_field(Document) | ||||||
|  |         self.delta_db_field(DynamicDocument) | ||||||
|  |  | ||||||
    def delta_db_field(self, DocClass):
        """Verify deltas are keyed by db_field names, not attribute names."""
        class Doc(DocClass):
            string_field = StringField(db_field="db_string_field")
            int_field = IntField(db_field="db_int_field")
            dict_field = DictField(db_field="db_dict_field")
            list_field = ListField(db_field="db_list_field")

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        # Changed-field names are reported using the db_field aliases.
        doc.string_field = "hello"
        assert doc._get_changed_fields() == ["db_string_field"]
        assert doc._delta() == ({"db_string_field": "hello"}, {})

        doc._changed_fields = []
        doc.int_field = 1
        assert doc._get_changed_fields() == ["db_int_field"]
        assert doc._delta() == ({"db_int_field": 1}, {})

        doc._changed_fields = []
        dict_value = {"hello": "world", "ping": "pong"}
        doc.dict_field = dict_value
        assert doc._get_changed_fields() == ["db_dict_field"]
        assert doc._delta() == ({"db_dict_field": dict_value}, {})

        doc._changed_fields = []
        list_value = ["1", 2, {"hello": "world"}]
        doc.list_field = list_value
        assert doc._get_changed_fields() == ["db_list_field"]
        assert doc._delta() == ({"db_list_field": list_value}, {})

        # Test unsetting — empty containers yield unset deltas under db names.
        doc._changed_fields = []
        doc.dict_field = {}
        assert doc._get_changed_fields() == ["db_dict_field"]
        assert doc._delta() == ({}, {"db_dict_field": 1})

        doc._changed_fields = []
        doc.list_field = []
        assert doc._get_changed_fields() == ["db_list_field"]
        assert doc._delta() == ({}, {"db_list_field": 1})

        # Test it saves that data
        doc = Doc()
        doc.save()

        doc.string_field = "hello"
        doc.int_field = 1
        doc.dict_field = {"hello": "world"}
        doc.list_field = ["1", 2, {"hello": "world"}]
        doc.save()
        doc = doc.reload(10)

        assert doc.string_field == "hello"
        assert doc.int_field == 1
        assert doc.dict_field == {"hello": "world"}
        assert doc.list_field == ["1", 2, {"hello": "world"}]
|  |  | ||||||
    def test_delta_recursive_db_field_on_doc_and_embeddeddoc(self):
        # Document + EmbeddedDocument combination.
        self.delta_recursive_db_field(Document, EmbeddedDocument)
|  |  | ||||||
    def test_delta_recursive_db_field_on_doc_and_dynamicembeddeddoc(self):
        # Document + DynamicEmbeddedDocument combination.
        self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)
|  |  | ||||||
    def test_delta_recursive_db_field_on_dynamicdoc_and_embeddeddoc(self):
        # DynamicDocument + EmbeddedDocument combination.
        self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)
|  |  | ||||||
    def test_delta_recursive_db_field_on_dynamicdoc_and_dynamicembeddeddoc(self):
        # DynamicDocument + DynamicEmbeddedDocument combination.
        self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)
|  |  | ||||||
    @staticmethod
    def delta_recursive_db_field(DocClass, EmbeddedClass):
        """Verify recursive delta tracking uses db_field names throughout.

        Mirrors ``delta_recursive`` but with every field aliased via
        ``db_field`` so all delta keys must use the ``db_*`` names.
        """
        class Embedded(EmbeddedClass):
            string_field = StringField(db_field="db_string_field")
            int_field = IntField(db_field="db_int_field")
            dict_field = DictField(db_field="db_dict_field")
            list_field = ListField(db_field="db_list_field")

        class Doc(DocClass):
            string_field = StringField(db_field="db_string_field")
            int_field = IntField(db_field="db_int_field")
            dict_field = DictField(db_field="db_dict_field")
            list_field = ListField(db_field="db_list_field")
            embedded_field = EmbeddedDocumentField(
                Embedded, db_field="db_embedded_field"
            )

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # A freshly loaded document reports no pending changes.
        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        embedded_1 = Embedded()
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {"hello": "world"}
        embedded_1.list_field = ["1", 2, {"hello": "world"}]
        doc.embedded_field = embedded_1

        # Changed-field names are reported via the db_field aliases.
        assert doc._get_changed_fields() == ["db_embedded_field"]

        embedded_delta = {
            "db_string_field": "hello",
            "db_int_field": 1,
            "db_dict_field": {"hello": "world"},
            "db_list_field": ["1", 2, {"hello": "world"}],
        }
        assert doc.embedded_field._delta() == (embedded_delta, {})
        assert doc._delta() == ({"db_embedded_field": embedded_delta}, {})

        doc.save()
        doc = doc.reload(10)

        # Emptying a nested dict produces a dotted unset-style delta key.
        doc.embedded_field.dict_field = {}
        assert doc._get_changed_fields() == ["db_embedded_field.db_dict_field"]
        assert doc.embedded_field._delta() == ({}, {"db_dict_field": 1})
        assert doc._delta() == ({}, {"db_embedded_field.db_dict_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.dict_field == {}

        # Reloading clears the change tracker.
        assert doc._get_changed_fields() == []
        doc.embedded_field.list_field = []
        assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
        assert doc.embedded_field._delta() == ({}, {"db_list_field": 1})
        assert doc._delta() == ({}, {"db_embedded_field.db_list_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field == []

        embedded_2 = Embedded()
        embedded_2.string_field = "hello"
        embedded_2.int_field = 1
        embedded_2.dict_field = {"hello": "world"}
        embedded_2.list_field = ["1", 2, {"hello": "world"}]

        # An embedded doc stored inside a list serialises with its _cls marker.
        doc.embedded_field.list_field = ["1", 2, embedded_2]
        assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
        assert doc.embedded_field._delta() == (
            {
                "db_list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "db_string_field": "hello",
                        "db_dict_field": {"hello": "world"},
                        "db_int_field": 1,
                        "db_list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )

        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "db_string_field": "hello",
                        "db_dict_field": {"hello": "world"},
                        "db_int_field": 1,
                        "db_list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )
        doc.save()
        # Saving flushes the pending change list.
        assert doc._get_changed_fields() == []
        doc = doc.reload(10)

        assert doc.embedded_field.list_field[0] == "1"
        assert doc.embedded_field.list_field[1] == 2
        for k in doc.embedded_field.list_field[2]._fields:
            assert doc.embedded_field.list_field[2][k] == embedded_2[k]

        # Changing one field of a list item yields a positional delta key.
        doc.embedded_field.list_field[2].string_field = "world"
        assert doc._get_changed_fields() == [
            "db_embedded_field.db_list_field.2.db_string_field"
        ]
        assert doc.embedded_field._delta() == (
            {"db_list_field.2.db_string_field": "world"},
            {},
        )
        assert doc._delta() == (
            {"db_embedded_field.db_list_field.2.db_string_field": "world"},
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "world"

        # Test multiple assignments — re-assigning the item collapses the
        # delta to the whole list element.
        doc.embedded_field.list_field[2].string_field = "hello world"
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        assert doc._get_changed_fields() == ["db_embedded_field.db_list_field.2"]
        assert doc.embedded_field._delta() == (
            {
                "db_list_field.2": {
                    "_cls": "Embedded",
                    "db_string_field": "hello world",
                    "db_int_field": 1,
                    "db_list_field": ["1", 2, {"hello": "world"}],
                    "db_dict_field": {"hello": "world"},
                }
            },
            {},
        )
        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field.2": {
                    "_cls": "Embedded",
                    "db_string_field": "hello world",
                    "db_int_field": 1,
                    "db_list_field": ["1", 2, {"hello": "world"}],
                    "db_dict_field": {"hello": "world"},
                }
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "hello world"

        # Test list native methods — mutations mark the whole nested list.
        doc.embedded_field.list_field[2].list_field.pop(0)
        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field.2.db_list_field": [
                    2,
                    {"hello": "world"},
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field.2.db_list_field": [
                    2,
                    {"hello": "world"},
                    1,
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]

        # Deleting a dict key inside a nested list yields an unset delta.
        del doc.embedded_field.list_field[2].list_field[2]["hello"]
        assert doc._delta() == (
            {},
            {"db_embedded_field.db_list_field.2.db_list_field.2.hello": 1},
        )
        doc.save()
        doc = doc.reload(10)

        assert doc._delta() == ({}, {},)
        del doc.embedded_field.list_field[2].list_field
        assert doc._delta() == (
            {},
            {"db_embedded_field.db_list_field.2.db_list_field": 1},
        )
|  |  | ||||||
    def test_delta_for_dynamic_documents(self):
        """Delta computation on a DynamicDocument covers declared and dynamic fields."""

        class Person(DynamicDocument):
            name = StringField()
            meta = {"allow_inheritance": True}

        Person.drop_collection()

        # Unsaved document: the whole doc (including the dynamic ``age``)
        # is reported as the set-delta, in insertion order.
        p = Person(name="James", age=34)
        assert p._delta() == (
            SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
            {},
        )

        # Setting then deleting a dynamic field before save leaves the
        # delta exactly as it was.
        p.doc = 123
        del p.doc
        assert p._delta() == (
            SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
            {},
        )

        p = Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        # After save, only the modified dynamic field appears in the delta.
        p.age = 24
        assert p.age == 24
        assert p._get_changed_fields() == ["age"]
        assert p._delta() == ({"age": 24}, {})

        # Same behaviour on a freshly queried document.
        p = Person.objects(age=22).get()
        p.age = 24
        assert p.age == 24
        assert p._get_changed_fields() == ["age"]
        assert p._delta() == ({"age": 24}, {})

        p.save()
        assert 1 == Person.objects(age=24).count()
|  |  | ||||||
    def test_dynamic_delta(self):
        """Delta tracking for purely dynamic fields on a saved document."""

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # A freshly fetched document has no pending changes.
        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        # Each newly set dynamic field shows up as a single set-delta entry.
        doc.string_field = "hello"
        assert doc._get_changed_fields() == ["string_field"]
        assert doc._delta() == ({"string_field": "hello"}, {})

        doc._changed_fields = []
        doc.int_field = 1
        assert doc._get_changed_fields() == ["int_field"]
        assert doc._delta() == ({"int_field": 1}, {})

        doc._changed_fields = []
        dict_value = {"hello": "world", "ping": "pong"}
        doc.dict_field = dict_value
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({"dict_field": dict_value}, {})

        doc._changed_fields = []
        list_value = ["1", 2, {"hello": "world"}]
        doc.list_field = list_value
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({"list_field": list_value}, {})

        # Test unsetting: assigning an empty container becomes an unset-delta.
        doc._changed_fields = []
        doc.dict_field = {}
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({}, {"dict_field": 1})

        doc._changed_fields = []
        doc.list_field = []
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({}, {"list_field": 1})
|  |  | ||||||
|  |     def test_delta_with_dbref_true(self): | ||||||
|  |         person, organization, employee = self.circular_reference_deltas_2( | ||||||
|  |             Document, Document, True | ||||||
|  |         ) | ||||||
|  |         employee.name = "test" | ||||||
|  |  | ||||||
|  |         assert organization._get_changed_fields() == [] | ||||||
|  |  | ||||||
|  |         updates, removals = organization._delta() | ||||||
|  |         assert removals == {} | ||||||
|  |         assert updates == {} | ||||||
|  |  | ||||||
|  |         organization.employees.append(person) | ||||||
|  |         updates, removals = organization._delta() | ||||||
|  |         assert removals == {} | ||||||
|  |         assert "employees" in updates | ||||||
|  |  | ||||||
|  |     def test_delta_with_dbref_false(self): | ||||||
|  |         person, organization, employee = self.circular_reference_deltas_2( | ||||||
|  |             Document, Document, False | ||||||
|  |         ) | ||||||
|  |         employee.name = "test" | ||||||
|  |  | ||||||
|  |         assert organization._get_changed_fields() == [] | ||||||
|  |  | ||||||
|  |         updates, removals = organization._delta() | ||||||
|  |         assert removals == {} | ||||||
|  |         assert updates == {} | ||||||
|  |  | ||||||
|  |         organization.employees.append(person) | ||||||
|  |         updates, removals = organization._delta() | ||||||
|  |         assert removals == {} | ||||||
|  |         assert "employees" in updates | ||||||
|  |  | ||||||
|  |     def test_nested_nested_fields_mark_as_changed(self): | ||||||
|  |         class EmbeddedDoc(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class MyDoc(Document): | ||||||
|  |             subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc))) | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         MyDoc.drop_collection() | ||||||
|  |  | ||||||
|  |         MyDoc(name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}}).save() | ||||||
|  |  | ||||||
|  |         mydoc = MyDoc.objects.first() | ||||||
|  |         subdoc = mydoc.subs["a"]["b"] | ||||||
|  |         subdoc.name = "bar" | ||||||
|  |  | ||||||
|  |         assert subdoc._get_changed_fields() == ["name"] | ||||||
|  |         assert mydoc._get_changed_fields() == ["subs.a.b.name"] | ||||||
|  |  | ||||||
|  |         mydoc._clear_changed_fields() | ||||||
|  |         assert mydoc._get_changed_fields() == [] | ||||||
|  |  | ||||||
    def test_nested_nested_fields_db_field_set__gets_mark_as_changed_and_cleaned(self):
        """Changed-field tracking reports db_field names, not attribute names."""

        class EmbeddedDoc(EmbeddedDocument):
            name = StringField(db_field="db_name")

        class MyDoc(Document):
            embed = EmbeddedDocumentField(EmbeddedDoc, db_field="db_embed")
            name = StringField(db_field="db_name")

        MyDoc.drop_collection()

        MyDoc(name="testcase1", embed=EmbeddedDoc(name="foo")).save()

        # Mutating a field inside the embedded doc: paths use db_field names.
        mydoc = MyDoc.objects.first()
        mydoc.embed.name = "foo1"

        assert mydoc.embed._get_changed_fields() == ["db_name"]
        assert mydoc._get_changed_fields() == ["db_embed.db_name"]

        # Replacing the embedded doc wholesale marks only the parent field.
        mydoc = MyDoc.objects.first()
        embed = EmbeddedDoc(name="foo2")
        embed.name = "bar"
        mydoc.embed = embed

        assert embed._get_changed_fields() == ["db_name"]
        assert mydoc._get_changed_fields() == ["db_embed"]

        mydoc._clear_changed_fields()
        assert mydoc._get_changed_fields() == []
|  |  | ||||||
|  |     def test_lower_level_mark_as_changed(self): | ||||||
|  |         class EmbeddedDoc(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class MyDoc(Document): | ||||||
|  |             subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) | ||||||
|  |  | ||||||
|  |         MyDoc.drop_collection() | ||||||
|  |  | ||||||
|  |         MyDoc().save() | ||||||
|  |  | ||||||
|  |         mydoc = MyDoc.objects.first() | ||||||
|  |         mydoc.subs["a"] = EmbeddedDoc() | ||||||
|  |         assert mydoc._get_changed_fields() == ["subs.a"] | ||||||
|  |  | ||||||
|  |         subdoc = mydoc.subs["a"] | ||||||
|  |         subdoc.name = "bar" | ||||||
|  |  | ||||||
|  |         assert subdoc._get_changed_fields() == ["name"] | ||||||
|  |         assert mydoc._get_changed_fields() == ["subs.a"] | ||||||
|  |         mydoc.save() | ||||||
|  |  | ||||||
|  |         mydoc._clear_changed_fields() | ||||||
|  |         assert mydoc._get_changed_fields() == [] | ||||||
|  |  | ||||||
|  |     def test_upper_level_mark_as_changed(self): | ||||||
|  |         class EmbeddedDoc(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class MyDoc(Document): | ||||||
|  |             subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) | ||||||
|  |  | ||||||
|  |         MyDoc.drop_collection() | ||||||
|  |  | ||||||
|  |         MyDoc(subs={"a": EmbeddedDoc(name="foo")}).save() | ||||||
|  |  | ||||||
|  |         mydoc = MyDoc.objects.first() | ||||||
|  |         subdoc = mydoc.subs["a"] | ||||||
|  |         subdoc.name = "bar" | ||||||
|  |  | ||||||
|  |         assert subdoc._get_changed_fields() == ["name"] | ||||||
|  |         assert mydoc._get_changed_fields() == ["subs.a.name"] | ||||||
|  |  | ||||||
|  |         mydoc.subs["a"] = EmbeddedDoc() | ||||||
|  |         assert mydoc._get_changed_fields() == ["subs.a"] | ||||||
|  |         mydoc.save() | ||||||
|  |  | ||||||
|  |         mydoc._clear_changed_fields() | ||||||
|  |         assert mydoc._get_changed_fields() == [] | ||||||
|  |  | ||||||
    def test_referenced_object_changed_attributes(self):
        """Ensures that when you save a new reference to a field, the referenced object isn't altered"""

        class Organization(Document):
            name = StringField()

        class User(Document):
            name = StringField()
            org = ReferenceField("Organization", required=True)

        Organization.drop_collection()
        User.drop_collection()

        org1 = Organization(name="Org 1")
        org1.save()

        org2 = Organization(name="Org 2")
        org2.save()

        user = User(name="Fred", org=org1)
        user.save()

        # Baseline: everything persisted as written.
        org1.reload()
        org2.reload()
        user.reload()
        assert org1.name == "Org 1"
        assert org2.name == "Org 2"
        assert user.name == "Fred"

        # Re-point the user's reference while the target org also has an
        # unsaved rename pending.
        user.name = "Harold"
        user.org = org2

        org2.name = "New Org 2"
        assert org2.name == "New Org 2"

        # Saving the user must not clobber org2's in-memory rename.
        user.save()
        org2.save()

        assert org2.name == "New Org 2"
        org2.reload()
        assert org2.name == "New Org 2"
|  |  | ||||||
|  |     def test_delta_for_nested_map_fields(self): | ||||||
|  |         class UInfoDocument(Document): | ||||||
|  |             phone = StringField() | ||||||
|  |  | ||||||
|  |         class EmbeddedRole(EmbeddedDocument): | ||||||
|  |             type = StringField() | ||||||
|  |  | ||||||
|  |         class EmbeddedUser(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  |             roles = MapField(field=EmbeddedDocumentField(EmbeddedRole)) | ||||||
|  |             rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole)) | ||||||
|  |             info = ReferenceField(UInfoDocument) | ||||||
|  |  | ||||||
|  |         class Doc(Document): | ||||||
|  |             users = MapField(field=EmbeddedDocumentField(EmbeddedUser)) | ||||||
|  |             num = IntField(default=-1) | ||||||
|  |  | ||||||
|  |         Doc.drop_collection() | ||||||
|  |  | ||||||
|  |         doc = Doc(num=1) | ||||||
|  |         doc.users["007"] = EmbeddedUser(name="Agent007") | ||||||
|  |         doc.save() | ||||||
|  |  | ||||||
|  |         uinfo = UInfoDocument(phone="79089269066") | ||||||
|  |         uinfo.save() | ||||||
|  |  | ||||||
|  |         d = Doc.objects(num=1).first() | ||||||
|  |         d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin") | ||||||
|  |         d.users["007"]["rolist"].append(EmbeddedRole(type="oops")) | ||||||
|  |         d.users["007"]["info"] = uinfo | ||||||
|  |         delta = d._delta() | ||||||
|  |         assert True == ("users.007.roles.666" in delta[0]) | ||||||
|  |         assert True == ("users.007.rolist" in delta[0]) | ||||||
|  |         assert True == ("users.007.info" in delta[0]) | ||||||
|  |         assert "superadmin" == delta[0]["users.007.roles.666"]["type"] | ||||||
|  |         assert "oops" == delta[0]["users.007.rolist"][0]["type"] | ||||||
|  |         assert uinfo.id == delta[0]["users.007.info"] | ||||||
|  |  | ||||||
|  |  | ||||||
# Allow running this test module directly with ``python <file>``.
if __name__ == "__main__":
    unittest.main()
							
								
								
									
										439
									
								
								tests/document/test_dynamic.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										439
									
								
								tests/document/test_dynamic.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,439 @@ | |||||||
|  | import unittest | ||||||
|  |  | ||||||
|  | import pytest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  | __all__ = ("TestDynamicDocument",) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestDynamicDocument(MongoDBTestCase): | ||||||
    def setUp(self):
        """Create a fresh ``Person`` DynamicDocument class and drop its collection."""
        super(TestDynamicDocument, self).setUp()

        class Person(DynamicDocument):
            name = StringField()
            # Inheritance is enabled so subclasses (see test_inheritance)
            # share this collection and documents carry a ``_cls`` marker.
            meta = {"allow_inheritance": True}

        Person.drop_collection()

        self.Person = Person
|  |  | ||||||
|  |     def test_simple_dynamic_document(self): | ||||||
|  |         """Ensures simple dynamic documents are saved correctly""" | ||||||
|  |  | ||||||
|  |         p = self.Person() | ||||||
|  |         p.name = "James" | ||||||
|  |         p.age = 34 | ||||||
|  |  | ||||||
|  |         assert p.to_mongo() == {"_cls": "Person", "name": "James", "age": 34} | ||||||
|  |         assert p.to_mongo().keys() == ["_cls", "name", "age"] | ||||||
|  |         p.save() | ||||||
|  |         assert p.to_mongo().keys() == ["_id", "_cls", "name", "age"] | ||||||
|  |  | ||||||
|  |         assert self.Person.objects.first().age == 34 | ||||||
|  |  | ||||||
|  |         # Confirm no changes to self.Person | ||||||
|  |         assert not hasattr(self.Person, "age") | ||||||
|  |  | ||||||
|  |     def test_dynamic_document_parse_values_in_constructor_like_document_do(self): | ||||||
|  |         class ProductDynamicDocument(DynamicDocument): | ||||||
|  |             title = StringField() | ||||||
|  |             price = FloatField() | ||||||
|  |  | ||||||
|  |         class ProductDocument(Document): | ||||||
|  |             title = StringField() | ||||||
|  |             price = FloatField() | ||||||
|  |  | ||||||
|  |         product = ProductDocument(title="Blabla", price="12.5") | ||||||
|  |         dyn_product = ProductDynamicDocument(title="Blabla", price="12.5") | ||||||
|  |         assert product.price == dyn_product.price == 12.5 | ||||||
|  |  | ||||||
|  |     def test_change_scope_of_variable(self): | ||||||
|  |         """Test changing the scope of a dynamic field has no adverse effects""" | ||||||
|  |         p = self.Person() | ||||||
|  |         p.name = "Dean" | ||||||
|  |         p.misc = 22 | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         p = self.Person.objects.get() | ||||||
|  |         p.misc = {"hello": "world"} | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         p = self.Person.objects.get() | ||||||
|  |         assert p.misc == {"hello": "world"} | ||||||
|  |  | ||||||
|  |     def test_delete_dynamic_field(self): | ||||||
|  |         """Test deleting a dynamic field works""" | ||||||
|  |         self.Person.drop_collection() | ||||||
|  |         p = self.Person() | ||||||
|  |         p.name = "Dean" | ||||||
|  |         p.misc = 22 | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         p = self.Person.objects.get() | ||||||
|  |         p.misc = {"hello": "world"} | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         p = self.Person.objects.get() | ||||||
|  |         assert p.misc == {"hello": "world"} | ||||||
|  |         collection = self.db[self.Person._get_collection_name()] | ||||||
|  |         obj = collection.find_one() | ||||||
|  |         assert sorted(obj.keys()) == ["_cls", "_id", "misc", "name"] | ||||||
|  |  | ||||||
|  |         del p.misc | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         p = self.Person.objects.get() | ||||||
|  |         assert not hasattr(p, "misc") | ||||||
|  |  | ||||||
|  |         obj = collection.find_one() | ||||||
|  |         assert sorted(obj.keys()) == ["_cls", "_id", "name"] | ||||||
|  |  | ||||||
|  |     def test_reload_after_unsetting(self): | ||||||
|  |         p = self.Person() | ||||||
|  |         p.misc = 22 | ||||||
|  |         p.save() | ||||||
|  |         p.update(unset__misc=1) | ||||||
|  |         p.reload() | ||||||
|  |  | ||||||
|  |     def test_reload_dynamic_field(self): | ||||||
|  |         self.Person.objects.delete() | ||||||
|  |         p = self.Person.objects.create() | ||||||
|  |         p.update(age=1) | ||||||
|  |  | ||||||
|  |         assert len(p._data) == 3 | ||||||
|  |         assert sorted(p._data.keys()) == ["_cls", "id", "name"] | ||||||
|  |  | ||||||
|  |         p.reload() | ||||||
|  |         assert len(p._data) == 4 | ||||||
|  |         assert sorted(p._data.keys()) == ["_cls", "age", "id", "name"] | ||||||
|  |  | ||||||
|  |     def test_fields_without_underscore(self): | ||||||
|  |         """Ensure we can query dynamic fields""" | ||||||
|  |         Person = self.Person | ||||||
|  |  | ||||||
|  |         p = self.Person(name="Dean") | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         raw_p = Person.objects.as_pymongo().get(id=p.id) | ||||||
|  |         assert raw_p == {"_cls": u"Person", "_id": p.id, "name": u"Dean"} | ||||||
|  |  | ||||||
|  |         p.name = "OldDean" | ||||||
|  |         p.newattr = "garbage" | ||||||
|  |         p.save() | ||||||
|  |         raw_p = Person.objects.as_pymongo().get(id=p.id) | ||||||
|  |         assert raw_p == { | ||||||
|  |             "_cls": u"Person", | ||||||
|  |             "_id": p.id, | ||||||
|  |             "name": "OldDean", | ||||||
|  |             "newattr": u"garbage", | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |     def test_fields_containing_underscore(self): | ||||||
|  |         """Ensure we can query dynamic fields""" | ||||||
|  |  | ||||||
|  |         class WeirdPerson(DynamicDocument): | ||||||
|  |             name = StringField() | ||||||
|  |             _name = StringField() | ||||||
|  |  | ||||||
|  |         WeirdPerson.drop_collection() | ||||||
|  |  | ||||||
|  |         p = WeirdPerson(name="Dean", _name="Dean") | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) | ||||||
|  |         assert raw_p == {"_id": p.id, "_name": u"Dean", "name": u"Dean"} | ||||||
|  |  | ||||||
|  |         p.name = "OldDean" | ||||||
|  |         p._name = "NewDean" | ||||||
|  |         p._newattr1 = "garbage"  # Unknown fields won't be added | ||||||
|  |         p.save() | ||||||
|  |         raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) | ||||||
|  |         assert raw_p == {"_id": p.id, "_name": u"NewDean", "name": u"OldDean"} | ||||||
|  |  | ||||||
|  |     def test_dynamic_document_queries(self): | ||||||
|  |         """Ensure we can query dynamic fields""" | ||||||
|  |         p = self.Person() | ||||||
|  |         p.name = "Dean" | ||||||
|  |         p.age = 22 | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         assert 1 == self.Person.objects(age=22).count() | ||||||
|  |         p = self.Person.objects(age=22) | ||||||
|  |         p = p.get() | ||||||
|  |         assert 22 == p.age | ||||||
|  |  | ||||||
|  |     def test_complex_dynamic_document_queries(self): | ||||||
|  |         class Person(DynamicDocument): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         p = Person(name="test") | ||||||
|  |         p.age = "ten" | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         p1 = Person(name="test1") | ||||||
|  |         p1.age = "less then ten and a half" | ||||||
|  |         p1.save() | ||||||
|  |  | ||||||
|  |         p2 = Person(name="test2") | ||||||
|  |         p2.age = 10 | ||||||
|  |         p2.save() | ||||||
|  |  | ||||||
|  |         assert Person.objects(age__icontains="ten").count() == 2 | ||||||
|  |         assert Person.objects(age__gte=10).count() == 1 | ||||||
|  |  | ||||||
|  |     def test_complex_data_lookups(self): | ||||||
|  |         """Ensure you can query dynamic document dynamic fields""" | ||||||
|  |         p = self.Person() | ||||||
|  |         p.misc = {"hello": "world"} | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         assert 1 == self.Person.objects(misc__hello="world").count() | ||||||
|  |  | ||||||
|  |     def test_three_level_complex_data_lookups(self): | ||||||
|  |         """Ensure you can query three level document dynamic fields""" | ||||||
|  |         self.Person.objects.create(misc={"hello": {"hello2": "world"}}) | ||||||
|  |         assert 1 == self.Person.objects(misc__hello__hello2="world").count() | ||||||
|  |  | ||||||
|  |     def test_complex_embedded_document_validation(self): | ||||||
|  |         """Ensure embedded dynamic documents may be validated""" | ||||||
|  |  | ||||||
|  |         class Embedded(DynamicEmbeddedDocument): | ||||||
|  |             content = URLField() | ||||||
|  |  | ||||||
|  |         class Doc(DynamicDocument): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         Doc.drop_collection() | ||||||
|  |         doc = Doc() | ||||||
|  |  | ||||||
|  |         embedded_doc_1 = Embedded(content="http://mongoengine.org") | ||||||
|  |         embedded_doc_1.validate() | ||||||
|  |  | ||||||
|  |         embedded_doc_2 = Embedded(content="this is not a url") | ||||||
|  |         with pytest.raises(ValidationError): | ||||||
|  |             embedded_doc_2.validate() | ||||||
|  |  | ||||||
|  |         doc.embedded_field_1 = embedded_doc_1 | ||||||
|  |         doc.embedded_field_2 = embedded_doc_2 | ||||||
|  |         with pytest.raises(ValidationError): | ||||||
|  |             doc.validate() | ||||||
|  |  | ||||||
|  |     def test_inheritance(self): | ||||||
|  |         """Ensure that dynamic document plays nice with inheritance""" | ||||||
|  |  | ||||||
|  |         class Employee(self.Person): | ||||||
|  |             salary = IntField() | ||||||
|  |  | ||||||
|  |         Employee.drop_collection() | ||||||
|  |  | ||||||
|  |         assert "name" in Employee._fields | ||||||
|  |         assert "salary" in Employee._fields | ||||||
|  |         assert Employee._get_collection_name() == self.Person._get_collection_name() | ||||||
|  |  | ||||||
|  |         joe_bloggs = Employee() | ||||||
|  |         joe_bloggs.name = "Joe Bloggs" | ||||||
|  |         joe_bloggs.salary = 10 | ||||||
|  |         joe_bloggs.age = 20 | ||||||
|  |         joe_bloggs.save() | ||||||
|  |  | ||||||
|  |         assert 1 == self.Person.objects(age=20).count() | ||||||
|  |         assert 1 == Employee.objects(age=20).count() | ||||||
|  |  | ||||||
|  |         joe_bloggs = self.Person.objects.first() | ||||||
|  |         assert isinstance(joe_bloggs, Employee) | ||||||
|  |  | ||||||
|  |     def test_embedded_dynamic_document(self): | ||||||
|  |         """Test dynamic embedded documents""" | ||||||
|  |  | ||||||
|  |         class Embedded(DynamicEmbeddedDocument): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Doc(DynamicDocument): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         Doc.drop_collection() | ||||||
|  |         doc = Doc() | ||||||
|  |  | ||||||
|  |         embedded_1 = Embedded() | ||||||
|  |         embedded_1.string_field = "hello" | ||||||
|  |         embedded_1.int_field = 1 | ||||||
|  |         embedded_1.dict_field = {"hello": "world"} | ||||||
|  |         embedded_1.list_field = ["1", 2, {"hello": "world"}] | ||||||
|  |         doc.embedded_field = embedded_1 | ||||||
|  |  | ||||||
|  |         assert doc.to_mongo() == { | ||||||
|  |             "embedded_field": { | ||||||
|  |                 "_cls": "Embedded", | ||||||
|  |                 "string_field": "hello", | ||||||
|  |                 "int_field": 1, | ||||||
|  |                 "dict_field": {"hello": "world"}, | ||||||
|  |                 "list_field": ["1", 2, {"hello": "world"}], | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |         doc.save() | ||||||
|  |  | ||||||
|  |         doc = Doc.objects.first() | ||||||
|  |         assert doc.embedded_field.__class__ == Embedded | ||||||
|  |         assert doc.embedded_field.string_field == "hello" | ||||||
|  |         assert doc.embedded_field.int_field == 1 | ||||||
|  |         assert doc.embedded_field.dict_field == {"hello": "world"} | ||||||
|  |         assert doc.embedded_field.list_field == ["1", 2, {"hello": "world"}] | ||||||
|  |  | ||||||
    def test_complex_embedded_documents(self):
        """Test complex dynamic embedded documents setups"""

        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {"hello": "world"}

        # embedded_2 is nested inside embedded_1's list field below.
        embedded_2 = Embedded()
        embedded_2.string_field = "hello"
        embedded_2.int_field = 1
        embedded_2.dict_field = {"hello": "world"}
        embedded_2.list_field = ["1", 2, {"hello": "world"}]

        embedded_1.list_field = ["1", 2, embedded_2]
        doc.embedded_field = embedded_1

        # Nested dynamic embedded docs serialise recursively, each carrying
        # its own ``_cls`` marker.
        assert doc.to_mongo() == {
            "embedded_field": {
                "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "string_field": "hello",
                        "int_field": 1,
                        "dict_field": {"hello": "world"},
                        "list_field": ["1", 2, {"hello": "world"}],
                    },
                ],
            }
        }
        doc.save()
        doc = Doc.objects.first()
        assert doc.embedded_field.__class__ == Embedded
        assert doc.embedded_field.string_field == "hello"
        assert doc.embedded_field.int_field == 1
        assert doc.embedded_field.dict_field == {"hello": "world"}
        assert doc.embedded_field.list_field[0] == "1"
        assert doc.embedded_field.list_field[1] == 2

        # The nested embedded document round-trips as an Embedded instance.
        embedded_field = doc.embedded_field.list_field[2]

        assert embedded_field.__class__ == Embedded
        assert embedded_field.string_field == "hello"
        assert embedded_field.int_field == 1
        assert embedded_field.dict_field == {"hello": "world"}
        assert embedded_field.list_field == ["1", 2, {"hello": "world"}]
|  |  | ||||||
|  |     def test_dynamic_and_embedded(self): | ||||||
|  |         """Ensure embedded documents play nicely""" | ||||||
|  |  | ||||||
|  |         class Address(EmbeddedDocument): | ||||||
|  |             city = StringField() | ||||||
|  |  | ||||||
|  |         class Person(DynamicDocument): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         Person(name="Ross", address=Address(city="London")).save() | ||||||
|  |  | ||||||
|  |         person = Person.objects.first() | ||||||
|  |         person.address.city = "Lundenne" | ||||||
|  |         person.save() | ||||||
|  |  | ||||||
|  |         assert Person.objects.first().address.city == "Lundenne" | ||||||
|  |  | ||||||
|  |         person = Person.objects.first() | ||||||
|  |         person.address = Address(city="Londinium") | ||||||
|  |         person.save() | ||||||
|  |  | ||||||
|  |         assert Person.objects.first().address.city == "Londinium" | ||||||
|  |  | ||||||
|  |         person = Person.objects.first() | ||||||
|  |         person.age = 35 | ||||||
|  |         person.save() | ||||||
|  |         assert Person.objects.first().age == 35 | ||||||
|  |  | ||||||
    def test_dynamic_embedded_works_with_only(self):
        """Ensure custom fieldnames on a dynamic embedded document are found by qs.only()"""

        class Address(DynamicEmbeddedDocument):
            city = StringField()

        class Person(DynamicDocument):
            address = EmbeddedDocumentField(Address)

        Person.drop_collection()

        # `street_number` is not declared on Address — it is stored as a
        # dynamic field of the embedded document.
        Person(
            name="Eric", address=Address(city="San Francisco", street_number="1337")
        ).save()

        # The dynamic field must be readable on a full fetch AND through a
        # projection that names it explicitly via only().
        assert Person.objects.first().address.street_number == "1337"
        assert (
            Person.objects.only("address__street_number").first().address.street_number
            == "1337"
        )
|  |  | ||||||
    def test_dynamic_and_embedded_dict_access(self):
        """Ensure embedded dynamic documents work with dict[] style access"""

        class Address(EmbeddedDocument):
            city = StringField()

        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        Person(name="Ross", address=Address(city="London")).save()

        person = Person.objects.first()
        # Attribute-style dynamic assignment is the baseline behaviour...
        person.attrval = "This works"

        person["phone"] = "555-1212"  # but this should too

        # Same thing two levels deep
        person["address"]["city"] = "Lundenne"
        person.save()

        assert Person.objects.first().address.city == "Lundenne"

        assert Person.objects.first().phone == "555-1212"

        person = Person.objects.first()
        person.address = Address(city="Londinium")
        person.save()

        assert Person.objects.first().address.city == "Londinium"

        # dict-style assignment of a brand-new dynamic field persists too.
        person = Person.objects.first()
        person["age"] = 35
        person.save()
        assert Person.objects.first().age == 35
|  |  | ||||||
|  |  | ||||||
# Allow running this module directly; pytest discovers it as well.
if __name__ == "__main__":
    unittest.main()
							
								
								
									
										1081
									
								
								tests/document/test_indexes.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1081
									
								
								tests/document/test_indexes.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										630
									
								
								tests/document/test_inheritance.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										630
									
								
								tests/document/test_inheritance.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,630 @@ | |||||||
|  | import unittest | ||||||
|  | import warnings | ||||||
|  |  | ||||||
|  | import pytest | ||||||
|  |  | ||||||
|  | from mongoengine import ( | ||||||
|  |     BooleanField, | ||||||
|  |     Document, | ||||||
|  |     EmbeddedDocument, | ||||||
|  |     EmbeddedDocumentField, | ||||||
|  |     GenericReferenceField, | ||||||
|  |     IntField, | ||||||
|  |     ReferenceField, | ||||||
|  |     StringField, | ||||||
|  | ) | ||||||
|  | from mongoengine.pymongo_support import list_collection_names | ||||||
|  | from tests.fixtures import Base | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestInheritance(MongoDBTestCase): | ||||||
    def tearDown(self):
        # Drop every collection after each test so document/collection state
        # never leaks into the next test.
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)
|  |  | ||||||
    def test_constructor_cls(self):
        """Ensure _cls is set at construction time and survives a
        save/reload round-trip (prevents regression of #1950)."""

        class EmbedData(EmbeddedDocument):
            data = StringField()
            meta = {"allow_inheritance": True}

        class DataDoc(Document):
            name = StringField()
            embed = EmbeddedDocumentField(EmbedData)
            meta = {"allow_inheritance": True}

        # _cls must be present immediately, before any save.
        test_doc = DataDoc(name="test", embed=EmbedData(data="data"))
        assert test_doc._cls == "DataDoc"
        assert test_doc.embed._cls == "EmbedData"
        test_doc.save()
        # ...and must be identical after fetching the saved copy back.
        saved_doc = DataDoc.objects.with_id(test_doc.id)
        assert test_doc._cls == saved_doc._cls
        assert test_doc.embed._cls == saved_doc.embed._cls
        test_doc.delete()
|  |  | ||||||
    def test_superclasses(self):
        """Ensure that the correct list of superclasses is assembled.
        """

        class Animal(Document):
            meta = {"allow_inheritance": True}

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        # _superclasses is a tuple of dotted-path class names, ordered from
        # the inheritance root down to the immediate parent.
        assert Animal._superclasses == ()
        assert Fish._superclasses == ("Animal",)
        assert Guppy._superclasses == ("Animal", "Animal.Fish")
        assert Mammal._superclasses == ("Animal",)
        assert Dog._superclasses == ("Animal", "Animal.Mammal")
        assert Human._superclasses == ("Animal", "Animal.Mammal")
|  |  | ||||||
    def test_external_superclasses(self):
        """Ensure that the correct list of super classes is assembled when
        importing part of the model.
        """

        # Base comes from tests.fixtures, so the dotted paths are rooted at
        # "Base" rather than at a class defined in this module.
        class Animal(Base):
            pass

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        assert Animal._superclasses == ("Base",)
        assert Fish._superclasses == ("Base", "Base.Animal")
        assert Guppy._superclasses == ("Base", "Base.Animal", "Base.Animal.Fish")
        assert Mammal._superclasses == ("Base", "Base.Animal")
        assert Dog._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal")
        assert Human._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal")
|  |  | ||||||
    def test_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled.
        """

        class Animal(Document):
            meta = {"allow_inheritance": True}

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        # _subclasses includes the class itself plus every descendant,
        # each as a dotted path from the inheritance root.
        assert Animal._subclasses == (
            "Animal",
            "Animal.Fish",
            "Animal.Fish.Guppy",
            "Animal.Mammal",
            "Animal.Mammal.Dog",
            "Animal.Mammal.Human",
        )
        assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Guppy")
        assert Guppy._subclasses == ("Animal.Fish.Guppy",)
        assert Mammal._subclasses == (
            "Animal.Mammal",
            "Animal.Mammal.Dog",
            "Animal.Mammal.Human",
        )
        assert Human._subclasses == ("Animal.Mammal.Human",)
|  |  | ||||||
    def test_external_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled when importing part of the model.
        """

        # Mirrors test_subclasses, but rooted at the imported fixture Base,
        # so every dotted path is prefixed with "Base.".
        class Animal(Base):
            pass

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        assert Animal._subclasses == (
            "Base.Animal",
            "Base.Animal.Fish",
            "Base.Animal.Fish.Guppy",
            "Base.Animal.Mammal",
            "Base.Animal.Mammal.Dog",
            "Base.Animal.Mammal.Human",
        )
        assert Fish._subclasses == ("Base.Animal.Fish", "Base.Animal.Fish.Guppy")
        assert Guppy._subclasses == ("Base.Animal.Fish.Guppy",)
        assert Mammal._subclasses == (
            "Base.Animal.Mammal",
            "Base.Animal.Mammal.Dog",
            "Base.Animal.Mammal.Human",
        )
        assert Human._subclasses == ("Base.Animal.Mammal.Human",)
|  |  | ||||||
    def test_dynamic_declarations(self):
        """Test that declaring an extra class updates meta data"""

        class Animal(Document):
            meta = {"allow_inheritance": True}

        assert Animal._superclasses == ()
        assert Animal._subclasses == ("Animal",)

        # Test dynamically adding a class changes the meta data
        class Fish(Animal):
            pass

        assert Animal._superclasses == ()
        assert Animal._subclasses == ("Animal", "Animal.Fish")

        assert Fish._superclasses == ("Animal",)
        assert Fish._subclasses == ("Animal.Fish",)

        # Test dynamically adding an inherited class changes the meta data
        class Pike(Fish):
            pass

        # The new grandchild must be visible from every ancestor, not just
        # its immediate parent.
        assert Animal._superclasses == ()
        assert Animal._subclasses == ("Animal", "Animal.Fish", "Animal.Fish.Pike")

        assert Fish._superclasses == ("Animal",)
        assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Pike")

        assert Pike._superclasses == ("Animal", "Animal.Fish")
        assert Pike._subclasses == ("Animal.Fish.Pike",)
|  |  | ||||||
    def test_inheritance_meta_data(self):
        """Ensure that document may inherit fields from a superclass document.
        """

        class Person(Document):
            name = StringField()
            age = IntField()

            meta = {"allow_inheritance": True}

        class Employee(Person):
            salary = IntField()

        # The subclass carries its own field plus all inherited ones
        # (and the implicit _cls / id bookkeeping fields).
        assert ["_cls", "age", "id", "name", "salary"] == sorted(
            Employee._fields.keys()
        )
        # Subclasses share the parent's collection.
        assert Employee._get_collection_name() == Person._get_collection_name()
|  |  | ||||||
|  |     def test_inheritance_to_mongo_keys(self): | ||||||
|  |         """Ensure that document may inherit fields from a superclass document. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             age = IntField() | ||||||
|  |  | ||||||
|  |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |         class Employee(Person): | ||||||
|  |             salary = IntField() | ||||||
|  |  | ||||||
|  |         assert ["_cls", "age", "id", "name", "salary"] == sorted( | ||||||
|  |             Employee._fields.keys() | ||||||
|  |         ) | ||||||
|  |         assert Person(name="Bob", age=35).to_mongo().keys() == ["_cls", "name", "age"] | ||||||
|  |         assert Employee(name="Bob", age=35, salary=0).to_mongo().keys() == [ | ||||||
|  |             "_cls", | ||||||
|  |             "name", | ||||||
|  |             "age", | ||||||
|  |             "salary", | ||||||
|  |         ] | ||||||
|  |         assert Employee._get_collection_name() == Person._get_collection_name() | ||||||
|  |  | ||||||
    def test_indexes_and_multiple_inheritance(self):
        """ Ensure that all of the indexes are created for a document with
        multiple inheritance.
        """

        class A(Document):
            a = StringField()

            meta = {"allow_inheritance": True, "indexes": ["a"]}

        class B(Document):
            b = StringField()

            meta = {"allow_inheritance": True, "indexes": ["b"]}

        class C(A, B):
            pass

        A.drop_collection()
        B.drop_collection()
        C.drop_collection()

        C.ensure_indexes()

        # C must end up with the union of both parents' indexes (each
        # prefixed with _cls) plus the default _id index.  sorted() makes
        # the comparison independent of index creation order.
        assert sorted(
            [idx["key"] for idx in C._get_collection().index_information().values()]
        ) == sorted(
            [[(u"_cls", 1), (u"b", 1)], [(u"_id", 1)], [(u"_cls", 1), (u"a", 1)]]
        )
|  |  | ||||||
    def test_polymorphic_queries(self):
        """Ensure that the correct subclasses are returned from a query
        """

        class Animal(Document):
            meta = {"allow_inheritance": True}

        class Fish(Animal):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        Animal.drop_collection()

        Animal().save()
        Fish().save()
        Mammal().save()
        Dog().save()
        Human().save()

        # Querying a class returns instances of it AND of all subclasses,
        # each rehydrated to its concrete type via _cls.
        classes = [obj.__class__ for obj in Animal.objects]
        assert classes == [Animal, Fish, Mammal, Dog, Human]

        classes = [obj.__class__ for obj in Mammal.objects]
        assert classes == [Mammal, Dog, Human]

        # A leaf class only ever returns itself.
        classes = [obj.__class__ for obj in Human.objects]
        assert classes == [Human]
|  |  | ||||||
|  |     def test_allow_inheritance(self): | ||||||
|  |         """Ensure that inheritance is disabled by default on simple | ||||||
|  |         classes and that _cls will not be used. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class Animal(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         # can't inherit because Animal didn't explicitly allow inheritance | ||||||
|  |         with pytest.raises(ValueError, match="Document Animal may not be subclassed"): | ||||||
|  |  | ||||||
|  |             class Dog(Animal): | ||||||
|  |                 pass | ||||||
|  |  | ||||||
|  |         # Check that _cls etc aren't present on simple documents | ||||||
|  |         dog = Animal(name="dog").save() | ||||||
|  |         assert dog.to_mongo().keys() == ["_id", "name"] | ||||||
|  |  | ||||||
|  |         collection = self.db[Animal._get_collection_name()] | ||||||
|  |         obj = collection.find_one() | ||||||
|  |         assert "_cls" not in obj | ||||||
|  |  | ||||||
    def test_cant_turn_off_inheritance_on_subclass(self):
        """Ensure if inheritance is on in a subclass you cant turn it off.
        """

        class Animal(Document):
            name = StringField()
            meta = {"allow_inheritance": True}

        # Disabling inheritance on a subclass would orphan the parent's
        # _cls bookkeeping, so the metaclass must reject it at class
        # definition time.
        with pytest.raises(ValueError) as exc_info:

            class Mammal(Animal):
                meta = {"allow_inheritance": False}

        assert (
            str(exc_info.value)
            == 'Only direct subclasses of Document may set "allow_inheritance" to False'
        )
|  |  | ||||||
    def test_allow_inheritance_abstract_document(self):
        """Ensure that abstract documents can set inheritance rules and that
        _cls will not be used.
        """

        class FinalDocument(Document):
            meta = {"abstract": True, "allow_inheritance": False}

        class Animal(FinalDocument):
            name = StringField()

        # allow_inheritance=False is inherited from the abstract base, so
        # subclassing the concrete Animal must fail.
        with pytest.raises(ValueError):

            class Mammal(Animal):
                pass

        # Check that _cls isn't present in simple documents
        doc = Animal(name="dog")
        assert "_cls" not in doc.to_mongo()
|  |  | ||||||
    def test_using_abstract_class_in_reference_field(self):
        """Referencing an abstract class in a ReferenceField must not break
        validation of the referring document (regression test for #1920)."""

        class AbstractHuman(Document):
            meta = {"abstract": True}

        class Dad(AbstractHuman):
            name = StringField()

        class Home(Document):
            dad = ReferenceField(AbstractHuman)  # Referencing the abstract class
            address = StringField()

        dad = Dad(name="5").save()
        Home(dad=dad, address="street").save()

        home = Home.objects.first()
        home.address = "garbage"
        home.save()  # Was failing with ValidationError
|  |  | ||||||
    def test_abstract_class_referencing_self(self):
        """A self-reference declared on an abstract base must validate on
        concrete subclasses (regression test for #1920)."""

        class Human(Document):
            meta = {"abstract": True}
            creator = ReferenceField("self", dbref=True)

        class User(Human):
            name = StringField()

        user = User(name="John").save()
        user2 = User(name="Foo", creator=user).save()

        user2 = User.objects.with_id(user2.id)
        user2.name = "Bar"
        user2.save()  # Was failing with ValidationError
|  |  | ||||||
    def test_abstract_handle_ids_in_metaclass_properly(self):
        """A concrete subclass of an abstract document gets exactly one
        auto-created `id` primary key, listed first in field order."""

        class City(Document):
            continent = StringField()
            meta = {"abstract": True, "allow_inheritance": False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name="Berlin", continent="Europe")
        # Forward and reverse db-field maps must stay in sync with the
        # ordered field list: continent, name, and the implicit id.
        assert len(berlin._db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._fields_ordered) == 3
        assert berlin._fields_ordered[0] == "id"
|  |  | ||||||
    def test_auto_id_not_set_if_specific_in_parent_class(self):
        """If the abstract parent declares its own primary key, no extra
        auto `id` field may be generated for the subclass."""

        class City(Document):
            continent = StringField()
            city_id = IntField(primary_key=True)
            meta = {"abstract": True, "allow_inheritance": False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name="Berlin", continent="Europe")
        assert len(berlin._db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
        # Still 3 fields: continent, city_id, name — the declared primary
        # key replaces the implicit id and sorts first.
        assert len(berlin._fields_ordered) == 3
        assert berlin._fields_ordered[0] == "city_id"
|  |  | ||||||
    def test_auto_id_vs_non_pk_id_field(self):
        """A user field literally named `id` that is NOT the primary key
        forces the auto primary key onto a generated name (auto_id_0)."""

        class City(Document):
            continent = StringField()
            id = IntField()
            meta = {"abstract": True, "allow_inheritance": False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name="Berlin", continent="Europe")
        assert len(berlin._db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
        # 4 fields: the generated auto_id_0 pk, continent, id, name.
        assert len(berlin._fields_ordered) == 4
        assert berlin._fields_ordered[0] == "auto_id_0"
        berlin.save()
        assert berlin.pk == berlin.auto_id_0
|  |  | ||||||
    def test_abstract_document_creation_does_not_fail(self):
        """Instantiating an abstract document is allowed; it just has no
        primary key and cannot be assigned one."""

        class City(Document):
            continent = StringField()
            meta = {"abstract": True, "allow_inheritance": False}

        city = City(continent="asia")
        assert city.pk is None
        # TODO: expected error? Shouldn't we create a new error type?
        with pytest.raises(KeyError):
            setattr(city, "pk", 1)
|  |  | ||||||
    def test_allow_inheritance_embedded_document(self):
        """Ensure embedded documents respect inheritance."""

        class Comment(EmbeddedDocument):
            content = StringField()

        # Inheritance is off by default for embedded documents too.
        with pytest.raises(ValueError):

            class SpecialComment(Comment):
                pass

        doc = Comment(content="test")
        assert "_cls" not in doc.to_mongo()

        # Redefining with allow_inheritance=True turns on the _cls marker.
        class Comment(EmbeddedDocument):
            content = StringField()
            meta = {"allow_inheritance": True}

        doc = Comment(content="test")
        assert "_cls" in doc.to_mongo()
|  |  | ||||||
|  |     def test_document_inheritance(self): | ||||||
|  |         """Ensure mutliple inheritance of abstract documents | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class DateCreatedDocument(Document): | ||||||
|  |             meta = {"allow_inheritance": True, "abstract": True} | ||||||
|  |  | ||||||
|  |         class DateUpdatedDocument(Document): | ||||||
|  |             meta = {"allow_inheritance": True, "abstract": True} | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |  | ||||||
|  |             class MyDocument(DateCreatedDocument, DateUpdatedDocument): | ||||||
|  |                 pass | ||||||
|  |  | ||||||
|  |         except Exception: | ||||||
|  |             assert False, "Couldn't create MyDocument class" | ||||||
|  |  | ||||||
    def test_abstract_documents(self):
        """Ensure that a document superclass can be marked as abstract
        thereby not using it as the name for the collection."""

        # meta options that concrete subclasses are expected to inherit
        # from the abstract base verbatim.
        defaults = {
            "index_background": True,
            "index_opts": {"hello": "world"},
            "allow_inheritance": True,
            "queryset_class": "QuerySet",
            "db_alias": "myDB",
            "shard_key": ("hello", "world"),
        }

        meta_settings = {"abstract": True}
        meta_settings.update(defaults)

        class Animal(Document):
            name = StringField()
            meta = meta_settings

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            meta = {"abstract": True}

        class Human(Mammal):
            pass

        # Every non-abstract meta default propagates down the hierarchy.
        for k, v in defaults.items():
            for cls in [Animal, Fish, Guppy]:
                assert cls._meta[k] == v

        # Abstract classes never get a collection of their own...
        assert "collection" not in Animal._meta
        assert "collection" not in Mammal._meta

        assert Animal._get_collection_name() is None
        assert Mammal._get_collection_name() is None

        # ...the first concrete class in each branch names the collection.
        assert Fish._get_collection_name() == "fish"
        assert Guppy._get_collection_name() == "fish"
        assert Human._get_collection_name() == "human"

        # ensure that a subclass of a non-abstract class can't be abstract
        with pytest.raises(ValueError):

            class EvilHuman(Human):
                evil = BooleanField(default=True)
                meta = {"abstract": True}
|  |  | ||||||
    def test_abstract_embedded_documents(self):
        """EmbeddedDocument subclasses must not inherit the `abstract` meta
        flag from their parent (regression test for #789)."""

        class A(EmbeddedDocument):
            meta = {"abstract": True}

        class B(A):
            pass

        assert not B._meta["abstract"]
|  |  | ||||||
    def test_inherited_collections(self):
        """Ensure that subclassed documents don't override parents'
        collections
        """

        class Drink(Document):
            name = StringField()
            meta = {"allow_inheritance": True}

        class Drinker(Document):
            drink = GenericReferenceField()

        # Overriding `collection` on a subclass must emit a SyntaxWarning;
        # we escalate warnings to errors to prove it fires, then redefine
        # the class with warnings suppressed so the test can proceed.
        # NOTE(review): the first class name ("Acloholic...") is misspelled;
        # it appears intentional-as-throwaway since that class is never
        # used again — confirm before "fixing".
        try:
            warnings.simplefilter("error")

            class AcloholicDrink(Drink):
                meta = {"collection": "booze"}

        except SyntaxWarning:
            warnings.simplefilter("ignore")

            class AlcoholicDrink(Drink):
                meta = {"collection": "booze"}

        else:
            raise AssertionError("SyntaxWarning should be triggered")

        warnings.resetwarnings()

        Drink.drop_collection()
        AlcoholicDrink.drop_collection()
        Drinker.drop_collection()

        red_bull = Drink(name="Red Bull")
        red_bull.save()

        programmer = Drinker(drink=red_bull)
        programmer.save()

        beer = AlcoholicDrink(name="Beer")
        beer.save()
        real_person = Drinker(drink=beer)
        real_person.save()

        # GenericReferenceField must rehydrate each referent to its own
        # class even though both live in related collections.
        assert Drinker.objects[0].drink.name == red_bull.name
        assert Drinker.objects[1].drink.name == beer.name
|  |  | ||||||
|  |  | ||||||
# Allow running this module directly; pytest discovers it as well.
if __name__ == "__main__":
    unittest.main()
							
								
								
									
										3826
									
								
								tests/document/test_instance.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3826
									
								
								tests/document/test_instance.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										106
									
								
								tests/document/test_json_serialisation.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										106
									
								
								tests/document/test_json_serialisation.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,106 @@ | |||||||
|  | import unittest | ||||||
|  | import uuid | ||||||
|  |  | ||||||
|  | from datetime import datetime | ||||||
|  | from bson import ObjectId | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class TestJson(MongoDBTestCase):
    """JSON serialisation round-trip tests for documents."""

    def test_json_names(self):
        """``to_json(use_db_field=False)`` must emit the python-side field
        names rather than the abbreviated ``db_field`` keys.

        Covers https://github.com/MongoEngine/mongoengine/issues/654
        """

        class Embedded(EmbeddedDocument):
            string = StringField(db_field="s")

        class Doc(Document):
            string = StringField(db_field="s")
            embedded = EmbeddedDocumentField(Embedded, db_field="e")

        document = Doc(string="Hello", embedded=Embedded(string="Inner Hello"))
        serialised = document.to_json(
            sort_keys=True, use_db_field=False, separators=(",", ":")
        )

        # Field names, not the one-letter db_field aliases, are expected.
        assert serialised == (
            """{"embedded":{"string":"Inner Hello"},"string":"Hello"}"""
        )

    def test_json_simple(self):
        """A document with an embedded field survives a to_json/from_json
        round trip unchanged."""

        class Embedded(EmbeddedDocument):
            string = StringField()

        class Doc(Document):
            string = StringField()
            embedded_field = EmbeddedDocumentField(Embedded)

            def __eq__(self, other):
                same_string = self.string == other.string
                same_embedded = self.embedded_field == other.embedded_field
                return same_string and same_embedded

        document = Doc(string="Hi", embedded_field=Embedded(string="Hi"))

        serialised = document.to_json(sort_keys=True, separators=(",", ":"))
        assert serialised == """{"embedded_field":{"string":"Hi"},"string":"Hi"}"""

        assert document == Doc.from_json(document.to_json())

    def test_json_complex(self):
        """A document exercising every standard field type round-trips
        through to_json/from_json."""

        class EmbeddedDoc(EmbeddedDocument):
            pass

        class Simple(Document):
            pass

        class Doc(Document):
            string_field = StringField(default="1")
            int_field = IntField(default=1)
            float_field = FloatField(default=1.1)
            boolean_field = BooleanField(default=True)
            datetime_field = DateTimeField(default=datetime.now)
            embedded_document_field = EmbeddedDocumentField(
                EmbeddedDoc, default=lambda: EmbeddedDoc()
            )
            list_field = ListField(default=lambda: [1, 2, 3])
            dict_field = DictField(default=lambda: {"hello": "world"})
            objectid_field = ObjectIdField(default=ObjectId)
            reference_field = ReferenceField(Simple, default=lambda: Simple().save())
            map_field = MapField(IntField(), default=lambda: {"simple": 1})
            decimal_field = DecimalField(default=1.0)
            complex_datetime_field = ComplexDateTimeField(default=datetime.now)
            url_field = URLField(default="http://mongoengine.org")
            dynamic_field = DynamicField(default=1)
            generic_reference_field = GenericReferenceField(
                default=lambda: Simple().save()
            )
            sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3])
            email_field = EmailField(default="ross@example.com")
            geo_point_field = GeoPointField(default=lambda: [1, 2])
            sequence_field = SequenceField()
            uuid_field = UUIDField(default=uuid.uuid4)
            generic_embedded_document_field = GenericEmbeddedDocumentField(
                default=lambda: EmbeddedDoc()
            )

            def __eq__(self, other):
                # Compare via the parsed JSON forms so every field type is
                # checked with one expression.
                import json

                return json.loads(self.to_json()) == json.loads(other.to_json())

        document = Doc()
        assert document == Doc.from_json(document.to_json())
|  |  | ||||||
|  |  | ||||||
# Allow running this test module directly with ``python <file>``.
if __name__ == "__main__":
    unittest.main()
							
								
								
									
										222
									
								
								tests/document/test_validation.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										222
									
								
								tests/document/test_validation.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,222 @@ | |||||||
|  | import unittest | ||||||
|  | from datetime import datetime | ||||||
|  |  | ||||||
|  | import pytest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class TestValidatorError(MongoDBTestCase):
    """Tests for ValidationError structure and for document validation."""

    def test_to_dict(self):
        """Ensure a ValidationError handles error to_dict correctly."""
        error = ValidationError("root")
        assert error.to_dict() == {}

        # 1st level error schema
        error.errors = {"1st": ValidationError("bad 1st")}
        assert "1st" in error.to_dict()
        assert error.to_dict()["1st"] == "bad 1st"

        # 2nd level error schema
        error.errors = {
            "1st": ValidationError(
                "bad 1st", errors={"2nd": ValidationError("bad 2nd")}
            )
        }
        assert "1st" in error.to_dict()
        assert isinstance(error.to_dict()["1st"], dict)
        assert "2nd" in error.to_dict()["1st"]
        assert error.to_dict()["1st"]["2nd"] == "bad 2nd"

        # moar levels
        error.errors = {
            "1st": ValidationError(
                "bad 1st",
                errors={
                    "2nd": ValidationError(
                        "bad 2nd",
                        errors={
                            "3rd": ValidationError(
                                "bad 3rd", errors={"4th": ValidationError("Inception")}
                            )
                        },
                    )
                },
            )
        }
        assert "1st" in error.to_dict()
        assert "2nd" in error.to_dict()["1st"]
        assert "3rd" in error.to_dict()["1st"]["2nd"]
        assert "4th" in error.to_dict()["1st"]["2nd"]["3rd"]
        assert error.to_dict()["1st"]["2nd"]["3rd"]["4th"] == "Inception"

        assert error.message == "root(2nd.3rd.4th.Inception: ['1st'])"

    def test_model_validation(self):
        class User(Document):
            username = StringField(primary_key=True)
            name = StringField(required=True)

        # pytest.raises (instead of a bare try/except) guarantees the test
        # fails if validation unexpectedly succeeds.
        with pytest.raises(ValidationError) as exc_info:
            User().validate()
        e = exc_info.value
        assert "User:None" in e.message
        assert e.to_dict() == {
            "username": "Field is required",
            "name": "Field is required",
        }

        user = User(username="RossC0", name="Ross").save()
        user.name = None
        with pytest.raises(ValidationError) as exc_info:
            user.save()
        e = exc_info.value
        assert "User:RossC0" in e.message
        assert e.to_dict() == {"name": "Field is required"}

    def test_fields_rewrite(self):
        """A subclass may redefine an inherited field as required."""

        class BasePerson(Document):
            name = StringField()
            age = IntField()
            meta = {"abstract": True}

        class Person(BasePerson):
            name = StringField(required=True)

        p = Person(age=15)
        with pytest.raises(ValidationError):
            p.validate()

    def test_embedded_document_validation(self):
        """Ensure that embedded documents may be validated."""

        class Comment(EmbeddedDocument):
            date = DateTimeField()
            content = StringField(required=True)

        comment = Comment()
        with pytest.raises(ValidationError):
            comment.validate()

        comment.content = "test"
        comment.validate()

        # Wrong type for a DateTimeField must be rejected.
        comment.date = 4
        with pytest.raises(ValidationError):
            comment.validate()

        comment.date = datetime.now()
        comment.validate()
        assert comment._instance is None

    def test_embedded_db_field_validate(self):
        class SubDoc(EmbeddedDocument):
            val = IntField(required=True)

        class Doc(Document):
            id = StringField(primary_key=True)
            e = EmbeddedDocumentField(SubDoc, db_field="eb")

        # Validation errors must be keyed by field name ("e"), not the
        # db_field alias ("eb"); pytest.raises ensures the error really
        # happens (a bare try/except would pass silently otherwise).
        with pytest.raises(ValidationError) as exc_info:
            Doc(id="bad").validate()
        e = exc_info.value
        assert "SubDoc:None" in e.message
        assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}}

        Doc.drop_collection()

        Doc(id="test", e=SubDoc(val=15)).save()

        doc = Doc.objects.first()
        keys = doc._data.keys()
        assert 2 == len(keys)
        assert "e" in keys
        assert "id" in keys

        doc.e.val = "OK"
        with pytest.raises(ValidationError) as exc_info:
            doc.save()
        e = exc_info.value
        assert "Doc:test" in e.message
        assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}}

    def test_embedded_weakref(self):
        """An embedded document validates against the document that still
        references it even after another referrer is deleted."""

        class SubDoc(EmbeddedDocument):
            val = IntField(required=True)

        class Doc(Document):
            e = EmbeddedDocumentField(SubDoc, db_field="eb")

        Doc.drop_collection()

        d1 = Doc()
        d2 = Doc()

        s = SubDoc()

        with pytest.raises(ValidationError):
            s.validate()

        d1.e = s
        d2.e = s

        del d1

        with pytest.raises(ValidationError):
            d2.validate()

    def test_parent_reference_in_child_document(self):
        """
        Test to ensure a ReferenceField can store a reference to a parent
        class when inherited. Issue #954.
        """

        class Parent(Document):
            meta = {"allow_inheritance": True}
            reference = ReferenceField("self")

        class Child(Parent):
            pass

        parent = Parent()
        parent.save()

        child = Child(reference=parent)

        # Saving child should not raise a ValidationError
        try:
            child.save()
        except ValidationError as e:
            self.fail("ValidationError raised: %s" % e.message)

    def test_parent_reference_set_as_attribute_in_child_document(self):
        """
        Test to ensure a ReferenceField can store a reference to a parent
        class when inherited and when set via attribute. Issue #954.
        """

        class Parent(Document):
            meta = {"allow_inheritance": True}
            reference = ReferenceField("self")

        class Child(Parent):
            pass

        parent = Parent()
        parent.save()

        child = Child()
        child.reference = parent

        # Saving the child should not raise a ValidationError
        try:
            child.save()
        except ValidationError as e:
            self.fail("ValidationError raised: %s" % e.message)
|  |  | ||||||
|  |  | ||||||
# Allow running this test module directly with ``python <file>``.
if __name__ == "__main__":
    unittest.main()
| @@ -1,502 +0,0 @@ | |||||||
| import unittest |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.connection import get_db |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DynamicDocTest(unittest.TestCase): |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         connect(db='mongoenginetest') |  | ||||||
|         self.db = get_db() |  | ||||||
|  |  | ||||||
|         class Person(DynamicDocument): |  | ||||||
|             name = StringField() |  | ||||||
|             meta = {'allow_inheritance': True} |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|  |  | ||||||
|         self.Person = Person |  | ||||||
|  |  | ||||||
|     def test_simple_dynamic_document(self): |  | ||||||
|         """Ensures simple dynamic documents are saved correctly""" |  | ||||||
|  |  | ||||||
|         p = self.Person() |  | ||||||
|         p.name = "James" |  | ||||||
|         p.age = 34 |  | ||||||
|  |  | ||||||
|         self.assertEquals(p.to_mongo(), |  | ||||||
|             {"_types": ["Person"], "_cls": "Person", |  | ||||||
|              "name": "James", "age": 34} |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals(self.Person.objects.first().age, 34) |  | ||||||
|  |  | ||||||
|         # Confirm no changes to self.Person |  | ||||||
|         self.assertFalse(hasattr(self.Person, 'age')) |  | ||||||
|  |  | ||||||
|     def test_dynamic_document_delta(self): |  | ||||||
|         """Ensures simple dynamic documents can delta correctly""" |  | ||||||
|         p = self.Person(name="James", age=34) |  | ||||||
|         self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {})) |  | ||||||
|  |  | ||||||
|         p.doc = 123 |  | ||||||
|         del(p.doc) |  | ||||||
|         self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1})) |  | ||||||
|  |  | ||||||
|     def test_change_scope_of_variable(self): |  | ||||||
|         """Test changing the scope of a dynamic field has no adverse effects""" |  | ||||||
|         p = self.Person() |  | ||||||
|         p.name = "Dean" |  | ||||||
|         p.misc = 22 |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         p = self.Person.objects.get() |  | ||||||
|         p.misc = {'hello': 'world'} |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         p = self.Person.objects.get() |  | ||||||
|         self.assertEquals(p.misc, {'hello': 'world'}) |  | ||||||
|  |  | ||||||
|     def test_delete_dynamic_field(self): |  | ||||||
|         """Test deleting a dynamic field works""" |  | ||||||
|         self.Person.drop_collection() |  | ||||||
|         p = self.Person() |  | ||||||
|         p.name = "Dean" |  | ||||||
|         p.misc = 22 |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         p = self.Person.objects.get() |  | ||||||
|         p.misc = {'hello': 'world'} |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         p = self.Person.objects.get() |  | ||||||
|         self.assertEquals(p.misc, {'hello': 'world'}) |  | ||||||
|         collection = self.db[self.Person._get_collection_name()] |  | ||||||
|         obj = collection.find_one() |  | ||||||
|         self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name']) |  | ||||||
|  |  | ||||||
|         del(p.misc) |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         p = self.Person.objects.get() |  | ||||||
|         self.assertFalse(hasattr(p, 'misc')) |  | ||||||
|  |  | ||||||
|         obj = collection.find_one() |  | ||||||
|         self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'name']) |  | ||||||
|  |  | ||||||
|     def test_dynamic_document_queries(self): |  | ||||||
|         """Ensure we can query dynamic fields""" |  | ||||||
|         p = self.Person() |  | ||||||
|         p.name = "Dean" |  | ||||||
|         p.age = 22 |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals(1, self.Person.objects(age=22).count()) |  | ||||||
|         p = self.Person.objects(age=22) |  | ||||||
|         p = p.get() |  | ||||||
|         self.assertEquals(22, p.age) |  | ||||||
|  |  | ||||||
|     def test_complex_dynamic_document_queries(self): |  | ||||||
|         class Person(DynamicDocument): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|  |  | ||||||
|         p = Person(name="test") |  | ||||||
|         p.age = "ten" |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         p1 = Person(name="test1") |  | ||||||
|         p1.age = "less then ten and a half" |  | ||||||
|         p1.save() |  | ||||||
|  |  | ||||||
|         p2 = Person(name="test2") |  | ||||||
|         p2.age = 10 |  | ||||||
|         p2.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals(Person.objects(age__icontains='ten').count(), 2) |  | ||||||
|         self.assertEquals(Person.objects(age__gte=10).count(), 1) |  | ||||||
|  |  | ||||||
|     def test_complex_data_lookups(self): |  | ||||||
|         """Ensure you can query dynamic document dynamic fields""" |  | ||||||
|         p = self.Person() |  | ||||||
|         p.misc = {'hello': 'world'} |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals(1, self.Person.objects(misc__hello='world').count()) |  | ||||||
|  |  | ||||||
|     def test_inheritance(self): |  | ||||||
|         """Ensure that dynamic document plays nice with inheritance""" |  | ||||||
|         class Employee(self.Person): |  | ||||||
|             salary = IntField() |  | ||||||
|  |  | ||||||
|         Employee.drop_collection() |  | ||||||
|  |  | ||||||
|         self.assertTrue('name' in Employee._fields) |  | ||||||
|         self.assertTrue('salary' in Employee._fields) |  | ||||||
|         self.assertEqual(Employee._get_collection_name(), |  | ||||||
|                          self.Person._get_collection_name()) |  | ||||||
|  |  | ||||||
|         joe_bloggs = Employee() |  | ||||||
|         joe_bloggs.name = "Joe Bloggs" |  | ||||||
|         joe_bloggs.salary = 10 |  | ||||||
|         joe_bloggs.age = 20 |  | ||||||
|         joe_bloggs.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals(1, self.Person.objects(age=20).count()) |  | ||||||
|         self.assertEquals(1, Employee.objects(age=20).count()) |  | ||||||
|  |  | ||||||
|         joe_bloggs = self.Person.objects.first() |  | ||||||
|         self.assertTrue(isinstance(joe_bloggs, Employee)) |  | ||||||
|  |  | ||||||
|     def test_embedded_dynamic_document(self): |  | ||||||
|         """Test dynamic embedded documents""" |  | ||||||
|         class Embedded(DynamicEmbeddedDocument): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         class Doc(DynamicDocument): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         Doc.drop_collection() |  | ||||||
|         doc = Doc() |  | ||||||
|  |  | ||||||
|         embedded_1 = Embedded() |  | ||||||
|         embedded_1.string_field = 'hello' |  | ||||||
|         embedded_1.int_field = 1 |  | ||||||
|         embedded_1.dict_field = {'hello': 'world'} |  | ||||||
|         embedded_1.list_field = ['1', 2, {'hello': 'world'}] |  | ||||||
|         doc.embedded_field = embedded_1 |  | ||||||
|  |  | ||||||
|         self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", |  | ||||||
|             "embedded_field": { |  | ||||||
|                 "_types": ['Embedded'], "_cls": "Embedded", |  | ||||||
|                 "string_field": "hello", |  | ||||||
|                 "int_field": 1, |  | ||||||
|                 "dict_field": {"hello": "world"}, |  | ||||||
|                 "list_field": ['1', 2, {'hello': 'world'}] |  | ||||||
|             } |  | ||||||
|         }) |  | ||||||
|         doc.save() |  | ||||||
|  |  | ||||||
|         doc = Doc.objects.first() |  | ||||||
|         self.assertEquals(doc.embedded_field.__class__, Embedded) |  | ||||||
|         self.assertEquals(doc.embedded_field.string_field, "hello") |  | ||||||
|         self.assertEquals(doc.embedded_field.int_field, 1) |  | ||||||
|         self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'}) |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}]) |  | ||||||
|  |  | ||||||
|     def test_complex_embedded_documents(self): |  | ||||||
|         """Test complex dynamic embedded documents setups""" |  | ||||||
|         class Embedded(DynamicEmbeddedDocument): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         class Doc(DynamicDocument): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         Doc.drop_collection() |  | ||||||
|         doc = Doc() |  | ||||||
|  |  | ||||||
|         embedded_1 = Embedded() |  | ||||||
|         embedded_1.string_field = 'hello' |  | ||||||
|         embedded_1.int_field = 1 |  | ||||||
|         embedded_1.dict_field = {'hello': 'world'} |  | ||||||
|  |  | ||||||
|         embedded_2 = Embedded() |  | ||||||
|         embedded_2.string_field = 'hello' |  | ||||||
|         embedded_2.int_field = 1 |  | ||||||
|         embedded_2.dict_field = {'hello': 'world'} |  | ||||||
|         embedded_2.list_field = ['1', 2, {'hello': 'world'}] |  | ||||||
|  |  | ||||||
|         embedded_1.list_field = ['1', 2, embedded_2] |  | ||||||
|         doc.embedded_field = embedded_1 |  | ||||||
|  |  | ||||||
|         self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", |  | ||||||
|             "embedded_field": { |  | ||||||
|                 "_types": ['Embedded'], "_cls": "Embedded", |  | ||||||
|                 "string_field": "hello", |  | ||||||
|                 "int_field": 1, |  | ||||||
|                 "dict_field": {"hello": "world"}, |  | ||||||
|                 "list_field": ['1', 2, |  | ||||||
|                     {"_types": ['Embedded'], "_cls": "Embedded", |  | ||||||
|                     "string_field": "hello", |  | ||||||
|                     "int_field": 1, |  | ||||||
|                     "dict_field": {"hello": "world"}, |  | ||||||
|                     "list_field": ['1', 2, {'hello': 'world'}]} |  | ||||||
|                 ] |  | ||||||
|             } |  | ||||||
|         }) |  | ||||||
|         doc.save() |  | ||||||
|         doc = Doc.objects.first() |  | ||||||
|         self.assertEquals(doc.embedded_field.__class__, Embedded) |  | ||||||
|         self.assertEquals(doc.embedded_field.string_field, "hello") |  | ||||||
|         self.assertEquals(doc.embedded_field.int_field, 1) |  | ||||||
|         self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'}) |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[0], '1') |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[1], 2) |  | ||||||
|  |  | ||||||
|         embedded_field = doc.embedded_field.list_field[2] |  | ||||||
|  |  | ||||||
|         self.assertEquals(embedded_field.__class__, Embedded) |  | ||||||
|         self.assertEquals(embedded_field.string_field, "hello") |  | ||||||
|         self.assertEquals(embedded_field.int_field, 1) |  | ||||||
|         self.assertEquals(embedded_field.dict_field, {'hello': 'world'}) |  | ||||||
|         self.assertEquals(embedded_field.list_field, ['1', 2, {'hello': 'world'}]) |  | ||||||
|  |  | ||||||
|     def test_delta_for_dynamic_documents(self): |  | ||||||
|         p = self.Person() |  | ||||||
|         p.name = "Dean" |  | ||||||
|         p.age = 22 |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         p.age = 24 |  | ||||||
|         self.assertEquals(p.age, 24) |  | ||||||
|         self.assertEquals(p._get_changed_fields(), ['age']) |  | ||||||
|         self.assertEquals(p._delta(), ({'age': 24}, {})) |  | ||||||
|  |  | ||||||
|         p = self.Person.objects(age=22).get() |  | ||||||
|         p.age = 24 |  | ||||||
|         self.assertEquals(p.age, 24) |  | ||||||
|         self.assertEquals(p._get_changed_fields(), ['age']) |  | ||||||
|         self.assertEquals(p._delta(), ({'age': 24}, {})) |  | ||||||
|  |  | ||||||
|         p.save() |  | ||||||
|         self.assertEquals(1, self.Person.objects(age=24).count()) |  | ||||||
|  |  | ||||||
|     def test_delta(self): |  | ||||||
|  |  | ||||||
|         class Doc(DynamicDocument): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         Doc.drop_collection() |  | ||||||
|         doc = Doc() |  | ||||||
|         doc.save() |  | ||||||
|  |  | ||||||
|         doc = Doc.objects.first() |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), []) |  | ||||||
|         self.assertEquals(doc._delta(), ({}, {})) |  | ||||||
|  |  | ||||||
|         doc.string_field = 'hello' |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['string_field']) |  | ||||||
|         self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         doc.int_field = 1 |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['int_field']) |  | ||||||
|         self.assertEquals(doc._delta(), ({'int_field': 1}, {})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         dict_value = {'hello': 'world', 'ping': 'pong'} |  | ||||||
|         doc.dict_field = dict_value |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['dict_field']) |  | ||||||
|         self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         list_value = ['1', 2, {'hello': 'world'}] |  | ||||||
|         doc.list_field = list_value |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['list_field']) |  | ||||||
|         self.assertEquals(doc._delta(), ({'list_field': list_value}, {})) |  | ||||||
|  |  | ||||||
|         # Test unsetting |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         doc.dict_field = {} |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['dict_field']) |  | ||||||
|         self.assertEquals(doc._delta(), ({}, {'dict_field': 1})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         doc.list_field = [] |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['list_field']) |  | ||||||
|         self.assertEquals(doc._delta(), ({}, {'list_field': 1})) |  | ||||||
|  |  | ||||||
|     def test_delta_recursive(self): |  | ||||||
|         """Testing deltaing works with dynamic documents""" |  | ||||||
|         class Embedded(DynamicEmbeddedDocument): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         class Doc(DynamicDocument): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         Doc.drop_collection() |  | ||||||
|         doc = Doc() |  | ||||||
|         doc.save() |  | ||||||
|  |  | ||||||
|         doc = Doc.objects.first() |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), []) |  | ||||||
|         self.assertEquals(doc._delta(), ({}, {})) |  | ||||||
|  |  | ||||||
|         embedded_1 = Embedded() |  | ||||||
|         embedded_1.string_field = 'hello' |  | ||||||
|         embedded_1.int_field = 1 |  | ||||||
|         embedded_1.dict_field = {'hello': 'world'} |  | ||||||
|         embedded_1.list_field = ['1', 2, {'hello': 'world'}] |  | ||||||
|         doc.embedded_field = embedded_1 |  | ||||||
|  |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field']) |  | ||||||
|  |  | ||||||
|         embedded_delta = { |  | ||||||
|             'string_field': 'hello', |  | ||||||
|             'int_field': 1, |  | ||||||
|             'dict_field': {'hello': 'world'}, |  | ||||||
|             'list_field': ['1', 2, {'hello': 'world'}] |  | ||||||
|         } |  | ||||||
|         self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {})) |  | ||||||
|         embedded_delta.update({ |  | ||||||
|             '_types': ['Embedded'], |  | ||||||
|             '_cls': 'Embedded', |  | ||||||
|         }) |  | ||||||
|         self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {})) |  | ||||||
|  |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         doc.embedded_field.dict_field = {} |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field']) |  | ||||||
|         self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1})) |  | ||||||
|  |  | ||||||
|         self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field = [] |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) |  | ||||||
|         self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1})) |  | ||||||
|         self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         embedded_2 = Embedded() |  | ||||||
|         embedded_2.string_field = 'hello' |  | ||||||
|         embedded_2.int_field = 1 |  | ||||||
|         embedded_2.dict_field = {'hello': 'world'} |  | ||||||
|         embedded_2.list_field = ['1', 2, {'hello': 'world'}] |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field = ['1', 2, embedded_2] |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) |  | ||||||
|         self.assertEquals(doc.embedded_field._delta(), ({ |  | ||||||
|             'list_field': ['1', 2, { |  | ||||||
|                 '_cls': 'Embedded', |  | ||||||
|                 '_types': ['Embedded'], |  | ||||||
|                 'string_field': 'hello', |  | ||||||
|                 'dict_field': {'hello': 'world'}, |  | ||||||
|                 'int_field': 1, |  | ||||||
|                 'list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|             }] |  | ||||||
|         }, {})) |  | ||||||
|  |  | ||||||
|         self.assertEquals(doc._delta(), ({ |  | ||||||
|             'embedded_field.list_field': ['1', 2, { |  | ||||||
|                 '_cls': 'Embedded', |  | ||||||
|                  '_types': ['Embedded'], |  | ||||||
|                  'string_field': 'hello', |  | ||||||
|                  'dict_field': {'hello': 'world'}, |  | ||||||
|                  'int_field': 1, |  | ||||||
|                  'list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|             }] |  | ||||||
|         }, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[2]._changed_fields, []) |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[0], '1') |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[1], 2) |  | ||||||
|         for k in doc.embedded_field.list_field[2]._fields: |  | ||||||
|             self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k]) |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field[2].string_field = 'world' |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field']) |  | ||||||
|         self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {})) |  | ||||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world') |  | ||||||
|  |  | ||||||
|         # Test multiple assignments |  | ||||||
|         doc.embedded_field.list_field[2].string_field = 'hello world' |  | ||||||
|         doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) |  | ||||||
|         self.assertEquals(doc.embedded_field._delta(), ({ |  | ||||||
|             'list_field': ['1', 2, { |  | ||||||
|             '_types': ['Embedded'], |  | ||||||
|             '_cls': 'Embedded', |  | ||||||
|             'string_field': 'hello world', |  | ||||||
|             'int_field': 1, |  | ||||||
|             'list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|             'dict_field': {'hello': 'world'}}]}, {})) |  | ||||||
|         self.assertEquals(doc._delta(), ({ |  | ||||||
|             'embedded_field.list_field': ['1', 2, { |  | ||||||
|                 '_types': ['Embedded'], |  | ||||||
|                 '_cls': 'Embedded', |  | ||||||
|                 'string_field': 'hello world', |  | ||||||
|                 'int_field': 1, |  | ||||||
|                 'list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|                 'dict_field': {'hello': 'world'}} |  | ||||||
|             ]}, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world') |  | ||||||
|  |  | ||||||
|         # Test list native methods |  | ||||||
|         doc.embedded_field.list_field[2].list_field.pop(0) |  | ||||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field[2].list_field.append(1) |  | ||||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1]) |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field[2].list_field.sort() |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}]) |  | ||||||
|  |  | ||||||
|         del(doc.embedded_field.list_field[2].list_field[2]['hello']) |  | ||||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         del(doc.embedded_field.list_field[2].list_field) |  | ||||||
|         self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1})) |  | ||||||
|  |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         doc.dict_field = {'embedded': embedded_1} |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         doc.dict_field['embedded'].string_field = 'Hello World' |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['dict_field.embedded.string_field']) |  | ||||||
|         self.assertEquals(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {})) |  | ||||||
|  |  | ||||||
|     def test_indexes(self): |  | ||||||
|         """Ensure that indexes are used when meta[indexes] is specified. |  | ||||||
|         """ |  | ||||||
|         class BlogPost(DynamicDocument): |  | ||||||
|             meta = { |  | ||||||
|                 'indexes': [ |  | ||||||
|                     '-date', |  | ||||||
|                     ('category', '-date') |  | ||||||
|                 ], |  | ||||||
|             } |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         info = BlogPost.objects._collection.index_information() |  | ||||||
|         # _id, '-date', ('cat', 'date') |  | ||||||
|         # NB: there is no index on _types by itself, since |  | ||||||
|         # the indices on -date and tags will both contain |  | ||||||
|         # _types as first element in the key |  | ||||||
|         self.assertEqual(len(info), 3) |  | ||||||
|  |  | ||||||
|         # Indexes are lazy so use list() to perform query |  | ||||||
|         list(BlogPost.objects) |  | ||||||
|         info = BlogPost.objects._collection.index_information() |  | ||||||
|         info = [value['key'] for key, value in info.iteritems()] |  | ||||||
|         self.assertTrue([('_types', 1), ('category', 1), ('date', -1)] |  | ||||||
|                         in info) |  | ||||||
|         self.assertTrue([('_types', 1), ('date', -1)] in info) |  | ||||||
							
								
								
									
										1928
									
								
								tests/fields.py
									
									
									
									
									
								
							
							
						
						
									
										1928
									
								
								tests/fields.py
									
									
									
									
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										0
									
								
								tests/fields/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										0
									
								
								tests/fields/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
								
								
									
										
											BIN
										
									
								
								tests/fields/mongodb_leaf.png
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tests/fields/mongodb_leaf.png
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 4.9 KiB | 
| Before Width: | Height: | Size: 8.1 KiB After Width: | Height: | Size: 8.1 KiB | 
							
								
								
									
										142
									
								
								tests/fields/test_binary_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										142
									
								
								tests/fields/test_binary_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,142 @@ | |||||||
|  | import uuid | ||||||
|  |  | ||||||
|  | from bson import Binary | ||||||
|  | import pytest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
# 32 arbitrary non-UTF-8 bytes used as a shared fixture by the query/update
# tests below.  encode("latin-1") maps each code point 0-255 to the same
# byte, yielding raw binary data.
BIN_VALUE = "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5".encode(
    "latin-1"
)
|  |  | ||||||
|  |  | ||||||
class TestBinaryField(MongoDBTestCase):
    """Integration tests for BinaryField against a live MongoDB."""

    def test_binary_fields(self):
        """Ensure that binary fields can be stored and retrieved.
        """

        class Attachment(Document):
            content_type = StringField()
            blob = BinaryField()

        BLOB = "\xe6\x00\xc4\xff\x07".encode("latin-1")
        MIME_TYPE = "application/octet-stream"

        Attachment.drop_collection()

        attachment = Attachment(content_type=MIME_TYPE, blob=BLOB)
        attachment.save()

        attachment_1 = Attachment.objects().first()
        assert MIME_TYPE == attachment_1.content_type
        # bytes() normalizes the retrieved value for comparison with BLOB.
        assert BLOB == bytes(attachment_1.blob)

    def test_validation_succeeds(self):
        """Ensure that valid values can be assigned to binary fields.
        """

        class AttachmentRequired(Document):
            blob = BinaryField(required=True)

        class AttachmentSizeLimit(Document):
            blob = BinaryField(max_bytes=4)

        # A required binary field fails validation while unset, and passes
        # once a Binary value is assigned.
        attachment_required = AttachmentRequired()
        with pytest.raises(ValidationError):
            attachment_required.validate()
        attachment_required.blob = Binary("\xe6\x00\xc4\xff\x07".encode("latin-1"))
        attachment_required.validate()

        # max_bytes=4: five bytes must fail, four bytes must pass.
        _5_BYTES = "\xe6\x00\xc4\xff\x07".encode("latin-1")
        _4_BYTES = "\xe6\x00\xc4\xff".encode("latin-1")
        with pytest.raises(ValidationError):
            AttachmentSizeLimit(blob=_5_BYTES).validate()
        AttachmentSizeLimit(blob=_4_BYTES).validate()

    def test_validation_fails(self):
        """Ensure that invalid values cannot be assigned to binary fields."""

        class Attachment(Document):
            blob = BinaryField()

        # ints, unicode strings, and lists are all rejected.
        for invalid_data in (2, u"Im_a_unicode", ["some_str"]):
            with pytest.raises(ValidationError):
                Attachment(blob=invalid_data).validate()

    def test__primary(self):
        """A BinaryField can serve as the primary key."""
        class Attachment(Document):
            id = BinaryField(primary_key=True)

        Attachment.drop_collection()
        binary_id = uuid.uuid4().bytes
        att = Attachment(id=binary_id).save()
        assert 1 == Attachment.objects.count()
        assert 1 == Attachment.objects.filter(id=att.id).count()
        att.delete()
        assert 0 == Attachment.objects.count()

    def test_primary_filter_by_binary_pk_as_str(self):
        """Filtering on a binary primary key by the raw bytes value works."""
        class Attachment(Document):
            id = BinaryField(primary_key=True)

        Attachment.drop_collection()
        binary_id = uuid.uuid4().bytes
        att = Attachment(id=binary_id).save()
        assert 1 == Attachment.objects.filter(id=binary_id).count()
        att.delete()
        assert 0 == Attachment.objects.count()

    def test_match_querying_with_bytes(self):
        """Equality queries with a plain ``bytes`` value match stored data."""
        class MyDocument(Document):
            bin_field = BinaryField()

        MyDocument.drop_collection()

        doc = MyDocument(bin_field=BIN_VALUE).save()
        matched_doc = MyDocument.objects(bin_field=BIN_VALUE).first()
        assert matched_doc.id == doc.id

    def test_match_querying_with_binary(self):
        """Equality queries with a ``bson.Binary`` value match stored data."""
        class MyDocument(Document):
            bin_field = BinaryField()

        MyDocument.drop_collection()

        doc = MyDocument(bin_field=BIN_VALUE).save()

        matched_doc = MyDocument.objects(bin_field=Binary(BIN_VALUE)).first()
        assert matched_doc.id == doc.id

    def test_modify_operation__set(self):
        """Ensures no regression of bug #1127"""

        class MyDocument(Document):
            some_field = StringField()
            bin_field = BinaryField()

        MyDocument.drop_collection()

        # upsert + set__ on a binary field was the failing combination.
        doc = MyDocument.objects(some_field="test").modify(
            upsert=True, new=True, set__bin_field=BIN_VALUE
        )
        assert doc.some_field == "test"
        assert doc.bin_field == BIN_VALUE

    def test_update_one(self):
        """Ensures no regression of bug #1127"""

        class MyDocument(Document):
            bin_field = BinaryField()

        MyDocument.drop_collection()

        bin_data = "\xe6\x00\xc4\xff\x07".encode("latin-1")
        doc = MyDocument(bin_field=bin_data).save()

        # update_one on a binary field must report one modified document
        # and persist the new value.
        n_updated = MyDocument.objects(bin_field=bin_data).update_one(
            bin_field=BIN_VALUE
        )
        assert n_updated == 1
        fetched = MyDocument.objects.with_id(doc.id)
        assert fetched.bin_field == BIN_VALUE
							
								
								
									
										51
									
								
								tests/fields/test_boolean_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										51
									
								
								tests/fields/test_boolean_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,51 @@ | |||||||
|  | import pytest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from tests.utils import MongoDBTestCase, get_as_pymongo | ||||||
|  |  | ||||||
|  |  | ||||||
class TestBooleanField(MongoDBTestCase):
    """Tests for BooleanField storage, validation, and constructor casting."""

    def test_storage(self):
        """A boolean value round-trips to the raw pymongo document."""
        class Person(Document):
            admin = BooleanField()

        saved = Person(admin=True)
        saved.save()
        assert get_as_pymongo(saved) == {"_id": saved.id, "admin": True}

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to boolean
        fields.
        """

        class Person(Document):
            admin = BooleanField()

        subject = Person()
        subject.admin = True
        subject.validate()

        # Each non-bool assignment must fail validation.
        for bad_value in (2, "Yes", "False"):
            subject.admin = bad_value
            with pytest.raises(ValidationError):
                subject.validate()

    def test_weirdness_constructor(self):
        """When attribute is set in contructor, it gets cast into a bool
        which causes some weird behavior. We dont necessarily want to maintain this behavior
        but its a known issue
        """

        class Person(Document):
            admin = BooleanField()

        # Any non-empty string -- even "False" or "0" -- is coerced to True.
        for truthy_string in ("False", "0"):
            assert Person(admin=truthy_string).admin
							
								
								
									
										376
									
								
								tests/fields/test_cached_reference_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										376
									
								
								tests/fields/test_cached_reference_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,376 @@ | |||||||
|  | from decimal import Decimal | ||||||
|  |  | ||||||
|  | import pytest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestCachedReferenceField(MongoDBTestCase): | ||||||
    def test_get_and_save(self):
        """
        Tests #1047: CachedReferenceField creates DBRefs on to_python,
        but can't save them on to_mongo.
        """

        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal)

        Animal.drop_collection()
        Ocorrence.drop_collection()

        # Save a document with a cached reference, re-fetch it, mutate an
        # unrelated field and re-save; the regression was a failure here.
        Ocorrence(
            person="testte", animal=Animal(name="Leopard", tag="heavy").save()
        ).save()
        p = Ocorrence.objects.get()
        p.person = "new_testte"
        p.save()
|  |  | ||||||
    def test_general_things(self):
        """Exercise the core CachedReferenceField contract: registration on
        the target class, cached sub-field storage, None queries, and
        querying by a cached sub-field."""
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal, fields=["tag"])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(name="Leopard", tag="heavy")
        a.save()

        # Declaring the field registers it on the referenced class.
        assert Animal._cached_reference_fields == [Ocorrence.animal]
        o = Ocorrence(person="teste", animal=a)
        o.save()

        p = Ocorrence(person="Wilson")
        p.save()

        assert Ocorrence.objects(animal=None).count() == 1

        assert a.to_mongo(fields=["tag"]) == {"tag": "heavy", "_id": a.pk}

        # Only the listed fields (plus _id) are cached on the referring doc.
        assert o.to_mongo()["animal"]["tag"] == "heavy"

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        count = Ocorrence.objects(animal__tag="heavy").count()
        assert count == 1

        ocorrence = Ocorrence.objects(animal__tag="heavy").first()
        assert ocorrence.person == "teste"
        assert isinstance(ocorrence.animal, Animal)
|  |  | ||||||
    def test_with_decimal(self):
        """A cached DecimalField is stored as a float in the raw document
        and is queryable by that float value."""
        class PersonAuto(Document):
            name = StringField()
            salary = DecimalField()

        class SocialTest(Document):
            group = StringField()
            person = CachedReferenceField(PersonAuto, fields=("salary",))

        PersonAuto.drop_collection()
        SocialTest.drop_collection()

        p = PersonAuto(name="Alberto", salary=Decimal("7000.00"))
        p.save()

        s = SocialTest(group="dev", person=p)
        s.save()

        # Inspect the raw collection to check the exact stored shape.
        assert SocialTest.objects._collection.find_one({"person.salary": 7000.00}) == {
            "_id": s.pk,
            "group": s.group,
            "person": {"_id": p.pk, "salary": 7000.00},
        }
|  |  | ||||||
    def test_cached_reference_field_reference(self):
        """A cached field that is itself a ReferenceField is stored as the
        referenced document's pk, and is queryable through double-underscore
        syntax (``person__group``)."""
        class Group(Document):
            name = StringField()

        class Person(Document):
            name = StringField()
            group = ReferenceField(Group)

        class SocialData(Document):
            obs = StringField()
            tags = ListField(StringField())
            person = CachedReferenceField(Person, fields=("group",))

        Group.drop_collection()
        Person.drop_collection()
        SocialData.drop_collection()

        g1 = Group(name="dev")
        g1.save()

        g2 = Group(name="designers")
        g2.save()

        p1 = Person(name="Alberto", group=g1)
        p1.save()

        p2 = Person(name="Andre", group=g1)
        p2.save()

        p3 = Person(name="Afro design", group=g2)
        p3.save()

        s1 = SocialData(obs="testing 123", person=p1, tags=["tag1", "tag2"])
        s1.save()

        s2 = SocialData(obs="testing 321", person=p3, tags=["tag3", "tag4"])
        s2.save()

        # The cached "group" sub-field holds the Group's pk, not a DBRef.
        assert SocialData.objects._collection.find_one({"tags": "tag2"}) == {
            "_id": s1.pk,
            "obs": "testing 123",
            "tags": ["tag1", "tag2"],
            "person": {"_id": p1.pk, "group": g1.pk},
        }

        assert SocialData.objects(person__group=g2).count() == 1
        assert SocialData.objects(person__group=g2).first() == s2
|  |  | ||||||
    def test_cached_reference_field_push_with_fields(self):
        """``update(push__...)`` on a list of cached references stores the
        cached fields for the pushed item, not just its pk."""
        class Product(Document):
            name = StringField()

        Product.drop_collection()

        class Basket(Document):
            products = ListField(CachedReferenceField(Product, fields=["name"]))

        Basket.drop_collection()
        product1 = Product(name="abc").save()
        product2 = Product(name="def").save()
        basket = Basket(products=[product1]).save()
        assert Basket.objects._collection.find_one() == {
            "_id": basket.pk,
            "products": [{"_id": product1.pk, "name": product1.name}],
        }
        # push to list
        basket.update(push__products=product2)
        basket.reload()
        assert Basket.objects._collection.find_one() == {
            "_id": basket.pk,
            "products": [
                {"_id": product1.pk, "name": product1.name},
                {"_id": product2.pk, "name": product2.name},
            ],
        }
|  |  | ||||||
    def test_cached_reference_field_update_all(self):
        """After a bulk ``update()`` on the referenced collection,
        ``sync_all()`` refreshes the stale cached copies."""
        class Person(Document):
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(choices=TYPES)
            father = CachedReferenceField("self", fields=("tp",))

        Person.drop_collection()

        a1 = Person(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Person(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        a2 = Person.objects.with_id(a2.id)
        assert a2.father.tp == a1.tp

        assert dict(a2.to_mongo()) == {
            "_id": a2.pk,
            "name": u"Wilson Junior",
            "tp": u"pf",
            "father": {"_id": a1.pk, "tp": u"pj"},
        }

        # Querying by the reference translates to a query on the cached _id.
        assert Person.objects(father=a1)._query == {"father._id": a1.pk}
        assert Person.objects(father=a1).count() == 1

        # Bulk update bypasses per-document auto-sync; sync_all() repairs it.
        Person.objects.update(set__tp="pf")
        Person.father.sync_all()

        a2.reload()
        assert dict(a2.to_mongo()) == {
            "_id": a2.pk,
            "name": u"Wilson Junior",
            "tp": u"pf",
            "father": {"_id": a1.pk, "tp": u"pf"},
        }
|  |  | ||||||
    def test_cached_reference_fields_on_embedded_documents(self):
        """Declaring a CachedReferenceField on an EmbeddedDocument must
        raise InvalidDocumentError at class-creation time."""
        with pytest.raises(InvalidDocumentError):

            class Test(Document):
                name = StringField()

            # type() builds the class dynamically so the expected error is
            # raised inside the pytest.raises block.
            type(
                "WrongEmbeddedDocument",
                (EmbeddedDocument,),
                {"test": CachedReferenceField(Test)},
            )
|  |  | ||||||
    def test_cached_reference_auto_sync(self):
        """Saving the referenced document automatically updates the cached
        copy held by the referring document (auto_sync defaults to on)."""
        class Person(Document):
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(choices=TYPES)

            father = CachedReferenceField("self", fields=("tp",))

        Person.drop_collection()

        a1 = Person(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Person(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        # Changing and saving the referenced doc propagates to the cache.
        a1.tp = "pf"
        a1.save()

        a2.reload()
        assert dict(a2.to_mongo()) == {
            "_id": a2.pk,
            "name": "Wilson Junior",
            "tp": "pf",
            "father": {"_id": a1.pk, "tp": "pf"},
        }
|  |  | ||||||
    def test_cached_reference_auto_sync_disabled(self):
        """With ``auto_sync=False`` the cached copy keeps its stale value
        after the referenced document is saved."""
        class Persone(Document):
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(choices=TYPES)

            father = CachedReferenceField("self", fields=("tp",), auto_sync=False)

        Persone.drop_collection()

        a1 = Persone(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Persone(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        a1.tp = "pf"
        a1.save()

        # The cached "tp" remains the old "pj" value: no auto-sync happened.
        assert Persone.objects._collection.find_one({"_id": a2.pk}) == {
            "_id": a2.pk,
            "name": "Wilson Junior",
            "tp": "pf",
            "father": {"_id": a1.pk, "tp": "pj"},
        }
|  |  | ||||||
    def test_cached_reference_embedded_fields(self):
        """Dotted paths in ``fields`` (e.g. "owner.tp") cache nested
        embedded-document values, stored under their db_field names."""
        class Owner(EmbeddedDocument):
            TPS = (("n", "Normal"), ("u", "Urgent"))
            name = StringField()
            tp = StringField(verbose_name="Type", db_field="t", choices=TPS)

        class Animal(Document):
            name = StringField()
            tag = StringField()

            owner = EmbeddedDocumentField(Owner)

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal, fields=["tag", "owner.tp"])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(
            name="Leopard", tag="heavy", owner=Owner(tp="u", name="Wilson Júnior")
        )
        a.save()

        o = Ocorrence(person="teste", animal=a)
        o.save()
        # Note the nested value is stored under the db_field name "t".
        assert dict(a.to_mongo(fields=["tag", "owner.tp"])) == {
            "_id": a.pk,
            "tag": "heavy",
            "owner": {"t": "u"},
        }
        assert o.to_mongo()["animal"]["tag"] == "heavy"
        assert o.to_mongo()["animal"]["owner"]["t"] == "u"

        # Check to_mongo with fields
        assert "animal" not in o.to_mongo(fields=["person"])

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        count = Ocorrence.objects(animal__tag="heavy", animal__owner__tp="u").count()
        assert count == 1

        ocorrence = Ocorrence.objects(
            animal__tag="heavy", animal__owner__tp="u"
        ).first()
        assert ocorrence.person == "teste"
        assert isinstance(ocorrence.animal, Animal)
|  |  | ||||||
|  |     def test_cached_reference_embedded_list_fields(self): | ||||||
|  |         class Owner(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  |             tags = ListField(StringField()) | ||||||
|  |  | ||||||
|  |         class Animal(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             tag = StringField() | ||||||
|  |  | ||||||
|  |             owner = EmbeddedDocumentField(Owner) | ||||||
|  |  | ||||||
|  |         class Ocorrence(Document): | ||||||
|  |             person = StringField() | ||||||
|  |             animal = CachedReferenceField(Animal, fields=["tag", "owner.tags"]) | ||||||
|  |  | ||||||
|  |         Animal.drop_collection() | ||||||
|  |         Ocorrence.drop_collection() | ||||||
|  |  | ||||||
|  |         a = Animal( | ||||||
|  |             name="Leopard", | ||||||
|  |             tag="heavy", | ||||||
|  |             owner=Owner(tags=["cool", "funny"], name="Wilson Júnior"), | ||||||
|  |         ) | ||||||
|  |         a.save() | ||||||
|  |  | ||||||
|  |         o = Ocorrence(person="teste 2", animal=a) | ||||||
|  |         o.save() | ||||||
|  |         assert dict(a.to_mongo(fields=["tag", "owner.tags"])) == { | ||||||
|  |             "_id": a.pk, | ||||||
|  |             "tag": "heavy", | ||||||
|  |             "owner": {"tags": ["cool", "funny"]}, | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         assert o.to_mongo()["animal"]["tag"] == "heavy" | ||||||
|  |         assert o.to_mongo()["animal"]["owner"]["tags"] == ["cool", "funny"] | ||||||
|  |  | ||||||
|  |         # counts | ||||||
|  |         Ocorrence(person="teste 2").save() | ||||||
|  |         Ocorrence(person="teste 3").save() | ||||||
|  |  | ||||||
|  |         query = Ocorrence.objects( | ||||||
|  |             animal__tag="heavy", animal__owner__tags="cool" | ||||||
|  |         )._query | ||||||
|  |         assert query == {"animal.owner.tags": "cool", "animal.tag": "heavy"} | ||||||
|  |  | ||||||
|  |         ocorrence = Ocorrence.objects( | ||||||
|  |             animal__tag="heavy", animal__owner__tags="cool" | ||||||
|  |         ).first() | ||||||
|  |         assert ocorrence.person == "teste 2" | ||||||
|  |         assert isinstance(ocorrence.animal, Animal) | ||||||
							
								
								
									
										209
									
								
								tests/fields/test_complex_datetime_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										209
									
								
								tests/fields/test_complex_datetime_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,209 @@ | |||||||
|  | import datetime | ||||||
|  | import itertools | ||||||
|  | import math | ||||||
|  | import re | ||||||
|  |  | ||||||
|  | import pytest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class ComplexDateTimeFieldTest(MongoDBTestCase):
    """Tests for ComplexDateTimeField - a datetime stored as a zero-padded
    string so that full microsecond precision survives round-trips (the
    regular DateTimeField rounds to milliseconds).
    """

    def test_complexdatetime_storage(self):
        """Datetimes (including pre-epoch ones) round-trip with full
        microsecond precision, stored values are zero-padded, and a
        custom separator is honored.
        """

        class LogEntry(Document):
            date = ComplexDateTimeField()
            date_with_dots = ComplexDateTimeField(separator=".")

        LogEntry.drop_collection()

        # Post UTC - microseconds are rounded (down) nearest millisecond and
        # dropped - with default datetimefields
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1

        # Post UTC - microseconds are rounded (down) nearest millisecond - with
        # default datetimefields
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1

        # Pre UTC dates microseconds below 1000 are dropped - with default
        # datetimefields
        d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1

        # Pre UTC microseconds above 1000 is wonky - with default datetimefields
        # log.date has an invalid microsecond value so I can't construct
        # a date to compare.
        for i in range(1001, 3113, 33):
            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i)
            log.date = d1
            log.save()
            log.reload()
            assert log.date == d1
            log1 = LogEntry.objects.get(date=d1)
            assert log == log1

        # Test string padding: every component must be stored zero-padded
        # to a fixed width so string comparison preserves chronology.
        microsecond = map(int, [math.pow(10, x) for x in range(6)])
        mm = dd = hh = ii = ss = [1, 10]

        for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond):
            stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"]
            assert (
                re.match(r"^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored)
                is not None
            )

        # Test separator.
        # NOTE: the dots in the pattern must be escaped - an unescaped "."
        # matches ANY character, which would make this assertion pass
        # regardless of the separator actually used.
        stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()[
            "date_with_dots"
        ]
        assert (
            re.match(r"^\d{4}\.\d{2}\.\d{2}\.\d{2}\.\d{2}\.\d{2}\.\d{6}$", stored)
            is not None
        )

    def test_complexdatetime_usage(self):
        """Ordering, range filtering and microsecond-level comparisons
        behave like a regular datetime field despite string storage.
        """

        class LogEntry(Document):
            date = ComplexDateTimeField()

        LogEntry.drop_collection()

        d1 = datetime.datetime(1950, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()

        log1 = LogEntry.objects.get(date=d1)
        assert log == log1

        # create extra 59 log entries for a total of 60
        for i in range(1951, 2010):
            d = datetime.datetime(i, 1, 1, 0, 0, 1, 999)
            LogEntry(date=d).save()

        assert LogEntry.objects.count() == 60

        # Test ordering: ascending yields non-decreasing dates,
        # descending yields non-increasing dates.
        logs = LogEntry.objects.order_by("date")
        for i in range(59):
            assert logs[i].date <= logs[i + 1].date

        logs = LogEntry.objects.order_by("-date")
        for i in range(59):
            assert logs[i].date >= logs[i + 1].date

        # Test searching: the 1980 boundary splits the 60 entries evenly
        # (the 1980 entry itself matches both inclusive filters).
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        assert logs.count() == 30

        logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
        assert logs.count() == 30

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2011, 1, 1),
            date__gte=datetime.datetime(2000, 1, 1),
        )
        assert logs.count() == 10

        LogEntry.drop_collection()

        # Test microsecond-level ordering/filtering - this is precisely
        # what the plain DateTimeField cannot guarantee.
        for microsecond in (99, 999, 9999, 10000):
            LogEntry(date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)).save()

        logs = list(LogEntry.objects.order_by("date"))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            assert log.date < next_log.date

        logs = list(LogEntry.objects.order_by("-date"))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            assert log.date > next_log.date

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000)
        )
        assert logs.count() == 4

    def test_no_default_value(self):
        """Without a default the field stays None before and after save."""

        class Log(Document):
            timestamp = ComplexDateTimeField()

        Log.drop_collection()

        log = Log()
        assert log.timestamp is None
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        assert fetched_log.timestamp is None

    def test_default_static_value(self):
        """A static datetime default is stored and read back verbatim."""
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=NOW)

        Log.drop_collection()

        log = Log()
        assert log.timestamp == NOW
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        assert fetched_log.timestamp == NOW

    def test_default_callable(self):
        """A callable default is invoked at document-creation time."""
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=datetime.datetime.utcnow)

        Log.drop_collection()

        log = Log()
        assert log.timestamp >= NOW
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        assert fetched_log.timestamp >= NOW

    def test_setting_bad_value_does_not_raise_unless_validate_is_called(self):
        """Assignment of a non-datetime value is lazy: the error surfaces
        only on validate()/save(), not on attribute assignment.
        """
        # test regression of #2253

        class Log(Document):
            timestamp = ComplexDateTimeField()

        Log.drop_collection()

        log = Log(timestamp="garbage")
        with pytest.raises(ValidationError):
            log.validate()

        with pytest.raises(ValidationError):
            log.save()
							
								
								
									
										163
									
								
								tests/fields/test_date_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										163
									
								
								tests/fields/test_date_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,163 @@ | |||||||
|  | import datetime | ||||||
|  |  | ||||||
|  | import pytest | ||||||
|  |  | ||||||
|  | try: | ||||||
|  |     import dateutil | ||||||
|  | except ImportError: | ||||||
|  |     dateutil = None | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class TestDateField(MongoDBTestCase):
    """Behavioral tests for DateField: string parsing, truncation of
    datetimes to dates, defaults, ordering and validation.
    """

    def test_date_from_empty_string(self):
        """Saving an empty-string value must raise ValidationError."""

        class MyDoc(Document):
            dt = DateField()

        doc = MyDoc(dt="")
        with pytest.raises(ValidationError):
            doc.save()

    def test_date_from_whitespace_string(self):
        """Saving a whitespace-only string must raise ValidationError."""

        class MyDoc(Document):
            dt = DateField()

        doc = MyDoc(dt="   ")
        with pytest.raises(ValidationError):
            doc.save()

    def test_default_values_today(self):
        """A callable default (datetime.date.today) is applied when the
        document is created, and mirrored into the raw _data dict.
        """

        class Person(Document):
            day = DateField(default=datetime.date.today)

        person = Person()
        person.validate()
        # Repeated attribute access must yield a stable value.
        assert person.day == person.day
        assert person.day == datetime.date.today()
        assert person._data["day"] == person.day

    def test_date(self):
        """Datetimes assigned to a DateField are truncated to plain dates
        on save/reload, whatever their (sub-)millisecond precision.

        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
        """

        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        # A plain date round-trips unchanged.
        entry = LogEntry()
        entry.date = datetime.date.today()
        entry.save()
        entry.reload()
        assert entry.date == datetime.date.today()

        # Microseconds are irrelevant once truncated to a date.
        dt_precise = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        dt_plain = datetime.datetime(1970, 1, 1, 0, 0, 1)
        entry = LogEntry()
        entry.date = dt_precise
        entry.save()
        entry.reload()
        assert entry.date == dt_precise.date()
        assert entry.date == dt_plain.date()

        # Millisecond-level differences are likewise dropped.
        dt_precise = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        dt_millis = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000)
        entry.date = dt_precise
        entry.save()
        entry.reload()
        assert entry.date == dt_precise.date()
        assert entry.date == dt_millis.date()

    def test_regular_usage(self):
        """Lookup by datetime or ISO string, ordering, and range filters."""

        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        first_date = datetime.datetime(1970, 1, 1, 0, 0, 1)
        entry = LogEntry()
        entry.date = first_date
        entry.validate()
        entry.save()

        # Lookups accept datetimes and "YYYY-MM-DD HH:MM:SS" strings alike.
        for query in (first_date, first_date.isoformat(" ")):
            fetched = LogEntry.objects.get(date=query)
            assert entry == fetched

        # ISO "T"-separated strings need dateutil to parse.
        if dateutil:
            fetched = LogEntry.objects.get(date=first_date.isoformat("T"))
            assert entry == fetched

        # create additional 19 log entries for a total of 20
        for year in range(1971, 1990):
            LogEntry(date=datetime.datetime(year, 1, 1, 0, 0, 1)).save()

        assert LogEntry.objects.count() == 20

        # Ascending order yields non-decreasing dates.
        ordered = LogEntry.objects.order_by("date")
        for idx in range(19):
            assert ordered[idx].date <= ordered[idx + 1].date

        # Descending order yields non-increasing dates.
        ordered = LogEntry.objects.order_by("-date")
        for idx in range(19):
            assert ordered[idx].date >= ordered[idx + 1].date

        # Range filtering: half the entries fall on/after 1980.
        recent = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        assert recent.count() == 10

    def test_validation(self):
        """Datetimes, dates and parseable strings pass validation;
        anything else raises ValidationError.
        """

        class LogEntry(Document):
            time = DateField()

        entry = LogEntry()
        entry.time = datetime.datetime.now()
        entry.validate()

        entry.time = datetime.date.today()
        entry.validate()

        entry.time = datetime.datetime.now().isoformat(" ")
        entry.validate()

        if dateutil:
            entry.time = datetime.datetime.now().isoformat("T")
            entry.validate()

        entry.time = -1
        with pytest.raises(ValidationError):
            entry.validate()
        entry.time = "ABC"
        with pytest.raises(ValidationError):
            entry.validate()
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user