Compare commits

1588 Commits
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 04c26acdd6 | ||
|  | d0a15a8924 | ||
|  | 2215e2746b | ||
|  | 0e6bcbc030 | ||
|  | 232071f8f4 | ||
|  | fc0fb31d43 | ||
|  | 1bd8cd803e | ||
|  | ef57a58155 | ||
|  | 9680259904 | ||
|  | 49a4d23371 | ||
|  | b9d370c885 | ||
|  | e5a2714baf | ||
|  | ff596fcb7e | ||
|  | f0fad6df19 | ||
|  | da173cf0e2 | ||
|  | 1669f0c5a4 | ||
|  | b045925efe | ||
|  | b3ce65453a | ||
|  | 50d891cb7b | ||
|  | e31f9150d2 | ||
|  | 74ceb9703b | ||
|  | 58a3c6de03 | ||
|  | 86ad8d119d | ||
|  | 34d273015c | ||
|  | 7147043d63 | ||
|  | b9b536133d | ||
|  | 8fd969aba9 | ||
|  | f244207168 | ||
|  | 0620ac5641 | ||
|  | 3b9a167022 | ||
|  | b479bb7c6b | ||
|  | 8ef771912d | ||
|  | 2d1c9afbb7 | ||
|  | 9ff5d8426c | ||
|  | 467e9c3ddf | ||
|  | 0d5e028c55 | ||
|  | 5858ea1bf0 | ||
|  | 1f220b4eaf | ||
|  | 97c99ca40d | ||
|  | 80a3b1c88c | ||
|  | 68447af127 | ||
|  | d033e3b133 | ||
|  | 4428842e77 | ||
|  | f38cc6edd3 | ||
|  | aeb4f8f4da | ||
|  | 1b7c2085c9 | ||
|  | 48b979599f | ||
|  | af3d3b7ee6 | ||
|  | 56fe126f3a | ||
|  | 04905d4b37 | ||
|  | 460df112f4 | ||
|  | 772096ec55 | ||
|  | 98d64f41c6 | ||
|  | 9a3bca8ab6 | ||
|  | 5781753cc8 | ||
|  | fd3699a519 | ||
|  | 4f6a24411d | ||
|  | de3888a48b | ||
|  | 700fe80a00 | ||
|  | 49e33b978d | ||
|  | 81197d6061 | ||
|  | aa368be4d3 | ||
|  | 0f1fce4a7b | ||
|  | cc591a634a | ||
|  | 6e332e782b | ||
|  | 0e9920b190 | ||
|  | fd35df07c4 | ||
|  | 8f3d21c312 | ||
|  | 7b772e3a4a | ||
|  | 59438a4768 | ||
|  | fe9f7f1f80 | ||
|  | 6b5231265c | ||
|  | 0014346de1 | ||
|  | bc0e04d833 | ||
|  | dd11911ed7 | ||
|  | 0b3feedf94 | ||
|  | 65b8cfc96a | ||
|  | 2e284b93b8 | ||
|  | c28bb517cb | ||
|  | aa3ff39ef8 | ||
|  | 149fb953e7 | ||
|  | bf12621ce9 | ||
|  | 8a1a68ea7c | ||
|  | eabb8f60f5 | ||
|  | c00a378776 | ||
|  | ee6ef1ff4b | ||
|  | fb8f02d0c0 | ||
|  | a025199294 | ||
|  | 87babaaa30 | ||
|  | a4fff15491 | ||
|  | a190dfe2c4 | ||
|  | 3926473917 | ||
|  | 9ffe0bcdee | ||
|  | 4fa3134294 | ||
|  | 92f6fce77d | ||
|  | b1a2cf061d | ||
|  | 0a05c1f590 | ||
|  | 7dbc217768 | ||
|  | bf411ab2ca | ||
|  | 277b827d4d | ||
|  | e0bec881bc | ||
|  | cc5e2ba054 | ||
|  | 904fcd1a0a | ||
|  | 2ec454447f | ||
|  | ecd297e227 | ||
|  | 079ee3c191 | ||
|  | f2638ecd02 | ||
|  | ad6ff819fe | ||
|  | 48357640c6 | ||
|  | e6c2169f76 | ||
|  | 1d17dc4663 | ||
|  | eeac3bd2e6 | ||
|  | 3f5a15d236 | ||
|  | 91493a1e79 | ||
|  | 0c274908ec | ||
|  | 338c40b5d5 | ||
|  | fc3ccf9606 | ||
|  | 746faceb5c | ||
|  | 8c3058d99b | ||
|  | eb56fb9bda | ||
|  | 161493c0d2 | ||
|  | cb9f329d11 | ||
|  | 03af784ebe | ||
|  | e5f6e4584a | ||
|  | 79f9f223d0 | ||
|  | 0bc18cd6e1 | ||
|  | 30a3c6a5b7 | ||
|  | 90c5d83f84 | ||
|  | d8b8ff6851 | ||
|  | ee664f0c90 | ||
|  | f8d371229e | ||
|  | 94a7e813b1 | ||
|  | 8ef7213426 | ||
|  | 2f4464ead5 | ||
|  | 89b93461ac | ||
|  | 9e40f3ae83 | ||
|  | f4962fbc40 | ||
|  | c9d53ca5d5 | ||
|  | 65f50fd713 | ||
|  | bf1d04e399 | ||
|  | 5a8e5e5a40 | ||
|  | f3919dd839 | ||
|  | 9f82a02ddf | ||
|  | 015a36c85f | ||
|  | fbd3388a59 | ||
|  | d8a52d68c5 | ||
|  | 4286708e2e | ||
|  | e362d089e1 | ||
|  | 6b657886a5 | ||
|  | eb16945147 | ||
|  | 38047ca992 | ||
|  | c801e79d4b | ||
|  | 3fca3739de | ||
|  | c218c8bb6c | ||
|  | 0bbc05995a | ||
|  | 3adb67901b | ||
|  | d4350e7da4 | ||
|  | 4665658145 | ||
|  | 0d289fd5a1 | ||
|  | aabc18755c | ||
|  | 1f2a5db016 | ||
|  | ff40f66291 | ||
|  | 7f77084e0e | ||
|  | aca4de728e | ||
|  | 9e7ca43cad | ||
|  | 7116dec74a | ||
|  | a5302b870b | ||
|  | 604e9974b6 | ||
|  | 3e1c83f8fa | ||
|  | e431e27cb2 | ||
|  | 4f188655d0 | ||
|  | 194b0cac88 | ||
|  | 7b4175fc5c | ||
|  | adb5f74ddb | ||
|  | 107a1c34c8 | ||
|  | dc7da5204f | ||
|  | 0301bca176 | ||
|  | 49f9bca23b | ||
|  | 31498bd7dd | ||
|  | 1698f398eb | ||
|  | 4275c2d7b7 | ||
|  | 22bff8566d | ||
|  | d8657be320 | ||
|  | 3db9d58dac | ||
|  | 3fbe9c3cdd | ||
|  | 130e9c519c | ||
|  | 78c9e9745d | ||
|  | 38ebb5abf4 | ||
|  | 9b73be26ab | ||
|  | fd0095b73f | ||
|  | 226049f66a | ||
|  | dc1cf88ca6 | ||
|  | f5f8b730b5 | ||
|  | e8f6b42316 | ||
|  | 49b0d73654 | ||
|  | 394da67cf1 | ||
|  | ef7da36ac6 | ||
|  | 1312100bc7 | ||
|  | 4085bc2152 | ||
|  | f4d7e72426 | ||
|  | ece63ad071 | ||
|  | a9550b8243 | ||
|  | 43724e40b2 | ||
|  | 1bfa40e926 | ||
|  | d493f71c4e | ||
|  | 87f4d1a323 | ||
|  | 0a0e6114f5 | ||
|  | 41d36fa3bf | ||
|  | 707923e3f5 | ||
|  | d9b9581df2 | ||
|  | 463e7c66af | ||
|  | 2be28a22a7 | ||
|  | d73f0bb1af | ||
|  | ce74978b1e | ||
|  | 2b0157aecd | ||
|  | f49baf5d90 | ||
|  | 7cc964c7d8 | ||
|  | bc77322c2f | ||
|  | 8913a74a86 | ||
|  | af35b25d15 | ||
|  | 476b07af6e | ||
|  | e2b9a02531 | ||
|  | 6cc6229066 | ||
|  | 4c62a060f0 | ||
|  | 3d80637fa4 | ||
|  | 68be9fe979 | ||
|  | 547cd4a3ae | ||
|  | ee2d50b2d1 | ||
|  | 15c3ddece8 | ||
|  | beaa9744b7 | ||
|  | 8eb51790b5 | ||
|  | aadc6262ed | ||
|  | 00ae6298d4 | ||
|  | ad0669a326 | ||
|  | 85df76c623 | ||
|  | 87512246cb | ||
|  | a3f9016ae9 | ||
|  | 4e58e9f8d1 | ||
|  | 7c533394fd | ||
|  | 333e014f13 | ||
|  | c0c0efce18 | ||
|  | beabaee345 | ||
|  | c937af3919 | ||
|  | aa4a6ae023 | ||
|  | b57946ec98 | ||
|  | 1e110a2c41 | ||
|  | b234aa48e4 | ||
|  | 8086576677 | ||
|  | 03e34299f0 | ||
|  | 421e3f324f | ||
|  | a0b803959c | ||
|  | ff4d57032a | ||
|  | ba34589065 | ||
|  | a4d11eef46 | ||
|  | fda2e2b47a | ||
|  | d287f480e5 | ||
|  | d85f0e6226 | ||
|  | cfb4943986 | ||
|  | b453a96211 | ||
|  | 81f9b351b3 | ||
|  | 4bca3de42f | ||
|  | 235b1a3679 | ||
|  | 450658d7ac | ||
|  | 8e17e42e26 | ||
|  | 2d6a4c4b90 | ||
|  | 38703acc29 | ||
|  | 095217e797 | ||
|  | 86e965f854 | ||
|  | 57db68dc04 | ||
|  | 72de6d67c7 | ||
|  | b2c3acd025 | ||
|  | 605de59bd0 | ||
|  | e0565ddac5 | ||
|  | 18b68f1b80 | ||
|  | ea88806630 | ||
|  | 412bed0f6d | ||
|  | 53cf26b9af | ||
|  | d738462139 | ||
|  | 2fa48cd9e5 | ||
|  | e64a7a9448 | ||
|  | 9490ad2bf7 | ||
|  | 84f3dce492 | ||
|  | 60c42dddd5 | ||
|  | f93f9406ee | ||
|  | 705c55ce24 | ||
|  | 928770c43a | ||
|  | 59fbd505a0 | ||
|  | 1cc20c9770 | ||
|  | f8f267a880 | ||
|  | 80ea1f6883 | ||
|  | 75ee282a3d | ||
|  | 4edad4601c | ||
|  | 152b51fd33 | ||
|  | 66a0fca4ad | ||
|  | e7c7a66cd1 | ||
|  | b3dbb87c3c | ||
|  | 3d45538998 | ||
|  | 8df9d3fef9 | ||
|  | 99e660c66d | ||
|  | aa02f87b69 | ||
|  | f0d1ee2cb4 | ||
|  | ca4967311d | ||
|  | 65eb6ab611 | ||
|  | 1cb2f7814c | ||
|  | b5485b16e6 | ||
|  | 62c8597a3b | ||
|  | 488604ff2e | ||
|  | bd88a17b8e | ||
|  | 8e892dccfe | ||
|  | c22eb34017 | ||
|  | dcf3edb03e | ||
|  | c85b59d3b5 | ||
|  | 1170de1e8e | ||
|  | 332bd767d4 | ||
|  | 0053b30237 | ||
|  | d44533d956 | ||
|  | 12d8bd5a22 | ||
|  | ae326678ec | ||
|  | 8d31f165c0 | ||
|  | cfd4d6a161 | ||
|  | 329f030a41 | ||
|  | 68dc2925fb | ||
|  | 0d4e61d489 | ||
|  | dc7b96a569 | ||
|  | 50882e5bb0 | ||
|  | 280a73af3b | ||
|  | d8c0631dab | ||
|  | 9166ba91d7 | ||
|  | 6bc4e602bb | ||
|  | 45a7520fc3 | ||
|  | 64c0cace85 | ||
|  | 82af5e4a19 | ||
|  | 7e0ba1b335 | ||
|  | 44b7f792fe | ||
|  | a3e432eb68 | ||
|  | 009f9a2b14 | ||
|  | 2ca905b6e5 | ||
|  | 3b099f936a | ||
|  | 4d6ddb070e | ||
|  | b205314424 | ||
|  | e83132f32c | ||
|  | 1b38309d70 | ||
|  | 6e8196d475 | ||
|  | 90fecc56dd | ||
|  | d3d7f0e670 | ||
|  | 37ffeafeff | ||
|  | abc159b7b9 | ||
|  | 648b28876d | ||
|  | 5b9f2bac87 | ||
|  | 17151f67c2 | ||
|  | 5f14d958ac | ||
|  | bd6c52e025 | ||
|  | cb77bb6b69 | ||
|  | 78b240b740 | ||
|  | 7e30f00178 | ||
|  | 35310dbc73 | ||
|  | af82c07acc | ||
|  | 3f75f30f26 | ||
|  | f7f0e10d4d | ||
|  | 091238a2cf | ||
|  | 0458ef869e | ||
|  | 0bf08db7b9 | ||
|  | d3420918cd | ||
|  | 138e759161 | ||
|  | f1d6ce7d12 | ||
|  | ff749a7a0a | ||
|  | bff78ca8dd | ||
|  | 81647d67a0 | ||
|  | d8924ed892 | ||
|  | 799cdafae6 | ||
|  | bc0c55e49a | ||
|  | c61c6a8525 | ||
|  | 3e764d068c | ||
|  | ac25f4b98b | ||
|  | aa6ff8c84a | ||
|  | 37ca79e9c5 | ||
|  | 6040b4b494 | ||
|  | 51ea3e3c6f | ||
|  | 5a16dda50d | ||
|  | bbfa978861 | ||
|  | 54ca7bf09f | ||
|  | 8bf5370b6c | ||
|  | ecefa05e03 | ||
|  | e013494fb2 | ||
|  | 4853f74dbf | ||
|  | 6f45ee6813 | ||
|  | c60ed32f3a | ||
|  | 178851589d | ||
|  | 5bcc679194 | ||
|  | 1e17b5ac66 | ||
|  | 19f12f3f2f | ||
|  | 71e8d9a490 | ||
|  | e3cd553f82 | ||
|  | b61c8cd104 | ||
|  | 8f288fe458 | ||
|  | 02a920feea | ||
|  | be2c4f2b3c | ||
|  | 7ac74b1c1f | ||
|  | 933cb1d5c7 | ||
|  | 6203e30152 | ||
|  | 7d94af0e31 | ||
|  | 564a2b5f1e | ||
|  | 1dbe7a3163 | ||
|  | 47f8a126ca | ||
|  | 693195f70b | ||
|  | 2267b7e7d7 | ||
|  | a06e605e67 | ||
|  | 47c67ecc99 | ||
|  | 4c4b7cbeae | ||
|  | ddececbfea | ||
|  | 71a6f3d1a4 | ||
|  | e86cf962e9 | ||
|  | 99a58d5c91 | ||
|  | eecbb5ca90 | ||
|  | fbb3bf869c | ||
|  | b887ea9623 | ||
|  | c68e3e1238 | ||
|  | c5080e4030 | ||
|  | 0d01365751 | ||
|  | f4a06ad65d | ||
|  | 05a22d5a54 | ||
|  | 2424ece0c5 | ||
|  | 2d02551d0a | ||
|  | ac416aeeb3 | ||
|  | d09af430e8 | ||
|  | 79454b5eed | ||
|  | 921c1fa412 | ||
|  | 1aba145bc6 | ||
|  | 290d9df3eb | ||
|  | aa76ccdd25 | ||
|  | abe8070c36 | ||
|  | 2d28c258fd | ||
|  | 1338839b52 | ||
|  | 058203a0ec | ||
|  | 8fdf664968 | ||
|  | 50555ec73e | ||
|  | 951a532a9f | ||
|  | e940044603 | ||
|  | babfbb0fcd | ||
|  | bbed312bdd | ||
|  | b593764ded | ||
|  | 483c840fc8 | ||
|  | de80f0ccff | ||
|  | d0b87f7f82 | ||
|  | bf32d3c39a | ||
|  | bc14f2cdaa | ||
|  | 06a21e038a | ||
|  | 4d5eba317e | ||
|  | d37a30e083 | ||
|  | 9170eea784 | ||
|  | 2769967e1e | ||
|  | 609f50d261 | ||
|  | 82f0eb1cbc | ||
|  | b47669403b | ||
|  | 91899acfe5 | ||
|  | ffedd33101 | ||
|  | c9ed930606 | ||
|  | af292b0ec2 | ||
|  | 1ead7f9b2b | ||
|  | 5c91877b69 | ||
|  | e57d834a0d | ||
|  | 0578cdb62e | ||
|  | b661afba01 | ||
|  | b1002dd4f9 | ||
|  | 8e69008699 | ||
|  | f45552f8f8 | ||
|  | a4fe091a51 | ||
|  | 216217e2c6 | ||
|  | 799775b3a7 | ||
|  | ae0384df29 | ||
|  | 8f57279dc7 | ||
|  | e8dbd12f22 | ||
|  | ca230d28b4 | ||
|  | c96065b187 | ||
|  | 2abcf4764d | ||
|  | 6a4c342e45 | ||
|  | bb0b1e88ef | ||
|  | 63c9135184 | ||
|  | 7fac0ef961 | ||
|  | 5a2e268160 | ||
|  | a4e4e8f440 | ||
|  | b62ce947a6 | ||
|  | 9538662262 | ||
|  | 09d7ae4f80 | ||
|  | d7ded366c7 | ||
|  | 09c77973a0 | ||
|  | 22f3c70234 | ||
|  | 6527b1386f | ||
|  | baabf97acd | ||
|  | 97005aca66 | ||
|  | 6e8ea50c19 | ||
|  | 1fcd706e11 | ||
|  | 008bb19b0b | ||
|  | 023acab779 | ||
|  | 68e8584520 | ||
|  | 5d120ebca0 | ||
|  | f91b89f723 | ||
|  | 1181b75e16 | ||
|  | 5f00b4f923 | ||
|  | 4c31193b82 | ||
|  | 17fc9d1886 | ||
|  | d7285d43dd | ||
|  | aa8a991d20 | ||
|  | 40ba51ac43 | ||
|  | d20430a778 | ||
|  | f08f749cd9 | ||
|  | a6c04f4f9a | ||
|  | 15b6c1590f | ||
|  | 4a8985278d | ||
|  | 996618a495 | ||
|  | 1f02d5fbbd | ||
|  | c58b9f00f0 | ||
|  | f131b18cbe | ||
|  | 118a998138 | ||
|  | 7ad6f036e7 | ||
|  | 1d29b824a8 | ||
|  | 3caf2dce28 | ||
|  | 1fc5b954f2 | ||
|  | 31d99c0bd2 | ||
|  | 0ac59c67ea | ||
|  | 8e8c74c621 | ||
|  | f996f3df74 | ||
|  | 9499c97e18 | ||
|  | c1c81fc07b | ||
|  | 072e86a2f0 | ||
|  | 70d6e763b0 | ||
|  | 15f4d4fee6 | ||
|  | 82e28dec43 | ||
|  | b407c0e6c6 | ||
|  | 27ea01ee05 | ||
|  | 7ed5829b2c | ||
|  | 5bf1dd55b1 | ||
|  | 36aebffcc0 | ||
|  | 84c42ed58c | ||
|  | 9634e44343 | ||
|  | 048a045966 | ||
|  | a18c8c0eb4 | ||
|  | 5fb0f46e3f | ||
|  | 962997ed16 | ||
|  | daca0ebc14 | ||
|  | 9ae8fe7c2d | ||
|  | 1907133f99 | ||
|  | 4334955e39 | ||
|  | f00c9dc4d6 | ||
|  | 7d0687ec73 | ||
|  | da3773bfe8 | ||
|  | 6e1c132ee8 | ||
|  | 24ba35d76f | ||
|  | 64b63e9d52 | ||
|  | 7848a82a1c | ||
|  | 6a843cc8b2 | ||
|  | ecdb0785a4 | ||
|  | 9a55caed75 | ||
|  | 2e01eb87db | ||
|  | 597b962ad5 | ||
|  | 7531f533e0 | ||
|  | 6b9d71554e | ||
|  | bb1089e03d | ||
|  | c82f0c937d | ||
|  | 00d2fd685a | ||
|  | f28e1b8c90 | ||
|  | 2b17985a11 | ||
|  | b392e3102e | ||
|  | 58b0b18ddd | ||
|  | 6a9ef319d0 | ||
|  | cf38ef70cb | ||
|  | ac64ade10f | ||
|  | ee85af34d8 | ||
|  | 9d53ad53e5 | ||
|  | 9cdc3ebee6 | ||
|  | 14a5e05d64 | ||
|  | f7b7d0f79e | ||
|  | d98f36ceff | ||
|  | abfabc30c9 | ||
|  | c1aff7a248 | ||
|  | e44f71eeb1 | ||
|  | cb578c84e2 | ||
|  | 565e1dc0ed | ||
|  | b1e28d02f7 | ||
|  | d1467c2f73 | ||
|  | c439150431 | ||
|  | 9bb3dfd639 | ||
|  | 4caa58b9ec | ||
|  | b5213097e8 | ||
|  | 61081651e4 | ||
|  | 4ccfdf051d | ||
|  | 9f2a9d9cda | ||
|  | 827de76345 | ||
|  | fdcaca42ae | ||
|  | 0744892244 | ||
|  | b70ffc69df | ||
|  | 73b12cc32f | ||
|  | ba6a37f315 | ||
|  | 6f8be8c8ac | ||
|  | 68497542b3 | ||
|  | 3d762fed10 | ||
|  | 48b849c031 | ||
|  | 88c4aa2d87 | ||
|  | fb8c0d8fe3 | ||
|  | 1a863725d1 | ||
|  | 7b4245c91c | ||
|  | 9bd0d6b99d | ||
|  | b640c766db | ||
|  | 50ffa8014e | ||
|  | 7ef688b256 | ||
|  | b4fe0b35e4 | ||
|  | a2cbbdf819 | ||
|  | 35b7efe3f4 | ||
|  | 7cea2a768f | ||
|  | 7247b9b68e | ||
|  | dca837b843 | ||
|  | c60c2ee8d0 | ||
|  | 3cdb5b5db2 | ||
|  | b9cc8a4ca9 | ||
|  | 28606e9985 | ||
|  | 5bbe782812 | ||
|  | d65861cdf7 | ||
|  | c8df3fd2a7 | ||
|  | 6cfe6652a3 | ||
|  | 6b711da69d | ||
|  | 9b02867293 | ||
|  | 595cb99b2d | ||
|  | f0a3445250 | ||
|  | 6d353dae1e | ||
|  | 57a38282a9 | ||
|  | db47604865 | ||
|  | 2a121fe202 | ||
|  | 36baff0d7f | ||
|  | 201f3008b1 | ||
|  | f4873fee18 | ||
|  | e02261be6d | ||
|  | 2919e6765c | ||
|  | b8fc4d0079 | ||
|  | 4a46f5f095 | ||
|  | 3484ceabb8 | ||
|  | cab659dce6 | ||
|  | a657f29439 | ||
|  | 4c054bf316 | ||
|  | dc7922c38b | ||
|  | c6c68abfcc | ||
|  | 6aacb0c898 | ||
|  | e7000db491 | ||
|  | fce994ea7f | ||
|  | 6c6446765e | ||
|  | 69a99c70c6 | ||
|  | 56d9f7a8af | ||
|  | 363aefe399 | ||
|  | 7fd4f792ba | ||
|  | 6fbdde63d8 | ||
|  | b04dc90cdf | ||
|  | b525c91bd3 | ||
|  | a32c893078 | ||
|  | 2c6a744848 | ||
|  | 4492874d08 | ||
|  | d3a592e5bf | ||
|  | cab21b1b21 | ||
|  | 1319e422ea | ||
|  | c88ea40b57 | ||
|  | 3194a37fcb | ||
|  | 72ebaa52e9 | ||
|  | 0e00695fc7 | ||
|  | 48a691e722 | ||
|  | cf54d6d6f8 | ||
|  | a03fe234d0 | ||
|  | d88d40cc08 | ||
|  | d3b4af116e | ||
|  | 352b23331b | ||
|  | bdd6041a5c | ||
|  | 1894003f8a | ||
|  | 220513ae42 | ||
|  | fcbabbe357 | ||
|  | 3627969fce | ||
|  | 8807c0dbef | ||
|  | 23cc9f6ff8 | ||
|  | e50799e9c4 | ||
|  | b92c4844eb | ||
|  | c306d42d08 | ||
|  | e31558318e | ||
|  | 78a9420f26 | ||
|  | b47c5b5bfc | ||
|  | 28a312accf | ||
|  | 611094e92e | ||
|  | 2a8579a6a5 | ||
|  | 47577f2f47 | ||
|  | 34e3e45843 | ||
|  | 364dc9ddfb | ||
|  | 23324f0f87 | ||
|  | 17fa9a3b77 | ||
|  | 424b3ca308 | ||
|  | 26e2fc8fd4 | ||
|  | 8e18484898 | ||
|  | 354cfe0f9c | ||
|  | 983474b2bd | ||
|  | 14d861bcbb | ||
|  | f6cd349a16 | ||
|  | 8e1c4dec87 | ||
|  | 18b47e4a73 | ||
|  | 4f157f50ed | ||
|  | f44a2f4857 | ||
|  | c685ace327 | ||
|  | f23b0faf41 | ||
|  | e0e2ca7ccd | ||
|  | 83fe7f7eef | ||
|  | 1feaa8f2e9 | ||
|  | 598d6bf4c5 | ||
|  | 0afd5a40d6 | ||
|  | 26b70e9ed3 | ||
|  | a1a93a4bdd | ||
|  | 4939a7dd7c | ||
|  | 0fa6610fdb | ||
|  | b0148e7860 | ||
|  | 59a06a242d | ||
|  | ffe902605d | ||
|  | 556f7e85fc | ||
|  | 45c86be402 | ||
|  | bf34f413de | ||
|  | 9b022b187f | ||
|  | c3409d64dc | ||
|  | 3c5c3b5026 | ||
|  | f240f00d84 | ||
|  | 68c7764c63 | ||
|  | adfb039ba6 | ||
|  | 89416d9856 | ||
|  | 9b6c972e0f | ||
|  | 55fc04752a | ||
|  | 96f0919633 | ||
|  | 17b140baf4 | ||
|  | 45c2151d0f | ||
|  | 1887f5b7e7 | ||
|  | 708d1c7a32 | ||
|  | acf8c3015a | ||
|  | f83ae5789b | ||
|  | 57ccfcfc1b | ||
|  | dd0fdcfdd4 | ||
|  | 5c805be067 | ||
|  | e423380d7f | ||
|  | 4d8bebc917 | ||
|  | 4314fa883f | ||
|  | d6e39b362b | ||
|  | f89214f9cf | ||
|  | d17cac8210 | ||
|  | aa49283fa9 | ||
|  | e79ea7a2cf | ||
|  | 8a1d280f19 | ||
|  | 6a8eb9562f | ||
|  | 8f76e1e344 | ||
|  | 7b9f084e6b | ||
|  | 5b1693a908 | ||
|  | fd7c00da49 | ||
|  | 7fc5ced3af | ||
|  | a86092fb64 | ||
|  | 003827e916 | ||
|  | b15673c525 | ||
|  | 00363303b1 | ||
|  | 48fbe890f8 | ||
|  | 4179877cc7 | ||
|  | 282b83ac08 | ||
|  | 193656e71b | ||
|  | a25d127f36 | ||
|  | cf9df548ca | ||
|  | f29b93c762 | ||
|  | 032ace40d1 | ||
|  | f74dd1cb3c | ||
|  | 29889d1e35 | ||
|  | d6d19c4229 | ||
|  | ab08e67eaf | ||
|  | 00bf6ac258 | ||
|  | b65478e7d9 | ||
|  | e83b529f1c | ||
|  | 408274152b | ||
|  | 8ff82996fb | ||
|  | d59c4044b7 | ||
|  | 3574e21e4f | ||
|  | 5a091956ef | ||
|  | 14e9c58444 | ||
|  | bfe5b03c69 | ||
|  | f96f7f840e | ||
|  | a3bcf26dce | ||
|  | a7852a89cc | ||
|  | 1b0c761fc0 | ||
|  | 5e4e8d4eda | ||
|  | bd524d2e1e | ||
|  | 60fe919992 | ||
|  | b90063b170 | ||
|  | d9fce49b08 | ||
|  | 5dbee2a270 | ||
|  | 4779106139 | ||
|  | bf2de81873 | ||
|  | 28cdedc9aa | ||
|  | 7e90571404 | ||
|  | 42bbe63927 | ||
|  | 7ddbea697e | ||
|  | b4860de34d | ||
|  | 576f23d5fb | ||
|  | 86548fc7bf | ||
|  | b3b4d992fe | ||
|  | d72daf5f39 | ||
|  | 9ad959a478 | ||
|  | cc00a321da | ||
|  | de74273108 | ||
|  | a7658c7573 | ||
|  | 48a85ee6e0 | ||
|  | 461b789515 | ||
|  | b71ff6fbb8 | ||
|  | 1bcdcce93a | ||
|  | c09bfca634 | ||
|  | 36c5f02bfb | ||
|  | eae6e5d9a1 | ||
|  | 364813dd73 | ||
|  | 1a2b1f283b | ||
|  | a0e5cf4ecc | ||
|  | 820f7b4d93 | ||
|  | 727866f090 | ||
|  | 3d45cdc339 | ||
|  | 02a557aa67 | ||
|  | 6da27e5976 | ||
|  | 19a6e324c4 | ||
|  | 62eadbc174 | ||
|  | ae783d4f45 | ||
|  | 1241a902e3 | ||
|  | fdba648afb | ||
|  | b070e7de07 | ||
|  | d0741946c7 | ||
|  | 080226dd72 | ||
|  | 3cb6a5cfac | ||
|  | 758971e068 | ||
|  | 8739ab9c66 | ||
|  | e8e47c39d7 | ||
|  | 446c101018 | ||
|  | 3654591a1b | ||
|  | 7fb1c9dd35 | ||
|  | 0fffaccdf4 | ||
|  | 5902b241f9 | ||
|  | 784386fddc | ||
|  | d424583cbf | ||
|  | 290b821a3a | ||
|  | a0dfa8d421 | ||
|  | ceb00f6748 | ||
|  | 9bd328e147 | ||
|  | 6fb5c312c3 | ||
|  | 3f9ff7254f | ||
|  | f7a3acfaf4 | ||
|  | e4451ccaf8 | ||
|  | 2adb640821 | ||
|  | 765038274c | ||
|  | 2cbdced974 | ||
|  | fc5d9ae100 | ||
|  | 506168ab83 | ||
|  | 088fd6334b | ||
|  | 94cda90a6e | ||
|  | 78601d90c9 | ||
|  | fa4ac95ecc | ||
|  | dd4d4e23ad | ||
|  | acba86993d | ||
|  | 0fc55451c2 | ||
|  | 5c0bd8a810 | ||
|  | 1aebc95145 | ||
|  | 1d3f20b666 | ||
|  | eb2e106871 | ||
|  | f9a887c8c6 | ||
|  | 67ab810cb2 | ||
|  | 3e0d84383e | ||
|  | d245ea3eaa | ||
|  | 843fc03bf4 | ||
|  | c83c635067 | ||
|  | f605eb14e8 | ||
|  | fd02d77c59 | ||
|  | 0da8fb379d | ||
|  | 257a43298b | ||
|  | a2d3bcd571 | ||
|  | d4142c2cdd | ||
|  | e50d66b303 | ||
|  | 08b6433843 | ||
|  | 8cd536aab5 | ||
|  | 2b495c648f | ||
|  | 06048b6d71 | ||
|  | bb22287336 | ||
|  | a45942a966 | ||
|  | 85d621846d | ||
|  | 534acf8df2 | ||
|  | 5a6d4387ea | ||
|  | 317e844886 | ||
|  | b1f62a2735 | ||
|  | 65e4fea4ef | ||
|  | faca8512c5 | ||
|  | 2121387aa2 | ||
|  | 72c4444a60 | ||
|  | 2d8d2e7e6f | ||
|  | 49bff5d544 | ||
|  | 806a80cef1 | ||
|  | c6f0d5e478 | ||
|  | bf30aba005 | ||
|  | 727778b730 | ||
|  | b081ffce50 | ||
|  | e46779f87b | ||
|  | dabe8c1bb7 | ||
|  | 4042f88bd8 | ||
|  | a0947d0c54 | ||
|  | a34fd9ac89 | ||
|  | aa68322641 | ||
|  | 2d76aebb8e | ||
|  | 7cc1d23bc7 | ||
|  | 0bd2103a8c | ||
|  | 7d8916b6e9 | ||
|  | 8b5df3ca17 | ||
|  | ffdfe99d37 | ||
|  | 7efa67e7e6 | ||
|  | d69808c204 | ||
|  | de360c61dd | ||
|  | 6b04ddfad1 | ||
|  | 0d854ce906 | ||
|  | 38fdf26405 | ||
|  | 6835c15d9b | ||
|  | fa38bfd4e8 | ||
|  | 4d5c6d11ab | ||
|  | 9e80da705a | ||
|  | 9b04391f82 | ||
|  | 8f6c0796e3 | ||
|  | 326fcf4398 | ||
|  | fdda27abd1 | ||
|  | 7e8c62104a | ||
|  | fb213f6e74 | ||
|  | 22e75c1691 | ||
|  | 919f221be9 | ||
|  | da7d64667e | ||
|  | d19c6a1573 | ||
|  | 5cd23039a0 | ||
|  | 19b18d3d0a | ||
|  | 101947da8b | ||
|  | d3c3c23630 | ||
|  | abc14316ea | ||
|  | b66621f9c6 | ||
|  | aa5510531d | ||
|  | 12b846586c | ||
|  | b705f5b743 | ||
|  | 18a5fba42b | ||
|  | b5a3b6f86a | ||
|  | 00f2eda576 | ||
|  | c70d252dc3 | ||
|  | 2f088ce29e | ||
|  | ff408c604b | ||
|  | 6621c318db | ||
|  | 22a8ad2fde | ||
|  | 7674dc9b34 | ||
|  | 9e0ca51c2f | ||
|  | 961629d156 | ||
|  | 2cbebf9c99 | ||
|  | 08a4deca17 | ||
|  | ce9ea7baad | ||
|  | b35efb9f72 | ||
|  | c45dfacb41 | ||
|  | 91152a7977 | ||
|  | 0ce081323f | ||
|  | 79486e3393 | ||
|  | 60758dd76b | ||
|  | e74f659015 | ||
|  | c1c09fa6b4 | ||
|  | 47c7cb9327 | ||
|  | 4d6256e1a1 | ||
|  | 13180d92e3 | ||
|  | 6b38ef3c9f | ||
|  | 4f5b0634ad | ||
|  | ea25972257 | ||
|  | b6168898ec | ||
|  | da33cb54fe | ||
|  | 35d0458228 | ||
|  | e6c0280b40 | ||
|  | 15451ff42b | ||
|  | 9ab856e186 | ||
|  | 6e2db1ced6 | ||
|  | 5c4ce8754e | ||
|  | 416486c370 | ||
|  | 2f075be6f8 | ||
|  | a1494c4c93 | ||
|  | d79ab5ffeb | ||
|  | 01526a7b37 | ||
|  | 091a02f737 | ||
|  | aa4996ef28 | ||
|  | 2f4e2bde6b | ||
|  | e90f6a2fa3 | ||
|  | be8f1b9fdd | ||
|  | ba99190f53 | ||
|  | 70088704e2 | ||
|  | 02733e6e58 | ||
|  | 44732a5dd9 | ||
|  | 5bdd35464b | ||
|  | 1eae97731f | ||
|  | 0325a62f18 | ||
|  | 3a5538813c | ||
|  | 1f1b4b95ce | ||
|  | 8c3ed57ecc | ||
|  | dc8a64fa7d | ||
|  | 0d1e72a764 | ||
|  | 9b3fe09508 | ||
|  | 7c0cfb1da2 | ||
|  | 66429ce331 | ||
|  | bce859569f | ||
|  | 425fb8905b | ||
|  | 4f59c7f77f | ||
|  | 21d1faa793 | ||
|  | b9f3991d03 | ||
|  | c4de879b20 | ||
|  | ee5686e91a | ||
|  | 2a795e9138 | ||
|  | 9a6aa8f8c6 | ||
|  | 3794b181d5 | ||
|  | f09256a24e | ||
|  | 34fca9d6f5 | ||
|  | 433f10ef93 | ||
|  | 9f02f71c52 | ||
|  | 3dcc9bc143 | ||
|  | 7311895894 | ||
|  | a7cab51369 | ||
|  | 437b11af9a | ||
|  | 820b5cbb86 | ||
|  | e6a30f899c | ||
|  | 0bc6507df3 | ||
|  | 71c3c632d7 | ||
|  | 99a5f2cd9d | ||
|  | fb00b79d19 | ||
|  | 7782aa7379 | ||
|  | f3ee4a5dac | ||
|  | a8d6e59a7a | ||
|  | 1d4b1870cf | ||
|  | f63ad2dd69 | ||
|  | 6903eed4e7 | ||
|  | b9e922c658 | ||
|  | 54d8c64ad5 | ||
|  | 2f1fe5468e | ||
|  | 24d15d4274 | ||
|  | 0bc7aa52d8 | ||
|  | e52603b4a7 | ||
|  | 3b88712402 | ||
|  | 33e9ef2106 | ||
|  | 689fe4ed9a | ||
|  | b82d026f39 | ||
|  | 009059def4 | ||
|  | 03ff61d113 | ||
|  | c00914bea2 | ||
|  | 944d1c0a4a | ||
|  | 2cf23e33e3 | ||
|  | e2a0b42d03 | ||
|  | 894e9818ac | ||
|  | de18e256ce | ||
|  | 1a3c70ce1b | ||
|  | bd4a603e16 | ||
|  | 358b80d782 | ||
|  | 824ec42005 | ||
|  | 466935e9a3 | ||
|  | b52d3e3a7b | ||
|  | 888a6da4a5 | ||
|  | 972ac73dd9 | ||
|  | d8b238d5f1 | ||
|  | 63206c3da2 | ||
|  | 5713de8966 | ||
|  | 58f293fef3 | ||
|  | ffbb2c9689 | ||
|  | 9cd3dcdebf | ||
|  | f2fe58c3c5 | ||
|  | b78010aa94 | ||
|  | 49035543b9 | ||
|  | f9ccf635ca | ||
|  | e8ea294964 | ||
|  | 19ef2be88b | ||
|  | 30e8b8186f | ||
|  | 741643af5f | ||
|  | 6aaf9ba470 | ||
|  | 5957dc72eb | ||
|  | e32a9777d7 | ||
|  | 84a8f1eb2b | ||
|  | 6810953014 | ||
|  | 398964945a | ||
|  | 5f43c032f2 | ||
|  | 627cf90de0 | ||
|  | 2bedb36d7f | ||
|  | e93a95d0cb | ||
|  | 3f31666796 | ||
|  | 3fe8031cf3 | ||
|  | b27c7ce11b | ||
|  | ed34c2ca68 | ||
|  | 3ca2e953fb | ||
|  | d8a7328365 | ||
|  | f33cd625bf | ||
|  | 80530bb13c | ||
|  | affc12df4b | ||
|  | 4eedf00025 | ||
|  | e5acbcc0dd | ||
|  | 1b6743ee53 | ||
|  | b5fb82d95d | ||
|  | 193aa4e1f2 | ||
|  | ebd34427c7 | ||
|  | 3d75573889 | ||
|  | c6240ca415 | ||
|  | 2ee8984b44 | ||
|  | b7ec587e5b | ||
|  | 47c58bce2b | ||
|  | 96e95ac533 | ||
|  | b013a065f7 | ||
|  | 74b37d11cf | ||
|  | c6cc013617 | ||
|  | f4e1d80a87 | ||
|  | 91dad4060f | ||
|  | e07cb82c15 | ||
|  | 2770cec187 | ||
|  | 5c3928190a | ||
|  | 9f4b04ea0f | ||
|  | 96d20756ca | ||
|  | b8454c7f5b | ||
|  | c84f703f92 | ||
|  | 57c2e867d8 | ||
|  | 553f496d84 | ||
|  | b1d8aca46a | ||
|  | 8e884fd3ea | ||
|  | 76524b7498 | ||
|  | 65914fb2b2 | ||
|  | a4d0da0085 | ||
|  | c9d496e9a0 | ||
|  | 88a951ba4f | ||
|  | 403ceb19dc | ||
|  | 835d3c3d18 | ||
|  | 3135b456be | ||
|  | 0be6d3661a | ||
|  | 6f5f5b4711 | ||
|  | c6c5f85abb | ||
|  | 7b860f7739 | ||
|  | e28804c03a | ||
|  | 1b9432824b | ||
|  | 3b71a6b5c5 | ||
|  | 7ce8768c19 | ||
|  | 25e0f12976 | ||
|  | f168682a68 | ||
|  | d25058a46d | ||
|  | 4d0c092d9f | ||
|  | 15714ef855 | ||
|  | eb743beaa3 | ||
|  | 0007535a46 | ||
|  | 8391af026c | ||
|  | 800f656dcf | ||
|  | 088c5f49d9 | ||
|  | d8d98b6143 | ||
|  | 02fb3b9315 | ||
|  | 4f87db784e | ||
|  | 7e6287b925 | ||
|  | 999cdfd997 | ||
|  | 8d6cb087c6 | ||
|  | 2b7417c728 | ||
|  | 3c455cf1c1 | ||
|  | 5135185e31 | ||
|  | b461f26e5d | ||
|  | faef5b8570 | ||
|  | 0a20e04c10 | ||
|  | d19bb2308d | ||
|  | d8dd07d9ef | ||
|  | 36c56243cd | ||
|  | 23d06b79a6 | ||
|  | e4c4e923ee | ||
|  | 936d2f1f47 | ||
|  | 07018b5060 | ||
|  | ac90d6ae5c | ||
|  | 2141f2c4c5 | ||
|  | 81870777a9 | ||
|  | 845092dcad | ||
|  | dd473d1e1e | ||
|  | d2869bf4ed | ||
|  | 891a3f4b29 | ||
|  | 6767b50d75 | ||
|  | d9e4b562a9 | ||
|  | fb3243f1bc | ||
|  | 5fe1497c92 | ||
|  | 5446592d44 | ||
|  | 40ed9a53c9 | ||
|  | f7ac8cea90 | ||
|  | 4ef5d1f0cd | ||
|  | 6992615c98 | ||
|  | 43dabb2825 | ||
|  | 05e40e5681 | ||
|  | 2c4536e137 | ||
|  | 3dc81058a0 | ||
|  | bd84667a2b | ||
|  | e5b6a12977 | ||
|  | ca415d5d62 | ||
|  | 99b4fe7278 | ||
|  | 327e164869 | ||
|  | 25bc571f30 | ||
|  | 38c7e8a1d2 | ||
|  | ca282e28e0 | ||
|  | 5ef59c06df | ||
|  | 8f55d385d6 | ||
|  | cd2fc25c19 | ||
|  | 709983eea6 | ||
|  | 40e99b1b80 | ||
|  | 488684d960 | ||
|  | f35034b989 | ||
|  | 9d6f9b1f26 | ||
|  | 6148a608fb | ||
|  | 3fa9e70383 | ||
|  | 16fea6f009 | ||
|  | df9ed835ca | ||
|  | e394c8f0f2 | ||
|  | 21974f7288 | ||
|  | 5ef0170d77 | ||
|  | c21dcf14de | ||
|  | a8d20d4e1e | ||
|  | 8b307485b0 | ||
|  | 4544afe422 | ||
|  | 9d7eba5f70 | ||
|  | be0aee95f2 | ||
|  | 3469ed7ab9 | ||
|  | 1f223aa7e6 | ||
|  | 0a431ead5e | ||
|  | f750796444 | ||
|  | c82bcd882a | ||
|  | 7d0ec33b54 | ||
|  | 43d48b3feb | ||
|  | 2e406d2687 | ||
|  | 3f30808104 | ||
|  | ab10217c86 | ||
|  | 00430491ca | ||
|  | 109202329f | ||
|  | 3b1509f307 | ||
|  | 7ad7b08bed | ||
|  | 4650e5e8fb | ||
|  | af59d4929e | ||
|  | e34100bab4 | ||
|  | d9b3a9fb60 | ||
|  | 39eec59c90 | ||
|  | d651d0d472 | ||
|  | 87a2358a65 | ||
|  | cef4e313e1 | ||
|  | 7cc1a4eba0 | ||
|  | c6cc0133b3 | ||
|  | 7748e68440 | ||
|  | 6c2230a076 | ||
|  | 66b233eaea | ||
|  | fed58f3920 | ||
|  | 815b2be7f7 | ||
|  | f420c9fb7c | ||
|  | 01bdf10b94 | ||
|  | ddedc1ee92 | ||
|  | 9e9703183f | ||
|  | adce9e6220 | ||
|  | c499133bbe | ||
|  | 8f505c2dcc | ||
|  | b320064418 | ||
|  | a643933d16 | ||
|  | 2659ec5887 | ||
|  | 9f8327926d | ||
|  | 7a568dc118 | ||
|  | c946b06be5 | ||
|  | c65fd0e477 | ||
|  | 8f8217e928 | ||
|  | 6c9e1799c7 | ||
|  | decd70eb23 | ||
|  | a20d40618f | ||
|  | b4af8ec751 | ||
|  | feb5eed8a5 | ||
|  | f4fa39c70e | ||
|  | 7b7165f5d8 | ||
|  | 13897db6d3 | ||
|  | c4afdb7198 | ||
|  | 0284975f3f | ||
|  | 269e3d1303 | ||
|  | 8c81f7ece9 | ||
|  | f6e0593774 | ||
|  | 3d80e549cb | ||
|  | acc7448dc5 | ||
|  | 35d3d3de72 | ||
|  | 0372e07eb0 | ||
|  | 00221e3410 | ||
|  | 9c264611cf | ||
|  | 31d7f70e27 | ||
|  | 04e8b83d45 | ||
|  | e87bf71f20 | ||
|  | 2dd70c8d62 | ||
|  | a3886702a3 | ||
|  | 713af133a0 | ||
|  | 057ffffbf2 | ||
|  | a81d6d124b | ||
|  | 23f07fde5e | ||
|  | b42b760393 | ||
|  | bf6f4c48c0 | ||
|  | 6133f04841 | ||
|  | 3c18f79ea4 | ||
|  | 2af8342fea | ||
|  | fc3db7942d | ||
|  | 164e2b2678 | ||
|  | b7b28390df | ||
|  | a6e996d921 | ||
|  | 07e666345d | ||
|  | 007f10d29d | ||
|  | f9284d20ca | ||
|  | 9050869781 | ||
|  | 54975de0f3 | ||
|  | a7aead5138 | ||
|  | 6868f66f24 | ||
|  | 3c0b00e42d | ||
|  | 3327388f1f | ||
|  | 04497aec36 | ||
|  | aa9d596930 | ||
|  | f96e68cd11 | ||
|  | 013227323d | ||
|  | 19cbb442ee | ||
|  | c0e7f341cb | ||
|  | 0a1ba7c434 | ||
|  | b708dabf98 | ||
|  | 899e56e5b8 | ||
|  | f6d3bd8ccb | ||
|  | deb5677a57 | ||
|  | 5c464c3f5a | ||
|  | cceef33fef | ||
|  | ed8174fe36 | ||
|  | 3c8906494f | ||
|  | 6e745e9882 | ||
|  | fb4e9c3772 | ||
|  | 2c282f9550 | ||
|  | d92d41cb05 | ||
|  | 82e7050561 | ||
|  | 44f92d4169 | ||
|  | 2f1fae38dd | ||
|  | 9fe99979fe | ||
|  | 6399de0b51 | ||
|  | 959740a585 | ||
|  | 159b082828 | ||
|  | 8e7c5af16c | ||
|  | c1645ab7a7 | ||
|  | 2ae2bfdde9 | ||
|  | 3fe93968a6 | ||
|  | 79a2d715b0 | ||
|  | 50b271c868 | ||
|  | a57f28ac83 | ||
|  | 3f3747a2fe | ||
|  | d133913c3d | ||
|  | e049cef00a | ||
|  | eb8176971c | ||
|  | 5bbfca45fa | ||
|  | 9b500cd867 | ||
|  | b52cae6575 | ||
|  | 35a0142f9b | ||
|  | d4f6ef4f1b | ||
|  | 11024deaae | ||
|  | 5a038de1d5 | ||
|  | 903982e896 | ||
|  | 6355c404cc | ||
|  | 92b9cb5d43 | ||
|  | 7580383d26 | ||
|  | ba0934e41e | ||
|  | a6a1021521 | ||
|  | 33b4d83c73 | ||
|  | 6cf630c74a | ||
|  | 736fe5b84e | ||
|  | 4241bde6ea | ||
|  | b4ce14d744 | ||
|  | 10832a2ccc | ||
|  | 91aca44f67 | ||
|  | 96cfbb201a | ||
|  | b2bc155701 | ||
|  | a70ef5594d | ||
|  | 6d991586fd | ||
|  | f8890ca841 | ||
|  | 0752c6b24f | ||
|  | 3ffaf2c0e1 | ||
|  | a3e0fbd606 | ||
|  | 9c8ceb6b4e | ||
|  | bebce2c053 | ||
|  | 34c6790762 | ||
|  | a5fb009b62 | ||
|  | 9671ca5ebf | ||
|  | 5334ea393e | ||
|  | 2aaacc02e3 | ||
|  | 222e929b2d | ||
|  | 6f16d35a92 | ||
|  | d7a2ccf5ac | ||
|  | 9ce605221a | ||
|  | 1e930fe950 | ||
|  | 4dc158589c | ||
|  | 4525eb457b | ||
|  | 56a2e07dc2 | ||
|  | 9b7fe9ac31 | ||
|  | c3da07ccf7 | ||
|  | b691a56d51 | ||
|  | 13e0a1b5bb | ||
|  | 646baddce4 | ||
|  | 02f61c323d | ||
|  | 1e3d2df9e7 | ||
|  | e43fae86f1 | ||
|  | c6151e34e0 | ||
|  | 45cb991254 | ||
|  | 839bc99f94 | ||
|  | 0aeb1ca408 | ||
|  | cd76a906f4 | ||
|  | e438491938 | ||
|  | 307b35a5bf | ||
|  | 217c9720ea | ||
|  | 778c7dc5f2 | ||
|  | 4c80154437 | ||
|  | 6bd9529a66 | ||
|  | 33ea2b4844 | ||
|  | 5c807f3dc8 | ||
|  | 9063b559c4 | ||
|  | 40f6df7160 | ||
|  | 95165aa92f | ||
|  | d96fcdb35c | ||
|  | 5efabdcea3 | ||
|  | 2d57dc0565 | ||
|  | 576629f825 | ||
|  | 5badb9d151 | ||
|  | 45dc379d9a | ||
|  | 49c0c9f44c | ||
|  | ef5fa4d062 | ||
|  | 35b66d5d94 | ||
|  | d0b749a43c | ||
|  | bcc4d4e8c6 | ||
|  | 41bff0b293 | ||
|  | dfc7f35ef1 | ||
|  | 0bbbbdde80 | ||
|  | 5fa5284b58 | ||
|  | b7ef82cb67 | ||
|  | 1233780265 | ||
|  | dd095279c8 | ||
|  | 4d5200c50f | ||
|  | 1bcd675ead | ||
|  | 2a3d3de0b2 | ||
|  | b124836f3a | ||
|  | 93ba95971b | ||
|  | 7b193b3745 | ||
|  | 2b647d2405 | ||
|  | 7714cca599 | ||
|  | 42511aa9cf | ||
|  | ace2a2f3d1 | ||
|  | 2062fe7a08 | ||
|  | d4c02c3988 | ||
|  | 4c1496b4a4 | ||
|  | eec876295d | ||
|  | 3093175f54 | ||
|  | dd05c4d34a | ||
|  | 57e3a40321 | ||
|  | 9e70152076 | ||
|  | e1da83a8f6 | ||
|  | 8108198613 | ||
|  | 915849b2ce | ||
|  | 2e96302336 | ||
|  | 051cd744ad | ||
|  | 53fbc165ba | ||
|  | 1862bcf867 | ||
|  | 8909d1d144 | ||
|  | a2f0f20284 | ||
|  | 1951b52aa5 | ||
|  | cd7a9345ec | ||
|  | dba4c33c81 | ||
|  | 153c239c9b | ||
|  | 4034ab4182 | ||
|  | 9c917c3bd3 | ||
|  | cca0222e1d | ||
|  | 682db9b81f | ||
|  | 3e000f9be1 | ||
|  | 548a552638 | ||
|  | 1d5b5b7d15 | ||
|  | 91aa4586e2 | ||
|  | 6d3bc43ef6 | ||
|  | 0f63e26641 | ||
|  | ab2ef69c6a | ||
|  | 621350515e | ||
|  | 03ed5c398a | ||
|  | 65d6f8c018 | ||
|  | 79d0673ae6 | ||
|  | cbd488e19f | ||
|  | 380d869195 | ||
|  | 73893f2a33 | ||
|  | ad81470d35 | ||
|  | fc140d04ef | ||
|  | a0257ed7e7 | ||
|  | 4769487c3b | ||
|  | 29def587ff | ||
|  | f35d0b2b37 | ||
|  | 283e92d55d | ||
|  | c82b26d334 | ||
|  | 2753e02cda | ||
|  | fde733c205 | ||
|  | f730591f2c | ||
|  | 94eac1e79d | ||
|  | 9f2b6d0ec6 | ||
|  | 7d7d0ea001 | ||
|  | 794101691c | ||
|  | a443144a5c | ||
|  | 73f0867061 | ||
|  | f97db93212 | ||
|  | d36708933c | ||
|  | 14f82ea0a9 | ||
|  | c41dd6495d | ||
|  | 1005c99e9c | ||
|  | f4478fc762 | ||
|  | c5ed308ea5 | ||
|  | 3ab5ba6149 | ||
|  | 9b2fde962c | ||
|  | 571a7dc42d | ||
|  | 3421fffa9b | ||
|  | c25619fd63 | ||
|  | 76adb13a64 | ||
|  | 33b1eed361 | ||
|  | c44891a1a8 | ||
|  | f31f52ff1c | ||
|  | 6ad9a56bd9 | ||
|  | a5c2fc4f9d | ||
|  | 0a65006bb4 | ||
|  | 3db896c4e2 | ||
|  | e80322021a | ||
|  | 48316ba60d | ||
|  | c0f1493473 | ||
|  | ccbd128fa2 | ||
|  | 46817caa68 | ||
|  | 775c8624d4 | ||
|  | 36eedc987c | ||
|  | 3b8f31c888 | ||
|  | a34fa74eaa | ||
|  | d6b2d8dcb5 | ||
|  | aab0599280 | ||
|  | dfa8eaf24e | ||
|  | 63d55cb797 | ||
|  | c642eee0d2 | ||
|  | 5f33d298d7 | ||
|  | fc39fd7519 | ||
|  | 7f442f7485 | ||
|  | 0ee3203a5a | ||
|  | 43a5df8780 | ||
|  | 0949df014b | ||
|  | 01f4dd8f97 | ||
|  | 8b7599f5d9 | ||
|  | 9bdc320cf8 | ||
|  | d9c8285806 | ||
|  | 4b8344082f | ||
|  | e5cf76b460 | ||
|  | 422ca87a12 | ||
|  | a512ccca28 | ||
|  | ba215be97c | ||
|  | ca16050681 | ||
|  | 06e4ed1bb4 | ||
|  | d4a8ae5743 | ||
|  | a4f2f811d3 | ||
|  | ebaba95eb3 | ||
|  | 31f7769199 | ||
|  | 7726be94be | ||
|  | f2cbcea6d7 | ||
|  | 5d6a28954b | ||
|  | 319f1deceb | ||
|  | 3f14958741 | ||
|  | 42ba4a5c56 | ||
|  | c804c395ed | ||
|  | 58c8cf1a3a | ||
|  | 76ea8c86b7 | ||
|  | 050378fa72 | ||
|  | 29d858d58c | ||
|  | dc45920afb | ||
|  | 15fcb57e2f | ||
|  | 91ee85152c | ||
|  | aa7bf7af1e | ||
|  | 02c1ba39ad | ||
|  | 8e8d9426df | ||
|  | 57f301815d | ||
|  | dfc9dc713c | ||
|  | 1a0cad7f5f | ||
|  | 3df436f0d8 | ||
|  | d737fca295 | ||
|  | da5a3532d7 | ||
|  | 27111e7b29 | ||
|  | b847bc0aba | ||
|  | 6eb0bc50e2 | ||
|  | 7530f03bf6 | ||
|  | 24a9633edc | ||
|  | 7e1a5ce445 | ||
|  | 2ffdbc7fc0 | ||
|  | 52c7b68cc3 | ||
|  | ddbcc8e84b | ||
|  | 2bfb195ad6 | ||
|  | cd2d9517a0 | ||
|  | 19dc312128 | ||
|  | 175659628d | ||
|  | 8fea2b09be | ||
|  | f77f45b70c | ||
|  | 103a287f11 | ||
|  | d600ade40c | ||
|  | a6a7cba121 | ||
|  | 7fff635a3f | ||
|  | 7a749b88c7 | ||
|  | 1ce6a7f4be | ||
|  | a092910fdd | ||
|  | bb77838b3e | ||
|  | 1001f1bd36 | ||
|  | de0e5583a5 | ||
							
								
								
									
.github/workflows/github-actions.yml (new file, 143 lines, vendored)

							| @@ -0,0 +1,143 @@ | |||||||
|  | name: MongoengineCI | ||||||
|  | on: | ||||||
|  |   # All PR | ||||||
|  |   pull_request: | ||||||
|  |   # master branch merge | ||||||
|  |   push: | ||||||
|  |     branches: | ||||||
|  |       - master | ||||||
|  |   # release tags | ||||||
|  |   create: | ||||||
|  |     tags: | ||||||
|  |       - 'v[0-9]+\.[0-9]+\.[0-9]+*' | ||||||
|  | env: | ||||||
|  |   MONGODB_3_6: 3.6.14 | ||||||
|  |   MONGODB_4_0: 4.0.23 | ||||||
|  |   MONGODB_4_2: 4.2 | ||||||
|  |   MONGODB_4_4: 4.4 | ||||||
|  |  | ||||||
|  |   PYMONGO_3_4: 3.4 | ||||||
|  |   PYMONGO_3_6: 3.6 | ||||||
|  |   PYMONGO_3_9: 3.9 | ||||||
|  |   PYMONGO_3_11: 3.11 | ||||||
|  |  | ||||||
|  |   MAIN_PYTHON_VERSION: 3.7 | ||||||
|  |  | ||||||
|  | jobs: | ||||||
|  |   linting: | ||||||
|  |     # Run pre-commit (https://pre-commit.com/) | ||||||
|  |     # which runs pre-configured linter & autoformatter | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     steps: | ||||||
|  |     - uses: actions/checkout@v2 | ||||||
|  |     - name: Set up Python 3.7 | ||||||
|  |       uses: actions/setup-python@v2 | ||||||
|  |       with: | ||||||
|  |         python-version: 3.7 | ||||||
|  |     - run: bash .github/workflows/install_ci_python_dep.sh | ||||||
|  |     - run: pre-commit run -a | ||||||
|  |  | ||||||
|  |   test: | ||||||
|  |     # Test suite run against recent python versions | ||||||
|  |     # and against a few combination of MongoDB and pymongo | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     strategy: | ||||||
|  |       fail-fast: false | ||||||
|  |       matrix: | ||||||
|  |         python-version: [3.6, 3.7, 3.8, 3.9, pypy3] | ||||||
|  |         MONGODB: [$MONGODB_4_0] | ||||||
|  |         PYMONGO: [$PYMONGO_3_11] | ||||||
|  |         include: | ||||||
|  |           - python-version: 3.7 | ||||||
|  |             MONGODB: $MONGODB_3_6 | ||||||
|  |             PYMONGO: $PYMONGO_3_9 | ||||||
|  |           - python-version: 3.7 | ||||||
|  |             MONGODB: $MONGODB_4_2 | ||||||
|  |             PYMONGO: $PYMONGO_3_6 | ||||||
|  |           - python-version: 3.7 | ||||||
|  |             MONGODB: $MONGODB_4_4 | ||||||
|  |             PYMONGO: $PYMONGO_3_11 | ||||||
|  |     steps: | ||||||
|  |     - uses: actions/checkout@v2 | ||||||
|  |     - name: Set up Python ${{ matrix.python-version }} | ||||||
|  |       uses: actions/setup-python@v2 | ||||||
|  |       with: | ||||||
|  |         python-version: ${{ matrix.python-version }} | ||||||
|  |     - name: install mongo and ci dependencies | ||||||
|  |       run: | | ||||||
|  |         bash .github/workflows/install_mongo.sh ${{ matrix.MONGODB }} | ||||||
|  |         bash .github/workflows/install_ci_python_dep.sh | ||||||
|  |         bash .github/workflows/start_mongo.sh ${{ matrix.MONGODB }} | ||||||
|  |     - name: tox dry-run (to pre-install venv) | ||||||
|  |       run: tox -e $(echo py${{ matrix.python-version }}-mg${{ matrix.PYMONGO }} | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder" | ||||||
|  |     - name: Run test suite | ||||||
|  |       run: tox -e $(echo py${{ matrix.python-version }}-mg${{ matrix.PYMONGO }} | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine" | ||||||
|  |     - name: Send coverage to Coveralls | ||||||
|  |       env: | ||||||
|  |         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||||
|  |         COVERALLS_SERVICE_NAME: github | ||||||
|  |       if: ${{ matrix.python-version == env.MAIN_PYTHON_VERSION }} | ||||||
|  |       run: coveralls | ||||||
|  |  | ||||||
|  |   build_doc_dryrun: | ||||||
|  |     # ensures that readthedocs can be built continuously | ||||||
|  |     # to avoid that it breaks when new releases are being created | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     steps: | ||||||
|  |     - uses: actions/checkout@v2 | ||||||
|  |     - name: Set up Python ${{ matrix.python-version }} | ||||||
|  |       uses: actions/setup-python@v2 | ||||||
|  |       with: | ||||||
|  |         python-version: 3.7 | ||||||
|  |     - name: install python dep | ||||||
|  |       run: | | ||||||
|  |         pip install -e . | ||||||
|  |         pip install -r docs/requirements.txt | ||||||
|  |     - name: build doc | ||||||
|  |       run: | | ||||||
|  |         cd docs | ||||||
|  |         make html-readthedocs | ||||||
|  |  | ||||||
|  |   build-n-publish-dummy: | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     needs: [linting, test, build_doc_dryrun] | ||||||
|  |     if: github.event_name != 'pull_request' | ||||||
|  |     steps: | ||||||
|  |     - uses: actions/checkout@master | ||||||
|  |     - name: Set up Python 3.7 | ||||||
|  |       uses: actions/setup-python@v1 | ||||||
|  |       with: | ||||||
|  |         python-version: 3.7 | ||||||
|  |     - name: build dummy wheel for test-pypi | ||||||
|  |       run: | | ||||||
|  |         pip install wheel | ||||||
|  |         python setup.py egg_info -b ".dev`date '+%Y%m%d%H%M%S'`" build sdist bdist_wheel | ||||||
|  | #    - name: publish test-pypi | ||||||
|  | #      # Although working and recommended, test-pypi has a limit | ||||||
|  | #      # in the size of projects so it's better to avoid publishing | ||||||
|  | #      # until there is a way to garbage collect these dummy releases | ||||||
|  | #      uses: pypa/gh-action-pypi-publish@master | ||||||
|  | #      with: | ||||||
|  | #        password: ${{ secrets.test_pypi_token }} | ||||||
|  | #        repository_url: https://test.pypi.org/legacy/ | ||||||
|  |  | ||||||
|  |   build-n-publish: | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     needs: [linting, test, build_doc_dryrun, build-n-publish-dummy] | ||||||
|  |     if: github.event_name == 'create' && startsWith(github.ref, 'refs/tags/v') | ||||||
|  |     steps: | ||||||
|  |     - uses: actions/checkout@master | ||||||
|  |     - name: Set up Python 3.7 | ||||||
|  |       uses: actions/setup-python@v1 | ||||||
|  |       with: | ||||||
|  |         python-version: 3.7 | ||||||
|  |     # todo separate build from publish | ||||||
|  |     # https://stackoverflow.com/questions/59349905/which-properties-does-github-event-in-a-github-workflow-have | ||||||
|  |     - name: build dummy wheel for test-pypi | ||||||
|  |       run: | | ||||||
|  |         pip install wheel | ||||||
|  |         python setup.py sdist bdist_wheel | ||||||
|  |     - name: publish pypi | ||||||
|  |       uses: pypa/gh-action-pypi-publish@master | ||||||
|  |       with: | ||||||
|  |         password: ${{ secrets.pypi_token }} | ||||||
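
The two `tox -e ...` steps in the workflow above derive the tox environment name from the matrix values by dropping the dots and collapsing the doubled `pypy` prefix. The sketch below reproduces that shell pipeline on its own, with example values standing in for `${{ matrix.python-version }}` and `${{ matrix.PYMONGO }}` (the variable names and inputs are illustrative, not part of the diff):

```bash
#!/bin/bash
# Illustrative stand-alone version of the env-name pipeline used in the
# "tox dry-run" and "Run test suite" steps of github-actions.yml.
python_version="3.7"   # stands in for ${{ matrix.python-version }}
pymongo="3.11"         # stands in for ${{ matrix.PYMONGO }}

# "py3.7-mg3.11" -> drop dots -> "py37-mg311"
tox_env=$(echo "py${python_version}-mg${pymongo}" | tr -d . | sed -e 's/pypypy/pypy/')
echo "$tox_env"        # prints: py37-mg311

# With python_version="pypy3" the prefix would double ("pypypy3-mg311"),
# and the sed expression collapses it back to "pypy3-mg311".
```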
							
								
								
									
.github/workflows/install_ci_python_dep.sh (new file, 5 lines, vendored)

							| @@ -0,0 +1,5 @@ | |||||||
|  | #!/bin/bash | ||||||
|  | pip install --upgrade pip | ||||||
|  | pip install coveralls | ||||||
|  | pip install pre-commit | ||||||
|  | pip install tox | ||||||
							
								
								
									
.github/workflows/install_mongo.sh (new file, 18 lines, vendored)

							| @@ -0,0 +1,18 @@ | |||||||
|  | #!/bin/bash | ||||||
|  |  | ||||||
|  | MONGODB=$1 | ||||||
|  |  | ||||||
|  | # Mongo > 4.0 follows different name convention for download links | ||||||
|  | mongo_build=mongodb-linux-x86_64-${MONGODB} | ||||||
|  |  | ||||||
|  | if [[ "$MONGODB" == *"4.2"* ]]; then | ||||||
|  |   mongo_build=mongodb-linux-x86_64-ubuntu1804-v${MONGODB}-latest | ||||||
|  | elif [[ "$MONGODB" == *"4.4"* ]]; then | ||||||
|  |   mongo_build=mongodb-linux-x86_64-ubuntu1804-v${MONGODB}-latest | ||||||
|  | fi | ||||||
|  |  | ||||||
|  | wget http://fastdl.mongodb.org/linux/$mongo_build.tgz | ||||||
|  | tar xzf $mongo_build.tgz | ||||||
|  |  | ||||||
|  | mongodb_dir=$(find ${PWD}/ -type d -name "mongodb-linux-x86_64*") | ||||||
|  | $mongodb_dir/bin/mongod --version | ||||||
							
								
								
									
.github/workflows/start_mongo.sh (new file, 9 lines, vendored)

							| @@ -0,0 +1,9 @@ | |||||||
|  | #!/bin/bash | ||||||
|  |  | ||||||
|  | MONGODB=$1 | ||||||
|  |  | ||||||
|  | mongodb_dir=$(find ${PWD}/ -type d -name "mongodb-linux-x86_64*") | ||||||
|  |  | ||||||
|  | mkdir $mongodb_dir/data | ||||||
|  | $mongodb_dir/bin/mongod --dbpath $mongodb_dir/data --logpath $mongodb_dir/mongodb.log --fork | ||||||
|  | mongo --eval 'db.version();'    # Make sure mongo is awake | ||||||
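
Taken together, the three helper scripts added above can be exercised outside CI in the same order the workflow's "install mongo and ci dependencies" step calls them. The snippet below is a local dry run, assuming a checkout of the repository root; the 4.0.23 version string is simply the workflow's MONGODB_4_0 value:

```bash
#!/bin/bash
# Local dry run of the CI helper scripts, in the order github-actions.yml uses them.
# 4.0.23 matches the MONGODB_4_0 value defined in the workflow's env block.
bash .github/workflows/install_mongo.sh 4.0.23      # download and unpack mongod
bash .github/workflows/install_ci_python_dep.sh     # pip, coveralls, pre-commit, tox
bash .github/workflows/start_mongo.sh 4.0.23        # fork mongod and ping it with mongo --eval
```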
							
								
								
									
.gitignore (14 changes, vendored)

							| @@ -1,8 +1,15 @@ | |||||||
| .* |  | ||||||
| !.gitignore | !.gitignore | ||||||
| *~ | *~ | ||||||
| *.py[co] | *.py[co] | ||||||
| .*.sw[po] | .*.sw[po] | ||||||
|  | .cache/ | ||||||
|  | .coverage | ||||||
|  | .coveragerc | ||||||
|  | .env | ||||||
|  | .idea/ | ||||||
|  | .pytest_cache/ | ||||||
|  | .tox/ | ||||||
|  | .eggs/ | ||||||
| *.egg | *.egg | ||||||
| docs/.build | docs/.build | ||||||
| docs/_build | docs/_build | ||||||
| @@ -13,5 +20,6 @@ env/ | |||||||
| .settings | .settings | ||||||
| .project | .project | ||||||
| .pydevproject | .pydevproject | ||||||
| tests/test_bugfix.py | htmlcov/ | ||||||
| htmlcov/ | venv | ||||||
|  | venv3 | ||||||
|   | |||||||
							
								
								
									
.pre-commit-config.yaml (new file, 26 lines)

							| @@ -0,0 +1,26 @@ | |||||||
|  | fail_fast: false | ||||||
|  | repos: | ||||||
|  |   - repo: https://github.com/pre-commit/pre-commit-hooks | ||||||
|  |     rev: v3.4.0 | ||||||
|  |     hooks: | ||||||
|  |       - id: check-merge-conflict | ||||||
|  |       - id: debug-statements | ||||||
|  |       - id: trailing-whitespace | ||||||
|  |       - id: end-of-file-fixer | ||||||
|  |   - repo: https://github.com/ambv/black | ||||||
|  |     rev: 21.4b2 | ||||||
|  |     hooks: | ||||||
|  |       - id: black | ||||||
|  |   - repo: https://gitlab.com/pycqa/flake8 | ||||||
|  |     rev: 3.9.1 | ||||||
|  |     hooks: | ||||||
|  |       - id: flake8 | ||||||
|  |   - repo: https://github.com/asottile/pyupgrade | ||||||
|  |     rev: v2.14.0 | ||||||
|  |     hooks: | ||||||
|  |       - id: pyupgrade | ||||||
|  |         args: [--py36-plus] | ||||||
|  |   - repo: https://github.com/pycqa/isort | ||||||
|  |     rev: 5.8.0 | ||||||
|  |     hooks: | ||||||
|  |       - id: isort | ||||||
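
This pre-commit configuration is what the `linting` job runs in CI, so the same checks can be reproduced locally. The commands below mirror the CI steps; the final `pre-commit install` line is an optional extra (it wires the hooks into git's commit hook) that the workflow itself does not perform:

```bash
# Reproduce the CI "linting" job locally against the whole codebase.
pip install pre-commit   # also installed by install_ci_python_dep.sh
pre-commit run -a        # same command the workflow runs
pre-commit install       # optional: run the hooks automatically on every commit
```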
							
								
								
									
.readthedocs.yml (new file, 20 lines)

							| @@ -0,0 +1,20 @@ | |||||||
|  | # .readthedocs.yml | ||||||
|  | # Read the Docs configuration file | ||||||
|  | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details | ||||||
|  |  | ||||||
|  | # Required | ||||||
|  | version: 2 | ||||||
|  |  | ||||||
|  | # Build documentation in the docs/ directory with Sphinx | ||||||
|  | sphinx: | ||||||
|  |   configuration: docs/conf.py | ||||||
|  |  | ||||||
|  | # Optionally set the version of Python and requirements required to build your docs | ||||||
|  | python: | ||||||
|  |   version: 3.7 | ||||||
|  |   install: | ||||||
|  |     - requirements: docs/requirements.txt | ||||||
|  |     # docs/conf.py is importing mongoengine | ||||||
|  |     # so mongoengine needs to be installed as well | ||||||
|  |     - method: setuptools | ||||||
|  |       path: . | ||||||
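
The Read the Docs configuration installs the package itself plus docs/requirements.txt before building, which is the same recipe the `build_doc_dryrun` CI job follows. A local equivalent, assuming a checkout of the repository root, would be:

```bash
#!/bin/bash
# Local equivalent of the build_doc_dryrun job in github-actions.yml.
pip install -e .                      # docs/conf.py imports mongoengine
pip install -r docs/requirements.txt
cd docs
make html-readthedocs                 # same target the CI job builds
```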
							
								
								
									
.travis.yml (deleted, 72 lines)

							| @@ -1,72 +0,0 @@ | |||||||
| language: python |  | ||||||
| python: |  | ||||||
| - '2.6' |  | ||||||
| - '2.7' |  | ||||||
| - '3.2' |  | ||||||
| - '3.3' |  | ||||||
| - '3.4' |  | ||||||
| - pypy |  | ||||||
| - pypy3 |  | ||||||
| env: |  | ||||||
| - PYMONGO=2.7.2 DJANGO=dev |  | ||||||
| - PYMONGO=2.7.2 DJANGO=1.7.1 |  | ||||||
| - PYMONGO=2.7.2 DJANGO=1.6.8 |  | ||||||
| - PYMONGO=2.7.2 DJANGO=1.5.11 |  | ||||||
| - PYMONGO=2.8 DJANGO=dev |  | ||||||
| - PYMONGO=2.8 DJANGO=1.7.1 |  | ||||||
| - PYMONGO=2.8 DJANGO=1.6.8 |  | ||||||
| - PYMONGO=2.8 DJANGO=1.5.11 |  | ||||||
| matrix: |  | ||||||
|   exclude: |  | ||||||
|   - python: '2.6' |  | ||||||
|     env: PYMONGO=2.7.2 DJANGO=dev |  | ||||||
|   - python: '2.6' |  | ||||||
|     env: PYMONGO=2.8 DJANGO=dev |  | ||||||
|   - python: '2.6' |  | ||||||
|     env: PYMONGO=2.7.2 DJANGO=1.7.1 |  | ||||||
|   - python: '2.6' |  | ||||||
|     env: PYMONGO=2.8 DJANGO=1.7.1 |  | ||||||
|   allow_failures: |  | ||||||
|   - python: pypy3 |  | ||||||
|   fast_finish: true |  | ||||||
| before_install: |  | ||||||
| - travis_retry sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10 |  | ||||||
| - echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' | |  | ||||||
|   sudo tee /etc/apt/sources.list.d/mongodb.list |  | ||||||
| - travis_retry sudo apt-get update |  | ||||||
| - travis_retry sudo apt-get install mongodb-org-server |  | ||||||
| install: |  | ||||||
| - sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev |  | ||||||
|   libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev |  | ||||||
|   python-tk |  | ||||||
| - if [[ $PYMONGO == 'dev' ]]; then travis_retry pip install https://github.com/mongodb/mongo-python-driver/tarball/master; |  | ||||||
|   true; fi |  | ||||||
| - if [[ $PYMONGO != 'dev' ]]; then travis_retry pip install pymongo==$PYMONGO; true; |  | ||||||
|   fi |  | ||||||
| - if [[ $DJANGO == 'dev' ]]; then travis_retry pip install git+https://github.com/django/django.git; |  | ||||||
|   fi |  | ||||||
| - if [[ $DJANGO != 'dev' ]]; then travis_retry pip install Django==$DJANGO; fi |  | ||||||
| - travis_retry pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b |  | ||||||
| - travis_retry pip install coveralls |  | ||||||
| - travis_retry python setup.py install |  | ||||||
| script: |  | ||||||
| - travis_retry python setup.py test |  | ||||||
| - if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then 2to3 . -w; fi; |  | ||||||
| - coverage run --source=mongoengine setup.py test |  | ||||||
| - coverage report -m |  | ||||||
| - python benchmark.py |  | ||||||
| after_script: coveralls --verbose |  | ||||||
| notifications: |  | ||||||
|   irc: irc.freenode.org#mongoengine |  | ||||||
| branches: |  | ||||||
|   only: |  | ||||||
|   - master |  | ||||||
|   - /^v.*$/ |  | ||||||
| deploy: |  | ||||||
|   provider: pypi |  | ||||||
|   user: the_drow |  | ||||||
|   password: |  | ||||||
|     secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek= |  | ||||||
|   on: |  | ||||||
|     tags: true |  | ||||||
|     repo: MongoEngine/mongoengine |  | ||||||
							
								
								
									
.travis_.yml (new file, 108 lines)

							| @@ -0,0 +1,108 @@ | |||||||
|  | ## For full coverage, we'd have to test all supported Python, MongoDB, and | ||||||
|  | ## PyMongo combinations. However, that would result in an overly long build | ||||||
|  | ## with a very large number of jobs, hence we only test a subset of all the | ||||||
|  | ## combinations. | ||||||
|  | ## * Python3.7, MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, | ||||||
|  | ## Other combinations are tested. See below for the details or check the travis jobs | ||||||
|  | # | ||||||
|  | ## We should periodically check MongoDB Server versions supported by MongoDB | ||||||
|  | ## Inc., add newly released versions to the test matrix, and remove versions | ||||||
|  | ## which have reached their End of Life. See: | ||||||
|  | ## 1. https://www.mongodb.com/support-policy. | ||||||
|  | ## 2. https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#python-driver-compatibility | ||||||
|  | ## | ||||||
|  | ## Reminder: Update README.rst if you change MongoDB versions we test. | ||||||
|  | # | ||||||
|  | #language: python | ||||||
|  | #dist: xenial | ||||||
|  | #python: | ||||||
|  | #  - 3.6 | ||||||
|  | #  - 3.7 | ||||||
|  | #  - 3.8 | ||||||
|  | #  - 3.9 | ||||||
|  | #  - pypy3 | ||||||
|  | # | ||||||
|  | #env: | ||||||
|  | #  global: | ||||||
|  | #    - MONGODB_3_4=3.4.19 | ||||||
|  | #    - MONGODB_3_6=3.6.13 | ||||||
|  | #    - MONGODB_4_0=4.0.13 | ||||||
|  | # | ||||||
|  | #    - PYMONGO_3_4=3.4 | ||||||
|  | #    - PYMONGO_3_6=3.6 | ||||||
|  | #    - PYMONGO_3_9=3.9 | ||||||
|  | #    - PYMONGO_3_11=3.11 | ||||||
|  | # | ||||||
|  | #    - MAIN_PYTHON_VERSION=3.7 | ||||||
|  | #  matrix: | ||||||
|  | #    - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_11} | ||||||
|  | # | ||||||
|  | #matrix: | ||||||
|  | #  # Finish the build as soon as one job fails | ||||||
|  | #  fast_finish: true | ||||||
|  | # | ||||||
|  | #  include: | ||||||
|  | #    - python: 3.7 | ||||||
|  | #      env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6} | ||||||
|  | #    - python: 3.7 | ||||||
|  | #      env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9} | ||||||
|  | #    - python: 3.7 | ||||||
|  | #      env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_11} | ||||||
|  | #    - python: 3.8 | ||||||
|  | #      env: MONGODB=${MONGODB_4_0} PYMONGO=${PYMONGO_3_11} | ||||||
|  | # | ||||||
|  | #install: | ||||||
|  | #  # Install Mongo | ||||||
|  | #  - wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz | ||||||
|  | #  - tar xzf mongodb-linux-x86_64-${MONGODB}.tgz | ||||||
|  | #  - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version | ||||||
|  | #  # Install Python dependencies. | ||||||
|  | #  - pip install --upgrade pip | ||||||
|  | #  - pip install coveralls | ||||||
|  | #  - pip install pre-commit | ||||||
|  | #  - pip install tox | ||||||
|  | #  # tox dryrun to setup the tox venv (we run a mock test). | ||||||
|  | #  - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder" | ||||||
|  | # | ||||||
|  | #before_script: | ||||||
|  | #  - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data | ||||||
|  | #  - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork | ||||||
|  | #  # Run pre-commit hooks (black, flake8, etc) on entire codebase | ||||||
|  | #  - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then pre-commit run -a; else echo "pre-commit checks only runs on py37"; fi | ||||||
|  | #  - mongo --eval 'db.version();' # Make sure mongo is awake | ||||||
|  | # | ||||||
|  | #script: | ||||||
|  | #  - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine" | ||||||
|  | # | ||||||
|  | #after_success: | ||||||
|  | #  - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi | ||||||
|  | # | ||||||
|  | #notifications: | ||||||
|  | #  irc: irc.freenode.org#mongoengine | ||||||
|  | # | ||||||
|  | ## Only run builds on the master branch and GitHub releases (tagged as vX.Y.Z) | ||||||
|  | #branches: | ||||||
|  | #  # Only run builds on the master branch and GitHub releases (tagged as vX.Y.Z) | ||||||
|  | #  only: | ||||||
|  | #    - master | ||||||
|  | #    - /^v.*$/ | ||||||
|  | # | ||||||
|  | ## Whenever a new release is created via GitHub, publish it on PyPI. | ||||||
|  | #deploy: | ||||||
|  | #  provider: pypi | ||||||
|  | #  user: the_drow | ||||||
|  | #  password: | ||||||
|  | #    secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek= | ||||||
|  | # | ||||||
|  | #  # Create a source distribution and a pure python wheel for faster installs. | ||||||
|  | #  distributions: "sdist bdist_wheel" | ||||||
|  | # | ||||||
|  | #  # Only deploy on tagged commits (aka GitHub releases) and only for the parent | ||||||
|  | #  # repo's builds running Python v3.7 along with PyMongo v3.x and MongoDB v3.4. | ||||||
|  | #  # We run Travis against many different Python, PyMongo, and MongoDB versions | ||||||
|  | #  # and we don't want the deploy to occur multiple times). | ||||||
|  | #  on: | ||||||
|  | #    tags: true | ||||||
|  | #    repo: MongoEngine/mongoengine | ||||||
|  | #    condition: ($PYMONGO = ${PYMONGO_3_11}) && ($MONGODB = ${MONGODB_3_4}) | ||||||
|  | #    python: 3.7 | ||||||
							
								
								
									
AUTHORS (48 changes)

							| @@ -12,7 +12,7 @@ Laine Herron https://github.com/LaineHerron | |||||||
|  |  | ||||||
| CONTRIBUTORS | CONTRIBUTORS | ||||||
|  |  | ||||||
| Dervived from the git logs, inevitably incomplete but all of whom and others | Derived from the git logs, inevitably incomplete but all of whom and others | ||||||
| have submitted patches, reported bugs and generally helped make MongoEngine | have submitted patches, reported bugs and generally helped make MongoEngine | ||||||
| that much better: | that much better: | ||||||
|  |  | ||||||
| @@ -119,7 +119,7 @@ that much better: | |||||||
|  * Anton Kolechkin |  * Anton Kolechkin | ||||||
|  * Sergey Nikitin |  * Sergey Nikitin | ||||||
|  * psychogenic |  * psychogenic | ||||||
|  * Stefan Wójcik |  * Stefan Wójcik (https://github.com/wojcikstefan) | ||||||
|  * dimonb |  * dimonb | ||||||
|  * Garry Polley |  * Garry Polley | ||||||
|  * James Slagle |  * James Slagle | ||||||
| @@ -138,7 +138,6 @@ that much better: | |||||||
|  * hellysmile |  * hellysmile | ||||||
|  * Jaepil Jeong |  * Jaepil Jeong | ||||||
|  * Daniil Sharou |  * Daniil Sharou | ||||||
|  * Stefan Wójcik |  | ||||||
|  * Pete Campton |  * Pete Campton | ||||||
|  * Martyn Smith |  * Martyn Smith | ||||||
|  * Marcelo Anton |  * Marcelo Anton | ||||||
| @@ -218,3 +217,46 @@ that much better: | |||||||
|  * Matthew Ellison (https://github.com/seglberg) |  * Matthew Ellison (https://github.com/seglberg) | ||||||
|  * Jimmy Shen (https://github.com/jimmyshen) |  * Jimmy Shen (https://github.com/jimmyshen) | ||||||
|  * J. Fernando Sánchez (https://github.com/balkian) |  * J. Fernando Sánchez (https://github.com/balkian) | ||||||
|  |  * Michael Chase (https://github.com/rxsegrxup) | ||||||
|  |  * Eremeev Danil (https://github.com/elephanter) | ||||||
|  |  * Catstyle Lee (https://github.com/Catstyle) | ||||||
|  |  * Kiryl Yermakou (https://github.com/rma4ok) | ||||||
|  |  * Matthieu Rigal (https://github.com/MRigal) | ||||||
|  |  * Charanpal Dhanjal (https://github.com/charanpald) | ||||||
|  |  * Emmanuel Leblond (https://github.com/touilleMan) | ||||||
|  |  * Breeze.Kay (https://github.com/9nix00) | ||||||
|  |  * Vicki Donchenko (https://github.com/kivistein) | ||||||
|  |  * Emile Caron (https://github.com/emilecaron) | ||||||
|  |  * Amit Lichtenberg (https://github.com/amitlicht) | ||||||
|  |  * Gang Li (https://github.com/iici-gli) | ||||||
|  |  * Lars Butler (https://github.com/larsbutler) | ||||||
|  |  * George Macon (https://github.com/gmacon) | ||||||
|  |  * Ashley Whetter (https://github.com/AWhetter) | ||||||
|  |  * Paul-Armand Verhaegen (https://github.com/paularmand) | ||||||
|  |  * Steven Rossiter (https://github.com/BeardedSteve) | ||||||
|  |  * Luo Peng (https://github.com/RussellLuo) | ||||||
|  |  * Bryan Bennett (https://github.com/bbenne10) | ||||||
|  |  * Gilb's Gilb's (https://github.com/gilbsgilbs) | ||||||
|  |  * Joshua Nedrud (https://github.com/Neurostack) | ||||||
|  |  * Shu Shen (https://github.com/shushen) | ||||||
|  |  * xiaost7 (https://github.com/xiaost7) | ||||||
|  |  * Victor Varvaryuk | ||||||
|  |  * Stanislav Kaledin (https://github.com/sallyruthstruik) | ||||||
|  |  * Dmitry Yantsen (https://github.com/mrTable) | ||||||
|  |  * Renjianxin (https://github.com/Davidrjx) | ||||||
|  |  * Erdenezul Batmunkh (https://github.com/erdenezul) | ||||||
|  |  * Andy Yankovsky (https://github.com/werat) | ||||||
|  |  * Bastien Gérard (https://github.com/bagerard) | ||||||
|  |  * Trevor Hall (https://github.com/tjhall13) | ||||||
|  |  * Gleb Voropaev (https://github.com/buggyspace) | ||||||
|  |  * Paulo Amaral (https://github.com/pauloAmaral) | ||||||
|  |  * Gaurav Dadhania (https://github.com/GVRV) | ||||||
|  |  * Yurii Andrieiev (https://github.com/yandrieiev) | ||||||
|  |  * Filip Kucharczyk (https://github.com/Pacu2) | ||||||
|  |  * Eric Timmons (https://github.com/daewok) | ||||||
|  |  * Matthew Simpson (https://github.com/mcsimps2) | ||||||
|  |  * Leonardo Domingues (https://github.com/leodmgs) | ||||||
|  |  * Agustin Barto (https://github.com/abarto) | ||||||
|  |  * Stankiewicz Mateusz (https://github.com/mas15) | ||||||
|  |  * Felix Schultheiß (https://github.com/felix-smashdocs) | ||||||
|  |  * Jan Stein (https://github.com/janste63) | ||||||
|   | |||||||
| @@ -14,38 +14,81 @@ Before starting to write code, look for existing `tickets | |||||||
| <https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one | <https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one | ||||||
| <https://github.com/MongoEngine/mongoengine/issues>`_ for your specific | <https://github.com/MongoEngine/mongoengine/issues>`_ for your specific | ||||||
| issue or feature request. That way you avoid working on something | issue or feature request. That way you avoid working on something | ||||||
| that might not be of interest or that has already been addressed.  If in doubt | that might not be of interest or that has already been addressed. If in doubt | ||||||
| post to the `user group <http://groups.google.com/group/mongoengine-users>` | post to the `user group <http://groups.google.com/group/mongoengine-users>` | ||||||
|  |  | ||||||
| Supported Interpreters | Supported Interpreters | ||||||
| ---------------------- | ---------------------- | ||||||
|  |  | ||||||
| MongoEngine supports CPython 2.6 and newer. Language | MongoEngine supports CPython 3.5 and newer as well as Pypy3. | ||||||
| features not supported by all interpreters can not be used. | Language features not supported by all interpreters can not be used. | ||||||
| Please also ensure that your code is properly converted by |  | ||||||
| `2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support. | Python3 codebase | ||||||
|  | ---------------------- | ||||||
|  |  | ||||||
|  | Since 0.20, the codebase is exclusively Python 3. | ||||||
|  |  | ||||||
|  | Earlier versions were exclusively Python2, and were relying on 2to3 to support Python3 installs. | ||||||
|  | Travis runs the tests against the main Python 3.x versions. | ||||||
|  |  | ||||||
|  |  | ||||||
| Style Guide | Style Guide | ||||||
| ----------- | ----------- | ||||||
|  |  | ||||||
| MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_ | MongoEngine's codebase is auto-formatted with `black <https://github.com/python/black>`_, imports are ordered with `isort <https://pycqa.github.io/isort/>`_ | ||||||
| including 4 space indents and 79 character line limits. | and other tools like flake8 are also used. Those tools will run as part of the CI and will fail in case the code is not formatted properly. | ||||||
|  |  | ||||||
|  | To install all development tools, simply run the following commands: | ||||||
|  |  | ||||||
|  | .. code-block:: console | ||||||
|  |  | ||||||
|  |     $ python -m pip install -r requirements-dev.txt | ||||||
|  |  | ||||||
|  |  | ||||||
|  | You can install `pre-commit <https://pre-commit.com/>`_ into your git hooks, | ||||||
|  | to automatically check and fix any formatting issue before creating a | ||||||
|  | git commit. | ||||||
|  |  | ||||||
|  | To enable ``pre-commit`` simply run: | ||||||
|  |  | ||||||
|  | .. code-block:: console | ||||||
|  |  | ||||||
|  |     $ pre-commit install | ||||||
|  |  | ||||||
|  | See the ``.pre-commit-config.yaml`` configuration file for more information | ||||||
|  | on how it works. | ||||||
|  |  | ||||||
|  | pre-commit will now run upon every commit and will reject anything that doesn't comply. | ||||||
|  |  | ||||||
|  | You can also run all the checks with ``pre-commit run -a``, this is what is used in the CI. | ||||||
|  |  | ||||||
| Testing | Testing | ||||||
| ------- | ------- | ||||||
|  |  | ||||||
| All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_ | All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_ | ||||||
| and any pull requests are automatically tested by Travis. Any pull requests | and any pull requests are automatically tested. Any pull requests without | ||||||
| without tests will take longer to be integrated and might be refused. | tests will take longer to be integrated and might be refused. | ||||||
|  |  | ||||||
|  | You may also submit a simple failing test as a pull request if you don't know | ||||||
|  | how to fix it, it will be easier for other people to work on it and it may get | ||||||
|  | fixed faster. | ||||||
|  |  | ||||||
| General Guidelines | General Guidelines | ||||||
| ------------------ | ------------------ | ||||||
|  |  | ||||||
| - Avoid backward breaking changes if at all possible. | - Avoid backward breaking changes if at all possible. | ||||||
|  | - If you *have* to introduce a breaking change, make it very clear in your | ||||||
|  |   pull request's description. Also, describe how users of this package | ||||||
|  |   should adapt to the breaking change in docs/upgrade.rst. | ||||||
| - Write inline documentation for new classes and methods. | - Write inline documentation for new classes and methods. | ||||||
| - Write tests and make sure they pass (make sure you have a mongod | - Write tests and make sure they pass (make sure you have a mongod | ||||||
|   running on the default port, then execute ``python setup.py test`` |   running on the default port, then execute ``python setup.py test`` | ||||||
|   from the cmd line to run the test suite). |   from the cmd line to run the test suite). | ||||||
|  | - Ensure tests pass on all supported Python, PyMongo, and MongoDB versions. | ||||||
|  |   You can test various Python and PyMongo versions locally by executing | ||||||
|  |   ``tox``. For different MongoDB versions, you can rely on our automated | ||||||
|  |   Travis tests. | ||||||
|  | - Add enhancements or problematic bug fixes to docs/changelog.rst. | ||||||
| - Add yourself to AUTHORS :) | - Add yourself to AUTHORS :) | ||||||
|  |  | ||||||
| Documentation | Documentation | ||||||
| @@ -59,3 +102,8 @@ just make your changes to the inline documentation of the appropriate | |||||||
| branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_. | branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_. | ||||||
| You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_ | You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_ | ||||||
| button. | button. | ||||||
|  |  | ||||||
|  | If you want to test your documentation changes locally, you need to install | ||||||
|  | the ``sphinx`` and ``sphinx_rtd_theme`` packages. Once these are installed, | ||||||
|  | go to the ``docs`` directory, run ``make html`` and inspect the updated docs | ||||||
|  | by running ``open _build/html/index.html``. | ||||||
|   | |||||||
							
								
								
									
README.rst (125 changes)
							| @@ -4,55 +4,78 @@ MongoEngine | |||||||
| :Info: MongoEngine is an ORM-like layer on top of PyMongo. | :Info: MongoEngine is an ORM-like layer on top of PyMongo. | ||||||
| :Repository: https://github.com/MongoEngine/mongoengine | :Repository: https://github.com/MongoEngine/mongoengine | ||||||
| :Author: Harry Marr (http://github.com/hmarr) | :Author: Harry Marr (http://github.com/hmarr) | ||||||
| :Maintainer: Ross Lawley (http://github.com/rozza) | :Maintainer: Stefan Wójcik (http://github.com/wojcikstefan) | ||||||
|  |  | ||||||
| .. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master | .. image:: https://travis-ci.org/MongoEngine/mongoengine.svg?branch=master | ||||||
|   :target: http://travis-ci.org/MongoEngine/mongoengine |   :target: https://travis-ci.org/MongoEngine/mongoengine | ||||||
|    |  | ||||||
| .. image:: https://coveralls.io/repos/MongoEngine/mongoengine/badge.png?branch=master  | .. image:: https://coveralls.io/repos/github/MongoEngine/mongoengine/badge.svg?branch=master | ||||||
|   :target: https://coveralls.io/r/MongoEngine/mongoengine?branch=master |   :target: https://coveralls.io/github/MongoEngine/mongoengine?branch=master | ||||||
|    |  | ||||||
| .. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.png | .. image:: https://img.shields.io/badge/code%20style-black-000000.svg | ||||||
|    :target: https://landscape.io/github/MongoEngine/mongoengine/master |   :target: https://github.com/ambv/black | ||||||
|    :alt: Code Health |  | ||||||
|  |  | ||||||
| About | About | ||||||
| ===== | ===== | ||||||
| MongoEngine is a Python Object-Document Mapper for working with MongoDB. | MongoEngine is a Python Object-Document Mapper for working with MongoDB. | ||||||
| Documentation available at http://mongoengine-odm.rtfd.org - there is currently | Documentation is available at https://mongoengine-odm.readthedocs.io - there | ||||||
| a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide | is currently a `tutorial <https://mongoengine-odm.readthedocs.io/tutorial.html>`_, | ||||||
| <https://mongoengine-odm.readthedocs.org/en/latest/guide/index.html>`_ and an `API reference | a `user guide <https://mongoengine-odm.readthedocs.io/guide/index.html>`_, and | ||||||
| <http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_. | an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_. | ||||||
|  |  | ||||||
|  | Supported MongoDB Versions | ||||||
|  | ========================== | ||||||
|  | MongoEngine is currently tested against MongoDB v3.4, v3.6 and v4.0. Future versions | ||||||
|  | should be supported as well, but aren't actively tested at the moment. Make | ||||||
|  | sure to open an issue or submit a pull request if you experience any problems | ||||||
|  | with MongoDB version > 4.0. | ||||||
|  |  | ||||||
| Installation | Installation | ||||||
| ============ | ============ | ||||||
| If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of | ||||||
| you can use ``easy_install -U mongoengine``. Otherwise, you can download the | `pip <https://pip.pypa.io/>`_. You can then use ``python -m pip install -U mongoengine``. | ||||||
| source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python | You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | ||||||
| setup.py install``. | and thus you can use ``easy_install -U mongoengine``. Another option is | ||||||
|  | `pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine`` | ||||||
|  | to both create the virtual environment and install the package. Otherwise, you can | ||||||
|  | download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and | ||||||
|  | run ``python setup.py install``. | ||||||
|  |  | ||||||
|  | The support for Python2 was dropped with MongoEngine 0.20.0 | ||||||
|  |  | ||||||
| Dependencies | Dependencies | ||||||
| ============ | ============ | ||||||
| - pymongo>=2.7.1 | All of the dependencies can easily be installed via `python -m pip <https://pip.pypa.io/>`_. | ||||||
| - sphinx (optional - for documentation generation) | At the very least, you'll need these two packages to use MongoEngine: | ||||||
|  |  | ||||||
|  | - pymongo>=3.4 | ||||||
|  |  | ||||||
|  | If you utilize a ``DateTimeField``, you might also use a more flexible date parser: | ||||||
|  |  | ||||||
| Optional Dependencies |  | ||||||
| --------------------- |  | ||||||
| - **Django Integration:** Django>=1.4.0 for Python 2.x or PyPy and Django>=1.5.0 for Python 3.x |  | ||||||
| - **Image Fields**: Pillow>=2.0.0 or PIL (not recommended since MongoEngine is tested with Pillow) |  | ||||||
| - dateutil>=2.1.0 | - dateutil>=2.1.0 | ||||||
|  |  | ||||||
| .. note | If you need to use an ``ImageField`` or ``ImageGridFsProxy``: | ||||||
|    MongoEngine always runs it's test suite against the latest patch version of each dependecy. e.g.: Django 1.6.5 |  | ||||||
|  | - Pillow>=2.0.0 | ||||||
|  |  | ||||||
|  | If you need to use signals: | ||||||
|  |  | ||||||
|  | - blinker>=1.3 | ||||||
|  |  | ||||||
| Examples | Examples | ||||||
| ======== | ======== | ||||||
| Some simple examples of what MongoEngine code looks like:: | Some simple examples of what MongoEngine code looks like: | ||||||
|  |  | ||||||
|  | .. code :: python | ||||||
|  |  | ||||||
|  |     from mongoengine import * | ||||||
|  |     connect('mydb') | ||||||
|  |  | ||||||
|     class BlogPost(Document): |     class BlogPost(Document): | ||||||
|         title = StringField(required=True, max_length=200) |         title = StringField(required=True, max_length=200) | ||||||
|         posted = DateTimeField(default=datetime.datetime.now) |         posted = DateTimeField(default=datetime.datetime.utcnow) | ||||||
|         tags = ListField(StringField(max_length=50)) |         tags = ListField(StringField(max_length=50)) | ||||||
|  |         meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|     class TextPost(BlogPost): |     class TextPost(BlogPost): | ||||||
|         content = StringField(required=True) |         content = StringField(required=True) | ||||||
| @@ -72,31 +95,52 @@ Some simple examples of what MongoEngine code looks like:: | |||||||
|  |  | ||||||
|     # Iterate over all posts using the BlogPost superclass |     # Iterate over all posts using the BlogPost superclass | ||||||
|     >>> for post in BlogPost.objects: |     >>> for post in BlogPost.objects: | ||||||
|     ...     print '===', post.title, '===' |     ...     print('===', post.title, '===') | ||||||
|     ...     if isinstance(post, TextPost): |     ...     if isinstance(post, TextPost): | ||||||
|     ...         print post.content |     ...         print(post.content) | ||||||
|     ...     elif isinstance(post, LinkPost): |     ...     elif isinstance(post, LinkPost): | ||||||
|     ...         print 'Link:', post.url |     ...         print('Link:', post.url) | ||||||
|     ...     print |  | ||||||
|     ... |     ... | ||||||
|  |  | ||||||
|     >>> len(BlogPost.objects) |     # Count all blog posts and its subtypes | ||||||
|  |     >>> BlogPost.objects.count() | ||||||
|     2 |     2 | ||||||
|     >>> len(HtmlPost.objects) |     >>> TextPost.objects.count() | ||||||
|     1 |     1 | ||||||
|     >>> len(LinkPost.objects) |     >>> LinkPost.objects.count() | ||||||
|     1 |     1 | ||||||
|  |  | ||||||
|     # Find tagged posts |     # Count tagged posts | ||||||
|     >>> len(BlogPost.objects(tags='mongoengine')) |     >>> BlogPost.objects(tags='mongoengine').count() | ||||||
|     2 |     2 | ||||||
|     >>> len(BlogPost.objects(tags='mongodb')) |     >>> BlogPost.objects(tags='mongodb').count() | ||||||
|     1 |     1 | ||||||
|  |  | ||||||
| Tests | Tests | ||||||
| ===== | ===== | ||||||
| To run the test suite, ensure you are running a local instance of MongoDB on | To run the test suite, ensure you are running a local instance of MongoDB on | ||||||
| the standard port, and run: ``python setup.py test``. | the standard port and have ``pytest`` installed. Then, run ``python setup.py test`` | ||||||
|  | or simply ``pytest``. | ||||||
|  |  | ||||||
|  | To run the test suite on every supported Python and PyMongo version, you can | ||||||
|  | use ``tox``. You'll need to make sure you have each supported Python version | ||||||
|  | installed in your environment and then: | ||||||
|  |  | ||||||
|  | .. code-block:: shell | ||||||
|  |  | ||||||
|  |     # Install tox | ||||||
|  |     $ python -m pip install tox | ||||||
|  |     # Run the test suites | ||||||
|  |     $ tox | ||||||
|  |  | ||||||
|  | If you wish to run a subset of tests, use the pytest convention: | ||||||
|  |  | ||||||
|  | .. code-block:: shell | ||||||
|  |  | ||||||
|  |     # Run all the tests in a particular test file | ||||||
|  |     $ pytest tests/fields/test_fields.py | ||||||
|  |     # Run only particular test class in that file | ||||||
|  |     $ pytest tests/fields/test_fields.py::TestField | ||||||
|  |  | ||||||
| Community | Community | ||||||
| ========= | ========= | ||||||
| @@ -104,8 +148,7 @@ Community | |||||||
|   <http://groups.google.com/group/mongoengine-users>`_ |   <http://groups.google.com/group/mongoengine-users>`_ | ||||||
| - `MongoEngine Developers mailing list | - `MongoEngine Developers mailing list | ||||||
|   <http://groups.google.com/group/mongoengine-dev>`_ |   <http://groups.google.com/group/mongoengine-dev>`_ | ||||||
| - `#mongoengine IRC channel <http://webchat.freenode.net/?channels=mongoengine>`_ |  | ||||||
|  |  | ||||||
| Contributing | Contributing | ||||||
| ============ | ============ | ||||||
| We welcome contributions! see  the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_ | We welcome contributions! See the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_ | ||||||
|   | |||||||
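The README example above references a ``LinkPost`` class and previously saved posts that sit outside the changed hunks, so only the counting queries are visible in this diff. Purely as a reading aid, here is a small self-contained sketch (assuming a local ``mongod`` on the default port) that would produce the quoted counts; it is not the verbatim README text, and the ``LinkPost`` field and the sample post data are assumptions.

.. code-block:: python

    # Illustrative sketch only -- not the verbatim README example.
    import datetime

    from mongoengine import (
        DateTimeField,
        Document,
        ListField,
        StringField,
        connect,
    )

    connect("mydb")

    class BlogPost(Document):
        title = StringField(required=True, max_length=200)
        posted = DateTimeField(default=datetime.datetime.utcnow)
        tags = ListField(StringField(max_length=50))
        meta = {"allow_inheritance": True}

    class TextPost(BlogPost):
        content = StringField(required=True)

    class LinkPost(BlogPost):
        url = StringField(required=True)  # assumed field; the diff only prints post.url

    # Start from an empty collection so the counts below are reproducible.
    BlogPost.drop_collection()

    TextPost(
        title="Using MongoEngine",
        content="See the tutorial",
        tags=["mongoengine", "mongodb"],
    ).save()
    LinkPost(
        title="MongoEngine documentation",
        url="https://mongoengine-odm.readthedocs.io",
        tags=["mongoengine"],
    ).save()

    assert BlogPost.objects.count() == 2
    assert TextPost.objects.count() == 1
    assert LinkPost.objects.count() == 1
    assert BlogPost.objects(tags="mongoengine").count() == 2
    assert BlogPost.objects(tags="mongodb").count() == 1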
							
								
								
									
benchmark.py (283 changes)
							| @@ -1,283 +0,0 @@ | |||||||
| #!/usr/bin/env python |  | ||||||
|  |  | ||||||
| import timeit |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def cprofile_main(): |  | ||||||
|     from pymongo import Connection |  | ||||||
|     connection = Connection() |  | ||||||
|     connection.drop_database('timeit_test') |  | ||||||
|     connection.disconnect() |  | ||||||
|  |  | ||||||
|     from mongoengine import Document, DictField, connect |  | ||||||
|     connect("timeit_test") |  | ||||||
|  |  | ||||||
|     class Noddy(Document): |  | ||||||
|         fields = DictField() |  | ||||||
|  |  | ||||||
|     for i in range(1): |  | ||||||
|         noddy = Noddy() |  | ||||||
|         for j in range(20): |  | ||||||
|             noddy.fields["key" + str(j)] = "value " + str(j) |  | ||||||
|         noddy.save() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def main(): |  | ||||||
|     """ |  | ||||||
|     0.4 Performance Figures ... |  | ||||||
|  |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - Pymongo |  | ||||||
|     3.86744189262 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine |  | ||||||
|     6.23374891281 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False |  | ||||||
|     5.33027005196 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False |  | ||||||
|     pass - No Cascade |  | ||||||
|  |  | ||||||
|     0.5.X |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - Pymongo |  | ||||||
|     3.89597702026 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine |  | ||||||
|     21.7735359669 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False |  | ||||||
|     19.8670389652 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False |  | ||||||
|     pass - No Cascade |  | ||||||
|  |  | ||||||
|     0.6.X |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - Pymongo |  | ||||||
|     3.81559205055 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine |  | ||||||
|     10.0446798801 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False |  | ||||||
|     9.51354718208 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False |  | ||||||
|     9.02567505836 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, force=True |  | ||||||
|     8.44933390617 |  | ||||||
|  |  | ||||||
|     0.7.X |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - Pymongo |  | ||||||
|     3.78801012039 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine |  | ||||||
|     9.73050498962 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False |  | ||||||
|     8.33456707001 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False |  | ||||||
|     8.37778115273 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, force=True |  | ||||||
|     8.36906409264 |  | ||||||
|     0.8.X |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - Pymongo |  | ||||||
|     3.69964408875 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - Pymongo write_concern={"w": 0} |  | ||||||
|     3.5526599884 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine |  | ||||||
|     7.00959801674 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries without continual assign - MongoEngine |  | ||||||
|     5.60943293571 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade=True |  | ||||||
|     6.715102911 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True |  | ||||||
|     5.50644683838 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False |  | ||||||
|     4.69851183891 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False |  | ||||||
|     4.68946313858 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     """ |  | ||||||
|     print("Benchmarking...") |  | ||||||
|  |  | ||||||
|     setup = """ |  | ||||||
| from pymongo import MongoClient |  | ||||||
| connection = MongoClient() |  | ||||||
| connection.drop_database('timeit_test') |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| from pymongo import MongoClient |  | ||||||
| connection = MongoClient() |  | ||||||
|  |  | ||||||
| db = connection.timeit_test |  | ||||||
| noddy = db.noddy |  | ||||||
|  |  | ||||||
| for i in range(10000): |  | ||||||
|     example = {'fields': {}} |  | ||||||
|     for j in range(20): |  | ||||||
|         example['fields']["key"+str(j)] = "value "+str(j) |  | ||||||
|  |  | ||||||
|     noddy.save(example) |  | ||||||
|  |  | ||||||
| myNoddys = noddy.find() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - Pymongo""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| from pymongo import MongoClient |  | ||||||
| connection = MongoClient() |  | ||||||
|  |  | ||||||
| db = connection.timeit_test |  | ||||||
| noddy = db.noddy |  | ||||||
|  |  | ||||||
| for i in range(10000): |  | ||||||
|     example = {'fields': {}} |  | ||||||
|     for j in range(20): |  | ||||||
|         example['fields']["key"+str(j)] = "value "+str(j) |  | ||||||
|  |  | ||||||
|     noddy.save(example, write_concern={"w": 0}) |  | ||||||
|  |  | ||||||
| myNoddys = noddy.find() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     setup = """ |  | ||||||
| from pymongo import MongoClient |  | ||||||
| connection = MongoClient() |  | ||||||
| connection.drop_database('timeit_test') |  | ||||||
| connection.disconnect() |  | ||||||
|  |  | ||||||
| from mongoengine import Document, DictField, connect |  | ||||||
| connect("timeit_test") |  | ||||||
|  |  | ||||||
| class Noddy(Document): |  | ||||||
|     fields = DictField() |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save() |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - MongoEngine""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     fields = {} |  | ||||||
|     for j in range(20): |  | ||||||
|         fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.fields = fields |  | ||||||
|     noddy.save() |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries without continual assign - MongoEngine""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(write_concern={"w": 0}, cascade=True) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(write_concern={"w": 0}, validate=False, cascade=True) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(validate=False, write_concern={"w": 0}) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(force_insert=True, write_concern={"w": 0}, validate=False) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == "__main__": |  | ||||||
|     main() |  | ||||||
							
								
								
									
benchmarks/test_basic_doc_ops.py (142 changes, new file)
							| @@ -0,0 +1,142 @@ | |||||||
|  | from timeit import repeat | ||||||
|  |  | ||||||
|  | import mongoengine | ||||||
|  | from mongoengine import ( | ||||||
|  |     BooleanField, | ||||||
|  |     Document, | ||||||
|  |     EmailField, | ||||||
|  |     EmbeddedDocument, | ||||||
|  |     EmbeddedDocumentField, | ||||||
|  |     IntField, | ||||||
|  |     ListField, | ||||||
|  |     StringField, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | mongoengine.connect(db="mongoengine_benchmark_test") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def timeit(f, n=10000): | ||||||
|  |     return min(repeat(f, repeat=3, number=n)) / float(n) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_basic(): | ||||||
|  |     class Book(Document): | ||||||
|  |         name = StringField() | ||||||
|  |         pages = IntField() | ||||||
|  |         tags = ListField(StringField()) | ||||||
|  |         is_published = BooleanField() | ||||||
|  |         author_email = EmailField() | ||||||
|  |  | ||||||
|  |     Book.drop_collection() | ||||||
|  |  | ||||||
|  |     def init_book(): | ||||||
|  |         return Book( | ||||||
|  |             name="Always be closing", | ||||||
|  |             pages=100, | ||||||
|  |             tags=["self-help", "sales"], | ||||||
|  |             is_published=True, | ||||||
|  |             author_email="alec@example.com", | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     print("Doc initialization: %.3fus" % (timeit(init_book, 1000) * 10 ** 6)) | ||||||
|  |  | ||||||
|  |     b = init_book() | ||||||
|  |     print("Doc getattr: %.3fus" % (timeit(lambda: b.name, 10000) * 10 ** 6)) | ||||||
|  |  | ||||||
|  |     print( | ||||||
|  |         "Doc setattr: %.3fus" | ||||||
|  |         % (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6)  # noqa B010 | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10 ** 6)) | ||||||
|  |  | ||||||
|  |     print("Doc validation: %.3fus" % (timeit(b.validate, 1000) * 10 ** 6)) | ||||||
|  |  | ||||||
|  |     def save_book(): | ||||||
|  |         b._mark_as_changed("name") | ||||||
|  |         b._mark_as_changed("tags") | ||||||
|  |         b.save() | ||||||
|  |  | ||||||
|  |     print("Save to database: %.3fus" % (timeit(save_book, 100) * 10 ** 6)) | ||||||
|  |  | ||||||
|  |     son = b.to_mongo() | ||||||
|  |     print( | ||||||
|  |         "Load from SON: %.3fus" % (timeit(lambda: Book._from_son(son), 1000) * 10 ** 6) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     print( | ||||||
|  |         "Load from database: %.3fus" % (timeit(lambda: Book.objects[0], 100) * 10 ** 6) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     def create_and_delete_book(): | ||||||
|  |         b = init_book() | ||||||
|  |         b.save() | ||||||
|  |         b.delete() | ||||||
|  |  | ||||||
|  |     print( | ||||||
|  |         "Init + save to database + delete: %.3fms" | ||||||
|  |         % (timeit(create_and_delete_book, 10) * 10 ** 3) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_big_doc(): | ||||||
|  |     class Contact(EmbeddedDocument): | ||||||
|  |         name = StringField() | ||||||
|  |         title = StringField() | ||||||
|  |         address = StringField() | ||||||
|  |  | ||||||
|  |     class Company(Document): | ||||||
|  |         name = StringField() | ||||||
|  |         contacts = ListField(EmbeddedDocumentField(Contact)) | ||||||
|  |  | ||||||
|  |     Company.drop_collection() | ||||||
|  |  | ||||||
|  |     def init_company(): | ||||||
|  |         return Company( | ||||||
|  |             name="MongoDB, Inc.", | ||||||
|  |             contacts=[ | ||||||
|  |                 Contact(name="Contact %d" % x, title="CEO", address="Address %d" % x) | ||||||
|  |                 for x in range(1000) | ||||||
|  |             ], | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     company = init_company() | ||||||
|  |     print("Big doc to mongo: %.3fms" % (timeit(company.to_mongo, 100) * 10 ** 3)) | ||||||
|  |  | ||||||
|  |     print("Big doc validation: %.3fms" % (timeit(company.validate, 1000) * 10 ** 3)) | ||||||
|  |  | ||||||
|  |     company.save() | ||||||
|  |  | ||||||
|  |     def save_company(): | ||||||
|  |         company._mark_as_changed("name") | ||||||
|  |         company._mark_as_changed("contacts") | ||||||
|  |         company.save() | ||||||
|  |  | ||||||
|  |     print("Save to database: %.3fms" % (timeit(save_company, 100) * 10 ** 3)) | ||||||
|  |  | ||||||
|  |     son = company.to_mongo() | ||||||
|  |     print( | ||||||
|  |         "Load from SON: %.3fms" | ||||||
|  |         % (timeit(lambda: Company._from_son(son), 100) * 10 ** 3) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     print( | ||||||
|  |         "Load from database: %.3fms" | ||||||
|  |         % (timeit(lambda: Company.objects[0], 100) * 10 ** 3) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     def create_and_delete_company(): | ||||||
|  |         c = init_company() | ||||||
|  |         c.save() | ||||||
|  |         c.delete() | ||||||
|  |  | ||||||
|  |     print( | ||||||
|  |         "Init + save to database + delete: %.3fms" | ||||||
|  |         % (timeit(create_and_delete_company, 10) * 10 ** 3) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | if __name__ == "__main__": | ||||||
|  |     test_basic() | ||||||
|  |     print("-" * 100) | ||||||
|  |     test_big_doc() | ||||||
							
								
								
									
benchmarks/test_inserts.py (161 changes, new file)
							| @@ -0,0 +1,161 @@ | |||||||
|  | import timeit | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def main(): | ||||||
|  |     setup = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  |  | ||||||
|  | connection = MongoClient() | ||||||
|  | connection.drop_database('mongoengine_benchmark_test') | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  |  | ||||||
|  | connection = MongoClient() | ||||||
|  |  | ||||||
|  | db = connection.mongoengine_benchmark_test | ||||||
|  | noddy = db.noddy | ||||||
|  |  | ||||||
|  | for i in range(10000): | ||||||
|  |     example = {'fields': {}} | ||||||
|  |     for j in range(20): | ||||||
|  |         example['fields']["key"+str(j)] = "value "+str(j) | ||||||
|  |  | ||||||
|  |     noddy.insert_one(example) | ||||||
|  |  | ||||||
|  | myNoddys = noddy.find() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("PyMongo: Creating 10000 dictionaries.") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print(f"{t.timeit(1)}s") | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | from pymongo import MongoClient, WriteConcern | ||||||
|  | connection = MongoClient() | ||||||
|  |  | ||||||
|  | db = connection.mongoengine_benchmark_test | ||||||
|  | noddy = db.noddy.with_options(write_concern=WriteConcern(w=0)) | ||||||
|  |  | ||||||
|  | for i in range(10000): | ||||||
|  |     example = {'fields': {}} | ||||||
|  |     for j in range(20): | ||||||
|  |         example['fields']["key"+str(j)] = "value "+str(j) | ||||||
|  |  | ||||||
|  |     noddy.insert_one(example) | ||||||
|  |  | ||||||
|  | myNoddys = noddy.find() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).') | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print(f"{t.timeit(1)}s") | ||||||
|  |  | ||||||
|  |     setup = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  |  | ||||||
|  | connection = MongoClient() | ||||||
|  | connection.drop_database('mongoengine_benchmark_test') | ||||||
|  | connection.close() | ||||||
|  |  | ||||||
|  | from mongoengine import Document, DictField, connect | ||||||
|  | connect("mongoengine_benchmark_test") | ||||||
|  |  | ||||||
|  | class Noddy(Document): | ||||||
|  |     fields = DictField() | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save() | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("MongoEngine: Creating 10000 dictionaries.") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print(f"{t.timeit(1)}s") | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     fields = {} | ||||||
|  |     for j in range(20): | ||||||
|  |         fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.fields = fields | ||||||
|  |     noddy.save() | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("MongoEngine: Creating 10000 dictionaries (using a single field assignment).") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print(f"{t.timeit(1)}s") | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(write_concern={"w": 0}) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).') | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print(f"{t.timeit(1)}s") | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(write_concern={"w": 0}, validate=False) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print( | ||||||
|  |         'MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).' | ||||||
|  |     ) | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print(f"{t.timeit(1)}s") | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(force_insert=True, write_concern={"w": 0}, validate=False) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print( | ||||||
|  |         'MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).' | ||||||
|  |     ) | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print(f"{t.timeit(1)}s") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | if __name__ == "__main__": | ||||||
|  |     main() | ||||||
| @@ -33,8 +33,14 @@ clean: | |||||||
| html: | html: | ||||||
| 	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html | 	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html | ||||||
| 	@echo | 	@echo | ||||||
|  | 	@echo "Build finished. Check $(BUILDDIR)/html/index.html" | ||||||
|  |  | ||||||
|  | html-readthedocs: | ||||||
|  | 	$(SPHINXBUILD) -T -E -b readthedocs $(ALLSPHINXOPTS) $(BUILDDIR)/html | ||||||
|  | 	@echo | ||||||
| 	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html." | 	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html." | ||||||
|  |  | ||||||
|  |  | ||||||
| dirhtml: | dirhtml: | ||||||
| 	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml | 	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml | ||||||
| 	@echo | 	@echo | ||||||
|   | |||||||
							
								
								
									
docs/_themes/sphinx_rtd_theme/__init__.py (17 changes, vendored)
							| @@ -1,17 +0,0 @@ | |||||||
| """Sphinx ReadTheDocs theme. |  | ||||||
|  |  | ||||||
| From https://github.com/ryan-roemer/sphinx-bootstrap-theme. |  | ||||||
|  |  | ||||||
| """ |  | ||||||
| import os |  | ||||||
|  |  | ||||||
| VERSION = (0, 1, 5) |  | ||||||
|  |  | ||||||
| __version__ = ".".join(str(v) for v in VERSION) |  | ||||||
| __version_full__ = __version__ |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_html_theme_path(): |  | ||||||
|     """Return list of HTML theme paths.""" |  | ||||||
|     cur_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) |  | ||||||
|     return cur_dir |  | ||||||
							
								
								
									
docs/_themes/sphinx_rtd_theme/breadcrumbs.html (15 changes, vendored)
							| @@ -1,15 +0,0 @@ | |||||||
| <ul class="wy-breadcrumbs"> |  | ||||||
|   <li><a href="{{ pathto(master_doc) }}">Docs</a> »</li> |  | ||||||
|   <li><a href="">{{ title }}</a></li> |  | ||||||
|     <li class="wy-breadcrumbs-aside"> |  | ||||||
|       {% if display_github %} |  | ||||||
|         <a href="https://github.com/{{ github_user }}/{{ github_repo }}/blob/{{ github_version }}{{ conf_py_path }}{{ pagename }}.rst" class="icon icon-github"> Edit on GitHub</a> |  | ||||||
|       {% elif display_bitbucket %} |  | ||||||
|         <a href="https://bitbucket.org/{{ bitbucket_user }}/{{ bitbucket_repo }}/src/{{ bitbucket_version}}{{ conf_py_path }}{{ pagename }}.rst'" class="icon icon-bitbucket"> Edit on Bitbucket</a> |  | ||||||
|       {% elif show_source and has_source and sourcename %} |  | ||||||
|         <a href="{{ pathto('_sources/' + sourcename, true)|e }}" rel="nofollow"> View page source</a> |  | ||||||
|       {% endif %} |  | ||||||
|     </li> |  | ||||||
| </ul> |  | ||||||
| <hr/> |  | ||||||
|  |  | ||||||
							
								
								
									
docs/_themes/sphinx_rtd_theme/footer.html (30 changes, vendored)
							| @@ -1,30 +0,0 @@ | |||||||
| <footer> |  | ||||||
|   {% if next or prev %} |  | ||||||
|     <div class="rst-footer-buttons"> |  | ||||||
|       {% if next %} |  | ||||||
|         <a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}"/>Next <span class="icon icon-circle-arrow-right"></span></a> |  | ||||||
|       {% endif %} |  | ||||||
|       {% if prev %} |  | ||||||
|         <a href="{{ prev.link|e }}" class="btn btn-neutral" title="{{ prev.title|striptags|e }}"><span class="icon icon-circle-arrow-left"></span> Previous</a> |  | ||||||
|       {% endif %} |  | ||||||
|     </div> |  | ||||||
|   {% endif %} |  | ||||||
|  |  | ||||||
|   <hr/> |  | ||||||
|  |  | ||||||
|   <p> |  | ||||||
|   {%- if show_copyright %} |  | ||||||
|     {%- if hasdoc('copyright') %} |  | ||||||
|       {% trans path=pathto('copyright'), copyright=copyright|e %}© <a href="{{ path }}">Copyright</a> {{ copyright }}.{% endtrans %} |  | ||||||
|     {%- else %} |  | ||||||
|       {% trans copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %} |  | ||||||
|     {%- endif %} |  | ||||||
|   {%- endif %} |  | ||||||
|  |  | ||||||
|   {%- if last_updated %} |  | ||||||
|     {% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %} |  | ||||||
|   {%- endif %} |  | ||||||
|   </p> |  | ||||||
|  |  | ||||||
|   {% trans %}<a href="https://www.github.com/snide/sphinx_rtd_theme">Sphinx theme</a> provided by <a href="http://readthedocs.org">Read the Docs</a>{% endtrans %} |  | ||||||
| </footer> |  | ||||||
							
								
								
									
docs/_themes/sphinx_rtd_theme/layout.html (142 changes, vendored)
							| @@ -1,142 +0,0 @@ | |||||||
| {# TEMPLATE VAR SETTINGS #} |  | ||||||
| {%- set url_root = pathto('', 1) %} |  | ||||||
| {%- if url_root == '#' %}{% set url_root = '' %}{% endif %} |  | ||||||
| {%- if not embedded and docstitle %} |  | ||||||
|   {%- set titlesuffix = " — "|safe + docstitle|e %} |  | ||||||
| {%- else %} |  | ||||||
|   {%- set titlesuffix = "" %} |  | ||||||
| {%- endif %} |  | ||||||
|  |  | ||||||
| <!DOCTYPE html> |  | ||||||
| <!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]--> |  | ||||||
| <!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]--> |  | ||||||
| <head> |  | ||||||
|   <meta charset="utf-8"> |  | ||||||
|   <meta name="viewport" content="width=device-width, initial-scale=1.0"> |  | ||||||
|   {% block htmltitle %} |  | ||||||
|   <title>{{ title|striptags|e }}{{ titlesuffix }}</title> |  | ||||||
|   {% endblock %} |  | ||||||
|  |  | ||||||
|   {# FAVICON #} |  | ||||||
|   {% if favicon %} |  | ||||||
|     <link rel="shortcut icon" href="{{ pathto('_static/' + favicon, 1) }}"/> |  | ||||||
|   {% endif %} |  | ||||||
|   {# CANONICAL #} |  | ||||||
|   {%- if theme_canonical_url %} |  | ||||||
|     <link rel="canonical" href="{{ theme_canonical_url }}{{ pagename }}.html"/> |  | ||||||
|   {%- endif %} |  | ||||||
|  |  | ||||||
|   {# CSS #} |  | ||||||
|   <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'> |  | ||||||
|  |  | ||||||
|   {# JS #} |  | ||||||
|   {% if not embedded %} |  | ||||||
|  |  | ||||||
|     <script type="text/javascript"> |  | ||||||
|       var DOCUMENTATION_OPTIONS = { |  | ||||||
|         URL_ROOT:'{{ url_root }}', |  | ||||||
|         VERSION:'{{ release|e }}', |  | ||||||
|         COLLAPSE_INDEX:false, |  | ||||||
|         FILE_SUFFIX:'{{ '' if no_search_suffix else file_suffix }}', |  | ||||||
|         HAS_SOURCE:  {{ has_source|lower }} |  | ||||||
|       }; |  | ||||||
|     </script> |  | ||||||
|     {%- for scriptfile in script_files %} |  | ||||||
|       <script type="text/javascript" src="{{ pathto(scriptfile, 1) }}"></script> |  | ||||||
|     {%- endfor %} |  | ||||||
|  |  | ||||||
|     {% if use_opensearch %} |  | ||||||
|       <link rel="search" type="application/opensearchdescription+xml" title="{% trans docstitle=docstitle|e %}Search within {{ docstitle }}{% endtrans %}" href="{{ pathto('_static/opensearch.xml', 1) }}"/> |  | ||||||
|     {% endif %} |  | ||||||
|  |  | ||||||
|   {% endif %} |  | ||||||
|  |  | ||||||
|   {# RTD hosts these file themselves, so just load on non RTD builds #} |  | ||||||
|   {% if not READTHEDOCS %} |  | ||||||
|     <link rel="stylesheet" href="{{ pathto('_static/' + style, 1) }}" type="text/css" /> |  | ||||||
|     <script type="text/javascript" src="_static/js/theme.js"></script> |  | ||||||
|   {% endif %} |  | ||||||
|  |  | ||||||
|   {% for cssfile in css_files %} |  | ||||||
|     <link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" /> |  | ||||||
|   {% endfor %} |  | ||||||
|  |  | ||||||
|   {%- block linktags %} |  | ||||||
|     {%- if hasdoc('about') %} |  | ||||||
|         <link rel="author" title="{{ _('About these documents') }}" |  | ||||||
|               href="{{ pathto('about') }}"/> |  | ||||||
|     {%- endif %} |  | ||||||
|     {%- if hasdoc('genindex') %} |  | ||||||
|         <link rel="index" title="{{ _('Index') }}" |  | ||||||
|               href="{{ pathto('genindex') }}"/> |  | ||||||
|     {%- endif %} |  | ||||||
|     {%- if hasdoc('search') %} |  | ||||||
|         <link rel="search" title="{{ _('Search') }}" href="{{ pathto('search') }}"/> |  | ||||||
|     {%- endif %} |  | ||||||
|     {%- if hasdoc('copyright') %} |  | ||||||
|         <link rel="copyright" title="{{ _('Copyright') }}" href="{{ pathto('copyright') }}"/> |  | ||||||
|     {%- endif %} |  | ||||||
|     <link rel="top" title="{{ docstitle|e }}" href="{{ pathto('index') }}"/> |  | ||||||
|     {%- if parents %} |  | ||||||
|         <link rel="up" title="{{ parents[-1].title|striptags|e }}" href="{{ parents[-1].link|e }}"/> |  | ||||||
|     {%- endif %} |  | ||||||
|     {%- if next %} |  | ||||||
|         <link rel="next" title="{{ next.title|striptags|e }}" href="{{ next.link|e }}"/> |  | ||||||
|     {%- endif %} |  | ||||||
|     {%- if prev %} |  | ||||||
|         <link rel="prev" title="{{ prev.title|striptags|e }}" href="{{ prev.link|e }}"/> |  | ||||||
|     {%- endif %} |  | ||||||
|   {%- endblock %} |  | ||||||
|   {%- block extrahead %} {% endblock %} |  | ||||||
|  |  | ||||||
|   <script src="//cdnjs.cloudflare.com/ajax/libs/modernizr/2.6.2/modernizr.min.js"></script> |  | ||||||
|  |  | ||||||
| </head> |  | ||||||
|  |  | ||||||
| <body class="wy-body-for-nav"> |  | ||||||
|  |  | ||||||
|   <div class="wy-grid-for-nav"> |  | ||||||
|  |  | ||||||
|     {# SIDE NAV, TOGGLES ON MOBILE #} |  | ||||||
|     <nav data-toggle="wy-nav-shift" class="wy-nav-side"> |  | ||||||
|       <div class="wy-side-nav-search"> |  | ||||||
|         <a href="{{ pathto(master_doc) }}" class="icon icon-home"> {{ project }}</a> |  | ||||||
|         {% include "searchbox.html" %} |  | ||||||
|       </div> |  | ||||||
|  |  | ||||||
|       <div class="wy-menu wy-menu-vertical" data-spy="affix"> |  | ||||||
|         {% set toctree = toctree(maxdepth=2, collapse=False, includehidden=True) %} |  | ||||||
|         {% if toctree %} |  | ||||||
|             {{ toctree }} |  | ||||||
|         {% else %} |  | ||||||
|             <!-- Local TOC --> |  | ||||||
|             <div class="local-toc">{{ toc }}</div> |  | ||||||
|         {% endif %} |  | ||||||
|       </div> |  | ||||||
|         |  | ||||||
|     </nav> |  | ||||||
|  |  | ||||||
|     <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap"> |  | ||||||
|  |  | ||||||
|       {# MOBILE NAV, TRIGGLES SIDE NAV ON TOGGLE #} |  | ||||||
|       <nav class="wy-nav-top"> |  | ||||||
|         <i data-toggle="wy-nav-top" class="icon icon-reorder"></i> |  | ||||||
|         <a href="{{ pathto(master_doc) }}">{{ project }}</a> |  | ||||||
|       </nav> |  | ||||||
|  |  | ||||||
|  |  | ||||||
|       {# PAGE CONTENT #} |  | ||||||
|       <div class="wy-nav-content"> |  | ||||||
|         <div class="rst-content"> |  | ||||||
|           {% include "breadcrumbs.html" %} |  | ||||||
|           {% block body %}{% endblock %} |  | ||||||
|           {% include "footer.html" %} |  | ||||||
|         </div> |  | ||||||
|       </div> |  | ||||||
|  |  | ||||||
|     </section> |  | ||||||
|  |  | ||||||
|   </div> |  | ||||||
|   {% include "versions.html" %} |  | ||||||
| </body> |  | ||||||
| </html> |  | ||||||
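Context for the removal above: the deleted files are a copy of sphinx_rtd_theme vendored under docs/_themes/. As a rough sketch only (this project's actual conf.py is not part of this diff, so the values below are assumptions), a theme vendored in that location is normally wired up with the standard Sphinx settings html_theme and html_theme_path, and the templates above (layout.html, searchbox.html, etc.) are then resolved from that directory by Sphinx's template loader:

# docs/conf.py -- minimal sketch, assumed values; this repository's real conf.py is not shown in this diff
html_theme = "sphinx_rtd_theme"    # directory name of the vendored theme
html_theme_path = ["_themes"]      # docs/_themes/, where the deleted files lived
# After the vendored copy is removed, the theme is usually consumed as the
# installed sphinx_rtd_theme package instead, and html_theme_path is dropped
# (assumed follow-up, not shown in this diff).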
							
								
								
									
205  docs/_themes/sphinx_rtd_theme/layout_old.html (vendored)
							| @@ -1,205 +0,0 @@ | |||||||
| {# |  | ||||||
|     basic/layout.html |  | ||||||
|     ~~~~~~~~~~~~~~~~~ |  | ||||||
|  |  | ||||||
|     Master layout template for Sphinx themes. |  | ||||||
|  |  | ||||||
|     :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. |  | ||||||
|     :license: BSD, see LICENSE for details. |  | ||||||
| #} |  | ||||||
| {%- block doctype -%} |  | ||||||
| <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" |  | ||||||
|   "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> |  | ||||||
| {%- endblock %} |  | ||||||
| {%- set reldelim1 = reldelim1 is not defined and ' »' or reldelim1 %} |  | ||||||
| {%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %} |  | ||||||
| {%- set render_sidebar = (not embedded) and (not theme_nosidebar|tobool) and |  | ||||||
|                          (sidebars != []) %} |  | ||||||
| {%- set url_root = pathto('', 1) %} |  | ||||||
| {# XXX necessary? #} |  | ||||||
| {%- if url_root == '#' %}{% set url_root = '' %}{% endif %} |  | ||||||
| {%- if not embedded and docstitle %} |  | ||||||
|   {%- set titlesuffix = " — "|safe + docstitle|e %} |  | ||||||
| {%- else %} |  | ||||||
|   {%- set titlesuffix = "" %} |  | ||||||
| {%- endif %} |  | ||||||
|  |  | ||||||
| {%- macro relbar() %} |  | ||||||
|     <div class="related"> |  | ||||||
|       <h3>{{ _('Navigation') }}</h3> |  | ||||||
|       <ul> |  | ||||||
|         {%- for rellink in rellinks %} |  | ||||||
|         <li class="right" {% if loop.first %}style="margin-right: 10px"{% endif %}> |  | ||||||
|           <a href="{{ pathto(rellink[0]) }}" title="{{ rellink[1]|striptags|e }}" |  | ||||||
|              {{ accesskey(rellink[2]) }}>{{ rellink[3] }}</a> |  | ||||||
|           {%- if not loop.first %}{{ reldelim2 }}{% endif %}</li> |  | ||||||
|         {%- endfor %} |  | ||||||
|         {%- block rootrellink %} |  | ||||||
|         <li><a href="{{ pathto(master_doc) }}">{{ shorttitle|e }}</a>{{ reldelim1 }}</li> |  | ||||||
|         {%- endblock %} |  | ||||||
|         {%- for parent in parents %} |  | ||||||
|           <li><a href="{{ parent.link|e }}" {% if loop.last %}{{ accesskey("U") }}{% endif %}>{{ parent.title }}</a>{{ reldelim1 }}</li> |  | ||||||
|         {%- endfor %} |  | ||||||
|         {%- block relbaritems %} {% endblock %} |  | ||||||
|       </ul> |  | ||||||
|     </div> |  | ||||||
| {%- endmacro %} |  | ||||||
|  |  | ||||||
| {%- macro sidebar() %} |  | ||||||
|       {%- if render_sidebar %} |  | ||||||
|       <div class="sphinxsidebar"> |  | ||||||
|         <div class="sphinxsidebarwrapper"> |  | ||||||
|           {%- block sidebarlogo %} |  | ||||||
|           {%- if logo %} |  | ||||||
|             <p class="logo"><a href="{{ pathto(master_doc) }}"> |  | ||||||
|               <img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/> |  | ||||||
|             </a></p> |  | ||||||
|           {%- endif %} |  | ||||||
|           {%- endblock %} |  | ||||||
|           {%- if sidebars != None %} |  | ||||||
|             {#- new style sidebar: explicitly include/exclude templates #} |  | ||||||
|             {%- for sidebartemplate in sidebars %} |  | ||||||
|             {%- include sidebartemplate %} |  | ||||||
|             {%- endfor %} |  | ||||||
|           {%- else %} |  | ||||||
|             {#- old style sidebars: using blocks -- should be deprecated #} |  | ||||||
|             {%- block sidebartoc %} |  | ||||||
|             {%- include "localtoc.html" %} |  | ||||||
|             {%- endblock %} |  | ||||||
|             {%- block sidebarrel %} |  | ||||||
|             {%- include "relations.html" %} |  | ||||||
|             {%- endblock %} |  | ||||||
|             {%- block sidebarsourcelink %} |  | ||||||
|             {%- include "sourcelink.html" %} |  | ||||||
|             {%- endblock %} |  | ||||||
|             {%- if customsidebar %} |  | ||||||
|             {%- include customsidebar %} |  | ||||||
|             {%- endif %} |  | ||||||
|             {%- block sidebarsearch %} |  | ||||||
|             {%- include "searchbox.html" %} |  | ||||||
|             {%- endblock %} |  | ||||||
|           {%- endif %} |  | ||||||
|         </div> |  | ||||||
|       </div> |  | ||||||
|       {%- endif %} |  | ||||||
| {%- endmacro %} |  | ||||||
|  |  | ||||||
| {%- macro script() %} |  | ||||||
|     <script type="text/javascript"> |  | ||||||
|       var DOCUMENTATION_OPTIONS = { |  | ||||||
|         URL_ROOT:    '{{ url_root }}', |  | ||||||
|         VERSION:     '{{ release|e }}', |  | ||||||
|         COLLAPSE_INDEX: false, |  | ||||||
|         FILE_SUFFIX: '{{ '' if no_search_suffix else file_suffix }}', |  | ||||||
|         HAS_SOURCE:  {{ has_source|lower }} |  | ||||||
|       }; |  | ||||||
|     </script> |  | ||||||
|     {%- for scriptfile in script_files %} |  | ||||||
|     <script type="text/javascript" src="{{ pathto(scriptfile, 1) }}"></script> |  | ||||||
|     {%- endfor %} |  | ||||||
| {%- endmacro %} |  | ||||||
|  |  | ||||||
| {%- macro css() %} |  | ||||||
|     <link rel="stylesheet" href="{{ pathto('_static/' + style, 1) }}" type="text/css" /> |  | ||||||
|     <link rel="stylesheet" href="{{ pathto('_static/pygments.css', 1) }}" type="text/css" /> |  | ||||||
|     {%- for cssfile in css_files %} |  | ||||||
|     <link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" /> |  | ||||||
|     {%- endfor %} |  | ||||||
| {%- endmacro %} |  | ||||||
|  |  | ||||||
| <html xmlns="http://www.w3.org/1999/xhtml"> |  | ||||||
|   <head> |  | ||||||
|     <meta http-equiv="Content-Type" content="text/html; charset={{ encoding }}" /> |  | ||||||
|     {{ metatags }} |  | ||||||
|     {%- block htmltitle %} |  | ||||||
|     <title>{{ title|striptags|e }}{{ titlesuffix }}</title> |  | ||||||
|     {%- endblock %} |  | ||||||
|     {{ css() }} |  | ||||||
|     {%- if not embedded %} |  | ||||||
|     {{ script() }} |  | ||||||
|     {%- if use_opensearch %} |  | ||||||
|     <link rel="search" type="application/opensearchdescription+xml" |  | ||||||
|           title="{% trans docstitle=docstitle|e %}Search within {{ docstitle }}{% endtrans %}" |  | ||||||
|           href="{{ pathto('_static/opensearch.xml', 1) }}"/> |  | ||||||
|     {%- endif %} |  | ||||||
|     {%- if favicon %} |  | ||||||
|     <link rel="shortcut icon" href="{{ pathto('_static/' + favicon, 1) }}"/> |  | ||||||
|     {%- endif %} |  | ||||||
|     {%- endif %} |  | ||||||
| {%- block linktags %} |  | ||||||
|     {%- if hasdoc('about') %} |  | ||||||
|     <link rel="author" title="{{ _('About these documents') }}" href="{{ pathto('about') }}" /> |  | ||||||
|     {%- endif %} |  | ||||||
|     {%- if hasdoc('genindex') %} |  | ||||||
|     <link rel="index" title="{{ _('Index') }}" href="{{ pathto('genindex') }}" /> |  | ||||||
|     {%- endif %} |  | ||||||
|     {%- if hasdoc('search') %} |  | ||||||
|     <link rel="search" title="{{ _('Search') }}" href="{{ pathto('search') }}" /> |  | ||||||
|     {%- endif %} |  | ||||||
|     {%- if hasdoc('copyright') %} |  | ||||||
|     <link rel="copyright" title="{{ _('Copyright') }}" href="{{ pathto('copyright') }}" /> |  | ||||||
|     {%- endif %} |  | ||||||
|     <link rel="top" title="{{ docstitle|e }}" href="{{ pathto('index') }}" /> |  | ||||||
|     {%- if parents %} |  | ||||||
|     <link rel="up" title="{{ parents[-1].title|striptags|e }}" href="{{ parents[-1].link|e }}" /> |  | ||||||
|     {%- endif %} |  | ||||||
|     {%- if next %} |  | ||||||
|     <link rel="next" title="{{ next.title|striptags|e }}" href="{{ next.link|e }}" /> |  | ||||||
|     {%- endif %} |  | ||||||
|     {%- if prev %} |  | ||||||
|     <link rel="prev" title="{{ prev.title|striptags|e }}" href="{{ prev.link|e }}" /> |  | ||||||
|     {%- endif %} |  | ||||||
| {%- endblock %} |  | ||||||
| {%- block extrahead %} {% endblock %} |  | ||||||
|   </head> |  | ||||||
|   <body> |  | ||||||
| {%- block header %}{% endblock %} |  | ||||||
|  |  | ||||||
| {%- block relbar1 %}{{ relbar() }}{% endblock %} |  | ||||||
|  |  | ||||||
| {%- block content %} |  | ||||||
|   {%- block sidebar1 %} {# possible location for sidebar #} {% endblock %} |  | ||||||
|  |  | ||||||
|     <div class="document"> |  | ||||||
|   {%- block document %} |  | ||||||
|       <div class="documentwrapper"> |  | ||||||
|       {%- if render_sidebar %} |  | ||||||
|         <div class="bodywrapper"> |  | ||||||
|       {%- endif %} |  | ||||||
|           <div class="body"> |  | ||||||
|             {% block body %} {% endblock %} |  | ||||||
|           </div> |  | ||||||
|       {%- if render_sidebar %} |  | ||||||
|         </div> |  | ||||||
|       {%- endif %} |  | ||||||
|       </div> |  | ||||||
|   {%- endblock %} |  | ||||||
|  |  | ||||||
|   {%- block sidebar2 %}{{ sidebar() }}{% endblock %} |  | ||||||
|       <div class="clearer"></div> |  | ||||||
|     </div> |  | ||||||
| {%- endblock %} |  | ||||||
|  |  | ||||||
| {%- block relbar2 %}{{ relbar() }}{% endblock %} |  | ||||||
|  |  | ||||||
| {%- block footer %} |  | ||||||
|     <div class="footer"> |  | ||||||
|     {%- if show_copyright %} |  | ||||||
|       {%- if hasdoc('copyright') %} |  | ||||||
|         {% trans path=pathto('copyright'), copyright=copyright|e %}© <a href="{{ path }}">Copyright</a> {{ copyright }}.{% endtrans %} |  | ||||||
|       {%- else %} |  | ||||||
|         {% trans copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %} |  | ||||||
|       {%- endif %} |  | ||||||
|     {%- endif %} |  | ||||||
|     {%- if last_updated %} |  | ||||||
|       {% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %} |  | ||||||
|     {%- endif %} |  | ||||||
|     {%- if show_sphinx %} |  | ||||||
|       {% trans sphinx_version=sphinx_version|e %}Created using <a href="http://sphinx-doc.org/">Sphinx</a> {{ sphinx_version }}.{% endtrans %} |  | ||||||
|     {%- endif %} |  | ||||||
|     </div> |  | ||||||
|     <p>asdf asdf asdf asdf 22</p> |  | ||||||
| {%- endblock %} |  | ||||||
|   </body> |  | ||||||
| </html> |  | ||||||
|  |  | ||||||
							
								
								
									
50  docs/_themes/sphinx_rtd_theme/search.html (vendored)
							| @@ -1,50 +0,0 @@ | |||||||
| {# |  | ||||||
|     basic/search.html |  | ||||||
|     ~~~~~~~~~~~~~~~~~ |  | ||||||
|  |  | ||||||
|     Template for the search page. |  | ||||||
|  |  | ||||||
|     :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. |  | ||||||
|     :license: BSD, see LICENSE for details. |  | ||||||
| #} |  | ||||||
| {%- extends "layout.html" %} |  | ||||||
| {% set title = _('Search') %} |  | ||||||
| {% set script_files = script_files + ['_static/searchtools.js'] %} |  | ||||||
| {% block extrahead %} |  | ||||||
|   <script type="text/javascript"> |  | ||||||
|     jQuery(function() { Search.loadIndex("{{ pathto('searchindex.js', 1) }}"); }); |  | ||||||
|   </script> |  | ||||||
|   {# this is used when loading the search index using $.ajax fails, |  | ||||||
|      such as on Chrome for documents on localhost #} |  | ||||||
|   <script type="text/javascript" id="searchindexloader"></script> |  | ||||||
|   {{ super() }} |  | ||||||
| {% endblock %} |  | ||||||
| {% block body %} |  | ||||||
|   <noscript> |  | ||||||
|   <div id="fallback" class="admonition warning"> |  | ||||||
|     <p class="last"> |  | ||||||
|       {% trans %}Please activate JavaScript to enable the search |  | ||||||
|       functionality.{% endtrans %} |  | ||||||
|     </p> |  | ||||||
|   </div> |  | ||||||
|   </noscript> |  | ||||||
|  |  | ||||||
|   {% if search_performed %} |  | ||||||
|     <h2>{{ _('Search Results') }}</h2> |  | ||||||
|     {% if not search_results %} |  | ||||||
|       <p>{{ _('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.') }}</p> |  | ||||||
|     {% endif %} |  | ||||||
|   {% endif %} |  | ||||||
|   <div id="search-results"> |  | ||||||
|   {% if search_results %} |  | ||||||
|     <ul> |  | ||||||
|     {% for href, caption, context in search_results %} |  | ||||||
|       <li> |  | ||||||
|         <a href="{{ pathto(item.href) }}">{{ caption }}</a> |  | ||||||
|         <p class="context">{{ context|e }}</p> |  | ||||||
|       </li> |  | ||||||
|     {% endfor %} |  | ||||||
|     </ul> |  | ||||||
|   {% endif %} |  | ||||||
|   </div> |  | ||||||
| {% endblock %} |  | ||||||
							
								
								
									
5  docs/_themes/sphinx_rtd_theme/searchbox.html (vendored)
							| @@ -1,5 +0,0 @@ | |||||||
| <form id ="rtd-search-form" class="wy-form" action="{{ pathto('search') }}" method="get"> |  | ||||||
|   <input type="text" name="q" placeholder="Search docs" /> |  | ||||||
|   <input type="hidden" name="check_keywords" value="yes" /> |  | ||||||
|   <input type="hidden" name="area" value="default" /> |  | ||||||
| </form> |  | ||||||
| @@ -1 +0,0 @@ | |||||||
| .font-smooth,.icon:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:fontawesome-webfont;font-weight:normal;font-style:normal;src:url("../font/fontawesome_webfont.eot");src:url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"),url("../font/fontawesome_webfont.woff") format("woff"),url("../font/fontawesome_webfont.ttf") format("truetype"),url("../font/fontawesome_webfont.svg#fontawesome-webfont") format("svg")}.icon:before{display:inline-block;font-family:fontawesome-webfont;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .icon{display:inline-block;text-decoration:inherit}li .icon{display:inline-block}li .icon-large:before,li .icon-large:before{width:1.875em}ul.icons{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.icons li .icon{width:0.8em}ul.icons li .icon-large:before,ul.icons li .icon-large:before{vertical-align:baseline}.icon-book:before{content:"\f02d"}.icon-caret-down:before{content:"\f0d7"}.icon-caret-up:before{content:"\f0d8"}.icon-caret-left:before{content:"\f0d9"}.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .icon{color:#fcfcfc}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}img{width:100%;height:auto}} |  | ||||||
										
											
File diff suppressed because one or more lines are too long

BIN  docs/_themes/sphinx_rtd_theme/static/favicon.ico (vendored)
Binary file not shown (before: 6.1 KiB).
Binary file not shown.
							| @@ -1,399 +0,0 @@ | |||||||
| <?xml version="1.0" standalone="no"?> |  | ||||||
| <!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" > |  | ||||||
| <svg xmlns="http://www.w3.org/2000/svg"> |  | ||||||
| <metadata></metadata> |  | ||||||
| <defs> |  | ||||||
| <font id="fontawesomeregular" horiz-adv-x="1536" > |  | ||||||
| <font-face units-per-em="1792" ascent="1536" descent="-256" /> |  | ||||||
| <missing-glyph horiz-adv-x="448" /> |  | ||||||
| <glyph unicode=" "  horiz-adv-x="448" /> |  | ||||||
| <glyph unicode="	" horiz-adv-x="448" /> |  | ||||||
| <glyph unicode=" " horiz-adv-x="448" /> |  | ||||||
| <glyph unicode="¨" horiz-adv-x="1792" /> |  | ||||||
| <glyph unicode="©" horiz-adv-x="1792" /> |  | ||||||
| <glyph unicode="®" horiz-adv-x="1792" /> |  | ||||||
| <glyph unicode="´" horiz-adv-x="1792" /> |  | ||||||
| <glyph unicode="Æ" horiz-adv-x="1792" /> |  | ||||||
| <glyph unicode=" " horiz-adv-x="768" /> |  | ||||||
| <glyph unicode=" " /> |  | ||||||
| <glyph unicode=" " horiz-adv-x="768" /> |  | ||||||
| <glyph unicode=" " /> |  | ||||||
| <glyph unicode=" " horiz-adv-x="512" /> |  | ||||||
| <glyph unicode=" " horiz-adv-x="384" /> |  | ||||||
| <glyph unicode=" " horiz-adv-x="256" /> |  | ||||||
| <glyph unicode=" " horiz-adv-x="256" /> |  | ||||||
| <glyph unicode=" " horiz-adv-x="192" /> |  | ||||||
| <glyph unicode=" " horiz-adv-x="307" /> |  | ||||||
| <glyph unicode=" " horiz-adv-x="85" /> |  | ||||||
| <glyph unicode=" " horiz-adv-x="307" /> |  | ||||||
| <glyph unicode=" " horiz-adv-x="384" /> |  | ||||||
| <glyph unicode="™" horiz-adv-x="1792" /> |  | ||||||
| <glyph unicode="∞" horiz-adv-x="1792" /> |  | ||||||
| <glyph unicode="≠" horiz-adv-x="1792" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="500" d="M0 0z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1699 1350q0 -35 -43 -78l-632 -632v-768h320q26 0 45 -19t19 -45t-19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45t45 19h320v768l-632 632q-43 43 -43 78q0 23 18 36.5t38 17.5t43 4h1408q23 0 43 -4t38 -17.5t18 -36.5z" /> |  | ||||||
| <glyph unicode="" d="M1536 1312v-1120q0 -50 -34 -89t-86 -60.5t-103.5 -32t-96.5 -10.5t-96.5 10.5t-103.5 32t-86 60.5t-34 89t34 89t86 60.5t103.5 32t96.5 10.5q105 0 192 -39v537l-768 -237v-709q0 -50 -34 -89t-86 -60.5t-103.5 -32t-96.5 -10.5t-96.5 10.5t-103.5 32t-86 60.5t-34 89 t34 89t86 60.5t103.5 32t96.5 10.5q105 0 192 -39v967q0 31 19 56.5t49 35.5l832 256q12 4 28 4q40 0 68 -28t28 -68z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5zM1664 -128q0 -52 -38 -90t-90 -38q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5 t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1664 32v768q-32 -36 -69 -66q-268 -206 -426 -338q-51 -43 -83 -67t-86.5 -48.5t-102.5 -24.5h-1h-1q-48 0 -102.5 24.5t-86.5 48.5t-83 67q-158 132 -426 338q-37 30 -69 66v-768q0 -13 9.5 -22.5t22.5 -9.5h1472q13 0 22.5 9.5t9.5 22.5zM1664 1083v11v13.5t-0.5 13 t-3 12.5t-5.5 9t-9 7.5t-14 2.5h-1472q-13 0 -22.5 -9.5t-9.5 -22.5q0 -168 147 -284q193 -152 401 -317q6 -5 35 -29.5t46 -37.5t44.5 -31.5t50.5 -27.5t43 -9h1h1q20 0 43 9t50.5 27.5t44.5 31.5t46 37.5t35 29.5q208 165 401 317q54 43 100.5 115.5t46.5 131.5z M1792 1120v-1088q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1472q66 0 113 -47t47 -113z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M896 -128q-26 0 -44 18l-624 602q-10 8 -27.5 26t-55.5 65.5t-68 97.5t-53.5 121t-23.5 138q0 220 127 344t351 124q62 0 126.5 -21.5t120 -58t95.5 -68.5t76 -68q36 36 76 68t95.5 68.5t120 58t126.5 21.5q224 0 351 -124t127 -344q0 -221 -229 -450l-623 -600 q-18 -18 -44 -18z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1664 889q0 -22 -26 -48l-363 -354l86 -500q1 -7 1 -20q0 -21 -10.5 -35.5t-30.5 -14.5q-19 0 -40 12l-449 236l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41t49 -41l225 -455 l502 -73q56 -9 56 -46z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1137 532l306 297l-422 62l-189 382l-189 -382l-422 -62l306 -297l-73 -421l378 199l377 -199zM1664 889q0 -22 -26 -48l-363 -354l86 -500q1 -7 1 -20q0 -50 -41 -50q-19 0 -40 12l-449 236l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500 l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41t49 -41l225 -455l502 -73q56 -9 56 -46z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M1408 131q0 -120 -73 -189.5t-194 -69.5h-874q-121 0 -194 69.5t-73 189.5q0 53 3.5 103.5t14 109t26.5 108.5t43 97.5t62 81t85.5 53.5t111.5 20q9 0 42 -21.5t74.5 -48t108 -48t133.5 -21.5t133.5 21.5t108 48t74.5 48t42 21.5q61 0 111.5 -20t85.5 -53.5t62 -81 t43 -97.5t26.5 -108.5t14 -109t3.5 -103.5zM1088 1024q0 -159 -112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5t112.5 271.5t271.5 112.5t271.5 -112.5t112.5 -271.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M384 -64v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM384 320v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM384 704v128q0 26 -19 45t-45 19h-128 q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1408 -64v512q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-512q0 -26 19 -45t45 -19h768q26 0 45 19t19 45zM384 1088v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45 t45 -19h128q26 0 45 19t19 45zM1792 -64v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1408 704v512q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-512q0 -26 19 -45t45 -19h768q26 0 45 19t19 45zM1792 320v128 q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1792 704v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1792 1088v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19 t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1920 1248v-1344q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1344q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M768 512v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM768 1280v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM1664 512v-384q0 -52 -38 -90t-90 -38 h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM1664 1280v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M512 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 288v-192q0 -40 -28 -68t-68 -28h-320 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28 h320q40 0 68 -28t28 -68zM1792 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 800v-192 q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M512 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 288v-192q0 -40 -28 -68t-68 -28h-960 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h960q40 0 68 -28t28 -68zM512 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 800v-192q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v192q0 40 28 68t68 28 h960q40 0 68 -28t28 -68zM1792 1312v-192q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h960q40 0 68 -28t28 -68z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1671 970q0 -40 -28 -68l-724 -724l-136 -136q-28 -28 -68 -28t-68 28l-136 136l-362 362q-28 28 -28 68t28 68l136 136q28 28 68 28t68 -28l294 -295l656 657q28 28 68 28t68 -28l136 -136q28 -28 28 -68z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M1298 214q0 -40 -28 -68l-136 -136q-28 -28 -68 -28t-68 28l-294 294l-294 -294q-28 -28 -68 -28t-68 28l-136 136q-28 28 -28 68t28 68l294 294l-294 294q-28 28 -28 68t28 68l136 136q28 28 68 28t68 -28l294 -294l294 294q28 28 68 28t68 -28l136 -136q28 -28 28 -68 t-28 -68l-294 -294l294 -294q28 -28 28 -68z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1024 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-224v-224q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v224h-224q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h224v224q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-224h224 q13 0 22.5 -9.5t9.5 -22.5zM1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5zM1664 -128q0 -53 -37.5 -90.5t-90.5 -37.5q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5 t-225 150t-150 225t-55.5 273.5t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1024 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-576q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h576q13 0 22.5 -9.5t9.5 -22.5zM1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5z M1664 -128q0 -53 -37.5 -90.5t-90.5 -37.5q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z " /> |  | ||||||
| <glyph unicode="" d="M1536 640q0 -156 -61 -298t-164 -245t-245 -164t-298 -61t-298 61t-245 164t-164 245t-61 298q0 182 80.5 343t226.5 270q43 32 95.5 25t83.5 -50q32 -42 24.5 -94.5t-49.5 -84.5q-98 -74 -151.5 -181t-53.5 -228q0 -104 40.5 -198.5t109.5 -163.5t163.5 -109.5 t198.5 -40.5t198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5q0 121 -53.5 228t-151.5 181q-42 32 -49.5 84.5t24.5 94.5q31 43 84 50t95 -25q146 -109 226.5 -270t80.5 -343zM896 1408v-640q0 -52 -38 -90t-90 -38t-90 38t-38 90v640q0 52 38 90t90 38t90 -38t38 -90z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M256 96v-192q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM640 224v-320q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v320q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1024 480v-576q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23 v576q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1408 864v-960q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v960q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1792 1376v-1472q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v1472q0 14 9 23t23 9h192q14 0 23 -9t9 -23z" /> |  | ||||||
| <glyph unicode="" d="M1024 640q0 106 -75 181t-181 75t-181 -75t-75 -181t75 -181t181 -75t181 75t75 181zM1536 749v-222q0 -12 -8 -23t-20 -13l-185 -28q-19 -54 -39 -91q35 -50 107 -138q10 -12 10 -25t-9 -23q-27 -37 -99 -108t-94 -71q-12 0 -26 9l-138 108q-44 -23 -91 -38 q-16 -136 -29 -186q-7 -28 -36 -28h-222q-14 0 -24.5 8.5t-11.5 21.5l-28 184q-49 16 -90 37l-141 -107q-10 -9 -25 -9q-14 0 -25 11q-126 114 -165 168q-7 10 -7 23q0 12 8 23q15 21 51 66.5t54 70.5q-27 50 -41 99l-183 27q-13 2 -21 12.5t-8 23.5v222q0 12 8 23t19 13 l186 28q14 46 39 92q-40 57 -107 138q-10 12 -10 24q0 10 9 23q26 36 98.5 107.5t94.5 71.5q13 0 26 -10l138 -107q44 23 91 38q16 136 29 186q7 28 36 28h222q14 0 24.5 -8.5t11.5 -21.5l28 -184q49 -16 90 -37l142 107q9 9 24 9q13 0 25 -10q129 -119 165 -170q7 -8 7 -22 q0 -12 -8 -23q-15 -21 -51 -66.5t-54 -70.5q26 -50 41 -98l183 -28q13 -2 21 -12.5t8 -23.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M512 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM768 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1024 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576 q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1152 76v948h-896v-948q0 -22 7 -40.5t14.5 -27t10.5 -8.5h832q3 0 10.5 8.5t14.5 27t7 40.5zM480 1152h448l-48 117q-7 9 -17 11h-317q-10 -2 -17 -11zM1408 1120v-64q0 -14 -9 -23t-23 -9h-96v-948q0 -83 -47 -143.5t-113 -60.5h-832 q-66 0 -113 58.5t-47 141.5v952h-96q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h309l70 167q15 37 54 63t79 26h320q40 0 79 -26t54 -63l70 -167h309q14 0 23 -9t9 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1408 544v-480q0 -26 -19 -45t-45 -19h-384v384h-256v-384h-384q-26 0 -45 19t-19 45v480q0 1 0.5 3t0.5 3l575 474l575 -474q1 -2 1 -6zM1631 613l-62 -74q-8 -9 -21 -11h-3q-13 0 -21 7l-692 577l-692 -577q-12 -8 -24 -7q-13 2 -21 11l-62 74q-8 10 -7 23.5t11 21.5 l719 599q32 26 76 26t76 -26l244 -204v195q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-408l219 -182q10 -8 11 -21.5t-7 -23.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1280" d="M128 0h1024v768h-416q-40 0 -68 28t-28 68v416h-512v-1280zM768 896h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376zM1280 864v-896q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h640q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88z " /> |  | ||||||
| <glyph unicode="" d="M896 992v-448q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h224v352q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M1111 540v4l-24 320q-1 13 -11 22.5t-23 9.5h-186q-13 0 -23 -9.5t-11 -22.5l-24 -320v-4q-1 -12 8 -20t21 -8h244q12 0 21 8t8 20zM1870 73q0 -73 -46 -73h-704q13 0 22 9.5t8 22.5l-20 256q-1 13 -11 22.5t-23 9.5h-272q-13 0 -23 -9.5t-11 -22.5l-20 -256 q-1 -13 8 -22.5t22 -9.5h-704q-46 0 -46 73q0 54 26 116l417 1044q8 19 26 33t38 14h339q-13 0 -23 -9.5t-11 -22.5l-15 -192q-1 -14 8 -23t22 -9h166q13 0 22 9t8 23l-15 192q-1 13 -11 22.5t-23 9.5h339q20 0 38 -14t26 -33l417 -1044q26 -62 26 -116z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1280 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1536 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 416v-320q0 -40 -28 -68t-68 -28h-1472q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h465l135 -136 q58 -56 136 -56t136 56l136 136h464q40 0 68 -28t28 -68zM1339 985q17 -41 -14 -70l-448 -448q-18 -19 -45 -19t-45 19l-448 448q-31 29 -14 70q17 39 59 39h256v448q0 26 19 45t45 19h256q26 0 45 -19t19 -45v-448h256q42 0 59 -39z" /> |  | ||||||
| <glyph unicode="" d="M1120 608q0 -12 -10 -24l-319 -319q-11 -9 -23 -9t-23 9l-320 320q-15 16 -7 35q8 20 30 20h192v352q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-352h192q14 0 23 -9t9 -23zM768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273 t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" d="M1118 660q-8 -20 -30 -20h-192v-352q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v352h-192q-14 0 -23 9t-9 23q0 12 10 24l319 319q11 9 23 9t23 -9l320 -320q15 -16 7 -35zM768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198 t73 273t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" d="M1023 576h316q-1 3 -2.5 8t-2.5 8l-212 496h-708l-212 -496q-1 -2 -2.5 -8t-2.5 -8h316l95 -192h320zM1536 546v-482q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v482q0 62 25 123l238 552q10 25 36.5 42t52.5 17h832q26 0 52.5 -17t36.5 -42l238 -552 q25 -61 25 -123z" /> |  | ||||||
| <glyph unicode="" d="M1184 640q0 -37 -32 -55l-544 -320q-15 -9 -32 -9q-16 0 -32 8q-32 19 -32 56v640q0 37 32 56q33 18 64 -1l544 -320q32 -18 32 -55zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" d="M1536 1280v-448q0 -26 -19 -45t-45 -19h-448q-42 0 -59 40q-17 39 14 69l138 138q-148 137 -349 137q-104 0 -198.5 -40.5t-163.5 -109.5t-109.5 -163.5t-40.5 -198.5t40.5 -198.5t109.5 -163.5t163.5 -109.5t198.5 -40.5q119 0 225 52t179 147q7 10 23 12q14 0 25 -9 l137 -138q9 -8 9.5 -20.5t-7.5 -22.5q-109 -132 -264 -204.5t-327 -72.5q-156 0 -298 61t-245 164t-164 245t-61 298t61 298t164 245t245 164t298 61q147 0 284.5 -55.5t244.5 -156.5l130 129q29 31 70 14q39 -17 39 -59z" /> |  | ||||||
| <glyph unicode="" d="M1511 480q0 -5 -1 -7q-64 -268 -268 -434.5t-478 -166.5q-146 0 -282.5 55t-243.5 157l-129 -129q-19 -19 -45 -19t-45 19t-19 45v448q0 26 19 45t45 19h448q26 0 45 -19t19 -45t-19 -45l-137 -137q71 -66 161 -102t187 -36q134 0 250 65t186 179q11 17 53 117 q8 23 30 23h192q13 0 22.5 -9.5t9.5 -22.5zM1536 1280v-448q0 -26 -19 -45t-45 -19h-448q-26 0 -45 19t-19 45t19 45l138 138q-148 137 -349 137q-134 0 -250 -65t-186 -179q-11 -17 -53 -117q-8 -23 -30 -23h-199q-13 0 -22.5 9.5t-9.5 22.5v7q65 268 270 434.5t480 166.5 q146 0 284 -55.5t245 -156.5l130 129q19 19 45 19t45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M384 352v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 608v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M384 864v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1536 352v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h960q13 0 22.5 -9.5t9.5 -22.5z M1536 608v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h960q13 0 22.5 -9.5t9.5 -22.5zM1536 864v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h960q13 0 22.5 -9.5 t9.5 -22.5zM1664 160v832q0 13 -9.5 22.5t-22.5 9.5h-1472q-13 0 -22.5 -9.5t-9.5 -22.5v-832q0 -13 9.5 -22.5t22.5 -9.5h1472q13 0 22.5 9.5t9.5 22.5zM1792 1248v-1088q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1472q66 0 113 -47 t47 -113z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1152" d="M320 768h512v192q0 106 -75 181t-181 75t-181 -75t-75 -181v-192zM1152 672v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v576q0 40 28 68t68 28h32v192q0 184 132 316t316 132t316 -132t132 -316v-192h32q40 0 68 -28t28 -68z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M320 1280q0 -72 -64 -110v-1266q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v1266q-64 38 -64 110q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -25 -12.5 -38.5t-39.5 -27.5q-215 -116 -369 -116q-61 0 -123.5 22t-108.5 48 t-115.5 48t-142.5 22q-192 0 -464 -146q-17 -9 -33 -9q-26 0 -45 19t-19 45v742q0 32 31 55q21 14 79 43q236 120 421 120q107 0 200 -29t219 -88q38 -19 88 -19q54 0 117.5 21t110 47t88 47t54.5 21q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1664 650q0 -166 -60 -314l-20 -49l-185 -33q-22 -83 -90.5 -136.5t-156.5 -53.5v-32q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-32q71 0 130 -35.5t93 -95.5l68 12q29 95 29 193q0 148 -88 279t-236.5 209t-315.5 78 t-315.5 -78t-236.5 -209t-88 -279q0 -98 29 -193l68 -12q34 60 93 95.5t130 35.5v32q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v32q-88 0 -156.5 53.5t-90.5 136.5l-185 33l-20 49q-60 148 -60 314q0 151 67 291t179 242.5 t266 163.5t320 61t320 -61t266 -163.5t179 -242.5t67 -291z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="768" d="M768 1184v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1152" d="M768 1184v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45zM1152 640q0 -76 -42.5 -141.5t-112.5 -93.5q-10 -5 -25 -5q-26 0 -45 18.5t-19 45.5q0 21 12 35.5t29 25t34 23t29 35.5 t12 57t-12 57t-29 35.5t-34 23t-29 25t-12 35.5q0 27 19 45.5t45 18.5q15 0 25 -5q70 -27 112.5 -93t42.5 -142z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M768 1184v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45zM1152 640q0 -76 -42.5 -141.5t-112.5 -93.5q-10 -5 -25 -5q-26 0 -45 18.5t-19 45.5q0 21 12 35.5t29 25t34 23t29 35.5 t12 57t-12 57t-29 35.5t-34 23t-29 25t-12 35.5q0 27 19 45.5t45 18.5q15 0 25 -5q70 -27 112.5 -93t42.5 -142zM1408 640q0 -153 -85 -282.5t-225 -188.5q-13 -5 -25 -5q-27 0 -46 19t-19 45q0 39 39 59q56 29 76 44q74 54 115.5 135.5t41.5 173.5t-41.5 173.5 t-115.5 135.5q-20 15 -76 44q-39 20 -39 59q0 26 19 45t45 19q13 0 26 -5q140 -59 225 -188.5t85 -282.5zM1664 640q0 -230 -127 -422.5t-338 -283.5q-13 -5 -26 -5q-26 0 -45 19t-19 45q0 36 39 59q7 4 22.5 10.5t22.5 10.5q46 25 82 51q123 91 192 227t69 289t-69 289 t-192 227q-36 26 -82 51q-7 4 -22.5 10.5t-22.5 10.5q-39 23 -39 59q0 26 19 45t45 19q13 0 26 -5q211 -91 338 -283.5t127 -422.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M384 384v-128h-128v128h128zM384 1152v-128h-128v128h128zM1152 1152v-128h-128v128h128zM128 129h384v383h-384v-383zM128 896h384v384h-384v-384zM896 896h384v384h-384v-384zM640 640v-640h-640v640h640zM1152 128v-128h-128v128h128zM1408 128v-128h-128v128h128z M1408 640v-384h-384v128h-128v-384h-128v640h384v-128h128v128h128zM640 1408v-640h-640v640h640zM1408 1408v-640h-640v640h640z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M63 0h-63v1408h63v-1408zM126 1h-32v1407h32v-1407zM220 1h-31v1407h31v-1407zM377 1h-31v1407h31v-1407zM534 1h-62v1407h62v-1407zM660 1h-31v1407h31v-1407zM723 1h-31v1407h31v-1407zM786 1h-31v1407h31v-1407zM943 1h-63v1407h63v-1407zM1100 1h-63v1407h63v-1407z M1226 1h-63v1407h63v-1407zM1352 1h-63v1407h63v-1407zM1446 1h-63v1407h63v-1407zM1635 1h-94v1407h94v-1407zM1698 1h-32v1407h32v-1407zM1792 0h-63v1408h63v-1408z" /> |  | ||||||
| <glyph unicode="" d="M448 1088q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1515 512q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-53 0 -90 37l-715 716q-38 37 -64.5 101t-26.5 117v416q0 52 38 90t90 38h416q53 0 117 -26.5t102 -64.5 l715 -714q37 -39 37 -91z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M448 1088q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1515 512q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-53 0 -90 37l-715 716q-38 37 -64.5 101t-26.5 117v416q0 52 38 90t90 38h416q53 0 117 -26.5t102 -64.5 l715 -714q37 -39 37 -91zM1899 512q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-36 0 -59 14t-53 45l470 470q37 37 37 90q0 52 -37 91l-715 714q-38 38 -102 64.5t-117 26.5h224q53 0 117 -26.5t102 -64.5l715 -714q37 -39 37 -91z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1639 1058q40 -57 18 -129l-275 -906q-19 -64 -76.5 -107.5t-122.5 -43.5h-923q-77 0 -148.5 53.5t-99.5 131.5q-24 67 -2 127q0 4 3 27t4 37q1 8 -3 21.5t-3 19.5q2 11 8 21t16.5 23.5t16.5 23.5q23 38 45 91.5t30 91.5q3 10 0.5 30t-0.5 28q3 11 17 28t17 23 q21 36 42 92t25 90q1 9 -2.5 32t0.5 28q4 13 22 30.5t22 22.5q19 26 42.5 84.5t27.5 96.5q1 8 -3 25.5t-2 26.5q2 8 9 18t18 23t17 21q8 12 16.5 30.5t15 35t16 36t19.5 32t26.5 23.5t36 11.5t47.5 -5.5l-1 -3q38 9 51 9h761q74 0 114 -56t18 -130l-274 -906 q-36 -119 -71.5 -153.5t-128.5 -34.5h-869q-27 0 -38 -15q-11 -16 -1 -43q24 -70 144 -70h923q29 0 56 15.5t35 41.5l300 987q7 22 5 57q38 -15 59 -43zM575 1056q-4 -13 2 -22.5t20 -9.5h608q13 0 25.5 9.5t16.5 22.5l21 64q4 13 -2 22.5t-20 9.5h-608q-13 0 -25.5 -9.5 t-16.5 -22.5zM492 800q-4 -13 2 -22.5t20 -9.5h608q13 0 25.5 9.5t16.5 22.5l21 64q4 13 -2 22.5t-20 9.5h-608q-13 0 -25.5 -9.5t-16.5 -22.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1280" d="M1164 1408q23 0 44 -9q33 -13 52.5 -41t19.5 -62v-1289q0 -34 -19.5 -62t-52.5 -41q-19 -8 -44 -8q-48 0 -83 32l-441 424l-441 -424q-36 -33 -83 -33q-23 0 -44 9q-33 13 -52.5 41t-19.5 62v1289q0 34 19.5 62t52.5 41q21 9 44 9h1048z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M384 0h896v256h-896v-256zM384 640h896v384h-160q-40 0 -68 28t-28 68v160h-640v-640zM1536 576q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 576v-416q0 -13 -9.5 -22.5t-22.5 -9.5h-224v-160q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68 v160h-224q-13 0 -22.5 9.5t-9.5 22.5v416q0 79 56.5 135.5t135.5 56.5h64v544q0 40 28 68t68 28h672q40 0 88 -20t76 -48l152 -152q28 -28 48 -76t20 -88v-256h64q79 0 135.5 -56.5t56.5 -135.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M960 864q119 0 203.5 -84.5t84.5 -203.5t-84.5 -203.5t-203.5 -84.5t-203.5 84.5t-84.5 203.5t84.5 203.5t203.5 84.5zM1664 1280q106 0 181 -75t75 -181v-896q0 -106 -75 -181t-181 -75h-1408q-106 0 -181 75t-75 181v896q0 106 75 181t181 75h224l51 136 q19 49 69.5 84.5t103.5 35.5h512q53 0 103.5 -35.5t69.5 -84.5l51 -136h224zM960 128q185 0 316.5 131.5t131.5 316.5t-131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M725 977l-170 -450q73 -1 153.5 -2t119 -1.5t52.5 -0.5l29 2q-32 95 -92 241q-53 132 -92 211zM21 -128h-21l2 79q22 7 80 18q89 16 110 31q20 16 48 68l237 616l280 724h75h53l11 -21l205 -480q103 -242 124 -297q39 -102 96 -235q26 -58 65 -164q24 -67 65 -149 q22 -49 35 -57q22 -19 69 -23q47 -6 103 -27q6 -39 6 -57q0 -14 -1 -26q-80 0 -192 8q-93 8 -189 8q-79 0 -135 -2l-200 -11l-58 -2q0 45 4 78l131 28q56 13 68 23q12 12 12 27t-6 32l-47 114l-92 228l-450 2q-29 -65 -104 -274q-23 -64 -23 -84q0 -31 17 -43 q26 -21 103 -32q3 0 13.5 -2t30 -5t40.5 -6q1 -28 1 -58q0 -17 -2 -27q-66 0 -349 20l-48 -8q-81 -14 -167 -14z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M555 15q76 -32 140 -32q131 0 216 41t122 113q38 70 38 181q0 114 -41 180q-58 94 -141 126q-80 32 -247 32q-74 0 -101 -10v-144l-1 -173l3 -270q0 -15 12 -44zM541 761q43 -7 109 -7q175 0 264 65t89 224q0 112 -85 187q-84 75 -255 75q-52 0 -130 -13q0 -44 2 -77 q7 -122 6 -279l-1 -98q0 -43 1 -77zM0 -128l2 94q45 9 68 12q77 12 123 31q17 27 21 51q9 66 9 194l-2 497q-5 256 -9 404q-1 87 -11 109q-1 4 -12 12q-18 12 -69 15q-30 2 -114 13l-4 83l260 6l380 13l45 1q5 0 14 0.5t14 0.5q1 0 21.5 -0.5t40.5 -0.5h74q88 0 191 -27 q43 -13 96 -39q57 -29 102 -76q44 -47 65 -104t21 -122q0 -70 -32 -128t-95 -105q-26 -20 -150 -77q177 -41 267 -146q92 -106 92 -236q0 -76 -29 -161q-21 -62 -71 -117q-66 -72 -140 -108q-73 -36 -203 -60q-82 -15 -198 -11l-197 4q-84 2 -298 -11q-33 -3 -272 -11z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M0 -126l17 85q4 1 77 20q76 19 116 39q29 37 41 101l27 139l56 268l12 64q8 44 17 84.5t16 67t12.5 46.5t9 30.5t3.5 11.5l29 157l16 63l22 135l8 50v38q-41 22 -144 28q-28 2 -38 4l19 103l317 -14q39 -2 73 -2q66 0 214 9q33 2 68 4.5t36 2.5q-2 -19 -6 -38 q-7 -29 -13 -51q-55 -19 -109 -31q-64 -16 -101 -31q-12 -31 -24 -88q-9 -44 -13 -82q-44 -199 -66 -306l-61 -311l-38 -158l-43 -235l-12 -45q-2 -7 1 -27q64 -15 119 -21q36 -5 66 -10q-1 -29 -7 -58q-7 -31 -9 -41q-18 0 -23 -1q-24 -2 -42 -2q-9 0 -28 3q-19 4 -145 17 l-198 2q-41 1 -174 -11q-74 -7 -98 -9z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M81 1407l54 -27q20 -5 211 -5h130l19 3l115 1l215 -1h293l34 -2q14 -1 28 7t21 16l7 8l42 1q15 0 28 -1v-104.5t1 -131.5l1 -100l-1 -58q0 -32 -4 -51q-39 -15 -68 -18q-25 43 -54 128q-8 24 -15.5 62.5t-11.5 65.5t-6 29q-13 15 -27 19q-7 2 -42.5 2t-103.5 -1t-111 -1 q-34 0 -67 -5q-10 -97 -8 -136l1 -152v-332l3 -359l-1 -147q-1 -46 11 -85q49 -25 89 -32q2 0 18 -5t44 -13t43 -12q30 -8 50 -18q5 -45 5 -50q0 -10 -3 -29q-14 -1 -34 -1q-110 0 -187 10q-72 8 -238 8q-88 0 -233 -14q-48 -4 -70 -4q-2 22 -2 26l-1 26v9q21 33 79 49 q139 38 159 50q9 21 12 56q8 192 6 433l-5 428q-1 62 -0.5 118.5t0.5 102.5t-2 57t-6 15q-6 5 -14 6q-38 6 -148 6q-43 0 -100 -13.5t-73 -24.5q-13 -9 -22 -33t-22 -75t-24 -84q-6 -19 -19.5 -32t-20.5 -13q-44 27 -56 44v297v86zM1744 128q33 0 42 -18.5t-11 -44.5 l-126 -162q-20 -26 -49 -26t-49 26l-126 162q-20 26 -11 44.5t42 18.5h80v1024h-80q-33 0 -42 18.5t11 44.5l126 162q20 26 49 26t49 -26l126 -162q20 -26 11 -44.5t-42 -18.5h-80v-1024h80z" /> |  | ||||||
| <glyph unicode="" d="M81 1407l54 -27q20 -5 211 -5h130l19 3l115 1l446 -1h318l34 -2q14 -1 28 7t21 16l7 8l42 1q15 0 28 -1v-104.5t1 -131.5l1 -100l-1 -58q0 -32 -4 -51q-39 -15 -68 -18q-25 43 -54 128q-8 24 -15.5 62.5t-11.5 65.5t-6 29q-13 15 -27 19q-7 2 -58.5 2t-138.5 -1t-128 -1 q-94 0 -127 -5q-10 -97 -8 -136l1 -152v52l3 -359l-1 -147q-1 -46 11 -85q49 -25 89 -32q2 0 18 -5t44 -13t43 -12q30 -8 50 -18q5 -45 5 -50q0 -10 -3 -29q-14 -1 -34 -1q-110 0 -187 10q-72 8 -238 8q-82 0 -233 -13q-45 -5 -70 -5q-2 22 -2 26l-1 26v9q21 33 79 49 q139 38 159 50q9 21 12 56q6 137 6 433l-5 44q0 265 -2 278q-2 11 -6 15q-6 5 -14 6q-38 6 -148 6q-50 0 -168.5 -14t-132.5 -24q-13 -9 -22 -33t-22 -75t-24 -84q-6 -19 -19.5 -32t-20.5 -13q-44 27 -56 44v297v86zM1505 113q26 -20 26 -49t-26 -49l-162 -126 q-26 -20 -44.5 -11t-18.5 42v80h-1024v-80q0 -33 -18.5 -42t-44.5 11l-162 126q-26 20 -26 49t26 49l162 126q26 20 44.5 11t18.5 -42v-80h1024v80q0 33 18.5 42t44.5 -11z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1408 576v-128q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1280q26 0 45 -19t19 -45zM1664 960v-128q0 -26 -19 -45 t-45 -19h-1536q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1536q26 0 45 -19t19 -45zM1280 1344v-128q0 -26 -19 -45t-45 -19h-1152q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1408 576v-128q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h896q26 0 45 -19t19 -45zM1664 960v-128q0 -26 -19 -45t-45 -19 h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1280 1344v-128q0 -26 -19 -45t-45 -19h-640q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h640q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 576v-128q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1280q26 0 45 -19t19 -45zM1792 960v-128q0 -26 -19 -45 t-45 -19h-1536q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1536q26 0 45 -19t19 -45zM1792 1344v-128q0 -26 -19 -45t-45 -19h-1152q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" /> |  | ||||||
| [SVG webfont glyph data: ~120 `<glyph>` elements of path outline definitions from the diffed font file; the `unicode` attributes are empty in this extraction and the preview/table columns carried no content, so the raw path data is omitted here] |
| <glyph unicode="" horiz-adv-x="1408" d="M1404 151q0 -117 -79 -196t-196 -79q-135 0 -235 100l-777 776q-113 115 -113 271q0 159 110 270t269 111q158 0 273 -113l605 -606q10 -10 10 -22q0 -16 -30.5 -46.5t-46.5 -30.5q-13 0 -23 10l-606 607q-79 77 -181 77q-106 0 -179 -75t-73 -181q0 -105 76 -181 l776 -777q63 -63 145 -63q64 0 106 42t42 106q0 82 -63 145l-581 581q-26 24 -60 24q-29 0 -48 -19t-19 -48q0 -32 25 -59l410 -410q10 -10 10 -22q0 -16 -31 -47t-47 -31q-12 0 -22 10l-410 410q-63 61 -63 149q0 82 57 139t139 57q88 0 149 -63l581 -581q100 -98 100 -235 z" /> |  | ||||||
| <glyph unicode="" d="M384 0h768v384h-768v-384zM1280 0h128v896q0 14 -10 38.5t-20 34.5l-281 281q-10 10 -34 20t-39 10v-416q0 -40 -28 -68t-68 -28h-576q-40 0 -68 28t-28 68v416h-128v-1280h128v416q0 40 28 68t68 28h832q40 0 68 -28t28 -68v-416zM896 928v320q0 13 -9.5 22.5t-22.5 9.5 h-192q-13 0 -22.5 -9.5t-9.5 -22.5v-320q0 -13 9.5 -22.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 22.5zM1536 896v-928q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h928q40 0 88 -20t76 -48l280 -280q28 -28 48 -76t20 -88z" /> |  | ||||||
| <glyph unicode="" d="M1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" d="M1536 192v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1536 704v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1536 1216v-128q0 -26 -19 -45 t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M384 128q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM384 640q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5 t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5zM384 1152q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1792 736v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5z M1792 1248v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M381 -84q0 -80 -54.5 -126t-135.5 -46q-106 0 -172 66l57 88q49 -45 106 -45q29 0 50.5 14.5t21.5 42.5q0 64 -105 56l-26 56q8 10 32.5 43.5t42.5 54t37 38.5v1q-16 0 -48.5 -1t-48.5 -1v-53h-106v152h333v-88l-95 -115q51 -12 81 -49t30 -88zM383 543v-159h-362 q-6 36 -6 54q0 51 23.5 93t56.5 68t66 47.5t56.5 43.5t23.5 45q0 25 -14.5 38.5t-39.5 13.5q-46 0 -81 -58l-85 59q24 51 71.5 79.5t105.5 28.5q73 0 123 -41.5t50 -112.5q0 -50 -34 -91.5t-75 -64.5t-75.5 -50.5t-35.5 -52.5h127v60h105zM1792 224v-192q0 -13 -9.5 -22.5 t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 14 9 23t23 9h1216q13 0 22.5 -9.5t9.5 -22.5zM384 1123v-99h-335v99h107q0 41 0.5 122t0.5 121v12h-2q-8 -17 -50 -54l-71 76l136 127h106v-404h108zM1792 736v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5 t-9.5 22.5v192q0 14 9 23t23 9h1216q13 0 22.5 -9.5t9.5 -22.5zM1792 1248v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1760 640q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-1728q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h1728zM483 704q-28 35 -51 80q-48 97 -48 188q0 181 134 309q133 127 393 127q50 0 167 -19q66 -12 177 -48q10 -38 21 -118q14 -123 14 -183q0 -18 -5 -45l-12 -3l-84 6 l-14 2q-50 149 -103 205q-88 91 -210 91q-114 0 -182 -59q-67 -58 -67 -146q0 -73 66 -140t279 -129q69 -20 173 -66q58 -28 95 -52h-743zM990 448h411q7 -39 7 -92q0 -111 -41 -212q-23 -55 -71 -104q-37 -35 -109 -81q-80 -48 -153 -66q-80 -21 -203 -21q-114 0 -195 23 l-140 40q-57 16 -72 28q-8 8 -8 22v13q0 108 -2 156q-1 30 0 68l2 37v44l102 2q15 -34 30 -71t22.5 -56t12.5 -27q35 -57 80 -94q43 -36 105 -57q59 -22 132 -22q64 0 139 27q77 26 122 86q47 61 47 129q0 84 -81 157q-34 29 -137 71z" /> |  | ||||||
| <glyph unicode="" d="M48 1313q-37 2 -45 4l-3 88q13 1 40 1q60 0 112 -4q132 -7 166 -7q86 0 168 3q116 4 146 5q56 0 86 2l-1 -14l2 -64v-9q-60 -9 -124 -9q-60 0 -79 -25q-13 -14 -13 -132q0 -13 0.5 -32.5t0.5 -25.5l1 -229l14 -280q6 -124 51 -202q35 -59 96 -92q88 -47 177 -47 q104 0 191 28q56 18 99 51q48 36 65 64q36 56 53 114q21 73 21 229q0 79 -3.5 128t-11 122.5t-13.5 159.5l-4 59q-5 67 -24 88q-34 35 -77 34l-100 -2l-14 3l2 86h84l205 -10q76 -3 196 10l18 -2q6 -38 6 -51q0 -7 -4 -31q-45 -12 -84 -13q-73 -11 -79 -17q-15 -15 -15 -41 q0 -7 1.5 -27t1.5 -31q8 -19 22 -396q6 -195 -15 -304q-15 -76 -41 -122q-38 -65 -112 -123q-75 -57 -182 -89q-109 -33 -255 -33q-167 0 -284 46q-119 47 -179 122q-61 76 -83 195q-16 80 -16 237v333q0 188 -17 213q-25 36 -147 39zM1536 -96v64q0 14 -9 23t-23 9h-1472 q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h1472q14 0 23 9t9 23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M512 160v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM512 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 160v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23 v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM512 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1536 160v192 q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1536 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192 q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1536 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1664 1248v-1088q0 -66 -47 -113t-113 -47h-1344q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1344q66 0 113 -47t47 -113 z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1190 955l293 293l-107 107l-293 -293zM1637 1248q0 -27 -18 -45l-1286 -1286q-18 -18 -45 -18t-45 18l-198 198q-18 18 -18 45t18 45l1286 1286q18 18 45 18t45 -18l198 -198q18 -18 18 -45zM286 1438l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98zM636 1276 l196 -60l-196 -60l-60 -196l-60 196l-196 60l196 60l60 196zM1566 798l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98zM926 1438l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M640 128q0 52 -38 90t-90 38t-90 -38t-38 -90t38 -90t90 -38t90 38t38 90zM256 640h384v256h-158q-13 0 -22 -9l-195 -195q-9 -9 -9 -22v-30zM1536 128q0 52 -38 90t-90 38t-90 -38t-38 -90t38 -90t90 -38t90 38t38 90zM1792 1216v-1024q0 -15 -4 -26.5t-13.5 -18.5 t-16.5 -11.5t-23.5 -6t-22.5 -2t-25.5 0t-22.5 0.5q0 -106 -75 -181t-181 -75t-181 75t-75 181h-384q0 -106 -75 -181t-181 -75t-181 75t-75 181h-64q-3 0 -22.5 -0.5t-25.5 0t-22.5 2t-23.5 6t-16.5 11.5t-13.5 18.5t-4 26.5q0 26 19 45t45 19v320q0 8 -0.5 35t0 38 t2.5 34.5t6.5 37t14 30.5t22.5 30l198 198q19 19 50.5 32t58.5 13h160v192q0 26 19 45t45 19h1024q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" d="M1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103q-111 0 -218 32q59 93 78 164q9 34 54 211q20 -39 73 -67.5t114 -28.5q121 0 216 68.5t147 188.5t52 270q0 114 -59.5 214t-172.5 163t-255 63q-105 0 -196 -29t-154.5 -77t-109 -110.5t-67 -129.5t-21.5 -134 q0 -104 40 -183t117 -111q30 -12 38 20q2 7 8 31t8 30q6 23 -11 43q-51 61 -51 151q0 151 104.5 259.5t273.5 108.5q151 0 235.5 -82t84.5 -213q0 -170 -68.5 -289t-175.5 -119q-61 0 -98 43.5t-23 104.5q8 35 26.5 93.5t30 103t11.5 75.5q0 50 -27 83t-77 33 q-62 0 -105 -57t-43 -142q0 -73 25 -122l-99 -418q-17 -70 -13 -177q-206 91 -333 281t-127 423q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" d="M1248 1408q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-725q85 122 108 210q9 34 53 209q21 -39 73.5 -67t112.5 -28q181 0 295.5 147.5t114.5 373.5q0 84 -35 162.5t-96.5 139t-152.5 97t-197 36.5q-104 0 -194.5 -28.5t-153 -76.5 t-107.5 -109.5t-66.5 -128t-21.5 -132.5q0 -102 39.5 -180t116.5 -110q13 -5 23.5 0t14.5 19q10 44 15 61q6 23 -11 42q-50 62 -50 150q0 150 103.5 256.5t270.5 106.5q149 0 232.5 -81t83.5 -210q0 -168 -67.5 -286t-173.5 -118q-60 0 -97 43.5t-23 103.5q8 34 26.5 92.5 t29.5 102t11 74.5q0 49 -26.5 81.5t-75.5 32.5q-61 0 -103.5 -56.5t-42.5 -139.5q0 -72 24 -121l-98 -414q-24 -100 -7 -254h-183q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960z" /> |  | ||||||
| <glyph unicode="" d="M678 -57q0 -38 -10 -71h-380q-95 0 -171.5 56.5t-103.5 147.5q24 45 69 77.5t100 49.5t107 24t107 7q32 0 49 -2q6 -4 30.5 -21t33 -23t31 -23t32 -25.5t27.5 -25.5t26.5 -29.5t21 -30.5t17.5 -34.5t9.5 -36t4.5 -40.5zM385 294q-234 -7 -385 -85v433q103 -118 273 -118 q32 0 70 5q-21 -61 -21 -86q0 -67 63 -149zM558 805q0 -100 -43.5 -160.5t-140.5 -60.5q-51 0 -97 26t-78 67.5t-56 93.5t-35.5 104t-11.5 99q0 96 51.5 165t144.5 69q66 0 119 -41t84 -104t47 -130t16 -128zM1536 896v-736q0 -119 -84.5 -203.5t-203.5 -84.5h-468 q39 73 39 157q0 66 -22 122.5t-55.5 93t-72 71t-72 59.5t-55.5 54.5t-22 59.5q0 36 23 68t56 61.5t65.5 64.5t55.5 93t23 131t-26.5 145.5t-75.5 118.5q-6 6 -14 11t-12.5 7.5t-10 9.5t-10.5 17h135l135 64h-437q-138 0 -244.5 -38.5t-182.5 -133.5q0 126 81 213t207 87h960 q119 0 203.5 -84.5t84.5 -203.5v-96h-256v256h-128v-256h-256v-128h256v-256h128v256h256z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M876 71q0 21 -4.5 40.5t-9.5 36t-17.5 34.5t-21 30.5t-26.5 29.5t-27.5 25.5t-32 25.5t-31 23t-33 23t-30.5 21q-17 2 -50 2q-54 0 -106 -7t-108 -25t-98 -46t-69 -75t-27 -107q0 -68 35.5 -121.5t93 -84t120.5 -45.5t127 -15q59 0 112.5 12.5t100.5 39t74.5 73.5 t27.5 110zM756 933q0 60 -16.5 127.5t-47 130.5t-84 104t-119.5 41q-93 0 -144 -69t-51 -165q0 -47 11.5 -99t35.5 -104t56 -93.5t78 -67.5t97 -26q97 0 140.5 60.5t43.5 160.5zM625 1408h437l-135 -79h-135q71 -45 110 -126t39 -169q0 -74 -23 -131.5t-56 -92.5t-66 -64.5 t-56 -61t-23 -67.5q0 -26 16.5 -51t43 -48t58.5 -48t64 -55.5t58.5 -66t43 -85t16.5 -106.5q0 -160 -140 -282q-152 -131 -420 -131q-59 0 -119.5 10t-122 33.5t-108.5 58t-77 89t-30 121.5q0 61 37 135q32 64 96 110.5t145 71t155 36t150 13.5q-64 83 -64 149q0 12 2 23.5 t5 19.5t8 21.5t7 21.5q-40 -5 -70 -5q-149 0 -255.5 98t-106.5 246q0 140 95 250.5t234 141.5q94 20 187 20zM1664 1152v-128h-256v-256h-128v256h-256v128h256v256h128v-256h256z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M768 384h384v96h-128v448h-114l-148 -137l77 -80q42 37 55 57h2v-288h-128v-96zM1280 640q0 -70 -21 -142t-59.5 -134t-101.5 -101t-138 -39t-138 39t-101.5 101t-59.5 134t-21 142t21 142t59.5 134t101.5 101t138 39t138 -39t101.5 -101t59.5 -134t21 -142zM1792 384 v512q-106 0 -181 75t-75 181h-1152q0 -106 -75 -181t-181 -75v-512q106 0 181 -75t75 -181h1152q0 106 75 181t181 75zM1920 1216v-1152q0 -26 -19 -45t-45 -19h-1792q-26 0 -45 19t-19 45v1152q0 26 19 45t45 19h1792q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M1024 832q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M1024 320q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="640" d="M640 1088v-896q0 -26 -19 -45t-45 -19t-45 19l-448 448q-19 19 -19 45t19 45l448 448q19 19 45 19t45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="640" d="M576 640q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19t-19 45v896q0 26 19 45t45 19t45 -19l448 -448q19 -19 19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M160 0h608v1152h-640v-1120q0 -13 9.5 -22.5t22.5 -9.5zM1536 32v1120h-640v-1152h608q13 0 22.5 9.5t9.5 22.5zM1664 1248v-1216q0 -66 -47 -113t-113 -47h-1344q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1344q66 0 113 -47t47 -113z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M1024 448q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45zM1024 832q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M1024 448q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M1024 832q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 826v-794q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v794q44 -49 101 -87q362 -246 497 -345q57 -42 92.5 -65.5t94.5 -48t110 -24.5h1h1q51 0 110 24.5t94.5 48t92.5 65.5q170 123 498 345q57 39 100 87zM1792 1120q0 -79 -49 -151t-122 -123 q-376 -261 -468 -325q-10 -7 -42.5 -30.5t-54 -38t-52 -32.5t-57.5 -27t-50 -9h-1h-1q-23 0 -50 9t-57.5 27t-52 32.5t-54 38t-42.5 30.5q-91 64 -262 182.5t-205 142.5q-62 42 -117 115.5t-55 136.5q0 78 41.5 130t118.5 52h1472q65 0 112.5 -47t47.5 -113z" /> |  | ||||||
| <glyph unicode="" d="M349 911v-991h-330v991h330zM370 1217q1 -73 -50.5 -122t-135.5 -49h-2q-82 0 -132 49t-50 122q0 74 51.5 122.5t134.5 48.5t133 -48.5t51 -122.5zM1536 488v-568h-329v530q0 105 -40.5 164.5t-126.5 59.5q-63 0 -105.5 -34.5t-63.5 -85.5q-11 -30 -11 -81v-553h-329 q2 399 2 647t-1 296l-1 48h329v-144h-2q20 32 41 56t56.5 52t87 43.5t114.5 15.5q171 0 275 -113.5t104 -332.5z" /> |  | ||||||
| <glyph unicode="" d="M1536 640q0 -156 -61 -298t-164 -245t-245 -164t-298 -61q-172 0 -327 72.5t-264 204.5q-7 10 -6.5 22.5t8.5 20.5l137 138q10 9 25 9q16 -2 23 -12q73 -95 179 -147t225 -52q104 0 198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5t-40.5 198.5t-109.5 163.5 t-163.5 109.5t-198.5 40.5q-98 0 -188 -35.5t-160 -101.5l137 -138q31 -30 14 -69q-17 -40 -59 -40h-448q-26 0 -45 19t-19 45v448q0 42 40 59q39 17 69 -14l130 -129q107 101 244.5 156.5t284.5 55.5q156 0 298 -61t245 -164t164 -245t61 -298z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1771 0q0 -53 -37 -90l-107 -108q-39 -37 -91 -37q-53 0 -90 37l-363 364q-38 36 -38 90q0 53 43 96l-256 256l-126 -126q-14 -14 -34 -14t-34 14q2 -2 12.5 -12t12.5 -13t10 -11.5t10 -13.5t6 -13.5t5.5 -16.5t1.5 -18q0 -38 -28 -68q-3 -3 -16.5 -18t-19 -20.5 t-18.5 -16.5t-22 -15.5t-22 -9t-26 -4.5q-40 0 -68 28l-408 408q-28 28 -28 68q0 13 4.5 26t9 22t15.5 22t16.5 18.5t20.5 19t18 16.5q30 28 68 28q10 0 18 -1.5t16.5 -5.5t13.5 -6t13.5 -10t11.5 -10t13 -12.5t12 -12.5q-14 14 -14 34t14 34l348 348q14 14 34 14t34 -14 q-2 2 -12.5 12t-12.5 13t-10 11.5t-10 13.5t-6 13.5t-5.5 16.5t-1.5 18q0 38 28 68q3 3 16.5 18t19 20.5t18.5 16.5t22 15.5t22 9t26 4.5q40 0 68 -28l408 -408q28 -28 28 -68q0 -13 -4.5 -26t-9 -22t-15.5 -22t-16.5 -18.5t-20.5 -19t-18 -16.5q-30 -28 -68 -28 q-10 0 -18 1.5t-16.5 5.5t-13.5 6t-13.5 10t-11.5 10t-13 12.5t-12 12.5q14 -14 14 -34t-14 -34l-126 -126l256 -256q43 43 96 43q52 0 91 -37l363 -363q37 -39 37 -91z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M384 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM576 832q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1004 351l101 382q6 26 -7.5 48.5t-38.5 29.5 t-48 -6.5t-30 -39.5l-101 -382q-60 -5 -107 -43.5t-63 -98.5q-20 -77 20 -146t117 -89t146 20t89 117q16 60 -6 117t-72 91zM1664 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1024 1024q0 53 -37.5 90.5 t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1472 832q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1792 384q0 -261 -141 -483q-19 -29 -54 -29h-1402q-35 0 -54 29 q-141 221 -141 483q0 182 71 348t191 286t286 191t348 71t348 -71t286 -191t191 -286t71 -348z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M896 1152q-204 0 -381.5 -69.5t-282 -187.5t-104.5 -255q0 -112 71.5 -213.5t201.5 -175.5l87 -50l-27 -96q-24 -91 -70 -172q152 63 275 171l43 38l57 -6q69 -8 130 -8q204 0 381.5 69.5t282 187.5t104.5 255t-104.5 255t-282 187.5t-381.5 69.5zM1792 640 q0 -174 -120 -321.5t-326 -233t-450 -85.5q-70 0 -145 8q-198 -175 -460 -242q-49 -14 -114 -22h-5q-15 0 -27 10.5t-16 27.5v1q-3 4 -0.5 12t2 10t4.5 9.5l6 9t7 8.5t8 9q7 8 31 34.5t34.5 38t31 39.5t32.5 51t27 59t26 76q-157 89 -247.5 220t-90.5 281q0 174 120 321.5 t326 233t450 85.5t450 -85.5t326 -233t120 -321.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M704 1152q-153 0 -286 -52t-211.5 -141t-78.5 -191q0 -82 53 -158t149 -132l97 -56l-35 -84q34 20 62 39l44 31l53 -10q78 -14 153 -14q153 0 286 52t211.5 141t78.5 191t-78.5 191t-211.5 141t-286 52zM704 1280q191 0 353.5 -68.5t256.5 -186.5t94 -257t-94 -257 t-256.5 -186.5t-353.5 -68.5q-86 0 -176 16q-124 -88 -278 -128q-36 -9 -86 -16h-3q-11 0 -20.5 8t-11.5 21q-1 3 -1 6.5t0.5 6.5t2 6l2.5 5t3.5 5.5t4 5t4.5 5t4 4.5q5 6 23 25t26 29.5t22.5 29t25 38.5t20.5 44q-124 72 -195 177t-71 224q0 139 94 257t256.5 186.5 t353.5 68.5zM1526 111q10 -24 20.5 -44t25 -38.5t22.5 -29t26 -29.5t23 -25q1 -1 4 -4.5t4.5 -5t4 -5t3.5 -5.5l2.5 -5t2 -6t0.5 -6.5t-1 -6.5q-3 -14 -13 -22t-22 -7q-50 7 -86 16q-154 40 -278 128q-90 -16 -176 -16q-271 0 -472 132q58 -4 88 -4q161 0 309 45t264 129 q125 92 192 212t67 254q0 77 -23 152q129 -71 204 -178t75 -230q0 -120 -71 -224.5t-195 -176.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="896" d="M885 970q18 -20 7 -44l-540 -1157q-13 -25 -42 -25q-4 0 -14 2q-17 5 -25.5 19t-4.5 30l197 808l-406 -101q-4 -1 -12 -1q-18 0 -31 11q-18 15 -13 39l201 825q4 14 16 23t28 9h328q19 0 32 -12.5t13 -29.5q0 -8 -5 -18l-171 -463l396 98q8 2 12 2q19 0 34 -15z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 288v-320q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192h-512v-192h96q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192h-512v-192h96q40 0 68 -28t28 -68v-320 q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192q0 52 38 90t90 38h512v192h-96q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-96v-192h512q52 0 90 -38t38 -90v-192h96q40 0 68 -28t28 -68 z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M896 708v-580q0 -104 -76 -180t-180 -76t-180 76t-76 180q0 26 19 45t45 19t45 -19t19 -45q0 -50 39 -89t89 -39t89 39t39 89v580q33 11 64 11t64 -11zM1664 681q0 -13 -9.5 -22.5t-22.5 -9.5q-11 0 -23 10q-49 46 -93 69t-102 23q-68 0 -128 -37t-103 -97 q-7 -10 -17.5 -28t-14.5 -24q-11 -17 -28 -17q-18 0 -29 17q-4 6 -14.5 24t-17.5 28q-43 60 -102.5 97t-127.5 37t-127.5 -37t-102.5 -97q-7 -10 -17.5 -28t-14.5 -24q-11 -17 -29 -17q-17 0 -28 17q-4 6 -14.5 24t-17.5 28q-43 60 -103 97t-128 37q-58 0 -102 -23t-93 -69 q-12 -10 -23 -10q-13 0 -22.5 9.5t-9.5 22.5q0 5 1 7q45 183 172.5 319.5t298 204.5t360.5 68q140 0 274.5 -40t246.5 -113.5t194.5 -187t115.5 -251.5q1 -2 1 -7zM896 1408v-98q-42 2 -64 2t-64 -2v98q0 26 19 45t45 19t45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M768 -128h896v640h-416q-40 0 -68 28t-28 68v416h-384v-1152zM1024 1312v64q0 13 -9.5 22.5t-22.5 9.5h-704q-13 0 -22.5 -9.5t-9.5 -22.5v-64q0 -13 9.5 -22.5t22.5 -9.5h704q13 0 22.5 9.5t9.5 22.5zM1280 640h299l-299 299v-299zM1792 512v-672q0 -40 -28 -68t-68 -28 h-960q-40 0 -68 28t-28 68v160h-544q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h1088q40 0 68 -28t28 -68v-328q21 -13 36 -28l408 -408q28 -28 48 -76t20 -88z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M736 960q0 -13 -9.5 -22.5t-22.5 -9.5t-22.5 9.5t-9.5 22.5q0 46 -54 71t-106 25q-13 0 -22.5 9.5t-9.5 22.5t9.5 22.5t22.5 9.5q50 0 99.5 -16t87 -54t37.5 -90zM896 960q0 72 -34.5 134t-90 101.5t-123 62t-136.5 22.5t-136.5 -22.5t-123 -62t-90 -101.5t-34.5 -134 q0 -101 68 -180q10 -11 30.5 -33t30.5 -33q128 -153 141 -298h228q13 145 141 298q10 11 30.5 33t30.5 33q68 79 68 180zM1024 960q0 -155 -103 -268q-45 -49 -74.5 -87t-59.5 -95.5t-34 -107.5q47 -28 47 -82q0 -37 -25 -64q25 -27 25 -64q0 -52 -45 -81q13 -23 13 -47 q0 -46 -31.5 -71t-77.5 -25q-20 -44 -60 -70t-87 -26t-87 26t-60 70q-46 0 -77.5 25t-31.5 71q0 24 13 47q-45 29 -45 81q0 37 25 64q-25 27 -25 64q0 54 47 82q-4 50 -34 107.5t-59.5 95.5t-74.5 87q-103 113 -103 268q0 99 44.5 184.5t117 142t164 89t186.5 32.5 t186.5 -32.5t164 -89t117 -142t44.5 -184.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 352v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5q-12 0 -24 10l-319 320q-9 9 -9 22q0 14 9 23l320 320q9 9 23 9q13 0 22.5 -9.5t9.5 -22.5v-192h1376q13 0 22.5 -9.5t9.5 -22.5zM1792 896q0 -14 -9 -23l-320 -320q-9 -9 -23 -9 q-13 0 -22.5 9.5t-9.5 22.5v192h-1376q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1376v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M1280 608q0 14 -9 23t-23 9h-224v352q0 13 -9.5 22.5t-22.5 9.5h-192q-13 0 -22.5 -9.5t-9.5 -22.5v-352h-224q-13 0 -22.5 -9.5t-9.5 -22.5q0 -14 9 -23l352 -352q9 -9 23 -9t23 9l351 351q10 12 10 24zM1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088 q-185 0 -316.5 131.5t-131.5 316.5q0 130 70 240t188 165q-2 30 -2 43q0 212 150 362t362 150q156 0 285.5 -87t188.5 -231q71 62 166 62q106 0 181 -75t75 -181q0 -76 -41 -138q130 -31 213.5 -135.5t83.5 -238.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M1280 672q0 14 -9 23l-352 352q-9 9 -23 9t-23 -9l-351 -351q-10 -12 -10 -24q0 -14 9 -23t23 -9h224v-352q0 -13 9.5 -22.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 22.5v352h224q13 0 22.5 9.5t9.5 22.5zM1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088 q-185 0 -316.5 131.5t-131.5 316.5q0 130 70 240t188 165q-2 30 -2 43q0 212 150 362t362 150q156 0 285.5 -87t188.5 -231q71 62 166 62q106 0 181 -75t75 -181q0 -76 -41 -138q130 -31 213.5 -135.5t83.5 -238.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M384 192q0 -26 -19 -45t-45 -19t-45 19t-19 45t19 45t45 19t45 -19t19 -45zM1408 131q0 -121 -73 -190t-194 -69h-874q-121 0 -194 69t-73 190q0 68 5.5 131t24 138t47.5 132.5t81 103t120 60.5q-22 -52 -22 -120v-203q-58 -20 -93 -70t-35 -111q0 -80 56 -136t136 -56 t136 56t56 136q0 61 -35.5 111t-92.5 70v203q0 62 25 93q132 -104 295 -104t295 104q25 -31 25 -93v-64q-106 0 -181 -75t-75 -181v-89q-32 -29 -32 -71q0 -40 28 -68t68 -28t68 28t28 68q0 42 -32 71v89q0 52 38 90t90 38t90 -38t38 -90v-89q-32 -29 -32 -71q0 -40 28 -68 t68 -28t68 28t28 68q0 42 -32 71v89q0 68 -34.5 127.5t-93.5 93.5q0 10 0.5 42.5t0 48t-2.5 41.5t-7 47t-13 40q68 -15 120 -60.5t81 -103t47.5 -132.5t24 -138t5.5 -131zM1088 1024q0 -159 -112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5t112.5 271.5t271.5 112.5 t271.5 -112.5t112.5 -271.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M1280 832q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 832q0 -62 -35.5 -111t-92.5 -70v-395q0 -159 -131.5 -271.5t-316.5 -112.5t-316.5 112.5t-131.5 271.5v132q-164 20 -274 128t-110 252v512q0 26 19 45t45 19q6 0 16 -2q17 30 47 48 t65 18q53 0 90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5q-33 0 -64 18v-402q0 -106 94 -181t226 -75t226 75t94 181v402q-31 -18 -64 -18q-53 0 -90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5q35 0 65 -18t47 -48q10 2 16 2q26 0 45 -19t19 -45v-512q0 -144 -110 -252 t-274 -128v-132q0 -106 94 -181t226 -75t226 75t94 181v395q-57 21 -92.5 70t-35.5 111q0 80 56 136t136 56t136 -56t56 -136z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M640 1152h512v128h-512v-128zM288 1152v-1280h-64q-92 0 -158 66t-66 158v832q0 92 66 158t158 66h64zM1408 1152v-1280h-1024v1280h128v160q0 40 28 68t68 28h576q40 0 68 -28t28 -68v-160h128zM1792 928v-832q0 -92 -66 -158t-158 -66h-64v1280h64q92 0 158 -66 t66 -158z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M848 -160q0 16 -16 16q-59 0 -101.5 42.5t-42.5 101.5q0 16 -16 16t-16 -16q0 -73 51.5 -124.5t124.5 -51.5q16 0 16 16zM1664 128q0 -52 -38 -90t-90 -38h-448q0 -106 -75 -181t-181 -75t-181 75t-75 181h-448q-52 0 -90 38t-38 90q190 161 287 397.5t97 498.5 q0 165 96 262t264 117q-8 18 -8 37q0 40 28 68t68 28t68 -28t28 -68q0 -19 -8 -37q168 -20 264 -117t96 -262q0 -262 97 -498.5t287 -397.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M1664 896q0 80 -56 136t-136 56h-64v-384h64q80 0 136 56t56 136zM0 128h1792q0 -106 -75 -181t-181 -75h-1280q-106 0 -181 75t-75 181zM1856 896q0 -159 -112.5 -271.5t-271.5 -112.5h-64v-32q0 -92 -66 -158t-158 -66h-704q-92 0 -158 66t-66 158v736q0 26 19 45 t45 19h1152q159 0 271.5 -112.5t112.5 -271.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M640 1472v-640q0 -61 -35.5 -111t-92.5 -70v-779q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v779q-57 20 -92.5 70t-35.5 111v640q0 26 19 45t45 19t45 -19t19 -45v-416q0 -26 19 -45t45 -19t45 19t19 45v416q0 26 19 45t45 19t45 -19t19 -45v-416q0 -26 19 -45 t45 -19t45 19t19 45v416q0 26 19 45t45 19t45 -19t19 -45zM1408 1472v-1600q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v512h-224q-13 0 -22.5 9.5t-9.5 22.5v800q0 132 94 226t226 94h256q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1280" d="M1024 352v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h704q14 0 23 -9t9 -23zM1024 608v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h704q14 0 23 -9t9 -23zM128 0h1024v768h-416q-40 0 -68 28t-28 68v416h-512v-1280z M768 896h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376zM1280 864v-896q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h640q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M384 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 -128h384v1536h-1152v-1536h384v224q0 13 9.5 22.5t22.5 9.5h320q13 0 22.5 -9.5t9.5 -22.5v-224zM1408 1472v-1664q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v1664q0 26 19 45t45 19h1280q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M384 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 -128h384v1152h-256v-32q0 -40 -28 -68t-68 -28h-448q-40 0 -68 28t-28 68v32h-256v-1152h384v224q0 13 9.5 22.5t22.5 9.5h320q13 0 22.5 -9.5t9.5 -22.5v-224zM896 1056v320q0 13 -9.5 22.5t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-96h-128v96q0 13 -9.5 22.5 t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-320q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5v96h128v-96q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5zM1408 1088v-1280q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v1280q0 26 19 45t45 19h320 v288q0 40 28 68t68 28h448q40 0 68 -28t28 -68v-288h320q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M640 128q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM256 640h384v256h-158q-14 -2 -22 -9l-195 -195q-7 -12 -9 -22v-30zM1536 128q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5 t90.5 37.5t37.5 90.5zM1664 800v192q0 14 -9 23t-23 9h-224v224q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-224h-224q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h224v-224q0 -14 9 -23t23 -9h192q14 0 23 9t9 23v224h224q14 0 23 9t9 23zM1920 1344v-1152 q0 -26 -19 -45t-45 -19h-192q0 -106 -75 -181t-181 -75t-181 75t-75 181h-384q0 -106 -75 -181t-181 -75t-181 75t-75 181h-128q-26 0 -45 19t-19 45t19 45t45 19v416q0 26 13 58t32 51l198 198q19 19 51 32t58 13h160v320q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1280 416v192q0 14 -9 23t-23 9h-224v224q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-224h-224q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h224v-224q0 -14 9 -23t23 -9h192q14 0 23 9t9 23v224h224q14 0 23 9t9 23zM640 1152h512v128h-512v-128zM256 1152v-1280h-32 q-92 0 -158 66t-66 158v832q0 92 66 158t158 66h32zM1440 1152v-1280h-1088v1280h160v160q0 40 28 68t68 28h576q40 0 68 -28t28 -68v-160h160zM1792 928v-832q0 -92 -66 -158t-158 -66h-32v1280h32q92 0 158 -66t66 -158z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M1920 576q-1 -32 -288 -96l-352 -32l-224 -64h-64l-293 -352h69q26 0 45 -4.5t19 -11.5t-19 -11.5t-45 -4.5h-96h-160h-64v32h64v416h-160l-192 -224h-96l-32 32v192h32v32h128v8l-192 24v128l192 24v8h-128v32h-32v192l32 32h96l192 -224h160v416h-64v32h64h160h96 q26 0 45 -4.5t19 -11.5t-19 -11.5t-45 -4.5h-69l293 -352h64l224 -64l352 -32q261 -58 287 -93z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M640 640v384h-256v-256q0 -53 37.5 -90.5t90.5 -37.5h128zM1664 192v-192h-1152v192l128 192h-128q-159 0 -271.5 112.5t-112.5 271.5v320l-64 64l32 128h480l32 128h960l32 -192l-64 -32v-800z" /> |  | ||||||
| <glyph unicode="" d="M1280 192v896q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-320h-512v320q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-896q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v320h512v-320q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1536 1120v-960 q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" d="M1280 576v128q0 26 -19 45t-45 19h-320v320q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-320h-320q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h320v-320q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v320h320q26 0 45 19t19 45zM1536 1120v-960 q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M627 160q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23zM1011 160q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23 t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M595 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23zM979 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23 l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1152" d="M1075 224q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23zM1075 608q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393 q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1152" d="M1075 672q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23zM1075 1056q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23 t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="640" d="M627 992q0 -13 -10 -23l-393 -393l393 -393q10 -10 10 -23t-10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="640" d="M595 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1152" d="M1075 352q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1152" d="M1075 800q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M1792 544v832q0 13 -9.5 22.5t-22.5 9.5h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-832q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5zM1920 1376v-1088q0 -66 -47 -113t-113 -47h-544q0 -37 16 -77.5t32 -71t16 -43.5q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19 t-19 45q0 14 16 44t32 70t16 78h-544q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M416 256q-66 0 -113 47t-47 113v704q0 66 47 113t113 47h1088q66 0 113 -47t47 -113v-704q0 -66 -47 -113t-113 -47h-1088zM384 1120v-704q0 -13 9.5 -22.5t22.5 -9.5h1088q13 0 22.5 9.5t9.5 22.5v704q0 13 -9.5 22.5t-22.5 9.5h-1088q-13 0 -22.5 -9.5t-9.5 -22.5z M1760 192h160v-96q0 -40 -47 -68t-113 -28h-1600q-66 0 -113 28t-47 68v96h160h1600zM1040 96q16 0 16 16t-16 16h-160q-16 0 -16 -16t16 -16h160z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1152" d="M640 128q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1024 288v960q0 13 -9.5 22.5t-22.5 9.5h-832q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h832q13 0 22.5 9.5t9.5 22.5zM1152 1248v-1088q0 -66 -47 -113t-113 -47h-832 q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h832q66 0 113 -47t47 -113z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="768" d="M464 128q0 33 -23.5 56.5t-56.5 23.5t-56.5 -23.5t-23.5 -56.5t23.5 -56.5t56.5 -23.5t56.5 23.5t23.5 56.5zM672 288v704q0 13 -9.5 22.5t-22.5 9.5h-512q-13 0 -22.5 -9.5t-9.5 -22.5v-704q0 -13 9.5 -22.5t22.5 -9.5h512q13 0 22.5 9.5t9.5 22.5zM480 1136 q0 16 -16 16h-160q-16 0 -16 -16t16 -16h160q16 0 16 16zM768 1152v-1024q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v1024q0 52 38 90t90 38h512q52 0 90 -38t38 -90z" /> |  | ||||||
| <glyph unicode="" d="M768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103 t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M768 576v-384q0 -80 -56 -136t-136 -56h-384q-80 0 -136 56t-56 136v704q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5h64q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-64q-106 0 -181 -75t-75 -181v-32q0 -40 28 -68t68 -28h224q80 0 136 -56t56 -136z M1664 576v-384q0 -80 -56 -136t-136 -56h-384q-80 0 -136 56t-56 136v704q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5h64q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-64q-106 0 -181 -75t-75 -181v-32q0 -40 28 -68t68 -28h224q80 0 136 -56t56 -136z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M768 1216v-704q0 -104 -40.5 -198.5t-109.5 -163.5t-163.5 -109.5t-198.5 -40.5h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64q106 0 181 75t75 181v32q0 40 -28 68t-68 28h-224q-80 0 -136 56t-56 136v384q0 80 56 136t136 56h384q80 0 136 -56t56 -136zM1664 1216 v-704q0 -104 -40.5 -198.5t-109.5 -163.5t-163.5 -109.5t-198.5 -40.5h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64q106 0 181 75t75 181v32q0 40 -28 68t-68 28h-224q-80 0 -136 56t-56 136v384q0 80 56 136t136 56h384q80 0 136 -56t56 -136z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1568" d="M496 192q0 -60 -42.5 -102t-101.5 -42q-60 0 -102 42t-42 102t42 102t102 42q59 0 101.5 -42t42.5 -102zM928 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM320 640q0 -66 -47 -113t-113 -47t-113 47t-47 113 t47 113t113 47t113 -47t47 -113zM1360 192q0 -46 -33 -79t-79 -33t-79 33t-33 79t33 79t79 33t79 -33t33 -79zM528 1088q0 -73 -51.5 -124.5t-124.5 -51.5t-124.5 51.5t-51.5 124.5t51.5 124.5t124.5 51.5t124.5 -51.5t51.5 -124.5zM992 1280q0 -80 -56 -136t-136 -56 t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1536 640q0 -40 -28 -68t-68 -28t-68 28t-28 68t28 68t68 28t68 -28t28 -68zM1328 1088q0 -33 -23.5 -56.5t-56.5 -23.5t-56.5 23.5t-23.5 56.5t23.5 56.5t56.5 23.5t56.5 -23.5t23.5 -56.5z" /> |  | ||||||
| <glyph unicode="" d="M1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 416q0 -166 -127 -451q-3 -7 -10.5 -24t-13.5 -30t-13 -22q-12 -17 -28 -17q-15 0 -23.5 10t-8.5 25q0 9 2.5 26.5t2.5 23.5q5 68 5 123q0 101 -17.5 181t-48.5 138.5t-80 101t-105.5 69.5t-133 42.5t-154 21.5t-175.5 6h-224v-256q0 -26 -19 -45t-45 -19t-45 19 l-512 512q-19 19 -19 45t19 45l512 512q19 19 45 19t45 -19t19 -45v-256h224q713 0 875 -403q53 -134 53 -333z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M640 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1280 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1440 320 q0 120 -69 204t-187 84q-41 0 -195 -21q-71 -11 -157 -11t-157 11q-152 21 -195 21q-118 0 -187 -84t-69 -204q0 -88 32 -153.5t81 -103t122 -60t140 -29.5t149 -7h168q82 0 149 7t140 29.5t122 60t81 103t32 153.5zM1664 496q0 -207 -61 -331q-38 -77 -105.5 -133t-141 -86 t-170 -47.5t-171.5 -22t-167 -4.5q-78 0 -142 3t-147.5 12.5t-152.5 30t-137 51.5t-121 81t-86 115q-62 123 -62 331q0 237 136 396q-27 82 -27 170q0 116 51 218q108 0 190 -39.5t189 -123.5q147 35 309 35q148 0 280 -32q105 82 187 121t189 39q51 -102 51 -218 q0 -87 -27 -168q136 -160 136 -398z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1536 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68v-960q0 -40 28 -68t68 -28h1216q40 0 68 28t28 68zM1664 928v-704q0 -92 -66 -158t-158 -66h-1216q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320 q92 0 158 -66t66 -158v-32h672q92 0 158 -66t66 -158z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M1781 605q0 35 -53 35h-1088q-40 0 -85.5 -21.5t-71.5 -52.5l-294 -363q-18 -24 -18 -40q0 -35 53 -35h1088q40 0 86 22t71 53l294 363q18 22 18 39zM640 768h768v160q0 40 -28 68t-68 28h-576q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68 v-853l256 315q44 53 116 87.5t140 34.5zM1909 605q0 -62 -46 -120l-295 -363q-43 -53 -116 -87.5t-140 -34.5h-1088q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h544q92 0 158 -66t66 -158v-160h192q54 0 99 -24.5t67 -70.5q15 -32 15 -68z " /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1152" d="M896 608v-64q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v224h-224q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h224v224q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-224h224q14 0 23 -9t9 -23zM1024 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 -28 t-28 -68v-704q0 -40 28 -68t68 -28h704q40 0 68 28t28 68zM1152 928v-704q0 -92 -65.5 -158t-158.5 -66h-704q-93 0 -158.5 66t-65.5 158v704q0 93 65.5 158.5t158.5 65.5h704q93 0 158.5 -65.5t65.5 -158.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1152" d="M928 1152q93 0 158.5 -65.5t65.5 -158.5v-704q0 -92 -65.5 -158t-158.5 -66h-704q-93 0 -158.5 66t-65.5 158v704q0 93 65.5 158.5t158.5 65.5h704zM1024 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 -28t-28 -68v-704q0 -40 28 -68t68 -28h704q40 0 68 28t28 68z M864 640q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-576q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h576z" /> |  | ||||||
| <glyph unicode="" d="M1134 461q-37 -121 -138 -195t-228 -74t-228 74t-138 195q-8 25 4 48.5t38 31.5q25 8 48.5 -4t31.5 -38q25 -80 92.5 -129.5t151.5 -49.5t151.5 49.5t92.5 129.5q8 26 32 38t49 4t37 -31.5t4 -48.5zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5 t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5 t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" d="M1134 307q8 -25 -4 -48.5t-37 -31.5t-49 4t-32 38q-25 80 -92.5 129.5t-151.5 49.5t-151.5 -49.5t-92.5 -129.5q-8 -26 -31.5 -38t-48.5 -4q-26 8 -38 31.5t-4 48.5q37 121 138 195t228 74t228 -74t138 -195zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204 t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" d="M1152 448q0 -26 -19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h640q26 0 45 -19t19 -45zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M832 448v128q0 14 -9 23t-23 9h-192v192q0 14 -9 23t-23 9h-128q-14 0 -23 -9t-9 -23v-192h-192q-14 0 -23 -9t-9 -23v-128q0 -14 9 -23t23 -9h192v-192q0 -14 9 -23t23 -9h128q14 0 23 9t9 23v192h192q14 0 23 9t9 23zM1408 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5 t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1664 640q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1920 512q0 -212 -150 -362t-362 -150q-192 0 -338 128h-220q-146 -128 -338 -128q-212 0 -362 150 t-150 362t150 362t362 150h896q212 0 362 -150t150 -362z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M384 368v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM512 624v-96q0 -16 -16 -16h-224q-16 0 -16 16v96q0 16 16 16h224q16 0 16 -16zM384 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1408 368v-96q0 -16 -16 -16 h-864q-16 0 -16 16v96q0 16 16 16h864q16 0 16 -16zM768 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM640 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1024 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16 h96q16 0 16 -16zM896 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1280 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1664 368v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1152 880v-96 q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1408 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1664 880v-352q0 -16 -16 -16h-224q-16 0 -16 16v96q0 16 16 16h112v240q0 16 16 16h96q16 0 16 -16zM1792 128v896h-1664v-896 h1664zM1920 1024v-896q0 -53 -37.5 -90.5t-90.5 -37.5h-1664q-53 0 -90.5 37.5t-37.5 90.5v896q0 53 37.5 90.5t90.5 37.5h1664q53 0 90.5 -37.5t37.5 -90.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1664 491v616q-169 -91 -306 -91q-82 0 -145 32q-100 49 -184 76.5t-178 27.5q-173 0 -403 -127v-599q245 113 433 113q55 0 103.5 -7.5t98 -26t77 -31t82.5 -39.5l28 -14q44 -22 101 -22q120 0 293 92zM320 1280q0 -35 -17.5 -64t-46.5 -46v-1266q0 -14 -9 -23t-23 -9 h-64q-14 0 -23 9t-9 23v1266q-29 17 -46.5 46t-17.5 64q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -39 -35 -57q-10 -5 -17 -9q-218 -116 -369 -116q-88 0 -158 35l-28 14q-64 33 -99 48t-91 29t-114 14q-102 0 -235.5 -44t-228.5 -102 q-15 -9 -33 -9q-16 0 -32 8q-32 19 -32 56v742q0 35 31 55q35 21 78.5 42.5t114 52t152.5 49.5t155 19q112 0 209 -31t209 -86q38 -19 89 -19q122 0 310 112q22 12 31 17q31 16 62 -2q31 -20 31 -55z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M832 536v192q-181 -16 -384 -117v-185q205 96 384 110zM832 954v197q-172 -8 -384 -126v-189q215 111 384 118zM1664 491v184q-235 -116 -384 -71v224q-20 6 -39 15q-5 3 -33 17t-34.5 17t-31.5 15t-34.5 15.5t-32.5 13t-36 12.5t-35 8.5t-39.5 7.5t-39.5 4t-44 2 q-23 0 -49 -3v-222h19q102 0 192.5 -29t197.5 -82q19 -9 39 -15v-188q42 -17 91 -17q120 0 293 92zM1664 918v189q-169 -91 -306 -91q-45 0 -78 8v-196q148 -42 384 90zM320 1280q0 -35 -17.5 -64t-46.5 -46v-1266q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v1266 q-29 17 -46.5 46t-17.5 64q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -39 -35 -57q-10 -5 -17 -9q-218 -116 -369 -116q-88 0 -158 35l-28 14q-64 33 -99 48t-91 29t-114 14q-102 0 -235.5 -44t-228.5 -102q-15 -9 -33 -9q-16 0 -32 8 q-32 19 -32 56v742q0 35 31 55q35 21 78.5 42.5t114 52t152.5 49.5t155 19q112 0 209 -31t209 -86q38 -19 89 -19q122 0 310 112q22 12 31 17q31 16 62 -2q31 -20 31 -55z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M585 553l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23zM1664 96v-64q0 -14 -9 -23t-23 -9h-960q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h960q14 0 23 -9 t9 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M617 137l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23t-10 -23zM1208 1204l-373 -1291q-4 -13 -15.5 -19.5t-23.5 -2.5l-62 17q-13 4 -19.5 15.5t-2.5 24.5 l373 1291q4 13 15.5 19.5t23.5 2.5l62 -17q13 -4 19.5 -15.5t2.5 -24.5zM1865 553l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M640 454v-70q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-512 512q-19 19 -19 45t19 45l512 512q29 31 70 14q39 -17 39 -59v-69l-397 -398q-19 -19 -19 -45t19 -45zM1792 416q0 -58 -17 -133.5t-38.5 -138t-48 -125t-40.5 -90.5l-20 -40q-8 -17 -28 -17q-6 0 -9 1 q-25 8 -23 34q43 400 -106 565q-64 71 -170.5 110.5t-267.5 52.5v-251q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-512 512q-19 19 -19 45t19 45l512 512q29 31 70 14q39 -17 39 -59v-262q411 -28 599 -221q169 -173 169 -509z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1186 579l257 250l-356 52l-66 10l-30 60l-159 322v-963l59 -31l318 -168l-60 355l-12 66zM1638 841l-363 -354l86 -500q5 -33 -6 -51.5t-34 -18.5q-17 0 -40 12l-449 236l-449 -236q-23 -12 -40 -12q-23 0 -34 18.5t-6 51.5l86 500l-364 354q-32 32 -23 59.5t54 34.5 l502 73l225 455q20 41 49 41q28 0 49 -41l225 -455l502 -73q45 -7 54 -34.5t-24 -59.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M1401 1187l-640 -1280q-17 -35 -57 -35q-5 0 -15 2q-22 5 -35.5 22.5t-13.5 39.5v576h-576q-22 0 -39.5 13.5t-22.5 35.5t4 42t29 30l1280 640q13 7 29 7q27 0 45 -19q15 -14 18.5 -34.5t-6.5 -39.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M557 256h595v595zM512 301l595 595h-595v-595zM1664 224v-192q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v224h-864q-14 0 -23 9t-9 23v864h-224q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h224v224q0 14 9 23t23 9h192q14 0 23 -9t9 -23 v-224h851l246 247q10 9 23 9t23 -9q9 -10 9 -23t-9 -23l-247 -246v-851h224q14 0 23 -9t9 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M288 64q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM288 1216q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM928 1088q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM1024 1088q0 -52 -26 -96.5t-70 -69.5 q-2 -287 -226 -414q-68 -38 -203 -81q-128 -40 -169.5 -71t-41.5 -100v-26q44 -25 70 -69.5t26 -96.5q0 -80 -56 -136t-136 -56t-136 56t-56 136q0 52 26 96.5t70 69.5v820q-44 25 -70 69.5t-26 96.5q0 80 56 136t136 56t136 -56t56 -136q0 -52 -26 -96.5t-70 -69.5v-497 q54 26 154 57q55 17 87.5 29.5t70.5 31t59 39.5t40.5 51t28 69.5t8.5 91.5q-44 25 -70 69.5t-26 96.5q0 80 56 136t136 56t136 -56t56 -136z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M439 265l-256 -256q-10 -9 -23 -9q-12 0 -23 9q-9 10 -9 23t9 23l256 256q10 9 23 9t23 -9q9 -10 9 -23t-9 -23zM608 224v-320q0 -14 -9 -23t-23 -9t-23 9t-9 23v320q0 14 9 23t23 9t23 -9t9 -23zM384 448q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9t-9 23t9 23t23 9h320 q14 0 23 -9t9 -23zM1648 320q0 -120 -85 -203l-147 -146q-83 -83 -203 -83q-121 0 -204 85l-334 335q-21 21 -42 56l239 18l273 -274q27 -27 68 -27.5t68 26.5l147 146q28 28 28 67q0 40 -28 68l-274 275l18 239q35 -21 56 -42l336 -336q84 -86 84 -204zM1031 1044l-239 -18 l-273 274q-28 28 -68 28q-39 0 -68 -27l-147 -146q-28 -28 -28 -67q0 -40 28 -68l274 -274l-18 -240q-35 21 -56 42l-336 336q-84 86 -84 204q0 120 85 203l147 146q83 83 203 83q121 0 204 -85l334 -335q21 -21 42 -56zM1664 960q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9 t-9 23t9 23t23 9h320q14 0 23 -9t9 -23zM1120 1504v-320q0 -14 -9 -23t-23 -9t-23 9t-9 23v320q0 14 9 23t23 9t23 -9t9 -23zM1527 1353l-256 -256q-11 -9 -23 -9t-23 9q-9 10 -9 23t9 23l256 256q10 9 23 9t23 -9q9 -10 9 -23t-9 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M704 280v-240q0 -16 -12 -28t-28 -12h-240q-16 0 -28 12t-12 28v240q0 16 12 28t28 12h240q16 0 28 -12t12 -28zM1020 880q0 -54 -15.5 -101t-35 -76.5t-55 -59.5t-57.5 -43.5t-61 -35.5q-41 -23 -68.5 -65t-27.5 -67q0 -17 -12 -32.5t-28 -15.5h-240q-15 0 -25.5 18.5 t-10.5 37.5v45q0 83 65 156.5t143 108.5q59 27 84 56t25 76q0 42 -46.5 74t-107.5 32q-65 0 -108 -29q-35 -25 -107 -115q-13 -16 -31 -16q-12 0 -25 8l-164 125q-13 10 -15.5 25t5.5 28q160 266 464 266q80 0 161 -31t146 -83t106 -127.5t41 -158.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="640" d="M640 192v-128q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64v384h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h384q26 0 45 -19t19 -45v-576h64q26 0 45 -19t19 -45zM512 1344v-192q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v192 q0 26 19 45t45 19h256q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="640" d="M512 288v-224q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v224q0 26 19 45t45 19h256q26 0 45 -19t19 -45zM542 1344l-28 -768q-1 -26 -20.5 -45t-45.5 -19h-256q-26 0 -45.5 19t-20.5 45l-28 768q-1 26 17.5 45t44.5 19h320q26 0 44.5 -19t17.5 -45z" /> |  | ||||||
| <glyph unicode="" d="M897 167v-167h-248l-159 252l-24 42q-8 9 -11 21h-3l-9 -21q-10 -20 -25 -44l-155 -250h-258v167h128l197 291l-185 272h-137v168h276l139 -228q2 -4 23 -42q8 -9 11 -21h3q3 9 11 21l25 42l140 228h257v-168h-125l-184 -267l204 -296h109zM1534 846v-206h-514l-3 27 q-4 28 -4 46q0 64 26 117t65 86.5t84 65t84 54.5t65 54t26 64q0 38 -29.5 62.5t-70.5 24.5q-51 0 -97 -39q-14 -11 -36 -38l-105 92q26 37 63 66q83 65 188 65q110 0 178 -59.5t68 -158.5q0 -56 -24.5 -103t-62 -76.5t-81.5 -58.5t-82 -50.5t-65.5 -51.5t-30.5 -63h232v80 h126z" /> |  | ||||||
| <glyph unicode="" d="M897 167v-167h-248l-159 252l-24 42q-8 9 -11 21h-3l-9 -21q-10 -20 -25 -44l-155 -250h-258v167h128l197 291l-185 272h-137v168h276l139 -228q2 -4 23 -42q8 -9 11 -21h3q3 9 11 21l25 42l140 228h257v-168h-125l-184 -267l204 -296h109zM1536 -50v-206h-514l-4 27 q-3 45 -3 46q0 64 26 117t65 86.5t84 65t84 54.5t65 54t26 64q0 38 -29.5 62.5t-70.5 24.5q-51 0 -97 -39q-14 -11 -36 -38l-105 92q26 37 63 66q80 65 188 65q110 0 178 -59.5t68 -158.5q0 -66 -34.5 -118.5t-84 -86t-99.5 -62.5t-87 -63t-41 -73h232v80h126z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M896 128l336 384h-768l-336 -384h768zM1909 1205q15 -34 9.5 -71.5t-30.5 -65.5l-896 -1024q-38 -44 -96 -44h-768q-38 0 -69.5 20.5t-47.5 54.5q-15 34 -9.5 71.5t30.5 65.5l896 1024q38 44 96 44h768q38 0 69.5 -20.5t47.5 -54.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1664 438q0 -81 -44.5 -135t-123.5 -54q-41 0 -77.5 17.5t-59 38t-56.5 38t-71 17.5q-110 0 -110 -124q0 -39 16 -115t15 -115v-5q-22 0 -33 -1q-34 -3 -97.5 -11.5t-115.5 -13.5t-98 -5q-61 0 -103 26.5t-42 83.5q0 37 17.5 71t38 56.5t38 59t17.5 77.5q0 79 -54 123.5 t-135 44.5q-84 0 -143 -45.5t-59 -127.5q0 -43 15 -83t33.5 -64.5t33.5 -53t15 -50.5q0 -45 -46 -89q-37 -35 -117 -35q-95 0 -245 24q-9 2 -27.5 4t-27.5 4l-13 2q-1 0 -3 1q-2 0 -2 1v1024q2 -1 17.5 -3.5t34 -5t21.5 -3.5q150 -24 245 -24q80 0 117 35q46 44 46 89 q0 22 -15 50.5t-33.5 53t-33.5 64.5t-15 83q0 82 59 127.5t144 45.5q80 0 134 -44.5t54 -123.5q0 -41 -17.5 -77.5t-38 -59t-38 -56.5t-17.5 -71q0 -57 42 -83.5t103 -26.5q64 0 180 15t163 17v-2q-1 -2 -3.5 -17.5t-5 -34t-3.5 -21.5q-24 -150 -24 -245q0 -80 35 -117 q44 -46 89 -46q22 0 50.5 15t53 33.5t64.5 33.5t83 15q82 0 127.5 -59t45.5 -143z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1152" d="M1152 832v-128q0 -221 -147.5 -384.5t-364.5 -187.5v-132h256q26 0 45 -19t19 -45t-19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h256v132q-217 24 -364.5 187.5t-147.5 384.5v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -185 131.5 -316.5t316.5 -131.5 t316.5 131.5t131.5 316.5v128q0 26 19 45t45 19t45 -19t19 -45zM896 1216v-512q0 -132 -94 -226t-226 -94t-226 94t-94 226v512q0 132 94 226t226 94t226 -94t94 -226z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M271 591l-101 -101q-42 103 -42 214v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -53 15 -113zM1385 1193l-361 -361v-128q0 -132 -94 -226t-226 -94q-55 0 -109 19l-96 -96q97 -51 205 -51q185 0 316.5 131.5t131.5 316.5v128q0 26 19 45t45 19t45 -19t19 -45v-128 q0 -221 -147.5 -384.5t-364.5 -187.5v-132h256q26 0 45 -19t19 -45t-19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h256v132q-125 13 -235 81l-254 -254q-10 -10 -23 -10t-23 10l-82 82q-10 10 -10 23t10 23l1234 1234q10 10 23 10t23 -10l82 -82q10 -10 10 -23 t-10 -23zM1005 1325l-621 -621v512q0 132 94 226t226 94q102 0 184.5 -59t116.5 -152z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1280" d="M1088 576v640h-448v-1137q119 63 213 137q235 184 235 360zM1280 1344v-768q0 -86 -33.5 -170.5t-83 -150t-118 -127.5t-126.5 -103t-121 -77.5t-89.5 -49.5t-42.5 -20q-12 -6 -26 -6t-26 6q-16 7 -42.5 20t-89.5 49.5t-121 77.5t-126.5 103t-118 127.5t-83 150 t-33.5 170.5v768q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M128 -128h1408v1024h-1408v-1024zM512 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1280 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1664 1152v-1280 q0 -52 -38 -90t-90 -38h-1408q-52 0 -90 38t-38 90v1280q0 52 38 90t90 38h128v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h384v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h128q52 0 90 -38t38 -90z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M512 1344q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 1376v-320q0 -16 -12 -25q-8 -7 -20 -7q-4 0 -7 1l-448 96q-11 2 -18 11t-7 20h-256v-102q111 -23 183.5 -111t72.5 -203v-800q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v800 q0 106 62.5 190.5t161.5 114.5v111h-32q-59 0 -115 -23.5t-91.5 -53t-66 -66.5t-40.5 -53.5t-14 -24.5q-17 -35 -57 -35q-16 0 -29 7q-23 12 -31.5 37t3.5 49q5 10 14.5 26t37.5 53.5t60.5 70t85 67t108.5 52.5q-25 42 -25 86q0 66 47 113t113 47t113 -47t47 -113 q0 -33 -14 -64h302q0 11 7 20t18 11l448 96q3 1 7 1q12 0 20 -7q12 -9 12 -25z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1440 1088q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM1664 1376q0 -249 -75.5 -430.5t-253.5 -360.5q-81 -80 -195 -176l-20 -379q-2 -16 -16 -26l-384 -224q-7 -4 -16 -4q-12 0 -23 9l-64 64q-13 14 -8 32l85 276l-281 281l-276 -85q-3 -1 -9 -1 q-14 0 -23 9l-64 64q-17 19 -5 39l224 384q10 14 26 16l379 20q96 114 176 195q188 187 358 258t431 71q14 0 24 -9.5t10 -22.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1708 881l-188 -881h-304l181 849q4 21 1 43q-4 20 -16 35q-10 14 -28 24q-18 9 -40 9h-197l-205 -960h-303l204 960h-304l-205 -960h-304l272 1280h1139q157 0 245 -118q86 -116 52 -281z" /> |  | ||||||
| <glyph unicode="" d="M909 141l102 102q19 19 19 45t-19 45l-307 307l307 307q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l454 -454q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" d="M717 141l454 454q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l307 -307l-307 -307q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" d="M1165 397l102 102q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19l307 307l307 -307q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" d="M813 237l454 454q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-307 -307l-307 307q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l454 -454q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M1130 939l16 175h-884l47 -534h612l-22 -228l-197 -53l-196 53l-13 140h-175l22 -278l362 -100h4v1l359 99l50 544h-644l-15 181h674zM0 1408h1408l-128 -1438l-578 -162l-574 162z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M275 1408h1505l-266 -1333l-804 -267l-698 267l71 356h297l-29 -147l422 -161l486 161l68 339h-1208l58 297h1209l38 191h-1208z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M960 1280q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1792 352v-352q0 -22 -20 -30q-8 -2 -12 -2q-13 0 -23 9l-93 93q-119 -143 -318.5 -226.5t-429.5 -83.5t-429.5 83.5t-318.5 226.5l-93 -93q-9 -9 -23 -9q-4 0 -12 2q-20 8 -20 30v352 q0 14 9 23t23 9h352q22 0 30 -20q8 -19 -7 -35l-100 -100q67 -91 189.5 -153.5t271.5 -82.5v647h-192q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h192v163q-58 34 -93 92.5t-35 128.5q0 106 75 181t181 75t181 -75t75 -181q0 -70 -35 -128.5t-93 -92.5v-163h192q26 0 45 -19 t19 -45v-128q0 -26 -19 -45t-45 -19h-192v-647q149 20 271.5 82.5t189.5 153.5l-100 100q-15 16 -7 35q8 20 30 20h352q14 0 23 -9t9 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1152" d="M1056 768q40 0 68 -28t28 -68v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v576q0 40 28 68t68 28h32v320q0 185 131.5 316.5t316.5 131.5t316.5 -131.5t131.5 -316.5q0 -26 -19 -45t-45 -19h-64q-26 0 -45 19t-19 45q0 106 -75 181t-181 75t-181 -75t-75 -181 v-320h736z" /> |  | ||||||
| <glyph unicode="" d="M1024 640q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181zM1152 640q0 159 -112.5 271.5t-271.5 112.5t-271.5 -112.5t-112.5 -271.5t112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5zM1280 640q0 -212 -150 -362t-362 -150t-362 150 t-150 362t150 362t362 150t362 -150t150 -362zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M384 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM896 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM1408 800v-192q0 -40 -28 -68t-68 -28h-192 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="384" d="M384 288v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM384 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM384 1312v-192q0 -40 -28 -68t-68 -28h-192 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68z" /> |  | ||||||
| <glyph unicode="" d="M512 256q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM863 162q-13 232 -177 396t-396 177q-14 1 -24 -9t-10 -23v-128q0 -13 8.5 -22t21.5 -10q154 -11 264 -121t121 -264q1 -13 10 -21.5t22 -8.5h128q13 0 23 10 t9 24zM1247 161q-5 154 -56 297.5t-139.5 260t-205 205t-260 139.5t-297.5 56q-14 1 -23 -9q-10 -10 -10 -23v-128q0 -13 9 -22t22 -10q204 -7 378 -111.5t278.5 -278.5t111.5 -378q1 -13 10 -22t22 -9h128q13 0 23 10q11 9 9 23zM1536 1120v-960q0 -119 -84.5 -203.5 t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" d="M768 1408q209 0 385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103zM1152 585q32 18 32 55t-32 55l-544 320q-31 19 -64 1q-32 -19 -32 -56v-640q0 -37 32 -56 q16 -8 32 -8q17 0 32 9z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1024 1084l316 -316l-572 -572l-316 316zM813 105l618 618q19 19 19 45t-19 45l-362 362q-18 18 -45 18t-45 -18l-618 -618q-19 -19 -19 -45t19 -45l362 -362q18 -18 45 -18t45 18zM1702 742l-907 -908q-37 -37 -90.5 -37t-90.5 37l-126 126q56 56 56 136t-56 136 t-136 56t-136 -56l-125 126q-37 37 -37 90.5t37 90.5l907 906q37 37 90.5 37t90.5 -37l125 -125q-56 -56 -56 -136t56 -136t136 -56t136 56l126 -125q37 -37 37 -90.5t-37 -90.5z" /> |  | ||||||
| <glyph unicode="" d="M1280 576v128q0 26 -19 45t-45 19h-896q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h896q26 0 45 19t19 45zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5 t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M1152 736v-64q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h832q14 0 23 -9t9 -23zM1280 288v832q0 66 -47 113t-113 47h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113zM1408 1120v-832q0 -119 -84.5 -203.5 t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M1018 933q-18 -37 -58 -37h-192v-864q0 -14 -9 -23t-23 -9h-704q-21 0 -29 18q-8 20 4 35l160 192q9 11 25 11h320v640h-192q-40 0 -58 37q-17 37 9 68l320 384q18 22 49 22t49 -22l320 -384q27 -32 9 -68z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M32 1280h704q13 0 22.5 -9.5t9.5 -23.5v-863h192q40 0 58 -37t-9 -69l-320 -384q-18 -22 -49 -22t-49 22l-320 384q-26 31 -9 69q18 37 58 37h192v640h-320q-14 0 -25 11l-160 192q-13 14 -4 34q9 19 29 19z" /> |  | ||||||
| <glyph unicode="" d="M685 237l614 614q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-467 -467l-211 211q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l358 -358q19 -19 45 -19t45 19zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5 t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" d="M404 428l152 -152l-52 -52h-56v96h-96v56zM818 818q14 -13 -3 -30l-291 -291q-17 -17 -30 -3q-14 13 3 30l291 291q17 17 30 3zM544 128l544 544l-288 288l-544 -544v-288h288zM1152 736l92 92q28 28 28 68t-28 68l-152 152q-28 28 -68 28t-68 -28l-92 -92zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" d="M1280 608v480q0 26 -19 45t-45 19h-480q-42 0 -59 -39q-17 -41 14 -70l144 -144l-534 -534q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19l534 534l144 -144q18 -19 45 -19q12 0 25 5q39 17 39 59zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960 q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" d="M1005 435l352 352q19 19 19 45t-19 45l-352 352q-30 31 -69 14q-40 -17 -40 -59v-160q-119 0 -216 -19.5t-162.5 -51t-114 -79t-76.5 -95.5t-44.5 -109t-21.5 -111.5t-5 -110.5q0 -181 167 -404q10 -12 25 -12q7 0 13 3q22 9 19 33q-44 354 62 473q46 52 130 75.5 t224 23.5v-160q0 -42 40 -59q12 -5 24 -5q26 0 45 19zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" d="M640 448l256 128l-256 128v-256zM1024 1039v-542l-512 -256v542zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103 t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" d="M1145 861q18 -35 -5 -66l-320 -448q-19 -27 -52 -27t-52 27l-320 448q-23 31 -5 66q17 35 57 35h640q40 0 57 -35zM1280 160v960q0 13 -9.5 22.5t-22.5 9.5h-960q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" d="M1145 419q-17 -35 -57 -35h-640q-40 0 -57 35q-18 35 5 66l320 448q19 27 52 27t52 -27l320 -448q23 -31 5 -66zM1280 160v960q0 13 -9.5 22.5t-22.5 9.5h-960q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5zM1536 1120v-960 q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" d="M1088 640q0 -33 -27 -52l-448 -320q-31 -23 -66 -5q-35 17 -35 57v640q0 40 35 57q35 18 66 -5l448 -320q27 -19 27 -52zM1280 160v960q0 14 -9 23t-23 9h-960q-14 0 -23 -9t-9 -23v-960q0 -14 9 -23t23 -9h960q14 0 23 9t9 23zM1536 1120v-960q0 -119 -84.5 -203.5 t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M976 229l35 -159q3 -12 -3 -22.5t-17 -14.5l-5 -1q-4 -2 -10.5 -3.5t-16 -4.5t-21.5 -5.5t-25.5 -5t-30 -5t-33.5 -4.5t-36.5 -3t-38.5 -1q-234 0 -409 130.5t-238 351.5h-95q-13 0 -22.5 9.5t-9.5 22.5v113q0 13 9.5 22.5t22.5 9.5h66q-2 57 1 105h-67q-14 0 -23 9 t-9 23v114q0 14 9 23t23 9h98q67 210 243.5 338t400.5 128q102 0 194 -23q11 -3 20 -15q6 -11 3 -24l-43 -159q-3 -13 -14 -19.5t-24 -2.5l-4 1q-4 1 -11.5 2.5l-17.5 3.5t-22.5 3.5t-26 3t-29 2.5t-29.5 1q-126 0 -226 -64t-150 -176h468q16 0 25 -12q10 -12 7 -26 l-24 -114q-5 -26 -32 -26h-488q-3 -37 0 -105h459q15 0 25 -12q9 -12 6 -27l-24 -112q-2 -11 -11 -18.5t-20 -7.5h-387q48 -117 149.5 -185.5t228.5 -68.5q18 0 36 1.5t33.5 3.5t29.5 4.5t24.5 5t18.5 4.5l12 3l5 2q13 5 26 -2q12 -7 15 -21z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M1020 399v-367q0 -14 -9 -23t-23 -9h-956q-14 0 -23 9t-9 23v150q0 13 9.5 22.5t22.5 9.5h97v383h-95q-14 0 -23 9.5t-9 22.5v131q0 14 9 23t23 9h95v223q0 171 123.5 282t314.5 111q185 0 335 -125q9 -8 10 -20.5t-7 -22.5l-103 -127q-9 -11 -22 -12q-13 -2 -23 7 q-5 5 -26 19t-69 32t-93 18q-85 0 -137 -47t-52 -123v-215h305q13 0 22.5 -9t9.5 -23v-131q0 -13 -9.5 -22.5t-22.5 -9.5h-305v-379h414v181q0 13 9 22.5t23 9.5h162q14 0 23 -9.5t9 -22.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M978 351q0 -153 -99.5 -263.5t-258.5 -136.5v-175q0 -14 -9 -23t-23 -9h-135q-13 0 -22.5 9.5t-9.5 22.5v175q-66 9 -127.5 31t-101.5 44.5t-74 48t-46.5 37.5t-17.5 18q-17 21 -2 41l103 135q7 10 23 12q15 2 24 -9l2 -2q113 -99 243 -125q37 -8 74 -8q81 0 142.5 43 t61.5 122q0 28 -15 53t-33.5 42t-58.5 37.5t-66 32t-80 32.5q-39 16 -61.5 25t-61.5 26.5t-62.5 31t-56.5 35.5t-53.5 42.5t-43.5 49t-35.5 58t-21 66.5t-8.5 78q0 138 98 242t255 134v180q0 13 9.5 22.5t22.5 9.5h135q14 0 23 -9t9 -23v-176q57 -6 110.5 -23t87 -33.5 t63.5 -37.5t39 -29t15 -14q17 -18 5 -38l-81 -146q-8 -15 -23 -16q-14 -3 -27 7q-3 3 -14.5 12t-39 26.5t-58.5 32t-74.5 26t-85.5 11.5q-95 0 -155 -43t-60 -111q0 -26 8.5 -48t29.5 -41.5t39.5 -33t56 -31t60.5 -27t70 -27.5q53 -20 81 -31.5t76 -35t75.5 -42.5t62 -50 t53 -63.5t31.5 -76.5t13 -94z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="898" d="M898 1066v-102q0 -14 -9 -23t-23 -9h-168q-23 -144 -129 -234t-276 -110q167 -178 459 -536q14 -16 4 -34q-8 -18 -29 -18h-195q-16 0 -25 12q-306 367 -498 571q-9 9 -9 22v127q0 13 9.5 22.5t22.5 9.5h112q132 0 212.5 43t102.5 125h-427q-14 0 -23 9t-9 23v102 q0 14 9 23t23 9h413q-57 113 -268 113h-145q-13 0 -22.5 9.5t-9.5 22.5v133q0 14 9 23t23 9h832q14 0 23 -9t9 -23v-102q0 -14 -9 -23t-23 -9h-233q47 -61 64 -144h171q14 0 23 -9t9 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1027" d="M603 0h-172q-13 0 -22.5 9t-9.5 23v330h-288q-13 0 -22.5 9t-9.5 23v103q0 13 9.5 22.5t22.5 9.5h288v85h-288q-13 0 -22.5 9t-9.5 23v104q0 13 9.5 22.5t22.5 9.5h214l-321 578q-8 16 0 32q10 16 28 16h194q19 0 29 -18l215 -425q19 -38 56 -125q10 24 30.5 68t27.5 61 l191 420q8 19 29 19h191q17 0 27 -16q9 -14 1 -31l-313 -579h215q13 0 22.5 -9.5t9.5 -22.5v-104q0 -14 -9.5 -23t-22.5 -9h-290v-85h290q13 0 22.5 -9.5t9.5 -22.5v-103q0 -14 -9.5 -23t-22.5 -9h-290v-330q0 -13 -9.5 -22.5t-22.5 -9.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1664 352v-32q0 -132 -94 -226t-226 -94h-128q-132 0 -226 94t-94 226v480h-224q-2 -102 -14.5 -190.5t-30.5 -156t-48.5 -126.5t-57 -99.5t-67.5 -77.5t-69.5 -58.5t-74 -44t-69 -32t-65.5 -25.5q-4 -2 -32 -13q-8 -2 -12 -2q-22 0 -30 20l-71 178q-5 13 0 25t17 17 q7 3 20 7.5t18 6.5q31 12 46.5 18.5t44.5 20t45.5 26t42 32.5t40.5 42.5t34.5 53.5t30.5 68.5t22.5 83.5t17 103t6.5 123h-256q-14 0 -23 9t-9 23v160q0 14 9 23t23 9h1216q14 0 23 -9t9 -23v-160q0 -14 -9 -23t-23 -9h-224v-512q0 -26 19 -45t45 -19h128q26 0 45 19t19 45 v64q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1280 1376v-160q0 -14 -9 -23t-23 -9h-960q-14 0 -23 9t-9 23v160q0 14 9 23t23 9h960q14 0 23 -9t9 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M514 341l81 299h-159l75 -300q1 -1 1 -3t1 -3q0 1 0.5 3.5t0.5 3.5zM630 768l35 128h-292l32 -128h225zM822 768h139l-35 128h-70zM1271 340l78 300h-162l81 -299q0 -1 0.5 -3.5t1.5 -3.5q0 1 0.5 3t0.5 3zM1382 768l33 128h-297l34 -128h230zM1792 736v-64q0 -14 -9 -23 t-23 -9h-213l-164 -616q-7 -24 -31 -24h-159q-24 0 -31 24l-166 616h-209l-167 -616q-7 -24 -31 -24h-159q-11 0 -19.5 7t-10.5 17l-160 616h-208q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h175l-33 128h-142q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h109l-89 344q-5 15 5 28 q10 12 26 12h137q26 0 31 -24l90 -360h359l97 360q7 24 31 24h126q24 0 31 -24l98 -360h365l93 360q5 24 31 24h137q16 0 26 -12q10 -13 5 -28l-91 -344h111q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-145l-34 -128h179q14 0 23 -9t9 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1280" d="M1167 896q18 -182 -131 -258q117 -28 175 -103t45 -214q-7 -71 -32.5 -125t-64.5 -89t-97 -58.5t-121.5 -34.5t-145.5 -15v-255h-154v251q-80 0 -122 1v-252h-154v255q-18 0 -54 0.5t-55 0.5h-200l31 183h111q50 0 58 51v402h16q-6 1 -16 1v287q-13 68 -89 68h-111v164 l212 -1q64 0 97 1v252h154v-247q82 2 122 2v245h154v-252q79 -7 140 -22.5t113 -45t82.5 -78t36.5 -114.5zM952 351q0 36 -15 64t-37 46t-57.5 30.5t-65.5 18.5t-74 9t-69 3t-64.5 -1t-47.5 -1v-338q8 0 37 -0.5t48 -0.5t53 1.5t58.5 4t57 8.5t55.5 14t47.5 21t39.5 30 t24.5 40t9.5 51zM881 827q0 33 -12.5 58.5t-30.5 42t-48 28t-55 16.5t-61.5 8t-58 2.5t-54 -1t-39.5 -0.5v-307q5 0 34.5 -0.5t46.5 0t50 2t55 5.5t51.5 11t48.5 18.5t37 27t27 38.5t9 51z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1280" d="M1280 768v-800q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h544v-544q0 -40 28 -68t68 -28h544zM1277 896h-509v509q82 -15 132 -65l312 -312q50 -50 65 -132z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1280" d="M1024 160v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h704q14 0 23 9t9 23zM1024 416v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h704q14 0 23 9t9 23zM1280 768v-800q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28 t-28 68v1344q0 40 28 68t68 28h544v-544q0 -40 28 -68t68 -28h544zM1277 896h-509v509q82 -15 132 -65l312 -312q50 -50 65 -132z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1191 1128h177l-72 218l-12 47q-2 16 -2 20h-4l-3 -20q0 -1 -3.5 -18t-7.5 -29zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1572 -23 v-233h-584v90l369 529q12 18 21 27l11 9v3q-2 0 -6.5 -0.5t-7.5 -0.5q-12 -3 -30 -3h-232v-115h-120v229h567v-89l-369 -530q-6 -8 -21 -26l-11 -11v-2l14 2q9 2 30 2h248v119h121zM1661 874v-106h-288v106h75l-47 144h-243l-47 -144h75v-106h-287v106h70l230 662h162 l230 -662h70z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1191 104h177l-72 218l-12 47q-2 16 -2 20h-4l-3 -20q0 -1 -3.5 -18t-7.5 -29zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1661 -150 v-106h-288v106h75l-47 144h-243l-47 -144h75v-106h-287v106h70l230 662h162l230 -662h70zM1572 1001v-233h-584v90l369 529q12 18 21 27l11 9v3q-2 0 -6.5 -0.5t-7.5 -0.5q-12 -3 -30 -3h-232v-115h-120v229h567v-89l-369 -530q-6 -8 -21 -26l-11 -10v-3l14 3q9 1 30 1h248 v119h121z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1792 -32v-192q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h832 q14 0 23 -9t9 -23zM1600 480v-192q0 -14 -9 -23t-23 -9h-640q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h640q14 0 23 -9t9 -23zM1408 992v-192q0 -14 -9 -23t-23 -9h-448q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h448q14 0 23 -9t9 -23zM1216 1504v-192q0 -14 -9 -23t-23 -9h-256 q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h256q14 0 23 -9t9 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1216 -32v-192q0 -14 -9 -23t-23 -9h-256q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h256q14 0 23 -9t9 -23zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192 q14 0 23 -9t9 -23zM1408 480v-192q0 -14 -9 -23t-23 -9h-448q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h448q14 0 23 -9t9 -23zM1600 992v-192q0 -14 -9 -23t-23 -9h-640q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h640q14 0 23 -9t9 -23zM1792 1504v-192q0 -14 -9 -23t-23 -9h-832 q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h832q14 0 23 -9t9 -23z" /> |  | ||||||
| <glyph unicode="" d="M1346 223q0 63 -44 116t-103 53q-52 0 -83 -37t-31 -94t36.5 -95t104.5 -38q50 0 85 27t35 68zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23 zM1486 165q0 -62 -13 -121.5t-41 -114t-68 -95.5t-98.5 -65.5t-127.5 -24.5q-62 0 -108 16q-24 8 -42 15l39 113q15 -7 31 -11q37 -13 75 -13q84 0 134.5 58.5t66.5 145.5h-2q-21 -23 -61.5 -37t-84.5 -14q-106 0 -173 71.5t-67 172.5q0 105 72 178t181 73q123 0 205 -94.5 t82 -252.5zM1456 882v-114h-469v114h167v432q0 7 0.5 19t0.5 17v16h-2l-7 -12q-8 -13 -26 -31l-62 -58l-82 86l192 185h123v-654h165z" /> |  | ||||||
| <glyph unicode="" d="M1346 1247q0 63 -44 116t-103 53q-52 0 -83 -37t-31 -94t36.5 -95t104.5 -38q50 0 85 27t35 68zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9 t9 -23zM1456 -142v-114h-469v114h167v432q0 7 0.5 19t0.5 17v16h-2l-7 -12q-8 -13 -26 -31l-62 -58l-82 86l192 185h123v-654h165zM1486 1189q0 -62 -13 -121.5t-41 -114t-68 -95.5t-98.5 -65.5t-127.5 -24.5q-62 0 -108 16q-24 8 -42 15l39 113q15 -7 31 -11q37 -13 75 -13 q84 0 134.5 58.5t66.5 145.5h-2q-21 -23 -61.5 -37t-84.5 -14q-106 0 -173 71.5t-67 172.5q0 105 72 178t181 73q123 0 205 -94.5t82 -252.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M256 192q0 26 -19 45t-45 19q-27 0 -45.5 -19t-18.5 -45q0 -27 18.5 -45.5t45.5 -18.5q26 0 45 18.5t19 45.5zM416 704v-640q0 -26 -19 -45t-45 -19h-288q-26 0 -45 19t-19 45v640q0 26 19 45t45 19h288q26 0 45 -19t19 -45zM1600 704q0 -86 -55 -149q15 -44 15 -76 q3 -76 -43 -137q17 -56 0 -117q-15 -57 -54 -94q9 -112 -49 -181q-64 -76 -197 -78h-36h-76h-17q-66 0 -144 15.5t-121.5 29t-120.5 39.5q-123 43 -158 44q-26 1 -45 19.5t-19 44.5v641q0 25 18 43.5t43 20.5q24 2 76 59t101 121q68 87 101 120q18 18 31 48t17.5 48.5 t13.5 60.5q7 39 12.5 61t19.5 52t34 50q19 19 45 19q46 0 82.5 -10.5t60 -26t40 -40.5t24 -45t12 -50t5 -45t0.5 -39q0 -38 -9.5 -76t-19 -60t-27.5 -56q-3 -6 -10 -18t-11 -22t-8 -24h277q78 0 135 -57t57 -135z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M256 960q0 -26 -19 -45t-45 -19q-27 0 -45.5 19t-18.5 45q0 27 18.5 45.5t45.5 18.5q26 0 45 -18.5t19 -45.5zM416 448v640q0 26 -19 45t-45 19h-288q-26 0 -45 -19t-19 -45v-640q0 -26 19 -45t45 -19h288q26 0 45 19t19 45zM1545 597q55 -61 55 -149q-1 -78 -57.5 -135 t-134.5 -57h-277q4 -14 8 -24t11 -22t10 -18q18 -37 27 -57t19 -58.5t10 -76.5q0 -24 -0.5 -39t-5 -45t-12 -50t-24 -45t-40 -40.5t-60 -26t-82.5 -10.5q-26 0 -45 19q-20 20 -34 50t-19.5 52t-12.5 61q-9 42 -13.5 60.5t-17.5 48.5t-31 48q-33 33 -101 120q-49 64 -101 121 t-76 59q-25 2 -43 20.5t-18 43.5v641q0 26 19 44.5t45 19.5q35 1 158 44q77 26 120.5 39.5t121.5 29t144 15.5h17h76h36q133 -2 197 -78q58 -69 49 -181q39 -37 54 -94q17 -61 0 -117q46 -61 43 -137q0 -32 -15 -76z" /> |  | ||||||
| <glyph unicode="" d="M919 233v157q0 50 -29 50q-17 0 -33 -16v-224q16 -16 33 -16q29 0 29 49zM1103 355h66v34q0 51 -33 51t-33 -51v-34zM532 621v-70h-80v-423h-74v423h-78v70h232zM733 495v-367h-67v40q-39 -45 -76 -45q-33 0 -42 28q-6 16 -6 54v290h66v-270q0 -24 1 -26q1 -15 15 -15 q20 0 42 31v280h67zM985 384v-146q0 -52 -7 -73q-12 -42 -53 -42q-35 0 -68 41v-36h-67v493h67v-161q32 40 68 40q41 0 53 -42q7 -21 7 -74zM1236 255v-9q0 -29 -2 -43q-3 -22 -15 -40q-27 -40 -80 -40q-52 0 -81 38q-21 27 -21 86v129q0 59 20 86q29 38 80 38t78 -38 q21 -28 21 -86v-76h-133v-65q0 -51 34 -51q24 0 30 26q0 1 0.5 7t0.5 16.5v21.5h68zM785 1079v-156q0 -51 -32 -51t-32 51v156q0 52 32 52t32 -52zM1318 366q0 177 -19 260q-10 44 -43 73.5t-76 34.5q-136 15 -412 15q-275 0 -411 -15q-44 -5 -76.5 -34.5t-42.5 -73.5 q-20 -87 -20 -260q0 -176 20 -260q10 -43 42.5 -73t75.5 -35q137 -15 412 -15t412 15q43 5 75.5 35t42.5 73q20 84 20 260zM563 1017l90 296h-75l-51 -195l-53 195h-78l24 -69t23 -69q35 -103 46 -158v-201h74v201zM852 936v130q0 58 -21 87q-29 38 -78 38q-51 0 -78 -38 q-21 -29 -21 -87v-130q0 -58 21 -87q27 -38 78 -38q49 0 78 38q21 27 21 87zM1033 816h67v370h-67v-283q-22 -31 -42 -31q-15 0 -16 16q-1 2 -1 26v272h-67v-293q0 -37 6 -55q11 -27 43 -27q36 0 77 45v-40zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960 q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" d="M971 292v-211q0 -67 -39 -67q-23 0 -45 22v301q22 22 45 22q39 0 39 -67zM1309 291v-46h-90v46q0 68 45 68t45 -68zM343 509h107v94h-312v-94h105v-569h100v569zM631 -60h89v494h-89v-378q-30 -42 -57 -42q-18 0 -21 21q-1 3 -1 35v364h-89v-391q0 -49 8 -73 q12 -37 58 -37q48 0 102 61v-54zM1060 88v197q0 73 -9 99q-17 56 -71 56q-50 0 -93 -54v217h-89v-663h89v48q45 -55 93 -55q54 0 71 55q9 27 9 100zM1398 98v13h-91q0 -51 -2 -61q-7 -36 -40 -36q-46 0 -46 69v87h179v103q0 79 -27 116q-39 51 -106 51q-68 0 -107 -51 q-28 -37 -28 -116v-173q0 -79 29 -116q39 -51 108 -51q72 0 108 53q18 27 21 54q2 9 2 58zM790 1011v210q0 69 -43 69t-43 -69v-210q0 -70 43 -70t43 70zM1509 260q0 -234 -26 -350q-14 -59 -58 -99t-102 -46q-184 -21 -555 -21t-555 21q-58 6 -102.5 46t-57.5 99 q-26 112 -26 350q0 234 26 350q14 59 58 99t103 47q183 20 554 20t555 -20q58 -7 102.5 -47t57.5 -99q26 -112 26 -350zM511 1536h102l-121 -399v-271h-100v271q-14 74 -61 212q-37 103 -65 187h106l71 -263zM881 1203v-175q0 -81 -28 -118q-37 -51 -106 -51q-67 0 -105 51 q-28 38 -28 118v175q0 80 28 117q38 51 105 51q69 0 106 -51q28 -37 28 -117zM1216 1365v-499h-91v55q-53 -62 -103 -62q-46 0 -59 37q-8 24 -8 75v394h91v-367q0 -33 1 -35q3 -22 21 -22q27 0 57 43v381h91z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M597 869q-10 -18 -257 -456q-27 -46 -65 -46h-239q-21 0 -31 17t0 36l253 448q1 0 0 1l-161 279q-12 22 -1 37q9 15 32 15h239q40 0 66 -45zM1403 1511q11 -16 0 -37l-528 -934v-1l336 -615q11 -20 1 -37q-10 -15 -32 -15h-239q-42 0 -66 45l-339 622q18 32 531 942 q25 45 64 45h241q22 0 31 -15z" /> |  | ||||||
| <glyph unicode="" d="M685 771q0 1 -126 222q-21 34 -52 34h-184q-18 0 -26 -11q-7 -12 1 -29l125 -216v-1l-196 -346q-9 -14 0 -28q8 -13 24 -13h185q31 0 50 36zM1309 1268q-7 12 -24 12h-187q-30 0 -49 -35l-411 -729q1 -2 262 -481q20 -35 52 -35h184q18 0 25 12q8 13 -1 28l-260 476v1 l409 723q8 16 0 28zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1280 640q0 37 -30 54l-512 320q-31 20 -65 2q-33 -18 -33 -56v-640q0 -38 33 -56q16 -8 31 -8q20 0 34 10l512 320q30 17 30 54zM1792 640q0 -96 -1 -150t-8.5 -136.5t-22.5 -147.5q-16 -73 -69 -123t-124 -58q-222 -25 -671 -25t-671 25q-71 8 -124.5 58t-69.5 123 q-14 65 -21.5 147.5t-8.5 136.5t-1 150t1 150t8.5 136.5t22.5 147.5q16 73 69 123t124 58q222 25 671 25t671 -25q71 -8 124.5 -58t69.5 -123q14 -65 21.5 -147.5t8.5 -136.5t1 -150z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M402 829l494 -305l-342 -285l-490 319zM1388 274v-108l-490 -293v-1l-1 1l-1 -1v1l-489 293v108l147 -96l342 284v2l1 -1l1 1v-2l343 -284zM554 1418l342 -285l-494 -304l-338 270zM1390 829l338 -271l-489 -319l-343 285zM1239 1418l489 -319l-338 -270l-494 304z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M928 135v-151l-707 -1v151zM1169 481v-701l-1 -35v-1h-1132l-35 1h-1v736h121v-618h928v618h120zM241 393l704 -65l-13 -150l-705 65zM309 709l683 -183l-39 -146l-683 183zM472 1058l609 -360l-77 -130l-609 360zM832 1389l398 -585l-124 -85l-399 584zM1285 1536 l121 -697l-149 -26l-121 697z" /> |  | ||||||
| <glyph unicode="" d="M1362 110v648h-135q20 -63 20 -131q0 -126 -64 -232.5t-174 -168.5t-240 -62q-197 0 -337 135.5t-140 327.5q0 68 20 131h-141v-648q0 -26 17.5 -43.5t43.5 -17.5h1069q25 0 43 17.5t18 43.5zM1078 643q0 124 -90.5 211.5t-218.5 87.5q-127 0 -217.5 -87.5t-90.5 -211.5 t90.5 -211.5t217.5 -87.5q128 0 218.5 87.5t90.5 211.5zM1362 1003v165q0 28 -20 48.5t-49 20.5h-174q-29 0 -49 -20.5t-20 -48.5v-165q0 -29 20 -49t49 -20h174q29 0 49 20t20 49zM1536 1211v-1142q0 -81 -58 -139t-139 -58h-1142q-81 0 -139 58t-58 139v1142q0 81 58 139 t139 58h1142q81 0 139 -58t58 -139z" /> |  | ||||||
| <glyph unicode="" d="M1248 1408q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960zM698 640q0 88 -62 150t-150 62t-150 -62t-62 -150t62 -150t150 -62t150 62t62 150zM1262 640q0 88 -62 150 t-150 62t-150 -62t-62 -150t62 -150t150 -62t150 62t62 150z" /> |  | ||||||
| <glyph unicode="" d="M768 914l201 -306h-402zM1133 384h94l-459 691l-459 -691h94l104 160h522zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M815 677q8 -63 -50.5 -101t-111.5 -6q-39 17 -53.5 58t-0.5 82t52 58q36 18 72.5 12t64 -35.5t27.5 -67.5zM926 698q-14 107 -113 164t-197 13q-63 -28 -100.5 -88.5t-34.5 -129.5q4 -91 77.5 -155t165.5 -56q91 8 152 84t50 168zM1165 1240q-20 27 -56 44.5t-58 22 t-71 12.5q-291 47 -566 -2q-43 -7 -66 -12t-55 -22t-50 -43q30 -28 76 -45.5t73.5 -22t87.5 -11.5q228 -29 448 -1q63 8 89.5 12t72.5 21.5t75 46.5zM1222 205q-8 -26 -15.5 -76.5t-14 -84t-28.5 -70t-58 -56.5q-86 -48 -189.5 -71.5t-202 -22t-201.5 18.5q-46 8 -81.5 18 t-76.5 27t-73 43.5t-52 61.5q-25 96 -57 292l6 16l18 9q223 -148 506.5 -148t507.5 148q21 -6 24 -23t-5 -45t-8 -37zM1403 1166q-26 -167 -111 -655q-5 -30 -27 -56t-43.5 -40t-54.5 -31q-252 -126 -610 -88q-248 27 -394 139q-15 12 -25.5 26.5t-17 35t-9 34t-6 39.5 t-5.5 35q-9 50 -26.5 150t-28 161.5t-23.5 147.5t-22 158q3 26 17.5 48.5t31.5 37.5t45 30t46 22.5t48 18.5q125 46 313 64q379 37 676 -50q155 -46 215 -122q16 -20 16.5 -51t-5.5 -54z" /> |  | ||||||
| <glyph unicode="" d="M848 666q0 43 -41 66t-77 1q-43 -20 -42.5 -72.5t43.5 -70.5q39 -23 81 4t36 72zM928 682q8 -66 -36 -121t-110 -61t-119 40t-56 113q-2 49 25.5 93t72.5 64q70 31 141.5 -10t81.5 -118zM1100 1073q-20 -21 -53.5 -34t-53 -16t-63.5 -8q-155 -20 -324 0q-44 6 -63 9.5 t-52.5 16t-54.5 32.5q13 19 36 31t40 15.5t47 8.5q198 35 408 1q33 -5 51 -8.5t43 -16t39 -31.5zM1142 327q0 7 5.5 26.5t3 32t-17.5 16.5q-161 -106 -365 -106t-366 106l-12 -6l-5 -12q26 -154 41 -210q47 -81 204 -108q249 -46 428 53q34 19 49 51.5t22.5 85.5t12.5 71z M1272 1020q9 53 -8 75q-43 55 -155 88q-216 63 -487 36q-132 -12 -226 -46q-38 -15 -59.5 -25t-47 -34t-29.5 -54q8 -68 19 -138t29 -171t24 -137q1 -5 5 -31t7 -36t12 -27t22 -28q105 -80 284 -100q259 -28 440 63q24 13 39.5 23t31 29t19.5 40q48 267 80 473zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M390 1408h219v-388h364v-241h-364v-394q0 -136 14 -172q13 -37 52 -60q50 -31 117 -31q117 0 232 76v-242q-102 -48 -178 -65q-77 -19 -173 -19q-105 0 -186 27q-78 25 -138 75q-58 51 -79 105q-22 54 -22 161v539h-170v217q91 30 155 84q64 55 103 132q39 78 54 196z " /> |  | ||||||
| <glyph unicode="" d="M1123 127v181q-88 -56 -174 -56q-51 0 -88 23q-29 17 -39 45q-11 30 -11 129v295h274v181h-274v291h-164q-11 -90 -40 -147t-78 -99q-48 -40 -116 -63v-163h127v-404q0 -78 17 -121q17 -42 59 -78q43 -37 104 -57q62 -20 140 -20q67 0 129 14q57 13 134 49zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="768" d="M765 237q8 -19 -5 -35l-350 -384q-10 -10 -23 -10q-14 0 -24 10l-355 384q-13 16 -5 35q9 19 29 19h224v1248q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1248h224q21 0 29 -19z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="768" d="M765 1043q-9 -19 -29 -19h-224v-1248q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v1248h-224q-21 0 -29 19t5 35l350 384q10 10 23 10q14 0 24 -10l355 -384q13 -16 5 -35z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 736v-192q0 -14 -9 -23t-23 -9h-1248v-224q0 -21 -19 -29t-35 5l-384 350q-10 10 -10 23q0 14 10 24l384 354q16 14 35 6q19 -9 19 -29v-224h1248q14 0 23 -9t9 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1728 643q0 -14 -10 -24l-384 -354q-16 -14 -35 -6q-19 9 -19 29v224h-1248q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h1248v224q0 21 19 29t35 -5l384 -350q10 -10 10 -23z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M1393 321q-39 -125 -123 -250q-129 -196 -257 -196q-49 0 -140 32q-86 32 -151 32q-61 0 -142 -33q-81 -34 -132 -34q-152 0 -301 259q-147 261 -147 503q0 228 113 374q112 144 284 144q72 0 177 -30q104 -30 138 -30q45 0 143 34q102 34 173 34q119 0 213 -65 q52 -36 104 -100q-79 -67 -114 -118q-65 -94 -65 -207q0 -124 69 -223t158 -126zM1017 1494q0 -61 -29 -136q-30 -75 -93 -138q-54 -54 -108 -72q-37 -11 -104 -17q3 149 78 257q74 107 250 148q1 -3 2.5 -11t2.5 -11q0 -4 0.5 -10t0.5 -10z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M682 530v-651l-682 94v557h682zM682 1273v-659h-682v565zM1664 530v-786l-907 125v661h907zM1664 1408v-794h-907v669z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1408" d="M493 1053q16 0 27.5 11.5t11.5 27.5t-11.5 27.5t-27.5 11.5t-27 -11.5t-11 -27.5t11 -27.5t27 -11.5zM915 1053q16 0 27 11.5t11 27.5t-11 27.5t-27 11.5t-27.5 -11.5t-11.5 -27.5t11.5 -27.5t27.5 -11.5zM103 869q42 0 72 -30t30 -72v-430q0 -43 -29.5 -73t-72.5 -30 t-73 30t-30 73v430q0 42 30 72t73 30zM1163 850v-666q0 -46 -32 -78t-77 -32h-75v-227q0 -43 -30 -73t-73 -30t-73 30t-30 73v227h-138v-227q0 -43 -30 -73t-73 -30q-42 0 -72 30t-30 73l-1 227h-74q-46 0 -78 32t-32 78v666h918zM931 1255q107 -55 171 -153.5t64 -215.5 h-925q0 117 64 215.5t172 153.5l-71 131q-7 13 5 20q13 6 20 -6l72 -132q95 42 201 42t201 -42l72 132q7 12 20 6q12 -7 5 -20zM1408 767v-430q0 -43 -30 -73t-73 -30q-42 0 -72 30t-30 73v430q0 43 30 72.5t72 29.5q43 0 73 -29.5t30 -72.5z" /> |  | ||||||
| <glyph unicode="" d="M663 1125q-11 -1 -15.5 -10.5t-8.5 -9.5q-5 -1 -5 5q0 12 19 15h10zM750 1111q-4 -1 -11.5 6.5t-17.5 4.5q24 11 32 -2q3 -6 -3 -9zM399 684q-4 1 -6 -3t-4.5 -12.5t-5.5 -13.5t-10 -13q-7 -10 -1 -12q4 -1 12.5 7t12.5 18q1 3 2 7t2 6t1.5 4.5t0.5 4v3t-1 2.5t-3 2z M1254 325q0 18 -55 42q4 15 7.5 27.5t5 26t3 21.5t0.5 22.5t-1 19.5t-3.5 22t-4 20.5t-5 25t-5.5 26.5q-10 48 -47 103t-72 75q24 -20 57 -83q87 -162 54 -278q-11 -40 -50 -42q-31 -4 -38.5 18.5t-8 83.5t-11.5 107q-9 39 -19.5 69t-19.5 45.5t-15.5 24.5t-13 15t-7.5 7 q-14 62 -31 103t-29.5 56t-23.5 33t-15 40q-4 21 6 53.5t4.5 49.5t-44.5 25q-15 3 -44.5 18t-35.5 16q-8 1 -11 26t8 51t36 27q37 3 51 -30t4 -58q-11 -19 -2 -26.5t30 -0.5q13 4 13 36v37q-5 30 -13.5 50t-21 30.5t-23.5 15t-27 7.5q-107 -8 -89 -134q0 -15 -1 -15 q-9 9 -29.5 10.5t-33 -0.5t-15.5 5q1 57 -16 90t-45 34q-27 1 -41.5 -27.5t-16.5 -59.5q-1 -15 3.5 -37t13 -37.5t15.5 -13.5q10 3 16 14q4 9 -7 8q-7 0 -15.5 14.5t-9.5 33.5q-1 22 9 37t34 14q17 0 27 -21t9.5 -39t-1.5 -22q-22 -15 -31 -29q-8 -12 -27.5 -23.5 t-20.5 -12.5q-13 -14 -15.5 -27t7.5 -18q14 -8 25 -19.5t16 -19t18.5 -13t35.5 -6.5q47 -2 102 15q2 1 23 7t34.5 10.5t29.5 13t21 17.5q9 14 20 8q5 -3 6.5 -8.5t-3 -12t-16.5 -9.5q-20 -6 -56.5 -21.5t-45.5 -19.5q-44 -19 -70 -23q-25 -5 -79 2q-10 2 -9 -2t17 -19 q25 -23 67 -22q17 1 36 7t36 14t33.5 17.5t30 17t24.5 12t17.5 2.5t8.5 -11q0 -2 -1 -4.5t-4 -5t-6 -4.5t-8.5 -5t-9 -4.5t-10 -5t-9.5 -4.5q-28 -14 -67.5 -44t-66.5 -43t-49 -1q-21 11 -63 73q-22 31 -25 22q-1 -3 -1 -10q0 -25 -15 -56.5t-29.5 -55.5t-21 -58t11.5 -63 q-23 -6 -62.5 -90t-47.5 -141q-2 -18 -1.5 -69t-5.5 -59q-8 -24 -29 -3q-32 31 -36 94q-2 28 4 56q4 19 -1 18l-4 -5q-36 -65 10 -166q5 -12 25 -28t24 -20q20 -23 104 -90.5t93 -76.5q16 -15 17.5 -38t-14 -43t-45.5 -23q8 -15 29 -44.5t28 -54t7 -70.5q46 24 7 92 q-4 8 -10.5 16t-9.5 12t-2 6q3 5 13 9.5t20 -2.5q46 -52 166 -36q133 15 177 87q23 38 34 30q12 -6 10 -52q-1 -25 -23 -92q-9 -23 -6 -37.5t24 -15.5q3 19 14.5 77t13.5 90q2 21 -6.5 73.5t-7.5 97t23 70.5q15 18 51 18q1 37 34.5 53t72.5 10.5t60 -22.5zM626 1152 q3 17 -2.5 30t-11.5 15q-9 2 -9 -7q2 -5 5 -6q10 0 7 -15q-3 -20 8 -20q3 0 3 3zM1045 955q-2 8 -6.5 11.5t-13 5t-14.5 5.5q-5 3 -9.5 8t-7 8t-5.5 6.5t-4 4t-4 -1.5q-14 -16 7 -43.5t39 -31.5q9 -1 14.5 8t3.5 20zM867 1168q0 11 -5 19.5t-11 12.5t-9 3q-14 -1 -7 -7l4 -2 q14 -4 18 -31q0 -3 8 2zM921 1401q0 2 -2.5 5t-9 7t-9.5 6q-15 15 -24 15q-9 -1 -11.5 -7.5t-1 -13t-0.5 -12.5q-1 -4 -6 -10.5t-6 -9t3 -8.5q4 -3 8 0t11 9t15 9q1 1 9 1t15 2t9 7zM1486 60q20 -12 31 -24.5t12 -24t-2.5 -22.5t-15.5 -22t-23.5 -19.5t-30 -18.5 t-31.5 -16.5t-32 -15.5t-27 -13q-38 -19 -85.5 -56t-75.5 -64q-17 -16 -68 -19.5t-89 14.5q-18 9 -29.5 23.5t-16.5 25.5t-22 19.5t-47 9.5q-44 1 -130 1q-19 0 -57 -1.5t-58 -2.5q-44 -1 -79.5 -15t-53.5 -30t-43.5 -28.5t-53.5 -11.5q-29 1 -111 31t-146 43q-19 4 -51 9.5 t-50 9t-39.5 9.5t-33.5 14.5t-17 19.5q-10 23 7 66.5t18 54.5q1 16 -4 40t-10 42.5t-4.5 36.5t10.5 27q14 12 57 14t60 12q30 18 42 35t12 51q21 -73 -32 -106q-32 -20 -83 -15q-34 3 -43 -10q-13 -15 5 -57q2 -6 8 -18t8.5 -18t4.5 -17t1 -22q0 -15 -17 -49t-14 -48 q3 -17 37 -26q20 -6 84.5 -18.5t99.5 -20.5q24 -6 74 -22t82.5 -23t55.5 -4q43 6 64.5 28t23 48t-7.5 58.5t-19 52t-20 36.5q-121 190 -169 242q-68 74 -113 40q-11 -9 -15 15q-3 16 -2 38q1 29 10 52t24 47t22 42q8 21 26.5 72t29.5 78t30 61t39 54q110 143 124 195 q-12 112 -16 310q-2 90 24 151.5t106 104.5q39 21 104 21q53 1 106 -13.5t89 -41.5q57 -42 91.5 -121.5t29.5 -147.5q-5 -95 30 -214q34 -113 133 -218q55 -59 99.5 -163t59.5 -191q8 -49 5 -84.5t-12 -55.5t-20 -22q-10 -2 -23.5 -19t-27 -35.5t-40.5 -33.5t-61 -14 q-18 1 -31.5 5t-22.5 13.5t-13.5 15.5t-11.5 
20.5t-9 19.5q-22 37 -41 30t-28 -49t7 -97q20 -70 1 -195q-10 -65 18 -100.5t73 -33t85 35.5q59 49 89.5 66.5t103.5 42.5q53 18 77 36.5t18.5 34.5t-25 28.5t-51.5 23.5q-33 11 -49.5 48t-15 72.5t15.5 47.5q1 -31 8 -56.5 t14.5 -40.5t20.5 -28.5t21 -19t21.5 -13t16.5 -9.5z" /> |  | ||||||
| <glyph unicode="" d="M1024 36q-42 241 -140 498h-2l-2 -1q-16 -6 -43 -16.5t-101 -49t-137 -82t-131 -114.5t-103 -148l-15 11q184 -150 418 -150q132 0 256 52zM839 643q-21 49 -53 111q-311 -93 -673 -93q-1 -7 -1 -21q0 -124 44 -236.5t124 -201.5q50 89 123.5 166.5t142.5 124.5t130.5 81 t99.5 48l37 13q4 1 13 3.5t13 4.5zM732 855q-120 213 -244 378q-138 -65 -234 -186t-128 -272q302 0 606 80zM1416 536q-210 60 -409 29q87 -239 128 -469q111 75 185 189.5t96 250.5zM611 1277q-1 0 -2 -1q1 1 2 1zM1201 1132q-185 164 -433 164q-76 0 -155 -19 q131 -170 246 -382q69 26 130 60.5t96.5 61.5t65.5 57t37.5 40.5zM1424 647q-3 232 -149 410l-1 -1q-9 -12 -19 -24.5t-43.5 -44.5t-71 -60.5t-100 -65t-131.5 -64.5q25 -53 44 -95q2 -6 6.5 -17.5t7.5 -16.5q36 5 74.5 7t73.5 2t69 -1.5t64 -4t56.5 -5.5t48 -6.5t36.5 -6 t25 -4.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" d="M1173 473q0 50 -19.5 91.5t-48.5 68.5t-73 49t-82.5 34t-87.5 23l-104 24q-30 7 -44 10.5t-35 11.5t-30 16t-16.5 21t-7.5 30q0 77 144 77q43 0 77 -12t54 -28.5t38 -33.5t40 -29t48 -12q47 0 75.5 32t28.5 77q0 55 -56 99.5t-142 67.5t-182 23q-68 0 -132 -15.5 t-119.5 -47t-89 -87t-33.5 -128.5q0 -61 19 -106.5t56 -75.5t80 -48.5t103 -32.5l146 -36q90 -22 112 -36q32 -20 32 -60q0 -39 -40 -64.5t-105 -25.5q-51 0 -91.5 16t-65 38.5t-45.5 45t-46 38.5t-54 16q-50 0 -75.5 -30t-25.5 -75q0 -92 122 -157.5t291 -65.5 q73 0 140 18.5t122.5 53.5t88.5 93.5t33 131.5zM1536 256q0 -159 -112.5 -271.5t-271.5 -112.5q-130 0 -234 80q-77 -16 -150 -16q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5q0 73 16 150q-80 104 -80 234q0 159 112.5 271.5t271.5 112.5q130 0 234 -80 q77 16 150 16q143 0 273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -73 -16 -150q80 -104 80 -234z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1483 512l-587 -587q-52 -53 -127.5 -53t-128.5 53l-587 587q-53 53 -53 128t53 128l587 587q53 53 128 53t128 -53l265 -265l-398 -399l-188 188q-42 42 -99 42q-59 0 -100 -41l-120 -121q-42 -40 -42 -99q0 -58 42 -100l406 -408q30 -28 67 -37l6 -4h28q60 0 99 41 l619 619l2 -3q53 -53 53 -128t-53 -128zM1406 1138l120 -120q14 -15 14 -36t-14 -36l-730 -730q-17 -15 -37 -15v0q-4 0 -6 1q-18 2 -30 14l-407 408q-14 15 -14 36t14 35l121 120q13 15 35 15t36 -15l252 -252l574 575q15 15 36 15t36 -15z" /> |  | ||||||
| <glyph unicode="" d="M704 192v1024q0 14 -9 23t-23 9h-480q-14 0 -23 -9t-9 -23v-1024q0 -14 9 -23t23 -9h480q14 0 23 9t9 23zM1376 576v640q0 14 -9 23t-23 9h-480q-14 0 -23 -9t-9 -23v-640q0 -14 9 -23t23 -9h480q14 0 23 9t9 23zM1536 1344v-1408q0 -26 -19 -45t-45 -19h-1408 q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h1408q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1280" d="M1280 480q0 -40 -28 -68t-68 -28q-51 0 -80 43l-227 341h-45v-132l247 -411q9 -15 9 -33q0 -26 -19 -45t-45 -19h-192v-272q0 -46 -33 -79t-79 -33h-160q-46 0 -79 33t-33 79v272h-192q-26 0 -45 19t-19 45q0 18 9 33l247 411v132h-45l-227 -341q-29 -43 -80 -43 q-40 0 -68 28t-28 68q0 29 16 53l256 384q73 107 176 107h384q103 0 176 -107l256 -384q16 -24 16 -53zM864 1280q0 -93 -65.5 -158.5t-158.5 -65.5t-158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5t158.5 -65.5t65.5 -158.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1024" d="M1024 832v-416q0 -40 -28 -68t-68 -28t-68 28t-28 68v352h-64v-912q0 -46 -33 -79t-79 -33t-79 33t-33 79v464h-64v-464q0 -46 -33 -79t-79 -33t-79 33t-33 79v912h-64v-352q0 -40 -28 -68t-68 -28t-68 28t-28 68v416q0 80 56 136t136 56h640q80 0 136 -56t56 -136z M736 1280q0 -93 -65.5 -158.5t-158.5 -65.5t-158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5t158.5 -65.5t65.5 -158.5z" /> |  | ||||||
| <glyph unicode="" d="M773 234l350 473q16 22 24.5 59t-6 85t-61.5 79q-40 26 -83 25.5t-73.5 -17.5t-54.5 -45q-36 -40 -96 -40q-59 0 -95 40q-24 28 -54.5 45t-73.5 17.5t-84 -25.5q-46 -31 -60.5 -79t-6 -85t24.5 -59zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103 t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1472 640q0 117 -45.5 223.5t-123 184t-184 123t-223.5 45.5t-223.5 -45.5t-184 -123t-123 -184t-45.5 -223.5t45.5 -223.5t123 -184t184 -123t223.5 -45.5t223.5 45.5t184 123t123 184t45.5 223.5zM1748 363q-4 -15 -20 -20l-292 -96v-306q0 -16 -13 -26q-15 -10 -29 -4 l-292 94l-180 -248q-10 -13 -26 -13t-26 13l-180 248l-292 -94q-14 -6 -29 4q-13 10 -13 26v306l-292 96q-16 5 -20 20q-5 17 4 29l180 248l-180 248q-9 13 -4 29q4 15 20 20l292 96v306q0 16 13 26q15 10 29 4l292 -94l180 248q9 12 26 12t26 -12l180 -248l292 94 q14 6 29 -4q13 -10 13 -26v-306l292 -96q16 -5 20 -20q5 -16 -4 -29l-180 -248l180 -248q9 -12 4 -29z" /> |  | ||||||
| <glyph unicode="" d="M1262 233q-54 -9 -110 -9q-182 0 -337 90t-245 245t-90 337q0 192 104 357q-201 -60 -328.5 -229t-127.5 -384q0 -130 51 -248.5t136.5 -204t204 -136.5t248.5 -51q144 0 273.5 61.5t220.5 171.5zM1465 318q-94 -203 -283.5 -324.5t-413.5 -121.5q-156 0 -298 61 t-245 164t-164 245t-61 298q0 153 57.5 292.5t156 241.5t235.5 164.5t290 68.5q44 2 61 -39q18 -41 -15 -72q-86 -78 -131.5 -181.5t-45.5 -218.5q0 -148 73 -273t198 -198t273 -73q118 0 228 51q41 18 72 -13q14 -14 17.5 -34t-4.5 -38z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M1088 704q0 26 -19 45t-45 19h-256q-26 0 -45 -19t-19 -45t19 -45t45 -19h256q26 0 45 19t19 45zM1664 896v-960q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v960q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1728 1344v-256q0 -26 -19 -45t-45 -19h-1536 q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1536q26 0 45 -19t19 -45z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1664" d="M1632 576q0 -26 -19 -45t-45 -19h-224q0 -171 -67 -290l208 -209q19 -19 19 -45t-19 -45q-18 -19 -45 -19t-45 19l-198 197q-5 -5 -15 -13t-42 -28.5t-65 -36.5t-82 -29t-97 -13v896h-128v-896q-51 0 -101.5 13.5t-87 33t-66 39t-43.5 32.5l-15 14l-183 -207 q-20 -21 -48 -21q-24 0 -43 16q-19 18 -20.5 44.5t15.5 46.5l202 227q-58 114 -58 274h-224q-26 0 -45 19t-19 45t19 45t45 19h224v294l-173 173q-19 19 -19 45t19 45t45 19t45 -19l173 -173h844l173 173q19 19 45 19t45 -19t19 -45t-19 -45l-173 -173v-294h224q26 0 45 -19 t19 -45zM1152 1152h-640q0 133 93.5 226.5t226.5 93.5t226.5 -93.5t93.5 -226.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M1917 1016q23 -64 -150 -294q-24 -32 -65 -85q-78 -100 -90 -131q-17 -41 14 -81q17 -21 81 -82h1l1 -1l1 -1l2 -2q141 -131 191 -221q3 -5 6.5 -12.5t7 -26.5t-0.5 -34t-25 -27.5t-59 -12.5l-256 -4q-24 -5 -56 5t-52 22l-20 12q-30 21 -70 64t-68.5 77.5t-61 58 t-56.5 15.5q-3 -1 -8 -3.5t-17 -14.5t-21.5 -29.5t-17 -52t-6.5 -77.5q0 -15 -3.5 -27.5t-7.5 -18.5l-4 -5q-18 -19 -53 -22h-115q-71 -4 -146 16.5t-131.5 53t-103 66t-70.5 57.5l-25 24q-10 10 -27.5 30t-71.5 91t-106 151t-122.5 211t-130.5 272q-6 16 -6 27t3 16l4 6 q15 19 57 19l274 2q12 -2 23 -6.5t16 -8.5l5 -3q16 -11 24 -32q20 -50 46 -103.5t41 -81.5l16 -29q29 -60 56 -104t48.5 -68.5t41.5 -38.5t34 -14t27 5q2 1 5 5t12 22t13.5 47t9.5 81t0 125q-2 40 -9 73t-14 46l-6 12q-25 34 -85 43q-13 2 5 24q17 19 38 30q53 26 239 24 q82 -1 135 -13q20 -5 33.5 -13.5t20.5 -24t10.5 -32t3.5 -45.5t-1 -55t-2.5 -70.5t-1.5 -82.5q0 -11 -1 -42t-0.5 -48t3.5 -40.5t11.5 -39t22.5 -24.5q8 -2 17 -4t26 11t38 34.5t52 67t68 107.5q60 104 107 225q4 10 10 17.5t11 10.5l4 3l5 2.5t13 3t20 0.5l288 2 q39 5 64 -2.5t31 -16.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" d="M675 252q21 34 11 69t-45 50q-34 14 -73 1t-60 -46q-22 -34 -13 -68.5t43 -50.5t74.5 -2.5t62.5 47.5zM769 373q8 13 3.5 26.5t-17.5 18.5q-14 5 -28.5 -0.5t-21.5 -18.5q-17 -31 13 -45q14 -5 29 0.5t22 18.5zM943 266q-45 -102 -158 -150t-224 -12 q-107 34 -147.5 126.5t6.5 187.5q47 93 151.5 139t210.5 19q111 -29 158.5 -119.5t2.5 -190.5zM1255 426q-9 96 -89 170t-208.5 109t-274.5 21q-223 -23 -369.5 -141.5t-132.5 -264.5q9 -96 89 -170t208.5 -109t274.5 -21q223 23 369.5 141.5t132.5 264.5zM1563 422 q0 -68 -37 -139.5t-109 -137t-168.5 -117.5t-226 -83t-270.5 -31t-275 33.5t-240.5 93t-171.5 151t-65 199.5q0 115 69.5 245t197.5 258q169 169 341.5 236t246.5 -7q65 -64 20 -209q-4 -14 -1 -20t10 -7t14.5 0.5t13.5 3.5l6 2q139 59 246 59t153 -61q45 -63 0 -178 q-2 -13 -4.5 -20t4.5 -12.5t12 -7.5t17 -6q57 -18 103 -47t80 -81.5t34 -116.5zM1489 1046q42 -47 54.5 -108.5t-6.5 -117.5q-8 -23 -29.5 -34t-44.5 -4q-23 8 -34 29.5t-4 44.5q20 63 -24 111t-107 35q-24 -5 -45 8t-25 37q-5 24 8 44.5t37 25.5q60 13 119 -5.5t101 -65.5z M1670 1209q87 -96 112.5 -222.5t-13.5 -241.5q-9 -27 -34 -40t-52 -4t-40 34t-5 52q28 82 10 172t-80 158q-62 69 -148 95.5t-173 8.5q-28 -6 -52 9.5t-30 43.5t9.5 51.5t43.5 29.5q123 26 244 -11.5t208 -134.5z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1920" d="M805 163q-122 -67 -261 -67q-141 0 -261 67q98 61 167 149t94 191q25 -103 94 -191t167 -149zM453 1176v-344q0 -179 -89.5 -326t-234.5 -217q-129 152 -129 351q0 200 129.5 352t323.5 184zM958 991q-128 -152 -128 -351q0 -201 128 -351q-145 70 -234.5 218t-89.5 328 v341q196 -33 324 -185zM1638 163q-122 -67 -261 -67q-141 0 -261 67q98 61 167 149t94 191q25 -103 94 -191t167 -149zM1286 1176v-344q0 -179 -91 -326t-237 -217v0q133 154 133 351q0 195 -133 351q129 151 328 185zM1920 640q0 -201 -129 -351q-145 70 -234.5 218 t-89.5 328v341q194 -32 323.5 -184t129.5 -352z" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" /> |  | ||||||
| <glyph unicode="" horiz-adv-x="1792" /> |  | ||||||
| </font> |  | ||||||
| </defs></svg>  |  | ||||||
| Before Width: | Height: | Size: 193 KiB | 
										
Binary file not shown.

Binary file not shown.

16 docs/_themes/sphinx_rtd_theme/static/js/theme.js (vendored)
							| @@ -1,16 +0,0 @@ | |||||||
| $( document ).ready(function() { |  | ||||||
|   // Shift nav in mobile when clicking the menu. |  | ||||||
|   $("[data-toggle='wy-nav-top']").click(function() { |  | ||||||
|     $("[data-toggle='wy-nav-shift']").toggleClass("shift"); |  | ||||||
|     $("[data-toggle='rst-versions']").toggleClass("shift"); |  | ||||||
|   }); |  | ||||||
|   // Close menu when you click a link. |  | ||||||
|   $(".wy-menu-vertical .current ul li a").click(function() { |  | ||||||
|     $("[data-toggle='wy-nav-shift']").removeClass("shift"); |  | ||||||
|     $("[data-toggle='rst-versions']").toggleClass("shift"); |  | ||||||
|   }); |  | ||||||
|   $("[data-toggle='rst-current-version']").click(function() { |  | ||||||
|     $("[data-toggle='rst-versions']").toggleClass("shift-up"); |  | ||||||
|   }); |  | ||||||
|   $("table.docutils:not(.field-list").wrap("<div class='wy-table-responsive'></div>"); |  | ||||||
| }); |  | ||||||
							
								
								
									
8 docs/_themes/sphinx_rtd_theme/theme.conf (vendored)
							| @@ -1,8 +0,0 @@ | |||||||
| [theme] |  | ||||||
| inherit = basic |  | ||||||
| stylesheet = css/theme.css |  | ||||||
|  |  | ||||||
| [options] |  | ||||||
| typekit_id = hiw1hhg |  | ||||||
| analytics_id = |  | ||||||
| canonical_url = |  | ||||||
							
								
								
									
37 docs/_themes/sphinx_rtd_theme/versions.html (vendored)
							| @@ -1,37 +0,0 @@ | |||||||
| {% if READTHEDOCS %} |  | ||||||
| {# Add rst-badge after rst-versions for small badge style. #} |  | ||||||
|   <div class="rst-versions" data-toggle="rst-versions"> |  | ||||||
|     <span class="rst-current-version" data-toggle="rst-current-version"> |  | ||||||
|       <span class="icon icon-book"> Read the Docs</span> |  | ||||||
|       v: {{ current_version }}  |  | ||||||
|       <span class="icon icon-caret-down"></span> |  | ||||||
|     </span> |  | ||||||
|     <div class="rst-other-versions"> |  | ||||||
|       <dl> |  | ||||||
|         <dt>Versions</dt> |  | ||||||
|         {% for slug, url in versions %} |  | ||||||
|           <dd><a href="{{ url }}">{{ slug }}</a></dd> |  | ||||||
|         {% endfor %} |  | ||||||
|       </dl> |  | ||||||
|       <dl> |  | ||||||
|         <dt>Downloads</dt> |  | ||||||
|         {% for type, url in downloads %} |  | ||||||
|           <dd><a href="{{ url }}">{{ type }}</a></dd> |  | ||||||
|         {% endfor %} |  | ||||||
|       </dl> |  | ||||||
|       <dl> |  | ||||||
|         <dt>On Read the Docs</dt> |  | ||||||
|           <dd> |  | ||||||
|             <a href="//{{ PRODUCTION_DOMAIN }}/projects/{{ slug }}/?fromdocs={{ slug }}">Project Home</a> |  | ||||||
|           </dd> |  | ||||||
|           <dd> |  | ||||||
|             <a href="//{{ PRODUCTION_DOMAIN }}/builds/{{ slug }}/?fromdocs={{ slug }}">Builds</a> |  | ||||||
|           </dd> |  | ||||||
|       </dl> |  | ||||||
|       <hr/> |  | ||||||
|       Free document hosting provided by <a href="http://www.readthedocs.org">Read the Docs</a>. |  | ||||||
|  |  | ||||||
|     </div> |  | ||||||
|   </div> |  | ||||||
| {% endif %} |  | ||||||
|  |  | ||||||
| @@ -13,6 +13,7 @@ Documents | |||||||
|  |  | ||||||
| .. autoclass:: mongoengine.Document | .. autoclass:: mongoengine.Document | ||||||
|    :members: |    :members: | ||||||
|  |    :inherited-members: | ||||||
|  |  | ||||||
|    .. attribute:: objects |    .. attribute:: objects | ||||||
|  |  | ||||||
| @@ -21,19 +22,25 @@ Documents | |||||||
|  |  | ||||||
| .. autoclass:: mongoengine.EmbeddedDocument | .. autoclass:: mongoengine.EmbeddedDocument | ||||||
|    :members: |    :members: | ||||||
|  |    :inherited-members: | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.DynamicDocument | .. autoclass:: mongoengine.DynamicDocument | ||||||
|    :members: |    :members: | ||||||
|  |    :inherited-members: | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.DynamicEmbeddedDocument | .. autoclass:: mongoengine.DynamicEmbeddedDocument | ||||||
|    :members: |    :members: | ||||||
|  |    :inherited-members: | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.document.MapReduceDocument | .. autoclass:: mongoengine.document.MapReduceDocument | ||||||
|   :members: |    :members: | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.ValidationError | .. autoclass:: mongoengine.ValidationError | ||||||
|   :members: |   :members: | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.FieldDoesNotExist | ||||||
|  |  | ||||||
|  |  | ||||||
| Context Managers | Context Managers | ||||||
| ================ | ================ | ||||||
|  |  | ||||||
| @@ -84,7 +91,9 @@ Fields | |||||||
| .. autoclass:: mongoengine.fields.DictField | .. autoclass:: mongoengine.fields.DictField | ||||||
| .. autoclass:: mongoengine.fields.MapField | .. autoclass:: mongoengine.fields.MapField | ||||||
| .. autoclass:: mongoengine.fields.ReferenceField | .. autoclass:: mongoengine.fields.ReferenceField | ||||||
|  | .. autoclass:: mongoengine.fields.LazyReferenceField | ||||||
| .. autoclass:: mongoengine.fields.GenericReferenceField | .. autoclass:: mongoengine.fields.GenericReferenceField | ||||||
|  | .. autoclass:: mongoengine.fields.GenericLazyReferenceField | ||||||
| .. autoclass:: mongoengine.fields.CachedReferenceField | .. autoclass:: mongoengine.fields.CachedReferenceField | ||||||
| .. autoclass:: mongoengine.fields.BinaryField | .. autoclass:: mongoengine.fields.BinaryField | ||||||
| .. autoclass:: mongoengine.fields.FileField | .. autoclass:: mongoengine.fields.FileField | ||||||
|   | |||||||
| @@ -1,15 +1,373 @@ | |||||||
|  |  | ||||||
|  |  | ||||||
| ========= | ========= | ||||||
| Changelog | Changelog | ||||||
| ========= | ========= | ||||||
|  |  | ||||||
|  | Development | ||||||
|  | =========== | ||||||
|  | - (Fill this out as you fix issues and develop your features). | ||||||
|  |  | ||||||
| Changes in 0.9.X - DEV | Changes in 0.23.1 | ||||||
| ====================== | ================= | ||||||
|  | - Bug fix: ignore LazyReferenceFields when clearing _changed_fields #2484 | ||||||
|  | - Improve connection doc #2481 | ||||||
|  |  | ||||||
|  | Changes in 0.23.0 | ||||||
|  | ================= | ||||||
|  | - Bugfix: manually setting SequenceField in DynamicDocument doesn't increment the counter #2471 | ||||||
|  | - Add MongoDB 4.2 and 4.4 to CI | ||||||
|  | - Add support for allowDiskUse on querysets #2468 | ||||||
|  |  | ||||||
|  | Changes in 0.22.1 | ||||||
|  | ================= | ||||||
|  | - Declare that Py3.5 is not supported in package metadata #2449 | ||||||
|  | - Moved CI from Travis to Github-Actions | ||||||
|  |  | ||||||
|  | Changes in 0.22.0 | ||||||
|  | ================= | ||||||
|  | - Fix LazyReferenceField dereferencing in embedded documents #2426 | ||||||
|  | - Fix an issue with the recent use of Cursor.__spec in .count() that was interfering with mongomock #2425 | ||||||
|  | - Drop support for Python 3.5 by introducing f-strings in the codebase | ||||||
|  |  | ||||||
|  | Changes in 0.21.0 | ||||||
|  | ================= | ||||||
|  | - Bug fix in DynamicDocument, which was not parsing known fields in the constructor the way Document does #2412 | ||||||
|  | - When using pymongo >= 3.7, make use of Collection.count_documents instead of Collection.count | ||||||
|  |     and Cursor.count that got deprecated in pymongo >= 3.7. | ||||||
|  |     This may have a negative impact on the performance of count; see Issue #2219 | ||||||
|  | - Fix a bug that made the queryset drop the read_preference after clone(). | ||||||
|  | - Remove Py3.5 from CI as it reached EOL and add Python 3.9 | ||||||
|  | - Fix some issues related to the db_field/field conflict in the constructor #2414 | ||||||
|  | - BREAKING CHANGE: Fix the behavior of Doc.objects.limit(0) which should return all documents (similar to mongodb) #2311 | ||||||
|  | - Bug fix in ListField: when updating the first item, the whole list was saved instead of | ||||||
|  |     just the first item (as is usually done when updating a single item of the list) #2392 | ||||||
|  | - Add EnumField: ``mongoengine.fields.EnumField`` (a usage sketch follows this list) | ||||||
|  | - Refactoring - Remove useless code related to Document.__only_fields and Queryset.only_fields | ||||||
|  | - Fix query transformation regarding special operators #2365 | ||||||
|  | - Bug Fix: Document.save() fails when shard_key is not _id #2154 | ||||||
|  |  | ||||||
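A minimal usage sketch of the new ``EnumField`` mentioned above; the ``Status`` enum, ``Task`` document and database name are illustrative, not part of the release itself::

    from enum import Enum

    from mongoengine import Document, EnumField, connect

    class Status(Enum):
        NEW = "new"
        DONE = "done"

    class Task(Document):
        # Stores the enum's value ("new"/"done") and validates input against Status
        status = EnumField(Status, default=Status.NEW)

    connect("example_db")
    Task(status=Status.DONE).save()
    Task.objects(status=Status.DONE).count()  # querying accepts enum members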
|  | Changes in 0.20.0 | ||||||
|  | ================= | ||||||
|  | - ATTENTION: Drop support for Python2 | ||||||
|  | - Add Mongo 4.0 to Travis | ||||||
|  | - Fix error when setting a string as a ComplexDateTimeField #2253 | ||||||
|  | - Bump development Status classifier to Production/Stable #2232 | ||||||
|  | - Improve Queryset.get to avoid confusing MultipleObjectsReturned message in case multiple matches are found #630 | ||||||
|  | - Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264 | ||||||
|  | - Add support for the `elemMatch` projection operator in .fields() (e.g. BlogPost.objects.fields(elemMatch__comments="test")) #2267 | ||||||
|  | - DictField validate failed without default connection (bug introduced in 0.19.0) #2239 | ||||||
|  | - Remove methods that were deprecated years ago: | ||||||
|  |     - name parameter in Field constructor e.g `StringField(name="...")`, was replaced by db_field | ||||||
|  |     - Queryset.slave_okay() was deprecated since pymongo3 | ||||||
|  |     - dropDups was dropped with MongoDB3 | ||||||
|  |     - ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes``, the right method to use is ``Document.ensure_indexes`` | ||||||
|  | - Added pre-commit for development/CI #2212 | ||||||
|  | - Renamed requirements-lint.txt to requirements-dev.txt #2212 | ||||||
|  | - Support for setting ReadConcern #2255 | ||||||
|  |  | ||||||
|  | Changes in 0.19.1 | ||||||
|  | ================= | ||||||
|  | - Tests require Pillow < 7.0.0 as it dropped Python2 support | ||||||
|  | - DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of | ||||||
|  |     pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079 | ||||||
|  |  | ||||||
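A before/after sketch of the ``QuerySet.aggregate`` change described above; the ``Person`` document, pipeline and database name are placeholders::

    from mongoengine import Document, IntField, connect

    class Person(Document):
        age = IntField()

    connect("example_db")

    pipeline = [
        {"$match": {"age": {"$gte": 18}}},
        {"$group": {"_id": None, "total": {"$sum": 1}}},
    ]

    # Before 0.19, stages were passed unpacked:
    #     Person.objects.aggregate(*pipeline)
    # From 0.19 on, pass the pipeline list itself, mirroring pymongo.Collection.aggregate:
    result = list(Person.objects.aggregate(pipeline))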
|  | Changes in 0.19.0 | ||||||
|  | ================= | ||||||
|  | - BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. #2112 | ||||||
|  |     - Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``. | ||||||
|  |     - Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``. | ||||||
|  |     - This change also renames the private ``QuerySet._initial_query`` attribute to ``_cls_query``. | ||||||
|  | - BREAKING CHANGE: Removed the deprecated ``format`` param from ``QuerySet.explain``. #2113 | ||||||
|  | - BREAKING CHANGE: Renamed ``MongoEngineConnectionError`` to ``ConnectionFailure``. #2111 | ||||||
|  |     - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it. | ||||||
|  | - BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103 | ||||||
|  |     - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required. | ||||||
|  | - BREAKING CHANGE: A ``LazyReferenceField`` is now stored in the ``_data`` field of its parent as a ``DBRef``, ``Document``, or ``EmbeddedDocument`` (``ObjectId`` is no longer allowed). #2182 | ||||||
|  | - DEPRECATION: ``Q.empty`` & ``QNode.empty`` are marked as deprecated and will be removed in a next version of MongoEngine. #2210 | ||||||
|  |     - Added ability to check if Q or QNode are empty by casting them to bool. | ||||||
|  |     - Instead of ``Q(name="John").empty`` use ``not Q(name="John")``. | ||||||
|  | - Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125 | ||||||
|  | - Only set no_cursor_timeout when requested (fixes an incompatibility with MongoDB 4.2) #2148 | ||||||
|  | - ``ListField`` now accepts an optional ``max_length`` parameter. #2110 | ||||||
|  | - Improve error message related to InvalidDocumentError #2180 | ||||||
|  | - Added BulkWriteError to replace NotUniqueError which was misleading in bulk write insert #2152 | ||||||
|  | - Added ability to compare Q and Q operations #2204 | ||||||
|  | - Added ability to use a db alias on query_counter #2194 | ||||||
|  | - Added ability to specify collations for querysets with ``Doc.objects.collation`` #2024 | ||||||
|  | - Fix updates of a list field by negative index #2094 | ||||||
|  | - Switch from nosetest to pytest as test runner #2114 | ||||||
|  | - The codebase is now formatted using ``black``. #2109 | ||||||
|  | - Documentation improvements: | ||||||
|  |     - Documented how `pymongo.monitoring` can be used to log all queries issued by MongoEngine to the driver. | ||||||
|  |  | ||||||
|  | Changes in 0.18.2 | ||||||
|  | ================= | ||||||
|  | - Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the ``SequenceField``. #2097 | ||||||
|  | - Various code clarity and documentation improvements. | ||||||
|  |  | ||||||
|  | Changes in 0.18.1 | ||||||
|  | ================= | ||||||
|  | - Fix a bug introduced in 0.18.0 which was causing ``Document.save`` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082 | ||||||
|  | - Add Python 3.7 to Travis CI. #2058 | ||||||
|  |  | ||||||
|  | Changes in 0.18.0 | ||||||
|  | ================= | ||||||
|  | - Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2. | ||||||
|  | - MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6. #2017 #2066 | ||||||
|  | - Improve performance by avoiding a call to ``to_mongo`` in ``Document.save``. #2049 | ||||||
|  | - Connection/disconnection improvements: | ||||||
|  |     - Expose ``mongoengine.connection.disconnect`` and ``mongoengine.connection.disconnect_all``. | ||||||
|  |     - Fix disconnecting. #566 #1599 #605 #607 #1213 #565 | ||||||
|  |     - Improve documentation of ``connect``/``disconnect``. | ||||||
|  |     - Fix issue when using multiple connections to the same mongo with different credentials. #2047 | ||||||
|  |     - ``connect`` fails immediately when db name contains invalid characters. #2031 #1718 | ||||||
|  | - Fix the default write concern of ``Document.save`` that was overwriting the connection write concern. #568 | ||||||
|  | - Fix querying on ``List(EmbeddedDocument)`` subclasses fields. #1961 #1492 | ||||||
|  | - Fix querying on ``(Generic)EmbeddedDocument`` subclasses fields. #475 | ||||||
|  | - Fix ``QuerySet.aggregate`` so that it takes limit and skip value into account. #2029 | ||||||
|  | - Generate unique indices for ``SortedListField`` and ``EmbeddedDocumentListFields``. #2020 | ||||||
|  | - BREAKING CHANGE: Changed the behavior of a custom field validator (i.e. the ``validation`` parameter of a ``Field``). It is now expected to raise a ``ValidationError`` instead of returning ``True``/``False`` (see the sketch after this list). #2050 | ||||||
|  | - BREAKING CHANGES (associated with connection/disconnection fixes): | ||||||
|  |     - Calling ``connect`` twice with the same alias and different parameters will raise an error (you should call ``disconnect`` first). | ||||||
|  |     - ``disconnect`` now clears ``mongoengine.connection._connection_settings``. | ||||||
|  |     - ``disconnect`` now clears the cached attribute ``Document._collection``. | ||||||
|  | - BREAKING CHANGE: ``EmbeddedDocument.save`` & ``.reload`` no longer exist. #1552 | ||||||
|  |  | ||||||
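A sketch of the custom field validator behaviour described in the list above; the ``Page`` document, ``slug`` field and error message are made up::

    from mongoengine import Document, StringField, ValidationError

    def validate_slug(value):
        # 0.18+: raise ValidationError on failure instead of returning True/False
        if " " in value:
            raise ValidationError("slugs must not contain spaces")

    class Page(Document):
        slug = StringField(validation=validate_slug)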
|  | Changes in 0.17.0 | ||||||
|  | ================= | ||||||
|  | - POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (``_cls``, ``_id``) when using ``QuerySet.as_pymongo``. #1976 | ||||||
|  | - Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time. #1995 | ||||||
|  | - DEPRECATION: ``EmbeddedDocument.save`` & ``.reload`` are marked as deprecated and will be removed in a next version of MongoEngine. #1552 | ||||||
|  | - Fix ``QuerySet.only`` working improperly after using ``QuerySet.count`` of the same instance of a ``QuerySet``. | ||||||
|  | - Fix ``batch_size`` that was not copied when cloning a ``QuerySet`` object. #2011 | ||||||
|  | - Fix ``InvalidStringData`` error when using ``modify`` on a ``BinaryField``. #1127 | ||||||
|  | - Fix test suite and CI to support MongoDB v3.4. #1445 | ||||||
|  | - Fix reference fields querying the database on each access if value contains orphan DBRefs. | ||||||
|  |  | ||||||
|  | Changes in 0.16.3 | ||||||
|  | ================= | ||||||
|  | - Fix ``$push`` with the ``$position`` operator not working with lists in embedded documents. #1965 | ||||||
|  |  | ||||||
|  | Changes in 0.16.2 | ||||||
|  | ================= | ||||||
|  | - Fix ``Document.save`` that fails when called with ``write_concern=None`` (regression of 0.16.1). #1958 | ||||||
|  |  | ||||||
|  | Changes in 0.16.1 | ||||||
|  | ================= | ||||||
|  | - Fix ``_cls`` that is not set properly in the ``Document`` constructor (regression). #1950 | ||||||
|  | - Fix a bug in the ``_delta`` method - update of a ``ListField`` depends on an unrelated dynamic field update. #1733 | ||||||
|  | - Remove PyMongo's deprecated ``Collection.save`` method and use ``Collection.insert_one`` instead. #1899 | ||||||
|  |  | ||||||
|  | Changes in 0.16.0 | ||||||
|  | ================= | ||||||
|  | - POTENTIAL BREAKING CHANGES: | ||||||
|  |     - ``EmbeddedDocumentField`` will no longer accept references to Document classes in its constructor. #1661 | ||||||
|  |     - Get rid of the ``basecls`` parameter from the ``DictField`` constructor (dead code). #1876 | ||||||
|  |     - Default value of the ``ComplexDateTime`` field is now ``None`` (and no longer the current datetime). #1368 | ||||||
|  | - Fix an unhashable ``TypeError`` when referencing a ``Document`` with a compound key in an ``EmbeddedDocument``. #1685 | ||||||
|  | - Fix a bug where an ``EmbeddedDocument`` with the same id as its parent would not be tracked for changes. #1768 | ||||||
|  | - Fix the fact that a bulk ``QuerySet.insert`` was not setting primary keys of inserted document instances. #1919 | ||||||
|  | - Fix a bug when referencing an abstract class in a ``ReferenceField``. #1920 | ||||||
|  | - Allow modifications to the document made in ``pre_save_post_validation`` to be taken into account. #1202 | ||||||
|  | - Replace MongoDB v2.4 tests in Travis CI with MongoDB v3.2. #1903 | ||||||
|  | - Fix side effects of using ``QuerySet.no_dereference`` on other documents. #1677 | ||||||
|  | - Fix ``TypeError`` when using lazy Django translation objects as translated choices. #1879 | ||||||
|  | - Improve Python 2-3 codebase compatibility. #1889 | ||||||
|  | - Fix support for changing the default value of the ``ComplexDateTime`` field. #1368 | ||||||
|  | - Improve error message in case an ``EmbeddedDocumentListField`` receives an ``EmbeddedDocument`` instance instead of a list. #1877 | ||||||
|  | - Fix the ``inc`` and ``dec`` operators for the ``DecimalField``. #1517 #1320 | ||||||
|  | - Ignore ``killcursors`` queries in ``query_counter`` context manager. #1869 | ||||||
|  | - Fix the fact that ``query_counter`` was modifying the initial profiling level in case it was != 0. #1870 | ||||||
|  | - Repair the ``no_sub_classes`` context manager + fix the fact that it was swallowing exceptions. #1865 | ||||||
|  | - Fix index creation error that was swallowed by ``hasattr`` under Python 2. #1688 | ||||||
|  | - ``QuerySet.limit`` function behaviour: Passing 0 as parameter will return all the documents in the cursor. #1611 | ||||||
|  | - Bulk insert updates the IDs of the input documents instances. #1919 | ||||||
|  | - Fix a harmless bug related to ``GenericReferenceField`` where modifications in the generic-referenced document were tracked in the parent. #1934 | ||||||
|  | - Improve validation of the ``BinaryField``. #273 | ||||||
|  | - Implement lazy regex compiling in Field classes to improve ``import mongoengine`` performance. #1806 | ||||||
|  | - Update ``GridFSProxy.__str__``  so that it would always print both the filename and grid_id. #710 | ||||||
|  | - Add ``__repr__`` to ``Q`` and ``QCombination`` classes. #1843 | ||||||
|  | - Fix bug in the ``BaseList.__iter__`` operator (was occurring when modifying a BaseList while iterating over it). #1676 | ||||||
|  | - Add a ``DateField``. #513 | ||||||
|  | - Various improvements to the documentation. | ||||||
|  | - Various code quality improvements. | ||||||
|  |  | ||||||
|  | Changes in 0.15.3 | ||||||
|  | ================= | ||||||
|  | - ``Queryset.update/update_one`` methods now return an ``UpdateResult`` when ``full_result=True`` is provided and no longer a dict. #1491 | ||||||
|  | - Improve ``LazyReferenceField`` and ``GenericLazyReferenceField`` with nested fields. #1704 | ||||||
|  | - Fix the subfield resolve error in ``generic_embedded_document`` query. #1651 #1652 | ||||||
|  | - Use each modifier only with ``$position``. #1673 #1675 | ||||||
|  | - Fix validation errors in the ``GenericEmbeddedDocumentField``. #1067 | ||||||
|  | - Update cached fields when a ``fields`` argument is given. #1712 | ||||||
|  | - Add a ``db`` parameter to ``register_connection`` for compatibility with ``connect``. | ||||||
|  | - Use PyMongo v3.x's ``insert_one`` and ``insert_many`` in ``Document.insert``. #1491 | ||||||
|  | - Use PyMongo v3.x's ``update_one`` and ``update_many`` in ``Document.update`` and ``QuerySet.update``. #1491 | ||||||
|  | - Fix how ``reload(fields)`` affects changed fields. #1371 | ||||||
|  | - Fix a bug where the read-only access to the database fails when trying to create indexes. #1338 | ||||||
|  |  | ||||||
|  | Changes in 0.15.0 | ||||||
|  | ================= | ||||||
|  | - Add ``LazyReferenceField`` and ``GenericLazyReferenceField``. #1230 | ||||||
|  |  | ||||||
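A brief sketch of how the new lazy reference fields behave; the ``Author``/``Book`` documents and database name are made up::

    from mongoengine import Document, LazyReferenceField, StringField, connect

    connect("example_db")

    class Author(Document):
        name = StringField()

    class Book(Document):
        # Stored as a lazy reference; the Author document is only loaded on fetch()
        author = LazyReferenceField(Author)

    book = Book.objects.first()
    book.author.pk       # primary key available without an extra database round trip
    book.author.fetch()  # dereferences and returns the Author document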
|  | Changes in 0.14.1 | ||||||
|  | ================= | ||||||
|  | - Remove ``SemiStrictDict`` and start using a regular dict for ``BaseDocument._data``. #1630 | ||||||
|  | - Add support for the ``$position`` param in the ``$push`` operator. #1566 | ||||||
|  | - Fix ``DateTimeField`` interpreting an empty string as today. #1533 | ||||||
|  | - Add a missing ``__ne__`` method to the ``GridFSProxy`` class. #1632 | ||||||
|  | - Fix ``BaseQuerySet._fields_to_db_fields``. #1553 | ||||||
|  |  | ||||||
|  | Changes in 0.14.0 | ||||||
|  | ================= | ||||||
|  | - BREAKING CHANGE: Remove the ``coerce_types`` param from ``QuerySet.as_pymongo``. #1549 | ||||||
|  | - POTENTIAL BREAKING CHANGE: Make ``EmbeddedDocument`` not hashable by default. #1528 | ||||||
|  | - Improve code quality. #1531, #1540, #1541, #1547 | ||||||
|  |  | ||||||
|  | Changes in 0.13.0 | ||||||
|  | ================= | ||||||
|  | - POTENTIAL BREAKING CHANGE: Added Unicode support to the ``EmailField``, see docs/upgrade.rst for details. | ||||||
|  |  | ||||||
|  | Changes in 0.12.0 | ||||||
|  | ================= | ||||||
|  | - POTENTIAL BREAKING CHANGE: Fix ``limit``/``skip``/``hint``/``batch_size`` chaining. #1476 | ||||||
|  | - POTENTIAL BREAKING CHANGE: Change a public ``QuerySet.clone_into`` method to a private ``QuerySet._clone_into``. #1476 | ||||||
|  | - Fix the way ``Document.objects.create`` works with duplicate IDs. #1485 | ||||||
|  | - Fix connecting to a replica set with PyMongo 2.x. #1436 | ||||||
|  | - Fix using sets in field choices. #1481 | ||||||
|  | - Fix deleting items from a ``ListField``. #1318 | ||||||
|  | - Fix an obscure error message when filtering by ``field__in=non_iterable``. #1237 | ||||||
|  | - Fix behavior of a ``dec`` update operator. #1450 | ||||||
|  | - Add a ``rename`` update operator. #1454 | ||||||
|  | - Add validation for the ``db_field`` parameter. #1448 | ||||||
|  | - Fix the error message displayed when querying an ``EmbeddedDocumentField`` by an invalid value. #1440 | ||||||
|  | - Fix the error message displayed when validating Unicode URLs. #1486 | ||||||
|  | - Raise an error when trying to save an abstract document. #1449 | ||||||
|  |  | ||||||
|  | Changes in 0.11.0 | ||||||
|  | ================= | ||||||
|  | - BREAKING CHANGE: Rename ``ConnectionError`` to ``MongoEngineConnectionError`` since the former is a built-in exception name in Python v3.x. #1428 | ||||||
|  | - BREAKING CHANGE: Drop Python v2.6 support. #1428 | ||||||
|  | - BREAKING CHANGE: ``from mongoengine.base import ErrorClass`` won't work anymore for any error from ``mongoengine.errors`` (e.g. ``ValidationError``). Use ``from mongoengine.errors import ErrorClass`` instead. #1428 | ||||||
|  | - BREAKING CHANGE: Accessing a broken reference will raise a ``DoesNotExist`` error. In the past it used to return ``None``. #1334 | ||||||
|  | - Fix absent rounding for the ``DecimalField`` when ``force_string`` is set. #1103 | ||||||
|  |  | ||||||
|  | Changes in 0.10.8 | ||||||
|  | ================= | ||||||
|  | - Add support for ``QuerySet.batch_size``. (#1426) | ||||||
|  | - Fix a query set iteration within an iteration. #1427 | ||||||
|  | - Fix an issue where specifying a MongoDB URI host would override more information than it should. #1421 | ||||||
|  | - Add an ability to filter the ``GenericReferenceField`` by an ``ObjectId`` and a ``DBRef``. #1425 | ||||||
|  | - Fix cascading deletes for models with a custom primary key field. #1247 | ||||||
|  | - Add ability to specify an authentication mechanism (e.g. X.509). #1333 | ||||||
|  | - Add support for falsy primary keys (e.g. ``doc.pk = 0``). #1354 | ||||||
|  | - Fix ``QuerySet.sum/average`` for fields w/ an explicit ``db_field``. #1417 | ||||||
|  | - Fix filtering by ``embedded_doc=None``. #1422 | ||||||
|  | - Add support for ``Cursor.comment``. #1420 | ||||||
|  | - Fix ``doc.get_<field>_display`` methods. #1419 | ||||||
|  | - Fix the ``__repr__`` method of the ``StrictDict`` #1424 | ||||||
|  | - Add a deprecation warning for Python v2.6. | ||||||
|  |  | ||||||
|  | Changes in 0.10.7 | ||||||
|  | ================= | ||||||
|  | - Drop Python 3.2 support #1390 | ||||||
|  | - Fix a bug where a dynamic doc has an index inside a dict field. #1278 | ||||||
|  | - Fix: ``ListField`` minus index assignment does not work. #1128 | ||||||
|  | - Fix cascade delete mixing among collections. #1224 | ||||||
|  | - Add ``signal_kwargs`` argument to ``Document.save``, ``Document.delete`` and ``BaseQuerySet.insert`` to be passed to signals calls. #1206 | ||||||
|  | - Raise ``OperationError`` when trying to do a ``drop_collection`` on document with no collection set. | ||||||
|  | - Fix a bug where a count on ``ListField`` of ``EmbeddedDocumentField`` fails. #1187 | ||||||
|  | - Fix ``LongField`` values stored as int32 in Python 3. #1253 | ||||||
|  | - ``MapField`` now handles unicode keys correctly. #1267 | ||||||
|  | - ``ListField`` now handles negative indices correctly. #1270 | ||||||
|  | - Fix an ``AttributeError`` when initializing an ``EmbeddedDocument`` with positional args. #681 | ||||||
|  | - Fix a ``no_cursor_timeout`` error with PyMongo v3.x. #1304 | ||||||
|  | - Replace map-reduce based ``QuerySet.sum/average`` with aggregation-based implementations. #1336 | ||||||
|  | - Fix support for ``__`` to escape field names that match operators' names in ``update``. #1351 | ||||||
|  | - Fix ``BaseDocument._mark_as_changed``. #1369 | ||||||
|  | - Add support for pickling ``QuerySet`` instances. #1397 | ||||||
|  | - Fix connecting to a list of hosts. #1389 | ||||||
|  | - Fix a bug where accessing broken references wouldn't raise a ``DoesNotExist`` error. #1334 | ||||||
|  | - Fix not being able to specify ``use_db_field=False`` on ``ListField(EmbeddedDocumentField)`` instances. #1218 | ||||||
|  | - Improvements to the dictionary field's docs. #1383 | ||||||
|  |  | ||||||
|  | Changes in 0.10.6 | ||||||
|  | ================= | ||||||
|  | - Add support for mocking MongoEngine based on mongomock. #1151 | ||||||
|  | - Fix not being able to run tests on Windows. #1153 | ||||||
|  | - Allow creation of sparse compound indexes. #1114 | ||||||
|  |  | ||||||
|  | Changes in 0.10.5 | ||||||
|  | ================= | ||||||
|  | - Fix for reloading of strict with special fields. #1156 | ||||||
|  |  | ||||||
|  | Changes in 0.10.4 | ||||||
|  | ================= | ||||||
|  | - ``SaveConditionError`` is now importable from the top level package. #1165 | ||||||
|  | - Add a ``QuerySet.upsert_one`` method. #1157 | ||||||
|  |  | ||||||
|  | Changes in 0.10.3 | ||||||
|  | ================= | ||||||
|  | - Fix ``read_preference`` (it had chaining issues with PyMongo v2.x and it didn't work at all with PyMongo v3.x). #1042 | ||||||
|  |  | ||||||
|  | Changes in 0.10.2 | ||||||
|  | ================= | ||||||
|  | - Allow shard key to point to a field in an embedded document. #551 | ||||||
|  | - Allow arbitrary metadata in fields. #1129 | ||||||
|  | - ReferenceFields now support abstract document types. #837 | ||||||
|  |  | ||||||
|  | Changes in 0.10.1 | ||||||
|  | ================= | ||||||
|  | - Fix infinite recursion with cascade delete rules under specific conditions. #1046 | ||||||
|  | - Fix ``CachedReferenceField`` bug when loading cached docs as ``DBRef`` but failing to save them. #1047 | ||||||
|  | - Fix ignored chained options. #842 | ||||||
|  | - ``Document.save``'s ``save_condition`` error raises a ``SaveConditionError`` exception. #1070 | ||||||
|  | - Fix ``Document.reload`` for the ``DynamicDocument``. #1050 | ||||||
|  | - ``StrictDict`` & ``SemiStrictDict`` are shadowed at init time. #1105 | ||||||
|  | - Fix ``ListField`` negative index assignment not working. #1119 | ||||||
|  | - Remove code that marks a field as changed when the field has a default value but does not exist in the database. #1126 | ||||||
|  | - Remove test dependencies (nose and rednose) from install dependencies. #1079 | ||||||
|  | - Recursively build a query when using the ``elemMatch`` operator. #1130 | ||||||
|  | - Fix instance back references for lists of embedded documents. #1131 | ||||||
|  |  | ||||||
|  | Changes in 0.10.0 | ||||||
|  | ================= | ||||||
|  | - Django support was removed and will be available as a separate extension. #958 | ||||||
|  | - Allow loading undeclared fields with meta attribute 'strict': False #957 | ||||||
|  | - Support for PyMongo 3+ #946 | ||||||
|  | - Removed get_or_create() deprecated since 0.8.0. #300 | ||||||
|  | - Improve Document._created status when switching collection and db #1020 | ||||||
|  | - Queryset update doesn't go through field validation #453 | ||||||
|  | - Added support for specifying authentication source as option ``authSource`` in URI. #967 | ||||||
|  | - Fixed mark_as_changed to handle higher/lower level fields changed. #927 | ||||||
|  | - ListField of embedded docs doesn't set the _instance attribute when iterating over it #914 | ||||||
|  | - Support += and *= for ListField #595 | ||||||
|  | - Use sets for populating dbrefs to dereference | ||||||
|  | - Fixed unpickled documents replacing the global field's list. #888 | ||||||
|  | - Fixed storage of microseconds in ComplexDateTimeField and unused separator option. #910 | ||||||
|  | - Don't send a "cls" option to ensureIndex (related to https://jira.mongodb.org/browse/SERVER-769) | ||||||
|  | - Fix for updating sorting in SortedListField. #978 | ||||||
|  | - Added __ support to escape field name in fields lookup keywords that match operators names #949 | ||||||
|  | - Fix for issue where FileField deletion did not free space in GridFS. | ||||||
|  | - No_dereference() not respected on embedded docs containing reference. #517 | ||||||
|  | - Document save raises an exception if save_condition fails #1005 | ||||||
|  | - Fixed some internal _id handling issues. #961 | ||||||
|  | - Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652 | ||||||
|  | - Capped collection multiple of 256. #1011 | ||||||
|  | - Added ``BaseQuerySet.aggregate_sum`` and ``BaseQuerySet.aggregate_average`` methods. | ||||||
|  | - Fix for delete with write_concern {'w': 0}. #1008 | ||||||
|  | - Allow dynamic lookup for more than two parts. #882 | ||||||
|  | - Added support for min_distance on geo queries. #831 | ||||||
|  | - Allow adding custom metadata to fields #705 | ||||||
|  |  | ||||||
|  | Changes in 0.9.0 | ||||||
|  | ================ | ||||||
| - Update FileField when creating a new file #714 | - Update FileField when creating a new file #714 | ||||||
| - Added `EmbeddedDocumentListField` for Lists of Embedded Documents. #826 | - Added ``EmbeddedDocumentListField`` for Lists of Embedded Documents. #826 | ||||||
| - ComplexDateTimeField should fall back to None when null=True #864 | - ComplexDateTimeField should fall back to None when null=True #864 | ||||||
| - Request Support for $min, $max Field update operators #863 | - Request Support for $min, $max Field update operators #863 | ||||||
| - `BaseDict` does not follow `setdefault` #866 | - ``BaseDict`` does not follow ``setdefault`` #866 | ||||||
| - Add support for $type operator # 766 | - Add support for $type operator # 766 | ||||||
| - Fix tests for pymongo 2.8+ #877 | - Fix tests for pymongo 2.8+ #877 | ||||||
| - No module named 'django.utils.importlib' (Django dev) #872 | - No module named 'django.utils.importlib' (Django dev) #872 | ||||||
| @@ -30,13 +388,13 @@ Changes in 0.9.X - DEV | |||||||
| - Stop ensure_indexes running on secondaries unless the connection is through mongos #746 | - Stop ensure_indexes running on secondaries unless the connection is through mongos #746 | ||||||
| - Not overriding default values when loading a subset of fields #399 | - Not overriding default values when loading a subset of fields #399 | ||||||
| - Saving document doesn't create new fields in existing collection #620 | - Saving document doesn't create new fields in existing collection #620 | ||||||
| - Added `Queryset.aggregate` wrapper to aggregation framework #703 | - Added ``Queryset.aggregate`` wrapper to aggregation framework #703 | ||||||
| - Added support to show original model fields on to_json calls instead of db_field #697 | - Added support to show original model fields on to_json calls instead of db_field #697 | ||||||
| - Added Queryset.search_text for text index searches #700 | - Added Queryset.search_text for text index searches #700 | ||||||
| - Fixed tests for Django 1.7 #696 | - Fixed tests for Django 1.7 #696 | ||||||
| - Follow ReferenceFields in EmbeddedDocuments with select_related #690 | - Follow ReferenceFields in EmbeddedDocuments with select_related #690 | ||||||
| - Added preliminary support for text indexes #680 | - Added preliminary support for text indexes #680 | ||||||
| - Added `elemMatch` operator as well - `match` is too obscure #653 | - Added ``elemMatch`` operator as well - ``match`` is too obscure #653 | ||||||
| - Added support for progressive JPEG #486 #548 | - Added support for progressive JPEG #486 #548 | ||||||
| - Allow strings to be used in index creation #675 | - Allow strings to be used in index creation #675 | ||||||
| - Fixed EmbeddedDoc weakref proxy issue #592 | - Fixed EmbeddedDoc weakref proxy issue #592 | ||||||
| @@ -72,12 +430,13 @@ Changes in 0.9.X - DEV | |||||||
| - Increase email field length to accommodate new TLDs #726 | - Increase email field length to accommodate new TLDs #726 | ||||||
| - index_cls is ignored when deciding to set _cls as index prefix #733 | - index_cls is ignored when deciding to set _cls as index prefix #733 | ||||||
| - Make 'db' argument to connection optional #737 | - Make 'db' argument to connection optional #737 | ||||||
| - Allow atomic update for the entire `DictField` #742 | - Allow atomic update for the entire ``DictField`` #742 | ||||||
| - Added MultiPointField, MultiLineField, MultiPolygonField | - Added MultiPointField, MultiLineField, MultiPolygonField | ||||||
| - Fix multiple connections aliases being rewritten #748 | - Fix multiple connections aliases being rewritten #748 | ||||||
| - Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791 | - Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791 | ||||||
| - Make `in_bulk()` respect `no_dereference()` #775 | - Make ``in_bulk()`` respect ``no_dereference()`` #775 | ||||||
| - Handle None from model __str__; Fixes #753 #754 | - Handle None from model __str__; Fixes #753 #754 | ||||||
|  | - _get_changed_fields fix for embedded documents with id field. #925 | ||||||
|  |  | ||||||
| Changes in 0.8.7 | Changes in 0.8.7 | ||||||
| ================ | ================ | ||||||
| @@ -129,18 +488,15 @@ Changes in 0.8.4 | |||||||
|  |  | ||||||
| Changes in 0.8.3 | Changes in 0.8.3 | ||||||
| ================ | ================ | ||||||
| - Fixed EmbeddedDocuments with `id` also storing `_id` (#402) | - Fixed EmbeddedDocuments with ``id`` also storing ``_id`` (#402) | ||||||
| - Added get_proxy_object helper to filefields (#391) | - Added get_proxy_object helper to filefields (#391) | ||||||
| - Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365) | - Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365) | ||||||
| - Fixed sum and average mapreduce dot notation support (#375, #376, #393) | - Fixed sum and average mapreduce dot notation support (#375, #376, #393) | ||||||
| - Fixed as_pymongo to return the id (#386) | - Fixed as_pymongo to return the id (#386) | ||||||
| - Document.select_related() now respects `db_alias` (#377) | - Document.select_related() now respects ``db_alias`` (#377) | ||||||
| - Reload uses shard_key if applicable (#384) | - Reload uses shard_key if applicable (#384) | ||||||
| - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) | - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) | ||||||
|  | - Fixed pickling dynamic documents ``_dynamic_fields`` (#387) | ||||||
|   **Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3 |  | ||||||
|  |  | ||||||
| - Fixed pickling dynamic documents `_dynamic_fields` (#387) |  | ||||||
| - Fixed ListField setslice and delslice dirty tracking (#390) | - Fixed ListField setslice and delslice dirty tracking (#390) | ||||||
| - Added Django 1.5 PY3 support (#392) | - Added Django 1.5 PY3 support (#392) | ||||||
| - Added match ($elemMatch) support for EmbeddedDocuments (#379) | - Added match ($elemMatch) support for EmbeddedDocuments (#379) | ||||||
| @@ -181,7 +537,7 @@ Changes in 0.8.0 | |||||||
| ================ | ================ | ||||||
| - Fixed querying ReferenceField custom_id (#317) | - Fixed querying ReferenceField custom_id (#317) | ||||||
| - Fixed pickle issues with collections (#316) | - Fixed pickle issues with collections (#316) | ||||||
| - Added `get_next_value` preview for SequenceFields (#319) | - Added ``get_next_value`` preview for SequenceFields (#319) | ||||||
| - Added no_sub_classes context manager and queryset helper (#312) | - Added no_sub_classes context manager and queryset helper (#312) | ||||||
| - Querysets now utilise a local cache | - Querysets now utilise a local cache | ||||||
| - Changed __len__ behaviour in the queryset (#247, #311) | - Changed __len__ behaviour in the queryset (#247, #311) | ||||||
| @@ -210,7 +566,7 @@ Changes in 0.8.0 | |||||||
| - Updated connection to use MongoClient (#262, #274) | - Updated connection to use MongoClient (#262, #274) | ||||||
| - Fixed db_alias and inherited Documents (#143) | - Fixed db_alias and inherited Documents (#143) | ||||||
| - Documentation update for document errors (#124) | - Documentation update for document errors (#124) | ||||||
| - Deprecated `get_or_create` (#35) | - Deprecated ``get_or_create`` (#35) | ||||||
| - Updated inheritable objects created by upsert now contain _cls (#118) | - Updated inheritable objects created by upsert now contain _cls (#118) | ||||||
| - Added support for creating documents with embedded documents in a single operation (#6) | - Added support for creating documents with embedded documents in a single operation (#6) | ||||||
| - Added to_json and from_json to Document (#1) | - Added to_json and from_json to Document (#1) | ||||||
| @@ -331,7 +687,7 @@ Changes in 0.7.0 | |||||||
| - Fixed UnboundLocalError in composite index with pk field (#88) | - Fixed UnboundLocalError in composite index with pk field (#88) | ||||||
| - Updated ReferenceField's to optionally store ObjectId strings | - Updated ReferenceField's to optionally store ObjectId strings | ||||||
|   this will become the default in 0.8 (#89) |   this will become the default in 0.8 (#89) | ||||||
| - Added FutureWarning - save will default to `cascade=False` in 0.8 | - Added FutureWarning - save will default to ``cascade=False`` in 0.8 | ||||||
| - Added example of indexing embedded document fields (#75) | - Added example of indexing embedded document fields (#75) | ||||||
| - Fixed ImageField resizing when forcing size (#80) | - Fixed ImageField resizing when forcing size (#80) | ||||||
| - Add flexibility for fields handling bad data (#78) | - Add flexibility for fields handling bad data (#78) | ||||||
| @@ -427,7 +783,7 @@ Changes in 0.6.8 | |||||||
| ================ | ================ | ||||||
| - Fixed FileField losing reference when no default set | - Fixed FileField losing reference when no default set | ||||||
| - Removed possible race condition from FileField (grid_file) | - Removed possible race condition from FileField (grid_file) | ||||||
| - Added assignment to save, can now do: `b = MyDoc(**kwargs).save()` | - Added assignment to save, can now do: ``b = MyDoc(**kwargs).save()`` | ||||||
| - Added support for pull operations on nested EmbeddedDocuments | - Added support for pull operations on nested EmbeddedDocuments | ||||||
| - Added support for choices with GenericReferenceFields | - Added support for choices with GenericReferenceFields | ||||||
| - Added support for choices with GenericEmbeddedDocumentFields | - Added support for choices with GenericEmbeddedDocumentFields | ||||||
| @@ -442,7 +798,7 @@ Changes in 0.6.7 | |||||||
| - Fixed indexing on '_id' or 'pk' or 'id' | - Fixed indexing on '_id' or 'pk' or 'id' | ||||||
| - Invalid data from the DB now raises an InvalidDocumentError | - Invalid data from the DB now raises an InvalidDocumentError | ||||||
| - Cleaned up the Validation Error - docs and code | - Cleaned up the Validation Error - docs and code | ||||||
| - Added meta `auto_create_index` so you can disable index creation | - Added meta ``auto_create_index`` so you can disable index creation | ||||||
| - Added write concern options to inserts | - Added write concern options to inserts | ||||||
| - Fixed typo in meta for index options | - Fixed typo in meta for index options | ||||||
| - Bug fix Read preference now passed correctly | - Bug fix Read preference now passed correctly | ||||||
| @@ -483,7 +839,6 @@ Changes in 0.6.1 | |||||||
|  |  | ||||||
| Changes in 0.6 | Changes in 0.6 | ||||||
| ============== | ============== | ||||||
|  |  | ||||||
| - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7 | - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7 | ||||||
| - Added support for covered indexes when inheritance is off | - Added support for covered indexes when inheritance is off | ||||||
| - No longer always upsert on save for items with a '_id' | - No longer always upsert on save for items with a '_id' | ||||||
| @@ -708,7 +1063,6 @@ Changes in v0.1.3 | |||||||
|   querying takes place |   querying takes place | ||||||
| - A few minor bugfixes | - A few minor bugfixes | ||||||
|  |  | ||||||
|  |  | ||||||
| Changes in v0.1.2 | Changes in v0.1.2 | ||||||
| ================= | ================= | ||||||
| - Query values may be processed before being used in queries | - Query values may be processed before being used in queries | ||||||
| @@ -717,7 +1071,6 @@ Changes in v0.1.2 | |||||||
| - Added ``BooleanField`` | - Added ``BooleanField`` | ||||||
| - Added ``Document.reload()`` method | - Added ``Document.reload()`` method | ||||||
|  |  | ||||||
|  |  | ||||||
| Changes in v0.1.1 | Changes in v0.1.1 | ||||||
| ================= | ================= | ||||||
| - Documents may now use capped collections | - Documents may now use capped collections | ||||||
|   | |||||||
| @@ -1,66 +1,77 @@ | |||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  |  | ||||||
| connect('tumblelog') | connect("tumblelog") | ||||||
|  |  | ||||||
|  |  | ||||||
| class Comment(EmbeddedDocument): | class Comment(EmbeddedDocument): | ||||||
|     content = StringField() |     content = StringField() | ||||||
|     name = StringField(max_length=120) |     name = StringField(max_length=120) | ||||||
|  |  | ||||||
|  |  | ||||||
| class User(Document): | class User(Document): | ||||||
|     email = StringField(required=True) |     email = StringField(required=True) | ||||||
|     first_name = StringField(max_length=50) |     first_name = StringField(max_length=50) | ||||||
|     last_name = StringField(max_length=50) |     last_name = StringField(max_length=50) | ||||||
|  |  | ||||||
|  |  | ||||||
| class Post(Document): | class Post(Document): | ||||||
|     title = StringField(max_length=120, required=True) |     title = StringField(max_length=120, required=True) | ||||||
|     author = ReferenceField(User) |     author = ReferenceField(User) | ||||||
|     tags = ListField(StringField(max_length=30)) |     tags = ListField(StringField(max_length=30)) | ||||||
|     comments = ListField(EmbeddedDocumentField(Comment)) |     comments = ListField(EmbeddedDocumentField(Comment)) | ||||||
|  |  | ||||||
|  |     # allow_inheritance is required so TextPost/ImagePost/LinkPost below can subclass Post | ||||||
|  |     meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |  | ||||||
| class TextPost(Post): | class TextPost(Post): | ||||||
|     content = StringField() |     content = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
| class ImagePost(Post): | class ImagePost(Post): | ||||||
|     image_path = StringField() |     image_path = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
| class LinkPost(Post): | class LinkPost(Post): | ||||||
|     link_url = StringField() |     link_url = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
| Post.drop_collection() | Post.drop_collection() | ||||||
|  |  | ||||||
| john = User(email='jdoe@example.com', first_name='John', last_name='Doe') | john = User(email="jdoe@example.com", first_name="John", last_name="Doe") | ||||||
| john.save() | john.save() | ||||||
|  |  | ||||||
| post1 = TextPost(title='Fun with MongoEngine', author=john) | post1 = TextPost(title="Fun with MongoEngine", author=john) | ||||||
| post1.content = 'Took a look at MongoEngine today, looks pretty cool.' | post1.content = "Took a look at MongoEngine today, looks pretty cool." | ||||||
| post1.tags = ['mongodb', 'mongoengine'] | post1.tags = ["mongodb", "mongoengine"] | ||||||
| post1.save() | post1.save() | ||||||
|  |  | ||||||
| post2 = LinkPost(title='MongoEngine Documentation', author=john) | post2 = LinkPost(title="MongoEngine Documentation", author=john) | ||||||
| post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs' | post2.link_url = "http://tractiondigital.com/labs/mongoengine/docs" | ||||||
| post2.tags = ['mongoengine'] | post2.tags = ["mongoengine"] | ||||||
| post2.save() | post2.save() | ||||||
|  |  | ||||||
| print 'ALL POSTS' | print("ALL POSTS") | ||||||
| print | print() | ||||||
| for post in Post.objects: | for post in Post.objects: | ||||||
|     print post.title |     print(post.title) | ||||||
|     print '=' * post.title.count() |     # print '=' * post.title.count() | ||||||
|  |     print("=" * 20) | ||||||
|  |  | ||||||
|     if isinstance(post, TextPost): |     if isinstance(post, TextPost): | ||||||
|         print post.content |         print(post.content) | ||||||
|  |  | ||||||
|     if isinstance(post, LinkPost): |     if isinstance(post, LinkPost): | ||||||
|         print 'Link:', post.link_url |         print("Link:", post.link_url) | ||||||
|  |  | ||||||
|     print |     print() | ||||||
| print | print() | ||||||
|  |  | ||||||
| print 'POSTS TAGGED \'MONGODB\'' | print("POSTS TAGGED 'MONGODB'") | ||||||
| print | print() | ||||||
| for post in Post.objects(tags='mongodb'): | for post in Post.objects(tags="mongodb"): | ||||||
|     print post.title |     print(post.title) | ||||||
| print | print() | ||||||
|  |  | ||||||
| num_posts = Post.objects(tags='mongodb').count() | num_posts = Post.objects(tags="mongodb").count() | ||||||
| print 'Found %d posts with tag "mongodb"' % num_posts | print('Found %d posts with tag "mongodb"' % num_posts) | ||||||
|   | |||||||
							
								
								
									
docs/conf.py (106 changed lines)
							| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| # | # | ||||||
| # MongoEngine documentation build configuration file, created by | # MongoEngine documentation build configuration file, created by | ||||||
| # sphinx-quickstart on Sun Nov 22 18:14:13 2009. | # sphinx-quickstart on Sun Nov 22 18:14:13 2009. | ||||||
| @@ -11,40 +10,44 @@ | |||||||
| # All configuration values have a default; values that are commented out | # All configuration values have a default; values that are commented out | ||||||
| # serve to show the default. | # serve to show the default. | ||||||
|  |  | ||||||
| import sys, os | import os | ||||||
|  | import sys | ||||||
|  |  | ||||||
|  | import sphinx_rtd_theme | ||||||
|  |  | ||||||
|  | import mongoengine | ||||||
|  |  | ||||||
| # If extensions (or modules to document with autodoc) are in another directory, | # If extensions (or modules to document with autodoc) are in another directory, | ||||||
| # add these directories to sys.path here. If the directory is relative to the | # add these directories to sys.path here. If the directory is relative to the | ||||||
| # documentation root, use os.path.abspath to make it absolute, like shown here. | # documentation root, use os.path.abspath to make it absolute, like shown here. | ||||||
| sys.path.insert(0, os.path.abspath('..')) | sys.path.insert(0, os.path.abspath("..")) | ||||||
|  |  | ||||||
| # -- General configuration ----------------------------------------------------- | # -- General configuration ----------------------------------------------------- | ||||||
|  |  | ||||||
| # Add any Sphinx extension module names here, as strings. They can be extensions | # Add any Sphinx extension module names here, as strings. They can be extensions | ||||||
| # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. | ||||||
| extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo'] | extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo", "readthedocs_ext.readthedocs"] | ||||||
|  |  | ||||||
| # Add any paths that contain templates here, relative to this directory. | # Add any paths that contain templates here, relative to this directory. | ||||||
| templates_path = ['_templates'] | templates_path = ["_templates"] | ||||||
|  |  | ||||||
| # The suffix of source filenames. | # The suffix of source filenames. | ||||||
| source_suffix = '.rst' | source_suffix = ".rst" | ||||||
|  |  | ||||||
| # The encoding of source files. | # The encoding of source files. | ||||||
| #source_encoding = 'utf-8' | # source_encoding = 'utf-8' | ||||||
|  |  | ||||||
| # The master toctree document. | # The master toctree document. | ||||||
| master_doc = 'index' | master_doc = "index" | ||||||
|  |  | ||||||
| # General information about the project. | # General information about the project. | ||||||
| project = u'MongoEngine' | project = "MongoEngine" | ||||||
| copyright = u'2009, MongoEngine Authors' | copyright = "2009, MongoEngine Authors"  # noqa: A001 | ||||||
|  |  | ||||||
| # The version info for the project you're documenting, acts as replacement for | # The version info for the project you're documenting, acts as replacement for | ||||||
| # |version| and |release|, also used in various other places throughout the | # |version| and |release|, also used in various other places throughout the | ||||||
| # built documents. | # built documents. | ||||||
| # | # | ||||||
| import mongoengine |  | ||||||
| # The short X.Y version. | # The short X.Y version. | ||||||
| version = mongoengine.get_version() | version = mongoengine.get_version() | ||||||
| # The full version, including alpha/beta/rc tags. | # The full version, including alpha/beta/rc tags. | ||||||
| @@ -52,66 +55,66 @@ release = mongoengine.get_version() | |||||||
|  |  | ||||||
| # The language for content autogenerated by Sphinx. Refer to documentation | # The language for content autogenerated by Sphinx. Refer to documentation | ||||||
| # for a list of supported languages. | # for a list of supported languages. | ||||||
| #language = None | # language = None | ||||||
|  |  | ||||||
| # There are two options for replacing |today|: either, you set today to some | # There are two options for replacing |today|: either, you set today to some | ||||||
| # non-false value, then it is used: | # non-false value, then it is used: | ||||||
| #today = '' | # today = '' | ||||||
| # Else, today_fmt is used as the format for a strftime call. | # Else, today_fmt is used as the format for a strftime call. | ||||||
| #today_fmt = '%B %d, %Y' | # today_fmt = '%B %d, %Y' | ||||||
|  |  | ||||||
| # List of documents that shouldn't be included in the build. | # List of documents that shouldn't be included in the build. | ||||||
| #unused_docs = [] | # unused_docs = [] | ||||||
|  |  | ||||||
| # List of directories, relative to source directory, that shouldn't be searched | # List of directories, relative to source directory, that shouldn't be searched | ||||||
| # for source files. | # for source files. | ||||||
| exclude_trees = ['_build'] | exclude_trees = ["_build"] | ||||||
|  |  | ||||||
| # The reST default role (used for this markup: `text`) to use for all documents. | # The reST default role (used for this markup: `text`) to use for all documents. | ||||||
| #default_role = None | # default_role = None | ||||||
|  |  | ||||||
| # If true, '()' will be appended to :func: etc. cross-reference text. | # If true, '()' will be appended to :func: etc. cross-reference text. | ||||||
| #add_function_parentheses = True | # add_function_parentheses = True | ||||||
|  |  | ||||||
| # If true, the current module name will be prepended to all description | # If true, the current module name will be prepended to all description | ||||||
| # unit titles (such as .. function::). | # unit titles (such as .. function::). | ||||||
| #add_module_names = True | # add_module_names = True | ||||||
|  |  | ||||||
| # If true, sectionauthor and moduleauthor directives will be shown in the | # If true, sectionauthor and moduleauthor directives will be shown in the | ||||||
| # output. They are ignored by default. | # output. They are ignored by default. | ||||||
| #show_authors = False | # show_authors = False | ||||||
|  |  | ||||||
| # The name of the Pygments (syntax highlighting) style to use. | # The name of the Pygments (syntax highlighting) style to use. | ||||||
| pygments_style = 'sphinx' | pygments_style = "sphinx" | ||||||
|  |  | ||||||
| # A list of ignored prefixes for module index sorting. | # A list of ignored prefixes for module index sorting. | ||||||
| #modindex_common_prefix = [] | # modindex_common_prefix = [] | ||||||
|  |  | ||||||
|  |  | ||||||
| # -- Options for HTML output --------------------------------------------------- | # -- Options for HTML output --------------------------------------------------- | ||||||
|  |  | ||||||
| # The theme to use for HTML and HTML Help pages.  Major themes that come with | # The theme to use for HTML and HTML Help pages.  Major themes that come with | ||||||
| # Sphinx are currently 'default' and 'sphinxdoc'. | # Sphinx are currently 'default' and 'sphinxdoc'. | ||||||
| html_theme = 'sphinx_rtd_theme' | html_theme = "sphinx_rtd_theme" | ||||||
|  |  | ||||||
| # Theme options are theme-specific and customize the look and feel of a theme | # Theme options are theme-specific and customize the look and feel of a theme | ||||||
| # further.  For a list of options available for each theme, see the | # further.  For a list of options available for each theme, see the | ||||||
| # documentation. | # documentation. | ||||||
| #html_theme_options = {} | html_theme_options = {"canonical_url": "http://docs.mongoengine.org/en/latest/"} | ||||||
|  |  | ||||||
| # Add any paths that contain custom themes here, relative to this directory. | # Add any paths that contain custom themes here, relative to this directory. | ||||||
| html_theme_path = ['_themes'] | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] | ||||||
|  |  | ||||||
| # The name for this set of Sphinx documents.  If None, it defaults to | # The name for this set of Sphinx documents.  If None, it defaults to | ||||||
| # "<project> v<release> documentation". | # "<project> v<release> documentation". | ||||||
| #html_title = None | # html_title = None | ||||||
|  |  | ||||||
| # A shorter title for the navigation bar.  Default is the same as html_title. | # A shorter title for the navigation bar.  Default is the same as html_title. | ||||||
| #html_short_title = None | # html_short_title = None | ||||||
|  |  | ||||||
| # The name of an image file (relative to this directory) to place at the top | # The name of an image file (relative to this directory) to place at the top | ||||||
| # of the sidebar. | # of the sidebar. | ||||||
| #html_logo = None | # html_logo = None | ||||||
|  |  | ||||||
| # The name of an image file (within the static path) to use as favicon of the | # The name of an image file (within the static path) to use as favicon of the | ||||||
| # docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32 | # docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32 | ||||||
| @@ -121,11 +124,11 @@ html_favicon = "favicon.ico" | |||||||
| # Add any paths that contain custom static files (such as style sheets) here, | # Add any paths that contain custom static files (such as style sheets) here, | ||||||
| # relative to this directory. They are copied after the builtin static files, | # relative to this directory. They are copied after the builtin static files, | ||||||
| # so a file named "default.css" will overwrite the builtin "default.css". | # so a file named "default.css" will overwrite the builtin "default.css". | ||||||
| #html_static_path = ['_static'] | # html_static_path = ['_static'] | ||||||
|  |  | ||||||
| # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, | ||||||
| # using the given strftime format. | # using the given strftime format. | ||||||
| #html_last_updated_fmt = '%b %d, %Y' | # html_last_updated_fmt = '%b %d, %Y' | ||||||
|  |  | ||||||
| # If true, SmartyPants will be used to convert quotes and dashes to | # If true, SmartyPants will be used to convert quotes and dashes to | ||||||
| # typographically correct entities. | # typographically correct entities. | ||||||
| @@ -133,73 +136,68 @@ html_use_smartypants = True | |||||||
|  |  | ||||||
| # Custom sidebar templates, maps document names to template names. | # Custom sidebar templates, maps document names to template names. | ||||||
| html_sidebars = { | html_sidebars = { | ||||||
|     'index': ['globaltoc.html', 'searchbox.html'], |     "index": ["globaltoc.html", "searchbox.html"], | ||||||
|     '**': ['localtoc.html', 'relations.html', 'searchbox.html'] |     "**": ["localtoc.html", "relations.html", "searchbox.html"], | ||||||
| } | } | ||||||
|  |  | ||||||
|  |  | ||||||
| # Additional templates that should be rendered to pages, maps page names to | # Additional templates that should be rendered to pages, maps page names to | ||||||
| # template names. | # template names. | ||||||
| #html_additional_pages = {} | # html_additional_pages = {} | ||||||
|  |  | ||||||
| # If false, no module index is generated. | # If false, no module index is generated. | ||||||
| #html_use_modindex = True | # html_use_modindex = True | ||||||
|  |  | ||||||
| # If false, no index is generated. | # If false, no index is generated. | ||||||
| #html_use_index = True | # html_use_index = True | ||||||
|  |  | ||||||
| # If true, the index is split into individual pages for each letter. | # If true, the index is split into individual pages for each letter. | ||||||
| #html_split_index = False | # html_split_index = False | ||||||
|  |  | ||||||
| # If true, links to the reST sources are added to the pages. | # If true, links to the reST sources are added to the pages. | ||||||
| #html_show_sourcelink = True | # html_show_sourcelink = True | ||||||
|  |  | ||||||
| # If true, an OpenSearch description file will be output, and all pages will | # If true, an OpenSearch description file will be output, and all pages will | ||||||
| # contain a <link> tag referring to it.  The value of this option must be the | # contain a <link> tag referring to it.  The value of this option must be the | ||||||
| # base URL from which the finished HTML is served. | # base URL from which the finished HTML is served. | ||||||
| #html_use_opensearch = '' | # html_use_opensearch = '' | ||||||
|  |  | ||||||
| # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). | # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). | ||||||
| #html_file_suffix = '' | # html_file_suffix = '' | ||||||
|  |  | ||||||
| # Output file base name for HTML help builder. | # Output file base name for HTML help builder. | ||||||
| htmlhelp_basename = 'MongoEnginedoc' | htmlhelp_basename = "MongoEnginedoc" | ||||||
|  |  | ||||||
|  |  | ||||||
| # -- Options for LaTeX output -------------------------------------------------- | # -- Options for LaTeX output -------------------------------------------------- | ||||||
|  |  | ||||||
| # The paper size ('letter' or 'a4'). | # The paper size ('letter' or 'a4'). | ||||||
| latex_paper_size = 'a4' | latex_paper_size = "a4" | ||||||
|  |  | ||||||
| # The font size ('10pt', '11pt' or '12pt'). | # The font size ('10pt', '11pt' or '12pt'). | ||||||
| #latex_font_size = '10pt' | # latex_font_size = '10pt' | ||||||
|  |  | ||||||
| # Grouping the document tree into LaTeX files. List of tuples | # Grouping the document tree into LaTeX files. List of tuples | ||||||
| # (source start file, target name, title, author, documentclass [howto/manual]). | # (source start file, target name, title, author, documentclass [howto/manual]). | ||||||
| latex_documents = [ | latex_documents = [ | ||||||
|   ('index', 'MongoEngine.tex', 'MongoEngine Documentation', |     ("index", "MongoEngine.tex", "MongoEngine Documentation", "Ross Lawley", "manual") | ||||||
|    'Ross Lawley', 'manual'), |  | ||||||
| ] | ] | ||||||
|  |  | ||||||
| # The name of an image file (relative to this directory) to place at the top of | # The name of an image file (relative to this directory) to place at the top of | ||||||
| # the title page. | # the title page. | ||||||
| #latex_logo = None | # latex_logo = None | ||||||
|  |  | ||||||
| # For "manual" documents, if this is true, then toplevel headings are parts, | # For "manual" documents, if this is true, then toplevel headings are parts, | ||||||
| # not chapters. | # not chapters. | ||||||
| #latex_use_parts = False | # latex_use_parts = False | ||||||
|  |  | ||||||
| # Additional stuff for the LaTeX preamble. | # Additional stuff for the LaTeX preamble. | ||||||
| #latex_preamble = '' | # latex_preamble = '' | ||||||
|  |  | ||||||
| # Documents to append as an appendix to all manuals. | # Documents to append as an appendix to all manuals. | ||||||
| #latex_appendices = [] | # latex_appendices = [] | ||||||
|  |  | ||||||
| # If false, no module index is generated. | # If false, no module index is generated. | ||||||
| #latex_use_modindex = True | # latex_use_modindex = True | ||||||
|  |  | ||||||
| autoclass_content = 'both' | autoclass_content = "both" | ||||||
|  |  | ||||||
| html_theme_options = dict( |  | ||||||
|     canonical_url='http://docs.mongoengine.org/en/latest/' |  | ||||||
| ) |  | ||||||
|   | |||||||
							
								
								
									
180  docs/django.rst
							| @@ -2,176 +2,18 @@ | |||||||
| Django Support | Django Support | ||||||
| ============== | ============== | ||||||
|  |  | ||||||
| .. note:: Updated to support Django 1.5 | .. note:: Django support has been split from the main MongoEngine | ||||||
|  |     repository. The *legacy* Django extension may be found bundled with the | ||||||
| Connecting |     0.9 release of MongoEngine. | ||||||
| ========== |  | ||||||
| In your **settings.py** file, ignore the standard database settings (unless you |  | ||||||
| also plan to use the ORM in your project), and instead call |  | ||||||
| :func:`~mongoengine.connect` somewhere in the settings module. |  | ||||||
|  |  | ||||||
| .. note:: |  | ||||||
|    If you are not using another Database backend you may need to add a dummy |  | ||||||
|    database backend to ``settings.py`` eg:: |  | ||||||
|  |  | ||||||
|         DATABASES = { |  | ||||||
|             'default': { |  | ||||||
|                 'ENGINE': 'django.db.backends.dummy' |  | ||||||
|             } |  | ||||||
|         } |  | ||||||
|  |  | ||||||
| Authentication |  | ||||||
| ============== |  | ||||||
| MongoEngine includes a Django authentication backend, which uses MongoDB. The |  | ||||||
| :class:`~mongoengine.django.auth.User` model is a MongoEngine |  | ||||||
| :class:`~mongoengine.Document`, but implements most of the methods and |  | ||||||
| attributes that the standard Django :class:`User` model does - so the two are |  | ||||||
| moderately compatible. Using this backend will allow you to store users in |  | ||||||
| MongoDB but still use many of the Django authentication infrastructure (such as |  | ||||||
| the :func:`login_required` decorator and the :func:`authenticate` function). To |  | ||||||
| enable the MongoEngine auth backend, add the following to your **settings.py** |  | ||||||
| file:: |  | ||||||
|  |  | ||||||
|     AUTHENTICATION_BACKENDS = ( |  | ||||||
|         'mongoengine.django.auth.MongoEngineBackend', |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
| The :mod:`~mongoengine.django.auth` module also contains a |  | ||||||
| :func:`~mongoengine.django.auth.get_user` helper function, that takes a user's |  | ||||||
| :attr:`id` and returns a :class:`~mongoengine.django.auth.User` object. |  | ||||||
|  |  | ||||||
| .. versionadded:: 0.1.3 |  | ||||||
|  |  | ||||||
| Custom User model |  | ||||||
| ================= |  | ||||||
| Django 1.5 introduced `Custom user Models |  | ||||||
| <https://docs.djangoproject.com/en/dev/topics/auth/customizing/#auth-custom-user>`_ |  | ||||||
| which can be used as an alternative to the MongoEngine authentication backend. |  | ||||||
|  |  | ||||||
| The main advantage of this option is that other components relying on |  | ||||||
| :mod:`django.contrib.auth` and supporting the new swappable user model are more |  | ||||||
| likely to work. For example, you can use the ``createsuperuser`` management |  | ||||||
| command as usual. |  | ||||||
|  |  | ||||||
| To enable the custom User model in Django, add ``'mongoengine.django.mongo_auth'`` |  | ||||||
| in your ``INSTALLED_APPS`` and set ``'mongo_auth.MongoUser'`` as the custom user |  | ||||||
| user model to use. In your **settings.py** file you will have:: |  | ||||||
|  |  | ||||||
|     INSTALLED_APPS = ( |  | ||||||
|         ... |  | ||||||
|         'django.contrib.auth', |  | ||||||
|         'mongoengine.django.mongo_auth', |  | ||||||
|         ... |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     AUTH_USER_MODEL = 'mongo_auth.MongoUser' |  | ||||||
|  |  | ||||||
| An additional ``MONGOENGINE_USER_DOCUMENT`` setting enables you to replace the |  | ||||||
| :class:`~mongoengine.django.auth.User` class with another class of your choice:: |  | ||||||
|  |  | ||||||
|     MONGOENGINE_USER_DOCUMENT = 'mongoengine.django.auth.User' |  | ||||||
|  |  | ||||||
| The custom :class:`User` must be a :class:`~mongoengine.Document` class, but |  | ||||||
| otherwise has the same requirements as a standard custom user model, |  | ||||||
| as specified in the `Django Documentation |  | ||||||
| <https://docs.djangoproject.com/en/dev/topics/auth/customizing/>`_. |  | ||||||
| In particular, the custom class must define :attr:`USERNAME_FIELD` and |  | ||||||
| :attr:`REQUIRED_FIELDS` attributes. |  | ||||||
|  |  | ||||||
| Sessions |  | ||||||
| ======== |  | ||||||
| Django allows the use of different backend stores for its sessions. MongoEngine |  | ||||||
| provides a MongoDB-based session backend for Django, which allows you to use |  | ||||||
| sessions in your Django application with just MongoDB. To enable the MongoEngine |  | ||||||
| session backend, ensure that your settings module has |  | ||||||
| ``'django.contrib.sessions.middleware.SessionMiddleware'`` in the |  | ||||||
| ``MIDDLEWARE_CLASSES`` field  and ``'django.contrib.sessions'`` in your |  | ||||||
| ``INSTALLED_APPS``. From there, all you need to do is add the following line |  | ||||||
| into your settings module:: |  | ||||||
|  |  | ||||||
|     SESSION_ENGINE = 'mongoengine.django.sessions' |  | ||||||
|     SESSION_SERIALIZER = 'mongoengine.django.sessions.BSONSerializer' |  | ||||||
|  |  | ||||||
| Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` seconds, but doesn't delete cookie at sessions backend, so ``'mongoengine.django.sessions'`` supports  `mongodb TTL |  | ||||||
| <http://docs.mongodb.org/manual/tutorial/expire-data/>`_. |  | ||||||
|  |  | ||||||
| .. note:: ``SESSION_SERIALIZER`` is only necessary in Django 1.6 as the default |  | ||||||
|    serializer is based around JSON and doesn't know how to convert |  | ||||||
|    ``bson.objectid.ObjectId`` instances to strings. |  | ||||||
|  |  | ||||||
| .. versionadded:: 0.2.1 |  | ||||||
|  |  | ||||||
| Storage |  | ||||||
| ======= |  | ||||||
| With MongoEngine's support for GridFS via the :class:`~mongoengine.fields.FileField`, |  | ||||||
| it is useful to have a Django file storage backend that wraps this. The new |  | ||||||
| storage module is called :class:`~mongoengine.django.storage.GridFSStorage`. |  | ||||||
| Using it is very similar to using the default FileSystemStorage.:: |  | ||||||
|  |  | ||||||
|     from mongoengine.django.storage import GridFSStorage |  | ||||||
|     fs = GridFSStorage() |  | ||||||
|  |  | ||||||
|     filename = fs.save('hello.txt', 'Hello, World!') |  | ||||||
|  |  | ||||||
| All of the `Django Storage API methods |  | ||||||
| <http://docs.djangoproject.com/en/dev/ref/files/storage/>`_ have been |  | ||||||
| implemented except :func:`path`. If the filename provided already exists, an |  | ||||||
| underscore and a number (before # the file extension, if one exists) will be |  | ||||||
| appended to the filename until the generated filename doesn't exist. The |  | ||||||
| :func:`save` method will return the new filename.:: |  | ||||||
|  |  | ||||||
|     >>> fs.exists('hello.txt') |  | ||||||
|     True |  | ||||||
|     >>> fs.open('hello.txt').read() |  | ||||||
|     'Hello, World!' |  | ||||||
|     >>> fs.size('hello.txt') |  | ||||||
|     13 |  | ||||||
|     >>> fs.url('hello.txt') |  | ||||||
|     'http://your_media_url/hello.txt' |  | ||||||
|     >>> fs.open('hello.txt').name |  | ||||||
|     'hello.txt' |  | ||||||
|     >>> fs.listdir() |  | ||||||
|     ([], [u'hello.txt']) |  | ||||||
|  |  | ||||||
| All files will be saved and retrieved in GridFS via the :class:`FileDocument` |  | ||||||
| document, allowing easy access to the files without the GridFSStorage |  | ||||||
| backend.:: |  | ||||||
|  |  | ||||||
|     >>> from mongoengine.django.storage import FileDocument |  | ||||||
|     >>> FileDocument.objects() |  | ||||||
|     [<FileDocument: FileDocument object>] |  | ||||||
|  |  | ||||||
| .. versionadded:: 0.4 |  | ||||||
|  |  | ||||||
| Shortcuts |  | ||||||
| ========= |  | ||||||
| Inspired by the `Django shortcut get_object_or_404 |  | ||||||
| <https://docs.djangoproject.com/en/dev/topics/http/shortcuts/#get-object-or-404>`_, |  | ||||||
| the :func:`~mongoengine.django.shortcuts.get_document_or_404` method returns  |  | ||||||
| a document or raises an Http404 exception if the document does not exist:: |  | ||||||
|  |  | ||||||
|     from mongoengine.django.shortcuts import get_document_or_404 |  | ||||||
|      |  | ||||||
|     admin_user = get_document_or_404(User, username='root') |  | ||||||
|  |  | ||||||
| The first argument may be a Document or QuerySet object. All other passed arguments |  | ||||||
| and keyword arguments are used in the query:: |  | ||||||
|  |  | ||||||
|     foo_email = get_document_or_404(User.objects.only('email'), username='foo', is_active=True).email |  | ||||||
|  |  | ||||||
| .. note:: Like with :func:`get`, a MultipleObjectsReturned will be raised if more than one |  | ||||||
|     object is found. |  | ||||||
|  |  | ||||||
|  |  | ||||||
| Also inspired by the `Django shortcut get_list_or_404 |  | ||||||
| <https://docs.djangoproject.com/en/dev/topics/http/shortcuts/#get-list-or-404>`_, |  | ||||||
| the :func:`~mongoengine.django.shortcuts.get_list_or_404` method returns a list of |  | ||||||
| documents or raises an Http404 exception if the list is empty:: |  | ||||||
|  |  | ||||||
|     from mongoengine.django.shortcuts import get_list_or_404 | Help Wanted! | ||||||
|      | ------------ | ||||||
|     active_users = get_list_or_404(User, is_active=True) |  | ||||||
|  |  | ||||||
| The first argument may be a Document or QuerySet object. All other passed |  | ||||||
| arguments and keyword arguments are used to filter the query. |  | ||||||
|  |  | ||||||
|  | The MongoEngine team is looking for help contributing and maintaining a new | ||||||
|  | Django extension for MongoEngine! If you have Django experience and would like | ||||||
|  | to help contribute to the project, please get in touch on the | ||||||
|  | `mailing list <http://groups.google.com/group/mongoengine-users>`_ or by | ||||||
|  | simply contributing on | ||||||
|  | `GitHub <https://github.com/MongoEngine/django-mongoengine>`_. | ||||||
|   | |||||||
							
								
								
									
12  docs/faq.rst (Normal file)
							| @@ -0,0 +1,12 @@ | |||||||
|  | ========================== | ||||||
|  | Frequently Asked Questions | ||||||
|  | ========================== | ||||||
|  |  | ||||||
|  | Does MongoEngine support asynchronous drivers (Motor, TxMongo)? | ||||||
|  | --------------------------------------------------------------- | ||||||
|  |  | ||||||
|  | No, MongoEngine is exclusively based on PyMongo and isn't designed to support other drivers. | ||||||
|  | If this is a requirement for your project, check the alternatives: `uMongo`_ and `MotorEngine`_. | ||||||
|  |  | ||||||
|  | .. _uMongo: https://umongo.readthedocs.io/ | ||||||
|  | .. _MotorEngine: https://motorengine.readthedocs.io/ | ||||||
| @@ -4,131 +4,204 @@ | |||||||
| Connecting to MongoDB | Connecting to MongoDB | ||||||
| ===================== | ===================== | ||||||
|  |  | ||||||
| To connect to a running instance of :program:`mongod`, use the | Connections in MongoEngine are registered globally and are identified with aliases. | ||||||
| :func:`~mongoengine.connect` function. The first argument is the name of the | If no ``alias`` is provided during the connection, it will use "default" as alias. | ||||||
| database to connect to:: |  | ||||||
|  | To connect to a running instance of :program:`mongod`, use the :func:`~mongoengine.connect` | ||||||
|  | function. The first argument is the name of the database to connect to:: | ||||||
|  |  | ||||||
|     from mongoengine import connect |     from mongoengine import connect | ||||||
|     connect('project1') |     connect('project1') | ||||||
|  |  | ||||||
| By default, MongoEngine assumes that the :program:`mongod` instance is running | By default, MongoEngine assumes that the :program:`mongod` instance is running | ||||||
| on **localhost** on port **27017**. If MongoDB is running elsewhere, you should | on **localhost** on port **27017**. | ||||||
| provide the :attr:`host` and :attr:`port` arguments to |  | ||||||
| :func:`~mongoengine.connect`:: |  | ||||||
|  |  | ||||||
|     connect('project1', host='192.168.1.35', port=12345) | If MongoDB is running elsewhere, you need to provide details on how to connect. There are two ways of | ||||||
|  | doing this: using a connection string in URI format (**this is the preferred method**) or providing individual | ||||||
|  | attributes as keyword arguments. | ||||||
|  |  | ||||||
| If the database requires authentication, :attr:`username` and :attr:`password` | Connect with URI string | ||||||
| arguments should be provided:: | ======================= | ||||||
|  |  | ||||||
|     connect('project1', username='webapp', password='pwd123') | When using a connection string in URI format you should specify the connection details | ||||||
|  | as the :attr:`host` to :func:`~mongoengine.connect`. In a web application context for instance, the URI | ||||||
|  | is typically read from the config file:: | ||||||
|  |  | ||||||
| URI style connections are also supported -- just supply the URI as |         connect(host="mongodb://127.0.0.1:27017/my_db") | ||||||
| the :attr:`host` to |  | ||||||
| :func:`~mongoengine.connect`:: |  | ||||||
|  |  | ||||||
|     connect('project1', host='mongodb://localhost/database_name') | If the database requires authentication, you can specify it in the | ||||||
|  | URI. As each database can have its own users configured, you need to tell MongoDB | ||||||
|  | where to look for the user you are working with; that's what the ``?authSource=admin`` bit | ||||||
|  | of the MongoDB connection string is for:: | ||||||
|  |  | ||||||
|  |     # Connects to 'my_db' database by authenticating | ||||||
|  |     # with given credentials against the 'admin' database (by default as authSource isn't provided) | ||||||
|  |     connect(host="mongodb://my_user:my_password@127.0.0.1:27017/my_db") | ||||||
|  |  | ||||||
|  |     # Equivalent to previous connection but explicitly states that | ||||||
|  |     # it should use admin as the authentication source database | ||||||
|  |     connect(host="mongodb://my_user:my_password@hostname:port/my_db?authSource=admin") | ||||||
|  |  | ||||||
|  |     # Connects to 'my_db' database by authenticating | ||||||
|  |     # with given credentials against that same database | ||||||
|  |     connect(host="mongodb://my_user:my_password@127.0.0.1:27017/my_db?authSource=my_db") | ||||||
|  |  | ||||||
|  | The URI string can also be used to configure advanced parameters like ssl, replicaSet, etc. For more | ||||||
|  | information or example about URI string, you can refer to the `official doc <https://docs.mongodb.com/manual/reference/connection-string/>`_:: | ||||||
|  |  | ||||||
|  |     connect(host="mongodb://my_user:my_password@127.0.0.1:27017/my_db?authSource=admin&ssl=true&replicaSet=globaldb") | ||||||
|  |  | ||||||
|  | .. note:: URIs containing SRV records (e.g. "mongodb+srv://server.example.com/") can be used as well | ||||||
|  |  | ||||||
|  | Connect with keyword attributes | ||||||
|  | =============================== | ||||||
|  |  | ||||||
|  | The second option for specifying the connection details is to provide the information as keyword | ||||||
|  | attributes to :func:`~mongoengine.connect`:: | ||||||
|  |  | ||||||
|  |     connect('my_db', host='127.0.0.1', port=27017) | ||||||
|  |  | ||||||
|  | If the database requires authentication, :attr:`username`, :attr:`password` | ||||||
|  | and :attr:`authentication_source` arguments should be provided:: | ||||||
|  |  | ||||||
|  |     connect('my_db', username='my_user', password='my_password', authentication_source='admin') | ||||||
|  |  | ||||||
|  | The set of attributes that :func:`~mongoengine.connect` recognizes includes but is not limited to: | ||||||
|  | :attr:`host`, :attr:`port`, :attr:`read_preference`, :attr:`username`, :attr:`password`, :attr:`authentication_source`, :attr:`authentication_mechanism`, | ||||||
|  | :attr:`replicaset`, :attr:`tls`, etc. Most of the parameters accepted by `pymongo.MongoClient <https://pymongo.readthedocs.io/en/stable/api/pymongo/mongo_client.html#pymongo.mongo_client.MongoClient>`_ | ||||||
|  | can be used with :func:`~mongoengine.connect` and will simply be forwarded when instantiating the `pymongo.MongoClient`. | ||||||
|  |  | ||||||
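A rough sketch of this forwarding behaviour, assuming a recent pymongo; the extra ``serverSelectionTimeoutMS`` keyword is only an illustration of a MongoClient option being passed through::

    from mongoengine import connect

    # Keyword arguments that connect() does not consume itself are handed
    # to pymongo.MongoClient unchanged when the client is created.
    connect(
        'my_db',
        host='127.0.0.1',
        port=27017,
        username='my_user',
        password='my_password',
        authentication_source='admin',
        serverSelectionTimeoutMS=5000,  # forwarded to MongoClient
    )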
| .. note:: Database, username and password from URI string overrides | .. note:: Database, username and password from the URI string override | ||||||
|     corresponding parameters in :func:`~mongoengine.connect`: :: |     corresponding parameters in :func:`~mongoengine.connect`; this should | ||||||
|  |     obviously be avoided: :: | ||||||
|  |  | ||||||
|         connect( |         connect( | ||||||
|             name='test', |             db='test', | ||||||
|             username='user', |             username='user', | ||||||
|             password='12345', |             password='12345', | ||||||
|             host='mongodb://admin:qwerty@localhost/production' |             host='mongodb://admin:qwerty@localhost/production' | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     will establish connection to ``production`` database using |     will establish a connection to the ``production`` database using the ``admin`` username and ``qwerty`` password. | ||||||
|     ``admin`` username and ``qwerty`` password. |  | ||||||
|  |  | ||||||
| ReplicaSets | .. note:: Calling :func:`~mongoengine.connect` without an argument will establish | ||||||
| =========== |     a connection to the "test" database by default. | ||||||
|  |  | ||||||
| MongoEngine supports | Read Preferences | ||||||
| :class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient`. To use them, | ================ | ||||||
| please use an URI style connection and provide the ``replicaSet`` name |  | ||||||
| in the connection kwargs. |  | ||||||
|  |  | ||||||
| Read preferences are supported through the connection or via individual | As stated above, read preferences are supported through the connection but also via individual | ||||||
| queries by passing the read_preference :: | queries by passing the read_preference :: | ||||||
|  |  | ||||||
|  |     from pymongo import ReadPreference | ||||||
|  |  | ||||||
|     Bar.objects().read_preference(ReadPreference.PRIMARY) |     Bar.objects().read_preference(ReadPreference.PRIMARY) | ||||||
|     Bar.objects(read_preference=ReadPreference.PRIMARY) |     Bar.objects(read_preference=ReadPreference.PRIMARY) | ||||||
|  |  | ||||||
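A minimal sketch of the connection-level variant, assuming ``read_preference`` is forwarded as listed in the keyword attributes above::

    from pymongo import ReadPreference
    from mongoengine import connect

    # Every query issued through this connection will default to reading
    # from a secondary when one is available.
    connect('my_db', read_preference=ReadPreference.SECONDARY_PREFERRED)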
| Multiple Databases | Multiple Databases | ||||||
| ================== | ================== | ||||||
|  |  | ||||||
| Multiple database support was added in MongoEngine 0.6. To use multiple | To use multiple databases you can use :func:`~mongoengine.connect` and provide | ||||||
| databases you can use :func:`~mongoengine.connect` and provide an `alias` name | an `alias` name for the connection - if no `alias` is provided then "default" | ||||||
| for the connection - if no `alias` is provided then "default" is used. | is used. | ||||||
|  |  | ||||||
| In the background this uses :func:`~mongoengine.register_connection` to | In the background this uses :func:`~mongoengine.register_connection` to | ||||||
| store the data and you can register all aliases up front if required. | store the data and you can register all aliases up front if required. | ||||||
|  |  | ||||||
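Registering aliases up front with :func:`~mongoengine.register_connection` might look like the following sketch (the alias and database names are made up)::

    from mongoengine import register_connection

    # Only the settings are stored here; the actual pymongo client is
    # created lazily the first time the alias is used.
    register_connection(alias='core', host='mongodb://localhost:27017/core-db')
    register_connection(alias='reporting', host='mongodb://localhost:27017/reporting-db')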
| Individual documents can also support multiple databases by providing a | Documents defined in different databases | ||||||
| `db_alias` in their meta data.  This allows :class:`~pymongo.dbref.DBRef` objects | ---------------------------------------- | ||||||
| to point across databases and collections.  Below is an example schema, using | Individual documents can be attached to different databases by providing a | ||||||
| 3 different databases to store data:: | `db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` | ||||||
|  | objects to point across databases and collections. Below is an example schema, | ||||||
|  | using 3 different databases to store data:: | ||||||
|  |  | ||||||
|  |         connect(alias='user-db-alias', db='user-db') | ||||||
|  |         connect(alias='book-db-alias', db='book-db') | ||||||
|  |         connect(alias='users-books-db-alias', db='users-books-db') | ||||||
|  |  | ||||||
|         class User(Document): |         class User(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|             meta = {"db_alias": "user-db"} |             meta = {'db_alias': 'user-db-alias'} | ||||||
|  |  | ||||||
|         class Book(Document): |         class Book(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|             meta = {"db_alias": "book-db"} |             meta = {'db_alias': 'book-db-alias'} | ||||||
|  |  | ||||||
|         class AuthorBooks(Document): |         class AuthorBooks(Document): | ||||||
|             author = ReferenceField(User) |             author = ReferenceField(User) | ||||||
|             book = ReferenceField(Book) |             book = ReferenceField(Book) | ||||||
|  |  | ||||||
|             meta = {"db_alias": "users-books-db"} |             meta = {'db_alias': 'users-books-db-alias'} | ||||||
|  |  | ||||||
|  |  | ||||||
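One way to check which database a given alias resolves to, assuming the three connections registered above::

        from mongoengine.connection import get_db

        # Each alias maps to its own pymongo database object.
        print(get_db('user-db-alias').name)   # user-db
        print(get_db('book-db-alias').name)   # book-db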
|  | Disconnecting an existing connection | ||||||
|  | ------------------------------------ | ||||||
|  | The function :func:`~mongoengine.disconnect` can be used to | ||||||
|  | disconnect a particular connection. This can be used to change a | ||||||
|  | connection globally:: | ||||||
|  |  | ||||||
|  |         from mongoengine import connect, disconnect | ||||||
|  |         connect('a_db', alias='db1') | ||||||
|  |  | ||||||
|  |         class User(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {'db_alias': 'db1'} | ||||||
|  |  | ||||||
|  |         disconnect(alias='db1') | ||||||
|  |  | ||||||
|  |         connect('another_db', alias='db1') | ||||||
|  |  | ||||||
|  | .. note:: Calling :func:`~mongoengine.disconnect` without argument | ||||||
|  |     will disconnect the "default" connection | ||||||
|  |  | ||||||
|  | .. note:: Since connections get registered globally, it is important | ||||||
|  |     to use the `disconnect` function from MongoEngine and not the | ||||||
|  |     `disconnect()` method of an existing connection (pymongo.MongoClient) | ||||||
|  |  | ||||||
|  | .. note:: :class:`~mongoengine.Document` classes cache the pymongo collection. | ||||||
|  |     Using `disconnect` ensures that this cache gets cleared as well. | ||||||
|  |  | ||||||
| Context Managers | Context Managers | ||||||
| ================ | ================ | ||||||
| Sometimes you may want to switch the database or collection to query against | Sometimes you may want to switch the database or collection to query against. | ||||||
| for a class. |  | ||||||
| For example, archiving older data into a separate database for performance | For example, archiving older data into a separate database for performance | ||||||
| reasons or writing functions that dynamically choose collections to write | reasons or writing functions that dynamically choose collections to write | ||||||
| document to. | a document to. | ||||||
|  |  | ||||||
| Switch Database | Switch Database | ||||||
| --------------- | --------------- | ||||||
| The :class:`~mongoengine.context_managers.switch_db` context manager allows | The :class:`~mongoengine.context_managers.switch_db` context manager allows | ||||||
| you to change the database alias for a given class allowing quick and easy | you to change the database alias for a given class allowing quick and easy | ||||||
| access the same User document across databases:: | access to the same User document across databases:: | ||||||
|  |  | ||||||
|     from mongoengine.context_managers import switch_db |     from mongoengine.context_managers import switch_db | ||||||
|  |  | ||||||
|     class User(Document): |     class User(Document): | ||||||
|         name = StringField() |         name = StringField() | ||||||
|  |  | ||||||
|         meta = {"db_alias": "user-db"} |         meta = {'db_alias': 'user-db'} | ||||||
|  |  | ||||||
|     with switch_db(User, 'archive-user-db') as User: |     with switch_db(User, 'archive-user-db') as User: | ||||||
|         User(name="Ross").save()  # Saves the 'archive-user-db' |         User(name='Ross').save()  # Saves the 'archive-user-db' | ||||||
|  |  | ||||||
|  |  | ||||||
| Switch Collection | Switch Collection | ||||||
| ----------------- | ----------------- | ||||||
| The :class:`~mongoengine.context_managers.switch_collection` context manager | The :func:`~mongoengine.context_managers.switch_collection` context manager | ||||||
| allows you to change the collection for a given class allowing quick and easy | allows you to change the collection for a given class allowing quick and easy | ||||||
| access the same Group document across collection:: | access to the same Group document across collections:: | ||||||
|  |  | ||||||
|         from mongoengine.context_managers import switch_collection |         from mongoengine.context_managers import switch_collection | ||||||
|  |  | ||||||
|         class Group(Document): |         class Group(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         Group(name="test").save()  # Saves in the default db |         Group(name='test').save()  # Saves in the default db | ||||||
|  |  | ||||||
|         with switch_collection(Group, 'group2000') as Group: |         with switch_collection(Group, 'group2000') as Group: | ||||||
|             Group(name="hello Group 2000 collection!").save()  # Saves in group2000 collection |             Group(name='hello Group 2000 collection!').save()  # Saves in group2000 collection | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
| .. note:: Make sure any aliases have been registered with | .. note:: Make sure any aliases have been registered with | ||||||
|   | |||||||
| @@ -22,14 +22,14 @@ objects** as class attributes to the document class:: | |||||||
|  |  | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
|         title = StringField(max_length=200, required=True) |         title = StringField(max_length=200, required=True) | ||||||
|         date_modified = DateTimeField(default=datetime.datetime.now) |         date_modified = DateTimeField(default=datetime.datetime.utcnow) | ||||||
|  |  | ||||||
| As BSON (the binary format for storing data in mongodb) is order dependent, | As BSON (the binary format for storing data in mongodb) is order dependent, | ||||||
| documents are serialized based on their field order. | documents are serialized based on their field order. | ||||||
|  |  | ||||||
| Dynamic document schemas | Dynamic document schemas | ||||||
| ======================== | ======================== | ||||||
| One of the benefits of MongoDb is dynamic schemas for a collection, whilst data | One of the benefits of MongoDB is dynamic schemas for a collection. Whilst data | ||||||
| should be planned and organised (after all explicit is better than implicit!) | should be planned and organised (after all, explicit is better than implicit!), | ||||||
| there are scenarios where having dynamic / expando style documents is desirable. | there are scenarios where having dynamic / expando style documents is desirable. | ||||||
|  |  | ||||||
| @@ -75,17 +75,22 @@ are as follows: | |||||||
| * :class:`~mongoengine.fields.DynamicField` | * :class:`~mongoengine.fields.DynamicField` | ||||||
| * :class:`~mongoengine.fields.EmailField` | * :class:`~mongoengine.fields.EmailField` | ||||||
| * :class:`~mongoengine.fields.EmbeddedDocumentField` | * :class:`~mongoengine.fields.EmbeddedDocumentField` | ||||||
|  | * :class:`~mongoengine.fields.EmbeddedDocumentListField` | ||||||
|  | * :class:`~mongoengine.fields.EnumField` | ||||||
| * :class:`~mongoengine.fields.FileField` | * :class:`~mongoengine.fields.FileField` | ||||||
| * :class:`~mongoengine.fields.FloatField` | * :class:`~mongoengine.fields.FloatField` | ||||||
| * :class:`~mongoengine.fields.GenericEmbeddedDocumentField` | * :class:`~mongoengine.fields.GenericEmbeddedDocumentField` | ||||||
| * :class:`~mongoengine.fields.GenericReferenceField` | * :class:`~mongoengine.fields.GenericReferenceField` | ||||||
|  | * :class:`~mongoengine.fields.GenericLazyReferenceField` | ||||||
| * :class:`~mongoengine.fields.GeoPointField` | * :class:`~mongoengine.fields.GeoPointField` | ||||||
| * :class:`~mongoengine.fields.ImageField` | * :class:`~mongoengine.fields.ImageField` | ||||||
| * :class:`~mongoengine.fields.IntField` | * :class:`~mongoengine.fields.IntField` | ||||||
| * :class:`~mongoengine.fields.ListField` | * :class:`~mongoengine.fields.ListField` | ||||||
|  | * :class:`~mongoengine.fields.LongField` | ||||||
| * :class:`~mongoengine.fields.MapField` | * :class:`~mongoengine.fields.MapField` | ||||||
| * :class:`~mongoengine.fields.ObjectIdField` | * :class:`~mongoengine.fields.ObjectIdField` | ||||||
| * :class:`~mongoengine.fields.ReferenceField` | * :class:`~mongoengine.fields.ReferenceField` | ||||||
|  | * :class:`~mongoengine.fields.LazyReferenceField` | ||||||
| * :class:`~mongoengine.fields.SequenceField` | * :class:`~mongoengine.fields.SequenceField` | ||||||
| * :class:`~mongoengine.fields.SortedListField` | * :class:`~mongoengine.fields.SortedListField` | ||||||
| * :class:`~mongoengine.fields.StringField` | * :class:`~mongoengine.fields.StringField` | ||||||
| @@ -114,7 +119,7 @@ arguments can be set on all fields: | |||||||
| :attr:`default` (Default: None) | :attr:`default` (Default: None) | ||||||
|     A value to use when no value is set for this field. |     A value to use when no value is set for this field. | ||||||
|  |  | ||||||
|     The definion of default parameters follow `the general rules on Python |     The definition of default parameters follow `the general rules on Python | ||||||
|     <http://docs.python.org/reference/compound_stmts.html#function-definitions>`__, |     <http://docs.python.org/reference/compound_stmts.html#function-definitions>`__, | ||||||
|     which means that some care should be taken when dealing with default mutable objects |     which means that some care should be taken when dealing with default mutable objects | ||||||
|     (like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`):: |     (like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`):: | ||||||
| @@ -146,11 +151,13 @@ arguments can be set on all fields: | |||||||
|     When True, use this field as a primary key for the collection.  `DictField` |     When True, use this field as a primary key for the collection.  `DictField` | ||||||
|     and `EmbeddedDocuments` both support being the primary key for a document. |     and `EmbeddedDocuments` both support being the primary key for a document. | ||||||
|  |  | ||||||
|  |     .. note:: If set, this field is also accessible through the `pk` field. | ||||||
|  |  | ||||||
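    For example (a small sketch; the document and field names are made up, and an
    active default connection is assumed)::

        class Country(Document):
            iso_code = StringField(primary_key=True)

        country = Country(iso_code='DE').save()
        print(country.pk)  # 'DE', same value as country.iso_code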
| :attr:`choices` (Default: None) | :attr:`choices` (Default: None) | ||||||
|     An iterable (e.g. a list or tuple) of choices to which the value of this |     An iterable (e.g. list, tuple or set) of choices to which the value of this | ||||||
|     field should be limited. |     field should be limited. | ||||||
|  |  | ||||||
|     Can be either be a nested tuples of value (stored in mongo) and a |     Can either be nested tuples of the value (stored in mongo) and a | ||||||
|     human readable key :: |     human readable key :: | ||||||
|  |  | ||||||
|         SIZE = (('S', 'Small'), |         SIZE = (('S', 'Small'), | ||||||
| @@ -170,11 +177,26 @@ arguments can be set on all fields: | |||||||
|         class Shirt(Document): |         class Shirt(Document): | ||||||
|             size = StringField(max_length=3, choices=SIZE) |             size = StringField(max_length=3, choices=SIZE) | ||||||
|  |  | ||||||
| :attr:`help_text` (Default: None) | :attr:`validation` (Optional) | ||||||
|     Optional help text to output with the field -- used by form libraries |     A callable to validate the value of the field. | ||||||
|  |     The callable takes the value as a parameter and should raise a ValidationError | ||||||
|  |     if validation fails. | ||||||
|  |  | ||||||
| :attr:`verbose_name` (Default: None) |     e.g.:: | ||||||
|     Optional human-readable name for the field -- used by form libraries |  | ||||||
|  |         def _not_empty(val): | ||||||
|  |             if not val: | ||||||
|  |                 raise ValidationError('value can not be empty') | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField(validation=_not_empty) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | :attr:`**kwargs` (Optional) | ||||||
|  |     You can supply additional metadata as arbitrary additional keyword | ||||||
|  |     arguments.  You cannot override existing attributes, however.  Common | ||||||
|  |     choices include `help_text` and `verbose_name`, commonly used by form and | ||||||
|  |     widget libraries. | ||||||
|  |  | ||||||
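    For example (a sketch; the extra keywords are arbitrary metadata and are not
    interpreted by MongoEngine itself)::

        class Person(Document):
            name = StringField(
                help_text='Full legal name',
                verbose_name='Name of the person',
            )

        # The metadata is simply stored on the field object.
        print(Person.name.help_text)  # 'Full legal name'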
|  |  | ||||||
| List fields | List fields | ||||||
| @@ -211,9 +233,9 @@ document class as the first argument:: | |||||||
|  |  | ||||||
| Dictionary Fields | Dictionary Fields | ||||||
| ----------------- | ----------------- | ||||||
| Often, an embedded document may be used instead of a dictionary -- generally | Often, an embedded document may be used instead of a dictionary -- generally | ||||||
| this is recommended as dictionaries don't support validation or custom field | embedded documents are recommended as dictionaries don't support validation | ||||||
| types. However, sometimes you will not know the structure of what you want to | or custom field types. However, sometimes you will not know the structure of what you want to | ||||||
| store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate:: | store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate:: | ||||||
|  |  | ||||||
|     class SurveyResponse(Document): |     class SurveyResponse(Document): | ||||||
| @@ -221,7 +243,7 @@ store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate | |||||||
|         user = ReferenceField(User) |         user = ReferenceField(User) | ||||||
|         answers = DictField() |         answers = DictField() | ||||||
|  |  | ||||||
|     survey_response = SurveyResponse(date=datetime.now(), user=request.user) |     survey_response = SurveyResponse(date=datetime.utcnow(), user=request.user) | ||||||
|     response_form = ResponseForm(request.POST) |     response_form = ResponseForm(request.POST) | ||||||
|     survey_response.answers = response_form.cleaned_data() |     survey_response.answers = response_form.cleaned_data() | ||||||
|     survey_response.save() |     survey_response.save() | ||||||
| @@ -268,12 +290,12 @@ as the constructor's argument:: | |||||||
|         content = StringField() |         content = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
| .. _one-to-many-with-listfields: | .. _many-to-many-with-listfields: | ||||||
|  |  | ||||||
| One to Many with ListFields | Many to Many with ListFields | ||||||
| ''''''''''''''''''''''''''' | '''''''''''''''''''''''''''' | ||||||
|  |  | ||||||
| If you are implementing a one to many relationship via a list of references, | If you are implementing a many to many relationship via a list of references, | ||||||
| then the references are stored as DBRefs and to query you need to pass an | then the references are stored as DBRefs and to query you need to pass an | ||||||
| instance of the object to the query:: | instance of the object to the query:: | ||||||
|  |  | ||||||
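A minimal sketch of such a query, assuming a `Page` document whose `authors` field is a list of references to `User` (the guide's own example continues beyond this excerpt)::

    class User(Document):
        name = StringField()

    class Page(Document):
        content = StringField()
        authors = ListField(ReferenceField(User))

    bob = User(name='Bob Jones').save()

    # Pass the referenced instance itself; it is matched against the stored DBRefs.
    Page.objects(authors__in=[bob])   # pages where bob is among the authors
    Page.objects(authors=bob)         # shorthand for a single author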
| @@ -313,12 +335,12 @@ reference with a delete rule specification.  A delete rule is specified by | |||||||
| supplying the :attr:`reverse_delete_rule` attributes on the | supplying the :attr:`reverse_delete_rule` attributes on the | ||||||
| :class:`ReferenceField` definition, like this:: | :class:`ReferenceField` definition, like this:: | ||||||
|  |  | ||||||
|     class Employee(Document): |     class ProfilePage(Document): | ||||||
|         ... |         ... | ||||||
|         profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY) |         employee = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE) | ||||||
|  |  | ||||||
| The declaration in this example means that when an :class:`Employee` object is | The declaration in this example means that when an :class:`Employee` object is | ||||||
| removed, the :class:`ProfilePage` that belongs to that employee is removed as | removed, the :class:`ProfilePage` that references that employee is removed as | ||||||
| well.  If a whole batch of employees is removed, all profile pages that are | well.  If a whole batch of employees is removed, all profile pages that are | ||||||
| linked are removed as well. | linked are removed as well. | ||||||
|  |  | ||||||
| @@ -331,7 +353,7 @@ Its value can take any of the following constants: | |||||||
|   Deletion is denied if there still exist references to the object being |   Deletion is denied if there still exist references to the object being | ||||||
|   deleted. |   deleted. | ||||||
| :const:`mongoengine.NULLIFY` | :const:`mongoengine.NULLIFY` | ||||||
|   Any object's fields still referring to the object being deleted are removed |   Any object's fields still referring to the object being deleted are set to None | ||||||
|   (using MongoDB's "unset" operation), effectively nullifying the relationship. |   (using MongoDB's "unset" operation), effectively nullifying the relationship. | ||||||
| :const:`mongoengine.CASCADE` | :const:`mongoengine.CASCADE` | ||||||
|   Any object containing fields that are referring to the object being deleted |   Any object containing fields that are referring to the object being deleted | ||||||
| @@ -358,11 +380,6 @@ Its value can take any of the following constants: | |||||||
|    In Django, be sure to put all apps that have such delete rule declarations in |    In Django, be sure to put all apps that have such delete rule declarations in | ||||||
|    their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. |    their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. | ||||||
|  |  | ||||||
|  |  | ||||||
| .. warning:: |  | ||||||
|    Signals are not triggered when doing cascading updates / deletes - if this |  | ||||||
|    is required you must manually handle the update / delete. |  | ||||||
|  |  | ||||||
| Generic reference fields | Generic reference fields | ||||||
| '''''''''''''''''''''''' | '''''''''''''''''''''''' | ||||||
| A second kind of reference field also exists, | A second kind of reference field also exists, | ||||||
| @@ -401,7 +418,7 @@ MongoEngine allows you to specify that a field should be unique across a | |||||||
| collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's | collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's | ||||||
| constructor. If you try to save a document that has the same value for a unique | constructor. If you try to save a document that has the same value for a unique | ||||||
| field as a document that is already in the database, a | field as a document that is already in the database, a | ||||||
| :class:`~mongoengine.OperationError` will be raised. You may also specify | :class:`~mongoengine.NotUniqueError` will be raised. You may also specify | ||||||
| multi-field uniqueness constraints by using :attr:`unique_with`, which may be | multi-field uniqueness constraints by using :attr:`unique_with`, which may be | ||||||
| either a single field name, or a list or tuple of field names:: | either a single field name, or a list or tuple of field names:: | ||||||
|  |  | ||||||
| @@ -410,28 +427,15 @@ either a single field name, or a list or tuple of field names:: | |||||||
|         first_name = StringField() |         first_name = StringField() | ||||||
|         last_name = StringField(unique_with='first_name') |         last_name = StringField(unique_with='first_name') | ||||||
|  |  | ||||||
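A self-contained sketch of how that failure surfaces (assuming a local mongod and the default behaviour of creating indexes automatically)::

    from mongoengine import connect, Document, StringField
    from mongoengine.errors import NotUniqueError

    connect('unique-demo-db')  # hypothetical database name

    class User(Document):
        first_name = StringField()
        last_name = StringField(unique_with='first_name')

    User(first_name='Ada', last_name='Lovelace').save()

    try:
        # A second document with the same (first_name, last_name) pair
        # violates the unique_with constraint.
        User(first_name='Ada', last_name='Lovelace').save()
    except NotUniqueError:
        print('duplicate user')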
| Skipping Document validation on save |  | ||||||
| ------------------------------------ |  | ||||||
| You can also skip the whole document validation process by setting |  | ||||||
| ``validate=False`` when calling the :meth:`~mongoengine.document.Document.save` |  | ||||||
| method:: |  | ||||||
|  |  | ||||||
|     class Recipient(Document): |  | ||||||
|         name = StringField() |  | ||||||
|         email = EmailField() |  | ||||||
|  |  | ||||||
|     recipient = Recipient(name='admin', email='root@localhost') |  | ||||||
|     recipient.save()               # will raise a ValidationError while |  | ||||||
|     recipient.save(validate=False) # won't |  | ||||||
|  |  | ||||||
| Document collections | Document collections | ||||||
| ==================== | ==================== | ||||||
| Document classes that inherit **directly** from :class:`~mongoengine.Document` | Document classes that inherit **directly** from :class:`~mongoengine.Document` | ||||||
| will have their own **collection** in the database. The name of the collection | will have their own **collection** in the database. The name of the collection | ||||||
| is by default the name of the class, converted to lowercase (so in the example | is by default the name of the class converted to snake_case (e.g if your Document class | ||||||
| above, the collection would be called `page`). If you need to change the name | is named `CompanyUser`, the corresponding collection would be `company_user`). If you need | ||||||
| of the collection (e.g. to use MongoEngine with an existing database), then | to change the name of the collection (e.g. to use MongoEngine with an existing database), | ||||||
| create a class dictionary attribute called :attr:`meta` on your document, and | then create a class dictionary attribute called :attr:`meta` on your document, and | ||||||
| set :attr:`collection` to the name of the collection that you want your | set :attr:`collection` to the name of the collection that you want your | ||||||
| document class to use:: | document class to use:: | ||||||
|  |  | ||||||
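A minimal sketch of that `meta` override (the collection name is an arbitrary example)::

    class Page(Document):
        title = StringField(max_length=200, required=True)
        meta = {'collection': 'cmsPage'}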
| @@ -445,8 +449,10 @@ A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying | |||||||
| :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary. | :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary. | ||||||
| :attr:`max_documents` is the maximum number of documents that is allowed to be | :attr:`max_documents` is the maximum number of documents that is allowed to be | ||||||
| stored in the collection, and :attr:`max_size` is the maximum size of the | stored in the collection, and :attr:`max_size` is the maximum size of the | ||||||
| collection in bytes. If :attr:`max_size` is not specified and | collection in bytes. :attr:`max_size` is rounded up to the next multiple of 256 | ||||||
| :attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB). | by MongoDB internally (and by mongoengine beforehand), so use a multiple of 256 to | ||||||
|  | avoid confusion. If :attr:`max_size` is not specified and | ||||||
|  | :attr:`max_documents` is, :attr:`max_size` defaults to 10485760 bytes (10MB). | ||||||
| The following example shows a :class:`Log` document that will be limited to | The following example shows a :class:`Log` document that will be limited to | ||||||
| 1000 entries and 2MB of disk space:: | 1000 entries and 2MB of disk space:: | ||||||
|  |  | ||||||
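A sketch of such a capped `Log` document; 2097152 bytes is used here because it is both roughly 2MB and a multiple of 256, as recommended above::

    class Log(Document):
        ip_address = StringField()
        meta = {'max_documents': 1000, 'max_size': 2097152}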
| @@ -463,19 +469,26 @@ You can specify indexes on collections to make querying faster. This is done | |||||||
| by creating a list of index specifications called :attr:`indexes` in the | by creating a list of index specifications called :attr:`indexes` in the | ||||||
| :attr:`~mongoengine.Document.meta` dictionary, where an index specification may | :attr:`~mongoengine.Document.meta` dictionary, where an index specification may | ||||||
| either be a single field name, a tuple containing multiple field names, or a | either be a single field name, a tuple containing multiple field names, or a | ||||||
| dictionary containing a full index definition. A direction may be specified on | dictionary containing a full index definition. | ||||||
| fields by prefixing the field name with a **+** (for ascending) or a **-** sign |  | ||||||
| (for descending). Note that direction only matters on multi-field indexes. | A direction may be specified on fields by prefixing the field name with a | ||||||
| Text indexes may be specified by prefixing the field name with a **$**. :: | **+** (for ascending) or a **-** sign (for descending). Note that direction | ||||||
|  | only matters on multi-field indexes. Text indexes may be specified by prefixing | ||||||
|  | the field name with a **$**. Hashed indexes may be specified by prefixing | ||||||
|  | the field name with a **#**:: | ||||||
|  |  | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
|  |         category = IntField() | ||||||
|         title = StringField() |         title = StringField() | ||||||
|         rating = StringField() |         rating = StringField() | ||||||
|         created = DateTimeField() |         created = DateTimeField() | ||||||
|         meta = { |         meta = { | ||||||
|             'indexes': [ |             'indexes': [ | ||||||
|                 'title', |                 'title', | ||||||
|  |                 '$title',  # text index | ||||||
|  |                 '#title',  # hashed index | ||||||
|                 ('title', '-rating'), |                 ('title', '-rating'), | ||||||
|  |                 ('category', '_cls'), | ||||||
|                 { |                 { | ||||||
|                     'fields': ['created'], |                     'fields': ['created'], | ||||||
|                     'expireAfterSeconds': 3600 |                     'expireAfterSeconds': 3600 | ||||||
| @@ -483,7 +496,9 @@ Text indexes may be specified by prefixing the field name with a **$**. :: | |||||||
|             ] |             ] | ||||||
|         } |         } | ||||||
|  |  | ||||||
| If a dictionary is passed then the following options are available: | If a dictionary is passed then additional options become available. Valid options include, | ||||||
|  | but are not limited to: | ||||||
|  |  | ||||||
|  |  | ||||||
| :attr:`fields` (Default: None) | :attr:`fields` (Default: None) | ||||||
|     The fields to index. Specified in the same format as described above. |     The fields to index. Specified in the same format as described above. | ||||||
| @@ -504,8 +519,15 @@ If a dictionary is passed then the following options are available: | |||||||
|     Allows you to automatically expire data from a collection by setting the |     Allows you to automatically expire data from a collection by setting the | ||||||
|     time in seconds to expire the a field. |     time in seconds to expire a field. | ||||||
|  |  | ||||||
|  | :attr:`name` (Optional) | ||||||
|  |     Allows you to specify a name for the index | ||||||
|  |  | ||||||
|  | :attr:`collation` (Optional) | ||||||
|  |     Allows you to create case-insensitive indexes (MongoDB v3.4+ only) | ||||||
|  |  | ||||||
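A hedged sketch of a dictionary-style index specification combining these options; the collation document follows MongoDB's collation format and the index name is arbitrary::

    class Page(Document):
        title = StringField()
        meta = {
            'indexes': [
                {
                    'fields': ['title'],
                    'name': 'title_ci_idx',
                    'collation': {'locale': 'en', 'strength': 2},  # case-insensitive
                },
            ]
        }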
| .. note:: | .. note:: | ||||||
|  |  | ||||||
|  |     Additional options are forwarded as ``**kwargs`` to pymongo's create_index method. | ||||||
|     Inheritance adds extra fields indices see: :ref:`document-inheritance`. |     Inheritance adds extra fields indices see: :ref:`document-inheritance`. | ||||||
|  |  | ||||||
| Global index default options | Global index default options | ||||||
| @@ -517,25 +539,28 @@ There are a few top level defaults for all indexes that can be set:: | |||||||
|         title = StringField() |         title = StringField() | ||||||
|         rating = StringField() |         rating = StringField() | ||||||
|         meta = { |         meta = { | ||||||
|             'index_options': {}, |             'index_opts': {}, | ||||||
|             'index_background': True, |             'index_background': True, | ||||||
|             'index_drop_dups': True, |             'index_cls': False, | ||||||
|             'index_cls': False |             'auto_create_index': True, | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |  | ||||||
| :attr:`index_options` (Optional) | :attr:`index_opts` (Optional) | ||||||
|     Set any default index options - see the `full options list <http://docs.mongodb.org/manual/reference/method/db.collection.ensureIndex/#db.collection.ensureIndex>`_ |     Set any default index options - see the `full options list <https://docs.mongodb.com/manual/reference/method/db.collection.createIndex/#db.collection.createIndex>`_ | ||||||
|  |  | ||||||
| :attr:`index_background` (Optional) | :attr:`index_background` (Optional) | ||||||
|     Set the default value for if an index should be indexed in the background |     Set the default value for whether an index should be built in the background | ||||||
|  |  | ||||||
| :attr:`index_drop_dups` (Optional) |  | ||||||
|     Set the default value for if an index should drop duplicates |  | ||||||
|  |  | ||||||
| :attr:`index_cls` (Optional) | :attr:`index_cls` (Optional) | ||||||
|     A way to turn off a specific index for _cls. |     A way to turn off a specific index for _cls. | ||||||
|  |  | ||||||
|  | :attr:`auto_create_index` (Optional) | ||||||
|  |     When this is True (default), MongoEngine will ensure that the correct | ||||||
|  |     indexes exist in MongoDB each time a command is run. This can be disabled | ||||||
|  |     in systems where indexes are managed separately. Disabling this will improve | ||||||
|  |     performance. | ||||||
|  |  | ||||||
|  |  | ||||||
| Compound Indexes and Indexing sub documents | Compound Indexes and Indexing sub documents | ||||||
| ------------------------------------------- | ------------------------------------------- | ||||||
| @@ -608,7 +633,7 @@ collection after a given period. See the official | |||||||
| documentation for more information.  A common usecase might be session data:: | documentation for more information.  A common use case might be session data:: | ||||||
|  |  | ||||||
|     class Session(Document): |     class Session(Document): | ||||||
|         created = DateTimeField(default=datetime.now) |         created = DateTimeField(default=datetime.utcnow) | ||||||
|         meta = { |         meta = { | ||||||
|             'indexes': [ |             'indexes': [ | ||||||
|                 {'fields': ['created'], 'expireAfterSeconds': 3600} |                 {'fields': ['created'], 'expireAfterSeconds': 3600} | ||||||
| @@ -671,11 +696,16 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. :: | |||||||
| Shard keys | Shard keys | ||||||
| ========== | ========== | ||||||
|  |  | ||||||
| If your collection is sharded, then you need to specify the shard key as a tuple, | If your collection is sharded by multiple keys, then you can improve shard | ||||||
| using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`. | routing (and thus the performance of your application) by specifying the shard | ||||||
| This ensures that the shard key is sent with the query when calling the | key, using the :attr:`shard_key` attribute of | ||||||
| :meth:`~mongoengine.document.Document.save` or | :attr:`~mongoengine.Document.meta`. The shard key should be defined as a tuple. | ||||||
| :meth:`~mongoengine.document.Document.update` method on an existing |  | ||||||
|  | This ensures that the full shard key is sent with the query when calling | ||||||
|  | methods such as :meth:`~mongoengine.document.Document.save`, | ||||||
|  | :meth:`~mongoengine.document.Document.update`, | ||||||
|  | :meth:`~mongoengine.document.Document.modify`, or | ||||||
|  | :meth:`~mongoengine.document.Document.delete` on an existing | ||||||
| :class:`~mongoengine.Document` instance:: | :class:`~mongoengine.Document` instance:: | ||||||
|  |  | ||||||
|     class LogEntry(Document): |     class LogEntry(Document): | ||||||
| @@ -685,7 +715,8 @@ This ensures that the shard key is sent with the query when calling the | |||||||
|         data = StringField() |         data = StringField() | ||||||
|  |  | ||||||
|         meta = { |         meta = { | ||||||
|             'shard_key': ('machine', 'timestamp',) |             'shard_key': ('machine', 'timestamp'), | ||||||
|  |             'indexes': ('machine', 'timestamp'), | ||||||
|         } |         } | ||||||
|  |  | ||||||
| .. _document-inheritance: | .. _document-inheritance: | ||||||
| @@ -695,7 +726,7 @@ Document inheritance | |||||||
|  |  | ||||||
| To create a specialised type of a :class:`~mongoengine.Document` you have | To create a specialised type of a :class:`~mongoengine.Document` you have | ||||||
| defined, you may subclass it and add any extra fields or methods you may need. | defined, you may subclass it and add any extra fields or methods you may need. | ||||||
| As this is new class is not a direct subclass of | As this new class is not a direct subclass of | ||||||
| :class:`~mongoengine.Document`, it will not be stored in its own collection; it | :class:`~mongoengine.Document`, it will not be stored in its own collection; it | ||||||
| will use the same collection as its superclass uses. This allows for more | will use the same collection as its superclass uses. This allows for more | ||||||
| convenient and efficient retrieval of related documents -- all you need do is | convenient and efficient retrieval of related documents -- all you need do is | ||||||
| @@ -715,6 +746,30 @@ document.:: | |||||||
| .. note:: From 0.8 onwards :attr:`allow_inheritance` defaults | .. note:: From 0.8 onwards :attr:`allow_inheritance` defaults | ||||||
|           to False, meaning you must set it to True to use inheritance. |           to False, meaning you must set it to True to use inheritance. | ||||||
|  |  | ||||||
|  |           You must also set :attr:`allow_inheritance` to True in an | ||||||
|  |           :class:`~mongoengine.EmbeddedDocument` if you need to subclass it. | ||||||
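|  |  | ||||||
|  |           A minimal sketch of this, using hypothetical ``Comment`` classes:: | ||||||
|  |  | ||||||
|  |               class Comment(EmbeddedDocument): | ||||||
|  |                   content = StringField() | ||||||
|  |                   meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|  |               class DatedComment(Comment): | ||||||
|  |                   date = DateTimeField() | ||||||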
|  |  | ||||||
|  | When it comes to querying with :attr:`.objects()`, `Page.objects()` will query | ||||||
|  | both `Page` and `DatedPage` documents, whereas `DatedPage.objects()` will only query `DatedPage` documents. | ||||||
|  | Behind the scenes, MongoEngine deals with inheritance by adding a :attr:`_cls` attribute containing | ||||||
|  | the class name to every document. When a document is loaded, MongoEngine checks | ||||||
|  | its :attr:`_cls` attribute and uses that class to construct the instance:: | ||||||
|  |  | ||||||
|  |     Page(title='a funky title').save() | ||||||
|  |     DatedPage(title='another title', date=datetime.utcnow()).save() | ||||||
|  |  | ||||||
|  |     print(Page.objects().count())         # 2 | ||||||
|  |     print(DatedPage.objects().count())    # 1 | ||||||
|  |  | ||||||
|  |     # print documents in their native form | ||||||
|  |     # we remove 'id' to avoid polluting the output with unnecessary detail | ||||||
|  |     qs = Page.objects.exclude('id').as_pymongo() | ||||||
|  |     print(list(qs)) | ||||||
|  |     # [ | ||||||
|  |     #   {'_cls': u'Page', 'title': u'a funky title'}, | ||||||
|  |     #   {'_cls': u'Page.DatedPage', 'title': u'another title', 'date': datetime.datetime(2019, 12, 13, 20, 16, 59, 993000)} | ||||||
|  |     # ] | ||||||
|  |  | ||||||
| Working with existing data | Working with existing data | ||||||
| -------------------------- | -------------------------- | ||||||
| As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and | As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and | ||||||
|   | |||||||
| @@ -41,34 +41,6 @@ already exist, then any changes will be updated atomically.  For example:: | |||||||
| .. seealso:: | .. seealso:: | ||||||
|     :ref:`guide-atomic-updates` |     :ref:`guide-atomic-updates` | ||||||
|  |  | ||||||
| Pre save data validation and cleaning |  | ||||||
| ------------------------------------- |  | ||||||
| MongoEngine allows you to create custom cleaning rules for your documents when |  | ||||||
| calling :meth:`~mongoengine.Document.save`.  By providing a custom |  | ||||||
| :meth:`~mongoengine.Document.clean` method you can do any pre validation / data |  | ||||||
| cleaning. |  | ||||||
|  |  | ||||||
| This might be useful if you want to ensure a default value based on other |  | ||||||
| document values for example:: |  | ||||||
|  |  | ||||||
|     class Essay(Document): |  | ||||||
|         status = StringField(choices=('Published', 'Draft'), required=True) |  | ||||||
|         pub_date = DateTimeField() |  | ||||||
|  |  | ||||||
|         def clean(self): |  | ||||||
|             """Ensures that only published essays have a `pub_date` and |  | ||||||
|             automatically sets the pub_date if published and not set""" |  | ||||||
|             if self.status == 'Draft' and self.pub_date is not None: |  | ||||||
|                 msg = 'Draft entries should not have a publication date.' |  | ||||||
|                 raise ValidationError(msg) |  | ||||||
|             # Set the pub_date for published items if not set. |  | ||||||
|             if self.status == 'Published' and self.pub_date is None: |  | ||||||
|                 self.pub_date = datetime.now() |  | ||||||
|  |  | ||||||
| .. note:: |  | ||||||
|     Cleaning is only called if validation is turned on and when calling |  | ||||||
|     :meth:`~mongoengine.Document.save`. |  | ||||||
|  |  | ||||||
| Cascading Saves | Cascading Saves | ||||||
| --------------- | --------------- | ||||||
| If your document contains :class:`~mongoengine.fields.ReferenceField` or | If your document contains :class:`~mongoengine.fields.ReferenceField` or | ||||||
|   | |||||||
| @@ -2,16 +2,15 @@ | |||||||
| GridFS | GridFS | ||||||
| ====== | ====== | ||||||
|  |  | ||||||
| .. versionadded:: 0.4 |  | ||||||
|  |  | ||||||
| Writing | Writing | ||||||
| ------- | ------- | ||||||
|  |  | ||||||
| GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field | GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field | ||||||
| object. This field acts as a file-like object and provides a couple of | object. This field acts as a file-like object and provides a couple of | ||||||
| different ways of inserting and retrieving data. Arbitrary metadata such as | different ways of inserting and retrieving data. Arbitrary metadata such as | ||||||
| content type can also be stored alongside the files. In the following example, | content type can also be stored alongside the files. The object returned when accessing a | ||||||
| a document is created to store details about animals, including a photo:: | FileField is a proxy to `Pymongo's GridFS <https://api.mongodb.com/python/current/examples/gridfs.html#gridfs-example>`_. | ||||||
|  | In the following example, a document is created to store details about animals, including a photo:: | ||||||
|  |  | ||||||
|     class Animal(Document): |     class Animal(Document): | ||||||
|         genus = StringField() |         genus = StringField() | ||||||
| @@ -20,8 +19,8 @@ a document is created to store details about animals, including a photo:: | |||||||
|  |  | ||||||
|     marmot = Animal(genus='Marmota', family='Sciuridae') |     marmot = Animal(genus='Marmota', family='Sciuridae') | ||||||
|  |  | ||||||
|     marmot_photo = open('marmot.jpg', 'rb') |     with open('marmot.jpg', 'rb') as fd: | ||||||
|     marmot.photo.put(marmot_photo, content_type = 'image/jpeg') |         marmot.photo.put(fd, content_type = 'image/jpeg') | ||||||
|     marmot.save() |     marmot.save() | ||||||
|  |  | ||||||
| Retrieval | Retrieval | ||||||
| @@ -34,6 +33,20 @@ field. The file can also be retrieved just as easily:: | |||||||
|     photo = marmot.photo.read() |     photo = marmot.photo.read() | ||||||
|     content_type = marmot.photo.content_type |     content_type = marmot.photo.content_type | ||||||
|  |  | ||||||
|  | .. note:: If you need to read() the content of a file multiple times, you'll need to "rewind" | ||||||
|  |     the file-like object using `seek`:: | ||||||
|  |  | ||||||
|  |         marmot = Animal.objects(genus='Marmota').first() | ||||||
|  |         content1 = marmot.photo.read() | ||||||
|  |         assert content1 != "" | ||||||
|  |  | ||||||
|  |         content2 = marmot.photo.read()    # will be empty | ||||||
|  |         assert content2 == "" | ||||||
|  |  | ||||||
|  |         marmot.photo.seek(0)              # rewind the file by setting the current position of the cursor in the file to 0 | ||||||
|  |         content3 = marmot.photo.read() | ||||||
|  |         assert content3 == content1 | ||||||
|  |  | ||||||
| Streaming | Streaming | ||||||
| --------- | --------- | ||||||
|  |  | ||||||
| @@ -53,7 +66,8 @@ Deletion | |||||||
|  |  | ||||||
| Deleting stored files is achieved with the :func:`delete` method:: | Deleting stored files is achieved with the :func:`delete` method:: | ||||||
|  |  | ||||||
|     marmot.photo.delete() |     marmot.photo.delete()    # Deletes the GridFS document | ||||||
|  |     marmot.save()            # Saves the now-empty GridFS reference contained in the marmot instance | ||||||
|  |  | ||||||
| .. warning:: | .. warning:: | ||||||
|  |  | ||||||
| @@ -71,4 +85,5 @@ Files can be replaced with the :func:`replace` method. This works just like | |||||||
| the :func:`put` method so even metadata can (and should) be replaced:: | the :func:`put` method so even metadata can (and should) be replaced:: | ||||||
|  |  | ||||||
|     another_marmot = open('another_marmot.png', 'rb') |     another_marmot = open('another_marmot.png', 'rb') | ||||||
|     marmot.photo.replace(another_marmot, content_type='image/png') |     marmot.photo.replace(another_marmot, content_type='image/png')  # Replaces the GridFS document | ||||||
|  |     marmot.save()                                                   # Replaces the GridFS reference contained in marmot instance | ||||||
|   | |||||||
| @@ -10,6 +10,10 @@ User Guide | |||||||
|    defining-documents |    defining-documents | ||||||
|    document-instances |    document-instances | ||||||
|    querying |    querying | ||||||
|  |    validation | ||||||
|    gridfs |    gridfs | ||||||
|    signals |    signals | ||||||
|    text-indexes |    text-indexes | ||||||
|  |    migration | ||||||
|  |    logging-monitoring | ||||||
|  |    mongomock | ||||||
|   | |||||||
| @@ -2,17 +2,17 @@ | |||||||
| Installing MongoEngine | Installing MongoEngine | ||||||
| ====================== | ====================== | ||||||
|  |  | ||||||
| To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_ | To use MongoEngine, you will need to download `MongoDB <http://mongodb.com/>`_ | ||||||
| and ensure it is running in an accessible location. You will also need | and ensure it is running in an accessible location. You will also need | ||||||
| `PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you | `PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you | ||||||
| install MongoEngine using setuptools, then the dependencies will be handled for | install MongoEngine using setuptools, then the dependencies will be handled for | ||||||
| you. | you. | ||||||
|  |  | ||||||
| MongoEngine is available on PyPI, so to use it you can use :program:`pip`: | MongoEngine is available on PyPI, so you can use :program:`pip`: | ||||||
|  |  | ||||||
| .. code-block:: console | .. code-block:: console | ||||||
|  |  | ||||||
|     $ pip install mongoengine |     $ python -m pip install mongoengine | ||||||
|  |  | ||||||
| Alternatively, if you don't have setuptools installed, `download it from PyPi | Alternatively, if you don't have setuptools installed, `download it from PyPi | ||||||
| <http://pypi.python.org/pypi/mongoengine/>`_ and run | <http://pypi.python.org/pypi/mongoengine/>`_ and run | ||||||
|   | |||||||
							
								
								
									
docs/guide/logging-monitoring.rst (new file, 80 lines)
							| @@ -0,0 +1,80 @@ | |||||||
|  | ================== | ||||||
|  | Logging/Monitoring | ||||||
|  | ================== | ||||||
|  |  | ||||||
|  | It is possible to use `pymongo.monitoring <https://api.mongodb.com/python/current/api/pymongo/monitoring.html>`_ to monitor | ||||||
|  | the driver events (e.g. queries, connections, etc.). This can be handy if you want to monitor the queries issued by | ||||||
|  | MongoEngine to the driver. | ||||||
|  |  | ||||||
|  | To use `pymongo.monitoring` with MongoEngine, you need to make sure that you register the listeners | ||||||
|  | **before** establishing the database connection (i.e. before calling `connect`). | ||||||
|  |  | ||||||
|  | The following snippet provides basic logging of all command events: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     import logging | ||||||
|  |     from pymongo import monitoring | ||||||
|  |     from mongoengine import * | ||||||
|  |  | ||||||
|  |     log = logging.getLogger() | ||||||
|  |     log.setLevel(logging.DEBUG) | ||||||
|  |     logging.basicConfig(level=logging.DEBUG) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     class CommandLogger(monitoring.CommandListener): | ||||||
|  |  | ||||||
|  |         def started(self, event): | ||||||
|  |             log.debug("Command {0.command_name} with request id " | ||||||
|  |                      "{0.request_id} started on server " | ||||||
|  |                      "{0.connection_id}".format(event)) | ||||||
|  |  | ||||||
|  |         def succeeded(self, event): | ||||||
|  |             log.debug("Command {0.command_name} with request id " | ||||||
|  |                      "{0.request_id} on server {0.connection_id} " | ||||||
|  |                      "succeeded in {0.duration_micros} " | ||||||
|  |                      "microseconds".format(event)) | ||||||
|  |  | ||||||
|  |         def failed(self, event): | ||||||
|  |             log.debug("Command {0.command_name} with request id " | ||||||
|  |                      "{0.request_id} on server {0.connection_id} " | ||||||
|  |                      "failed in {0.duration_micros} " | ||||||
|  |                      "microseconds".format(event)) | ||||||
|  |  | ||||||
|  |     monitoring.register(CommandLogger()) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     class Jedi(Document): | ||||||
|  |         name = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     connect() | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     log.info('GO!') | ||||||
|  |  | ||||||
|  |     log.info('Saving an item through MongoEngine...') | ||||||
|  |     Jedi(name='Obi-Wan Kenobii').save() | ||||||
|  |  | ||||||
|  |     log.info('Querying through MongoEngine...') | ||||||
|  |     obiwan = Jedi.objects.first() | ||||||
|  |  | ||||||
|  |     log.info('Updating through MongoEngine...') | ||||||
|  |     obiwan.name = 'Obi-Wan Kenobi' | ||||||
|  |     obiwan.save() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Executing this prints the following output:: | ||||||
|  |  | ||||||
|  |     INFO:root:GO! | ||||||
|  |     INFO:root:Saving an item through MongoEngine... | ||||||
|  |     DEBUG:root:Command insert with request id 1681692777 started on server ('localhost', 27017) | ||||||
|  |     DEBUG:root:Command insert with request id 1681692777 on server ('localhost', 27017) succeeded in 562 microseconds | ||||||
|  |     INFO:root:Querying through MongoEngine... | ||||||
|  |     DEBUG:root:Command find with request id 1714636915 started on server ('localhost', 27017) | ||||||
|  |     DEBUG:root:Command find with request id 1714636915 on server ('localhost', 27017) succeeded in 341 microseconds | ||||||
|  |     INFO:root:Updating through MongoEngine... | ||||||
|  |     DEBUG:root:Command update with request id 1957747793 started on server ('localhost', 27017) | ||||||
|  |     DEBUG:root:Command update with request id 1957747793 on server ('localhost', 27017) succeeded in 455 microseconds | ||||||
|  |  | ||||||
|  | More details can of course be obtained by checking the `event` argument from the `CommandListener`. | ||||||
							
								
								
									
docs/guide/migration.rst (new file, 267 lines)
							| @@ -0,0 +1,267 @@ | |||||||
|  | =================== | ||||||
|  | Documents migration | ||||||
|  | =================== | ||||||
|  |  | ||||||
|  | The structure of your documents and their associated mongoengine schemas are likely | ||||||
|  | to change over the lifetime of an application. This section provides guidance and | ||||||
|  | recommendations on how to deal with migrations. | ||||||
|  |  | ||||||
|  | Due to the very flexible nature of mongodb, migrations of models aren't trivial and, | ||||||
|  | for people who know `alembic` for `sqlalchemy`, there is unfortunately no equivalent | ||||||
|  | library that manages migrations automatically for mongoengine. | ||||||
|  |  | ||||||
|  | Example 1: Addition of a field | ||||||
|  | ============================== | ||||||
|  |  | ||||||
|  | Let's start by taking a simple example of a model change and review the different options you | ||||||
|  | have for dealing with the migration. | ||||||
|  |  | ||||||
|  | Let's assume we start with the following schema and save an instance: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     class User(Document): | ||||||
|  |         name = StringField() | ||||||
|  |  | ||||||
|  |     User(name="John Doe").save() | ||||||
|  |  | ||||||
|  |     # print the objects as they exist in mongodb | ||||||
|  |     print(User.objects().as_pymongo())    # [{u'_id': ObjectId('5d06b9c3d7c1f18db3e7c874'), u'name': u'John Doe'}] | ||||||
|  |  | ||||||
|  | In the next version of your application, let's now assume that a new field `enabled` gets added to the | ||||||
|  | existing ``User`` model with a `default=True`. Thus, you simply update the ``User`` class to the following: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     class User(Document): | ||||||
|  |         name = StringField(required=True) | ||||||
|  |         enabled = BooleanField(default=True) | ||||||
|  |  | ||||||
|  | Without applying any migration, we now reload an object from the database into the ``User`` class | ||||||
|  | and check its `enabled` attribute: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     assert User.objects.count() == 1 | ||||||
|  |     user = User.objects().first() | ||||||
|  |     assert user.enabled is True | ||||||
|  |     assert User.objects(enabled=True).count() == 0    # uh? | ||||||
|  |     assert User.objects(enabled=False).count() == 0   # uh? | ||||||
|  |  | ||||||
|  |     # this is consistent with what we have in the database | ||||||
|  |     # in fact, 'enabled' does not exist | ||||||
|  |     print(User.objects().as_pymongo().first())    # {u'_id': ObjectId('5d06b9c3d7c1f18db3e7c874'), u'name': u'John Doe'} | ||||||
|  |     assert User.objects(enabled=None).count() == 1 | ||||||
|  |  | ||||||
|  | As you can see, even if the document wasn't updated, mongoengine applies the default value seamlessly when it | ||||||
|  | loads the pymongo dict into a ``User`` instance. At first sight it looks like you don't need to migrate the | ||||||
|  | existing documents when adding new fields, but this actually leads to inconsistencies when it comes to querying. | ||||||
|  |  | ||||||
|  | In fact, when querying, mongoengine doesn't try to account for the default value of the new field, so | ||||||
|  | if you don't actually migrate the existing documents, you run the risk that querying/updating | ||||||
|  | will miss relevant records. | ||||||
|  |  | ||||||
|  | When adding fields/modifying default values, you can use any of the following to do the migration | ||||||
|  | as a standalone script: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     # Use mongoengine to set a default value for a given field | ||||||
|  |     User.objects().update(enabled=True) | ||||||
|  |     # or use pymongo | ||||||
|  |     user_coll = User._get_collection() | ||||||
|  |     user_coll.update_many({}, {'$set': {'enabled': True}}) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Example 2: Inheritance change | ||||||
|  | ============================= | ||||||
|  |  | ||||||
|  | Let's consider the following example: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     class Human(Document): | ||||||
|  |         name = StringField() | ||||||
|  |         meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |     class Jedi(Human): | ||||||
|  |         dark_side = BooleanField() | ||||||
|  |         light_saber_color = StringField() | ||||||
|  |  | ||||||
|  |     Jedi(name="Darth Vader", dark_side=True, light_saber_color="red").save() | ||||||
|  |     Jedi(name="Obi Wan Kenobi", dark_side=False, light_saber_color="blue").save() | ||||||
|  |  | ||||||
|  |     assert Human.objects.count() == 2 | ||||||
|  |     assert Jedi.objects.count() == 2 | ||||||
|  |  | ||||||
|  |     # Let's check how these documents got stored in mongodb | ||||||
|  |     print(Jedi.objects.as_pymongo()) | ||||||
|  |     # [ | ||||||
|  |     #   {'_id': ObjectId('5fac4aaaf61d7fb06046e0f9'), '_cls': 'Human.Jedi', 'name': 'Darth Vader', 'dark_side': True, 'light_saber_color': 'red'}, | ||||||
|  |     #   {'_id': ObjectId('5fac4ac4f61d7fb06046e0fa'), '_cls': 'Human.Jedi', 'name': 'Obi Wan Kenobi', 'dark_side': False, 'light_saber_color': 'blue'} | ||||||
|  |     # ] | ||||||
|  |  | ||||||
|  | As you can observe, when you use inheritance, MongoEngine stores a field named '_cls' behind the scenes to keep | ||||||
|  | track of the Document class. | ||||||
|  |  | ||||||
|  | Let's now take the scenario in which you want to refactor the inheritance schema and: | ||||||
|  | - have the Jedis with dark_side=False/True become GoodJedi/BadSith documents | ||||||
|  | - get rid of the 'dark_side' field | ||||||
|  |  | ||||||
|  | i.e. move to the following schemas: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     # unchanged | ||||||
|  |     class Human(Document): | ||||||
|  |         name = StringField() | ||||||
|  |         meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |     # attribute 'dark_side' removed | ||||||
|  |     class GoodJedi(Human): | ||||||
|  |         light_saber_color = StringField() | ||||||
|  |  | ||||||
|  |     # new class | ||||||
|  |     class BadSith(Human): | ||||||
|  |         light_saber_color = StringField() | ||||||
|  |  | ||||||
|  | MongoEngine doesn't know about these changes or how to map them to the existing data, | ||||||
|  | so if you don't apply any migration, you will observe strange behavior, as if the collection were suddenly | ||||||
|  | empty. | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     # As a reminder, the documents that we inserted | ||||||
|  |     # have the _cls field = 'Human.Jedi' | ||||||
|  |  | ||||||
|  |     # Following has no match | ||||||
|  |     # because the query that is used behind the scene is | ||||||
|  |     # filtering on {'_cls': 'Human.GoodJedi'} | ||||||
|  |     assert GoodJedi.objects().count() == 0 | ||||||
|  |  | ||||||
|  |     # Following has also no match | ||||||
|  |     # because it is filtering on {'_cls': {'$in': ('Human', 'Human.GoodJedi', 'Human.BadSith')}} | ||||||
|  |     # which has no match | ||||||
|  |     assert Human.objects.count() == 0 | ||||||
|  |     assert Human.objects.first() is None | ||||||
|  |  | ||||||
|  |     # If we bypass MongoEngine and make use of underlying driver (PyMongo) | ||||||
|  |     # we can see that the documents are there | ||||||
|  |     humans_coll = Human._get_collection() | ||||||
|  |     assert humans_coll.count_documents({}) == 2 | ||||||
|  |     # print first document | ||||||
|  |     print(humans_coll.find_one()) | ||||||
|  |     # {'_id': ObjectId('5fac4aaaf61d7fb06046e0f9'), '_cls': 'Human.Jedi', 'name': 'Darth Vader', 'dark_side': True, 'light_saber_color': 'red'} | ||||||
|  |  | ||||||
|  | As you can see, the first obvious problem is that we need to modify the '_cls' values based on the existing | ||||||
|  | value of 'dark_side' in each document. | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     humans_coll = Human._get_collection() | ||||||
|  |     old_class = 'Human.Jedi' | ||||||
|  |     good_jedi_class = 'Human.GoodJedi' | ||||||
|  |     bad_sith_class = 'Human.BadSith' | ||||||
|  |     humans_coll.update_many({'_cls': old_class, 'dark_side': False}, {'$set': {'_cls': good_jedi_class}}) | ||||||
|  |     humans_coll.update_many({'_cls': old_class, 'dark_side': True}, {'$set': {'_cls': bad_sith_class}}) | ||||||
|  |  | ||||||
|  | Let's now check whether querying has improved in MongoEngine: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     assert GoodJedi.objects().count() == 1  # Hoorah! | ||||||
|  |     assert BadSith.objects().count() == 1   # Hoorah! | ||||||
|  |     assert Human.objects.count() == 2       # Hoorah! | ||||||
|  |  | ||||||
|  |     # let's now check that documents load correctly | ||||||
|  |     jedi = GoodJedi.objects().first() | ||||||
|  |     # raises FieldDoesNotExist: The fields "{'dark_side'}" do not exist on the document "Human.GoodJedi" | ||||||
|  |  | ||||||
|  | In fact, we only took care of renaming the _cls values but we haven't removed the 'dark_side' field, | ||||||
|  | which no longer exists on the GoodJedi and BadSith models. | ||||||
|  | Let's remove the field from the collection: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     humans_coll = Human._get_collection() | ||||||
|  |     humans_coll.update_many({}, {'$unset': {'dark_side': 1}}) | ||||||
|  |  | ||||||
|  | .. note:: We did this migration in 2 different steps for the sake of the example, but it could have been combined | ||||||
|  |     with the migration of the _cls field: :: | ||||||
|  |  | ||||||
|  |         humans_coll.update_many( | ||||||
|  |             {'_cls': old_class, 'dark_side': False}, | ||||||
|  |             { | ||||||
|  |                 '$set': {'_cls': good_jedi_class}, | ||||||
|  |                 '$unset': {'dark_side': 1} | ||||||
|  |             } | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | And verify that the documents now load correctly: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     jedi = GoodJedi.objects().first() | ||||||
|  |     assert jedi.name == "Obi Wan Kenobi" | ||||||
|  |  | ||||||
|  |     sith = BadSith.objects().first() | ||||||
|  |     assert sith.name == "Darth Vader" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Another way of dealing with this migration is to iterate over | ||||||
|  | the documents and update/replace them one by one. This is much slower, but | ||||||
|  | it is often useful for complex migrations of Document models. | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     for doc in humans_coll.find(): | ||||||
|  |         if doc['_cls'] == 'Human.Jedi': | ||||||
|  |             doc['_cls'] =  'Human.BadSith' if doc['dark_side'] else 'Human.GoodJedi' | ||||||
|  |             doc.pop('dark_side') | ||||||
|  |             humans_coll.replace_one({'_id': doc['_id']}, doc) | ||||||
|  |  | ||||||
|  | .. warning:: Be aware of this `flaw <https://groups.google.com/g/mongodb-user/c/AFC1ia7MHzk>`_ if you modify documents while iterating | ||||||
|  |  | ||||||
|  | Recommendations | ||||||
|  | =============== | ||||||
|  |  | ||||||
|  | - Write migration scripts whenever you make changes to the model schemas. | ||||||
|  | - Using :class:`~mongoengine.DynamicDocument` or ``meta = {"strict": False}`` may help you avoid some migrations or allow two versions of your application to co-exist (see the sketch after this list). | ||||||
|  | - Write post-processing checks to verify that the migration scripts worked. See below. | ||||||
|  |  | ||||||
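|  | As a sketch of the second recommendation, marking a model as non-strict means that documents | ||||||
|  | still carrying a removed field (such as 'dark_side' above) will load without raising | ||||||
|  | `FieldDoesNotExist`; the unknown field is simply ignored: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     # hypothetical non-strict variant of the GoodJedi model from Example 2 | ||||||
|  |     class GoodJedi(Human): | ||||||
|  |         light_saber_color = StringField() | ||||||
|  |         meta = {"strict": False} | ||||||
|  |  | ||||||
|  |     # loads even if the stored document still contains 'dark_side' | ||||||
|  |     jedi = GoodJedi.objects().first() | ||||||
|  |  | ||||||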
|  | Post-processing checks | ||||||
|  | ====================== | ||||||
|  |  | ||||||
|  | The following recipe can be used to sanity check a Document collection after you have applied a migration. | ||||||
|  | It does not make any assumptions about what was migrated; it fetches 1000 objects randomly and | ||||||
|  | runs some quick checks on them to make sure the documents look OK. As it is, it will fail | ||||||
|  | on the first occurrence of an error, but this is something that can be adapted based on your needs. | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     import logging | ||||||
|  |  | ||||||
|  |     LOG = logging.getLogger(__name__) | ||||||
|  |  | ||||||
|  |     def get_random_oids(collection, sample_size): | ||||||
|  |         pipeline = [{"$project": {'_id': 1}}, {"$sample": {"size": sample_size}}] | ||||||
|  |         return [s['_id'] for s in collection.aggregate(pipeline)] | ||||||
|  |  | ||||||
|  |     def get_random_documents(DocCls, sample_size): | ||||||
|  |         doc_collection = DocCls._get_collection() | ||||||
|  |         random_oids = get_random_oids(doc_collection, sample_size) | ||||||
|  |         return DocCls.objects(id__in=random_oids) | ||||||
|  |  | ||||||
|  |     def check_documents(DocCls, sample_size): | ||||||
|  |         for doc in get_random_documents(DocCls, sample_size): | ||||||
|  |             # general validation (types and values) | ||||||
|  |             doc.validate() | ||||||
|  |  | ||||||
|  |             # load all subfields, | ||||||
|  |             # this may trigger additional queries if you have ReferenceFields | ||||||
|  |             # so it may be slow | ||||||
|  |             for field in doc._fields: | ||||||
|  |                 try: | ||||||
|  |                     getattr(doc, field) | ||||||
|  |                 except Exception: | ||||||
|  |                     LOG.warning(f"Could not load field {field} in Document {doc.id}") | ||||||
|  |                     raise | ||||||
|  |  | ||||||
|  |     check_documents(Human, sample_size=1000) | ||||||
							
								
								
									
docs/guide/mongomock.rst (new file, 48 lines)
							| @@ -0,0 +1,48 @@ | |||||||
|  | ============================== | ||||||
|  | Use mongomock for testing | ||||||
|  | ============================== | ||||||
|  |  | ||||||
|  | `mongomock <https://github.com/vmalloc/mongomock/>`_ is a package that does just | ||||||
|  | what the name implies: it mocks a mongo database. | ||||||
|  |  | ||||||
|  | To use it with mongoengine, simply specify mongomock when connecting with | ||||||
|  | mongoengine: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     connect('mongoenginetest', host='mongomock://localhost') | ||||||
|  |     conn = get_connection() | ||||||
|  |  | ||||||
|  | or with an alias: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     connect('mongoenginetest', host='mongomock://localhost', alias='testdb') | ||||||
|  |     conn = get_connection('testdb') | ||||||
|  |  | ||||||
|  | Example of test file: | ||||||
|  | --------------------- | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     import unittest | ||||||
|  |     from mongoengine import connect, disconnect, Document, StringField | ||||||
|  |  | ||||||
|  |     class Person(Document): | ||||||
|  |         name = StringField() | ||||||
|  |  | ||||||
|  |     class TestPerson(unittest.TestCase): | ||||||
|  |  | ||||||
|  |         @classmethod | ||||||
|  |         def setUpClass(cls): | ||||||
|  |             connect('mongoenginetest', host='mongomock://localhost') | ||||||
|  |  | ||||||
|  |         @classmethod | ||||||
|  |         def tearDownClass(cls): | ||||||
|  |            disconnect() | ||||||
|  |  | ||||||
|  |         def test_thing(self): | ||||||
|  |             pers = Person(name='John') | ||||||
|  |             pers.save() | ||||||
|  |  | ||||||
|  |             fresh_pers = Person.objects().first() | ||||||
|  |             assert fresh_pers.name ==  'John' | ||||||
| @@ -39,6 +39,14 @@ syntax:: | |||||||
|     # been written by a user whose 'country' field is set to 'uk' |     # been written by a user whose 'country' field is set to 'uk' | ||||||
|     uk_pages = Page.objects(author__country='uk') |     uk_pages = Page.objects(author__country='uk') | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |  | ||||||
|  |    (version **0.9.1+**) If your field name looks like a mongodb operator name (for example | ||||||
|  |    type, lte, lt...) and you want to place it at the end of a lookup keyword, | ||||||
|  |    mongoengine automatically prepends $ to it. To avoid this, use __ at the end of | ||||||
|  |    your lookup keyword. For example, if your field name is ``type`` and you want to | ||||||
|  |    query by this field, you must use ``.objects(user__type__="admin")`` instead of | ||||||
|  |    ``.objects(user__type="admin")`` | ||||||
|  |  | ||||||
| Query operators | Query operators | ||||||
| =============== | =============== | ||||||
| @@ -56,7 +64,7 @@ Available operators are as follows: | |||||||
| * ``gt`` -- greater than | * ``gt`` -- greater than | ||||||
| * ``gte`` -- greater than or equal to | * ``gte`` -- greater than or equal to | ||||||
| * ``not`` -- negate a standard check, may be used before other operators (e.g. | * ``not`` -- negate a standard check, may be used before other operators (e.g. | ||||||
|   ``Q(age__not__mod=5)``) |   ``Q(age__not__mod=(5, 0))``) | ||||||
| * ``in`` -- value is in list (a list of values should be provided) | * ``in`` -- value is in list (a list of values should be provided) | ||||||
| * ``nin`` -- value is not in list (a list of values should be provided) | * ``nin`` -- value is not in list (a list of values should be provided) | ||||||
| * ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values | * ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values | ||||||
| @@ -138,9 +146,10 @@ The following were added in MongoEngine 0.8 for | |||||||
|         loc.objects(point__near=[40, 5]) |         loc.objects(point__near=[40, 5]) | ||||||
|         loc.objects(point__near={"type": "Point", "coordinates": [40, 5]}) |         loc.objects(point__near={"type": "Point", "coordinates": [40, 5]}) | ||||||
|  |  | ||||||
|   You can also set the maximum distance in meters as well:: |   You can also set the maximum and/or minimum distance in meters:: | ||||||
|  |  | ||||||
|         loc.objects(point__near=[40, 5], point__max_distance=1000) |         loc.objects(point__near=[40, 5], point__max_distance=1000) | ||||||
|  |         loc.objects(point__near=[40, 5], point__min_distance=100) | ||||||
|  |  | ||||||
| The older 2D indexes are still supported with the | The older 2D indexes are still supported with the | ||||||
| :class:`~mongoengine.fields.GeoPointField`: | :class:`~mongoengine.fields.GeoPointField`: | ||||||
| @@ -160,7 +169,8 @@ The older 2D indexes are still supported with the | |||||||
|  |  | ||||||
| * ``max_distance`` -- can be added to your location queries to set a maximum | * ``max_distance`` -- can be added to your location queries to set a maximum | ||||||
|   distance. |   distance. | ||||||
|  | * ``min_distance`` -- can be added to your location queries to set a minimum | ||||||
|  |   distance. | ||||||
|  |  | ||||||
| Querying lists | Querying lists | ||||||
| -------------- | -------------- | ||||||
| @@ -212,6 +222,18 @@ keyword argument:: | |||||||
|  |  | ||||||
| .. versionadded:: 0.4 | .. versionadded:: 0.4 | ||||||
|  |  | ||||||
|  | Sorting/Ordering results | ||||||
|  | ======================== | ||||||
|  | It is possible to order the results by one or more keys using :meth:`~mongoengine.queryset.QuerySet.order_by`. | ||||||
|  | The order may be specified by prefixing each key with "+" or "-". Ascending order is assumed if there's no prefix:: | ||||||
|  |  | ||||||
|  |     # Order by ascending date | ||||||
|  |     blogs = BlogPost.objects().order_by('date')    # equivalent to .order_by('+date') | ||||||
|  |  | ||||||
|  |     # Order by ascending date first, then descending title | ||||||
|  |     blogs = BlogPost.objects().order_by('+date', '-title') | ||||||
|  |  | ||||||
|  |  | ||||||
| Limiting and skipping results | Limiting and skipping results | ||||||
| ============================= | ============================= | ||||||
| Just as with traditional ORMs, you may limit the number of results returned or | Just as with traditional ORMs, you may limit the number of results returned or | ||||||
| @@ -227,7 +249,7 @@ is preferred for achieving this:: | |||||||
|     # All except for the first 5 people |     # All except for the first 5 people | ||||||
|     users = User.objects[5:] |     users = User.objects[5:] | ||||||
|  |  | ||||||
|     # 5 users, starting from the 10th user found |     # 5 users, starting from the 11th user found | ||||||
|     users = User.objects[10:15] |     users = User.objects[10:15] | ||||||
|  |  | ||||||
| You may also index the query to retrieve a single result. If an item at that | You may also index the query to retrieve a single result. If an item at that | ||||||
| @@ -255,21 +277,11 @@ no document matches the query, and | |||||||
| if more than one document matched the query.  These exceptions are merged into | if more than one document matched the query.  These exceptions are merged into | ||||||
| your document definitions eg: `MyDoc.DoesNotExist` | your document definitions eg: `MyDoc.DoesNotExist` | ||||||
|  |  | ||||||
| A variation of this method exists, | A variation of this method, get_or_create(), used to exist, but it was unsafe. | ||||||
| :meth:`~mongoengine.queryset.QuerySet.get_or_create`, that will create a new | It could not be made safe, because there are no transactions in MongoDB. Other | ||||||
| document with the query arguments if no documents match the query. An | approaches, such as atomic upserts (see the sketch below), should be investigated | ||||||
| additional keyword argument, :attr:`defaults` may be provided, which will be | to ensure you don't accidentally duplicate data when using something similar to | ||||||
| used as default values for the new document, in the case that it should need | this method. Therefore it was deprecated in 0.8 and removed in 0.10. | ||||||
| to be created:: |  | ||||||
|  |  | ||||||
|     >>> a, created = User.objects.get_or_create(name='User A', defaults={'age': 30}) |  | ||||||
|     >>> b, created = User.objects.get_or_create(name='User A', defaults={'age': 40}) |  | ||||||
|     >>> a.name == b.name and a.age == b.age |  | ||||||
|     True |  | ||||||
|  |  | ||||||
| .. warning:: |  | ||||||
|     :meth:`~mongoengine.queryset.QuerySet.get_or_create` method is deprecated |  | ||||||
|     since :mod:`mongoengine` 0.8. |  | ||||||
|  |  | ||||||
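|  | If you need similar get-or-create behaviour, one possible approach (a sketch, | ||||||
|  | assuming the ``User`` model from the examples above has ``name`` and ``age`` | ||||||
|  | fields) is an atomic upsert through | ||||||
|  | :meth:`~mongoengine.queryset.QuerySet.modify`:: | ||||||
|  |  | ||||||
|  |     # Fetch the user with that name, creating it atomically if it is missing | ||||||
|  |     user = User.objects(name='User A').modify( | ||||||
|  |         upsert=True,            # insert a document if no match is found | ||||||
|  |         new=True,               # return the (possibly freshly created) document | ||||||
|  |         set_on_insert__age=30,  # only applied when the document is created | ||||||
|  |     ) | ||||||
|  |  | ||||||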
| Default Document queries | Default Document queries | ||||||
| ======================== | ======================== | ||||||
| @@ -340,12 +352,19 @@ Javascript code that is executed on the database server. | |||||||
|  |  | ||||||
| Counting results | Counting results | ||||||
| ---------------- | ---------------- | ||||||
| Just as with limiting and skipping results, there is a method on | Just as with limiting and skipping results, there is a method on a | ||||||
| :class:`~mongoengine.queryset.QuerySet` objects -- | :class:`~mongoengine.queryset.QuerySet` object -- | ||||||
| :meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic | :meth:`~mongoengine.queryset.QuerySet.count`:: | ||||||
| way of achieving this:: |  | ||||||
|  |  | ||||||
|     num_users = len(User.objects) |     num_users = User.objects.count() | ||||||
|  |  | ||||||
|  | You could technically use ``len(User.objects)`` to get the same result, but it | ||||||
|  | would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`. | ||||||
|  | When you execute a server-side count query, you let MongoDB do the heavy | ||||||
|  | lifting and you receive a single integer over the wire. ``len()``, on the other | ||||||
|  | hand, retrieves all the results, places them in a local cache, and finally counts | ||||||
|  | them, which makes it much slower than | ||||||
|  | :meth:`~mongoengine.queryset.QuerySet.count`. | ||||||
|  |  | ||||||
| Further aggregation | Further aggregation | ||||||
| ------------------- | ------------------- | ||||||
| @@ -379,6 +398,25 @@ would be generating "tag-clouds":: | |||||||
|     top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] |     top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | MongoDB aggregation API | ||||||
|  | ----------------------- | ||||||
|  | If you need to run aggregation pipelines, MongoEngine provides an entry point to `Pymongo's aggregation framework <https://api.mongodb.com/python/current/examples/aggregation.html#aggregation-framework>`_ | ||||||
|  | through :meth:`~mongoengine.queryset.QuerySet.aggregate`. Check out Pymongo's documentation for the pipeline syntax. | ||||||
|  | An example of its use would be:: | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         Person(name='John').save() | ||||||
|  |         Person(name='Bob').save() | ||||||
|  |  | ||||||
|  |         pipeline = [ | ||||||
|  |             {"$sort" : {"name" : -1}}, | ||||||
|  |             {"$project": {"_id": 0, "name": {"$toUpper": "$name"}}} | ||||||
|  |             ] | ||||||
|  |         data = list(Person.objects().aggregate(pipeline))    # aggregate() returns a cursor | ||||||
|  |         assert data == [{'name': 'BOB'}, {'name': 'JOHN'}] | ||||||
|  |  | ||||||
| Query efficiency and performance | Query efficiency and performance | ||||||
| ================================ | ================================ | ||||||
|  |  | ||||||
| @@ -449,14 +487,14 @@ data. To turn off dereferencing of the results of a query use | |||||||
| :func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so:: | :func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so:: | ||||||
|  |  | ||||||
|     post = Post.objects.no_dereference().first() |     post = Post.objects.no_dereference().first() | ||||||
|     assert(isinstance(post.author, ObjectId)) |     assert(isinstance(post.author, DBRef)) | ||||||
|  |  | ||||||
| You can also turn off all dereferencing for a fixed period by using the | You can also turn off all dereferencing for a fixed period by using the | ||||||
| :class:`~mongoengine.context_managers.no_dereference` context manager:: | :class:`~mongoengine.context_managers.no_dereference` context manager:: | ||||||
|  |  | ||||||
|     with no_dereference(Post) as Post: |     with no_dereference(Post) as Post: | ||||||
|         post = Post.objects.first() |         post = Post.objects.first() | ||||||
|         assert(isinstance(post.author, ObjectId)) |         assert(isinstance(post.author, DBRef)) | ||||||
|  |  | ||||||
|     # Outside the context manager dereferencing occurs. |     # Outside the context manager dereferencing occurs. | ||||||
|     assert(isinstance(post.author, User)) |     assert(isinstance(post.author, User)) | ||||||
| @@ -477,6 +515,8 @@ operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the | |||||||
| first positional argument to :attr:`Document.objects` when you filter it by | first positional argument to :attr:`Document.objects` when you filter it by | ||||||
| calling it with keyword arguments:: | calling it with keyword arguments:: | ||||||
|  |  | ||||||
|  |     from mongoengine.queryset.visitor import Q | ||||||
|  |  | ||||||
|     # Get published posts |     # Get published posts | ||||||
|     Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now())) |     Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now())) | ||||||
|  |  | ||||||
| @@ -556,10 +596,20 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`:: | |||||||
|     >>> post.tags |     >>> post.tags | ||||||
|     ['database', 'mongodb'] |     ['database', 'mongodb'] | ||||||
|  |  | ||||||
|  | Since MongoDB version 2.6, the push operator supports a $position value, which allows | ||||||
|  | you to push values at a given index:: | ||||||
|  |  | ||||||
|  |     >>> post = BlogPost(title="Test", tags=["mongo"]) | ||||||
|  |     >>> post.save() | ||||||
|  |     >>> post.update(push__tags__0=["database", "code"]) | ||||||
|  |     >>> post.reload() | ||||||
|  |     >>> post.tags | ||||||
|  |     ['database', 'code', 'mongo'] | ||||||
|  |  | ||||||
| .. note:: | .. note:: | ||||||
|     Currently only top level lists are handled, future versions of mongodb / |     Currently only top level lists are handled, future versions of mongodb / | ||||||
|     pymongo plan to support nested positional operators.  See `The $ positional |     pymongo plan to support nested positional operators.  See `The $ positional | ||||||
|     operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_. |     operator <https://docs.mongodb.com/manual/tutorial/update-documents/#Updating-The%24positionaloperator>`_. | ||||||
|  |  | ||||||
| Server-side javascript execution | Server-side javascript execution | ||||||
| ================================ | ================================ | ||||||
| @@ -598,7 +648,7 @@ Some variables are made available in the scope of the Javascript function: | |||||||
|  |  | ||||||
| The following example demonstrates the intended usage of | The following example demonstrates the intended usage of | ||||||
| :meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums | :meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums | ||||||
| over a field on a document (this functionality is already available throught | over a field on a document (this functionality is already available through | ||||||
| :meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of | :meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of | ||||||
| example):: | example):: | ||||||
|  |  | ||||||
| @@ -663,4 +713,3 @@ following example shows how the substitutions are made:: | |||||||
|         return comments; |         return comments; | ||||||
|     } |     } | ||||||
|     """) |     """) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -43,10 +43,10 @@ Available signals include: | |||||||
|   has taken place but before saving. |   has taken place but before saving. | ||||||
|  |  | ||||||
| `post_save` | `post_save` | ||||||
|   Called within :meth:`~mongoengine.Document.save` after all actions |   Called within :meth:`~mongoengine.Document.save` after most actions | ||||||
|   (validation, insert/update, cascades, clearing dirty flags) have completed |   (validation, insert/update, and cascades, but not clearing dirty flags) have | ||||||
|   successfully.  Passed the additional boolean keyword argument `created` to |   completed successfully.  Passed the additional boolean keyword argument | ||||||
|   indicate if the save was an insert or an update. |   `created` to indicate if the save was an insert or an update. | ||||||
|  |  | ||||||
| `pre_delete` | `pre_delete` | ||||||
|   Called within :meth:`~mongoengine.Document.delete` prior to |   Called within :meth:`~mongoengine.Document.delete` prior to | ||||||
| @@ -113,6 +113,10 @@ handlers within your subclass:: | |||||||
|     signals.pre_save.connect(Author.pre_save, sender=Author) |     signals.pre_save.connect(Author.pre_save, sender=Author) | ||||||
|     signals.post_save.connect(Author.post_save, sender=Author) |     signals.post_save.connect(Author.post_save, sender=Author) | ||||||
|  |  | ||||||
|  | .. warning:: | ||||||
|  |  | ||||||
|  |     Note that EmbeddedDocument only supports the pre/post_init signals; pre/post_save, etc. should be attached to Document classes only. Attaching pre_save to an EmbeddedDocument is silently ignored. | ||||||
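|  |  | ||||||
|  |     A minimal sketch, using a hypothetical ``Address`` embedded document:: | ||||||
|  |  | ||||||
|  |         class Address(EmbeddedDocument): | ||||||
|  |             city = StringField() | ||||||
|  |  | ||||||
|  |         def address_init(sender, document, **kwargs): | ||||||
|  |             print('Address initialised') | ||||||
|  |  | ||||||
|  |         signals.post_init.connect(address_init, sender=Address)  # supported, fires on Address(...) | ||||||
|  |         signals.pre_save.connect(address_init, sender=Address)   # connected, but silently never fired | ||||||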
|  |  | ||||||
| Finally, you can also use this small decorator to quickly create a number of | Finally, you can also use this small decorator to quickly create a number of | ||||||
| signals and attach them to your :class:`~mongoengine.Document` or | signals and attach them to your :class:`~mongoengine.Document` or | ||||||
| :class:`~mongoengine.EmbeddedDocument` subclasses as class decorators:: | :class:`~mongoengine.EmbeddedDocument` subclasses as class decorators:: | ||||||
| @@ -142,11 +146,4 @@ cleaner looking while still allowing manual execution of the callback:: | |||||||
|         modified = DateTimeField() |         modified = DateTimeField() | ||||||
|  |  | ||||||
|  |  | ||||||
| ReferenceFields and Signals |  | ||||||
| --------------------------- |  | ||||||
|  |  | ||||||
| Currently `reverse_delete_rule` does not trigger signals on the other part of |  | ||||||
| the relationship.  If this is required you must manually handle the |  | ||||||
| reverse deletion. |  | ||||||
|  |  | ||||||
| .. _blinker: http://pypi.python.org/pypi/blinker | .. _blinker: http://pypi.python.org/pypi/blinker | ||||||
|   | |||||||
| @@ -8,7 +8,7 @@ After MongoDB 2.4 version, supports search documents by text indexes. | |||||||
| Defining a Document with text index | Defining a Document with text index | ||||||
| =================================== | =================================== | ||||||
| Use the *$* prefix to set a text index. Look at the declaration:: | Use the *$* prefix to set a text index. Look at the declaration:: | ||||||
|    |  | ||||||
|   class News(Document): |   class News(Document): | ||||||
|       title = StringField() |       title = StringField() | ||||||
|       content = StringField() |       content = StringField() | ||||||
| @@ -17,7 +17,7 @@ Use the *$* prefix to set a text index, Look the declaration:: | |||||||
|       meta = {'indexes': [ |       meta = {'indexes': [ | ||||||
|           {'fields': ['$title', "$content"], |           {'fields': ['$title', "$content"], | ||||||
|            'default_language': 'english', |            'default_language': 'english', | ||||||
|            'weight': {'title': 10, 'content': 2} |            'weights': {'title': 10, 'content': 2} | ||||||
|           } |           } | ||||||
|       ]} |       ]} | ||||||
|  |  | ||||||
| @@ -35,10 +35,10 @@ Saving a document:: | |||||||
|        content="Various improvements").save() |        content="Various improvements").save() | ||||||
|  |  | ||||||
| Next, start a text search using :attr:`QuerySet.search_text` method:: | Next, start a text search using :attr:`QuerySet.search_text` method:: | ||||||
|    |  | ||||||
|   document = News.objects.search_text('testing').first() |   document = News.objects.search_text('testing').first() | ||||||
|   document.title # may be: "Using mongodb text search" |   document.title # may be: "Using mongodb text search" | ||||||
|    |  | ||||||
|   document = News.objects.search_text('released').first() |   document = News.objects.search_text('released').first() | ||||||
|   document.title # may be: "MongoEngine 0.9 released" |   document.title # may be: "MongoEngine 0.9 released" | ||||||
|  |  | ||||||
| @@ -48,4 +48,4 @@ Ordering by text score | |||||||
|  |  | ||||||
| :: | :: | ||||||
|  |  | ||||||
|   objects = News.objects.search('mongo').order_by('$text_score') |   objects = News.objects.search_text('mongo').order_by('$text_score') | ||||||
|   | |||||||
							
								
								
									
docs/guide/validation.rst (new file, 122 lines)
							| @@ -0,0 +1,122 @@ | |||||||
|  | ==================== | ||||||
|  | Document Validation | ||||||
|  | ==================== | ||||||
|  |  | ||||||
|  | By design, MongoEngine strictly validates the documents right before they are inserted in MongoDB | ||||||
|  | and makes sure they are consistent with the fields defined in your models. | ||||||
|  |  | ||||||
|  | MongoEngine makes the assumption that the documents that exist in the DB are compliant with the schema. | ||||||
|  | This means that MongoEngine will not validate a document when an object is loaded from the DB into an instance | ||||||
|  | of your model, but this operation may fail under some circumstances (e.g. if there is a field in | ||||||
|  | the document fetched from the database that is not defined in your model). | ||||||
|  |  | ||||||
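|  | A minimal sketch of that failure mode, assuming a hypothetical ``City`` model and a raw | ||||||
|  | document inserted through pymongo with an extra field: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     from mongoengine import Document, StringField, connect | ||||||
|  |  | ||||||
|  |     connect('mongoenginetest') | ||||||
|  |  | ||||||
|  |     class City(Document): | ||||||
|  |         name = StringField() | ||||||
|  |  | ||||||
|  |     # insert a raw document containing a field unknown to the model | ||||||
|  |     City._get_collection().insert_one({'name': 'Paris', 'population': 2000000}) | ||||||
|  |  | ||||||
|  |     City.objects.first()    # raises FieldDoesNotExist because of 'population' | ||||||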
|  |  | ||||||
|  | Built-in validation | ||||||
|  | =================== | ||||||
|  |  | ||||||
|  | MongoEngine provides various fields that encapsulate the corresponding validation | ||||||
|  | out of the box. Validation runs when calling `.validate()` or `.save()`. | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     from mongoengine import Document, EmailField, IntField | ||||||
|  |  | ||||||
|  |     class User(Document): | ||||||
|  |         email = EmailField() | ||||||
|  |         age = IntField(min_value=0, max_value=99) | ||||||
|  |  | ||||||
|  |     user = User(email='invalid@', age=24) | ||||||
|  |     user.validate()     # raises ValidationError (Invalid email address: ['email']) | ||||||
|  |     user.save()         # raises ValidationError (Invalid email address: ['email']) | ||||||
|  |  | ||||||
|  |     user2 = User(email='john.doe@garbage.com', age=1000) | ||||||
|  |     user2.save()        # raises ValidationError (Integer value is too large: ['age']) | ||||||
|  |  | ||||||
|  | Custom validation | ||||||
|  | ================= | ||||||
|  |  | ||||||
|  | The following features can be used to customize validation: | ||||||
|  |  | ||||||
|  | * Field `validation` parameter | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     def not_john_doe(name): | ||||||
|  |         if name == 'John Doe': | ||||||
|  |             raise ValidationError("John Doe is not a valid name") | ||||||
|  |  | ||||||
|  |     class Person(Document): | ||||||
|  |         full_name = StringField(validation=not_john_doe) | ||||||
|  |  | ||||||
|  |     Person(full_name='Billy Doe').save() | ||||||
|  |     Person(full_name='John Doe').save()  # raises ValidationError (John Doe is not a valid name) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | * Document `clean` method | ||||||
|  |  | ||||||
|  | This method is called as part of :meth:`~mongoengine.document.Document.save` and should be used to provide | ||||||
|  | custom model validation and/or to modify some of the field values prior to validation. | ||||||
|  | For instance, you could use it to automatically provide a value for a field, or to do validation | ||||||
|  | that requires access to more than a single field. | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     class Essay(Document): | ||||||
|  |         status = StringField(choices=('Published', 'Draft'), required=True) | ||||||
|  |         pub_date = DateTimeField() | ||||||
|  |  | ||||||
|  |         def clean(self): | ||||||
|  |             # Validate that only published essays have a `pub_date` | ||||||
|  |             if self.status == 'Draft' and self.pub_date is not None: | ||||||
|  |                 raise ValidationError('Draft entries should not have a publication date.') | ||||||
|  |             # Set the pub_date for published items if not set. | ||||||
|  |             if self.status == 'Published' and self.pub_date is None: | ||||||
|  |                 self.pub_date = datetime.now() | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |     Cleaning is only called if validation is turned on and when calling | ||||||
|  |     :meth:`~mongoengine.Document.save`. | ||||||
|  |  | ||||||
|  | * Adding custom Field classes | ||||||
|  |  | ||||||
|  | We recommend using the fields provided by MongoEngine whenever possible. However, it is also possible | ||||||
|  | to subclass a Field and encapsulate custom validation by overriding the `validate` method. | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     class AgeField(IntField): | ||||||
|  |  | ||||||
|  |         def validate(self, value): | ||||||
|  |             super(AgeField, self).validate(value)     # let IntField.validate run first | ||||||
|  |             if value == 60: | ||||||
|  |                 self.error('60 is not allowed') | ||||||
|  |  | ||||||
|  |     class Person(Document): | ||||||
|  |         age = AgeField(min_value=0, max_value=99) | ||||||
|  |  | ||||||
|  |     Person(age=20).save()   # passes | ||||||
|  |     Person(age=1000).save() # raises ValidationError (Integer value is too large: ['age']) | ||||||
|  |     Person(age=60).save()   # raises ValidationError (Person:None) (60 is not allowed: ['age']) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |  | ||||||
|  |    When overriding `validate`, use `self.error("your-custom-error")` instead of raising ValidationError explicitly, | ||||||
|  |    as it provides better context in the error message. | ||||||
|  |  | ||||||
|  | Skipping validation | ||||||
|  | ==================== | ||||||
|  |  | ||||||
|  | Although this is discouraged, as it allows field constraints to be violated, if for some reason you need to disable | ||||||
|  | validation and cleaning of a document when you call :meth:`~mongoengine.document.Document.save`, you can use `.save(validate=False)`. | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     class Person(Document): | ||||||
|  |         age = IntField(max_value=100) | ||||||
|  |  | ||||||
|  |     Person(age=1000).save()    # raises ValidationError (Integer value is too large) | ||||||
|  |  | ||||||
|  |     Person(age=1000).save(validate=False) | ||||||
|  |     person = Person.objects.first() | ||||||
|  |     assert person.age == 1000 | ||||||
| @@ -7,7 +7,7 @@ MongoDB. To install it, simply run | |||||||
|  |  | ||||||
| .. code-block:: console | .. code-block:: console | ||||||
|  |  | ||||||
|     $ pip install -U mongoengine |     $ python -m pip install -U mongoengine | ||||||
|  |  | ||||||
| :doc:`tutorial` | :doc:`tutorial` | ||||||
|   A quick tutorial building a tumblelog to get you up and running with |   A quick tutorial building a tumblelog to get you up and running with | ||||||
| @@ -23,9 +23,18 @@ MongoDB. To install it, simply run | |||||||
| :doc:`upgrade` | :doc:`upgrade` | ||||||
|   How to upgrade MongoEngine. |   How to upgrade MongoEngine. | ||||||
|  |  | ||||||
|  | :doc:`faq` | ||||||
|  |   Frequently Asked Questions | ||||||
|  |  | ||||||
| :doc:`django` | :doc:`django` | ||||||
|   Using MongoEngine and Django |   Using MongoEngine and Django | ||||||
|  |  | ||||||
|  | MongoDB and driver support | ||||||
|  | -------------------------- | ||||||
|  |  | ||||||
|  | MongoEngine is based on the PyMongo driver and tested against multiple versions of MongoDB. | ||||||
|  | For further details, please refer to the `readme <https://github.com/MongoEngine/mongoengine#mongoengine>`_. | ||||||
|  |  | ||||||
| Community | Community | ||||||
| --------- | --------- | ||||||
|  |  | ||||||
| @@ -73,6 +82,7 @@ formats for offline reading. | |||||||
|     apireference |     apireference | ||||||
|     changelog |     changelog | ||||||
|     upgrade |     upgrade | ||||||
|  |     faq | ||||||
|     django |     django | ||||||
|  |  | ||||||
| Indices and tables | Indices and tables | ||||||
| @@ -81,4 +91,3 @@ Indices and tables | |||||||
| * :ref:`genindex` | * :ref:`genindex` | ||||||
| * :ref:`modindex` | * :ref:`modindex` | ||||||
| * :ref:`search` | * :ref:`search` | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
docs/requirements.txt (new file, 3 lines)
							| @@ -0,0 +1,3 @@ | |||||||
|  | Sphinx==3.3.0 | ||||||
|  | sphinx-rtd-theme==0.5.0 | ||||||
|  | readthedocs-sphinx-ext==2.1.1 | ||||||
| @@ -3,11 +3,10 @@ Tutorial | |||||||
| ======== | ======== | ||||||
|  |  | ||||||
| This tutorial introduces **MongoEngine** by means of example --- we will walk | This tutorial introduces **MongoEngine** by means of example --- we will walk | ||||||
| through how to create a simple **Tumblelog** application. A Tumblelog is a type | through how to create a simple **Tumblelog** application. A tumblelog is a | ||||||
| of blog where posts are not constrained to being conventional text-based posts. | blog that supports mixed media content, including text, images, links, video, | ||||||
| As well as text-based entries, users may post images, links, videos, etc. For | audio, etc. For simplicity's sake, we'll stick to text, image, and link | ||||||
| simplicity's sake, we'll stick to text, image and link entries in our | entries. As the purpose of this tutorial is to introduce MongoEngine, we'll | ||||||
| application. As the purpose of this tutorial is to introduce MongoEngine, we'll |  | ||||||
| focus on the data-modelling side of the application, leaving out a user | focus on the data-modelling side of the application, leaving out a user | ||||||
| interface. | interface. | ||||||
|  |  | ||||||
| @@ -16,14 +15,14 @@ Getting started | |||||||
|  |  | ||||||
| Before we start, make sure that a copy of MongoDB is running in an accessible | Before we start, make sure that a copy of MongoDB is running in an accessible | ||||||
| location --- running it locally will be easier, but if that is not an option | location --- running it locally will be easier, but if that is not an option | ||||||
| then it may be run on a remote server. If you haven't installed mongoengine, | then it may be run on a remote server. If you haven't installed MongoEngine, | ||||||
| simply use pip to install it like so:: | simply use pip to install it like so:: | ||||||
|  |  | ||||||
|     $ pip install mongoengine |     $ python -m pip install mongoengine | ||||||
|  |  | ||||||
| Before we can start using MongoEngine, we need to tell it how to connect to our | Before we can start using MongoEngine, we need to tell it how to connect to our | ||||||
| instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect` | instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect` | ||||||
| function. If running locally the only argument we need to provide is the name | function. If running locally, the only argument we need to provide is the name | ||||||
| of the MongoDB database to use:: | of the MongoDB database to use:: | ||||||
|  |  | ||||||
|     from mongoengine import * |     from mongoengine import * | ||||||
| @@ -39,18 +38,18 @@ Defining our documents | |||||||
| MongoDB is *schemaless*, which means that no schema is enforced by the database | MongoDB is *schemaless*, which means that no schema is enforced by the database | ||||||
| --- we may add and remove fields however we want and MongoDB won't complain. | --- we may add and remove fields however we want and MongoDB won't complain. | ||||||
| This makes life a lot easier in many regards, especially when there is a change | This makes life a lot easier in many regards, especially when there is a change | ||||||
| to the data model. However, defining schemata for our documents can help to | to the data model. However, defining schemas for our documents can help to iron | ||||||
| iron out bugs involving incorrect types or missing fields, and also allow us to | out bugs involving incorrect types or missing fields, and also allow us to | ||||||
| define utility methods on our documents in the same way that traditional | define utility methods on our documents in the same way that traditional | ||||||
| :abbr:`ORMs (Object-Relational Mappers)` do. | :abbr:`ORMs (Object-Relational Mappers)` do. | ||||||
|  |  | ||||||
| In our Tumblelog application we need to store several different types of | In our Tumblelog application we need to store several different types of | ||||||
| information.  We will need to have a collection of **users**, so that we may | information. We will need to have a collection of **users**, so that we may | ||||||
| link posts to an individual. We also need to store our different types of | link posts to an individual. We also need to store our different types of | ||||||
| **posts** (eg: text, image and link) in the database. To aid navigation of our | **posts** (eg: text, image and link) in the database. To aid navigation of our | ||||||
| Tumblelog, posts may have **tags** associated with them, so that the list of | Tumblelog, posts may have **tags** associated with them, so that the list of | ||||||
| posts shown to the user may be limited to posts that have been assigned a | posts shown to the user may be limited to posts that have been assigned a | ||||||
| specific tag.  Finally, it would be nice if **comments** could be added to | specific tag. Finally, it would be nice if **comments** could be added to | ||||||
| posts. We'll start with **users**, as the other document models are slightly | posts. We'll start with **users**, as the other document models are slightly | ||||||
| more involved. | more involved. | ||||||
|  |  | ||||||
| @@ -78,7 +77,7 @@ Now we'll think about how to store the rest of the information. If we were | |||||||
| using a relational database, we would most likely have a table of **posts**, a | using a relational database, we would most likely have a table of **posts**, a | ||||||
| table of **comments** and a table of **tags**.  To associate the comments with | table of **comments** and a table of **tags**.  To associate the comments with | ||||||
| individual posts, we would put a column in the comments table that contained a | individual posts, we would put a column in the comments table that contained a | ||||||
| foreign key to the posts table.  We'd also need a link table to provide the | foreign key to the posts table. We'd also need a link table to provide the | ||||||
| many-to-many relationship between posts and tags. Then we'd need to address the | many-to-many relationship between posts and tags. Then we'd need to address the | ||||||
| problem of storing the specialised post-types (text, image and link). There are | problem of storing the specialised post-types (text, image and link). There are | ||||||
| several ways we can achieve this, but each of them have their problems --- none | several ways we can achieve this, but each of them have their problems --- none | ||||||
| @@ -87,7 +86,7 @@ of them stand out as particularly intuitive solutions. | |||||||
| Posts | Posts | ||||||
| ^^^^^ | ^^^^^ | ||||||
|  |  | ||||||
| Happily mongoDB *isn't* a relational database, so we're not going to do it that | Happily MongoDB *isn't* a relational database, so we're not going to do it that | ||||||
| way. As it turns out, we can use MongoDB's schemaless nature to provide us with | way. As it turns out, we can use MongoDB's schemaless nature to provide us with | ||||||
| a much nicer solution. We will store all of the posts in *one collection* and | a much nicer solution. We will store all of the posts in *one collection* and | ||||||
| each post type will only store the fields it needs. If we later want to add | each post type will only store the fields it needs. If we later want to add | ||||||
| @@ -96,7 +95,7 @@ using* the new fields we need to support video posts. This fits with the | |||||||
| Object-Oriented principle of *inheritance* nicely. We can think of | Object-Oriented principle of *inheritance* nicely. We can think of | ||||||
| :class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and | :class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and | ||||||
| :class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports | :class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports | ||||||
| this kind of modelling out of the box --- all you need do is turn on inheritance | this kind of modeling out of the box --- all you need do is turn on inheritance | ||||||
| by setting :attr:`allow_inheritance` to True in the :attr:`meta`:: | by setting :attr:`allow_inheritance` to True in the :attr:`meta`:: | ||||||
|  |  | ||||||
|     class Post(Document): |     class Post(Document): | ||||||
| @@ -128,8 +127,8 @@ link table, we can just store a list of tags in each post. So, for both | |||||||
| efficiency and simplicity's sake, we'll store the tags as strings directly | efficiency and simplicity's sake, we'll store the tags as strings directly | ||||||
| within the post, rather than storing references to tags in a separate | within the post, rather than storing references to tags in a separate | ||||||
| collection. Especially as tags are generally very short (often even shorter | collection. Especially as tags are generally very short (often even shorter | ||||||
| than a document's id), this denormalisation won't impact very strongly on the | than a document's id), this denormalization won't impact the size of the | ||||||
| size of our database. So let's take a look that the code our modified | database very strongly. Let's take a look at the code of our modified | ||||||
| :class:`Post` class:: | :class:`Post` class:: | ||||||
|  |  | ||||||
|     class Post(Document): |     class Post(Document): | ||||||
| @@ -141,7 +140,7 @@ The :class:`~mongoengine.fields.ListField` object that is used to define a Post' | |||||||
| takes a field object as its first argument --- this means that you can have | takes a field object as its first argument --- this means that you can have | ||||||
| lists of any type of field (including lists). | lists of any type of field (including lists). | ||||||
|  |  | ||||||
| .. note:: We don't need to modify the specialised post types as they all | .. note:: We don't need to modify the specialized post types as they all | ||||||
|     inherit from :class:`Post`. |     inherit from :class:`Post`. | ||||||
|  |  | ||||||
| Comments | Comments | ||||||
| @@ -149,12 +148,12 @@ Comments | |||||||
|  |  | ||||||
| A comment is typically associated with *one* post. In a relational database, to | A comment is typically associated with *one* post. In a relational database, to | ||||||
| display a post with its comments, we would have to retrieve the post from the | display a post with its comments, we would have to retrieve the post from the | ||||||
| database, then query the database again for the comments associated with the | database and then query the database again for the comments associated with the | ||||||
| post. This works, but there is no real reason to be storing the comments | post. This works, but there is no real reason to be storing the comments | ||||||
| separately from their associated posts, other than to work around the | separately from their associated posts, other than to work around the | ||||||
| relational model. Using MongoDB we can store the comments as a list of | relational model. Using MongoDB we can store the comments as a list of | ||||||
| *embedded documents* directly on a post document. An embedded document should | *embedded documents* directly on a post document. An embedded document should | ||||||
| be treated no differently that a regular document; it just doesn't have its own | be treated no differently than a regular document; it just doesn't have its own | ||||||
| collection in the database. Using MongoEngine, we can define the structure of | collection in the database. Using MongoEngine, we can define the structure of | ||||||
| embedded documents, along with utility methods, in exactly the same way we do | embedded documents, along with utility methods, in exactly the same way we do | ||||||
| with regular documents:: | with regular documents:: | ||||||
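The class definitions themselves fall outside this hunk; as a minimal sketch, such an embedded document might look like the following (the ``Comment`` name and its single field are assumptions for illustration)::

    class Comment(EmbeddedDocument):
        content = StringField()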
| @@ -207,7 +206,10 @@ object:: | |||||||
|         ross.last_name = 'Lawley' |         ross.last_name = 'Lawley' | ||||||
|         ross.save() |         ross.save() | ||||||
|  |  | ||||||
| Now that we've got our user in the database, let's add a couple of posts:: | Assign another user to a variable called ``john``, just like we did above with | ||||||
|  | ``ross``. | ||||||
|  |  | ||||||
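For example, assuming the ``User`` fields introduced earlier in the tutorial (the values here are purely illustrative)::

    john = User(email='jdoe@example.com', first_name='John', last_name='Doe')
    john.save()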
|  | Now that we've got our users in the database, let's add a couple of posts:: | ||||||
|  |  | ||||||
|     post1 = TextPost(title='Fun with MongoEngine', author=john) |     post1 = TextPost(title='Fun with MongoEngine', author=john) | ||||||
|     post1.content = 'Took a look at MongoEngine today, looks pretty cool.' |     post1.content = 'Took a look at MongoEngine today, looks pretty cool.' | ||||||
| @@ -219,8 +221,8 @@ Now that we've got our user in the database, let's add a couple of posts:: | |||||||
|     post2.tags = ['mongoengine'] |     post2.tags = ['mongoengine'] | ||||||
|     post2.save() |     post2.save() | ||||||
|  |  | ||||||
| .. note:: If you change a field on a object that has already been saved, then | .. note:: If you change a field on an object that has already been saved and | ||||||
|     call :meth:`save` again, the document will be updated. |     then call :meth:`save` again, the document will be updated. | ||||||
|  |  | ||||||
| Accessing our data | Accessing our data | ||||||
| ================== | ================== | ||||||
| @@ -232,17 +234,17 @@ used to access the documents in the database collection associated with that | |||||||
| class. So let's see how we can get our posts' titles:: | class. So let's see how we can get our posts' titles:: | ||||||
|  |  | ||||||
|     for post in Post.objects: |     for post in Post.objects: | ||||||
|         print post.title |         print(post.title) | ||||||
|  |  | ||||||
| Retrieving type-specific information | Retrieving type-specific information | ||||||
| ------------------------------------ | ------------------------------------ | ||||||
|  |  | ||||||
| This will print the titles of our posts, one on each line. But What if we want | This will print the titles of our posts, one on each line. But what if we want | ||||||
| to access the type-specific data (link_url, content, etc.)? One way is simply | to access the type-specific data (link_url, content, etc.)? One way is simply | ||||||
| to use the :attr:`objects` attribute of a subclass of :class:`Post`:: | to use the :attr:`objects` attribute of a subclass of :class:`Post`:: | ||||||
|  |  | ||||||
|     for post in TextPost.objects: |     for post in TextPost.objects: | ||||||
|         print post.content |         print(post.content) | ||||||
|  |  | ||||||
| Using TextPost's :attr:`objects` attribute only returns documents that were | Using TextPost's :attr:`objects` attribute only returns documents that were | ||||||
| created using :class:`TextPost`. Actually, there is a more general rule here: | created using :class:`TextPost`. Actually, there is a more general rule here: | ||||||
| @@ -259,16 +261,14 @@ instances of :class:`Post` --- they were instances of the subclass of | |||||||
| practice:: | practice:: | ||||||
|  |  | ||||||
|     for post in Post.objects: |     for post in Post.objects: | ||||||
|         print post.title |         print(post.title) | ||||||
|         print '=' * len(post.title) |         print('=' * len(post.title)) | ||||||
|  |  | ||||||
|         if isinstance(post, TextPost): |         if isinstance(post, TextPost): | ||||||
|             print post.content |             print(post.content) | ||||||
|  |  | ||||||
|         if isinstance(post, LinkPost): |         if isinstance(post, LinkPost): | ||||||
|             print 'Link:', post.link_url |             print('Link: {}'.format(post.link_url)) | ||||||
|  |  | ||||||
|         print |  | ||||||
|  |  | ||||||
| This would print the title of each post, followed by the content if it was a | This would print the title of each post, followed by the content if it was a | ||||||
| text post, and "Link: <url>" if it was a link post. | text post, and "Link: <url>" if it was a link post. | ||||||
| @@ -283,7 +283,7 @@ your query.  Let's adjust our query so that only posts with the tag "mongodb" | |||||||
| are returned:: | are returned:: | ||||||
|  |  | ||||||
|     for post in Post.objects(tags='mongodb'): |     for post in Post.objects(tags='mongodb'): | ||||||
|         print post.title |         print(post.title) | ||||||
|  |  | ||||||
| There are also methods available on :class:`~mongoengine.queryset.QuerySet` | There are also methods available on :class:`~mongoengine.queryset.QuerySet` | ||||||
| objects that allow different results to be returned, for example, calling | objects that allow different results to be returned, for example, calling | ||||||
| @@ -292,11 +292,11 @@ the first matched by the query you provide. Aggregation functions may also be | |||||||
| used on :class:`~mongoengine.queryset.QuerySet` objects:: | used on :class:`~mongoengine.queryset.QuerySet` objects:: | ||||||
|  |  | ||||||
|     num_posts = Post.objects(tags='mongodb').count() |     num_posts = Post.objects(tags='mongodb').count() | ||||||
|     print 'Found %d posts with tag "mongodb"' % num_posts |     print('Found {} posts with tag "mongodb"'.format(num_posts)) | ||||||
|  |  | ||||||
| Learning more about mongoengine | Learning more about MongoEngine | ||||||
| ------------------------------- | ------------------------------- | ||||||
|  |  | ||||||
| If you got this far you've made a great start, so well done!  The next step on | If you got this far you've made a great start, so well done! The next step on | ||||||
| your mongoengine journey is the `full user guide <guide/index.html>`_, where you | your MongoEngine journey is the `full user guide <guide/index.html>`_, where | ||||||
| can learn indepth about how to use mongoengine and mongodb. | you can learn in-depth about how to use MongoEngine and MongoDB. | ||||||
|   | |||||||
| @@ -2,6 +2,94 @@ | |||||||
| Upgrading | Upgrading | ||||||
| ######### | ######### | ||||||
|  |  | ||||||
|  | Development | ||||||
|  | *********** | ||||||
|  | (Fill this out whenever you introduce breaking changes to MongoEngine) | ||||||
|  |  | ||||||
|  | URLField's constructor no longer takes `verify_exists` | ||||||
|  |  | ||||||
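A minimal before/after sketch of that change (the ``url`` field name is illustrative)::

    # before (no longer accepted)
    # url = URLField(verify_exists=True)

    # after
    url = URLField()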
|  | 0.15.0 | ||||||
|  | ****** | ||||||
|  |  | ||||||
|  | 0.14.0 | ||||||
|  | ****** | ||||||
|  | This release includes a few bug fixes and a significant code cleanup. The most | ||||||
|  | important change is that `QuerySet.as_pymongo` no longer supports a | ||||||
|  | `coerce_types` mode. If you used it in the past, a) please let us know of your | ||||||
|  | use case, b) you'll need to override `as_pymongo` to get the desired outcome. | ||||||
|  |  | ||||||
|  | This release also makes the EmbeddedDocument not hashable by default. If you | ||||||
|  | use embedded documents in sets or dictionaries, you might have to override | ||||||
|  | `__hash__` and implement a hashing logic specific to your use case. See #1528 | ||||||
|  | for the reason behind this change. | ||||||
|  |  | ||||||
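A minimal sketch of such an override; the model and the choice of hashing on a single field are assumptions, so adapt them to your own use case::

    class Tag(EmbeddedDocument):
        name = StringField(required=True)

        def __hash__(self):
            # hash on a single, effectively immutable field
            return hash(self.name)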
|  | 0.13.0 | ||||||
|  | ****** | ||||||
|  | This release adds Unicode support to the `EmailField` and changes its | ||||||
|  | structure significantly. Previously, email addresses containing Unicode | ||||||
|  | characters didn't work at all. Starting with v0.13.0, domains with Unicode | ||||||
|  | characters are supported out of the box, meaning some emails that previously | ||||||
|  | didn't pass validation now do. Make sure the rest of your application can | ||||||
|  | accept such email addresses. Additionally, if you subclassed the `EmailField` | ||||||
|  | in your application and overrode `EmailField.EMAIL_REGEX`, you will have to | ||||||
|  | adjust your code to override `EmailField.USER_REGEX`, `EmailField.DOMAIN_REGEX`, | ||||||
|  | and potentially `EmailField.UTF8_USER_REGEX`. | ||||||
|  |  | ||||||
|  | 0.12.0 | ||||||
|  | ****** | ||||||
|  | This release includes various fixes for the `BaseQuerySet` methods and how they | ||||||
|  | are chained together. Since version 0.10.1 applying limit/skip/hint/batch_size | ||||||
|  | to an already-existing queryset wouldn't modify the underlying PyMongo cursor. | ||||||
|  | This has been fixed now, so you'll need to make sure that your code didn't rely | ||||||
|  | on the broken implementation. | ||||||
|  |  | ||||||
|  | Additionally, a public `BaseQuerySet.clone_into` has been renamed to a private | ||||||
|  | `_clone_into`. If you directly used that method in your code, you'll need to | ||||||
|  | rename its occurrences. | ||||||
|  |  | ||||||
|  | 0.11.0 | ||||||
|  | ****** | ||||||
|  | This release includes a major overhaul of MongoEngine's code quality and | ||||||
|  | introduces a few breaking changes. It also touches many different parts of | ||||||
|  | the package, and although all the changes have been tested and scrutinized, | ||||||
|  | you're encouraged to thoroughly test the upgrade. | ||||||
|  |  | ||||||
|  | First breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`. | ||||||
|  | If you import or catch this exception, you'll need to rename it in your code. | ||||||
|  |  | ||||||
|  | Second breaking change drops Python v2.6 support. If you run MongoEngine on | ||||||
|  | that Python version, you'll need to upgrade it first. | ||||||
|  |  | ||||||
|  | Third breaking change drops an old backward compatibility measure where | ||||||
|  | `from mongoengine.base import ErrorClass` would work on top of | ||||||
|  | `from mongoengine.errors import ErrorClass` (where `ErrorClass` is e.g. | ||||||
|  | `ValidationError`). If you import any exceptions from `mongoengine.base`, | ||||||
|  | change it to `mongoengine.errors`. | ||||||
|  |  | ||||||
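A minimal sketch of the corresponding import changes (``ValidationError`` stands in for any error class you import)::

    # before
    # from mongoengine import ConnectionError
    # from mongoengine.base import ValidationError

    # after
    from mongoengine import MongoEngineConnectionError
    from mongoengine.errors import ValidationError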
|  | 0.10.8 | ||||||
|  | ****** | ||||||
|  | This version fixed an issue where specifying a MongoDB URI host would override | ||||||
|  | more information than it should. These changes are minor, but they still | ||||||
|  | subtly modify the connection logic and thus you're encouraged to test your | ||||||
|  | MongoDB connection before shipping v0.10.8 in production. | ||||||
|  |  | ||||||
|  | 0.10.7 | ||||||
|  | ****** | ||||||
|  |  | ||||||
|  | `QuerySet.aggregate_sum` and `QuerySet.aggregate_average` are dropped. Use | ||||||
|  | `QuerySet.sum` and `QuerySet.average` instead, which now use the aggregation | ||||||
|  | framework by default. | ||||||
|  |  | ||||||
|  | 0.9.0 | ||||||
|  | ***** | ||||||
|  |  | ||||||
|  | The 0.8.7 package on PyPI was corrupted. If upgrading from 0.8.7 to 0.9.0, please follow these steps: :: | ||||||
|  |  | ||||||
|  |     python -m pip uninstall pymongo | ||||||
|  |     python -m pip uninstall mongoengine | ||||||
|  |     python -m pip install pymongo==2.8 | ||||||
|  |     python -m pip install mongoengine | ||||||
|  |  | ||||||
| 0.8.7 | 0.8.7 | ||||||
| ***** | ***** | ||||||
|  |  | ||||||
| @@ -65,7 +153,7 @@ inherited classes like so: :: | |||||||
|  |  | ||||||
|     # 4. Remove indexes |     # 4. Remove indexes | ||||||
|     info = collection.index_information() |     info = collection.index_information() | ||||||
|     indexes_to_drop = [key for key, value in info.iteritems() |     indexes_to_drop = [key for key, value in info.items() | ||||||
|                        if '_types' in dict(value['key'])] |                        if '_types' in dict(value['key'])] | ||||||
|     for index in indexes_to_drop: |     for index in indexes_to_drop: | ||||||
|         collection.drop_index(index) |         collection.drop_index(index) | ||||||
|   | |||||||
| @@ -1,26 +1,43 @@ | |||||||
| import document | # Import submodules so that we can expose their __all__ | ||||||
| from document import * | from mongoengine import ( | ||||||
| import fields |     connection, | ||||||
| from fields import * |     document, | ||||||
| import connection |     errors, | ||||||
| from connection import * |     fields, | ||||||
| import queryset |     queryset, | ||||||
| from queryset import * |     signals, | ||||||
| import signals | ) | ||||||
| from signals import * |  | ||||||
| from errors import * |  | ||||||
| import errors |  | ||||||
| import django |  | ||||||
|  |  | ||||||
| __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + | # Import everything from each submodule so that it can be accessed via | ||||||
|            list(queryset.__all__) + signals.__all__ + list(errors.__all__)) | # mongoengine, e.g. instead of `from mongoengine.connection import connect`, | ||||||
|  | # users can simply use `from mongoengine import connect`, or even | ||||||
|  | # `from mongoengine import *` and then `connect('testdb')`. | ||||||
|  | from mongoengine.connection import *  # noqa: F401 | ||||||
|  | from mongoengine.document import *  # noqa: F401 | ||||||
|  | from mongoengine.errors import *  # noqa: F401 | ||||||
|  | from mongoengine.fields import *  # noqa: F401 | ||||||
|  | from mongoengine.queryset import *  # noqa: F401 | ||||||
|  | from mongoengine.signals import *  # noqa: F401 | ||||||
|  |  | ||||||
| VERSION = (0, 9, 0) | __all__ = ( | ||||||
|  |     list(document.__all__) | ||||||
|  |     + list(fields.__all__) | ||||||
|  |     + list(connection.__all__) | ||||||
|  |     + list(queryset.__all__) | ||||||
|  |     + list(signals.__all__) | ||||||
|  |     + list(errors.__all__) | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | VERSION = (0, 23, 1) | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_version(): | def get_version(): | ||||||
|     if isinstance(VERSION[-1], basestring): |     """Return the VERSION as a string. | ||||||
|         return '.'.join(map(str, VERSION[:-1])) + VERSION[-1] |  | ||||||
|     return '.'.join(map(str, VERSION)) |     For example, if `VERSION == (0, 10, 7)`, return '0.10.7'. | ||||||
|  |     """ | ||||||
|  |     return ".".join(map(str, VERSION)) | ||||||
|  |  | ||||||
|  |  | ||||||
| __version__ = get_version() | __version__ = get_version() | ||||||
|   | |||||||
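As the new module comments describe, everything is re-exported at the package top level; a quick usage sketch (the database name is illustrative)::

    from mongoengine import connect, Document, StringField

    connect('testdb')

    class User(Document):
        name = StringField()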
| @@ -1,8 +1,33 @@ | |||||||
|  | # Base module is split into several files for convenience. Files inside of | ||||||
|  | # this module should import from a specific submodule (e.g. | ||||||
|  | # `from mongoengine.base.document import BaseDocument`), but all of the | ||||||
|  | # other modules should import directly from the top-level module (e.g. | ||||||
|  | # `from mongoengine.base import BaseDocument`). This approach is cleaner and | ||||||
|  | # also helps with cyclical import errors. | ||||||
| from mongoengine.base.common import * | from mongoengine.base.common import * | ||||||
| from mongoengine.base.datastructures import * | from mongoengine.base.datastructures import * | ||||||
| from mongoengine.base.document import * | from mongoengine.base.document import * | ||||||
| from mongoengine.base.fields import * | from mongoengine.base.fields import * | ||||||
| from mongoengine.base.metaclasses import * | from mongoengine.base.metaclasses import * | ||||||
|  |  | ||||||
| # Help with backwards compatibility | __all__ = ( | ||||||
| from mongoengine.errors import * |     # common | ||||||
|  |     "UPDATE_OPERATORS", | ||||||
|  |     "_document_registry", | ||||||
|  |     "get_document", | ||||||
|  |     # datastructures | ||||||
|  |     "BaseDict", | ||||||
|  |     "BaseList", | ||||||
|  |     "EmbeddedDocumentList", | ||||||
|  |     "LazyReference", | ||||||
|  |     # document | ||||||
|  |     "BaseDocument", | ||||||
|  |     # fields | ||||||
|  |     "BaseField", | ||||||
|  |     "ComplexBaseField", | ||||||
|  |     "ObjectIdField", | ||||||
|  |     "GeoJsonBaseField", | ||||||
|  |     # metaclasses | ||||||
|  |     "DocumentMetaclass", | ||||||
|  |     "TopLevelDocumentMetaclass", | ||||||
|  | ) | ||||||
|   | |||||||
| @@ -1,26 +1,62 @@ | |||||||
| from mongoengine.errors import NotRegistered | from mongoengine.errors import NotRegistered | ||||||
|  |  | ||||||
| __all__ = ('ALLOW_INHERITANCE', 'get_document', '_document_registry') | __all__ = ("UPDATE_OPERATORS", "get_document", "_document_registry") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | UPDATE_OPERATORS = { | ||||||
|  |     "set", | ||||||
|  |     "unset", | ||||||
|  |     "inc", | ||||||
|  |     "dec", | ||||||
|  |     "mul", | ||||||
|  |     "pop", | ||||||
|  |     "push", | ||||||
|  |     "push_all", | ||||||
|  |     "pull", | ||||||
|  |     "pull_all", | ||||||
|  |     "add_to_set", | ||||||
|  |     "set_on_insert", | ||||||
|  |     "min", | ||||||
|  |     "max", | ||||||
|  |     "rename", | ||||||
|  | } | ||||||
|  |  | ||||||
| ALLOW_INHERITANCE = False |  | ||||||
|  |  | ||||||
| _document_registry = {} | _document_registry = {} | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_document(name): | def get_document(name): | ||||||
|  |     """Get a registered Document class by name.""" | ||||||
|     doc = _document_registry.get(name, None) |     doc = _document_registry.get(name, None) | ||||||
|     if not doc: |     if not doc: | ||||||
|         # Possible old style name |         # Possible old style name | ||||||
|         single_end = name.split('.')[-1] |         single_end = name.split(".")[-1] | ||||||
|         compound_end = '.%s' % single_end |         compound_end = ".%s" % single_end | ||||||
|         possible_match = [k for k in _document_registry.keys() |         possible_match = [ | ||||||
|                           if k.endswith(compound_end) or k == single_end] |             k for k in _document_registry if k.endswith(compound_end) or k == single_end | ||||||
|  |         ] | ||||||
|         if len(possible_match) == 1: |         if len(possible_match) == 1: | ||||||
|             doc = _document_registry.get(possible_match.pop(), None) |             doc = _document_registry.get(possible_match.pop(), None) | ||||||
|     if not doc: |     if not doc: | ||||||
|         raise NotRegistered(""" |         raise NotRegistered( | ||||||
|  |             """ | ||||||
|             `%s` has not been registered in the document registry. |             `%s` has not been registered in the document registry. | ||||||
|             Importing the document class automatically registers it, has it |             Importing the document class automatically registers it, has it | ||||||
|             been imported? |             been imported? | ||||||
|         """.strip() % name) |         """.strip() | ||||||
|  |             % name | ||||||
|  |         ) | ||||||
|     return doc |     return doc | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def _get_documents_by_db(connection_alias, default_connection_alias): | ||||||
|  |     """Get all registered Documents class attached to a given database""" | ||||||
|  |  | ||||||
|  |     def get_doc_alias(doc_cls): | ||||||
|  |         return doc_cls._meta.get("db_alias", default_connection_alias) | ||||||
|  |  | ||||||
|  |     return [ | ||||||
|  |         doc_cls | ||||||
|  |         for doc_cls in _document_registry.values() | ||||||
|  |         if get_doc_alias(doc_cls) == connection_alias | ||||||
|  |     ] | ||||||
|   | |||||||
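For illustration, a minimal sketch of the registry lookup that `get_document` performs (the ``Article`` model is hypothetical)::

    from mongoengine import Document, StringField
    from mongoengine.base import get_document

    class Article(Document):
        title = StringField()

    # Defining (importing) the class registers it, so it can be looked up by name.
    assert get_document('Article') is Article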
| @@ -1,60 +1,81 @@ | |||||||
| import weakref | import weakref | ||||||
| import functools |  | ||||||
| import itertools | from bson import DBRef | ||||||
|  |  | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.errors import DoesNotExist, MultipleObjectsReturned | from mongoengine.errors import DoesNotExist, MultipleObjectsReturned | ||||||
|  |  | ||||||
| __all__ = ("BaseDict", "BaseList", "EmbeddedDocumentList") | __all__ = ( | ||||||
|  |     "BaseDict", | ||||||
|  |     "StrictDict", | ||||||
|  |     "BaseList", | ||||||
|  |     "EmbeddedDocumentList", | ||||||
|  |     "LazyReference", | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def mark_as_changed_wrapper(parent_method): | ||||||
|  |     """Decorator that ensures _mark_as_changed method gets called.""" | ||||||
|  |  | ||||||
|  |     def wrapper(self, *args, **kwargs): | ||||||
|  |         # Can't use super() in the decorator. | ||||||
|  |         result = parent_method(self, *args, **kwargs) | ||||||
|  |         self._mark_as_changed() | ||||||
|  |         return result | ||||||
|  |  | ||||||
|  |     return wrapper | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def mark_key_as_changed_wrapper(parent_method): | ||||||
|  |     """Decorator that ensures _mark_as_changed method gets called with the key argument""" | ||||||
|  |  | ||||||
|  |     def wrapper(self, key, *args, **kwargs): | ||||||
|  |         # Can't use super() in the decorator. | ||||||
|  |         result = parent_method(self, key, *args, **kwargs) | ||||||
|  |         self._mark_as_changed(key) | ||||||
|  |         return result | ||||||
|  |  | ||||||
|  |     return wrapper | ||||||
|  |  | ||||||
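Roughly, the effect of these wrappers is that mutating a tracked container flags the owning field as changed, so that a later ``save()`` picks up the modification; a sketch with an illustrative model::

    post = Post.objects.first()
    post.tags.append('mongodb')   # the wrapped append marks the 'tags' field as changed on the parent document
    post.save()                   # save() now knows the 'tags' field was modified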
|  |  | ||||||
| class BaseDict(dict): | class BaseDict(dict): | ||||||
|     """A special dict so we can watch any changes""" |     """A special dict so we can watch any changes.""" | ||||||
|  |  | ||||||
|     _dereferenced = False |     _dereferenced = False | ||||||
|     _instance = None |     _instance = None | ||||||
|     _name = None |     _name = None | ||||||
|  |  | ||||||
|     def __init__(self, dict_items, instance, name): |     def __init__(self, dict_items, instance, name): | ||||||
|         Document = _import_class('Document') |         BaseDocument = _import_class("BaseDocument") | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |  | ||||||
|  |  | ||||||
|         if isinstance(instance, (Document, EmbeddedDocument)): |         if isinstance(instance, BaseDocument): | ||||||
|             self._instance = weakref.proxy(instance) |             self._instance = weakref.proxy(instance) | ||||||
|         self._name = name |         self._name = name | ||||||
|         return super(BaseDict, self).__init__(dict_items) |         super().__init__(dict_items) | ||||||
|  |  | ||||||
|     def __getitem__(self, key, *args, **kwargs): |     def get(self, key, default=None): | ||||||
|         value = super(BaseDict, self).__getitem__(key) |         # get does not use __getitem__ by default so we must override it as well | ||||||
|  |         try: | ||||||
|  |             return self.__getitem__(key) | ||||||
|  |         except KeyError: | ||||||
|  |             return default | ||||||
|  |  | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |     def __getitem__(self, key): | ||||||
|  |         value = super().__getitem__(key) | ||||||
|  |  | ||||||
|  |         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||||
|         if isinstance(value, EmbeddedDocument) and value._instance is None: |         if isinstance(value, EmbeddedDocument) and value._instance is None: | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif not isinstance(value, BaseDict) and isinstance(value, dict): |         elif isinstance(value, dict) and not isinstance(value, BaseDict): | ||||||
|             value = BaseDict(value, None, '%s.%s' % (self._name, key)) |             value = BaseDict(value, None, f"{self._name}.{key}") | ||||||
|             super(BaseDict, self).__setitem__(key, value) |             super().__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif not isinstance(value, BaseList) and isinstance(value, list): |         elif isinstance(value, list) and not isinstance(value, BaseList): | ||||||
|             value = BaseList(value, None, '%s.%s' % (self._name, key)) |             value = BaseList(value, None, f"{self._name}.{key}") | ||||||
|             super(BaseDict, self).__setitem__(key, value) |             super().__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|     def __setitem__(self, key, value, *args, **kwargs): |  | ||||||
|         self._mark_as_changed(key) |  | ||||||
|         return super(BaseDict, self).__setitem__(key, value) |  | ||||||
|  |  | ||||||
|     def __delete__(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseDict, self).__delete__(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def __delitem__(self, key, *args, **kwargs): |  | ||||||
|         self._mark_as_changed(key) |  | ||||||
|         return super(BaseDict, self).__delitem__(key) |  | ||||||
|  |  | ||||||
|     def __delattr__(self, key, *args, **kwargs): |  | ||||||
|         self._mark_as_changed(key) |  | ||||||
|         return super(BaseDict, self).__delattr__(key) |  | ||||||
|  |  | ||||||
|     def __getstate__(self): |     def __getstate__(self): | ||||||
|         self.instance = None |         self.instance = None | ||||||
|         self._dereferenced = False |         self._dereferenced = False | ||||||
| @@ -64,88 +85,66 @@ class BaseDict(dict): | |||||||
|         self = state |         self = state | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def clear(self, *args, **kwargs): |     __setitem__ = mark_key_as_changed_wrapper(dict.__setitem__) | ||||||
|         self._mark_as_changed() |     __delattr__ = mark_key_as_changed_wrapper(dict.__delattr__) | ||||||
|         return super(BaseDict, self).clear(*args, **kwargs) |     __delitem__ = mark_key_as_changed_wrapper(dict.__delitem__) | ||||||
|  |     pop = mark_as_changed_wrapper(dict.pop) | ||||||
|     def pop(self, *args, **kwargs): |     clear = mark_as_changed_wrapper(dict.clear) | ||||||
|         self._mark_as_changed() |     update = mark_as_changed_wrapper(dict.update) | ||||||
|         return super(BaseDict, self).pop(*args, **kwargs) |     popitem = mark_as_changed_wrapper(dict.popitem) | ||||||
|  |     setdefault = mark_as_changed_wrapper(dict.setdefault) | ||||||
|     def popitem(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseDict, self).popitem(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def setdefault(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseDict, self).setdefault(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def update(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseDict, self).update(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def _mark_as_changed(self, key=None): |     def _mark_as_changed(self, key=None): | ||||||
|         if hasattr(self._instance, '_mark_as_changed'): |         if hasattr(self._instance, "_mark_as_changed"): | ||||||
|             if key: |             if key: | ||||||
|                 self._instance._mark_as_changed('%s.%s' % (self._name, key)) |                 self._instance._mark_as_changed(f"{self._name}.{key}") | ||||||
|             else: |             else: | ||||||
|                 self._instance._mark_as_changed(self._name) |                 self._instance._mark_as_changed(self._name) | ||||||
|  |  | ||||||
|  |  | ||||||
| class BaseList(list): | class BaseList(list): | ||||||
|     """A special list so we can watch any changes |     """A special list so we can watch any changes.""" | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     _dereferenced = False |     _dereferenced = False | ||||||
|     _instance = None |     _instance = None | ||||||
|     _name = None |     _name = None | ||||||
|  |  | ||||||
|     def __init__(self, list_items, instance, name): |     def __init__(self, list_items, instance, name): | ||||||
|         Document = _import_class('Document') |         BaseDocument = _import_class("BaseDocument") | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |  | ||||||
|  |  | ||||||
|         if isinstance(instance, (Document, EmbeddedDocument)): |         if isinstance(instance, BaseDocument): | ||||||
|             self._instance = weakref.proxy(instance) |             self._instance = weakref.proxy(instance) | ||||||
|         self._name = name |         self._name = name | ||||||
|         super(BaseList, self).__init__(list_items) |         super().__init__(list_items) | ||||||
|  |  | ||||||
|     def __getitem__(self, key, *args, **kwargs): |     def __getitem__(self, key): | ||||||
|         value = super(BaseList, self).__getitem__(key) |         # change index to positive value because MongoDB does not support negative one | ||||||
|  |         if isinstance(key, int) and key < 0: | ||||||
|  |             key = len(self) + key | ||||||
|  |         value = super().__getitem__(key) | ||||||
|  |  | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         if isinstance(key, slice): | ||||||
|  |             # When receiving a slice operator, we don't convert the structure or bind | ||||||
|  |             # it to the parent instance. This is known to be incomplete, but handling it properly would require more work | ||||||
|  |             return value | ||||||
|  |  | ||||||
|  |         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||||
|         if isinstance(value, EmbeddedDocument) and value._instance is None: |         if isinstance(value, EmbeddedDocument) and value._instance is None: | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif not isinstance(value, BaseDict) and isinstance(value, dict): |         elif isinstance(value, dict) and not isinstance(value, BaseDict): | ||||||
|             value = BaseDict(value, None, '%s.%s' % (self._name, key)) |             # Replace dict by BaseDict | ||||||
|             super(BaseList, self).__setitem__(key, value) |             value = BaseDict(value, None, f"{self._name}.{key}") | ||||||
|  |             super().__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif not isinstance(value, BaseList) and isinstance(value, list): |         elif isinstance(value, list) and not isinstance(value, BaseList): | ||||||
|             value = BaseList(value, None, '%s.%s' % (self._name, key)) |             # Replace list by BaseList | ||||||
|             super(BaseList, self).__setitem__(key, value) |             value = BaseList(value, None, f"{self._name}.{key}") | ||||||
|  |             super().__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|     def __setitem__(self, key, value, *args, **kwargs): |     def __iter__(self): | ||||||
|         if isinstance(key, slice): |         yield from super().__iter__() | ||||||
|             self._mark_as_changed() |  | ||||||
|         else: |  | ||||||
|             self._mark_as_changed(key) |  | ||||||
|         return super(BaseList, self).__setitem__(key, value) |  | ||||||
|  |  | ||||||
|     def __delitem__(self, key, *args, **kwargs): |  | ||||||
|         if isinstance(key, slice): |  | ||||||
|             self._mark_as_changed() |  | ||||||
|         else: |  | ||||||
|             self._mark_as_changed(key) |  | ||||||
|         return super(BaseList, self).__delitem__(key) |  | ||||||
|  |  | ||||||
|     def __setslice__(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseList, self).__setslice__(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def __delslice__(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseList, self).__delslice__(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def __getstate__(self): |     def __getstate__(self): | ||||||
|         self.instance = None |         self.instance = None | ||||||
| @@ -156,66 +155,67 @@ class BaseList(list): | |||||||
|         self = state |         self = state | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def append(self, *args, **kwargs): |     def __setitem__(self, key, value): | ||||||
|         self._mark_as_changed() |         changed_key = key | ||||||
|         return super(BaseList, self).append(*args, **kwargs) |         if isinstance(key, slice): | ||||||
|  |             # In case of a slice, we don't bother identifying the exact elements being updated; | ||||||
|  |             # instead, we simply mark the whole list as changed | ||||||
|  |             changed_key = None | ||||||
|  |  | ||||||
|     def extend(self, *args, **kwargs): |         result = super().__setitem__(key, value) | ||||||
|         self._mark_as_changed() |         self._mark_as_changed(changed_key) | ||||||
|         return super(BaseList, self).extend(*args, **kwargs) |         return result | ||||||
|  |  | ||||||
|     def insert(self, *args, **kwargs): |     append = mark_as_changed_wrapper(list.append) | ||||||
|         self._mark_as_changed() |     extend = mark_as_changed_wrapper(list.extend) | ||||||
|         return super(BaseList, self).insert(*args, **kwargs) |     insert = mark_as_changed_wrapper(list.insert) | ||||||
|  |     pop = mark_as_changed_wrapper(list.pop) | ||||||
|     def pop(self, *args, **kwargs): |     remove = mark_as_changed_wrapper(list.remove) | ||||||
|         self._mark_as_changed() |     reverse = mark_as_changed_wrapper(list.reverse) | ||||||
|         return super(BaseList, self).pop(*args, **kwargs) |     sort = mark_as_changed_wrapper(list.sort) | ||||||
|  |     __delitem__ = mark_as_changed_wrapper(list.__delitem__) | ||||||
|     def remove(self, *args, **kwargs): |     __iadd__ = mark_as_changed_wrapper(list.__iadd__) | ||||||
|         self._mark_as_changed() |     __imul__ = mark_as_changed_wrapper(list.__imul__) | ||||||
|         return super(BaseList, self).remove(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def reverse(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseList, self).reverse(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def sort(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseList, self).sort(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def _mark_as_changed(self, key=None): |     def _mark_as_changed(self, key=None): | ||||||
|         if hasattr(self._instance, '_mark_as_changed'): |         if hasattr(self._instance, "_mark_as_changed"): | ||||||
|             if key: |             if key is not None: | ||||||
|                 self._instance._mark_as_changed('%s.%s' % (self._name, key)) |                 self._instance._mark_as_changed(f"{self._name}.{key % len(self)}") | ||||||
|             else: |             else: | ||||||
|                 self._instance._mark_as_changed(self._name) |                 self._instance._mark_as_changed(self._name) | ||||||
|  |  | ||||||
|  |  | ||||||
| class EmbeddedDocumentList(BaseList): | class EmbeddedDocumentList(BaseList): | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def __match_all(cls, i, kwargs): |  | ||||||
|         items = kwargs.items() |  | ||||||
|         return all([ |  | ||||||
|             getattr(i, k) == v or str(getattr(i, k)) == v for k, v in items |  | ||||||
|         ]) |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def __only_matches(cls, obj, kwargs): |  | ||||||
|         if not kwargs: |  | ||||||
|             return obj |  | ||||||
|         return filter(lambda i: cls.__match_all(i, kwargs), obj) |  | ||||||
|  |  | ||||||
|     def __init__(self, list_items, instance, name): |     def __init__(self, list_items, instance, name): | ||||||
|         super(EmbeddedDocumentList, self).__init__(list_items, instance, name) |         super().__init__(list_items, instance, name) | ||||||
|         self._instance = instance |         self._instance = instance | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def __match_all(cls, embedded_doc, kwargs): | ||||||
|  |         """Return True if a given embedded doc matches all the filter | ||||||
|  |         kwargs. If it doesn't, return False. | ||||||
|  |         """ | ||||||
|  |         for key, expected_value in kwargs.items(): | ||||||
|  |             doc_val = getattr(embedded_doc, key) | ||||||
|  |             if doc_val != expected_value and str(doc_val) != expected_value: | ||||||
|  |                 return False | ||||||
|  |         return True | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def __only_matches(cls, embedded_docs, kwargs): | ||||||
|  |         """Return embedded docs that match the filter kwargs.""" | ||||||
|  |         if not kwargs: | ||||||
|  |             return embedded_docs | ||||||
|  |         return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)] | ||||||
|  |  | ||||||
|     def filter(self, **kwargs): |     def filter(self, **kwargs): | ||||||
|         """ |         """ | ||||||
|         Filters the list by only including embedded documents with the |         Filters the list by only including embedded documents with the | ||||||
|         given keyword arguments. |         given keyword arguments. | ||||||
|  |  | ||||||
|  |         This method only supports simple comparison (e.g. .filter(name='John Doe')) | ||||||
|  |         and does not support operators like __gte, __lte, __icontains like queryset.filter does | ||||||
|  |  | ||||||
|         :param kwargs: The keyword arguments corresponding to the fields to |         :param kwargs: The keyword arguments corresponding to the fields to | ||||||
|          filter on. *Multiple arguments are treated as if they are ANDed |          filter on. *Multiple arguments are treated as if they are ANDed | ||||||
|          together.* |          together.* | ||||||
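As a usage sketch (the ``Page`` and ``Comment`` models are hypothetical, and only plain equality comparisons are supported)::

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentListField, StringField)

    class Comment(EmbeddedDocument):
        author = StringField()
        content = StringField()

    class Page(Document):
        comments = EmbeddedDocumentListField(Comment)

    page = Page(comments=[Comment(author='Ross', content='Nice'),
                          Comment(author='John', content='+1')])
    ross_only = page.comments.filter(author='Ross')  # no __gte/__icontains style operators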
| @@ -270,9 +270,7 @@ class EmbeddedDocumentList(BaseList): | |||||||
|         """ |         """ | ||||||
|         values = self.__only_matches(self, kwargs) |         values = self.__only_matches(self, kwargs) | ||||||
|         if len(values) == 0: |         if len(values) == 0: | ||||||
|             raise DoesNotExist( |             raise DoesNotExist("%s matching query does not exist." % self._name) | ||||||
|                 "%s matching query does not exist." % self._name |  | ||||||
|             ) |  | ||||||
|         elif len(values) > 1: |         elif len(values) > 1: | ||||||
|             raise MultipleObjectsReturned( |             raise MultipleObjectsReturned( | ||||||
|                 "%d items returned, instead of 1" % len(values) |                 "%d items returned, instead of 1" % len(values) | ||||||
| @@ -281,19 +279,19 @@ class EmbeddedDocumentList(BaseList): | |||||||
|         return values[0] |         return values[0] | ||||||
|  |  | ||||||
|     def first(self): |     def first(self): | ||||||
|         """ |         """Return the first embedded document in the list, or ``None`` | ||||||
|         Returns the first embedded document in the list, or ``None`` if empty. |         if empty. | ||||||
|         """ |         """ | ||||||
|         if len(self) > 0: |         if len(self) > 0: | ||||||
|             return self[0] |             return self[0] | ||||||
|  |  | ||||||
|     def create(self, **values): |     def create(self, **values): | ||||||
|         """ |         """ | ||||||
|         Creates a new embedded document and saves it to the database. |         Creates a new instance of the EmbeddedDocument and appends it to this EmbeddedDocumentList. | ||||||
|  |  | ||||||
|         .. note:: |         .. note:: | ||||||
|             The embedded document changes are not automatically saved |             The EmbeddedDocument instance is not automatically saved to the database. | ||||||
|             to the database after calling this method. |             You still need to call .save() on the parent Document. | ||||||
|  |  | ||||||
|         :param values: A dictionary of values for the embedded document. |         :param values: A dictionary of values for the embedded document. | ||||||
|         :return: The new embedded document instance. |         :return: The new embedded document instance. | ||||||
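A hedged sketch of `create()`, reusing the hypothetical `Post`/`Comment` models from the `filter()` example above and assuming a connection has been established with `connect()`:

```python
post = Post().save()

# create() builds the Comment, appends it to post.comments and marks the
# list as changed -- nothing is written to MongoDB at this point.
comment = post.comments.create(author="Alice", message="hi")

# The new comment is only persisted when the parent Document is saved.
post.save()
```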
| @@ -333,7 +331,8 @@ class EmbeddedDocumentList(BaseList): | |||||||
|  |  | ||||||
|     def update(self, **update): |     def update(self, **update): | ||||||
|         """ |         """ | ||||||
|         Updates the embedded documents with the given update values. |         Updates the embedded documents with the given replacement values. This | ||||||
|  |         function does not support MongoDB update operators such as ``inc__``. | ||||||
|  |  | ||||||
|         .. note:: |         .. note:: | ||||||
|             The embedded document changes are not automatically saved |             The embedded document changes are not automatically saved | ||||||
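Continuing the same hypothetical sketch, `update()` applies plain replacement values to every embedded document in the list and returns how many were touched; the parent still has to be saved:

```python
# Replace the `author` value on every comment currently in the list.
# Update operators such as inc__ or push__ are not supported here.
num_changed = post.comments.update(author="anonymous")
assert num_changed == len(post.comments)

post.save()  # persist the in-memory changes
```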
| @@ -353,29 +352,35 @@ class EmbeddedDocumentList(BaseList): | |||||||
|         return len(values) |         return len(values) | ||||||
|  |  | ||||||
|  |  | ||||||
| class StrictDict(object): | class StrictDict: | ||||||
|     __slots__ = () |     __slots__ = () | ||||||
|     _special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create']) |     _special_fields = {"get", "pop", "iteritems", "items", "keys", "create"} | ||||||
|     _classes = {} |     _classes = {} | ||||||
|  |  | ||||||
|     def __init__(self, **kwargs): |     def __init__(self, **kwargs): | ||||||
|         for k,v in kwargs.iteritems(): |         for k, v in kwargs.items(): | ||||||
|             setattr(self, k, v) |             setattr(self, k, v) | ||||||
|  |  | ||||||
|     def __getitem__(self, key): |     def __getitem__(self, key): | ||||||
|         key = '_reserved_' + key if key in self._special_fields else key |         key = "_reserved_" + key if key in self._special_fields else key | ||||||
|         try: |         try: | ||||||
|             return getattr(self, key) |             return getattr(self, key) | ||||||
|         except AttributeError: |         except AttributeError: | ||||||
|             raise KeyError(key) |             raise KeyError(key) | ||||||
|  |  | ||||||
|     def __setitem__(self, key, value): |     def __setitem__(self, key, value): | ||||||
|         key = '_reserved_' + key if key in self._special_fields else key |         key = "_reserved_" + key if key in self._special_fields else key | ||||||
|         return setattr(self, key, value) |         return setattr(self, key, value) | ||||||
|  |  | ||||||
|     def __contains__(self, key): |     def __contains__(self, key): | ||||||
|         return hasattr(self, key) |         return hasattr(self, key) | ||||||
|  |  | ||||||
|     def get(self, key, default=None): |     def get(self, key, default=None): | ||||||
|         try: |         try: | ||||||
|             return self[key] |             return self[key] | ||||||
|         except KeyError: |         except KeyError: | ||||||
|             return default |             return default | ||||||
|  |  | ||||||
|     def pop(self, key, default=None): |     def pop(self, key, default=None): | ||||||
|         v = self.get(key, default) |         v = self.get(key, default) | ||||||
|         try: |         try: | ||||||
| @@ -383,67 +388,86 @@ class StrictDict(object): | |||||||
|         except AttributeError: |         except AttributeError: | ||||||
|             pass |             pass | ||||||
|         return v |         return v | ||||||
|  |  | ||||||
|     def iteritems(self): |     def iteritems(self): | ||||||
|         for key in self: |         for key in self: | ||||||
|             yield key, self[key] |             yield key, self[key] | ||||||
|  |  | ||||||
|     def items(self): |     def items(self): | ||||||
|         return [(k, self[k]) for k in iter(self)] |         return [(k, self[k]) for k in iter(self)] | ||||||
|  |  | ||||||
|  |     def iterkeys(self): | ||||||
|  |         return iter(self) | ||||||
|  |  | ||||||
|     def keys(self): |     def keys(self): | ||||||
|         return list(iter(self)) |         return list(iter(self)) | ||||||
|  |  | ||||||
|     def __iter__(self): |     def __iter__(self): | ||||||
|         return (key for key in self.__slots__ if hasattr(self, key)) |         return (key for key in self.__slots__ if hasattr(self, key)) | ||||||
|  |  | ||||||
|     def __len__(self): |     def __len__(self): | ||||||
|         return len(list(self.iteritems())) |         return len(list(self.items())) | ||||||
|  |  | ||||||
|     def __eq__(self, other): |     def __eq__(self, other): | ||||||
|         return self.items() == other.items() |         return list(self.items()) == list(other.items()) | ||||||
|     def __neq__(self, other): |  | ||||||
|         return self.items() != other.items() |     def __ne__(self, other): | ||||||
|  |         return not (self == other) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def create(cls, allowed_keys): |     def create(cls, allowed_keys): | ||||||
|         allowed_keys_tuple = tuple(('_reserved_' + k if k in cls._special_fields else k) for k in allowed_keys) |         allowed_keys_tuple = tuple( | ||||||
|  |             ("_reserved_" + k if k in cls._special_fields else k) for k in allowed_keys | ||||||
|  |         ) | ||||||
|         allowed_keys = frozenset(allowed_keys_tuple) |         allowed_keys = frozenset(allowed_keys_tuple) | ||||||
|         if allowed_keys not in cls._classes: |         if allowed_keys not in cls._classes: | ||||||
|  |  | ||||||
|             class SpecificStrictDict(cls): |             class SpecificStrictDict(cls): | ||||||
|                 __slots__ = allowed_keys_tuple |                 __slots__ = allowed_keys_tuple | ||||||
|  |  | ||||||
|                 def __repr__(self): |                 def __repr__(self): | ||||||
|                     return "{%s}" % ', '.join('"{0!s}": {0!r}'.format(k,v) for (k,v) in self.iteritems()) |                     return "{%s}" % ", ".join( | ||||||
|  |                         f'"{k!s}": {v!r}' for k, v in self.items() | ||||||
|  |                     ) | ||||||
|  |  | ||||||
|             cls._classes[allowed_keys] = SpecificStrictDict |             cls._classes[allowed_keys] = SpecificStrictDict | ||||||
|         return cls._classes[allowed_keys] |         return cls._classes[allowed_keys] | ||||||
|  |  | ||||||
|  |  | ||||||
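`StrictDict` is an internal helper rather than public API; a rough sketch of the `create()` factory (the import path is internal and may change between releases):

```python
from mongoengine.base.datastructures import StrictDict

# Build a dict-like class whose instances only accept a fixed set of keys.
AllowedDict = StrictDict.create(("name", "age"))

d = AllowedDict(name="Alice", age=30)
assert d["name"] == "Alice"
assert d.get("missing") is None
assert sorted(d.keys()) == ["age", "name"]

# Keys outside the allowed set are rejected thanks to __slots__.
try:
    d["nickname"] = "Al"
except AttributeError:
    print("nickname is not an allowed key")
```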
| class SemiStrictDict(StrictDict): | class LazyReference(DBRef): | ||||||
|     __slots__ = ('_extras') |     __slots__ = ("_cached_doc", "passthrough", "document_type") | ||||||
|     _classes = {} |  | ||||||
|     def __getattr__(self, attr): |  | ||||||
|         try: |  | ||||||
|             super(SemiStrictDict, self).__getattr__(attr) |  | ||||||
|         except AttributeError: |  | ||||||
|             try: |  | ||||||
|                 return self.__getattribute__('_extras')[attr] |  | ||||||
|             except KeyError as e: |  | ||||||
|                 raise AttributeError(e) |  | ||||||
|     def __setattr__(self, attr, value): |  | ||||||
|         try: |  | ||||||
|             super(SemiStrictDict, self).__setattr__(attr, value) |  | ||||||
|         except AttributeError: |  | ||||||
|             try: |  | ||||||
|                 self._extras[attr] = value |  | ||||||
|             except AttributeError: |  | ||||||
|                 self._extras = {attr: value} |  | ||||||
|  |  | ||||||
|     def __delattr__(self, attr): |     def fetch(self, force=False): | ||||||
|         try: |         if not self._cached_doc or force: | ||||||
|             super(SemiStrictDict, self).__delattr__(attr) |             self._cached_doc = self.document_type.objects.get(pk=self.pk) | ||||||
|         except AttributeError: |             if not self._cached_doc: | ||||||
|             try: |                 raise DoesNotExist("Trying to dereference unknown document %s" % (self)) | ||||||
|                 del self._extras[attr] |         return self._cached_doc | ||||||
|             except KeyError as e: |  | ||||||
|                 raise AttributeError(e) |  | ||||||
|  |  | ||||||
|     def __iter__(self): |     @property | ||||||
|  |     def pk(self): | ||||||
|  |         return self.id | ||||||
|  |  | ||||||
|  |     def __init__(self, document_type, pk, cached_doc=None, passthrough=False): | ||||||
|  |         self.document_type = document_type | ||||||
|  |         self._cached_doc = cached_doc | ||||||
|  |         self.passthrough = passthrough | ||||||
|  |         super().__init__(self.document_type._get_collection_name(), pk) | ||||||
|  |  | ||||||
|  |     def __getitem__(self, name): | ||||||
|  |         if not self.passthrough: | ||||||
|  |             raise KeyError() | ||||||
|  |         document = self.fetch() | ||||||
|  |         return document[name] | ||||||
|  |  | ||||||
|  |     def __getattr__(self, name): | ||||||
|  |         if not object.__getattribute__(self, "passthrough"): | ||||||
|  |             raise AttributeError() | ||||||
|  |         document = self.fetch() | ||||||
|         try: |         try: | ||||||
|             extras_iter = iter(self.__getattribute__('_extras')) |             return document[name] | ||||||
|         except AttributeError: |         except KeyError: | ||||||
|             extras_iter = () |             raise AttributeError() | ||||||
|         return itertools.chain(super(SemiStrictDict, self).__iter__(), extras_iter) |  | ||||||
|  |     def __repr__(self): | ||||||
|  |         return f"<LazyReference({self.document_type}, {self.pk!r})>" | ||||||
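`LazyReference` objects are normally produced by a `LazyReferenceField` rather than built by hand. A hedged sketch with hypothetical `User`/`BlogPost` documents, assuming a live connection:

```python
from mongoengine import Document, LazyReferenceField, StringField

class User(Document):
    name = StringField()

class BlogPost(Document):
    author = LazyReferenceField(User, passthrough=True)

user = User(name="Alice").save()
BlogPost(author=user).save()

ref = BlogPost.objects.first().author  # a LazyReference, not a User
print(ref.pk)                          # the referenced id -- no query issued yet
print(ref.fetch().name)                # runs the query and caches the document
print(ref["name"])                     # item access works because passthrough=True
```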
File diff suppressed because it is too large
							| @@ -1,31 +1,27 @@ | |||||||
| import operator | import operator | ||||||
| import warnings |  | ||||||
| import weakref | import weakref | ||||||
|  |  | ||||||
| from bson import DBRef, ObjectId, SON |  | ||||||
| import pymongo | import pymongo | ||||||
|  | from bson import SON, DBRef, ObjectId | ||||||
|  |  | ||||||
| from mongoengine.common import _import_class | from mongoengine.base.common import UPDATE_OPERATORS | ||||||
| from mongoengine.errors import ValidationError |  | ||||||
|  |  | ||||||
| from mongoengine.base.common import ALLOW_INHERITANCE |  | ||||||
| from mongoengine.base.datastructures import ( | from mongoengine.base.datastructures import ( | ||||||
|     BaseDict, BaseList, EmbeddedDocumentList |     BaseDict, | ||||||
|  |     BaseList, | ||||||
|  |     EmbeddedDocumentList, | ||||||
| ) | ) | ||||||
|  | from mongoengine.common import _import_class | ||||||
|  | from mongoengine.errors import DeprecatedError, ValidationError | ||||||
|  |  | ||||||
| __all__ = ("BaseField", "ComplexBaseField", | __all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField") | ||||||
|            "ObjectIdField", "GeoJsonBaseField") |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class BaseField(object): | class BaseField: | ||||||
|  |  | ||||||
|     """A base class for fields in a MongoDB document. Instances of this class |     """A base class for fields in a MongoDB document. Instances of this class | ||||||
|     may be added to subclasses of `Document` to define a document's schema. |     may be added to subclasses of `Document` to define a document's schema. | ||||||
|  |  | ||||||
|     .. versionchanged:: 0.5 - added verbose and help text |  | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     name = None |     name = None  # set in TopLevelDocumentMetaclass | ||||||
|     _geo_index = False |     _geo_index = False | ||||||
|     _auto_gen = False  # Call `generate` to generate a value |     _auto_gen = False  # Call `generate` to generate a value | ||||||
|     _auto_dereference = True |     _auto_dereference = True | ||||||
| @@ -36,14 +32,23 @@ class BaseField(object): | |||||||
|     creation_counter = 0 |     creation_counter = 0 | ||||||
|     auto_creation_counter = -1 |     auto_creation_counter = -1 | ||||||
|  |  | ||||||
|     def __init__(self, db_field=None, name=None, required=False, default=None, |     def __init__( | ||||||
|                  unique=False, unique_with=None, primary_key=False, |         self, | ||||||
|                  validation=None, choices=None, verbose_name=None, |         db_field=None, | ||||||
|                  help_text=None, null=False, sparse=False): |         required=False, | ||||||
|  |         default=None, | ||||||
|  |         unique=False, | ||||||
|  |         unique_with=None, | ||||||
|  |         primary_key=False, | ||||||
|  |         validation=None, | ||||||
|  |         choices=None, | ||||||
|  |         null=False, | ||||||
|  |         sparse=False, | ||||||
|  |         **kwargs, | ||||||
|  |     ): | ||||||
|         """ |         """ | ||||||
|         :param db_field: The database field to store this field in |         :param db_field: The database field to store this field in | ||||||
|             (defaults to the name of the field) |             (defaults to the name of the field) | ||||||
|         :param name: Depreciated - use db_field |  | ||||||
|         :param required: If the field is required. Whether it has to have a |         :param required: If the field is required. Whether it has to have a | ||||||
|             value or not. Defaults to False. |             value or not. Defaults to False. | ||||||
|         :param default: (optional) The default value for this field if no value |         :param default: (optional) The default value for this field if no value | ||||||
| @@ -54,24 +59,21 @@ class BaseField(object): | |||||||
|             unique with. |             unique with. | ||||||
|         :param primary_key: Mark this field as the primary key. Defaults to False. |         :param primary_key: Mark this field as the primary key. Defaults to False. | ||||||
|         :param validation: (optional) A callable to validate the value of the |         :param validation: (optional) A callable to validate the value of the | ||||||
|             field.  Generally this is deprecated in favour of the |             field.  The callable takes the value as parameter and should raise | ||||||
|             `FIELD.validate` method |             a ValidationError if validation fails | ||||||
|         :param choices: (optional) The valid choices |         :param choices: (optional) The valid choices | ||||||
|         :param verbose_name: (optional)  The verbose name for the field. |         :param null: (optional) If the field value can be null. If not, and there is a default value, | ||||||
|             Designed to be human readable and is often used when generating |  | ||||||
|             model forms from the document model. |  | ||||||
|         :param help_text: (optional) The help text for this field and is often |  | ||||||
|             used when generating model forms from the document model. |  | ||||||
|         :param null: (optional) Is the field value can be null. If no and there is a default value |  | ||||||
|             then the default value is set |             then the default value is set | ||||||
|         :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False` |         :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False` | ||||||
|             means that uniqueness won't be enforced for `None` values |             means that uniqueness won't be enforced for `None` values | ||||||
|  |         :param **kwargs: (optional) Arbitrary indirection-free metadata for | ||||||
|  |             this field can be supplied as additional keyword arguments and | ||||||
|  |             accessed as attributes of the field. Must not conflict with any | ||||||
|  |             existing attributes. Common metadata includes `verbose_name` and | ||||||
|  |             `help_text`. | ||||||
|         """ |         """ | ||||||
|         self.db_field = (db_field or name) if not primary_key else '_id' |         self.db_field = db_field if not primary_key else "_id" | ||||||
|  |  | ||||||
|         if name: |  | ||||||
|             msg = "Fields' 'name' attribute deprecated in favour of 'db_field'" |  | ||||||
|             warnings.warn(msg, DeprecationWarning) |  | ||||||
|         self.required = required or primary_key |         self.required = required or primary_key | ||||||
|         self.default = default |         self.default = default | ||||||
|         self.unique = bool(unique or unique_with) |         self.unique = bool(unique or unique_with) | ||||||
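Since `verbose_name` and `help_text` are no longer dedicated parameters, arbitrary keyword arguments now become plain attributes on the field instance (hypothetical `Person` model; a name clashing with an existing attribute raises the `TypeError` shown further down):

```python
from mongoengine import Document, StringField

class Person(Document):
    name = StringField(verbose_name="Full name", help_text="Shown in admin forms")

field = Person._fields["name"]
assert field.verbose_name == "Full name"
assert field.help_text == "Shown in admin forms"

# A metadata key that shadows an existing BaseField attribute is rejected:
# StringField(to_mongo="oops") -> TypeError("StringField already has attribute(s): to_mongo")
```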
| @@ -79,13 +81,39 @@ class BaseField(object): | |||||||
|         self.primary_key = primary_key |         self.primary_key = primary_key | ||||||
|         self.validation = validation |         self.validation = validation | ||||||
|         self.choices = choices |         self.choices = choices | ||||||
|         self.verbose_name = verbose_name |  | ||||||
|         self.help_text = help_text |  | ||||||
|         self.null = null |         self.null = null | ||||||
|         self.sparse = sparse |         self.sparse = sparse | ||||||
|  |         self._owner_document = None | ||||||
|  |  | ||||||
|  |         # Make sure db_field is a string (if it's explicitly defined). | ||||||
|  |         if self.db_field is not None and not isinstance(self.db_field, str): | ||||||
|  |             raise TypeError("db_field should be a string.") | ||||||
|  |  | ||||||
|  |         # Make sure db_field doesn't contain any forbidden characters. | ||||||
|  |         if isinstance(self.db_field, str) and ( | ||||||
|  |             "." in self.db_field | ||||||
|  |             or "\0" in self.db_field | ||||||
|  |             or self.db_field.startswith("$") | ||||||
|  |         ): | ||||||
|  |             raise ValueError( | ||||||
|  |                 'field names cannot contain dots (".") or null characters ' | ||||||
|  |                 '("\\0"), and they must not start with a dollar sign ("$").' | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |         # Detect and report conflicts between metadata and base properties. | ||||||
|  |         conflicts = set(dir(self)) & set(kwargs) | ||||||
|  |         if conflicts: | ||||||
|  |             raise TypeError( | ||||||
|  |                 "%s already has attribute(s): %s" | ||||||
|  |                 % (self.__class__.__name__, ", ".join(conflicts)) | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |         # Assign metadata to the instance | ||||||
|  |         # This efficient method is available because no __slots__ are defined. | ||||||
|  |         self.__dict__.update(kwargs) | ||||||
|  |  | ||||||
|         # Adjust the appropriate creation counter, and save our local copy. |         # Adjust the appropriate creation counter, and save our local copy. | ||||||
|         if self.db_field == '_id': |         if self.db_field == "_id": | ||||||
|             self.creation_counter = BaseField.auto_creation_counter |             self.creation_counter = BaseField.auto_creation_counter | ||||||
|             BaseField.auto_creation_counter -= 1 |             BaseField.auto_creation_counter -= 1 | ||||||
|         else: |         else: | ||||||
| @@ -93,8 +121,7 @@ class BaseField(object): | |||||||
|             BaseField.creation_counter += 1 |             BaseField.creation_counter += 1 | ||||||
|  |  | ||||||
|     def __get__(self, instance, owner): |     def __get__(self, instance, owner): | ||||||
|         """Descriptor for retrieving a value from a field in a document. |         """Descriptor for retrieving a value from a field in a document.""" | ||||||
|         """ |  | ||||||
|         if instance is None: |         if instance is None: | ||||||
|             # Document class being used rather than a document object |             # Document class being used rather than a document object | ||||||
|             return self |             return self | ||||||
| @@ -103,11 +130,9 @@ class BaseField(object): | |||||||
|         return instance._data.get(self.name) |         return instance._data.get(self.name) | ||||||
|  |  | ||||||
|     def __set__(self, instance, value): |     def __set__(self, instance, value): | ||||||
|         """Descriptor for assigning a value to a field in a document. |         """Descriptor for assigning a value to a field in a document.""" | ||||||
|         """ |         # If setting to None and there is a default value provided for this | ||||||
|  |         # field, then set the value to the default value. | ||||||
|         # If setting to None and theres a default |  | ||||||
|         # Then set the value to the default value |  | ||||||
|         if value is None: |         if value is None: | ||||||
|             if self.null: |             if self.null: | ||||||
|                 value = None |                 value = None | ||||||
| @@ -118,120 +143,182 @@ class BaseField(object): | |||||||
|  |  | ||||||
|         if instance._initialised: |         if instance._initialised: | ||||||
|             try: |             try: | ||||||
|                 if (self.name not in instance._data or |                 value_has_changed = ( | ||||||
|                         instance._data[self.name] != value): |                     self.name not in instance._data | ||||||
|  |                     or instance._data[self.name] != value | ||||||
|  |                 ) | ||||||
|  |                 if value_has_changed: | ||||||
|                     instance._mark_as_changed(self.name) |                     instance._mark_as_changed(self.name) | ||||||
|             except: |             except Exception: | ||||||
|                 # Values cant be compared eg: naive and tz datetimes |                 # Some values can't be compared and throw an error when we | ||||||
|                 # So mark it as changed |                 # attempt to do so (e.g. tz-naive and tz-aware datetimes). | ||||||
|  |                 # Mark the field as changed in such cases. | ||||||
|                 instance._mark_as_changed(self.name) |                 instance._mark_as_changed(self.name) | ||||||
|  |  | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||||
|         if isinstance(value, EmbeddedDocument): |         if isinstance(value, EmbeddedDocument): | ||||||
|             value._instance = weakref.proxy(instance) |             value._instance = weakref.proxy(instance) | ||||||
|  |         elif isinstance(value, (list, tuple)): | ||||||
|  |             for v in value: | ||||||
|  |                 if isinstance(v, EmbeddedDocument): | ||||||
|  |                     v._instance = weakref.proxy(instance) | ||||||
|  |  | ||||||
|         instance._data[self.name] = value |         instance._data[self.name] = value | ||||||
|  |  | ||||||
|     def error(self, message="", errors=None, field_name=None): |     def error(self, message="", errors=None, field_name=None): | ||||||
|         """Raises a ValidationError. |         """Raise a ValidationError.""" | ||||||
|         """ |  | ||||||
|         field_name = field_name if field_name else self.name |         field_name = field_name if field_name else self.name | ||||||
|         raise ValidationError(message, errors=errors, field_name=field_name) |         raise ValidationError(message, errors=errors, field_name=field_name) | ||||||
|  |  | ||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|         """Convert a MongoDB-compatible type to a Python type. |         """Convert a MongoDB-compatible type to a Python type.""" | ||||||
|         """ |  | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|     def to_mongo(self, value): |     def to_mongo(self, value): | ||||||
|         """Convert a Python type to a MongoDB-compatible type. |         """Convert a Python type to a MongoDB-compatible type.""" | ||||||
|         """ |  | ||||||
|         return self.to_python(value) |         return self.to_python(value) | ||||||
|  |  | ||||||
|  |     def _to_mongo_safe_call(self, value, use_db_field=True, fields=None): | ||||||
|  |         """Helper method to call to_mongo with proper inputs.""" | ||||||
|  |         f_inputs = self.to_mongo.__code__.co_varnames | ||||||
|  |         ex_vars = {} | ||||||
|  |         if "fields" in f_inputs: | ||||||
|  |             ex_vars["fields"] = fields | ||||||
|  |  | ||||||
|  |         if "use_db_field" in f_inputs: | ||||||
|  |             ex_vars["use_db_field"] = use_db_field | ||||||
|  |  | ||||||
|  |         return self.to_mongo(value, **ex_vars) | ||||||
|  |  | ||||||
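`_to_mongo_safe_call` inspects the target `to_mongo` signature so simple and document-aware fields can share one serialization path. A rough sketch with throwaway subclasses (not real mongoengine fields; the import is from an internal module):

```python
from mongoengine.base.fields import BaseField

class PlainField(BaseField):
    def to_mongo(self, value):
        return value

class DocAwareField(BaseField):
    def to_mongo(self, value, use_db_field=True, fields=None):
        return {"value": value, "use_db_field": use_db_field, "fields": fields}

# The extra keyword arguments are forwarded only when the target signature
# actually declares them, so both calls below work.
print(PlainField()._to_mongo_safe_call("x", use_db_field=False, fields=["a"]))
print(DocAwareField()._to_mongo_safe_call("x", use_db_field=False, fields=["a"]))
```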
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
|         """Prepare a value that is being used in a query for PyMongo. |         """Prepare a value that is being used in a query for PyMongo.""" | ||||||
|         """ |         if op in UPDATE_OPERATORS: | ||||||
|  |             self.validate(value) | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|     def validate(self, value, clean=True): |     def validate(self, value, clean=True): | ||||||
|         """Perform validation on a value. |         """Perform validation on a value.""" | ||||||
|         """ |  | ||||||
|         pass |         pass | ||||||
|  |  | ||||||
|     def _validate(self, value, **kwargs): |     def _validate_choices(self, value): | ||||||
|         Document = _import_class('Document') |         Document = _import_class("Document") | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||||
|  |  | ||||||
|  |         choice_list = self.choices | ||||||
|  |         if isinstance(next(iter(choice_list)), (list, tuple)): | ||||||
|  |             # next(iter) is useful for sets | ||||||
|  |             choice_list = [k for k, _ in choice_list] | ||||||
|  |  | ||||||
|  |         # Choices which are other types of Documents | ||||||
|  |         if isinstance(value, (Document, EmbeddedDocument)): | ||||||
|  |             if not any(isinstance(value, c) for c in choice_list): | ||||||
|  |                 self.error("Value must be an instance of %s" % (choice_list)) | ||||||
|  |         # Choices which are types other than Documents | ||||||
|  |         else: | ||||||
|  |             values = value if isinstance(value, (list, tuple)) else [value] | ||||||
|  |             if len(set(values) - set(choice_list)): | ||||||
|  |                 self.error("Value must be one of %s" % str(choice_list)) | ||||||
|  |  | ||||||
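The choices constraint is normally exercised through a field's `choices` argument; a hypothetical sketch showing that value/label pairs are accepted and only the value part is checked:

```python
from mongoengine import Document, StringField, ValidationError

class Shirt(Document):
    size = StringField(choices=(("S", "Small"), ("M", "Medium"), ("L", "Large")))

Shirt(size="M").validate()          # passes
try:
    Shirt(size="XXL").validate()    # not among the allowed values
except ValidationError as exc:
    print(exc)
```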
|  |     def _validate(self, value, **kwargs): | ||||||
|         # Check the Choices Constraint |         # Check the Choices Constraint | ||||||
|         if self.choices: |         if self.choices: | ||||||
|  |             self._validate_choices(value) | ||||||
|             choice_list = self.choices |  | ||||||
|             if isinstance(self.choices[0], (list, tuple)): |  | ||||||
|                 choice_list = [k for k, v in self.choices] |  | ||||||
|  |  | ||||||
|             # Choices which are other types of Documents |  | ||||||
|             if isinstance(value, (Document, EmbeddedDocument)): |  | ||||||
|                 if not any(isinstance(value, c) for c in choice_list): |  | ||||||
|                     self.error( |  | ||||||
|                         'Value must be instance of %s' % unicode(choice_list) |  | ||||||
|                     ) |  | ||||||
|             # Choices which are types other than Documents |  | ||||||
|             elif value not in choice_list: |  | ||||||
|                 self.error('Value must be one of %s' % unicode(choice_list)) |  | ||||||
|  |  | ||||||
|         # check validation argument |         # check validation argument | ||||||
|         if self.validation is not None: |         if self.validation is not None: | ||||||
|             if callable(self.validation): |             if callable(self.validation): | ||||||
|                 if not self.validation(value): |                 try: | ||||||
|                     self.error('Value does not match custom validation method') |                     # breaking change of 0.18 | ||||||
|  |                     # Get rid of True/False-type return for the validation method | ||||||
|  |                     # in favor of having validation raising a ValidationError | ||||||
|  |                     ret = self.validation(value) | ||||||
|  |                     if ret is not None: | ||||||
|  |                         raise DeprecatedError( | ||||||
|  |                             "validation argument for `%s` must not return anything, " | ||||||
|  |                             "it should raise a ValidationError if validation fails" | ||||||
|  |                             % self.name | ||||||
|  |                         ) | ||||||
|  |                 except ValidationError as ex: | ||||||
|  |                     self.error(str(ex)) | ||||||
|             else: |             else: | ||||||
|                 raise ValueError('validation argument for "%s" must be a ' |                 raise ValueError( | ||||||
|                                  'callable.' % self.name) |                     'validation argument for `"%s"` must be a ' "callable." % self.name | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|         self.validate(value, **kwargs) |         self.validate(value, **kwargs) | ||||||
|  |  | ||||||
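Per the 0.18 breaking change noted in the comments above, a custom `validation` callable should raise `ValidationError` on failure and return nothing on success (returning `True`/`False` now triggers `DeprecatedError`). A hypothetical sketch:

```python
from mongoengine import Document, StringField, ValidationError

def validate_slug(value):
    # New style: raise on failure, return None on success.
    if " " in value:
        raise ValidationError("slugs must not contain spaces")

class Article(Document):
    slug = StringField(validation=validate_slug)

Article(slug="hello-world").validate()      # passes
try:
    Article(slug="hello world").validate()  # fails inside validate_slug
except ValidationError as exc:
    print(exc)
```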
|  |     @property | ||||||
|  |     def owner_document(self): | ||||||
|  |         return self._owner_document | ||||||
|  |  | ||||||
|  |     def _set_owner_document(self, owner_document): | ||||||
|  |         self._owner_document = owner_document | ||||||
|  |  | ||||||
|  |     @owner_document.setter | ||||||
|  |     def owner_document(self, owner_document): | ||||||
|  |         self._set_owner_document(owner_document) | ||||||
|  |  | ||||||
|  |  | ||||||
| class ComplexBaseField(BaseField): | class ComplexBaseField(BaseField): | ||||||
|  |  | ||||||
|     """Handles complex fields, such as lists / dictionaries. |     """Handles complex fields, such as lists / dictionaries. | ||||||
|  |  | ||||||
|     Allows for nesting of embedded documents inside complex types. |     Allows for nesting of embedded documents inside complex types. | ||||||
|     Handles the lazy dereferencing of a queryset by lazily dereferencing all |     Handles the lazy dereferencing of a queryset by lazily dereferencing all | ||||||
|     items in a list / dict rather than one at a time. |     items in a list / dict rather than one at a time. | ||||||
|  |  | ||||||
|     .. versionadded:: 0.5 |  | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     field = None |     def __init__(self, field=None, **kwargs): | ||||||
|  |         self.field = field | ||||||
|  |         super().__init__(**kwargs) | ||||||
|  |  | ||||||
|  |     @staticmethod | ||||||
|  |     def _lazy_load_refs(instance, name, ref_values, *, max_depth): | ||||||
|  |         _dereference = _import_class("DeReference")() | ||||||
|  |         documents = _dereference( | ||||||
|  |             ref_values, | ||||||
|  |             max_depth=max_depth, | ||||||
|  |             instance=instance, | ||||||
|  |             name=name, | ||||||
|  |         ) | ||||||
|  |         return documents | ||||||
|  |  | ||||||
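This helper is what lets list/dict reference fields dereference in a single batch (`max_depth=1`) on first access. A usage-level sketch with hypothetical `Tag`/`Entry` models, assuming a live connection and previously saved data:

```python
from mongoengine import Document, ListField, ReferenceField, StringField

class Tag(Document):
    name = StringField()

class Entry(Document):
    tags = ListField(ReferenceField(Tag))

entry = Entry.objects.first()

# Accessing the field dereferences the whole list of tags at once and the
# result is cached via the _dereferenced flag checked below, so iterating
# does not issue one query per reference.
names = [tag.name for tag in entry.tags]
```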
|     def __get__(self, instance, owner): |     def __get__(self, instance, owner): | ||||||
|         """Descriptor to automatically dereference references. |         """Descriptor to automatically dereference references.""" | ||||||
|         """ |  | ||||||
|         if instance is None: |         if instance is None: | ||||||
|             # Document class being used rather than a document object |             # Document class being used rather than a document object | ||||||
|             return self |             return self | ||||||
|  |  | ||||||
|         ReferenceField = _import_class('ReferenceField') |         ReferenceField = _import_class("ReferenceField") | ||||||
|         GenericReferenceField = _import_class('GenericReferenceField') |         GenericReferenceField = _import_class("GenericReferenceField") | ||||||
|         EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') |         EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField") | ||||||
|         dereference = (self._auto_dereference and |  | ||||||
|                        (self.field is None or isinstance(self.field, |  | ||||||
|                                                          (GenericReferenceField, ReferenceField)))) |  | ||||||
|  |  | ||||||
|         _dereference = _import_class("DeReference")() |         auto_dereference = instance._fields[self.name]._auto_dereference | ||||||
|  |  | ||||||
|         self._auto_dereference = instance._fields[self.name]._auto_dereference |         dereference = auto_dereference and ( | ||||||
|         if instance._initialised and dereference and instance._data.get(self.name): |             self.field is None | ||||||
|             instance._data[self.name] = _dereference( |             or isinstance(self.field, (GenericReferenceField, ReferenceField)) | ||||||
|                 instance._data.get(self.name), max_depth=1, instance=instance, |         ) | ||||||
|                 name=self.name |  | ||||||
|  |         if ( | ||||||
|  |             instance._initialised | ||||||
|  |             and dereference | ||||||
|  |             and instance._data.get(self.name) | ||||||
|  |             and not getattr(instance._data[self.name], "_dereferenced", False) | ||||||
|  |         ): | ||||||
|  |             ref_values = instance._data.get(self.name) | ||||||
|  |             instance._data[self.name] = self._lazy_load_refs( | ||||||
|  |                 ref_values=ref_values, instance=instance, name=self.name, max_depth=1 | ||||||
|             ) |             ) | ||||||
|  |             if hasattr(instance._data[self.name], "_dereferenced"): | ||||||
|  |                 instance._data[self.name]._dereferenced = True | ||||||
|  |  | ||||||
|         value = super(ComplexBaseField, self).__get__(instance, owner) |         value = super().__get__(instance, owner) | ||||||
|  |  | ||||||
|         # Convert lists / values so we can watch for any changes on them |         # Convert lists / values so we can watch for any changes on them | ||||||
|         if isinstance(value, (list, tuple)): |         if isinstance(value, (list, tuple)): | ||||||
|             if (issubclass(type(self), EmbeddedDocumentListField) and |             if issubclass(type(self), EmbeddedDocumentListField) and not isinstance( | ||||||
|                     not isinstance(value, EmbeddedDocumentList)): |                 value, EmbeddedDocumentList | ||||||
|  |             ): | ||||||
|                 value = EmbeddedDocumentList(value, instance, self.name) |                 value = EmbeddedDocumentList(value, instance, self.name) | ||||||
|             elif not isinstance(value, BaseList): |             elif not isinstance(value, BaseList): | ||||||
|                 value = BaseList(value, instance, self.name) |                 value = BaseList(value, instance, self.name) | ||||||
| @@ -240,11 +327,14 @@ class ComplexBaseField(BaseField): | |||||||
|             value = BaseDict(value, instance, self.name) |             value = BaseDict(value, instance, self.name) | ||||||
|             instance._data[self.name] = value |             instance._data[self.name] = value | ||||||
|  |  | ||||||
|         if (self._auto_dereference and instance._initialised and |         if ( | ||||||
|                 isinstance(value, (BaseList, BaseDict)) |             auto_dereference | ||||||
|                 and not value._dereferenced): |             and instance._initialised | ||||||
|             value = _dereference( |             and isinstance(value, (BaseList, BaseDict)) | ||||||
|                 value, max_depth=1, instance=instance, name=self.name |             and not value._dereferenced | ||||||
|  |         ): | ||||||
|  |             value = self._lazy_load_refs( | ||||||
|  |                 ref_values=value, instance=instance, name=self.name, max_depth=1 | ||||||
|             ) |             ) | ||||||
|             value._dereferenced = True |             value._dereferenced = True | ||||||
|             instance._data[self.name] = value |             instance._data[self.name] = value | ||||||
| @@ -252,138 +342,146 @@ class ComplexBaseField(BaseField): | |||||||
|         return value |         return value | ||||||
|  |  | ||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|         """Convert a MongoDB-compatible type to a Python type. |         """Convert a MongoDB-compatible type to a Python type.""" | ||||||
|         """ |         if isinstance(value, str): | ||||||
|         Document = _import_class('Document') |  | ||||||
|  |  | ||||||
|         if isinstance(value, basestring): |  | ||||||
|             return value |             return value | ||||||
|  |  | ||||||
|         if hasattr(value, 'to_python'): |         if hasattr(value, "to_python"): | ||||||
|             return value.to_python() |             return value.to_python() | ||||||
|  |  | ||||||
|  |         BaseDocument = _import_class("BaseDocument") | ||||||
|  |         if isinstance(value, BaseDocument): | ||||||
|  |             # Something is wrong, return the value as it is | ||||||
|  |             return value | ||||||
|  |  | ||||||
|         is_list = False |         is_list = False | ||||||
|         if not hasattr(value, 'items'): |         if not hasattr(value, "items"): | ||||||
|             try: |             try: | ||||||
|                 is_list = True |                 is_list = True | ||||||
|                 value = dict([(k, v) for k, v in enumerate(value)]) |                 value = {idx: v for idx, v in enumerate(value)} | ||||||
|             except TypeError:  # Not iterable return the value |             except TypeError:  # Not iterable return the value | ||||||
|                 return value |                 return value | ||||||
|  |  | ||||||
|         if self.field: |         if self.field: | ||||||
|             value_dict = dict([(key, self.field.to_python(item)) |             self.field._auto_dereference = self._auto_dereference | ||||||
|                                for key, item in value.items()]) |             value_dict = { | ||||||
|  |                 key: self.field.to_python(item) for key, item in value.items() | ||||||
|  |             } | ||||||
|  |         else: | ||||||
|  |             Document = _import_class("Document") | ||||||
|  |             value_dict = {} | ||||||
|  |             for k, v in value.items(): | ||||||
|  |                 if isinstance(v, Document): | ||||||
|  |                     # We need the id from the saved object to create the DBRef | ||||||
|  |                     if v.pk is None: | ||||||
|  |                         self.error( | ||||||
|  |                             "You can only reference documents once they" | ||||||
|  |                             " have been saved to the database" | ||||||
|  |                         ) | ||||||
|  |                     collection = v._get_collection_name() | ||||||
|  |                     value_dict[k] = DBRef(collection, v.pk) | ||||||
|  |                 elif hasattr(v, "to_python"): | ||||||
|  |                     value_dict[k] = v.to_python() | ||||||
|  |                 else: | ||||||
|  |                     value_dict[k] = self.to_python(v) | ||||||
|  |  | ||||||
|  |         if is_list:  # Convert back to a list | ||||||
|  |             return [ | ||||||
|  |                 v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0)) | ||||||
|  |             ] | ||||||
|  |         return value_dict | ||||||
|  |  | ||||||
|  |     def to_mongo(self, value, use_db_field=True, fields=None): | ||||||
|  |         """Convert a Python type to a MongoDB-compatible type.""" | ||||||
|  |         Document = _import_class("Document") | ||||||
|  |         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||||
|  |         GenericReferenceField = _import_class("GenericReferenceField") | ||||||
|  |  | ||||||
|  |         if isinstance(value, str): | ||||||
|  |             return value | ||||||
|  |  | ||||||
|  |         if hasattr(value, "to_mongo"): | ||||||
|  |             if isinstance(value, Document): | ||||||
|  |                 return GenericReferenceField().to_mongo(value) | ||||||
|  |             cls = value.__class__ | ||||||
|  |             val = value.to_mongo(use_db_field, fields) | ||||||
|  |             # If it's a document that is not inherited add _cls | ||||||
|  |             if isinstance(value, EmbeddedDocument): | ||||||
|  |                 val["_cls"] = cls.__name__ | ||||||
|  |             return val | ||||||
|  |  | ||||||
|  |         is_list = False | ||||||
|  |         if not hasattr(value, "items"): | ||||||
|  |             try: | ||||||
|  |                 is_list = True | ||||||
|  |                 value = {k: v for k, v in enumerate(value)} | ||||||
|  |             except TypeError:  # Not iterable return the value | ||||||
|  |                 return value | ||||||
|  |  | ||||||
|  |         if self.field: | ||||||
|  |             value_dict = { | ||||||
|  |                 key: self.field._to_mongo_safe_call(item, use_db_field, fields) | ||||||
|  |                 for key, item in value.items() | ||||||
|  |             } | ||||||
|         else: |         else: | ||||||
|             value_dict = {} |             value_dict = {} | ||||||
|             for k, v in value.items(): |             for k, v in value.items(): | ||||||
|                 if isinstance(v, Document): |                 if isinstance(v, Document): | ||||||
|                     # We need the id from the saved object to create the DBRef |                     # We need the id from the saved object to create the DBRef | ||||||
|                     if v.pk is None: |                     if v.pk is None: | ||||||
|                         self.error('You can only reference documents once they' |                         self.error( | ||||||
|                                    ' have been saved to the database') |                             "You can only reference documents once they" | ||||||
|                     collection = v._get_collection_name() |                             " have been saved to the database" | ||||||
|                     value_dict[k] = DBRef(collection, v.pk) |                         ) | ||||||
|                 elif hasattr(v, 'to_python'): |  | ||||||
|                     value_dict[k] = v.to_python() |  | ||||||
|                 else: |  | ||||||
|                     value_dict[k] = self.to_python(v) |  | ||||||
|  |  | ||||||
|         if is_list:  # Convert back to a list |  | ||||||
|             return [v for k, v in sorted(value_dict.items(), |  | ||||||
|                                          key=operator.itemgetter(0))] |  | ||||||
|         return value_dict |  | ||||||
|  |  | ||||||
|     def to_mongo(self, value): |  | ||||||
|         """Convert a Python type to a MongoDB-compatible type. |  | ||||||
|         """ |  | ||||||
|         Document = _import_class("Document") |  | ||||||
|         EmbeddedDocument = _import_class("EmbeddedDocument") |  | ||||||
|         GenericReferenceField = _import_class("GenericReferenceField") |  | ||||||
|  |  | ||||||
|         if isinstance(value, basestring): |  | ||||||
|             return value |  | ||||||
|  |  | ||||||
|         if hasattr(value, 'to_mongo'): |  | ||||||
|             if isinstance(value, Document): |  | ||||||
|                 return GenericReferenceField().to_mongo(value) |  | ||||||
|             cls = value.__class__ |  | ||||||
|             val = value.to_mongo() |  | ||||||
|             # If we its a document thats not inherited add _cls |  | ||||||
|             if (isinstance(value, EmbeddedDocument)): |  | ||||||
|                 val['_cls'] = cls.__name__ |  | ||||||
|             return val |  | ||||||
|  |  | ||||||
|         is_list = False |  | ||||||
|         if not hasattr(value, 'items'): |  | ||||||
|             try: |  | ||||||
|                 is_list = True |  | ||||||
|                 value = dict([(k, v) for k, v in enumerate(value)]) |  | ||||||
|             except TypeError:  # Not iterable return the value |  | ||||||
|                 return value |  | ||||||
|  |  | ||||||
|         if self.field: |  | ||||||
|             value_dict = dict([(key, self.field.to_mongo(item)) |  | ||||||
|                                for key, item in value.iteritems()]) |  | ||||||
|         else: |  | ||||||
|             value_dict = {} |  | ||||||
|             for k, v in value.iteritems(): |  | ||||||
|                 if isinstance(v, Document): |  | ||||||
|                     # We need the id from the saved object to create the DBRef |  | ||||||
|                     if v.pk is None: |  | ||||||
|                         self.error('You can only reference documents once they' |  | ||||||
|                                    ' have been saved to the database') |  | ||||||
|  |  | ||||||
|                     # If it's a document that is not inheritable it won't have |                     # If it's a document that is not inheritable it won't have | ||||||
|                     # any _cls data, so making it a generic reference allows |                     # any _cls data, so making it a generic reference allows | ||||||
|                     # us to dereference it |                     # us to dereference it | ||||||
|                     meta = getattr(v, '_meta', {}) |                     meta = getattr(v, "_meta", {}) | ||||||
|                     allow_inheritance = ( |                     allow_inheritance = meta.get("allow_inheritance") | ||||||
|                         meta.get('allow_inheritance', ALLOW_INHERITANCE) |  | ||||||
|                         is True) |  | ||||||
|                     if not allow_inheritance and not self.field: |                     if not allow_inheritance and not self.field: | ||||||
|                         value_dict[k] = GenericReferenceField().to_mongo(v) |                         value_dict[k] = GenericReferenceField().to_mongo(v) | ||||||
|                     else: |                     else: | ||||||
|                         collection = v._get_collection_name() |                         collection = v._get_collection_name() | ||||||
|                         value_dict[k] = DBRef(collection, v.pk) |                         value_dict[k] = DBRef(collection, v.pk) | ||||||
|                 elif hasattr(v, 'to_mongo'): |                 elif hasattr(v, "to_mongo"): | ||||||
|                     cls = v.__class__ |                     cls = v.__class__ | ||||||
|                     val = v.to_mongo() |                     val = v.to_mongo(use_db_field, fields) | ||||||
|                     # If we its a document thats not inherited add _cls |                     # If it's a document that is not inherited add _cls | ||||||
|                     if (isinstance(v, (Document, EmbeddedDocument))): |                     if isinstance(v, (Document, EmbeddedDocument)): | ||||||
|                         val['_cls'] = cls.__name__ |                         val["_cls"] = cls.__name__ | ||||||
|                     value_dict[k] = val |                     value_dict[k] = val | ||||||
|                 else: |                 else: | ||||||
|                     value_dict[k] = self.to_mongo(v) |                     value_dict[k] = self.to_mongo(v, use_db_field, fields) | ||||||
|  |  | ||||||
|         if is_list:  # Convert back to a list |         if is_list:  # Convert back to a list | ||||||
|             return [v for k, v in sorted(value_dict.items(), |             return [ | ||||||
|                                          key=operator.itemgetter(0))] |                 v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0)) | ||||||
|  |             ] | ||||||
|         return value_dict |         return value_dict | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         """If field is provided ensure the value is valid. |         """If field is provided ensure the value is valid.""" | ||||||
|         """ |  | ||||||
|         errors = {} |         errors = {} | ||||||
|         if self.field: |         if self.field: | ||||||
|             if hasattr(value, 'iteritems') or hasattr(value, 'items'): |             if hasattr(value, "items"): | ||||||
|                 sequence = value.iteritems() |                 sequence = value.items() | ||||||
|             else: |             else: | ||||||
|                 sequence = enumerate(value) |                 sequence = enumerate(value) | ||||||
|             for k, v in sequence: |             for k, v in sequence: | ||||||
|                 try: |                 try: | ||||||
|                     self.field._validate(v) |                     self.field._validate(v) | ||||||
|                 except ValidationError, error: |                 except ValidationError as error: | ||||||
|                     errors[k] = error.errors or error |                     errors[k] = error.errors or error | ||||||
|                 except (ValueError, AssertionError), error: |                 except (ValueError, AssertionError) as error: | ||||||
|                     errors[k] = error |                     errors[k] = error | ||||||
|  |  | ||||||
|             if errors: |             if errors: | ||||||
|                 field_class = self.field.__class__.__name__ |                 field_class = self.field.__class__.__name__ | ||||||
|                 self.error('Invalid %s item (%s)' % (field_class, value), |                 self.error(f"Invalid {field_class} item ({value})", errors=errors) | ||||||
|                            errors=errors) |  | ||||||
|         # Don't allow empty values if required |         # Don't allow empty values if required | ||||||
|         if self.required and not value: |         if self.required and not value: | ||||||
|             self.error('Field is required and cannot be empty') |             self.error("Field is required and cannot be empty") | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
|         return self.to_mongo(value) |         return self.to_mongo(value) | ||||||
| @@ -398,29 +496,24 @@ class ComplexBaseField(BaseField): | |||||||
|             self.field.owner_document = owner_document |             self.field.owner_document = owner_document | ||||||
|         self._owner_document = owner_document |         self._owner_document = owner_document | ||||||
|  |  | ||||||
|     def _get_owner_document(self, owner_document): |  | ||||||
|         self._owner_document = owner_document |  | ||||||
|  |  | ||||||
|     owner_document = property(_get_owner_document, _set_owner_document) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ObjectIdField(BaseField): | class ObjectIdField(BaseField): | ||||||
|  |     """A field wrapper around MongoDB's ObjectIds.""" | ||||||
|     """A field wrapper around MongoDB's ObjectIds. |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|         if not isinstance(value, ObjectId): |         try: | ||||||
|             value = ObjectId(value) |             if not isinstance(value, ObjectId): | ||||||
|  |                 value = ObjectId(value) | ||||||
|  |         except Exception: | ||||||
|  |             pass | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|     def to_mongo(self, value): |     def to_mongo(self, value): | ||||||
|         if not isinstance(value, ObjectId): |         if not isinstance(value, ObjectId): | ||||||
|             try: |             try: | ||||||
|                 return ObjectId(unicode(value)) |                 return ObjectId(str(value)) | ||||||
|             except Exception, e: |             except Exception as e: | ||||||
|                 # e.message attribute has been deprecated since Python 2.6 |                 self.error(str(e)) | ||||||
|                 self.error(unicode(e)) |  | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
| @@ -428,46 +521,42 @@ class ObjectIdField(BaseField): | |||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         try: |         try: | ||||||
|             ObjectId(unicode(value)) |             ObjectId(str(value)) | ||||||
|         except: |         except Exception: | ||||||
|             self.error('Invalid Object ID') |             self.error("Invalid ObjectID") | ||||||
|  |  | ||||||
|  |  | ||||||
| class GeoJsonBaseField(BaseField): | class GeoJsonBaseField(BaseField): | ||||||
|  |     """A geo json field storing a geojson style object.""" | ||||||
|     """A geo json field storing a geojson style object. |  | ||||||
|  |  | ||||||
|     .. versionadded:: 0.8 |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     _geo_index = pymongo.GEOSPHERE |     _geo_index = pymongo.GEOSPHERE | ||||||
|     _type = "GeoBase" |     _type = "GeoBase" | ||||||
|  |  | ||||||
|     def __init__(self, auto_index=True, *args, **kwargs): |     def __init__(self, auto_index=True, *args, **kwargs): | ||||||
|         """ |         """ | ||||||
|         :param bool auto_index: Automatically create a "2dsphere" index.\ |         :param bool auto_index: Automatically create a '2dsphere' index.\ | ||||||
|             Defaults to `True`. |             Defaults to `True`. | ||||||
|         """ |         """ | ||||||
|         self._name = "%sField" % self._type |         self._name = "%sField" % self._type | ||||||
|         if not auto_index: |         if not auto_index: | ||||||
|             self._geo_index = False |             self._geo_index = False | ||||||
|         super(GeoJsonBaseField, self).__init__(*args, **kwargs) |         super().__init__(*args, **kwargs) | ||||||
|  |  | ||||||
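A hypothetical usage sketch: concrete subclasses such as `PointField` set `_type` and accept either a bare coordinate pair or a full GeoJSON dict of the matching type.

```python
from mongoengine import Document, PointField

class City(Document):
    # auto_index=True (the default) registers a 2dsphere index on this field.
    location = PointField()

City(location=[-73.97, 40.77]).validate()
City(location={"type": "Point", "coordinates": [-73.97, 40.77]}).validate()
```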
|     def validate(self, value): |     def validate(self, value): | ||||||
|         """Validate the GeoJson object based on its type |         """Validate the GeoJson object based on its type.""" | ||||||
|         """ |  | ||||||
|         if isinstance(value, dict): |         if isinstance(value, dict): | ||||||
|             if set(value.keys()) == set(['type', 'coordinates']): |             if set(value.keys()) == {"type", "coordinates"}: | ||||||
|                 if value['type'] != self._type: |                 if value["type"] != self._type: | ||||||
|                     self.error('%s type must be "%s"' % |                     self.error(f'{self._name} type must be "{self._type}"') | ||||||
|                                (self._name, self._type)) |                 return self.validate(value["coordinates"]) | ||||||
|                 return self.validate(value['coordinates']) |  | ||||||
|             else: |             else: | ||||||
|                 self.error('%s can only accept a valid GeoJson dictionary' |                 self.error( | ||||||
|                            ' or lists of (x, y)' % self._name) |                     "%s can only accept a valid GeoJson dictionary" | ||||||
|  |                     " or lists of (x, y)" % self._name | ||||||
|  |                 ) | ||||||
|                 return |                 return | ||||||
|         elif not isinstance(value, (list, tuple)): |         elif not isinstance(value, (list, tuple)): | ||||||
|             self.error('%s can only accept lists of [x, y]' % self._name) |             self.error("%s can only accept lists of [x, y]" % self._name) | ||||||
|             return |             return | ||||||
|  |  | ||||||
|         validate = getattr(self, "_validate_%s" % self._type.lower()) |         validate = getattr(self, "_validate_%s" % self._type.lower()) | ||||||
| @@ -477,19 +566,19 @@ class GeoJsonBaseField(BaseField): | |||||||
|  |  | ||||||
|     def _validate_polygon(self, value, top_level=True): |     def _validate_polygon(self, value, top_level=True): | ||||||
|         if not isinstance(value, (list, tuple)): |         if not isinstance(value, (list, tuple)): | ||||||
|             return 'Polygons must contain list of linestrings' |             return "Polygons must contain list of linestrings" | ||||||
|  |  | ||||||
|         # Quick and dirty validator |         # Quick and dirty validator | ||||||
|         try: |         try: | ||||||
|             value[0][0][0] |             value[0][0][0] | ||||||
|         except: |         except (TypeError, IndexError): | ||||||
|             return "Invalid Polygon must contain at least one valid linestring" |             return "Invalid Polygon must contain at least one valid linestring" | ||||||
|  |  | ||||||
|         errors = [] |         errors = [] | ||||||
|         for val in value: |         for val in value: | ||||||
|             error = self._validate_linestring(val, False) |             error = self._validate_linestring(val, False) | ||||||
|             if not error and val[0] != val[-1]: |             if not error and val[0] != val[-1]: | ||||||
|                 error = 'LineStrings must start and end at the same point' |                 error = "LineStrings must start and end at the same point" | ||||||
|             if error and error not in errors: |             if error and error not in errors: | ||||||
|                 errors.append(error) |                 errors.append(error) | ||||||
|         if errors: |         if errors: | ||||||
| @@ -499,14 +588,14 @@ class GeoJsonBaseField(BaseField): | |||||||
|                 return "%s" % ", ".join(errors) |                 return "%s" % ", ".join(errors) | ||||||
|  |  | ||||||
|     def _validate_linestring(self, value, top_level=True): |     def _validate_linestring(self, value, top_level=True): | ||||||
|         """Validates a linestring""" |         """Validate a linestring.""" | ||||||
|         if not isinstance(value, (list, tuple)): |         if not isinstance(value, (list, tuple)): | ||||||
|             return 'LineStrings must contain list of coordinate pairs' |             return "LineStrings must contain list of coordinate pairs" | ||||||
|  |  | ||||||
|         # Quick and dirty validator |         # Quick and dirty validator | ||||||
|         try: |         try: | ||||||
|             value[0][0] |             value[0][0] | ||||||
|         except: |         except (TypeError, IndexError): | ||||||
|             return "Invalid LineString must contain at least one valid point" |             return "Invalid LineString must contain at least one valid point" | ||||||
|  |  | ||||||
|         errors = [] |         errors = [] | ||||||
| @@ -523,21 +612,22 @@ class GeoJsonBaseField(BaseField): | |||||||
|     def _validate_point(self, value): |     def _validate_point(self, value): | ||||||
|         """Validate each set of coords""" |         """Validate each set of coords""" | ||||||
|         if not isinstance(value, (list, tuple)): |         if not isinstance(value, (list, tuple)): | ||||||
|             return 'Points must be a list of coordinate pairs' |             return "Points must be a list of coordinate pairs" | ||||||
|         elif not len(value) == 2: |         elif not len(value) == 2: | ||||||
|             return "Value (%s) must be a two-dimensional point" % repr(value) |             return "Value (%s) must be a two-dimensional point" % repr(value) | ||||||
|         elif (not isinstance(value[0], (float, int)) or |         elif not isinstance(value[0], (float, int)) or not isinstance( | ||||||
|               not isinstance(value[1], (float, int))): |             value[1], (float, int) | ||||||
|  |         ): | ||||||
|             return "Both values (%s) in point must be float or int" % repr(value) |             return "Both values (%s) in point must be float or int" % repr(value) | ||||||
|  |  | ||||||
|     def _validate_multipoint(self, value): |     def _validate_multipoint(self, value): | ||||||
|         if not isinstance(value, (list, tuple)): |         if not isinstance(value, (list, tuple)): | ||||||
|             return 'MultiPoint must be a list of Point' |             return "MultiPoint must be a list of Point" | ||||||
|  |  | ||||||
|         # Quick and dirty validator |         # Quick and dirty validator | ||||||
|         try: |         try: | ||||||
|             value[0][0] |             value[0][0] | ||||||
|         except: |         except (TypeError, IndexError): | ||||||
|             return "Invalid MultiPoint must contain at least one valid point" |             return "Invalid MultiPoint must contain at least one valid point" | ||||||
|  |  | ||||||
|         errors = [] |         errors = [] | ||||||
| @@ -551,12 +641,12 @@ class GeoJsonBaseField(BaseField): | |||||||
|  |  | ||||||
|     def _validate_multilinestring(self, value, top_level=True): |     def _validate_multilinestring(self, value, top_level=True): | ||||||
|         if not isinstance(value, (list, tuple)): |         if not isinstance(value, (list, tuple)): | ||||||
|             return 'MultiLineString must be a list of LineString' |             return "MultiLineString must be a list of LineString" | ||||||
|  |  | ||||||
|         # Quick and dirty validator |         # Quick and dirty validator | ||||||
|         try: |         try: | ||||||
|             value[0][0][0] |             value[0][0][0] | ||||||
|         except: |         except (TypeError, IndexError): | ||||||
|             return "Invalid MultiLineString must contain at least one valid linestring" |             return "Invalid MultiLineString must contain at least one valid linestring" | ||||||
|  |  | ||||||
|         errors = [] |         errors = [] | ||||||
| @@ -573,12 +663,12 @@ class GeoJsonBaseField(BaseField): | |||||||
|  |  | ||||||
|     def _validate_multipolygon(self, value): |     def _validate_multipolygon(self, value): | ||||||
|         if not isinstance(value, (list, tuple)): |         if not isinstance(value, (list, tuple)): | ||||||
|             return 'MultiPolygon must be a list of Polygon' |             return "MultiPolygon must be a list of Polygon" | ||||||
|  |  | ||||||
|         # Quick and dirty validator |         # Quick and dirty validator | ||||||
|         try: |         try: | ||||||
|             value[0][0][0][0] |             value[0][0][0][0] | ||||||
|         except: |         except (TypeError, IndexError): | ||||||
|             return "Invalid MultiPolygon must contain at least one valid Polygon" |             return "Invalid MultiPolygon must contain at least one valid Polygon" | ||||||
|  |  | ||||||
|         errors = [] |         errors = [] | ||||||
|   | |||||||
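The refactored GeoJSON validators above keep the earlier strategy: probe the expected nesting depth with a cheap index access (the "quick and dirty validator"), then walk each level and collect error strings instead of raising on the first failure, dispatching via getattr(self, "_validate_%s" % self._type.lower()). A standalone sketch of the Point/LineString steps under that strategy follows; the helper names are hypothetical and nothing here is MongoEngine API.

# Minimal sketch of the nested coordinate validation used by GeoJsonBaseField
# above; helper names are illustrative, not part of MongoEngine.
def validate_point(value):
    """Return an error string for an invalid [x, y] pair, or None if valid."""
    if not isinstance(value, (list, tuple)):
        return "Points must be a list of coordinate pairs"
    if len(value) != 2:
        return "Value (%r) must be a two-dimensional point" % (value,)
    if not all(isinstance(v, (float, int)) for v in value):
        return "Both values (%r) in point must be float or int" % (value,)
    return None


def validate_linestring(value):
    """Validate a list of points, collecting errors rather than raising."""
    if not isinstance(value, (list, tuple)):
        return "LineStrings must contain list of coordinate pairs"
    errors = {validate_point(point) for point in value} - {None}
    if errors:
        return "Invalid LineString: %s" % ", ".join(sorted(errors))
    return None


print(validate_point([40.0, 5]))                # None -> valid
print(validate_linestring([[0, 0], [1, "x"]]))  # reports the invalid point

Polygon and the Multi* variants in the diff reuse the same building blocks one nesting level deeper, which is why each begins by indexing value[0][0] (or deeper) before looping.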
| @@ -1,69 +1,75 @@ | |||||||
|  | import itertools | ||||||
| import warnings | import warnings | ||||||
|  |  | ||||||
| import pymongo | from mongoengine.base.common import _document_registry | ||||||
|  | from mongoengine.base.fields import ( | ||||||
|  |     BaseField, | ||||||
|  |     ComplexBaseField, | ||||||
|  |     ObjectIdField, | ||||||
|  | ) | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.errors import InvalidDocumentError | from mongoengine.errors import InvalidDocumentError | ||||||
| from mongoengine.python_support import PY3 | from mongoengine.queryset import ( | ||||||
| from mongoengine.queryset import (DO_NOTHING, DoesNotExist, |     DO_NOTHING, | ||||||
|                                   MultipleObjectsReturned, |     DoesNotExist, | ||||||
|                                   QuerySet, QuerySetManager) |     MultipleObjectsReturned, | ||||||
|  |     QuerySetManager, | ||||||
|  | ) | ||||||
|  |  | ||||||
| from mongoengine.base.common import _document_registry, ALLOW_INHERITANCE | __all__ = ("DocumentMetaclass", "TopLevelDocumentMetaclass") | ||||||
| from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField |  | ||||||
|  |  | ||||||
| __all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass') |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DocumentMetaclass(type): | class DocumentMetaclass(type): | ||||||
|  |     """Metaclass for all documents.""" | ||||||
|  |  | ||||||
|     """Metaclass for all documents. |     # TODO lower complexity of this method | ||||||
|     """ |     def __new__(mcs, name, bases, attrs): | ||||||
|  |         flattened_bases = mcs._get_bases(bases) | ||||||
|     def __new__(cls, name, bases, attrs): |         super_new = super().__new__ | ||||||
|         flattened_bases = cls._get_bases(bases) |  | ||||||
|         super_new = super(DocumentMetaclass, cls).__new__ |  | ||||||
|  |  | ||||||
|         # If a base class just call super |         # If a base class just call super | ||||||
|         metaclass = attrs.get('my_metaclass') |         metaclass = attrs.get("my_metaclass") | ||||||
|         if metaclass and issubclass(metaclass, DocumentMetaclass): |         if metaclass and issubclass(metaclass, DocumentMetaclass): | ||||||
|             return super_new(cls, name, bases, attrs) |             return super_new(mcs, name, bases, attrs) | ||||||
|  |  | ||||||
|         attrs['_is_document'] = attrs.get('_is_document', False) |         attrs["_is_document"] = attrs.get("_is_document", False) | ||||||
|         attrs['_cached_reference_fields'] = [] |         attrs["_cached_reference_fields"] = [] | ||||||
|  |  | ||||||
|         # EmbeddedDocuments could have meta data for inheritance |         # EmbeddedDocuments could have meta data for inheritance | ||||||
|         if 'meta' in attrs: |         if "meta" in attrs: | ||||||
|             attrs['_meta'] = attrs.pop('meta') |             attrs["_meta"] = attrs.pop("meta") | ||||||
|  |  | ||||||
|         # EmbeddedDocuments should inherit meta data |         # EmbeddedDocuments should inherit meta data | ||||||
|         if '_meta' not in attrs: |         if "_meta" not in attrs: | ||||||
|             meta = MetaDict() |             meta = MetaDict() | ||||||
|             for base in flattened_bases[::-1]: |             for base in flattened_bases[::-1]: | ||||||
|                 # Add any mixin metadata from plain objects |                 # Add any mixin metadata from plain objects | ||||||
|                 if hasattr(base, 'meta'): |                 if hasattr(base, "meta"): | ||||||
|                     meta.merge(base.meta) |                     meta.merge(base.meta) | ||||||
|                 elif hasattr(base, '_meta'): |                 elif hasattr(base, "_meta"): | ||||||
|                     meta.merge(base._meta) |                     meta.merge(base._meta) | ||||||
|             attrs['_meta'] = meta |             attrs["_meta"] = meta | ||||||
|             attrs['_meta']['abstract'] = False  # 789: EmbeddedDocument shouldn't inherit abstract |             attrs["_meta"][ | ||||||
|  |                 "abstract" | ||||||
|  |             ] = False  # 789: EmbeddedDocument shouldn't inherit abstract | ||||||
|  |  | ||||||
|         if attrs['_meta'].get('allow_inheritance', ALLOW_INHERITANCE): |         # If allow_inheritance is True, add a "_cls" string field to the attrs | ||||||
|             StringField = _import_class('StringField') |         if attrs["_meta"].get("allow_inheritance"): | ||||||
|             attrs['_cls'] = StringField() |             StringField = _import_class("StringField") | ||||||
|  |             attrs["_cls"] = StringField() | ||||||
|  |  | ||||||
|         # Handle document Fields |         # Handle document Fields | ||||||
|  |  | ||||||
|         # Merge all fields from subclasses |         # Merge all fields from subclasses | ||||||
|         doc_fields = {} |         doc_fields = {} | ||||||
|         for base in flattened_bases[::-1]: |         for base in flattened_bases[::-1]: | ||||||
|             if hasattr(base, '_fields'): |             if hasattr(base, "_fields"): | ||||||
|                 doc_fields.update(base._fields) |                 doc_fields.update(base._fields) | ||||||
|  |  | ||||||
|             # Standard object mixin - merge in any Fields |             # Standard object mixin - merge in any Fields | ||||||
|             if not hasattr(base, '_meta'): |             if not hasattr(base, "_meta"): | ||||||
|                 base_fields = {} |                 base_fields = {} | ||||||
|                 for attr_name, attr_value in base.__dict__.iteritems(): |                 for attr_name, attr_value in base.__dict__.items(): | ||||||
|                     if not isinstance(attr_value, BaseField): |                     if not isinstance(attr_value, BaseField): | ||||||
|                         continue |                         continue | ||||||
|                     attr_value.name = attr_name |                     attr_value.name = attr_name | ||||||
| @@ -75,7 +81,7 @@ class DocumentMetaclass(type): | |||||||
|  |  | ||||||
|         # Discover any document fields |         # Discover any document fields | ||||||
|         field_names = {} |         field_names = {} | ||||||
|         for attr_name, attr_value in attrs.iteritems(): |         for attr_name, attr_value in attrs.items(): | ||||||
|             if not isinstance(attr_value, BaseField): |             if not isinstance(attr_value, BaseField): | ||||||
|                 continue |                 continue | ||||||
|             attr_value.name = attr_name |             attr_value.name = attr_name | ||||||
| @@ -84,26 +90,29 @@ class DocumentMetaclass(type): | |||||||
|             doc_fields[attr_name] = attr_value |             doc_fields[attr_name] = attr_value | ||||||
|  |  | ||||||
|             # Count names to ensure no db_field redefinitions |             # Count names to ensure no db_field redefinitions | ||||||
|             field_names[attr_value.db_field] = field_names.get( |             field_names[attr_value.db_field] = ( | ||||||
|                 attr_value.db_field, 0) + 1 |                 field_names.get(attr_value.db_field, 0) + 1 | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         # Ensure no duplicate db_fields |         # Ensure no duplicate db_fields | ||||||
|         duplicate_db_fields = [k for k, v in field_names.items() if v > 1] |         duplicate_db_fields = [k for k, v in field_names.items() if v > 1] | ||||||
|         if duplicate_db_fields: |         if duplicate_db_fields: | ||||||
|             msg = ("Multiple db_fields defined for: %s " % |             msg = "Multiple db_fields defined for: %s " % ", ".join(duplicate_db_fields) | ||||||
|                    ", ".join(duplicate_db_fields)) |  | ||||||
|             raise InvalidDocumentError(msg) |             raise InvalidDocumentError(msg) | ||||||
|  |  | ||||||
|         # Set _fields and db_field maps |         # Set _fields and db_field maps | ||||||
|         attrs['_fields'] = doc_fields |         attrs["_fields"] = doc_fields | ||||||
|         attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k)) |         attrs["_db_field_map"] = { | ||||||
|                                        for k, v in doc_fields.iteritems()]) |             k: getattr(v, "db_field", k) for k, v in doc_fields.items() | ||||||
|         attrs['_reverse_db_field_map'] = dict( |         } | ||||||
|             (v, k) for k, v in attrs['_db_field_map'].iteritems()) |         attrs["_reverse_db_field_map"] = { | ||||||
|  |             v: k for k, v in attrs["_db_field_map"].items() | ||||||
|  |         } | ||||||
|  |  | ||||||
|         attrs['_fields_ordered'] = tuple(i[1] for i in sorted( |         attrs["_fields_ordered"] = tuple( | ||||||
|                                          (v.creation_counter, v.name) |             i[1] | ||||||
|                                          for v in doc_fields.itervalues())) |             for i in sorted((v.creation_counter, v.name) for v in doc_fields.values()) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|         # |         # | ||||||
|         # Set document hierarchy |         # Set document hierarchy | ||||||
| @@ -111,45 +120,50 @@ class DocumentMetaclass(type): | |||||||
|         superclasses = () |         superclasses = () | ||||||
|         class_name = [name] |         class_name = [name] | ||||||
|         for base in flattened_bases: |         for base in flattened_bases: | ||||||
|             if (not getattr(base, '_is_base_cls', True) and |             if not getattr(base, "_is_base_cls", True) and not getattr( | ||||||
|                     not getattr(base, '_meta', {}).get('abstract', True)): |                 base, "_meta", {} | ||||||
|                 # Collate heirarchy for _cls and _subclasses |             ).get("abstract", True): | ||||||
|  |                 # Collate hierarchy for _cls and _subclasses | ||||||
|                 class_name.append(base.__name__) |                 class_name.append(base.__name__) | ||||||
|  |  | ||||||
|             if hasattr(base, '_meta'): |             if hasattr(base, "_meta"): | ||||||
|                 # Warn if allow_inheritance isn't set and prevent |                 # Warn if allow_inheritance isn't set and prevent | ||||||
|                 # inheritance of classes where inheritance is set to False |                 # inheritance of classes where inheritance is set to False | ||||||
|                 allow_inheritance = base._meta.get('allow_inheritance', |                 allow_inheritance = base._meta.get("allow_inheritance") | ||||||
|                                                    ALLOW_INHERITANCE) |                 if not allow_inheritance and not base._meta.get("abstract"): | ||||||
|                 if (allow_inheritance is not True and |                     raise ValueError( | ||||||
|                         not base._meta.get('abstract')): |                         "Document %s may not be subclassed. " | ||||||
|                     raise ValueError('Document %s may not be subclassed' % |                         'To enable inheritance, use the "allow_inheritance" meta attribute.' | ||||||
|                                      base.__name__) |                         % base.__name__ | ||||||
|  |                     ) | ||||||
|  |  | ||||||
|         # Get superclasses from last base superclass |         # Get superclasses from last base superclass | ||||||
|         document_bases = [b for b in flattened_bases |         document_bases = [b for b in flattened_bases if hasattr(b, "_class_name")] | ||||||
|                           if hasattr(b, '_class_name')] |  | ||||||
|         if document_bases: |         if document_bases: | ||||||
|             superclasses = document_bases[0]._superclasses |             superclasses = document_bases[0]._superclasses | ||||||
|             superclasses += (document_bases[0]._class_name, ) |             superclasses += (document_bases[0]._class_name,) | ||||||
|  |  | ||||||
|         _cls = '.'.join(reversed(class_name)) |         _cls = ".".join(reversed(class_name)) | ||||||
|         attrs['_class_name'] = _cls |         attrs["_class_name"] = _cls | ||||||
|         attrs['_superclasses'] = superclasses |         attrs["_superclasses"] = superclasses | ||||||
|         attrs['_subclasses'] = (_cls, ) |         attrs["_subclasses"] = (_cls,) | ||||||
|         attrs['_types'] = attrs['_subclasses']  # TODO deprecate _types |         attrs["_types"] = attrs["_subclasses"]  # TODO deprecate _types | ||||||
|  |  | ||||||
|         # Create the new_class |         # Create the new_class | ||||||
|         new_class = super_new(cls, name, bases, attrs) |         new_class = super_new(mcs, name, bases, attrs) | ||||||
|  |  | ||||||
|         # Set _subclasses |         # Set _subclasses | ||||||
|         for base in document_bases: |         for base in document_bases: | ||||||
|             if _cls not in base._subclasses: |             if _cls not in base._subclasses: | ||||||
|                 base._subclasses += (_cls,) |                 base._subclasses += (_cls,) | ||||||
|             base._types = base._subclasses   # TODO deprecate _types |             base._types = base._subclasses  # TODO deprecate _types | ||||||
|  |  | ||||||
|         (Document, EmbeddedDocument, DictField, |         ( | ||||||
|          CachedReferenceField) = cls._import_classes() |             Document, | ||||||
|  |             EmbeddedDocument, | ||||||
|  |             DictField, | ||||||
|  |             CachedReferenceField, | ||||||
|  |         ) = mcs._import_classes() | ||||||
|  |  | ||||||
|         if issubclass(new_class, Document): |         if issubclass(new_class, Document): | ||||||
|             new_class._collection = None |             new_class._collection = None | ||||||
| @@ -157,268 +171,292 @@ class DocumentMetaclass(type): | |||||||
|         # Add class to the _document_registry |         # Add class to the _document_registry | ||||||
|         _document_registry[new_class._class_name] = new_class |         _document_registry[new_class._class_name] = new_class | ||||||
|  |  | ||||||
|         # In Python 2, User-defined methods objects have special read-only |  | ||||||
|         # attributes 'im_func' and 'im_self' which contain the function obj |  | ||||||
|         # and class instance object respectively.  With Python 3 these special |  | ||||||
|         # attributes have been replaced by __func__ and __self__.  The Blinker |  | ||||||
|         # module continues to use im_func and im_self, so the code below |  | ||||||
|         # copies __func__ into im_func and __self__ into im_self for |  | ||||||
|         # classmethod objects in Document derived classes. |  | ||||||
|         if PY3: |  | ||||||
|             for key, val in new_class.__dict__.items(): |  | ||||||
|                 if isinstance(val, classmethod): |  | ||||||
|                     f = val.__get__(new_class) |  | ||||||
|                     if hasattr(f, '__func__') and not hasattr(f, 'im_func'): |  | ||||||
|                         f.__dict__.update({'im_func': getattr(f, '__func__')}) |  | ||||||
|                     if hasattr(f, '__self__') and not hasattr(f, 'im_self'): |  | ||||||
|                         f.__dict__.update({'im_self': getattr(f, '__self__')}) |  | ||||||
|  |  | ||||||
|         # Handle delete rules |         # Handle delete rules | ||||||
|         for field in new_class._fields.itervalues(): |         for field in new_class._fields.values(): | ||||||
|             f = field |             f = field | ||||||
|             f.owner_document = new_class |             if f.owner_document is None: | ||||||
|             delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING) |                 f.owner_document = new_class | ||||||
|  |             delete_rule = getattr(f, "reverse_delete_rule", DO_NOTHING) | ||||||
|             if isinstance(f, CachedReferenceField): |             if isinstance(f, CachedReferenceField): | ||||||
|  |  | ||||||
|                 if issubclass(new_class, EmbeddedDocument): |                 if issubclass(new_class, EmbeddedDocument): | ||||||
|                     raise InvalidDocumentError( |                     raise InvalidDocumentError( | ||||||
|                         "CachedReferenceFields is not allowed in EmbeddedDocuments") |                         "CachedReferenceFields is not allowed in EmbeddedDocuments" | ||||||
|                 if not f.document_type: |                     ) | ||||||
|                     raise InvalidDocumentError( |  | ||||||
|                         "Document is not avaiable to sync") |  | ||||||
|  |  | ||||||
|                 if f.auto_sync: |                 if f.auto_sync: | ||||||
|                     f.start_listener() |                     f.start_listener() | ||||||
|  |  | ||||||
|                 f.document_type._cached_reference_fields.append(f) |                 f.document_type._cached_reference_fields.append(f) | ||||||
|  |  | ||||||
|             if isinstance(f, ComplexBaseField) and hasattr(f, 'field'): |             if isinstance(f, ComplexBaseField) and hasattr(f, "field"): | ||||||
|                 delete_rule = getattr(f.field, |                 delete_rule = getattr(f.field, "reverse_delete_rule", DO_NOTHING) | ||||||
|                                       'reverse_delete_rule', |  | ||||||
|                                       DO_NOTHING) |  | ||||||
|                 if isinstance(f, DictField) and delete_rule != DO_NOTHING: |                 if isinstance(f, DictField) and delete_rule != DO_NOTHING: | ||||||
|                     msg = ("Reverse delete rules are not supported " |                     msg = ( | ||||||
|                            "for %s (field: %s)" % |                         "Reverse delete rules are not supported " | ||||||
|                            (field.__class__.__name__, field.name)) |                         "for %s (field: %s)" % (field.__class__.__name__, field.name) | ||||||
|  |                     ) | ||||||
|                     raise InvalidDocumentError(msg) |                     raise InvalidDocumentError(msg) | ||||||
|  |  | ||||||
|                 f = field.field |                 f = field.field | ||||||
|  |  | ||||||
|             if delete_rule != DO_NOTHING: |             if delete_rule != DO_NOTHING: | ||||||
|                 if issubclass(new_class, EmbeddedDocument): |                 if issubclass(new_class, EmbeddedDocument): | ||||||
|                     msg = ("Reverse delete rules are not supported for " |                     msg = ( | ||||||
|                            "EmbeddedDocuments (field: %s)" % field.name) |                         "Reverse delete rules are not supported for " | ||||||
|  |                         "EmbeddedDocuments (field: %s)" % field.name | ||||||
|  |                     ) | ||||||
|                     raise InvalidDocumentError(msg) |                     raise InvalidDocumentError(msg) | ||||||
|                 f.document_type.register_delete_rule(new_class, |                 f.document_type.register_delete_rule(new_class, field.name, delete_rule) | ||||||
|                                                      field.name, delete_rule) |  | ||||||
|  |  | ||||||
|             if (field.name and hasattr(Document, field.name) and |             if ( | ||||||
|                     EmbeddedDocument not in new_class.mro()): |                 field.name | ||||||
|                 msg = ("%s is a document method and not a valid " |                 and hasattr(Document, field.name) | ||||||
|                        "field name" % field.name) |                 and EmbeddedDocument not in new_class.mro() | ||||||
|  |             ): | ||||||
|  |                 msg = "%s is a document method and not a valid field name" % field.name | ||||||
|                 raise InvalidDocumentError(msg) |                 raise InvalidDocumentError(msg) | ||||||
|  |  | ||||||
|         return new_class |         return new_class | ||||||
|  |  | ||||||
|     def add_to_class(self, name, value): |  | ||||||
|         setattr(self, name, value) |  | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _get_bases(cls, bases): |     def _get_bases(mcs, bases): | ||||||
|         if isinstance(bases, BasesTuple): |         if isinstance(bases, BasesTuple): | ||||||
|             return bases |             return bases | ||||||
|         seen = [] |         seen = [] | ||||||
|         bases = cls.__get_bases(bases) |         bases = mcs.__get_bases(bases) | ||||||
|         unique_bases = (b for b in bases if not (b in seen or seen.append(b))) |         unique_bases = (b for b in bases if not (b in seen or seen.append(b))) | ||||||
|         return BasesTuple(unique_bases) |         return BasesTuple(unique_bases) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def __get_bases(cls, bases): |     def __get_bases(mcs, bases): | ||||||
|         for base in bases: |         for base in bases: | ||||||
|             if base is object: |             if base is object: | ||||||
|                 continue |                 continue | ||||||
|             yield base |             yield base | ||||||
|             for child_base in cls.__get_bases(base.__bases__): |             yield from mcs.__get_bases(base.__bases__) | ||||||
|                 yield child_base |  | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _import_classes(cls): |     def _import_classes(mcs): | ||||||
|         Document = _import_class('Document') |         Document = _import_class("Document") | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||||
|         DictField = _import_class('DictField') |         DictField = _import_class("DictField") | ||||||
|         CachedReferenceField = _import_class('CachedReferenceField') |         CachedReferenceField = _import_class("CachedReferenceField") | ||||||
|         return (Document, EmbeddedDocument, DictField, CachedReferenceField) |         return Document, EmbeddedDocument, DictField, CachedReferenceField | ||||||
|  |  | ||||||
|  |  | ||||||
| class TopLevelDocumentMetaclass(DocumentMetaclass): | class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||||
|  |  | ||||||
|     """Metaclass for top-level documents (i.e. documents that have their own |     """Metaclass for top-level documents (i.e. documents that have their own | ||||||
|     collection in the database). |     collection in the database). | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def __new__(cls, name, bases, attrs): |     def __new__(mcs, name, bases, attrs): | ||||||
|         flattened_bases = cls._get_bases(bases) |         flattened_bases = mcs._get_bases(bases) | ||||||
|         super_new = super(TopLevelDocumentMetaclass, cls).__new__ |         super_new = super().__new__ | ||||||
|  |  | ||||||
|         # Set default _meta data if base class, otherwise get user defined meta |         # Set default _meta data if base class, otherwise get user defined meta | ||||||
|         if (attrs.get('my_metaclass') == TopLevelDocumentMetaclass): |         if attrs.get("my_metaclass") == TopLevelDocumentMetaclass: | ||||||
|             # defaults |             # defaults | ||||||
|             attrs['_meta'] = { |             attrs["_meta"] = { | ||||||
|                 'abstract': True, |                 "abstract": True, | ||||||
|                 'max_documents': None, |                 "max_documents": None, | ||||||
|                 'max_size': None, |                 "max_size": None, | ||||||
|                 'ordering': [],  # default ordering applied at runtime |                 "ordering": [],  # default ordering applied at runtime | ||||||
|                 'indexes': [],  # indexes to be ensured at runtime |                 "indexes": [],  # indexes to be ensured at runtime | ||||||
|                 'id_field': None, |                 "id_field": None, | ||||||
|                 'index_background': False, |                 "index_background": False, | ||||||
|                 'index_drop_dups': False, |                 "index_opts": None, | ||||||
|                 'index_opts': None, |                 "delete_rules": None, | ||||||
|                 'delete_rules': None, |                 # allow_inheritance can be True, False, and None. True means | ||||||
|                 'allow_inheritance': None, |                 # "allow inheritance", False means "don't allow inheritance", | ||||||
|  |                 # None means "do whatever your parent does, or don't allow | ||||||
|  |                 # inheritance if you're a top-level class". | ||||||
|  |                 "allow_inheritance": None, | ||||||
|             } |             } | ||||||
|             attrs['_is_base_cls'] = True |             attrs["_is_base_cls"] = True | ||||||
|             attrs['_meta'].update(attrs.get('meta', {})) |             attrs["_meta"].update(attrs.get("meta", {})) | ||||||
|         else: |         else: | ||||||
|             attrs['_meta'] = attrs.get('meta', {}) |             attrs["_meta"] = attrs.get("meta", {}) | ||||||
|             # Explictly set abstract to false unless set |             # Explicitly set abstract to false unless set | ||||||
|             attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False) |             attrs["_meta"]["abstract"] = attrs["_meta"].get("abstract", False) | ||||||
|             attrs['_is_base_cls'] = False |             attrs["_is_base_cls"] = False | ||||||
|  |  | ||||||
|         # Set flag marking as document class - as opposed to an object mixin |         # Set flag marking as document class - as opposed to an object mixin | ||||||
|         attrs['_is_document'] = True |         attrs["_is_document"] = True | ||||||
|  |  | ||||||
|         # Ensure queryset_class is inherited |         # Ensure queryset_class is inherited | ||||||
|         if 'objects' in attrs: |         if "objects" in attrs: | ||||||
|             manager = attrs['objects'] |             manager = attrs["objects"] | ||||||
|             if hasattr(manager, 'queryset_class'): |             if hasattr(manager, "queryset_class"): | ||||||
|                 attrs['_meta']['queryset_class'] = manager.queryset_class |                 attrs["_meta"]["queryset_class"] = manager.queryset_class | ||||||
|  |  | ||||||
|         # Clean up top level meta |         # Clean up top level meta | ||||||
|         if 'meta' in attrs: |         if "meta" in attrs: | ||||||
|             del(attrs['meta']) |             del attrs["meta"] | ||||||
|  |  | ||||||
|         # Find the parent document class |         # Find the parent document class | ||||||
|         parent_doc_cls = [b for b in flattened_bases |         parent_doc_cls = [ | ||||||
|                           if b.__class__ == TopLevelDocumentMetaclass] |             b for b in flattened_bases if b.__class__ == TopLevelDocumentMetaclass | ||||||
|  |         ] | ||||||
|         parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0] |         parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0] | ||||||
|  |  | ||||||
|         # Prevent classes setting collection different to their parents |         # Prevent classes setting collection different to their parents | ||||||
|         # If parent wasn't an abstract class |         # If parent wasn't an abstract class | ||||||
|         if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) |         if ( | ||||||
|                 and not parent_doc_cls._meta.get('abstract', True)): |             parent_doc_cls | ||||||
|  |             and "collection" in attrs.get("_meta", {}) | ||||||
|  |             and not parent_doc_cls._meta.get("abstract", True) | ||||||
|  |         ): | ||||||
|             msg = "Trying to set a collection on a subclass (%s)" % name |             msg = "Trying to set a collection on a subclass (%s)" % name | ||||||
|             warnings.warn(msg, SyntaxWarning) |             warnings.warn(msg, SyntaxWarning) | ||||||
|             del(attrs['_meta']['collection']) |             del attrs["_meta"]["collection"] | ||||||
|  |  | ||||||
|         # Ensure abstract documents have abstract bases |         # Ensure abstract documents have abstract bases | ||||||
|         if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'): |         if attrs.get("_is_base_cls") or attrs["_meta"].get("abstract"): | ||||||
|             if (parent_doc_cls and |             if parent_doc_cls and not parent_doc_cls._meta.get("abstract", False): | ||||||
|                     not parent_doc_cls._meta.get('abstract', False)): |  | ||||||
|                 msg = "Abstract document cannot have non-abstract base" |                 msg = "Abstract document cannot have non-abstract base" | ||||||
|                 raise ValueError(msg) |                 raise ValueError(msg) | ||||||
|             return super_new(cls, name, bases, attrs) |             return super_new(mcs, name, bases, attrs) | ||||||
|  |  | ||||||
|         # Merge base class metas. |         # Merge base class metas. | ||||||
|         # Uses a special MetaDict that handles various merging rules |         # Uses a special MetaDict that handles various merging rules | ||||||
|         meta = MetaDict() |         meta = MetaDict() | ||||||
|         for base in flattened_bases[::-1]: |         for base in flattened_bases[::-1]: | ||||||
|             # Add any mixin metadata from plain objects |             # Add any mixin metadata from plain objects | ||||||
|             if hasattr(base, 'meta'): |             if hasattr(base, "meta"): | ||||||
|                 meta.merge(base.meta) |                 meta.merge(base.meta) | ||||||
|             elif hasattr(base, '_meta'): |             elif hasattr(base, "_meta"): | ||||||
|                 meta.merge(base._meta) |                 meta.merge(base._meta) | ||||||
|  |  | ||||||
|             # Set collection in the meta if its callable |             # Set collection in the meta if its callable | ||||||
|             if (getattr(base, '_is_document', False) and |             if getattr(base, "_is_document", False) and not base._meta.get("abstract"): | ||||||
|                     not base._meta.get('abstract')): |                 collection = meta.get("collection", None) | ||||||
|                 collection = meta.get('collection', None) |  | ||||||
|                 if callable(collection): |                 if callable(collection): | ||||||
|                     meta['collection'] = collection(base) |                     meta["collection"] = collection(base) | ||||||
|  |  | ||||||
|         meta.merge(attrs.get('_meta', {}))  # Top level meta |         meta.merge(attrs.get("_meta", {}))  # Top level meta | ||||||
|  |  | ||||||
|         # Only simple classes (direct subclasses of Document) |         # Only simple classes (i.e. direct subclasses of Document) may set | ||||||
|         # may set allow_inheritance to False |         # allow_inheritance to False. If the base Document allows inheritance, | ||||||
|         simple_class = all([b._meta.get('abstract') |         # none of its subclasses can override allow_inheritance to False. | ||||||
|                             for b in flattened_bases if hasattr(b, '_meta')]) |         simple_class = all( | ||||||
|         if (not simple_class and meta['allow_inheritance'] is False and |             b._meta.get("abstract") for b in flattened_bases if hasattr(b, "_meta") | ||||||
|                 not meta['abstract']): |         ) | ||||||
|             raise ValueError('Only direct subclasses of Document may set ' |         if ( | ||||||
|                              '"allow_inheritance" to False') |             not simple_class | ||||||
|  |             and meta["allow_inheritance"] is False | ||||||
|  |             and not meta["abstract"] | ||||||
|  |         ): | ||||||
|  |             raise ValueError( | ||||||
|  |                 "Only direct subclasses of Document may set " | ||||||
|  |                 '"allow_inheritance" to False' | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         # Set default collection name |         # Set default collection name | ||||||
|         if 'collection' not in meta: |         if "collection" not in meta: | ||||||
|             meta['collection'] = ''.join('_%s' % c if c.isupper() else c |             meta["collection"] = ( | ||||||
|                                          for c in name).strip('_').lower() |                 "".join("_%s" % c if c.isupper() else c for c in name) | ||||||
|         attrs['_meta'] = meta |                 .strip("_") | ||||||
|  |                 .lower() | ||||||
|  |             ) | ||||||
|  |         attrs["_meta"] = meta | ||||||
|  |  | ||||||
|         # Call super and get the new class |         # Call super and get the new class | ||||||
|         new_class = super_new(cls, name, bases, attrs) |         new_class = super_new(mcs, name, bases, attrs) | ||||||
|  |  | ||||||
|         meta = new_class._meta |         meta = new_class._meta | ||||||
|  |  | ||||||
|         # Set index specifications |         # Set index specifications | ||||||
|         meta['index_specs'] = new_class._build_index_specs(meta['indexes']) |         meta["index_specs"] = new_class._build_index_specs(meta["indexes"]) | ||||||
|  |  | ||||||
|         # If collection is a callable - call it and set the value |         # If collection is a callable - call it and set the value | ||||||
|         collection = meta.get('collection') |         collection = meta.get("collection") | ||||||
|         if callable(collection): |         if callable(collection): | ||||||
|             new_class._meta['collection'] = collection(new_class) |             new_class._meta["collection"] = collection(new_class) | ||||||
|  |  | ||||||
|         # Provide a default queryset unless exists or one has been set |         # Provide a default queryset unless exists or one has been set | ||||||
|         if 'objects' not in dir(new_class): |         if "objects" not in dir(new_class): | ||||||
|             new_class.objects = QuerySetManager() |             new_class.objects = QuerySetManager() | ||||||
|  |  | ||||||
|         # Validate the fields and set primary key if needed |         # Validate the fields and set primary key if needed | ||||||
|         for field_name, field in new_class._fields.iteritems(): |         for field_name, field in new_class._fields.items(): | ||||||
|             if field.primary_key: |             if field.primary_key: | ||||||
|                 # Ensure only one primary key is set |                 # Ensure only one primary key is set | ||||||
|                 current_pk = new_class._meta.get('id_field') |                 current_pk = new_class._meta.get("id_field") | ||||||
|                 if current_pk and current_pk != field_name: |                 if current_pk and current_pk != field_name: | ||||||
|                     raise ValueError('Cannot override primary key field') |                     raise ValueError("Cannot override primary key field") | ||||||
|  |  | ||||||
|                 # Set primary key |                 # Set primary key | ||||||
|                 if not current_pk: |                 if not current_pk: | ||||||
|                     new_class._meta['id_field'] = field_name |                     new_class._meta["id_field"] = field_name | ||||||
|                     new_class.id = field |                     new_class.id = field | ||||||
|  |  | ||||||
|         # Set primary key if not defined by the document |         # If the document doesn't explicitly define a primary key field, create | ||||||
|         new_class._auto_id_field = getattr(parent_doc_cls, |         # one. Make it an ObjectIdField and give it a non-clashing name ("id" | ||||||
|                                            '_auto_id_field', False) |         # by default, but can be different if that one's taken). | ||||||
|         if not new_class._meta.get('id_field'): |         if not new_class._meta.get("id_field"): | ||||||
|             new_class._auto_id_field = True |             id_name, id_db_name = mcs.get_auto_id_names(new_class) | ||||||
|             new_class._meta['id_field'] = 'id' |             new_class._meta["id_field"] = id_name | ||||||
|             new_class._fields['id'] = ObjectIdField(db_field='_id') |             new_class._fields[id_name] = ObjectIdField(db_field=id_db_name) | ||||||
|             new_class._fields['id'].name = 'id' |             new_class._fields[id_name].name = id_name | ||||||
|             new_class.id = new_class._fields['id'] |             new_class.id = new_class._fields[id_name] | ||||||
|  |             new_class._db_field_map[id_name] = id_db_name | ||||||
|  |             new_class._reverse_db_field_map[id_db_name] = id_name | ||||||
|  |  | ||||||
|         # Prepend id field to _fields_ordered |             # Prepend the ID field to _fields_ordered (so that it's *always* | ||||||
|         if 'id' in new_class._fields and 'id' not in new_class._fields_ordered: |             # the first field). | ||||||
|             new_class._fields_ordered = ('id', ) + new_class._fields_ordered |             new_class._fields_ordered = (id_name,) + new_class._fields_ordered | ||||||
|  |  | ||||||
|         # Merge in exceptions with parent hierarchy |         # Merge in exceptions with parent hierarchy. | ||||||
|         exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) |         exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) | ||||||
|         module = attrs.get('__module__') |         module = attrs.get("__module__") | ||||||
|         for exc in exceptions_to_merge: |         for exc in exceptions_to_merge: | ||||||
|             name = exc.__name__ |             name = exc.__name__ | ||||||
|             parents = tuple(getattr(base, name) for base in flattened_bases |             parents = tuple( | ||||||
|                             if hasattr(base, name)) or (exc,) |                 getattr(base, name) for base in flattened_bases if hasattr(base, name) | ||||||
|             # Create new exception and set to new_class |             ) or (exc,) | ||||||
|             exception = type(name, parents, {'__module__': module}) |  | ||||||
|  |             # Create a new exception and set it as an attribute on the new | ||||||
|  |             # class. | ||||||
|  |             exception = type(name, parents, {"__module__": module}) | ||||||
|             setattr(new_class, name, exception) |             setattr(new_class, name, exception) | ||||||
|  |  | ||||||
|         return new_class |         return new_class | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def get_auto_id_names(mcs, new_class): | ||||||
|  |         """Find a name for the automatic ID field for the given new class. | ||||||
|  |  | ||||||
|  |         Return a two-element tuple where the first item is the field name (i.e. | ||||||
|  |         the attribute name on the object) and the second element is the DB | ||||||
|  |         field name (i.e. the name of the key stored in MongoDB). | ||||||
|  |  | ||||||
|  |         Defaults to ('id', '_id'), or generates a non-clashing name in the form | ||||||
|  |         of ('auto_id_X', '_auto_id_X') if the default name is already taken. | ||||||
|  |         """ | ||||||
|  |         id_name, id_db_name = ("id", "_id") | ||||||
|  |         existing_fields = {field_name for field_name in new_class._fields} | ||||||
|  |         existing_db_fields = {v.db_field for v in new_class._fields.values()} | ||||||
|  |         if id_name not in existing_fields and id_db_name not in existing_db_fields: | ||||||
|  |             return id_name, id_db_name | ||||||
|  |  | ||||||
|  |         id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0) | ||||||
|  |         for i in itertools.count(): | ||||||
|  |             id_name = f"{id_basename}_{i}" | ||||||
|  |             id_db_name = f"{id_db_basename}_{i}" | ||||||
|  |             if id_name not in existing_fields and id_db_name not in existing_db_fields: | ||||||
|  |                 return id_name, id_db_name | ||||||
|  |  | ||||||
|  |  | ||||||
| class MetaDict(dict): | class MetaDict(dict): | ||||||
|  |  | ||||||
|     """Custom dictionary for meta classes. |     """Custom dictionary for meta classes. | ||||||
|     Handles the merging of set indexes |     Handles the merging of set indexes | ||||||
|     """ |     """ | ||||||
|     _merge_options = ('indexes',) |  | ||||||
|  |     _merge_options = ("indexes",) | ||||||
|  |  | ||||||
|     def merge(self, new_options): |     def merge(self, new_options): | ||||||
|         for k, v in new_options.iteritems(): |         for k, v in new_options.items(): | ||||||
|             if k in self._merge_options: |             if k in self._merge_options: | ||||||
|                 self[k] = self.get(k, []) + v |                 self[k] = self.get(k, []) + v | ||||||
|             else: |             else: | ||||||
| @@ -426,6 +464,6 @@ class MetaDict(dict): | |||||||
|  |  | ||||||
|  |  | ||||||
| class BasesTuple(tuple): | class BasesTuple(tuple): | ||||||
|  |  | ||||||
|     """Special class to handle introspection of bases tuple in __new__""" |     """Special class to handle introspection of bases tuple in __new__""" | ||||||
|  |  | ||||||
|     pass |     pass | ||||||
|   | |||||||
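A behavioural addition in this file is get_auto_id_names(): instead of always claiming 'id'/'_id' for the automatic primary key, the metaclass now searches for a pair that does not clash with user-defined fields. Below is a standalone sketch of that fallback loop, with a plain dict standing in for new_class._fields (the function name and sample data are illustrative):

import itertools


# `fields` maps attribute names to db_field names, standing in for _fields.
def pick_auto_id_names(fields):
    """Return a (field_name, db_field_name) pair not already used by `fields`."""
    existing = set(fields)
    existing_db = set(fields.values())
    if "id" not in existing and "_id" not in existing_db:
        return "id", "_id"
    for i in itertools.count():
        name, db_name = f"auto_id_{i}", f"_auto_id_{i}"
        if name not in existing and db_name not in existing_db:
            return name, db_name


print(pick_auto_id_names({"title": "title"}))           # ('id', '_id')
print(pick_auto_id_names({"id": "id_str"}))             # ('auto_id_0', '_auto_id_0')
print(pick_auto_id_names({"id": "id_str",
                          "auto_id_0": "_auto_id_0"}))  # ('auto_id_1', '_auto_id_1')

In the metaclass itself the chosen pair is then wired into _fields, _db_field_map, _reverse_db_field_map, and prepended to _fields_ordered, exactly where the hard-coded 'id' used to go.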
							
								
								
									
22  mongoengine/base/utils.py  Normal file
							| @@ -0,0 +1,22 @@ | |||||||
|  | import re | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class LazyRegexCompiler: | ||||||
|  |     """Descriptor to allow lazy compilation of regex""" | ||||||
|  |  | ||||||
|  |     def __init__(self, pattern, flags=0): | ||||||
|  |         self._pattern = pattern | ||||||
|  |         self._flags = flags | ||||||
|  |         self._compiled_regex = None | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def compiled_regex(self): | ||||||
|  |         if self._compiled_regex is None: | ||||||
|  |             self._compiled_regex = re.compile(self._pattern, self._flags) | ||||||
|  |         return self._compiled_regex | ||||||
|  |  | ||||||
|  |     def __get__(self, instance, owner): | ||||||
|  |         return self.compiled_regex | ||||||
|  |  | ||||||
|  |     def __set__(self, instance, value): | ||||||
|  |         raise AttributeError("Can not set attribute LazyRegexCompiler") | ||||||
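LazyRegexCompiler above is a data descriptor: a pattern stored as a class attribute is compiled only on first access, and because __set__ raises, it cannot be shadowed on instances. A small usage sketch follows; the SlugValidator class and its pattern are invented for illustration, only the import path comes from the new file:

import re

from mongoengine.base.utils import LazyRegexCompiler


class SlugValidator:
    # Compiled lazily on first attribute access; shared by the class and all instances.
    SLUG_REGEX = LazyRegexCompiler(r"^[a-z0-9]+(?:-[a-z0-9]+)*$", re.IGNORECASE)

    def is_valid(self, value):
        return bool(self.SLUG_REGEX.match(value))


v = SlugValidator()
print(v.is_valid("hello-world"))   # True
print(v.is_valid("Hello World!"))  # False
# v.SLUG_REGEX = re.compile("x")   # would raise AttributeError (read-only descriptor)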
| @@ -19,35 +19,44 @@ def _import_class(cls_name): | |||||||
|     if cls_name in _class_registry_cache: |     if cls_name in _class_registry_cache: | ||||||
|         return _class_registry_cache.get(cls_name) |         return _class_registry_cache.get(cls_name) | ||||||
|  |  | ||||||
|     doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument', |     doc_classes = ( | ||||||
|                    'MapReduceDocument') |         "Document", | ||||||
|  |         "DynamicEmbeddedDocument", | ||||||
|  |         "EmbeddedDocument", | ||||||
|  |         "MapReduceDocument", | ||||||
|  |     ) | ||||||
|  |  | ||||||
|     # Field Classes |     # Field Classes | ||||||
|     if not _field_list_cache: |     if not _field_list_cache: | ||||||
|         from mongoengine.fields import __all__ as fields |         from mongoengine.fields import __all__ as fields | ||||||
|  |  | ||||||
|         _field_list_cache.extend(fields) |         _field_list_cache.extend(fields) | ||||||
|         from mongoengine.base.fields import __all__ as fields |         from mongoengine.base.fields import __all__ as fields | ||||||
|  |  | ||||||
|         _field_list_cache.extend(fields) |         _field_list_cache.extend(fields) | ||||||
|  |  | ||||||
|     field_classes = _field_list_cache |     field_classes = _field_list_cache | ||||||
|  |  | ||||||
|     queryset_classes = ('OperationError',) |     deref_classes = ("DeReference",) | ||||||
|     deref_classes = ('DeReference',) |  | ||||||
|  |  | ||||||
|     if cls_name in doc_classes: |     if cls_name == "BaseDocument": | ||||||
|  |         from mongoengine.base import document as module | ||||||
|  |  | ||||||
|  |         import_classes = ["BaseDocument"] | ||||||
|  |     elif cls_name in doc_classes: | ||||||
|         from mongoengine import document as module |         from mongoengine import document as module | ||||||
|  |  | ||||||
|         import_classes = doc_classes |         import_classes = doc_classes | ||||||
|     elif cls_name in field_classes: |     elif cls_name in field_classes: | ||||||
|         from mongoengine import fields as module |         from mongoengine import fields as module | ||||||
|  |  | ||||||
|         import_classes = field_classes |         import_classes = field_classes | ||||||
|     elif cls_name in queryset_classes: |  | ||||||
|         from mongoengine import queryset as module |  | ||||||
|         import_classes = queryset_classes |  | ||||||
|     elif cls_name in deref_classes: |     elif cls_name in deref_classes: | ||||||
|         from mongoengine import dereference as module |         from mongoengine import dereference as module | ||||||
|  |  | ||||||
|         import_classes = deref_classes |         import_classes = deref_classes | ||||||
|     else: |     else: | ||||||
|         raise ValueError('No import set for: ' % cls_name) |         raise ValueError("No import set for: %s" % cls_name) | ||||||
|  |  | ||||||
|     for cls in import_classes: |     for cls in import_classes: | ||||||
|         _class_registry_cache[cls] = getattr(module, cls) |         _class_registry_cache[cls] = getattr(module, cls) | ||||||
|   | |||||||
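_import_class() is how MongoEngine sidesteps circular imports: callers such as the metaclass above (_import_class("StringField")) ask for a class by name at call time, and the result is memoized in _class_registry_cache. Below is a stripped-down sketch of the same pattern using standard-library classes; the mapping and names are illustrative, not MongoEngine's registry:

import importlib

_class_registry_cache = {}

# Which module provides which class -- illustrative mapping only.
_CLASS_TO_MODULE = {
    "OrderedDict": "collections",
    "Decimal": "decimal",
}


def _import_class(cls_name):
    """Resolve a class by name at call time and memoize the result."""
    if cls_name in _class_registry_cache:
        return _class_registry_cache[cls_name]
    try:
        module_name = _CLASS_TO_MODULE[cls_name]
    except KeyError:
        raise ValueError("No import set for: %s" % cls_name)
    module = importlib.import_module(module_name)
    _class_registry_cache[cls_name] = getattr(module, cls_name)
    return _class_registry_cache[cls_name]


Decimal = _import_class("Decimal")
print(Decimal("1.5") + Decimal("0.5"))  # 2.0; later lookups hit the cache

Deferring the import until the first lookup is what lets mongoengine.base, mongoengine.document, and mongoengine.fields refer to each other's classes without importing one another at module load time.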
| @@ -1,146 +1,368 @@ | |||||||
| import pymongo | from pymongo import MongoClient, ReadPreference, uri_parser | ||||||
| from pymongo import MongoClient, MongoReplicaSetClient, uri_parser | from pymongo.database import _check_name | ||||||
|  |  | ||||||
|  | __all__ = [ | ||||||
|  |     "DEFAULT_CONNECTION_NAME", | ||||||
|  |     "DEFAULT_DATABASE_NAME", | ||||||
|  |     "ConnectionFailure", | ||||||
|  |     "connect", | ||||||
|  |     "disconnect", | ||||||
|  |     "disconnect_all", | ||||||
|  |     "get_connection", | ||||||
|  |     "get_db", | ||||||
|  |     "register_connection", | ||||||
|  | ] | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ['ConnectionError', 'connect', 'register_connection', | DEFAULT_CONNECTION_NAME = "default" | ||||||
|            'DEFAULT_CONNECTION_NAME'] | DEFAULT_DATABASE_NAME = "test" | ||||||
|  | DEFAULT_HOST = "localhost" | ||||||
|  | DEFAULT_PORT = 27017 | ||||||
| DEFAULT_CONNECTION_NAME = 'default' |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ConnectionError(Exception): |  | ||||||
|     pass |  | ||||||
|  |  | ||||||
|  |  | ||||||
| _connection_settings = {} | _connection_settings = {} | ||||||
| _connections = {} | _connections = {} | ||||||
| _dbs = {} | _dbs = {} | ||||||
|  |  | ||||||
|  | READ_PREFERENCE = ReadPreference.PRIMARY | ||||||
|  |  | ||||||
| def register_connection(alias, name=None, host=None, port=None, |  | ||||||
|                         read_preference=False, |  | ||||||
|                         username=None, password=None, authentication_source=None, |  | ||||||
|                         **kwargs): |  | ||||||
|     """Add a connection. |  | ||||||
|  |  | ||||||
|     :param alias: the name that will be used to refer to this connection |  | ||||||
|         throughout MongoEngine |  | ||||||
|     :param name: the name of the specific database to use |  | ||||||
|     :param host: the host name of the :program:`mongod` instance to connect to |  | ||||||
|     :param port: the port that the :program:`mongod` instance is running on |  | ||||||
|     :param read_preference: The read preference for the collection |  | ||||||
|        ** Added pymongo 2.1 |  | ||||||
|     :param username: username to authenticate with |  | ||||||
|     :param password: password to authenticate with |  | ||||||
|     :param authentication_source: database to authenticate against |  | ||||||
|     :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver |  | ||||||
|  |  | ||||||
|  | class ConnectionFailure(Exception): | ||||||
|  |     """Error raised when the database connection can't be established or | ||||||
|  |     when a connection with a requested alias can't be retrieved. | ||||||
|     """ |     """ | ||||||
|     global _connection_settings |  | ||||||
|  |  | ||||||
|  |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def _check_db_name(name): | ||||||
|  |     """Check if a database name is valid. | ||||||
|  |     This functionality is copied from the pymongo Database class constructor. | ||||||
|  |     """ | ||||||
|  |     if not isinstance(name, str): | ||||||
|  |         raise TypeError("name must be an instance of %s" % str) | ||||||
|  |     elif name != "$external": | ||||||
|  |         _check_name(name) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def _get_connection_settings( | ||||||
|  |     db=None, | ||||||
|  |     name=None, | ||||||
|  |     host=None, | ||||||
|  |     port=None, | ||||||
|  |     read_preference=READ_PREFERENCE, | ||||||
|  |     username=None, | ||||||
|  |     password=None, | ||||||
|  |     authentication_source=None, | ||||||
|  |     authentication_mechanism=None, | ||||||
|  |     **kwargs, | ||||||
|  | ): | ||||||
|  |     """Get the connection settings as a dict | ||||||
|  |  | ||||||
|  |     :param db: the name of the database to use, for compatibility with connect | ||||||
|  |     :param name: the name of the specific database to use | ||||||
|  |     :param host: the host name of the :program:`mongod` instance to connect to | ||||||
|  |     :param port: the port that the :program:`mongod` instance is running on | ||||||
|  |     :param read_preference: The read preference for the collection | ||||||
|  |     :param username: username to authenticate with | ||||||
|  |     :param password: password to authenticate with | ||||||
|  |     :param authentication_source: database to authenticate against | ||||||
|  |     :param authentication_mechanism: database authentication mechanisms. | ||||||
|  |         By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, | ||||||
|  |         MONGODB-CR (MongoDB Challenge Response protocol) for older servers. | ||||||
|  |     :param is_mock: explicitly use mongomock for this connection | ||||||
|  |         (can also be done by using `mongomock://` as db host prefix) | ||||||
|  |     :param kwargs: ad-hoc parameters to be passed into the pymongo driver, | ||||||
|  |         for example maxpoolsize, tz_aware, etc. See the documentation | ||||||
|  |         for pymongo's `MongoClient` for a full list. | ||||||
|  |     """ | ||||||
|     conn_settings = { |     conn_settings = { | ||||||
|         'name': name or 'test', |         "name": name or db or DEFAULT_DATABASE_NAME, | ||||||
|         'host': host or 'localhost', |         "host": host or DEFAULT_HOST, | ||||||
|         'port': port or 27017, |         "port": port or DEFAULT_PORT, | ||||||
|         'read_preference': read_preference, |         "read_preference": read_preference, | ||||||
|         'username': username, |         "username": username, | ||||||
|         'password': password, |         "password": password, | ||||||
|         'authentication_source': authentication_source |         "authentication_source": authentication_source, | ||||||
|  |         "authentication_mechanism": authentication_mechanism, | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     # Handle uri style connections |     _check_db_name(conn_settings["name"]) | ||||||
|     if "://" in conn_settings['host']: |     conn_host = conn_settings["host"] | ||||||
|         uri_dict = uri_parser.parse_uri(conn_settings['host']) |  | ||||||
|         conn_settings.update({ |     # Host can be a list or a string, so if string, force to a list. | ||||||
|             'name': uri_dict.get('database') or name, |     if isinstance(conn_host, str): | ||||||
|             'username': uri_dict.get('username'), |         conn_host = [conn_host] | ||||||
|             'password': uri_dict.get('password'), |  | ||||||
|             'read_preference': read_preference, |     resolved_hosts = [] | ||||||
|         }) |     for entity in conn_host: | ||||||
|         if "replicaSet" in conn_settings['host']: |  | ||||||
|             conn_settings['replicaSet'] = True |         # Handle Mongomock | ||||||
|  |         if entity.startswith("mongomock://"): | ||||||
|  |             conn_settings["is_mock"] = True | ||||||
|  |             # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://` | ||||||
|  |             new_entity = entity.replace("mongomock://", "mongodb://", 1) | ||||||
|  |             resolved_hosts.append(new_entity) | ||||||
|  |  | ||||||
|  |             uri_dict = uri_parser.parse_uri(new_entity) | ||||||
|  |  | ||||||
|  |             database = uri_dict.get("database") | ||||||
|  |             if database: | ||||||
|  |                 conn_settings["name"] = database | ||||||
|  |  | ||||||
|  |         # Handle URI style connections, only updating connection params which | ||||||
|  |         # were explicitly specified in the URI. | ||||||
|  |         elif "://" in entity: | ||||||
|  |             uri_dict = uri_parser.parse_uri(entity) | ||||||
|  |             resolved_hosts.append(entity) | ||||||
|  |  | ||||||
|  |             database = uri_dict.get("database") | ||||||
|  |             if database: | ||||||
|  |                 conn_settings["name"] = database | ||||||
|  |  | ||||||
|  |             for param in ("read_preference", "username", "password"): | ||||||
|  |                 if uri_dict.get(param): | ||||||
|  |                     conn_settings[param] = uri_dict[param] | ||||||
|  |  | ||||||
|  |             uri_options = uri_dict["options"] | ||||||
|  |             if "replicaset" in uri_options: | ||||||
|  |                 conn_settings["replicaSet"] = uri_options["replicaset"] | ||||||
|  |             if "authsource" in uri_options: | ||||||
|  |                 conn_settings["authentication_source"] = uri_options["authsource"] | ||||||
|  |             if "authmechanism" in uri_options: | ||||||
|  |                 conn_settings["authentication_mechanism"] = uri_options["authmechanism"] | ||||||
|  |             if "readpreference" in uri_options: | ||||||
|  |                 read_preferences = ( | ||||||
|  |                     ReadPreference.NEAREST, | ||||||
|  |                     ReadPreference.PRIMARY, | ||||||
|  |                     ReadPreference.PRIMARY_PREFERRED, | ||||||
|  |                     ReadPreference.SECONDARY, | ||||||
|  |                     ReadPreference.SECONDARY_PREFERRED, | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |                 # Starting with PyMongo v3.5, the "readpreference" option is | ||||||
|  |                 # returned as a string (e.g. "secondaryPreferred") and not an | ||||||
|  |                 # int (e.g. 3). | ||||||
|  |                 # TODO simplify the code below once we drop support for | ||||||
|  |                 # PyMongo v3.4. | ||||||
|  |                 read_pf_mode = uri_options["readpreference"] | ||||||
|  |                 if isinstance(read_pf_mode, str): | ||||||
|  |                     read_pf_mode = read_pf_mode.lower() | ||||||
|  |                 for preference in read_preferences: | ||||||
|  |                     if ( | ||||||
|  |                         preference.name.lower() == read_pf_mode | ||||||
|  |                         or preference.mode == read_pf_mode | ||||||
|  |                     ): | ||||||
|  |                         conn_settings["read_preference"] = preference | ||||||
|  |                         break | ||||||
|  |         else: | ||||||
|  |             resolved_hosts.append(entity) | ||||||
|  |     conn_settings["host"] = resolved_hosts | ||||||
|  |  | ||||||
|     # Deprecated parameters that should not be passed on |     # Deprecated parameters that should not be passed on | ||||||
|     kwargs.pop('slaves', None) |     kwargs.pop("slaves", None) | ||||||
|     kwargs.pop('is_slave', None) |     kwargs.pop("is_slave", None) | ||||||
|  |  | ||||||
|     conn_settings.update(kwargs) |     conn_settings.update(kwargs) | ||||||
|  |     return conn_settings | ||||||
|  |  | ||||||
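To make the resolution above concrete, here is a minimal sketch that calls the private helper directly (normally you would go through register_connection or connect); the URI, credentials and database name are invented for illustration, and only keys the code above clearly sets are inspected.

    from mongoengine.connection import _get_connection_settings

    settings = _get_connection_settings(
        host="mongodb://report_user:secret@db1.example.com:27017/reports?replicaSet=rs0"
    )
    print(settings["name"])            # "reports" -- parsed from the URI path
    print(settings["host"])            # a list containing the original URI
    print(settings.get("replicaSet"))  # "rs0" -- lifted from the URI options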
|  |  | ||||||
|  | def register_connection( | ||||||
|  |     alias, | ||||||
|  |     db=None, | ||||||
|  |     name=None, | ||||||
|  |     host=None, | ||||||
|  |     port=None, | ||||||
|  |     read_preference=READ_PREFERENCE, | ||||||
|  |     username=None, | ||||||
|  |     password=None, | ||||||
|  |     authentication_source=None, | ||||||
|  |     authentication_mechanism=None, | ||||||
|  |     **kwargs, | ||||||
|  | ): | ||||||
|  |     """Register the connection settings. | ||||||
|  |  | ||||||
|  |     :param alias: the name that will be used to refer to this connection | ||||||
|  |         throughout MongoEngine | ||||||
|  |     :param db: the name of the database to use, for compatibility with connect | ||||||
|  |     :param name: the name of the specific database to use | ||||||
|  |     :param host: the host name of the :program:`mongod` instance to connect to | ||||||
|  |     :param port: the port that the :program:`mongod` instance is running on | ||||||
|  |     :param read_preference: The read preference for the collection | ||||||
|  |     :param username: username to authenticate with | ||||||
|  |     :param password: password to authenticate with | ||||||
|  |     :param authentication_source: database to authenticate against | ||||||
|  |     :param authentication_mechanism: database authentication mechanisms. | ||||||
|  |         By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, | ||||||
|  |         MONGODB-CR (MongoDB Challenge Response protocol) for older servers. | ||||||
|  |     :param is_mock: explicitly use mongomock for this connection | ||||||
|  |         (can also be done by using `mongomock://` as db host prefix) | ||||||
|  |     :param kwargs: ad-hoc parameters to be passed into the pymongo driver, | ||||||
|  |         for example maxpoolsize, tz_aware, etc. See the documentation | ||||||
|  |         for pymongo's `MongoClient` for a full list. | ||||||
|  |     """ | ||||||
|  |     conn_settings = _get_connection_settings( | ||||||
|  |         db=db, | ||||||
|  |         name=name, | ||||||
|  |         host=host, | ||||||
|  |         port=port, | ||||||
|  |         read_preference=read_preference, | ||||||
|  |         username=username, | ||||||
|  |         password=password, | ||||||
|  |         authentication_source=authentication_source, | ||||||
|  |         authentication_mechanism=authentication_mechanism, | ||||||
|  |         **kwargs, | ||||||
|  |     ) | ||||||
|     _connection_settings[alias] = conn_settings |     _connection_settings[alias] = conn_settings | ||||||
|  |  | ||||||
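A minimal usage sketch, with made-up alias and database names: register_connection only records the settings, and the underlying MongoClient is created lazily on first use by get_connection/get_db.

    from mongoengine.connection import get_db, register_connection

    register_connection("analytics", db="analytics_db", host="localhost", port=27017)

    db = get_db("analytics")   # connects on first use and returns a pymongo Database
    print(db.name)             # "analytics_db"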
|  |  | ||||||
| def disconnect(alias=DEFAULT_CONNECTION_NAME): | def disconnect(alias=DEFAULT_CONNECTION_NAME): | ||||||
|     global _connections |     """Close the connection with a given alias.""" | ||||||
|     global _dbs |     from mongoengine import Document | ||||||
|  |     from mongoengine.base.common import _get_documents_by_db | ||||||
|  |  | ||||||
|     if alias in _connections: |     if alias in _connections: | ||||||
|         get_connection(alias=alias).disconnect() |         get_connection(alias=alias).close() | ||||||
|         del _connections[alias] |         del _connections[alias] | ||||||
|  |  | ||||||
|     if alias in _dbs: |     if alias in _dbs: | ||||||
|  |         # Detach all cached collections in Documents | ||||||
|  |         for doc_cls in _get_documents_by_db(alias, DEFAULT_CONNECTION_NAME): | ||||||
|  |             if issubclass(doc_cls, Document):  # Skip EmbeddedDocument | ||||||
|  |                 doc_cls._disconnect() | ||||||
|  |  | ||||||
|         del _dbs[alias] |         del _dbs[alias] | ||||||
|  |  | ||||||
|  |     if alias in _connection_settings: | ||||||
|  |         del _connection_settings[alias] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def disconnect_all(): | ||||||
|  |     """Close all registered database.""" | ||||||
|  |     for alias in list(_connections.keys()): | ||||||
|  |         disconnect(alias) | ||||||
|  |  | ||||||
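For example, a test suite might tear everything down like this (the aliases and database names are made up); note that disconnect also drops the registered settings, so an alias must be registered or connected again before reuse.

    from mongoengine.connection import connect, disconnect, disconnect_all

    connect("db_a", alias="a")
    connect("db_b", alias="b")

    disconnect("a")    # closes the client and forgets the settings for "a"
    disconnect_all()   # closes whatever is left ("b" in this sketch)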
|  |  | ||||||
| def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||||
|     global _connections |     """Return a connection with a given alias.""" | ||||||
|  |  | ||||||
|     # Connect to the database if not already connected |     # Connect to the database if not already connected | ||||||
|     if reconnect: |     if reconnect: | ||||||
|         disconnect(alias) |         disconnect(alias) | ||||||
|  |  | ||||||
|     if alias not in _connections: |     # If the requested alias already exists in the _connections list, return | ||||||
|         if alias not in _connection_settings: |     # it immediately. | ||||||
|  |     if alias in _connections: | ||||||
|  |         return _connections[alias] | ||||||
|  |  | ||||||
|  |     # Validate that the requested alias exists in the _connection_settings. | ||||||
|  |     # Raise ConnectionFailure if it doesn't. | ||||||
|  |     if alias not in _connection_settings: | ||||||
|  |         if alias == DEFAULT_CONNECTION_NAME: | ||||||
|  |             msg = "You have not defined a default connection" | ||||||
|  |         else: | ||||||
|             msg = 'Connection with alias "%s" has not been defined' % alias |             msg = 'Connection with alias "%s" has not been defined' % alias | ||||||
|             if alias == DEFAULT_CONNECTION_NAME: |         raise ConnectionFailure(msg) | ||||||
|                 msg = 'You have not defined a default connection' |  | ||||||
|             raise ConnectionError(msg) |  | ||||||
|         conn_settings = _connection_settings[alias].copy() |  | ||||||
|  |  | ||||||
|         conn_settings.pop('name', None) |     def _clean_settings(settings_dict): | ||||||
|         conn_settings.pop('username', None) |         irrelevant_fields_set = { | ||||||
|         conn_settings.pop('password', None) |             "name", | ||||||
|         conn_settings.pop('authentication_source', None) |             "username", | ||||||
|  |             "password", | ||||||
|  |             "authentication_source", | ||||||
|  |             "authentication_mechanism", | ||||||
|  |         } | ||||||
|  |         return { | ||||||
|  |             k: v for k, v in settings_dict.items() if k not in irrelevant_fields_set | ||||||
|  |         } | ||||||
|  |  | ||||||
|         connection_class = MongoClient |     raw_conn_settings = _connection_settings[alias].copy() | ||||||
|         if 'replicaSet' in conn_settings: |  | ||||||
|             conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) |  | ||||||
|             # Discard port since it can't be used on MongoReplicaSetClient |  | ||||||
|             conn_settings.pop('port', None) |  | ||||||
|             # Discard replicaSet if not base string |  | ||||||
|             if not isinstance(conn_settings['replicaSet'], basestring): |  | ||||||
|                 conn_settings.pop('replicaSet', None) |  | ||||||
|             connection_class = MongoReplicaSetClient |  | ||||||
|  |  | ||||||
|  |     # Retrieve a copy of the connection settings associated with the requested | ||||||
|  |     # alias and remove the database name and authentication info (we don't | ||||||
|  |     # care about them at this point). | ||||||
|  |     conn_settings = _clean_settings(raw_conn_settings) | ||||||
|  |  | ||||||
|  |     # Determine if we should use PyMongo's or mongomock's MongoClient. | ||||||
|  |     is_mock = conn_settings.pop("is_mock", False) | ||||||
|  |     if is_mock: | ||||||
|         try: |         try: | ||||||
|             connection = None |             import mongomock | ||||||
|             # check for shared connections |         except ImportError: | ||||||
|             connection_settings_iterator = ((db_alias, settings.copy()) for db_alias, settings in _connection_settings.iteritems()) |             raise RuntimeError("You need mongomock installed to mock MongoEngine.") | ||||||
|             for db_alias, connection_settings in connection_settings_iterator: |         connection_class = mongomock.MongoClient | ||||||
|                 connection_settings.pop('name', None) |     else: | ||||||
|                 connection_settings.pop('username', None) |         connection_class = MongoClient | ||||||
|                 connection_settings.pop('password', None) |  | ||||||
|                 if conn_settings == connection_settings and _connections.get(db_alias, None): |  | ||||||
|                     connection = _connections[db_alias] |  | ||||||
|                     break |  | ||||||
|  |  | ||||||
|             _connections[alias] = connection if connection else connection_class(**conn_settings) |     # Re-use existing connection if one is suitable. | ||||||
|         except Exception, e: |     existing_connection = _find_existing_connection(raw_conn_settings) | ||||||
|             raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e)) |     if existing_connection: | ||||||
|  |         connection = existing_connection | ||||||
|  |     else: | ||||||
|  |         connection = _create_connection( | ||||||
|  |             alias=alias, connection_class=connection_class, **conn_settings | ||||||
|  |         ) | ||||||
|  |     _connections[alias] = connection | ||||||
|     return _connections[alias] |     return _connections[alias] | ||||||
|  |  | ||||||
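One practical consequence of the is_mock branch above: pointing the host at `mongomock://` swaps the real client for mongomock's, which is a common pattern for unit tests. A short sketch, assuming the optional mongomock package is installed (the database name is made up):

    from mongoengine import connect, disconnect

    connect("unit_test_db", host="mongomock://localhost")
    # ... run code that saves and queries documents against the in-memory client ...
    disconnect()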
|  |  | ||||||
|  | def _create_connection(alias, connection_class, **connection_settings): | ||||||
|  |     """ | ||||||
|  |     Create the new connection for this alias. Raise | ||||||
|  |     ConnectionFailure if it can't be established. | ||||||
|  |     """ | ||||||
|  |     try: | ||||||
|  |         return connection_class(**connection_settings) | ||||||
|  |     except Exception as e: | ||||||
|  |         raise ConnectionFailure(f"Cannot connect to database {alias} :\n{e}") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def _find_existing_connection(connection_settings): | ||||||
|  |     """ | ||||||
|  |     Check if an existing connection could be reused. | ||||||
|  |  | ||||||
|  |     Iterate over all of the connection settings and, if an existing connection | ||||||
|  |     with the same parameters is suitable, return it. | ||||||
|  |  | ||||||
|  |     :param connection_settings: the settings of the new connection | ||||||
|  |     :return: An existing connection or None | ||||||
|  |     """ | ||||||
|  |     connection_settings_bis = ( | ||||||
|  |         (db_alias, settings.copy()) | ||||||
|  |         for db_alias, settings in _connection_settings.items() | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     def _clean_settings(settings_dict): | ||||||
|  |         # Only remove the name, but keep the | ||||||
|  |         # username/password/authentication_source/authentication_mechanism | ||||||
|  |         # to identify if the connection could be shared (cf. https://github.com/MongoEngine/mongoengine/issues/2047) | ||||||
|  |         return {k: v for k, v in settings_dict.items() if k != "name"} | ||||||
|  |  | ||||||
|  |     cleaned_conn_settings = _clean_settings(connection_settings) | ||||||
|  |     for db_alias, connection_settings in connection_settings_bis: | ||||||
|  |         db_conn_settings = _clean_settings(connection_settings) | ||||||
|  |         if cleaned_conn_settings == db_conn_settings and _connections.get(db_alias): | ||||||
|  |             return _connections[db_alias] | ||||||
|  |  | ||||||
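A small sketch of the sharing rule above, with made-up aliases: two aliases that differ only in database name end up reusing a single client, while aliases with different credentials would not be shared (see the issue referenced in the comment above).

    from mongoengine.connection import get_connection, register_connection

    register_connection("users", name="users_db", host="localhost", port=27017)
    register_connection("orders", name="orders_db", host="localhost", port=27017)

    # Same host/port/credentials, different db name -> one shared MongoClient.
    assert get_connection("users") is get_connection("orders")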
|  |  | ||||||
| def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||||
|     global _dbs |  | ||||||
|     if reconnect: |     if reconnect: | ||||||
|         disconnect(alias) |         disconnect(alias) | ||||||
|  |  | ||||||
|     if alias not in _dbs: |     if alias not in _dbs: | ||||||
|         conn = get_connection(alias) |         conn = get_connection(alias) | ||||||
|         conn_settings = _connection_settings[alias] |         conn_settings = _connection_settings[alias] | ||||||
|         db = conn[conn_settings['name']] |         db = conn[conn_settings["name"]] | ||||||
|  |         auth_kwargs = {"source": conn_settings["authentication_source"]} | ||||||
|  |         if conn_settings["authentication_mechanism"] is not None: | ||||||
|  |             auth_kwargs["mechanism"] = conn_settings["authentication_mechanism"] | ||||||
|         # Authenticate if necessary |         # Authenticate if necessary | ||||||
|         if conn_settings['username'] and conn_settings['password']: |         if conn_settings["username"] and ( | ||||||
|             db.authenticate(conn_settings['username'], |             conn_settings["password"] | ||||||
|                             conn_settings['password'], |             or conn_settings["authentication_mechanism"] == "MONGODB-X509" | ||||||
|                             source=conn_settings['authentication_source']) |         ): | ||||||
|  |             db.authenticate( | ||||||
|  |                 conn_settings["username"], conn_settings["password"], **auth_kwargs | ||||||
|  |             ) | ||||||
|         _dbs[alias] = db |         _dbs[alias] = db | ||||||
|     return _dbs[alias] |     return _dbs[alias] | ||||||
|  |  | ||||||
| @@ -152,13 +374,26 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): | |||||||
|     running on the default port on localhost. If authentication is needed, |     running on the default port on localhost. If authentication is needed, | ||||||
|     provide username and password arguments as well. |     provide username and password arguments as well. | ||||||
|  |  | ||||||
|     Multiple databases are supported by using aliases.  Provide a separate |     Multiple databases are supported by using aliases. Provide a separate | ||||||
|     `alias` to connect to a different instance of :program:`mongod`. |     `alias` to connect to a different instance of :program:`mongod`. | ||||||
|  |  | ||||||
|     .. versionchanged:: 0.6 - added multiple database support. |     In order to replace a connection identified by a given alias, you'll | ||||||
|  |     need to call ``disconnect`` first. | ||||||
|  |  | ||||||
|  |     See the docstring for `register_connection` for more details about all | ||||||
|  |     supported kwargs. | ||||||
|     """ |     """ | ||||||
|     global _connections |     if alias in _connections: | ||||||
|     if alias not in _connections: |         prev_conn_setting = _connection_settings[alias] | ||||||
|  |         new_conn_settings = _get_connection_settings(db, **kwargs) | ||||||
|  |  | ||||||
|  |         if new_conn_settings != prev_conn_setting: | ||||||
|  |             err_msg = ( | ||||||
|  |                 "A different connection with alias `{}` was already " | ||||||
|  |                 "registered. Use disconnect() first" | ||||||
|  |             ).format(alias) | ||||||
|  |             raise ConnectionFailure(err_msg) | ||||||
|  |     else: | ||||||
|         register_connection(alias, db, **kwargs) |         register_connection(alias, db, **kwargs) | ||||||
|  |  | ||||||
|     return get_connection(alias) |     return get_connection(alias) | ||||||
|   | |||||||
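A short sketch of the guard above, with made-up database names: reconnecting under the same alias with different settings now raises instead of silently keeping the first connection.

    from mongoengine.connection import ConnectionFailure, connect, disconnect

    connect("blog")              # registers and opens the default alias
    try:
        connect("blog_test")     # same alias, different database
    except ConnectionFailure:
        disconnect()             # drop the old alias first
        connect("blog_test")     # now accepted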
| @@ -1,13 +1,25 @@ | |||||||
|  | from contextlib import contextmanager | ||||||
|  |  | ||||||
|  | from pymongo.read_concern import ReadConcern | ||||||
|  | from pymongo.write_concern import WriteConcern | ||||||
|  |  | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||||
|  | from mongoengine.pymongo_support import count_documents | ||||||
|  |  | ||||||
|  | __all__ = ( | ||||||
|  |     "switch_db", | ||||||
|  |     "switch_collection", | ||||||
|  |     "no_dereference", | ||||||
|  |     "no_sub_classes", | ||||||
|  |     "query_counter", | ||||||
|  |     "set_write_concern", | ||||||
|  |     "set_read_write_concern", | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ("switch_db", "switch_collection", "no_dereference", | class switch_db: | ||||||
|            "no_sub_classes", "query_counter") |     """switch_db alias context manager. | ||||||
|  |  | ||||||
|  |  | ||||||
| class switch_db(object): |  | ||||||
|     """ switch_db alias context manager. |  | ||||||
|  |  | ||||||
|     Example :: |     Example :: | ||||||
|  |  | ||||||
| @@ -18,15 +30,14 @@ class switch_db(object): | |||||||
|         class Group(Document): |         class Group(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         Group(name="test").save()  # Saves in the default db |         Group(name='test').save()  # Saves in the default db | ||||||
|  |  | ||||||
|         with switch_db(Group, 'testdb-1') as Group: |         with switch_db(Group, 'testdb-1') as Group: | ||||||
|             Group(name="hello testdb!").save()  # Saves in testdb-1 |             Group(name='hello testdb!').save()  # Saves in testdb-1 | ||||||
|  |  | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def __init__(self, cls, db_alias): |     def __init__(self, cls, db_alias): | ||||||
|         """ Construct the switch_db context manager |         """Construct the switch_db context manager | ||||||
|  |  | ||||||
|         :param cls: the class to change the registered db |         :param cls: the class to change the registered db | ||||||
|         :param db_alias: the name of the specific database to use |         :param db_alias: the name of the specific database to use | ||||||
| @@ -37,34 +48,33 @@ class switch_db(object): | |||||||
|         self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME) |         self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME) | ||||||
|  |  | ||||||
|     def __enter__(self): |     def __enter__(self): | ||||||
|         """ change the db_alias and clear the cached collection """ |         """Change the db_alias and clear the cached collection.""" | ||||||
|         self.cls._meta["db_alias"] = self.db_alias |         self.cls._meta["db_alias"] = self.db_alias | ||||||
|         self.cls._collection = None |         self.cls._collection = None | ||||||
|         return self.cls |         return self.cls | ||||||
|  |  | ||||||
|     def __exit__(self, t, value, traceback): |     def __exit__(self, t, value, traceback): | ||||||
|         """ Reset the db_alias and collection """ |         """Reset the db_alias and collection.""" | ||||||
|         self.cls._meta["db_alias"] = self.ori_db_alias |         self.cls._meta["db_alias"] = self.ori_db_alias | ||||||
|         self.cls._collection = self.collection |         self.cls._collection = self.collection | ||||||
|  |  | ||||||
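One detail the docstring example leaves implicit: the alias passed to switch_db must already be registered. A minimal sketch, assuming a local mongod; the alias and database names are made up.

    from mongoengine import Document, StringField, connect
    from mongoengine.connection import register_connection
    from mongoengine.context_managers import switch_db

    connect("main_db")                                  # default alias
    register_connection("archive", name="archive_db")   # the alias switch_db will use

    class Group(Document):
        name = StringField()

    with switch_db(Group, "archive") as Group:
        Group(name="old group").save()   # stored in archive_db, not main_db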
|  |  | ||||||
| class switch_collection(object): | class switch_collection: | ||||||
|     """ switch_collection alias context manager. |     """switch_collection alias context manager. | ||||||
|  |  | ||||||
|     Example :: |     Example :: | ||||||
|  |  | ||||||
|         class Group(Document): |         class Group(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         Group(name="test").save()  # Saves in the default db |         Group(name='test').save()  # Saves in the default db | ||||||
|  |  | ||||||
|         with switch_collection(Group, 'group1') as Group: |         with switch_collection(Group, 'group1') as Group: | ||||||
|             Group(name="hello testdb!").save()  # Saves in group1 collection |             Group(name='hello testdb!').save()  # Saves in group1 collection | ||||||
|  |  | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def __init__(self, cls, collection_name): |     def __init__(self, cls, collection_name): | ||||||
|         """ Construct the switch_collection context manager |         """Construct the switch_collection context manager. | ||||||
|  |  | ||||||
|         :param cls: the class to change the registered db |         :param cls: the class to change the registered db | ||||||
|         :param collection_name: the name of the collection to use |         :param collection_name: the name of the collection to use | ||||||
| @@ -75,7 +85,7 @@ class switch_collection(object): | |||||||
|         self.collection_name = collection_name |         self.collection_name = collection_name | ||||||
|  |  | ||||||
|     def __enter__(self): |     def __enter__(self): | ||||||
|         """ change the _get_collection_name and clear the cached collection """ |         """Change the _get_collection_name and clear the cached collection.""" | ||||||
|  |  | ||||||
|         @classmethod |         @classmethod | ||||||
|         def _get_collection_name(cls): |         def _get_collection_name(cls): | ||||||
| @@ -86,136 +96,182 @@ class switch_collection(object): | |||||||
|         return self.cls |         return self.cls | ||||||
|  |  | ||||||
|     def __exit__(self, t, value, traceback): |     def __exit__(self, t, value, traceback): | ||||||
|         """ Reset the collection """ |         """Reset the collection.""" | ||||||
|         self.cls._collection = self.ori_collection |         self.cls._collection = self.ori_collection | ||||||
|         self.cls._get_collection_name = self.ori_get_collection_name |         self.cls._get_collection_name = self.ori_get_collection_name | ||||||
|  |  | ||||||
|  |  | ||||||
| class no_dereference(object): | class no_dereference: | ||||||
|     """ no_dereference context manager. |     """no_dereference context manager. | ||||||
|  |  | ||||||
|     Turns off all dereferencing in Documents for the duration of the context |     Turns off all dereferencing in Documents for the duration of the context | ||||||
|     manager:: |     manager:: | ||||||
|  |  | ||||||
|         with no_dereference(Group) as Group: |         with no_dereference(Group) as Group: | ||||||
|             Group.objects.find() |             Group.objects.find() | ||||||
|  |  | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def __init__(self, cls): |     def __init__(self, cls): | ||||||
|         """ Construct the no_dereference context manager. |         """Construct the no_dereference context manager. | ||||||
|  |  | ||||||
|         :param cls: the class to turn dereferencing off on |         :param cls: the class to turn dereferencing off on | ||||||
|         """ |         """ | ||||||
|         self.cls = cls |         self.cls = cls | ||||||
|  |  | ||||||
|         ReferenceField = _import_class('ReferenceField') |         ReferenceField = _import_class("ReferenceField") | ||||||
|         GenericReferenceField = _import_class('GenericReferenceField') |         GenericReferenceField = _import_class("GenericReferenceField") | ||||||
|         ComplexBaseField = _import_class('ComplexBaseField') |         ComplexBaseField = _import_class("ComplexBaseField") | ||||||
|  |  | ||||||
|         self.deref_fields = [k for k, v in self.cls._fields.iteritems() |         self.deref_fields = [ | ||||||
|                              if isinstance(v, (ReferenceField, |             k | ||||||
|                                                GenericReferenceField, |             for k, v in self.cls._fields.items() | ||||||
|                                                ComplexBaseField))] |             if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField)) | ||||||
|  |         ] | ||||||
|  |  | ||||||
|     def __enter__(self): |     def __enter__(self): | ||||||
|         """ change the objects default and _auto_dereference values""" |         """Change the objects default and _auto_dereference values.""" | ||||||
|         for field in self.deref_fields: |         for field in self.deref_fields: | ||||||
|             self.cls._fields[field]._auto_dereference = False |             self.cls._fields[field]._auto_dereference = False | ||||||
|         return self.cls |         return self.cls | ||||||
|  |  | ||||||
|     def __exit__(self, t, value, traceback): |     def __exit__(self, t, value, traceback): | ||||||
|         """ Reset the default and _auto_dereference values""" |         """Reset the default and _auto_dereference values.""" | ||||||
|         for field in self.deref_fields: |         for field in self.deref_fields: | ||||||
|             self.cls._fields[field]._auto_dereference = True |             self.cls._fields[field]._auto_dereference = True | ||||||
|         return self.cls |         return self.cls | ||||||
|  |  | ||||||
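To make the field selection above concrete: only reference-type fields are affected, so inside the block a ReferenceField stays as the raw reference instead of triggering an extra query per access. A sketch assuming a local mongod; the model and database names are made up.

    from mongoengine import Document, ReferenceField, StringField, connect
    from mongoengine.context_managers import no_dereference

    connect("demo_db")

    class User(Document):
        name = StringField()

    class Org(Document):
        owner = ReferenceField(User)   # the only field no_dereference will touch

    with no_dereference(Org) as Org:
        for org in Org.objects:
            print(org.owner)   # not auto-dereferenced into a User instance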
|  |  | ||||||
| class no_sub_classes(object): | class no_sub_classes: | ||||||
|     """ no_sub_classes context manager. |     """no_sub_classes context manager. | ||||||
|  |  | ||||||
|     Only returns instances of this class and no sub (inherited) classes:: |     Only returns instances of this class and no sub (inherited) classes:: | ||||||
|  |  | ||||||
|         with no_sub_classes(Group) as Group: |         with no_sub_classes(Group) as Group: | ||||||
|             Group.objects.find() |             Group.objects.find() | ||||||
|  |  | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def __init__(self, cls): |     def __init__(self, cls): | ||||||
|         """ Construct the no_sub_classes context manager. |         """Construct the no_sub_classes context manager. | ||||||
|  |  | ||||||
|         :param cls: the class to turn querying sub classes on |         :param cls: the class to turn querying sub classes on | ||||||
|         """ |         """ | ||||||
|         self.cls = cls |         self.cls = cls | ||||||
|  |         self.cls_initial_subclasses = None | ||||||
|  |  | ||||||
|     def __enter__(self): |     def __enter__(self): | ||||||
|         """ change the objects default and _auto_dereference values""" |         """Change the objects default and _auto_dereference values.""" | ||||||
|         self.cls._all_subclasses = self.cls._subclasses |         self.cls_initial_subclasses = self.cls._subclasses | ||||||
|         self.cls._subclasses = (self.cls,) |         self.cls._subclasses = (self.cls._class_name,) | ||||||
|         return self.cls |         return self.cls | ||||||
|  |  | ||||||
|     def __exit__(self, t, value, traceback): |     def __exit__(self, t, value, traceback): | ||||||
|         """ Reset the default and _auto_dereference values""" |         """Reset the default and _auto_dereference values.""" | ||||||
|         self.cls._subclasses = self.cls._all_subclasses |         self.cls._subclasses = self.cls_initial_subclasses | ||||||
|         delattr(self.cls, '_all_subclasses') |  | ||||||
|         return self.cls |  | ||||||
|  |  | ||||||
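A small sketch of the effect, assuming a local mongod (class and database names are made up): inside the block, queries on the base class no longer match documents saved as a subclass.

    from mongoengine import Document, connect
    from mongoengine.context_managers import no_sub_classes

    connect("shapes_db")

    class Shape(Document):
        meta = {"allow_inheritance": True}

    class Circle(Shape):
        pass

    Shape().save()
    Circle().save()

    with no_sub_classes(Shape) as Shape:
        print(Shape.objects.count())   # counts only the plain Shape document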
|  |  | ||||||
| class query_counter(object): | class query_counter: | ||||||
|     """ Query_counter context manager to get the number of queries. """ |     """Query_counter context manager to get the number of queries. | ||||||
|  |     This works by updating the `profiling_level` of the database so that all queries get logged, | ||||||
|  |     resetting the db.system.profile collection at the beginning of the context and counting the new entries. | ||||||
|  |  | ||||||
|     def __init__(self): |     This was designed for debugging purposes. Note that it is a global counter, so queries issued by other threads/processes | ||||||
|         """ Construct the query_counter. """ |     can interfere with it. | ||||||
|         self.counter = 0 |  | ||||||
|         self.db = get_db() |  | ||||||
|  |  | ||||||
|     def __enter__(self): |     Be aware that: | ||||||
|         """ On every with block we need to drop the profile collection. """ |     - Iterating over large amount of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of | ||||||
|  |         documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches) | ||||||
|  |     - Some queries are ignored by default by the counter (killcursors, db.system.indexes) | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     def __init__(self, alias=DEFAULT_CONNECTION_NAME): | ||||||
|  |         """Construct the query_counter""" | ||||||
|  |         self.db = get_db(alias=alias) | ||||||
|  |         self.initial_profiling_level = None | ||||||
|  |         self._ctx_query_counter = 0  # number of queries issued by the context | ||||||
|  |  | ||||||
|  |         self._ignored_query = { | ||||||
|  |             "ns": {"$ne": "%s.system.indexes" % self.db.name}, | ||||||
|  |             "op": {"$ne": "killcursors"},  # MONGODB < 3.2 | ||||||
|  |             "command.killCursors": {"$exists": False},  # MONGODB >= 3.2 | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |     def _turn_on_profiling(self): | ||||||
|  |         self.initial_profiling_level = self.db.profiling_level() | ||||||
|         self.db.set_profiling_level(0) |         self.db.set_profiling_level(0) | ||||||
|         self.db.system.profile.drop() |         self.db.system.profile.drop() | ||||||
|         self.db.set_profiling_level(2) |         self.db.set_profiling_level(2) | ||||||
|  |  | ||||||
|  |     def _resets_profiling(self): | ||||||
|  |         self.db.set_profiling_level(self.initial_profiling_level) | ||||||
|  |  | ||||||
|  |     def __enter__(self): | ||||||
|  |         self._turn_on_profiling() | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def __exit__(self, t, value, traceback): |     def __exit__(self, t, value, traceback): | ||||||
|         """ Reset the profiling level. """ |         self._resets_profiling() | ||||||
|         self.db.set_profiling_level(0) |  | ||||||
|  |  | ||||||
|     def __eq__(self, value): |     def __eq__(self, value): | ||||||
|         """ == Compare querycounter. """ |  | ||||||
|         counter = self._get_count() |         counter = self._get_count() | ||||||
|         return value == counter |         return value == counter | ||||||
|  |  | ||||||
|     def __ne__(self, value): |     def __ne__(self, value): | ||||||
|         """ != Compare querycounter. """ |  | ||||||
|         return not self.__eq__(value) |         return not self.__eq__(value) | ||||||
|  |  | ||||||
|     def __lt__(self, value): |     def __lt__(self, value): | ||||||
|         """ < Compare querycounter. """ |  | ||||||
|         return self._get_count() < value |         return self._get_count() < value | ||||||
|  |  | ||||||
|     def __le__(self, value): |     def __le__(self, value): | ||||||
|         """ <= Compare querycounter. """ |  | ||||||
|         return self._get_count() <= value |         return self._get_count() <= value | ||||||
|  |  | ||||||
|     def __gt__(self, value): |     def __gt__(self, value): | ||||||
|         """ > Compare querycounter. """ |  | ||||||
|         return self._get_count() > value |         return self._get_count() > value | ||||||
|  |  | ||||||
|     def __ge__(self, value): |     def __ge__(self, value): | ||||||
|         """ >= Compare querycounter. """ |  | ||||||
|         return self._get_count() >= value |         return self._get_count() >= value | ||||||
|  |  | ||||||
|     def __int__(self): |     def __int__(self): | ||||||
|         """ int representation. """ |  | ||||||
|         return self._get_count() |         return self._get_count() | ||||||
|  |  | ||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
|         """ repr query_counter as the number of queries. """ |         """repr query_counter as the number of queries.""" | ||||||
|         return u"%s" % self._get_count() |         return "%s" % self._get_count() | ||||||
|  |  | ||||||
|     def _get_count(self): |     def _get_count(self): | ||||||
|         """ Get the number of queries. """ |         """Get the number of queries by counting the current number of entries in db.system.profile | ||||||
|         ignore_query = {"ns": {"$ne": "%s.system.indexes" % self.db.name}} |         and substracting the queries issued by this context. In fact everytime this is called, 1 query is | ||||||
|         count = self.db.system.profile.find(ignore_query).count() - self.counter |         issued so we need to balance that | ||||||
|         self.counter += 1 |         """ | ||||||
|  |         count = ( | ||||||
|  |             count_documents(self.db.system.profile, self._ignored_query) | ||||||
|  |             - self._ctx_query_counter | ||||||
|  |         ) | ||||||
|  |         self._ctx_query_counter += ( | ||||||
|  |             1  # Account for the query we just issued to gather the information | ||||||
|  |         ) | ||||||
|         return count |         return count | ||||||
|  |  | ||||||
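A usage sketch, assuming a reachable mongod and a default connection (the database and collection names are made up); the counter compensates for its own count queries, as explained in _get_count above.

    from mongoengine.connection import connect, get_db
    from mongoengine.context_managers import query_counter

    connect("profiling_db")
    db = get_db()

    with query_counter() as q:
        assert q == 0
        db.events.insert_one({"type": "login"})   # profiled write
        db.events.find_one({"type": "login"})     # profiled read
        assert q == 2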
|  |  | ||||||
|  | @contextmanager | ||||||
|  | def set_write_concern(collection, write_concerns): | ||||||
|  |     combined_concerns = dict(collection.write_concern.document.items()) | ||||||
|  |     combined_concerns.update(write_concerns) | ||||||
|  |     yield collection.with_options(write_concern=WriteConcern(**combined_concerns)) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @contextmanager | ||||||
|  | def set_read_write_concern(collection, write_concerns, read_concerns): | ||||||
|  |     combined_write_concerns = dict(collection.write_concern.document.items()) | ||||||
|  |  | ||||||
|  |     if write_concerns is not None: | ||||||
|  |         combined_write_concerns.update(write_concerns) | ||||||
|  |  | ||||||
|  |     combined_read_concerns = dict(collection.read_concern.document.items()) | ||||||
|  |  | ||||||
|  |     if read_concerns is not None: | ||||||
|  |         combined_read_concerns.update(read_concerns) | ||||||
|  |  | ||||||
|  |     yield collection.with_options( | ||||||
|  |         write_concern=WriteConcern(**combined_write_concerns), | ||||||
|  |         read_concern=ReadConcern(**combined_read_concerns), | ||||||
|  |     ) | ||||||
|   | |||||||
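Both helpers yield a copy of the collection with the merged options and leave the original collection object untouched. A sketch assuming a local mongod and a default connection (the collection and database names are made up):

    from mongoengine.connection import connect, get_db
    from mongoengine.context_managers import set_write_concern

    connect("demo_db")
    events = get_db()["events"]

    with set_write_concern(events, {"w": "majority"}) as acked_events:
        acked_events.insert_one({"type": "login"})   # acknowledged by a majority
    # `events` itself keeps its original write concern here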
| @@ -1,17 +1,25 @@ | |||||||
| from bson import DBRef, SON | from bson import SON, DBRef | ||||||
|  |  | ||||||
| from base import ( | from mongoengine.base import ( | ||||||
|     BaseDict, BaseList, EmbeddedDocumentList, |     BaseDict, | ||||||
|     TopLevelDocumentMetaclass, get_document |     BaseList, | ||||||
|  |     EmbeddedDocumentList, | ||||||
|  |     TopLevelDocumentMetaclass, | ||||||
|  |     get_document, | ||||||
| ) | ) | ||||||
| from fields import (ReferenceField, ListField, DictField, MapField) | from mongoengine.base.datastructures import LazyReference | ||||||
| from connection import get_db | from mongoengine.connection import get_db | ||||||
| from queryset import QuerySet | from mongoengine.document import Document, EmbeddedDocument | ||||||
| from document import Document, EmbeddedDocument | from mongoengine.fields import ( | ||||||
|  |     DictField, | ||||||
|  |     ListField, | ||||||
|  |     MapField, | ||||||
|  |     ReferenceField, | ||||||
|  | ) | ||||||
|  | from mongoengine.queryset import QuerySet | ||||||
|  |  | ||||||
|  |  | ||||||
| class DeReference(object): | class DeReference: | ||||||
|  |  | ||||||
|     def __call__(self, items, max_depth=1, instance=None, name=None): |     def __call__(self, items, max_depth=1, instance=None, name=None): | ||||||
|         """ |         """ | ||||||
|         Cheaply dereferences the items to a set depth. |         Cheaply dereferences the items to a set depth. | ||||||
| @@ -25,7 +33,7 @@ class DeReference(object): | |||||||
|             :class:`~mongoengine.base.ComplexBaseField` |             :class:`~mongoengine.base.ComplexBaseField` | ||||||
|         :param get: A boolean determining if being called by __get__ |         :param get: A boolean determining if being called by __get__ | ||||||
|         """ |         """ | ||||||
|         if items is None or isinstance(items, basestring): |         if items is None or isinstance(items, str): | ||||||
|             return items |             return items | ||||||
|  |  | ||||||
|         # cheapest way to convert a queryset to a list |         # cheapest way to convert a queryset to a list | ||||||
| @@ -36,43 +44,59 @@ class DeReference(object): | |||||||
|         self.max_depth = max_depth |         self.max_depth = max_depth | ||||||
|         doc_type = None |         doc_type = None | ||||||
|  |  | ||||||
|         if instance and isinstance(instance, (Document, EmbeddedDocument, |         if instance and isinstance( | ||||||
|                                               TopLevelDocumentMetaclass)): |             instance, (Document, EmbeddedDocument, TopLevelDocumentMetaclass) | ||||||
|  |         ): | ||||||
|             doc_type = instance._fields.get(name) |             doc_type = instance._fields.get(name) | ||||||
|             while hasattr(doc_type, 'field'): |             while hasattr(doc_type, "field"): | ||||||
|                 doc_type = doc_type.field |                 doc_type = doc_type.field | ||||||
|  |  | ||||||
|             if isinstance(doc_type, ReferenceField): |             if isinstance(doc_type, ReferenceField): | ||||||
|                 field = doc_type |                 field = doc_type | ||||||
|                 doc_type = doc_type.document_type |                 doc_type = doc_type.document_type | ||||||
|                 is_list = not hasattr(items, 'items') |                 is_list = not hasattr(items, "items") | ||||||
|  |  | ||||||
|                 if is_list and all([i.__class__ == doc_type for i in items]): |                 if is_list and all(i.__class__ == doc_type for i in items): | ||||||
|                     return items |                     return items | ||||||
|                 elif not is_list and all([i.__class__ == doc_type |                 elif not is_list and all( | ||||||
|                                          for i in items.values()]): |                     i.__class__ == doc_type for i in items.values() | ||||||
|  |                 ): | ||||||
|                     return items |                     return items | ||||||
|                 elif not field.dbref: |                 elif not field.dbref: | ||||||
|                     if not hasattr(items, 'items'): |                     # We must turn the ObjectIds into DBRefs | ||||||
|  |  | ||||||
|                         def _get_items(items): |                     # Recursively dig into the sub items of a list/dict | ||||||
|                             new_items = [] |                     # to turn the ObjectIds into DBRefs | ||||||
|                             for v in items: |                     def _get_items_from_list(items): | ||||||
|                                 if isinstance(v, list): |                         new_items = [] | ||||||
|                                     new_items.append(_get_items(v)) |                         for v in items: | ||||||
|                                 elif not isinstance(v, (DBRef, Document)): |                             value = v | ||||||
|                                     new_items.append(field.to_python(v)) |                             if isinstance(v, dict): | ||||||
|                                 else: |                                 value = _get_items_from_dict(v) | ||||||
|                                     new_items.append(v) |                             elif isinstance(v, list): | ||||||
|                             return new_items |                                 value = _get_items_from_list(v) | ||||||
|  |                             elif not isinstance(v, (DBRef, Document)): | ||||||
|  |                                 value = field.to_python(v) | ||||||
|  |                             new_items.append(value) | ||||||
|  |                         return new_items | ||||||
|  |  | ||||||
|                         items = _get_items(items) |                     def _get_items_from_dict(items): | ||||||
|  |                         new_items = {} | ||||||
|  |                         for k, v in items.items(): | ||||||
|  |                             value = v | ||||||
|  |                             if isinstance(v, list): | ||||||
|  |                                 value = _get_items_from_list(v) | ||||||
|  |                             elif isinstance(v, dict): | ||||||
|  |                                 value = _get_items_from_dict(v) | ||||||
|  |                             elif not isinstance(v, (DBRef, Document)): | ||||||
|  |                                 value = field.to_python(v) | ||||||
|  |                             new_items[k] = value | ||||||
|  |                         return new_items | ||||||
|  |  | ||||||
|  |                     if not hasattr(items, "items"): | ||||||
|  |                         items = _get_items_from_list(items) | ||||||
|                     else: |                     else: | ||||||
|                         items = dict([ |                         items = _get_items_from_dict(items) | ||||||
|                             (k, field.to_python(v)) |  | ||||||
|                             if not isinstance(v, (DBRef, Document)) else (k, v) |  | ||||||
|                             for k, v in items.iteritems()] |  | ||||||
|                         ) |  | ||||||
|  |  | ||||||
|         self.reference_map = self._find_references(items) |         self.reference_map = self._find_references(items) | ||||||
|         self.object_map = self._fetch_objects(doc_type=doc_type) |         self.object_map = self._fetch_objects(doc_type=doc_type) | ||||||
| @@ -90,71 +114,97 @@ class DeReference(object): | |||||||
|             return reference_map |             return reference_map | ||||||
|  |  | ||||||
|         # Determine the iterator to use |         # Determine the iterator to use | ||||||
|         if not hasattr(items, 'items'): |         if isinstance(items, dict): | ||||||
|             iterator = enumerate(items) |             iterator = items.values() | ||||||
|         else: |         else: | ||||||
|             iterator = items.iteritems() |             iterator = items | ||||||
|  |  | ||||||
|         # Recursively find dbreferences |         # Recursively find dbreferences | ||||||
|         depth += 1 |         depth += 1 | ||||||
|         for k, item in iterator: |         for item in iterator: | ||||||
|             if isinstance(item, (Document, EmbeddedDocument)): |             if isinstance(item, (Document, EmbeddedDocument)): | ||||||
|                 for field_name, field in item._fields.iteritems(): |                 for field_name, field in item._fields.items(): | ||||||
|                     v = item._data.get(field_name, None) |                     v = item._data.get(field_name, None) | ||||||
|                     if isinstance(v, (DBRef)): |                     if isinstance(v, LazyReference): | ||||||
|                         reference_map.setdefault(field.document_type, []).append(v.id) |                         # LazyReference inherits DBRef but should not be dereferenced here! | ||||||
|                     elif isinstance(v, (dict, SON)) and '_ref' in v: |                         continue | ||||||
|                         reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id) |                     elif isinstance(v, DBRef): | ||||||
|  |                         reference_map.setdefault(field.document_type, set()).add(v.id) | ||||||
|  |                     elif isinstance(v, (dict, SON)) and "_ref" in v: | ||||||
|  |                         reference_map.setdefault(get_document(v["_cls"]), set()).add( | ||||||
|  |                             v["_ref"].id | ||||||
|  |                         ) | ||||||
|                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: |                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None) |                         field_cls = getattr( | ||||||
|  |                             getattr(field, "field", None), "document_type", None | ||||||
|  |                         ) | ||||||
|                         references = self._find_references(v, depth) |                         references = self._find_references(v, depth) | ||||||
|                         for key, refs in references.iteritems(): |                         for key, refs in references.items(): | ||||||
|                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): |                             if isinstance( | ||||||
|  |                                 field_cls, (Document, TopLevelDocumentMetaclass) | ||||||
|  |                             ): | ||||||
|                                 key = field_cls |                                 key = field_cls | ||||||
|                             reference_map.setdefault(key, []).extend(refs) |                             reference_map.setdefault(key, set()).update(refs) | ||||||
|             elif isinstance(item, (DBRef)): |             elif isinstance(item, LazyReference): | ||||||
|                 reference_map.setdefault(item.collection, []).append(item.id) |                 # LazyReference inherits DBRef but should not be dereferenced here! | ||||||
|             elif isinstance(item, (dict, SON)) and '_ref' in item: |                 continue | ||||||
|                 reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id) |             elif isinstance(item, DBRef): | ||||||
|  |                 reference_map.setdefault(item.collection, set()).add(item.id) | ||||||
|  |             elif isinstance(item, (dict, SON)) and "_ref" in item: | ||||||
|  |                 reference_map.setdefault(get_document(item["_cls"]), set()).add( | ||||||
|  |                     item["_ref"].id | ||||||
|  |                 ) | ||||||
|             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: |             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: | ||||||
|                 references = self._find_references(item, depth - 1) |                 references = self._find_references(item, depth - 1) | ||||||
|                 for key, refs in references.iteritems(): |                 for key, refs in references.items(): | ||||||
|                     reference_map.setdefault(key, []).extend(refs) |                     reference_map.setdefault(key, set()).update(refs) | ||||||
|  |  | ||||||
|         return reference_map |         return reference_map | ||||||
|  |  | ||||||
|     def _fetch_objects(self, doc_type=None): |     def _fetch_objects(self, doc_type=None): | ||||||
|         """Fetch all references and convert to their document objects |         """Fetch all references and convert to their document objects""" | ||||||
|         """ |  | ||||||
|         object_map = {} |         object_map = {} | ||||||
|         for collection, dbrefs in self.reference_map.iteritems(): |         for collection, dbrefs in self.reference_map.items(): | ||||||
|             keys = object_map.keys() |  | ||||||
|             refs = list(set([dbref for dbref in dbrefs if unicode(dbref).encode('utf-8') not in keys])) |             # we use getattr instead of hasattr because hasattr swallows any exception under python2 | ||||||
|             if hasattr(collection, 'objects'):  # We have a document class for the refs |             # so it could hide nasty things without raising exceptions (cf. bug #1688) | ||||||
|  |             ref_document_cls_exists = getattr(collection, "objects", None) is not None | ||||||
|  |  | ||||||
|  |             if ref_document_cls_exists: | ||||||
|  |                 col_name = collection._get_collection_name() | ||||||
|  |                 refs = [ | ||||||
|  |                     dbref for dbref in dbrefs if (col_name, dbref) not in object_map | ||||||
|  |                 ] | ||||||
|                 references = collection.objects.in_bulk(refs) |                 references = collection.objects.in_bulk(refs) | ||||||
|                 for key, doc in references.iteritems(): |                 for key, doc in references.items(): | ||||||
|                     object_map[key] = doc |                     object_map[(col_name, key)] = doc | ||||||
|             else:  # Generic reference: use the refs data to convert to document |             else:  # Generic reference: use the refs data to convert to document | ||||||
|                 if isinstance(doc_type, (ListField, DictField, MapField,)): |                 if isinstance(doc_type, (ListField, DictField, MapField)): | ||||||
|                     continue |                     continue | ||||||
|  |  | ||||||
|  |                 refs = [ | ||||||
|  |                     dbref for dbref in dbrefs if (collection, dbref) not in object_map | ||||||
|  |                 ] | ||||||
|  |  | ||||||
|                 if doc_type: |                 if doc_type: | ||||||
|                     references = doc_type._get_db()[collection].find({'_id': {'$in': refs}}) |                     references = doc_type._get_db()[collection].find( | ||||||
|  |                         {"_id": {"$in": refs}} | ||||||
|  |                     ) | ||||||
|                     for ref in references: |                     for ref in references: | ||||||
|                         doc = doc_type._from_son(ref) |                         doc = doc_type._from_son(ref) | ||||||
|                         object_map[doc.id] = doc |                         object_map[(collection, doc.id)] = doc | ||||||
|                 else: |                 else: | ||||||
|                     references = get_db()[collection].find({'_id': {'$in': refs}}) |                     references = get_db()[collection].find({"_id": {"$in": refs}}) | ||||||
|                     for ref in references: |                     for ref in references: | ||||||
|                         if '_cls' in ref: |                         if "_cls" in ref: | ||||||
|                             doc = get_document(ref["_cls"])._from_son(ref) |                             doc = get_document(ref["_cls"])._from_son(ref) | ||||||
|                         elif doc_type is None: |                         elif doc_type is None: | ||||||
|                             doc = get_document( |                             doc = get_document( | ||||||
|                                 ''.join(x.capitalize() |                                 "".join(x.capitalize() for x in collection.split("_")) | ||||||
|                                     for x in collection.split('_')))._from_son(ref) |                             )._from_son(ref) | ||||||
|                         else: |                         else: | ||||||
|                             doc = doc_type._from_son(ref) |                             doc = doc_type._from_son(ref) | ||||||
|                         object_map[doc.id] = doc |                         object_map[(collection, doc.id)] = doc | ||||||
|         return object_map |         return object_map | ||||||
|  |  | ||||||
|     def _attach_objects(self, items, depth=0, instance=None, name=None): |     def _attach_objects(self, items, depth=0, instance=None, name=None): | ||||||
| @@ -179,18 +229,20 @@ class DeReference(object): | |||||||
|                     return BaseList(items, instance, name) |                     return BaseList(items, instance, name) | ||||||
|  |  | ||||||
|         if isinstance(items, (dict, SON)): |         if isinstance(items, (dict, SON)): | ||||||
|             if '_ref' in items: |             if "_ref" in items: | ||||||
|                 return self.object_map.get(items['_ref'].id, items) |                 return self.object_map.get( | ||||||
|             elif '_cls' in items: |                     (items["_ref"].collection, items["_ref"].id), items | ||||||
|                 doc = get_document(items['_cls'])._from_son(items) |                 ) | ||||||
|                 _cls = doc._data.pop('_cls', None) |             elif "_cls" in items: | ||||||
|                 del items['_cls'] |                 doc = get_document(items["_cls"])._from_son(items) | ||||||
|  |                 _cls = doc._data.pop("_cls", None) | ||||||
|  |                 del items["_cls"] | ||||||
|                 doc._data = self._attach_objects(doc._data, depth, doc, None) |                 doc._data = self._attach_objects(doc._data, depth, doc, None) | ||||||
|                 if _cls is not None: |                 if _cls is not None: | ||||||
|                     doc._data['_cls'] = _cls |                     doc._data["_cls"] = _cls | ||||||
|                 return doc |                 return doc | ||||||
|  |  | ||||||
|         if not hasattr(items, 'items'): |         if not hasattr(items, "items"): | ||||||
|             is_list = True |             is_list = True | ||||||
|             list_type = BaseList |             list_type = BaseList | ||||||
|             if isinstance(items, EmbeddedDocumentList): |             if isinstance(items, EmbeddedDocumentList): | ||||||
| @@ -200,7 +252,7 @@ class DeReference(object): | |||||||
|             data = [] |             data = [] | ||||||
|         else: |         else: | ||||||
|             is_list = False |             is_list = False | ||||||
|             iterator = items.iteritems() |             iterator = items.items() | ||||||
|             data = {} |             data = {} | ||||||
|  |  | ||||||
|         depth += 1 |         depth += 1 | ||||||
| @@ -213,21 +265,28 @@ class DeReference(object): | |||||||
|             if k in self.object_map and not is_list: |             if k in self.object_map and not is_list: | ||||||
|                 data[k] = self.object_map[k] |                 data[k] = self.object_map[k] | ||||||
|             elif isinstance(v, (Document, EmbeddedDocument)): |             elif isinstance(v, (Document, EmbeddedDocument)): | ||||||
|                 for field_name, field in v._fields.iteritems(): |                 for field_name in v._fields: | ||||||
|                     v = data[k]._data.get(field_name, None) |                     v = data[k]._data.get(field_name, None) | ||||||
|                     if isinstance(v, (DBRef)): |                     if isinstance(v, DBRef): | ||||||
|                         data[k]._data[field_name] = self.object_map.get(v.id, v) |                         data[k]._data[field_name] = self.object_map.get( | ||||||
|                     elif isinstance(v, (dict, SON)) and '_ref' in v: |                             (v.collection, v.id), v | ||||||
|                         data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v) |                         ) | ||||||
|                     elif isinstance(v, dict) and depth <= self.max_depth: |                     elif isinstance(v, (dict, SON)) and "_ref" in v: | ||||||
|                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) |                         data[k]._data[field_name] = self.object_map.get( | ||||||
|                     elif isinstance(v, (list, tuple)) and depth <= self.max_depth: |                             (v["_ref"].collection, v["_ref"].id), v | ||||||
|                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) |                         ) | ||||||
|  |                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|  |                         item_name = f"{name}.{k}.{field_name}" | ||||||
|  |                         data[k]._data[field_name] = self._attach_objects( | ||||||
|  |                             v, depth, instance=instance, name=item_name | ||||||
|  |                         ) | ||||||
|             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: |             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|                 item_name = '%s.%s' % (name, k) if name else name |                 item_name = f"{name}.{k}" if name else name | ||||||
|                 data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name) |                 data[k] = self._attach_objects( | ||||||
|             elif hasattr(v, 'id'): |                     v, depth - 1, instance=instance, name=item_name | ||||||
|                 data[k] = self.object_map.get(v.id, v) |                 ) | ||||||
|  |             elif isinstance(v, DBRef) and hasattr(v, "id"): | ||||||
|  |                 data[k] = self.object_map.get((v.collection, v.id), v) | ||||||
|  |  | ||||||
|         if instance and name: |         if instance and name: | ||||||
|             if is_list: |             if is_list: | ||||||
|   | |||||||
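The hunks above key object_map by (collection, id) tuples instead of bare document ids. A minimal sketch of the id collision this avoids, assuming pymongo's bson package is available; the author/book collection names are made up:

    from bson import DBRef, ObjectId

    shared_id = ObjectId()                      # the same ObjectId may appear in two collections
    author_ref = DBRef("author", shared_id)     # hypothetical collection names
    book_ref = DBRef("book", shared_id)

    object_map = {}
    object_map[(author_ref.collection, author_ref.id)] = "<Author document>"
    object_map[(book_ref.collection, book_ref.id)] = "<Book document>"

    # Keyed by id alone, the second assignment would silently overwrite the first.
    assert len(object_map) == 2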
| @@ -1,412 +0,0 @@ | |||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from django.utils.encoding import smart_str |  | ||||||
| from django.contrib.auth.models import _user_has_perm, _user_get_all_permissions, _user_has_module_perms |  | ||||||
| from django.db import models |  | ||||||
| from django.contrib.contenttypes.models import ContentTypeManager |  | ||||||
| from django.contrib import auth |  | ||||||
| from django.contrib.auth.models import AnonymousUser |  | ||||||
| from django.utils.translation import ugettext_lazy as _ |  | ||||||
|  |  | ||||||
| from .utils import datetime_now |  | ||||||
|  |  | ||||||
| REDIRECT_FIELD_NAME = 'next' |  | ||||||
|  |  | ||||||
| try: |  | ||||||
|     from django.contrib.auth.hashers import check_password, make_password |  | ||||||
| except ImportError: |  | ||||||
|     """Handle older versions of Django""" |  | ||||||
|     from django.utils.hashcompat import md5_constructor, sha_constructor |  | ||||||
|  |  | ||||||
|     def get_hexdigest(algorithm, salt, raw_password): |  | ||||||
|         raw_password, salt = smart_str(raw_password), smart_str(salt) |  | ||||||
|         if algorithm == 'md5': |  | ||||||
|             return md5_constructor(salt + raw_password).hexdigest() |  | ||||||
|         elif algorithm == 'sha1': |  | ||||||
|             return sha_constructor(salt + raw_password).hexdigest() |  | ||||||
|         raise ValueError('Got unknown password algorithm type in password') |  | ||||||
|  |  | ||||||
|     def check_password(raw_password, password): |  | ||||||
|         algo, salt, hash = password.split('$') |  | ||||||
|         return hash == get_hexdigest(algo, salt, raw_password) |  | ||||||
|  |  | ||||||
|     def make_password(raw_password): |  | ||||||
|         from random import random |  | ||||||
|         algo = 'sha1' |  | ||||||
|         salt = get_hexdigest(algo, str(random()), str(random()))[:5] |  | ||||||
|         hash = get_hexdigest(algo, salt, raw_password) |  | ||||||
|         return '%s$%s$%s' % (algo, salt, hash) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ContentType(Document): |  | ||||||
|     name = StringField(max_length=100) |  | ||||||
|     app_label = StringField(max_length=100) |  | ||||||
|     model = StringField(max_length=100, verbose_name=_('python model class name'), |  | ||||||
|                         unique_with='app_label') |  | ||||||
|     objects = ContentTypeManager() |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         verbose_name = _('content type') |  | ||||||
|         verbose_name_plural = _('content types') |  | ||||||
|         # db_table = 'django_content_type' |  | ||||||
|         # ordering = ('name',) |  | ||||||
|         # unique_together = (('app_label', 'model'),) |  | ||||||
|  |  | ||||||
|     def __unicode__(self): |  | ||||||
|         return self.name |  | ||||||
|  |  | ||||||
|     def model_class(self): |  | ||||||
|         "Returns the Python model class for this type of content." |  | ||||||
|         from django.db import models |  | ||||||
|         return models.get_model(self.app_label, self.model) |  | ||||||
|  |  | ||||||
|     def get_object_for_this_type(self, **kwargs): |  | ||||||
|         """ |  | ||||||
|         Returns an object of this type for the keyword arguments given. |  | ||||||
|         Basically, this is a proxy around this object_type's get_object() model |  | ||||||
|         method. The ObjectNotExist exception, if thrown, will not be caught, |  | ||||||
|         so code that calls this method should catch it. |  | ||||||
|         """ |  | ||||||
|         return self.model_class()._default_manager.using(self._state.db).get(**kwargs) |  | ||||||
|  |  | ||||||
|     def natural_key(self): |  | ||||||
|         return (self.app_label, self.model) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SiteProfileNotAvailable(Exception): |  | ||||||
|     pass |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class PermissionManager(models.Manager): |  | ||||||
|     def get_by_natural_key(self, codename, app_label, model): |  | ||||||
|         return self.get( |  | ||||||
|             codename=codename, |  | ||||||
|             content_type=ContentType.objects.get_by_natural_key(app_label, model) |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Permission(Document): |  | ||||||
|     """The permissions system provides a way to assign permissions to specific |  | ||||||
|     users and groups of users. |  | ||||||
|  |  | ||||||
|     The permission system is used by the Django admin site, but may also be |  | ||||||
|     useful in your own code. The Django admin site uses permissions as follows: |  | ||||||
|  |  | ||||||
|         - The "add" permission limits the user's ability to view the "add" |  | ||||||
|           form and add an object. |  | ||||||
|         - The "change" permission limits a user's ability to view the change |  | ||||||
|           list, view the "change" form and change an object. |  | ||||||
|         - The "delete" permission limits the ability to delete an object. |  | ||||||
|  |  | ||||||
|     Permissions are set globally per type of object, not per specific object |  | ||||||
|     instance. It is possible to say "Mary may change news stories," but it's |  | ||||||
|     not currently possible to say "Mary may change news stories, but only the |  | ||||||
|     ones she created herself" or "Mary may only change news stories that have |  | ||||||
|     a certain status or publication date." |  | ||||||
|  |  | ||||||
|     Three basic permissions -- add, change and delete -- are automatically |  | ||||||
|     created for each Django model. |  | ||||||
|     """ |  | ||||||
|     name = StringField(max_length=50, verbose_name=_('username')) |  | ||||||
|     content_type = ReferenceField(ContentType) |  | ||||||
|     codename = StringField(max_length=100, verbose_name=_('codename')) |  | ||||||
|         # FIXME: don't access field of the other class |  | ||||||
|         # unique_with=['content_type__app_label', 'content_type__model']) |  | ||||||
|  |  | ||||||
|     objects = PermissionManager() |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         verbose_name = _('permission') |  | ||||||
|         verbose_name_plural = _('permissions') |  | ||||||
|         # unique_together = (('content_type', 'codename'),) |  | ||||||
|         # ordering = ('content_type__app_label', 'content_type__model', 'codename') |  | ||||||
|  |  | ||||||
|     def __unicode__(self): |  | ||||||
|         return u"%s | %s | %s" % ( |  | ||||||
|             unicode(self.content_type.app_label), |  | ||||||
|             unicode(self.content_type), |  | ||||||
|             unicode(self.name)) |  | ||||||
|  |  | ||||||
|     def natural_key(self): |  | ||||||
|         return (self.codename,) + self.content_type.natural_key() |  | ||||||
|     natural_key.dependencies = ['contenttypes.contenttype'] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Group(Document): |  | ||||||
|     """Groups are a generic way of categorizing users to apply permissions, |  | ||||||
|     or some other label, to those users. A user can belong to any number of |  | ||||||
|     groups. |  | ||||||
|  |  | ||||||
|     A user in a group automatically has all the permissions granted to that |  | ||||||
|     group. For example, if the group Site editors has the permission |  | ||||||
|     can_edit_home_page, any user in that group will have that permission. |  | ||||||
|  |  | ||||||
|     Beyond permissions, groups are a convenient way to categorize users to |  | ||||||
|     apply some label, or extended functionality, to them. For example, you |  | ||||||
|     could create a group 'Special users', and you could write code that would |  | ||||||
|     do special things to those users -- such as giving them access to a |  | ||||||
|     members-only portion of your site, or sending them members-only |  | ||||||
|     e-mail messages. |  | ||||||
|     """ |  | ||||||
|     name = StringField(max_length=80, unique=True, verbose_name=_('name')) |  | ||||||
|     permissions = ListField(ReferenceField(Permission, verbose_name=_('permissions'), required=False)) |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         verbose_name = _('group') |  | ||||||
|         verbose_name_plural = _('groups') |  | ||||||
|  |  | ||||||
|     def __unicode__(self): |  | ||||||
|         return self.name |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UserManager(models.Manager): |  | ||||||
|     def create_user(self, username, email, password=None): |  | ||||||
|         """ |  | ||||||
|         Creates and saves a User with the given username, e-mail and password. |  | ||||||
|         """ |  | ||||||
|         now = datetime_now() |  | ||||||
|  |  | ||||||
|         # Normalize the address by lowercasing the domain part of the email |  | ||||||
|         # address. |  | ||||||
|         try: |  | ||||||
|             email_name, domain_part = email.strip().split('@', 1) |  | ||||||
|         except ValueError: |  | ||||||
|             pass |  | ||||||
|         else: |  | ||||||
|             email = '@'.join([email_name, domain_part.lower()]) |  | ||||||
|  |  | ||||||
|         user = self.model(username=username, email=email, is_staff=False, |  | ||||||
|                           is_active=True, is_superuser=False, last_login=now, |  | ||||||
|                           date_joined=now) |  | ||||||
|  |  | ||||||
|         user.set_password(password) |  | ||||||
|         user.save(using=self._db) |  | ||||||
|         return user |  | ||||||
|  |  | ||||||
|     def create_superuser(self, username, email, password): |  | ||||||
|         u = self.create_user(username, email, password) |  | ||||||
|         u.is_staff = True |  | ||||||
|         u.is_active = True |  | ||||||
|         u.is_superuser = True |  | ||||||
|         u.save(using=self._db) |  | ||||||
|         return u |  | ||||||
|  |  | ||||||
|     def make_random_password(self, length=10, allowed_chars='abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'): |  | ||||||
|         "Generates a random password with the given length and given allowed_chars" |  | ||||||
|         # Note that default value of allowed_chars does not have "I" or letters |  | ||||||
|         # that look like it -- just to avoid confusion. |  | ||||||
|         from random import choice |  | ||||||
|         return ''.join([choice(allowed_chars) for i in range(length)]) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class User(Document): |  | ||||||
|     """A User document that aims to mirror most of the API specified by Django |  | ||||||
|     at http://docs.djangoproject.com/en/dev/topics/auth/#users |  | ||||||
|     """ |  | ||||||
|     username = StringField(max_length=30, required=True, |  | ||||||
|                            verbose_name=_('username'), |  | ||||||
|                            help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters")) |  | ||||||
|  |  | ||||||
|     first_name = StringField(max_length=30, |  | ||||||
|                              verbose_name=_('first name')) |  | ||||||
|  |  | ||||||
|     last_name = StringField(max_length=30, |  | ||||||
|                             verbose_name=_('last name')) |  | ||||||
|     email = EmailField(verbose_name=_('e-mail address')) |  | ||||||
|     password = StringField(max_length=128, |  | ||||||
|                            verbose_name=_('password'), |  | ||||||
|                            help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>.")) |  | ||||||
|     is_staff = BooleanField(default=False, |  | ||||||
|                             verbose_name=_('staff status'), |  | ||||||
|                             help_text=_("Designates whether the user can log into this admin site.")) |  | ||||||
|     is_active = BooleanField(default=True, |  | ||||||
|                              verbose_name=_('active'), |  | ||||||
|                              help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts.")) |  | ||||||
|     is_superuser = BooleanField(default=False, |  | ||||||
|                                 verbose_name=_('superuser status'), |  | ||||||
|                                 help_text=_("Designates that this user has all permissions without explicitly assigning them.")) |  | ||||||
|     last_login = DateTimeField(default=datetime_now, |  | ||||||
|                                verbose_name=_('last login')) |  | ||||||
|     date_joined = DateTimeField(default=datetime_now, |  | ||||||
|                                 verbose_name=_('date joined')) |  | ||||||
|  |  | ||||||
|     user_permissions = ListField(ReferenceField(Permission), verbose_name=_('user permissions'), |  | ||||||
|                                                 help_text=_('Permissions for the user.')) |  | ||||||
|  |  | ||||||
|     USERNAME_FIELD = 'username' |  | ||||||
|     REQUIRED_FIELDS = ['email'] |  | ||||||
|  |  | ||||||
|     meta = { |  | ||||||
|         'allow_inheritance': True, |  | ||||||
|         'indexes': [ |  | ||||||
|             {'fields': ['username'], 'unique': True, 'sparse': True} |  | ||||||
|         ] |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     def __unicode__(self): |  | ||||||
|         return self.username |  | ||||||
|  |  | ||||||
|     def get_full_name(self): |  | ||||||
|         """Returns the users first and last names, separated by a space. |  | ||||||
|         """ |  | ||||||
|         full_name = u'%s %s' % (self.first_name or '', self.last_name or '') |  | ||||||
|         return full_name.strip() |  | ||||||
|  |  | ||||||
|     def is_anonymous(self): |  | ||||||
|         return False |  | ||||||
|  |  | ||||||
|     def is_authenticated(self): |  | ||||||
|         return True |  | ||||||
|  |  | ||||||
|     def set_password(self, raw_password): |  | ||||||
|         """Sets the user's password - always use this rather than directly |  | ||||||
|         assigning to :attr:`~mongoengine.django.auth.User.password` as the |  | ||||||
|         password is hashed before storage. |  | ||||||
|         """ |  | ||||||
|         self.password = make_password(raw_password) |  | ||||||
|         self.save() |  | ||||||
|         return self |  | ||||||
|  |  | ||||||
|     def check_password(self, raw_password): |  | ||||||
|         """Checks the user's password against a provided password - always use |  | ||||||
|         this rather than directly comparing to |  | ||||||
|         :attr:`~mongoengine.django.auth.User.password` as the password is |  | ||||||
|         hashed before storage. |  | ||||||
|         """ |  | ||||||
|         return check_password(raw_password, self.password) |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def create_user(cls, username, password, email=None): |  | ||||||
|         """Create (and save) a new user with the given username, password and |  | ||||||
|         email address. |  | ||||||
|         """ |  | ||||||
|         now = datetime_now() |  | ||||||
|  |  | ||||||
|         # Normalize the address by lowercasing the domain part of the email |  | ||||||
|         # address. |  | ||||||
|         if email is not None: |  | ||||||
|             try: |  | ||||||
|                 email_name, domain_part = email.strip().split('@', 1) |  | ||||||
|             except ValueError: |  | ||||||
|                 pass |  | ||||||
|             else: |  | ||||||
|                 email = '@'.join([email_name, domain_part.lower()]) |  | ||||||
|  |  | ||||||
|         user = cls(username=username, email=email, date_joined=now) |  | ||||||
|         user.set_password(password) |  | ||||||
|         user.save() |  | ||||||
|         return user |  | ||||||
|  |  | ||||||
|     def get_group_permissions(self, obj=None): |  | ||||||
|         """ |  | ||||||
|         Returns a list of permission strings that this user has through his/her |  | ||||||
|         groups. This method queries all available auth backends. If an object |  | ||||||
|         is passed in, only permissions matching this object are returned. |  | ||||||
|         """ |  | ||||||
|         permissions = set() |  | ||||||
|         for backend in auth.get_backends(): |  | ||||||
|             if hasattr(backend, "get_group_permissions"): |  | ||||||
|                 permissions.update(backend.get_group_permissions(self, obj)) |  | ||||||
|         return permissions |  | ||||||
|  |  | ||||||
|     def get_all_permissions(self, obj=None): |  | ||||||
|         return _user_get_all_permissions(self, obj) |  | ||||||
|  |  | ||||||
|     def has_perm(self, perm, obj=None): |  | ||||||
|         """ |  | ||||||
|         Returns True if the user has the specified permission. This method |  | ||||||
|         queries all available auth backends, but returns immediately if any |  | ||||||
|         backend returns True. Thus, a user who has permission from a single |  | ||||||
|         auth backend is assumed to have permission in general. If an object is |  | ||||||
|         provided, permissions for this specific object are checked. |  | ||||||
|         """ |  | ||||||
|  |  | ||||||
|         # Active superusers have all permissions. |  | ||||||
|         if self.is_active and self.is_superuser: |  | ||||||
|             return True |  | ||||||
|  |  | ||||||
|         # Otherwise we need to check the backends. |  | ||||||
|         return _user_has_perm(self, perm, obj) |  | ||||||
|  |  | ||||||
|     def has_module_perms(self, app_label): |  | ||||||
|         """ |  | ||||||
|         Returns True if the user has any permissions in the given app label. |  | ||||||
|         Uses pretty much the same logic as has_perm, above. |  | ||||||
|         """ |  | ||||||
|         # Active superusers have all permissions. |  | ||||||
|         if self.is_active and self.is_superuser: |  | ||||||
|             return True |  | ||||||
|  |  | ||||||
|         return _user_has_module_perms(self, app_label) |  | ||||||
|  |  | ||||||
|     def email_user(self, subject, message, from_email=None): |  | ||||||
|         "Sends an e-mail to this User." |  | ||||||
|         from django.core.mail import send_mail |  | ||||||
|         send_mail(subject, message, from_email, [self.email]) |  | ||||||
|  |  | ||||||
|     def get_profile(self): |  | ||||||
|         """ |  | ||||||
|         Returns site-specific profile for this user. Raises |  | ||||||
|         SiteProfileNotAvailable if this site does not allow profiles. |  | ||||||
|         """ |  | ||||||
|         if not hasattr(self, '_profile_cache'): |  | ||||||
|             from django.conf import settings |  | ||||||
|             if not getattr(settings, 'AUTH_PROFILE_MODULE', False): |  | ||||||
|                 raise SiteProfileNotAvailable('You need to set AUTH_PROFILE_MO' |  | ||||||
|                                               'DULE in your project settings') |  | ||||||
|             try: |  | ||||||
|                 app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.') |  | ||||||
|             except ValueError: |  | ||||||
|                 raise SiteProfileNotAvailable('app_label and model_name should' |  | ||||||
|                         ' be separated by a dot in the AUTH_PROFILE_MODULE set' |  | ||||||
|                         'ting') |  | ||||||
|  |  | ||||||
|             try: |  | ||||||
|                 model = models.get_model(app_label, model_name) |  | ||||||
|                 if model is None: |  | ||||||
|                     raise SiteProfileNotAvailable('Unable to load the profile ' |  | ||||||
|                         'model, check AUTH_PROFILE_MODULE in your project sett' |  | ||||||
|                         'ings') |  | ||||||
|                 self._profile_cache = model._default_manager.using(self._state.db).get(user__id__exact=self.id) |  | ||||||
|                 self._profile_cache.user = self |  | ||||||
|             except (ImportError, ImproperlyConfigured): |  | ||||||
|                 raise SiteProfileNotAvailable |  | ||||||
|         return self._profile_cache |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoEngineBackend(object): |  | ||||||
|     """Authenticate using MongoEngine and mongoengine.django.auth.User. |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     supports_object_permissions = False |  | ||||||
|     supports_anonymous_user = False |  | ||||||
|     supports_inactive_user = False |  | ||||||
|     _user_doc = False |  | ||||||
|  |  | ||||||
|     def authenticate(self, username=None, password=None): |  | ||||||
|         user = self.user_document.objects(username=username).first() |  | ||||||
|         if user: |  | ||||||
|             if password and user.check_password(password): |  | ||||||
|                 backend = auth.get_backends()[0] |  | ||||||
|                 user.backend = "%s.%s" % (backend.__module__, backend.__class__.__name__) |  | ||||||
|                 return user |  | ||||||
|         return None |  | ||||||
|  |  | ||||||
|     def get_user(self, user_id): |  | ||||||
|         return self.user_document.objects.with_id(user_id) |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def user_document(self): |  | ||||||
|         if self._user_doc is False: |  | ||||||
|             from .mongo_auth.models import get_user_document |  | ||||||
|             self._user_doc = get_user_document() |  | ||||||
|         return self._user_doc |  | ||||||
|  |  | ||||||
| def get_user(userid): |  | ||||||
|     """Returns a User object from an id (User.id). Django's equivalent takes |  | ||||||
|     request, but taking an id instead leaves it up to the developer to store |  | ||||||
|     the id in any way they want (session, signed cookie, etc.) |  | ||||||
|     """ |  | ||||||
|     if not userid: |  | ||||||
|         return AnonymousUser() |  | ||||||
|     return MongoEngineBackend().get_user(userid) or AnonymousUser() |  | ||||||
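For context, a hedged sketch of how this now-removed authentication backend was typically enabled in a Django project; the dotted path mirrors the module above, and the snippet is illustrative rather than taken from the diff:

    # settings.py (illustrative)
    AUTHENTICATION_BACKENDS = (
        'mongoengine.django.auth.MongoEngineBackend',   # assumed dotted path to the backend above
    )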
| @@ -1,119 +0,0 @@ | |||||||
| from django.conf import settings |  | ||||||
| from django.contrib.auth.hashers import make_password |  | ||||||
| from django.contrib.auth.models import UserManager |  | ||||||
| from django.core.exceptions import ImproperlyConfigured |  | ||||||
| from django.db import models |  | ||||||
| try: |  | ||||||
|     from django.utils.module_loading import import_module |  | ||||||
| except ImportError: |  | ||||||
|     """Handle older versions of Django""" |  | ||||||
|     from django.utils.importlib import import_module |  | ||||||
| from django.utils.translation import ugettext_lazy as _ |  | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ( |  | ||||||
|     'get_user_document', |  | ||||||
| ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| MONGOENGINE_USER_DOCUMENT = getattr( |  | ||||||
|     settings, 'MONGOENGINE_USER_DOCUMENT', 'mongoengine.django.auth.User') |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_user_document(): |  | ||||||
|     """Get the user document class used for authentication. |  | ||||||
|  |  | ||||||
|     This is the class defined in settings.MONGOENGINE_USER_DOCUMENT, which |  | ||||||
|     defaults to `mongoengine.django.auth.User`. |  | ||||||
|  |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     name = MONGOENGINE_USER_DOCUMENT |  | ||||||
|     dot = name.rindex('.') |  | ||||||
|     module = import_module(name[:dot]) |  | ||||||
|     return getattr(module, name[dot + 1:]) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoUserManager(UserManager): |  | ||||||
|     """A User manager wich allows the use of MongoEngine documents in Django. |  | ||||||
|  |  | ||||||
|     To use the manager, you must tell django.contrib.auth to use MongoUser as |  | ||||||
|     the user model. In your settings.py, you need: |  | ||||||
|  |  | ||||||
|         INSTALLED_APPS = ( |  | ||||||
|             ... |  | ||||||
|             'django.contrib.auth', |  | ||||||
|             'mongoengine.django.mongo_auth', |  | ||||||
|             ... |  | ||||||
|         ) |  | ||||||
|         AUTH_USER_MODEL = 'mongo_auth.MongoUser' |  | ||||||
|  |  | ||||||
|     Django will use the model object to access the custom Manager, which will |  | ||||||
|     replace the original queryset with MongoEngine querysets. |  | ||||||
|  |  | ||||||
|     By default, mongoengine.django.auth.User will be used to store users. You |  | ||||||
|     can specify another document class in MONGOENGINE_USER_DOCUMENT in your |  | ||||||
|     settings.py. |  | ||||||
|  |  | ||||||
|     The User Document class has the same requirements as a standard custom user |  | ||||||
|     model: https://docs.djangoproject.com/en/dev/topics/auth/customizing/ |  | ||||||
|  |  | ||||||
|     In particular, the User Document class must define USERNAME_FIELD and |  | ||||||
|     REQUIRED_FIELDS. |  | ||||||
|  |  | ||||||
|     `AUTH_USER_MODEL` has been added in Django 1.5. |  | ||||||
|  |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     def contribute_to_class(self, model, name): |  | ||||||
|         super(MongoUserManager, self).contribute_to_class(model, name) |  | ||||||
|         self.dj_model = self.model |  | ||||||
|         self.model = get_user_document() |  | ||||||
|  |  | ||||||
|         self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD |  | ||||||
|         username = models.CharField(_('username'), max_length=30, unique=True) |  | ||||||
|         username.contribute_to_class(self.dj_model, self.dj_model.USERNAME_FIELD) |  | ||||||
|  |  | ||||||
|         self.dj_model.REQUIRED_FIELDS = self.model.REQUIRED_FIELDS |  | ||||||
|         for name in self.dj_model.REQUIRED_FIELDS: |  | ||||||
|             field = models.CharField(_(name), max_length=30) |  | ||||||
|             field.contribute_to_class(self.dj_model, name) |  | ||||||
|  |  | ||||||
|  |  | ||||||
|     def get(self, *args, **kwargs): |  | ||||||
|         try: |  | ||||||
|             return self.get_query_set().get(*args, **kwargs) |  | ||||||
|         except self.model.DoesNotExist: |  | ||||||
|             # ModelBackend expects this exception |  | ||||||
|             raise self.dj_model.DoesNotExist |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def db(self): |  | ||||||
|         raise NotImplementedError |  | ||||||
|  |  | ||||||
|     def get_empty_query_set(self): |  | ||||||
|         return self.model.objects.none() |  | ||||||
|  |  | ||||||
|     def get_query_set(self): |  | ||||||
|         return self.model.objects |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoUser(models.Model): |  | ||||||
|     """"Dummy user model for Django. |  | ||||||
|  |  | ||||||
|     MongoUser is used to replace Django's UserManager with MongoUserManager. |  | ||||||
|     The actual user document class is mongoengine.django.auth.User or any |  | ||||||
|     other document class specified in MONGOENGINE_USER_DOCUMENT. |  | ||||||
|  |  | ||||||
|     To get the user document class, use `get_user_document()`. |  | ||||||
|  |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     objects = MongoUserManager() |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         app_label = 'mongo_auth' |  | ||||||
|  |  | ||||||
|     def set_password(self, password): |  | ||||||
|         """Doesn't do anything, but works around the issue with Django 1.6.""" |  | ||||||
|         make_password(password) |  | ||||||
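A hedged sketch of resolving the configured user document at runtime with the helper above; the username query is made up:

    from mongoengine.django.mongo_auth.models import get_user_document

    UserDocument = get_user_document()                   # defaults to mongoengine.django.auth.User
    user = UserDocument.objects(username='bob').first()  # hypothetical lookup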
| @@ -1,124 +0,0 @@ | |||||||
| from bson import json_util |  | ||||||
| from django.conf import settings |  | ||||||
| from django.contrib.sessions.backends.base import SessionBase, CreateError |  | ||||||
| from django.core.exceptions import SuspiciousOperation |  | ||||||
| try: |  | ||||||
|     from django.utils.encoding import force_unicode |  | ||||||
| except ImportError: |  | ||||||
|     from django.utils.encoding import force_text as force_unicode |  | ||||||
|  |  | ||||||
| from mongoengine.document import Document |  | ||||||
| from mongoengine import fields |  | ||||||
| from mongoengine.queryset import OperationError |  | ||||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME |  | ||||||
|  |  | ||||||
| from .utils import datetime_now |  | ||||||
|  |  | ||||||
|  |  | ||||||
| MONGOENGINE_SESSION_DB_ALIAS = getattr( |  | ||||||
|     settings, 'MONGOENGINE_SESSION_DB_ALIAS', |  | ||||||
|     DEFAULT_CONNECTION_NAME) |  | ||||||
|  |  | ||||||
| # a setting for the name of the collection used to store sessions |  | ||||||
| MONGOENGINE_SESSION_COLLECTION = getattr( |  | ||||||
|     settings, 'MONGOENGINE_SESSION_COLLECTION', |  | ||||||
|     'django_session') |  | ||||||
|  |  | ||||||
| # a setting for whether session data is stored encoded or not |  | ||||||
| MONGOENGINE_SESSION_DATA_ENCODE = getattr( |  | ||||||
|     settings, 'MONGOENGINE_SESSION_DATA_ENCODE', |  | ||||||
|     True) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoSession(Document): |  | ||||||
|     session_key = fields.StringField(primary_key=True, max_length=40) |  | ||||||
|     session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE \ |  | ||||||
|                                         else fields.DictField() |  | ||||||
|     expire_date = fields.DateTimeField() |  | ||||||
|  |  | ||||||
|     meta = { |  | ||||||
|         'collection': MONGOENGINE_SESSION_COLLECTION, |  | ||||||
|         'db_alias': MONGOENGINE_SESSION_DB_ALIAS, |  | ||||||
|         'allow_inheritance': False, |  | ||||||
|         'indexes': [ |  | ||||||
|             { |  | ||||||
|                 'fields': ['expire_date'], |  | ||||||
|                 'expireAfterSeconds': 0 |  | ||||||
|             } |  | ||||||
|         ] |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     def get_decoded(self): |  | ||||||
|         return SessionStore().decode(self.session_data) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SessionStore(SessionBase): |  | ||||||
|     """A MongoEngine-based session store for Django. |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     def _get_session(self, *args, **kwargs): |  | ||||||
|         sess = super(SessionStore, self)._get_session(*args, **kwargs) |  | ||||||
|         if sess.get('_auth_user_id', None): |  | ||||||
|             sess['_auth_user_id'] = str(sess.get('_auth_user_id')) |  | ||||||
|         return sess |  | ||||||
|  |  | ||||||
|     def load(self): |  | ||||||
|         try: |  | ||||||
|             s = MongoSession.objects(session_key=self.session_key, |  | ||||||
|                                      expire_date__gt=datetime_now)[0] |  | ||||||
|             if MONGOENGINE_SESSION_DATA_ENCODE: |  | ||||||
|                 return self.decode(force_unicode(s.session_data)) |  | ||||||
|             else: |  | ||||||
|                 return s.session_data |  | ||||||
|         except (IndexError, SuspiciousOperation): |  | ||||||
|             self.create() |  | ||||||
|             return {} |  | ||||||
|  |  | ||||||
|     def exists(self, session_key): |  | ||||||
|         return bool(MongoSession.objects(session_key=session_key).first()) |  | ||||||
|  |  | ||||||
|     def create(self): |  | ||||||
|         while True: |  | ||||||
|             self._session_key = self._get_new_session_key() |  | ||||||
|             try: |  | ||||||
|                 self.save(must_create=True) |  | ||||||
|             except CreateError: |  | ||||||
|                 continue |  | ||||||
|             self.modified = True |  | ||||||
|             self._session_cache = {} |  | ||||||
|             return |  | ||||||
|  |  | ||||||
|     def save(self, must_create=False): |  | ||||||
|         if self.session_key is None: |  | ||||||
|             self._session_key = self._get_new_session_key() |  | ||||||
|         s = MongoSession(session_key=self.session_key) |  | ||||||
|         if MONGOENGINE_SESSION_DATA_ENCODE: |  | ||||||
|             s.session_data = self.encode(self._get_session(no_load=must_create)) |  | ||||||
|         else: |  | ||||||
|             s.session_data = self._get_session(no_load=must_create) |  | ||||||
|         s.expire_date = self.get_expiry_date() |  | ||||||
|         try: |  | ||||||
|             s.save(force_insert=must_create) |  | ||||||
|         except OperationError: |  | ||||||
|             if must_create: |  | ||||||
|                 raise CreateError |  | ||||||
|             raise |  | ||||||
|  |  | ||||||
|     def delete(self, session_key=None): |  | ||||||
|         if session_key is None: |  | ||||||
|             if self.session_key is None: |  | ||||||
|                 return |  | ||||||
|             session_key = self.session_key |  | ||||||
|         MongoSession.objects(session_key=session_key).delete() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class BSONSerializer(object): |  | ||||||
|     """ |  | ||||||
|     Serializer that can handle BSON types (eg ObjectId). |  | ||||||
|     """ |  | ||||||
|     def dumps(self, obj): |  | ||||||
|         return json_util.dumps(obj, separators=(',', ':')).encode('ascii') |  | ||||||
|  |  | ||||||
|     def loads(self, data): |  | ||||||
|         return json_util.loads(data.decode('ascii')) |  | ||||||
|  |  | ||||||
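A hedged sketch of the Django settings the removed session backend read; the MONGOENGINE_* names come from the module above, while the engine path and values are assumptions:

    # settings.py (illustrative)
    SESSION_ENGINE = 'mongoengine.django.sessions'       # assumed module path
    MONGOENGINE_SESSION_DB_ALIAS = 'default'
    MONGOENGINE_SESSION_COLLECTION = 'django_session'    # collection used to store sessions
    MONGOENGINE_SESSION_DATA_ENCODE = True               # store session data encoded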
| @@ -1,47 +0,0 @@ | |||||||
| from mongoengine.queryset import QuerySet |  | ||||||
| from mongoengine.base import BaseDocument |  | ||||||
| from mongoengine.errors import ValidationError |  | ||||||
|  |  | ||||||
| def _get_queryset(cls): |  | ||||||
|     """Inspired by django.shortcuts.*""" |  | ||||||
|     if isinstance(cls, QuerySet): |  | ||||||
|         return cls |  | ||||||
|     else: |  | ||||||
|         return cls.objects |  | ||||||
|  |  | ||||||
| def get_document_or_404(cls, *args, **kwargs): |  | ||||||
|     """ |  | ||||||
|     Uses get() to return a document, or raises an Http404 exception if the document |  | ||||||
|     does not exist. |  | ||||||
|  |  | ||||||
|     cls may be a Document or QuerySet object. All other passed |  | ||||||
|     arguments and keyword arguments are used in the get() query. |  | ||||||
|  |  | ||||||
|     Note: Like with get(), a MultipleObjectsReturned will be raised if more than one |  | ||||||
|     object is found. |  | ||||||
|  |  | ||||||
|     Inspired by django.shortcuts.* |  | ||||||
|     """ |  | ||||||
|     queryset = _get_queryset(cls) |  | ||||||
|     try: |  | ||||||
|         return queryset.get(*args, **kwargs) |  | ||||||
|     except (queryset._document.DoesNotExist, ValidationError): |  | ||||||
|         from django.http import Http404 |  | ||||||
|         raise Http404('No %s matches the given query.' % queryset._document._class_name) |  | ||||||
|  |  | ||||||
| def get_list_or_404(cls, *args, **kwargs): |  | ||||||
|     """ |  | ||||||
|     Uses filter() to return a list of documents, or raises an Http404 exception if |  | ||||||
|     the list is empty. |  | ||||||
|  |  | ||||||
|     cls may be a Document or QuerySet object. All other passed |  | ||||||
|     arguments and keyword arguments are used in the filter() query. |  | ||||||
|  |  | ||||||
|     Inspired by django.shortcuts.* |  | ||||||
|     """ |  | ||||||
|     queryset = _get_queryset(cls) |  | ||||||
|     obj_list = list(queryset.filter(*args, **kwargs)) |  | ||||||
|     if not obj_list: |  | ||||||
|         from django.http import Http404 |  | ||||||
|         raise Http404('No %s matches the given query.' % queryset._document._class_name) |  | ||||||
|     return obj_list |  | ||||||
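A hedged usage sketch for the shortcuts above inside a hypothetical Django view; the Person document and URL pattern are made up:

    from django.http import HttpResponse

    def person_detail(request, pk):
        person = get_document_or_404(Person, pk=pk)      # raises Http404 if no match
        return HttpResponse(person.name)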
| @@ -1,112 +0,0 @@ | |||||||
| import os |  | ||||||
| import itertools |  | ||||||
| import urlparse |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from django.conf import settings |  | ||||||
| from django.core.files.storage import Storage |  | ||||||
| from django.core.exceptions import ImproperlyConfigured |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class FileDocument(Document): |  | ||||||
|     """A document used to store a single file in GridFS. |  | ||||||
|     """ |  | ||||||
|     file = FileField() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class GridFSStorage(Storage): |  | ||||||
|     """A custom storage backend to store files in GridFS |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     def __init__(self, base_url=None): |  | ||||||
|  |  | ||||||
|         if base_url is None: |  | ||||||
|             base_url = settings.MEDIA_URL |  | ||||||
|         self.base_url = base_url |  | ||||||
|         self.document = FileDocument |  | ||||||
|         self.field = 'file' |  | ||||||
|  |  | ||||||
|     def delete(self, name): |  | ||||||
|         """Deletes the specified file from the storage system. |  | ||||||
|         """ |  | ||||||
|         if self.exists(name): |  | ||||||
|             doc = self.document.objects.first() |  | ||||||
|             field = getattr(doc, self.field) |  | ||||||
|             self._get_doc_with_name(name).delete()  # Delete the FileField |  | ||||||
|             field.delete()                          # Delete the FileDocument |  | ||||||
|  |  | ||||||
|     def exists(self, name): |  | ||||||
|         """Returns True if a file referened by the given name already exists in the |  | ||||||
|         storage system, or False if the name is available for a new file. |  | ||||||
|         """ |  | ||||||
|         doc = self._get_doc_with_name(name) |  | ||||||
|         if doc: |  | ||||||
|             field = getattr(doc, self.field) |  | ||||||
|             return bool(field.name) |  | ||||||
|         else: |  | ||||||
|             return False |  | ||||||
|  |  | ||||||
|     def listdir(self, path=None): |  | ||||||
|         """Lists the contents of the specified path, returning a 2-tuple of lists; |  | ||||||
|         the first item being directories, the second item being files. |  | ||||||
|         """ |  | ||||||
|         def name(doc): |  | ||||||
|             return getattr(doc, self.field).name |  | ||||||
|         docs = self.document.objects |  | ||||||
|         return [], [name(d) for d in docs if name(d)] |  | ||||||
|  |  | ||||||
|     def size(self, name): |  | ||||||
|         """Returns the total size, in bytes, of the file specified by name. |  | ||||||
|         """ |  | ||||||
|         doc = self._get_doc_with_name(name) |  | ||||||
|         if doc: |  | ||||||
|             return getattr(doc, self.field).length |  | ||||||
|         else: |  | ||||||
|             raise ValueError("No such file or directory: '%s'" % name) |  | ||||||
|  |  | ||||||
|     def url(self, name): |  | ||||||
|         """Returns an absolute URL where the file's contents can be accessed |  | ||||||
|         directly by a web browser. |  | ||||||
|         """ |  | ||||||
|         if self.base_url is None: |  | ||||||
|             raise ValueError("This file is not accessible via a URL.") |  | ||||||
|         return urlparse.urljoin(self.base_url, name).replace('\\', '/') |  | ||||||
|  |  | ||||||
|     def _get_doc_with_name(self, name): |  | ||||||
|         """Find the documents in the store with the given name |  | ||||||
|         """ |  | ||||||
|         docs = self.document.objects |  | ||||||
|         doc = [d for d in docs if hasattr(getattr(d, self.field), 'name') and getattr(d, self.field).name == name] |  | ||||||
|         if doc: |  | ||||||
|             return doc[0] |  | ||||||
|         else: |  | ||||||
|             return None |  | ||||||
|  |  | ||||||
|     def _open(self, name, mode='rb'): |  | ||||||
|         doc = self._get_doc_with_name(name) |  | ||||||
|         if doc: |  | ||||||
|             return getattr(doc, self.field) |  | ||||||
|         else: |  | ||||||
|             raise ValueError("No file found with the name '%s'." % name) |  | ||||||
|  |  | ||||||
|     def get_available_name(self, name): |  | ||||||
|         """Returns a filename that's free on the target storage system, and |  | ||||||
|         available for new content to be written to. |  | ||||||
|         """ |  | ||||||
|         file_root, file_ext = os.path.splitext(name) |  | ||||||
|         # If the filename already exists, add an underscore and a number (before |  | ||||||
|         # the file extension, if one exists) to the filename until the generated |  | ||||||
|         # filename doesn't exist. |  | ||||||
|         count = itertools.count(1) |  | ||||||
|         while self.exists(name): |  | ||||||
|             # file_ext includes the dot. |  | ||||||
|             name = os.path.join("%s_%s%s" % (file_root, count.next(), file_ext)) |  | ||||||
|  |  | ||||||
|         return name |  | ||||||
|  |  | ||||||
|     def _save(self, name, content): |  | ||||||
|         doc = self.document() |  | ||||||
|         getattr(doc, self.field).put(content, filename=name) |  | ||||||
|         doc.save() |  | ||||||
|  |  | ||||||
|         return name |  | ||||||
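A hedged usage sketch for the removed GridFS storage backend; it assumes an active mongoengine connection, and the database, file name, and content are made up:

    from django.core.files.base import ContentFile
    from mongoengine import connect

    connect('test')                                      # hypothetical database
    storage = GridFSStorage(base_url='/media/')
    name = storage.save('hello.txt', ContentFile(b'hello'))  # Storage.save() delegates to _save()
    assert storage.exists(name)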
| @@ -1,31 +0,0 @@ | |||||||
| #coding: utf-8 |  | ||||||
|  |  | ||||||
| from unittest import TestCase |  | ||||||
|  |  | ||||||
| from mongoengine import connect |  | ||||||
| from mongoengine.connection import get_db |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoTestCase(TestCase): |  | ||||||
|     """ |  | ||||||
|     TestCase class that clears the collections between tests |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def db_name(self): |  | ||||||
|         from django.conf import settings |  | ||||||
|         return 'test_%s' % getattr(settings, 'MONGO_DATABASE_NAME', 'dummy') |  | ||||||
|  |  | ||||||
|     def __init__(self, methodName='runtest'): |  | ||||||
|         connect(self.db_name) |  | ||||||
|         self.db = get_db() |  | ||||||
|         super(MongoTestCase, self).__init__(methodName) |  | ||||||
|  |  | ||||||
|     def dropCollections(self): |  | ||||||
|         for collection in self.db.collection_names(): |  | ||||||
|             if collection.startswith('system.'): |  | ||||||
|                 continue |  | ||||||
|             self.db.drop_collection(collection) |  | ||||||
|  |  | ||||||
|     def tearDown(self): |  | ||||||
|         self.dropCollections() |  | ||||||
| @@ -1,6 +0,0 @@ | |||||||
| try: |  | ||||||
|     # django >= 1.4 |  | ||||||
|     from django.utils.timezone import now as datetime_now |  | ||||||
| except ImportError: |  | ||||||
|     from datetime import datetime |  | ||||||
|     datetime_now = datetime.now |  | ||||||
										
											
File diff suppressed because it is too large
							| @@ -1,12 +1,20 @@ | |||||||
| from collections import defaultdict | from collections import defaultdict | ||||||
|  |  | ||||||
| from mongoengine.python_support import txt_type | __all__ = ( | ||||||
|  |     "NotRegistered", | ||||||
|  |     "InvalidDocumentError", | ||||||
| __all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError', |     "LookUpError", | ||||||
|            'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError', |     "DoesNotExist", | ||||||
|            'OperationError', 'NotUniqueError', 'FieldDoesNotExist', |     "MultipleObjectsReturned", | ||||||
|            'ValidationError') |     "InvalidQueryError", | ||||||
|  |     "OperationError", | ||||||
|  |     "NotUniqueError", | ||||||
|  |     "BulkWriteError", | ||||||
|  |     "FieldDoesNotExist", | ||||||
|  |     "ValidationError", | ||||||
|  |     "SaveConditionError", | ||||||
|  |     "DeprecatedError", | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class NotRegistered(Exception): | class NotRegistered(Exception): | ||||||
| @@ -41,10 +49,25 @@ class NotUniqueError(OperationError): | |||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
| class FieldDoesNotExist(Exception): | class BulkWriteError(OperationError): | ||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class SaveConditionError(OperationError): | ||||||
|  |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class FieldDoesNotExist(Exception): | ||||||
|  |     """Raised when trying to set a field | ||||||
|  |     not declared in a :class:`~mongoengine.Document` | ||||||
|  |     or an :class:`~mongoengine.EmbeddedDocument`. | ||||||
|  |  | ||||||
|  |     To avoid this behavior on data loading, | ||||||
|  |     you should set :attr:`strict` to ``False`` | ||||||
|  |     in the :attr:`meta` dictionary. | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |  | ||||||
| class ValidationError(AssertionError): | class ValidationError(AssertionError): | ||||||
|     """Validation exception. |     """Validation exception. | ||||||
|  |  | ||||||
| @@ -61,23 +84,24 @@ class ValidationError(AssertionError): | |||||||
|     _message = None |     _message = None | ||||||
|  |  | ||||||
|     def __init__(self, message="", **kwargs): |     def __init__(self, message="", **kwargs): | ||||||
|         self.errors = kwargs.get('errors', {}) |         super().__init__(message) | ||||||
|         self.field_name = kwargs.get('field_name') |         self.errors = kwargs.get("errors", {}) | ||||||
|  |         self.field_name = kwargs.get("field_name") | ||||||
|         self.message = message |         self.message = message | ||||||
|  |  | ||||||
|     def __str__(self): |     def __str__(self): | ||||||
|         return txt_type(self.message) |         return str(self.message) | ||||||
|  |  | ||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
|         return '%s(%s,)' % (self.__class__.__name__, self.message) |         return f"{self.__class__.__name__}({self.message},)" | ||||||
|  |  | ||||||
|     def __getattribute__(self, name): |     def __getattribute__(self, name): | ||||||
|         message = super(ValidationError, self).__getattribute__(name) |         message = super().__getattribute__(name) | ||||||
|         if name == 'message': |         if name == "message": | ||||||
|             if self.field_name: |             if self.field_name: | ||||||
|                 message = '%s' % message |                 message = "%s" % message | ||||||
|             if self.errors: |             if self.errors: | ||||||
|                 message = '%s(%s)' % (message, self._format_errors()) |                 message = f"{message}({self._format_errors()})" | ||||||
|         return message |         return message | ||||||
|  |  | ||||||
|     def _get_message(self): |     def _get_message(self): | ||||||
| @@ -98,34 +122,40 @@ class ValidationError(AssertionError): | |||||||
|  |  | ||||||
|         def build_dict(source): |         def build_dict(source): | ||||||
|             errors_dict = {} |             errors_dict = {} | ||||||
|             if not source: |  | ||||||
|                 return errors_dict |  | ||||||
|             if isinstance(source, dict): |             if isinstance(source, dict): | ||||||
|                 for field_name, error in source.iteritems(): |                 for field_name, error in source.items(): | ||||||
|                     errors_dict[field_name] = build_dict(error) |                     errors_dict[field_name] = build_dict(error) | ||||||
|             elif isinstance(source, ValidationError) and source.errors: |             elif isinstance(source, ValidationError) and source.errors: | ||||||
|                 return build_dict(source.errors) |                 return build_dict(source.errors) | ||||||
|             else: |             else: | ||||||
|                 return unicode(source) |                 return str(source) | ||||||
|  |  | ||||||
|             return errors_dict |             return errors_dict | ||||||
|  |  | ||||||
|         if not self.errors: |         if not self.errors: | ||||||
|             return {} |             return {} | ||||||
|  |  | ||||||
|         return build_dict(self.errors) |         return build_dict(self.errors) | ||||||
|  |  | ||||||
|     def _format_errors(self): |     def _format_errors(self): | ||||||
|         """Returns a string listing all errors within a document""" |         """Returns a string listing all errors within a document""" | ||||||
|  |  | ||||||
|         def generate_key(value, prefix=''): |         def generate_key(value, prefix=""): | ||||||
|             if isinstance(value, list): |             if isinstance(value, list): | ||||||
|                 value = ' '.join([generate_key(k) for k in value]) |                 value = " ".join([generate_key(k) for k in value]) | ||||||
|             if isinstance(value, dict): |             elif isinstance(value, dict): | ||||||
|                 value = ' '.join( |                 value = " ".join([generate_key(v, k) for k, v in value.items()]) | ||||||
|                         [generate_key(v, k) for k, v in value.iteritems()]) |  | ||||||
|  |  | ||||||
|             results = "%s.%s" % (prefix, value) if prefix else value |             results = f"{prefix}.{value}" if prefix else value | ||||||
|             return results |             return results | ||||||
|  |  | ||||||
|         error_dict = defaultdict(list) |         error_dict = defaultdict(list) | ||||||
|         for k, v in self.to_dict().iteritems(): |         for k, v in self.to_dict().items(): | ||||||
|             error_dict[generate_key(v)].append(k) |             error_dict[generate_key(v)].append(k) | ||||||
|         return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()]) |         return " ".join([f"{k}: {v}" for k, v in error_dict.items()]) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DeprecatedError(Exception): | ||||||
|  |     """Raise when a user uses a feature that has been Deprecated""" | ||||||
|  |  | ||||||
|  |     pass | ||||||
|   | |||||||
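As context for the ValidationError changes above, a minimal sketch of how the to_dict() / _format_errors() helpers surface field errors. The Person document and its max_length constraint are hypothetical and not part of this changeset; no database connection is needed for local validation.

    from mongoengine import Document, StringField, ValidationError

    class Person(Document):
        name = StringField(required=True, max_length=3)  # hypothetical constraint

    try:
        Person(name="too long").validate()
    except ValidationError as e:
        # to_dict() nests per-field messages; str(e) goes through _format_errors().
        print(e.to_dict())  # e.g. {'name': 'String value is too long'}
        print(str(e))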
										
											
(File diff suppressed because it is too large)

mongoengine/mongodb_support.py (new file, 20 lines)
							| @@ -0,0 +1,20 @@ | |||||||
|  | """ | ||||||
|  | Helper functions, constants, and types to aid with MongoDB version support | ||||||
|  | """ | ||||||
|  | from mongoengine.connection import get_connection | ||||||
|  |  | ||||||
|  | # Constant that can be used to compare the version retrieved with | ||||||
|  | # get_mongodb_version() | ||||||
|  | MONGODB_34 = (3, 4) | ||||||
|  | MONGODB_36 = (3, 6) | ||||||
|  | MONGODB_42 = (4, 2) | ||||||
|  | MONGODB_44 = (4, 4) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def get_mongodb_version(): | ||||||
|  |     """Return the version of the default connected mongoDB (first 2 digits) | ||||||
|  |  | ||||||
|  |     :return: tuple(int, int) | ||||||
|  |     """ | ||||||
|  |     version_list = get_connection().server_info()["versionArray"][:2]  # e.g: (3, 2) | ||||||
|  |     return tuple(version_list) | ||||||
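A small usage sketch for the new helper above: version tuples compare lexicographically, so the module-level constants can gate version-specific behaviour. The connection URI is an assumption and a running mongod is required.

    from mongoengine import connect
    from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version

    connect(host="mongodb://localhost:27017/example_db")  # hypothetical URI

    # e.g. a 4.0 server yields (4, 0), and (4, 0) >= (3, 6) is True
    if get_mongodb_version() >= MONGODB_36:
        print("server supports MongoDB 3.6+ features")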
							
								
								
									
mongoengine/pymongo_support.py (new file, 60 lines)
							| @@ -0,0 +1,60 @@ | |||||||
|  | """ | ||||||
|  | Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support. | ||||||
|  | """ | ||||||
|  | import pymongo | ||||||
|  | from pymongo.errors import OperationFailure | ||||||
|  |  | ||||||
|  | _PYMONGO_37 = (3, 7) | ||||||
|  |  | ||||||
|  | PYMONGO_VERSION = tuple(pymongo.version_tuple[:2]) | ||||||
|  |  | ||||||
|  | IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37 | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def count_documents( | ||||||
|  |     collection, filter, skip=None, limit=None, hint=None, collation=None | ||||||
|  | ): | ||||||
|  |     """Pymongo>3.7 deprecates count in favour of count_documents""" | ||||||
|  |     if limit == 0: | ||||||
|  |         return 0  # Pymongo raises an OperationFailure if called with limit=0 | ||||||
|  |  | ||||||
|  |     kwargs = {} | ||||||
|  |     if skip is not None: | ||||||
|  |         kwargs["skip"] = skip | ||||||
|  |     if limit is not None: | ||||||
|  |         kwargs["limit"] = limit | ||||||
|  |     if hint not in (-1, None): | ||||||
|  |         kwargs["hint"] = hint | ||||||
|  |     if collation is not None: | ||||||
|  |         kwargs["collation"] = collation | ||||||
|  |  | ||||||
|  |     # count_documents appeared in pymongo 3.7 | ||||||
|  |     if IS_PYMONGO_GTE_37: | ||||||
|  |         try: | ||||||
|  |             return collection.count_documents(filter=filter, **kwargs) | ||||||
|  |         except OperationFailure: | ||||||
|  |             # OperationFailure - accounts for some operators that used to work | ||||||
|  |             # with .count but are no longer working with count_documents (i.e. $geoNear, $near, and $nearSphere) | ||||||
|  |             # fallback to deprecated Cursor.count | ||||||
|  |             # Keeping this should be reevaluated the day pymongo removes .count entirely | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |     cursor = collection.find(filter) | ||||||
|  |     for option, option_value in kwargs.items(): | ||||||
|  |         cursor_method = getattr(cursor, option) | ||||||
|  |         cursor = cursor_method(option_value) | ||||||
|  |     with_limit_and_skip = "skip" in kwargs or "limit" in kwargs | ||||||
|  |     return cursor.count(with_limit_and_skip=with_limit_and_skip) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def list_collection_names(db, include_system_collections=False): | ||||||
|  |     """Pymongo>3.7 deprecates collection_names in favour of list_collection_names""" | ||||||
|  |     if IS_PYMONGO_GTE_37: | ||||||
|  |         collections = db.list_collection_names() | ||||||
|  |     else: | ||||||
|  |         collections = db.collection_names() | ||||||
|  |  | ||||||
|  |     if not include_system_collections: | ||||||
|  |         collections = [c for c in collections if not c.startswith("system.")] | ||||||
|  |  | ||||||
|  |     return collections | ||||||
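A hedged sketch of the two compatibility shims above in use; the database and collection names are assumptions and a running mongod is required.

    from mongoengine import connect
    from mongoengine.pymongo_support import count_documents, list_collection_names

    client = connect(host="mongodb://localhost:27017/example_db")  # hypothetical URI
    db = client["example_db"]

    # Uses count_documents() on PyMongo >= 3.7 and falls back to Cursor.count() otherwise.
    total = count_documents(db["person"], filter={"age": {"$gte": 18}}, limit=10)
    print(total, list_collection_names(db))  # system.* collections are filtered out by default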
| @@ -1,29 +0,0 @@ | |||||||
| """Helper functions and types to aid with Python 2.5 - 3 support.""" |  | ||||||
|  |  | ||||||
| import sys |  | ||||||
|  |  | ||||||
| PY3 = sys.version_info[0] == 3 |  | ||||||
|  |  | ||||||
| if PY3: |  | ||||||
|     import codecs |  | ||||||
|     from io import BytesIO as StringIO |  | ||||||
|     # return s converted to binary.  b('test') should be equivalent to b'test' |  | ||||||
|     def b(s): |  | ||||||
|         return codecs.latin_1_encode(s)[0] |  | ||||||
|  |  | ||||||
|     bin_type = bytes |  | ||||||
|     txt_type   = str |  | ||||||
| else: |  | ||||||
|     try: |  | ||||||
|         from cStringIO import StringIO |  | ||||||
|     except ImportError: |  | ||||||
|         from StringIO import StringIO |  | ||||||
|  |  | ||||||
|     # Conversion to binary only necessary in Python 3 |  | ||||||
|     def b(s): |  | ||||||
|         return s |  | ||||||
|  |  | ||||||
|     bin_type = str |  | ||||||
|     txt_type = unicode |  | ||||||
|  |  | ||||||
| str_types = (bin_type, txt_type) |  | ||||||
| @@ -1,11 +1,28 @@ | |||||||
| from mongoengine.errors import (DoesNotExist, MultipleObjectsReturned, | from mongoengine.errors import * | ||||||
|                                 InvalidQueryError, OperationError, |  | ||||||
|                                 NotUniqueError) |  | ||||||
| from mongoengine.queryset.field_list import * | from mongoengine.queryset.field_list import * | ||||||
| from mongoengine.queryset.manager import * | from mongoengine.queryset.manager import * | ||||||
| from mongoengine.queryset.queryset import * | from mongoengine.queryset.queryset import * | ||||||
| from mongoengine.queryset.transform import * | from mongoengine.queryset.transform import * | ||||||
| from mongoengine.queryset.visitor import * | from mongoengine.queryset.visitor import * | ||||||
|  |  | ||||||
| __all__ = (field_list.__all__ + manager.__all__ + queryset.__all__ + | # Expose just the public subset of all imported objects and constants. | ||||||
|            transform.__all__ + visitor.__all__) | __all__ = ( | ||||||
|  |     "QuerySet", | ||||||
|  |     "QuerySetNoCache", | ||||||
|  |     "Q", | ||||||
|  |     "queryset_manager", | ||||||
|  |     "QuerySetManager", | ||||||
|  |     "QueryFieldList", | ||||||
|  |     "DO_NOTHING", | ||||||
|  |     "NULLIFY", | ||||||
|  |     "CASCADE", | ||||||
|  |     "DENY", | ||||||
|  |     "PULL", | ||||||
|  |     # Errors that might be related to a queryset, mostly here for backward | ||||||
|  |     # compatibility | ||||||
|  |     "DoesNotExist", | ||||||
|  |     "InvalidQueryError", | ||||||
|  |     "MultipleObjectsReturned", | ||||||
|  |     "NotUniqueError", | ||||||
|  |     "OperationError", | ||||||
|  | ) | ||||||
|   | |||||||
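With the explicit __all__ above, the public queryset names, including the error classes kept for backward compatibility, remain importable from the package itself. A quick sketch; the field names are hypothetical.

    from mongoengine.queryset import CASCADE, DoesNotExist, Q, queryset_manager

    filters = Q(age__gte=18) | Q(is_admin=True)  # hypothetical field names
    assert issubclass(DoesNotExist, Exception)
    print(filters, CASCADE)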
										
											
(File diff suppressed because it is too large)
							| @@ -1,13 +1,15 @@ | |||||||
|  | __all__ = ("QueryFieldList",) | ||||||
| __all__ = ('QueryFieldList',) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class QueryFieldList(object): | class QueryFieldList: | ||||||
|     """Object that handles combinations of .only() and .exclude() calls""" |     """Object that handles combinations of .only() and .exclude() calls""" | ||||||
|  |  | ||||||
|     ONLY = 1 |     ONLY = 1 | ||||||
|     EXCLUDE = 0 |     EXCLUDE = 0 | ||||||
|  |  | ||||||
|     def __init__(self, fields=None, value=ONLY, always_include=None, _only_called=False): |     def __init__( | ||||||
|  |         self, fields=None, value=ONLY, always_include=None, _only_called=False | ||||||
|  |     ): | ||||||
|         """The QueryFieldList builder |         """The QueryFieldList builder | ||||||
|  |  | ||||||
|         :param fields: A list of fields used in `.only()` or `.exclude()` |         :param fields: A list of fields used in `.only()` or `.exclude()` | ||||||
| @@ -50,7 +52,7 @@ class QueryFieldList(object): | |||||||
|             self.fields = f.fields - self.fields |             self.fields = f.fields - self.fields | ||||||
|             self._clean_slice() |             self._clean_slice() | ||||||
|  |  | ||||||
|         if '_id' in f.fields: |         if "_id" in f.fields: | ||||||
|             self._id = f.value |             self._id = f.value | ||||||
|  |  | ||||||
|         if self.always_include: |         if self.always_include: | ||||||
| @@ -60,23 +62,23 @@ class QueryFieldList(object): | |||||||
|             else: |             else: | ||||||
|                 self.fields -= self.always_include |                 self.fields -= self.always_include | ||||||
|  |  | ||||||
|         if getattr(f, '_only_called', False): |         if getattr(f, "_only_called", False): | ||||||
|             self._only_called = True |             self._only_called = True | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def __nonzero__(self): |     def __bool__(self): | ||||||
|         return bool(self.fields) |         return bool(self.fields) | ||||||
|  |  | ||||||
|     def as_dict(self): |     def as_dict(self): | ||||||
|         field_list = dict((field, self.value) for field in self.fields) |         field_list = {field: self.value for field in self.fields} | ||||||
|         if self.slice: |         if self.slice: | ||||||
|             field_list.update(self.slice) |             field_list.update(self.slice) | ||||||
|         if self._id is not None: |         if self._id is not None: | ||||||
|             field_list['_id'] = self._id |             field_list["_id"] = self._id | ||||||
|         return field_list |         return field_list | ||||||
|  |  | ||||||
|     def reset(self): |     def reset(self): | ||||||
|         self.fields = set([]) |         self.fields = set() | ||||||
|         self.slice = {} |         self.slice = {} | ||||||
|         self.value = self.ONLY |         self.value = self.ONLY | ||||||
|  |  | ||||||
|   | |||||||
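A sketch of how QueryFieldList folds a prior .only() spec and a later .exclude() spec into one Mongo projection; the field names are hypothetical.

    from mongoengine.queryset.field_list import QueryFieldList

    fields = QueryFieldList(["name", "email"], value=QueryFieldList.ONLY, _only_called=True)
    fields += QueryFieldList(["email"], value=QueryFieldList.EXCLUDE)

    print(fields.as_dict())  # with these inputs this should collapse to {'name': 1}
    print(bool(fields))      # __bool__ reports whether any projected fields remain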
| @@ -1,10 +1,11 @@ | |||||||
| from functools import partial | from functools import partial | ||||||
|  |  | ||||||
| from mongoengine.queryset.queryset import QuerySet | from mongoengine.queryset.queryset import QuerySet | ||||||
|  |  | ||||||
| __all__ = ('queryset_manager', 'QuerySetManager') | __all__ = ("queryset_manager", "QuerySetManager") | ||||||
|  |  | ||||||
|  |  | ||||||
| class QuerySetManager(object): | class QuerySetManager: | ||||||
|     """ |     """ | ||||||
|     The default QuerySet Manager. |     The default QuerySet Manager. | ||||||
|  |  | ||||||
| @@ -29,14 +30,14 @@ class QuerySetManager(object): | |||||||
|         Document.objects is accessed. |         Document.objects is accessed. | ||||||
|         """ |         """ | ||||||
|         if instance is not None: |         if instance is not None: | ||||||
|             # Document class being used rather than a document object |             # Document object being used rather than a document class | ||||||
|             return self |             return self | ||||||
|  |  | ||||||
|         # owner is the document that contains the QuerySetManager |         # owner is the document that contains the QuerySetManager | ||||||
|         queryset_class = owner._meta.get('queryset_class', self.default) |         queryset_class = owner._meta.get("queryset_class", self.default) | ||||||
|         queryset = queryset_class(owner, owner._get_collection()) |         queryset = queryset_class(owner, owner._get_collection()) | ||||||
|         if self.get_queryset: |         if self.get_queryset: | ||||||
|             arg_count = self.get_queryset.func_code.co_argcount |             arg_count = self.get_queryset.__code__.co_argcount | ||||||
|             if arg_count == 1: |             if arg_count == 1: | ||||||
|                 queryset = self.get_queryset(queryset) |                 queryset = self.get_queryset(queryset) | ||||||
|             elif arg_count == 2: |             elif arg_count == 2: | ||||||
|   | |||||||
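A short sketch of the manager in use; the Post document is hypothetical. The two-argument form shown here is the case the arg_count check above dispatches to.

    from mongoengine import BooleanField, Document, StringField, queryset_manager

    class Post(Document):  # hypothetical document
        title = StringField()
        published = BooleanField(default=False)

        @queryset_manager
        def live_posts(doc_cls, queryset):
            # arg_count == 2: receives the owner class and the base queryset
            return queryset.filter(published=True)

    # Accessing Post.live_posts goes through QuerySetManager.__get__ and needs
    # an active connection, e.g.:
    # print(Post.live_posts.count())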
| @@ -1,9 +1,22 @@ | |||||||
| from mongoengine.errors import OperationError | from mongoengine.errors import OperationError | ||||||
| from mongoengine.queryset.base import (BaseQuerySet, DO_NOTHING, NULLIFY, | from mongoengine.queryset.base import ( | ||||||
|                                        CASCADE, DENY, PULL) |     CASCADE, | ||||||
|  |     DENY, | ||||||
|  |     DO_NOTHING, | ||||||
|  |     NULLIFY, | ||||||
|  |     PULL, | ||||||
|  |     BaseQuerySet, | ||||||
|  | ) | ||||||
|  |  | ||||||
| __all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE', | __all__ = ( | ||||||
|            'DENY', 'PULL') |     "QuerySet", | ||||||
|  |     "QuerySetNoCache", | ||||||
|  |     "DO_NOTHING", | ||||||
|  |     "NULLIFY", | ||||||
|  |     "CASCADE", | ||||||
|  |     "DENY", | ||||||
|  |     "PULL", | ||||||
|  | ) | ||||||
|  |  | ||||||
| # The maximum number of items to display in a QuerySet.__repr__ | # The maximum number of items to display in a QuerySet.__repr__ | ||||||
| REPR_OUTPUT_SIZE = 20 | REPR_OUTPUT_SIZE = 20 | ||||||
| @@ -27,9 +40,10 @@ class QuerySet(BaseQuerySet): | |||||||
|         in batches of ``ITER_CHUNK_SIZE``. |         in batches of ``ITER_CHUNK_SIZE``. | ||||||
|  |  | ||||||
|         If ``self._has_more`` the cursor hasn't been exhausted so cache then |         If ``self._has_more``, the cursor hasn't been exhausted, so cache the next | ||||||
|         batch.  Otherwise iterate the result_cache. |         batch. Otherwise iterate the result_cache. | ||||||
|         """ |         """ | ||||||
|         self._iter = True |         self._iter = True | ||||||
|  |  | ||||||
|         if self._has_more: |         if self._has_more: | ||||||
|             return self._iter_results() |             return self._iter_results() | ||||||
|  |  | ||||||
| @@ -38,45 +52,60 @@ class QuerySet(BaseQuerySet): | |||||||
|  |  | ||||||
|     def __len__(self): |     def __len__(self): | ||||||
|         """Since __len__ is called quite frequently (for example, as part of |         """Since __len__ is called quite frequently (for example, as part of | ||||||
|         list(qs) we populate the result cache and cache the length. |         list(qs)), we populate the result cache and cache the length. | ||||||
|         """ |         """ | ||||||
|         if self._len is not None: |         if self._len is not None: | ||||||
|             return self._len |             return self._len | ||||||
|  |  | ||||||
|  |         # Populate the result cache with *all* of the docs in the cursor | ||||||
|         if self._has_more: |         if self._has_more: | ||||||
|             # populate the cache |  | ||||||
|             list(self._iter_results()) |             list(self._iter_results()) | ||||||
|  |  | ||||||
|  |         # Cache the length of the complete result cache and return it | ||||||
|         self._len = len(self._result_cache) |         self._len = len(self._result_cache) | ||||||
|         return self._len |         return self._len | ||||||
|  |  | ||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
|         """Provides the string representation of the QuerySet |         """Provide a string representation of the QuerySet""" | ||||||
|         """ |  | ||||||
|         if self._iter: |         if self._iter: | ||||||
|             return '.. queryset mid-iteration ..' |             return ".. queryset mid-iteration .." | ||||||
|  |  | ||||||
|         self._populate_cache() |         self._populate_cache() | ||||||
|         data = self._result_cache[:REPR_OUTPUT_SIZE + 1] |         data = self._result_cache[: REPR_OUTPUT_SIZE + 1] | ||||||
|         if len(data) > REPR_OUTPUT_SIZE: |         if len(data) > REPR_OUTPUT_SIZE: | ||||||
|             data[-1] = "...(remaining elements truncated)..." |             data[-1] = "...(remaining elements truncated)..." | ||||||
|         return repr(data) |         return repr(data) | ||||||
|  |  | ||||||
|  |  | ||||||
|     def _iter_results(self): |     def _iter_results(self): | ||||||
|         """A generator for iterating over the result cache. |         """A generator for iterating over the result cache. | ||||||
|  |  | ||||||
|         Also populates the cache if there are more possible results to yield. |         Also populates the cache if there are more possible results to | ||||||
|         Raises StopIteration when there are no more results""" |         yield. Raises StopIteration when there are no more results. | ||||||
|  |         """ | ||||||
|         if self._result_cache is None: |         if self._result_cache is None: | ||||||
|             self._result_cache = [] |             self._result_cache = [] | ||||||
|  |  | ||||||
|         pos = 0 |         pos = 0 | ||||||
|         while True: |         while True: | ||||||
|             upper = len(self._result_cache) |  | ||||||
|             while pos < upper: |             # For all positions lower than the length of the current result | ||||||
|  |             # cache, serve the docs straight from the cache w/o hitting the | ||||||
|  |             # database. | ||||||
|  |             # XXX it's VERY important to compute the len within the `while` | ||||||
|  |             # condition because the result cache might expand mid-iteration | ||||||
|  |             # (e.g. if we call len(qs) inside a loop that iterates over the | ||||||
|  |             # queryset). Fortunately len(list) is O(1) in Python, so this | ||||||
|  |             # doesn't cause performance issues. | ||||||
|  |             while pos < len(self._result_cache): | ||||||
|                 yield self._result_cache[pos] |                 yield self._result_cache[pos] | ||||||
|                 pos = pos + 1 |                 pos += 1 | ||||||
|  |  | ||||||
|  |             # return if we already established there were no more | ||||||
|  |             # docs in the db cursor. | ||||||
|             if not self._has_more: |             if not self._has_more: | ||||||
|                 raise StopIteration |                 return | ||||||
|  |  | ||||||
|  |             # Otherwise, populate more of the cache and repeat. | ||||||
|             if len(self._result_cache) <= pos: |             if len(self._result_cache) <= pos: | ||||||
|                 self._populate_cache() |                 self._populate_cache() | ||||||
|  |  | ||||||
| @@ -87,12 +116,22 @@ class QuerySet(BaseQuerySet): | |||||||
|         """ |         """ | ||||||
|         if self._result_cache is None: |         if self._result_cache is None: | ||||||
|             self._result_cache = [] |             self._result_cache = [] | ||||||
|         if self._has_more: |  | ||||||
|             try: |         # Skip populating the cache if we already established there are no | ||||||
|                 for i in xrange(ITER_CHUNK_SIZE): |         # more docs to pull from the database. | ||||||
|                     self._result_cache.append(self.next()) |         if not self._has_more: | ||||||
|             except StopIteration: |             return | ||||||
|                 self._has_more = False |  | ||||||
|  |         # Pull in ITER_CHUNK_SIZE docs from the database and store them in | ||||||
|  |         # the result cache. | ||||||
|  |         try: | ||||||
|  |             for _ in range(ITER_CHUNK_SIZE): | ||||||
|  |                 self._result_cache.append(next(self)) | ||||||
|  |         except StopIteration: | ||||||
|  |             # Getting this exception means there are no more docs in the | ||||||
|  |             # db cursor. Set _has_more to False so that we can use that | ||||||
|  |             # information in other places. | ||||||
|  |             self._has_more = False | ||||||
|  |  | ||||||
|     def count(self, with_limit_and_skip=False): |     def count(self, with_limit_and_skip=False): | ||||||
|         """Count the selected elements in the query. |         """Count the selected elements in the query. | ||||||
| @@ -102,47 +141,41 @@ class QuerySet(BaseQuerySet): | |||||||
|             getting the count |             getting the count | ||||||
|         """ |         """ | ||||||
|         if with_limit_and_skip is False: |         if with_limit_and_skip is False: | ||||||
|             return super(QuerySet, self).count(with_limit_and_skip) |             return super().count(with_limit_and_skip) | ||||||
|  |  | ||||||
|         if self._len is None: |         if self._len is None: | ||||||
|             self._len = super(QuerySet, self).count(with_limit_and_skip) |             # cache the length | ||||||
|  |             self._len = super().count(with_limit_and_skip) | ||||||
|  |  | ||||||
|         return self._len |         return self._len | ||||||
|  |  | ||||||
|     def no_cache(self): |     def no_cache(self): | ||||||
|         """Convert to a non_caching queryset |         """Convert to a non-caching queryset""" | ||||||
|  |  | ||||||
|         .. versionadded:: 0.8.3 Convert to non caching queryset |  | ||||||
|         """ |  | ||||||
|         if self._result_cache is not None: |         if self._result_cache is not None: | ||||||
|             raise OperationError("QuerySet already cached") |             raise OperationError("QuerySet already cached") | ||||||
|         return self.clone_into(QuerySetNoCache(self._document, self._collection)) |  | ||||||
|  |         return self._clone_into(QuerySetNoCache(self._document, self._collection)) | ||||||
|  |  | ||||||
|  |  | ||||||
| class QuerySetNoCache(BaseQuerySet): | class QuerySetNoCache(BaseQuerySet): | ||||||
|     """A non caching QuerySet""" |     """A non caching QuerySet""" | ||||||
|  |  | ||||||
|     def cache(self): |     def cache(self): | ||||||
|         """Convert to a caching queryset |         """Convert to a caching queryset""" | ||||||
|  |         return self._clone_into(QuerySet(self._document, self._collection)) | ||||||
|         .. versionadded:: 0.8.3 Convert to caching queryset |  | ||||||
|         """ |  | ||||||
|         return self.clone_into(QuerySet(self._document, self._collection)) |  | ||||||
|  |  | ||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
|         """Provides the string representation of the QuerySet |         """Provides the string representation of the QuerySet""" | ||||||
|  |  | ||||||
|         .. versionchanged:: 0.6.13 Now doesnt modify the cursor |  | ||||||
|         """ |  | ||||||
|         if self._iter: |         if self._iter: | ||||||
|             return '.. queryset mid-iteration ..' |             return ".. queryset mid-iteration .." | ||||||
|  |  | ||||||
|         data = [] |         data = [] | ||||||
|         for i in xrange(REPR_OUTPUT_SIZE + 1): |         for _ in range(REPR_OUTPUT_SIZE + 1): | ||||||
|             try: |             try: | ||||||
|                 data.append(self.next()) |                 data.append(next(self)) | ||||||
|             except StopIteration: |             except StopIteration: | ||||||
|                 break |                 break | ||||||
|  |  | ||||||
|         if len(data) > REPR_OUTPUT_SIZE: |         if len(data) > REPR_OUTPUT_SIZE: | ||||||
|             data[-1] = "...(remaining elements truncated)..." |             data[-1] = "...(remaining elements truncated)..." | ||||||
|  |  | ||||||
| @@ -155,10 +188,3 @@ class QuerySetNoCache(BaseQuerySet): | |||||||
|             queryset = self.clone() |             queryset = self.clone() | ||||||
|         queryset.rewind() |         queryset.rewind() | ||||||
|         return queryset |         return queryset | ||||||
|  |  | ||||||
|  |  | ||||||
| class QuerySetNoDeRef(QuerySet): |  | ||||||
|     """Special no_dereference QuerySet""" |  | ||||||
|  |  | ||||||
|     def __dereference(items, max_depth=1, instance=None, name=None): |  | ||||||
|         return items |  | ||||||
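A sketch contrasting the caching QuerySet with QuerySetNoCache, assuming a running mongod and a hypothetical database; the Number document exists only for illustration.

    from mongoengine import Document, IntField, connect

    connect("example_db")  # hypothetical database name

    class Number(Document):
        value = IntField()

    qs = Number.objects                      # default, caching QuerySet
    print(len(qs))                           # fills the result cache in ITER_CHUNK_SIZE batches
    print(qs)                                # repr() is served from the cache (at most 20 items shown)

    no_cache_qs = Number.objects.no_cache()  # QuerySetNoCache re-queries on demand
    print(no_cache_qs.count())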
| @@ -1,45 +1,74 @@ | |||||||
| from collections import defaultdict | from collections import defaultdict | ||||||
|  |  | ||||||
| import pymongo | import pymongo | ||||||
| from bson import SON | from bson import SON, ObjectId | ||||||
|  | from bson.dbref import DBRef | ||||||
|  |  | ||||||
| from mongoengine.connection import get_connection | from mongoengine.base import UPDATE_OPERATORS | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.errors import InvalidQueryError, LookUpError | from mongoengine.errors import InvalidQueryError | ||||||
|  |  | ||||||
| __all__ = ('query', 'update') | __all__ = ("query", "update", "STRING_OPERATORS") | ||||||
|  |  | ||||||
|  | COMPARISON_OPERATORS = ( | ||||||
|  |     "ne", | ||||||
|  |     "gt", | ||||||
|  |     "gte", | ||||||
|  |     "lt", | ||||||
|  |     "lte", | ||||||
|  |     "in", | ||||||
|  |     "nin", | ||||||
|  |     "mod", | ||||||
|  |     "all", | ||||||
|  |     "size", | ||||||
|  |     "exists", | ||||||
|  |     "not", | ||||||
|  |     "elemMatch", | ||||||
|  |     "type", | ||||||
|  | ) | ||||||
|  | GEO_OPERATORS = ( | ||||||
|  |     "within_distance", | ||||||
|  |     "within_spherical_distance", | ||||||
|  |     "within_box", | ||||||
|  |     "within_polygon", | ||||||
|  |     "near", | ||||||
|  |     "near_sphere", | ||||||
|  |     "max_distance", | ||||||
|  |     "min_distance", | ||||||
|  |     "geo_within", | ||||||
|  |     "geo_within_box", | ||||||
|  |     "geo_within_polygon", | ||||||
|  |     "geo_within_center", | ||||||
|  |     "geo_within_sphere", | ||||||
|  |     "geo_intersects", | ||||||
|  | ) | ||||||
|  | STRING_OPERATORS = ( | ||||||
|  |     "contains", | ||||||
|  |     "icontains", | ||||||
|  |     "startswith", | ||||||
|  |     "istartswith", | ||||||
|  |     "endswith", | ||||||
|  |     "iendswith", | ||||||
|  |     "exact", | ||||||
|  |     "iexact", | ||||||
|  | ) | ||||||
|  | CUSTOM_OPERATORS = ("match",) | ||||||
|  | MATCH_OPERATORS = ( | ||||||
|  |     COMPARISON_OPERATORS + GEO_OPERATORS + STRING_OPERATORS + CUSTOM_OPERATORS | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
| COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', | # TODO make this less complex | ||||||
|                         'all', 'size', 'exists', 'not', 'elemMatch', 'type') | def query(_doc_cls=None, **kwargs): | ||||||
| GEO_OPERATORS = ('within_distance', 'within_spherical_distance', |     """Transform a query from Django-style format to Mongo format.""" | ||||||
|                  'within_box', 'within_polygon', 'near', 'near_sphere', |  | ||||||
|                  'max_distance', 'geo_within', 'geo_within_box', |  | ||||||
|                  'geo_within_polygon', 'geo_within_center', |  | ||||||
|                  'geo_within_sphere', 'geo_intersects') |  | ||||||
| STRING_OPERATORS = ('contains', 'icontains', 'startswith', |  | ||||||
|                     'istartswith', 'endswith', 'iendswith', |  | ||||||
|                     'exact', 'iexact') |  | ||||||
| CUSTOM_OPERATORS = ('match',) |  | ||||||
| MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS + |  | ||||||
|                    STRING_OPERATORS + CUSTOM_OPERATORS) |  | ||||||
|  |  | ||||||
| UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push', |  | ||||||
|                     'push_all', 'pull', 'pull_all', 'add_to_set', |  | ||||||
|                     'set_on_insert', 'min', 'max') |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def query(_doc_cls=None, _field_operation=False, **query): |  | ||||||
|     """Transform a query from Django-style format to Mongo format. |  | ||||||
|     """ |  | ||||||
|     mongo_query = {} |     mongo_query = {} | ||||||
|     merge_query = defaultdict(list) |     merge_query = defaultdict(list) | ||||||
|     for key, value in sorted(query.items()): |     for key, value in sorted(kwargs.items()): | ||||||
|         if key == "__raw__": |         if key == "__raw__": | ||||||
|             mongo_query.update(value) |             mongo_query.update(value) | ||||||
|             continue |             continue | ||||||
|  |  | ||||||
|         parts = key.rsplit('__') |         parts = key.rsplit("__") | ||||||
|         indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()] |         indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()] | ||||||
|         parts = [part for part in parts if not part.isdigit()] |         parts = [part for part in parts if not part.isdigit()] | ||||||
|         # Check for an operator and transform to mongo-style if there is |         # Check for an operator and transform to mongo-style if there is | ||||||
| @@ -47,8 +76,12 @@ def query(_doc_cls=None, _field_operation=False, **query): | |||||||
|         if len(parts) > 1 and parts[-1] in MATCH_OPERATORS: |         if len(parts) > 1 and parts[-1] in MATCH_OPERATORS: | ||||||
|             op = parts.pop() |             op = parts.pop() | ||||||
|  |  | ||||||
|  |         # Allow to escape operator-like field name by __ | ||||||
|  |         if len(parts) > 1 and parts[-1] == "": | ||||||
|  |             parts.pop() | ||||||
|  |  | ||||||
|         negate = False |         negate = False | ||||||
|         if len(parts) > 1 and parts[-1] == 'not': |         if len(parts) > 1 and parts[-1] == "not": | ||||||
|             parts.pop() |             parts.pop() | ||||||
|             negate = True |             negate = True | ||||||
|  |  | ||||||
| @@ -56,21 +89,22 @@ def query(_doc_cls=None, _field_operation=False, **query): | |||||||
|             # Switch field names to proper names [set in Field(name='foo')] |             # Switch field names to proper names [set in Field(name='foo')] | ||||||
|             try: |             try: | ||||||
|                 fields = _doc_cls._lookup_field(parts) |                 fields = _doc_cls._lookup_field(parts) | ||||||
|             except Exception, e: |             except Exception as e: | ||||||
|                 raise InvalidQueryError(e) |                 raise InvalidQueryError(e) | ||||||
|             parts = [] |             parts = [] | ||||||
|  |  | ||||||
|             CachedReferenceField = _import_class('CachedReferenceField') |             CachedReferenceField = _import_class("CachedReferenceField") | ||||||
|  |             GenericReferenceField = _import_class("GenericReferenceField") | ||||||
|  |  | ||||||
|             cleaned_fields = [] |             cleaned_fields = [] | ||||||
|             for field in fields: |             for field in fields: | ||||||
|                 append_field = True |                 append_field = True | ||||||
|                 if isinstance(field, basestring): |                 if isinstance(field, str): | ||||||
|                     parts.append(field) |                     parts.append(field) | ||||||
|                     append_field = False |                     append_field = False | ||||||
|                 # is last and CachedReferenceField |                 # is last and CachedReferenceField | ||||||
|                 elif isinstance(field, CachedReferenceField) and fields[-1] == field: |                 elif isinstance(field, CachedReferenceField) and fields[-1] == field: | ||||||
|                     parts.append('%s._id' % field.db_field) |                     parts.append("%s._id" % field.db_field) | ||||||
|                 else: |                 else: | ||||||
|                     parts.append(field.db_field) |                     parts.append(field.db_field) | ||||||
|  |  | ||||||
| @@ -80,74 +114,95 @@ def query(_doc_cls=None, _field_operation=False, **query): | |||||||
|             # Convert value to proper value |             # Convert value to proper value | ||||||
|             field = cleaned_fields[-1] |             field = cleaned_fields[-1] | ||||||
|  |  | ||||||
|             singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] |             singular_ops = [None, "ne", "gt", "gte", "lt", "lte", "not"] | ||||||
|             singular_ops += STRING_OPERATORS |             singular_ops += STRING_OPERATORS | ||||||
|             if op in singular_ops: |             if op in singular_ops: | ||||||
|                 if isinstance(field, basestring): |                 value = field.prepare_query_value(op, value) | ||||||
|                     if (op in STRING_OPERATORS and |  | ||||||
|                             isinstance(value, basestring)): |  | ||||||
|                         StringField = _import_class('StringField') |  | ||||||
|                         value = StringField.prepare_query_value(op, value) |  | ||||||
|                     else: |  | ||||||
|                         value = field |  | ||||||
|                 else: |  | ||||||
|                     value = field.prepare_query_value(op, value) |  | ||||||
|  |  | ||||||
|                     if isinstance(field, CachedReferenceField) and value: |                 if isinstance(field, CachedReferenceField) and value: | ||||||
|                         value = value['_id'] |                     value = value["_id"] | ||||||
|  |  | ||||||
|             elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): |             elif op in ("in", "nin", "all", "near") and not isinstance(value, dict): | ||||||
|                 # 'in', 'nin' and 'all' require a list of values |                 # Raise an error if the in/nin/all/near param is not iterable. | ||||||
|                 value = [field.prepare_query_value(op, v) for v in value] |                 value = _prepare_query_for_iterable(field, op, value) | ||||||
|  |  | ||||||
|  |             # If we're querying a GenericReferenceField, we need to alter the | ||||||
|  |             # key depending on the value: | ||||||
|  |             # * If the value is a DBRef, the key should be "field_name._ref". | ||||||
|  |             # * If the value is an ObjectId, the key should be "field_name._ref.$id". | ||||||
|  |             if isinstance(field, GenericReferenceField): | ||||||
|  |                 if isinstance(value, DBRef): | ||||||
|  |                     parts[-1] += "._ref" | ||||||
|  |                 elif isinstance(value, ObjectId): | ||||||
|  |                     parts[-1] += "._ref.$id" | ||||||
|  |  | ||||||
|         # if op and op not in COMPARISON_OPERATORS: |         # if op and op not in COMPARISON_OPERATORS: | ||||||
|         if op: |         if op: | ||||||
|             if op in GEO_OPERATORS: |             if op in GEO_OPERATORS: | ||||||
|                 value = _geo_operator(field, op, value) |                 value = _geo_operator(field, op, value) | ||||||
|             elif op in CUSTOM_OPERATORS: |             elif op in ("match", "elemMatch"): | ||||||
|                 if op in ('elem_match', 'match'): |                 ListField = _import_class("ListField") | ||||||
|                     value = field.prepare_query_value(op, value) |                 EmbeddedDocumentField = _import_class("EmbeddedDocumentField") | ||||||
|                     value = {"$elemMatch": value} |                 if ( | ||||||
|  |                     isinstance(value, dict) | ||||||
|  |                     and isinstance(field, ListField) | ||||||
|  |                     and isinstance(field.field, EmbeddedDocumentField) | ||||||
|  |                 ): | ||||||
|  |                     value = query(field.field.document_type, **value) | ||||||
|                 else: |                 else: | ||||||
|                     NotImplementedError("Custom method '%s' has not " |                     value = field.prepare_query_value(op, value) | ||||||
|                                         "been implemented" % op) |                 value = {"$elemMatch": value} | ||||||
|  |             elif op in CUSTOM_OPERATORS: | ||||||
|  |                 NotImplementedError( | ||||||
|  |                     'Custom method "%s" has not ' "been implemented" % op | ||||||
|  |                 ) | ||||||
|             elif op not in STRING_OPERATORS: |             elif op not in STRING_OPERATORS: | ||||||
|                 value = {'$' + op: value} |                 value = {"$" + op: value} | ||||||
|  |  | ||||||
|         if negate: |         if negate: | ||||||
|             value = {'$not': value} |             value = {"$not": value} | ||||||
|  |  | ||||||
|         for i, part in indices: |         for i, part in indices: | ||||||
|             parts.insert(i, part) |             parts.insert(i, part) | ||||||
|         key = '.'.join(parts) |  | ||||||
|         if op is None or key not in mongo_query: |  | ||||||
|             mongo_query[key] = value |  | ||||||
|         elif key in mongo_query: |  | ||||||
|             if key in mongo_query and isinstance(mongo_query[key], dict): |  | ||||||
|                 mongo_query[key].update(value) |  | ||||||
|                 # $maxDistance needs to come last - convert to SON |  | ||||||
|                 value_dict = mongo_query[key] |  | ||||||
|                 if ('$maxDistance' in value_dict and '$near' in value_dict): |  | ||||||
|                     value_son = SON() |  | ||||||
|                     if isinstance(value_dict['$near'], dict): |  | ||||||
|                         for k, v in value_dict.iteritems(): |  | ||||||
|                             if k == '$maxDistance': |  | ||||||
|                                 continue |  | ||||||
|                             value_son[k] = v |  | ||||||
|                         if (get_connection().max_wire_version <= 1): |  | ||||||
|                             value_son['$maxDistance'] = value_dict[ |  | ||||||
|                                 '$maxDistance'] |  | ||||||
|                         else: |  | ||||||
|                             value_son['$near'] = SON(value_son['$near']) |  | ||||||
|                             value_son['$near'][ |  | ||||||
|                                 '$maxDistance'] = value_dict['$maxDistance'] |  | ||||||
|                     else: |  | ||||||
|                         for k, v in value_dict.iteritems(): |  | ||||||
|                             if k == '$maxDistance': |  | ||||||
|                                 continue |  | ||||||
|                             value_son[k] = v |  | ||||||
|                         value_son['$maxDistance'] = value_dict['$maxDistance'] |  | ||||||
|  |  | ||||||
|  |         key = ".".join(parts) | ||||||
|  |  | ||||||
|  |         if key not in mongo_query: | ||||||
|  |             mongo_query[key] = value | ||||||
|  |         else: | ||||||
|  |             if isinstance(mongo_query[key], dict) and isinstance(value, dict): | ||||||
|  |                 mongo_query[key].update(value) | ||||||
|  |                 # $max/minDistance needs to come last - convert to SON | ||||||
|  |                 value_dict = mongo_query[key] | ||||||
|  |                 if ("$maxDistance" in value_dict or "$minDistance" in value_dict) and ( | ||||||
|  |                     "$near" in value_dict or "$nearSphere" in value_dict | ||||||
|  |                 ): | ||||||
|  |                     value_son = SON() | ||||||
|  |                     for k, v in value_dict.items(): | ||||||
|  |                         if k == "$maxDistance" or k == "$minDistance": | ||||||
|  |                             continue | ||||||
|  |                         value_son[k] = v | ||||||
|  |                     # Required for MongoDB >= 2.6, may fail when combining | ||||||
|  |                     # PyMongo 3+ and MongoDB < 2.6 | ||||||
|  |                     near_embedded = False | ||||||
|  |                     for near_op in ("$near", "$nearSphere"): | ||||||
|  |                         if isinstance(value_dict.get(near_op), dict): | ||||||
|  |                             value_son[near_op] = SON(value_son[near_op]) | ||||||
|  |                             if "$maxDistance" in value_dict: | ||||||
|  |                                 value_son[near_op]["$maxDistance"] = value_dict[ | ||||||
|  |                                     "$maxDistance" | ||||||
|  |                                 ] | ||||||
|  |                             if "$minDistance" in value_dict: | ||||||
|  |                                 value_son[near_op]["$minDistance"] = value_dict[ | ||||||
|  |                                     "$minDistance" | ||||||
|  |                                 ] | ||||||
|  |                             near_embedded = True | ||||||
|  |  | ||||||
|  |                     if not near_embedded: | ||||||
|  |                         if "$maxDistance" in value_dict: | ||||||
|  |                             value_son["$maxDistance"] = value_dict["$maxDistance"] | ||||||
|  |                         if "$minDistance" in value_dict: | ||||||
|  |                             value_son["$minDistance"] = value_dict["$minDistance"] | ||||||
|                     mongo_query[key] = value_son |                     mongo_query[key] = value_son | ||||||
|             else: |             else: | ||||||
|                 # Store for manually merging later |                 # Store for manually merging later | ||||||
| @@ -159,53 +214,64 @@ def query(_doc_cls=None, _field_operation=False, **query): | |||||||
|         del mongo_query[k] |         del mongo_query[k] | ||||||
|         if isinstance(v, list): |         if isinstance(v, list): | ||||||
|             value = [{k: val} for val in v] |             value = [{k: val} for val in v] | ||||||
|             if '$and' in mongo_query.keys(): |             if "$and" in mongo_query.keys(): | ||||||
|                 mongo_query['$and'].extend(value) |                 mongo_query["$and"].extend(value) | ||||||
|             else: |             else: | ||||||
|                 mongo_query['$and'] = value |                 mongo_query["$and"] = value | ||||||
|  |  | ||||||
|     return mongo_query |     return mongo_query | ||||||
|  |  | ||||||
|  |  | ||||||
| def update(_doc_cls=None, **update): | def update(_doc_cls=None, **update): | ||||||
|     """Transform an update spec from Django-style format to Mongo format. |     """Transform an update spec from Django-style format to Mongo | ||||||
|  |     format. | ||||||
|     """ |     """ | ||||||
|     mongo_update = {} |     mongo_update = {} | ||||||
|  |  | ||||||
|     for key, value in update.items(): |     for key, value in update.items(): | ||||||
|         if key == "__raw__": |         if key == "__raw__": | ||||||
|             mongo_update.update(value) |             mongo_update.update(value) | ||||||
|             continue |             continue | ||||||
|         parts = key.split('__') |  | ||||||
|         # if there is no operator, default to "set" |         parts = key.split("__") | ||||||
|  |  | ||||||
|  |         # if there is no operator, default to 'set' | ||||||
|         if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: |         if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: | ||||||
|             parts.insert(0, 'set') |             parts.insert(0, "set") | ||||||
|  |  | ||||||
|         # Check for an operator and transform to mongo-style if there is |         # Check for an operator and transform to mongo-style if there is | ||||||
|         op = None |         op = None | ||||||
|         if parts[0] in UPDATE_OPERATORS: |         if parts[0] in UPDATE_OPERATORS: | ||||||
|             op = parts.pop(0) |             op = parts.pop(0) | ||||||
|             # Convert Pythonic names to Mongo equivalents |             # Convert Pythonic names to Mongo equivalents | ||||||
|             if op in ('push_all', 'pull_all'): |             operator_map = { | ||||||
|                 op = op.replace('_all', 'All') |                 "push_all": "pushAll", | ||||||
|             elif op == 'dec': |                 "pull_all": "pullAll", | ||||||
|  |                 "dec": "inc", | ||||||
|  |                 "add_to_set": "addToSet", | ||||||
|  |                 "set_on_insert": "setOnInsert", | ||||||
|  |             } | ||||||
|  |             if op == "dec": | ||||||
|                 # Support decrement by flipping a positive value's sign |                 # Support decrement by flipping a positive value's sign | ||||||
|                 # and using 'inc' |                 # and using 'inc' | ||||||
|                 op = 'inc' |                 value = -value | ||||||
|                 if value > 0: |             # If the operator isn't found in the operator map, the op value | ||||||
|                     value = -value |             # will stay unchanged | ||||||
|             elif op == 'add_to_set': |             op = operator_map.get(op, op) | ||||||
|                 op = 'addToSet' |  | ||||||
|             elif op == 'set_on_insert': |  | ||||||
|                 op = "setOnInsert" |  | ||||||
|  |  | ||||||
|         match = None |         match = None | ||||||
|         if parts[-1] in COMPARISON_OPERATORS: |         if parts[-1] in COMPARISON_OPERATORS: | ||||||
|             match = parts.pop() |             match = parts.pop() | ||||||
|  |  | ||||||
|  |         # Allow to escape operator-like field name by __ | ||||||
|  |         if len(parts) > 1 and parts[-1] == "": | ||||||
|  |             parts.pop() | ||||||
|  |  | ||||||
|         if _doc_cls: |         if _doc_cls: | ||||||
|             # Switch field names to proper names [set in Field(name='foo')] |             # Switch field names to proper names [set in Field(name='foo')] | ||||||
|             try: |             try: | ||||||
|                 fields = _doc_cls._lookup_field(parts) |                 fields = _doc_cls._lookup_field(parts) | ||||||
|             except Exception, e: |             except Exception as e: | ||||||
|                 raise InvalidQueryError(e) |                 raise InvalidQueryError(e) | ||||||
|             parts = [] |             parts = [] | ||||||
|  |  | ||||||
| @@ -213,10 +279,10 @@ def update(_doc_cls=None, **update): | |||||||
|             appended_sub_field = False |             appended_sub_field = False | ||||||
|             for field in fields: |             for field in fields: | ||||||
|                 append_field = True |                 append_field = True | ||||||
|                 if isinstance(field, basestring): |                 if isinstance(field, str): | ||||||
|                     # Convert the S operator to $ |                     # Convert the S operator to $ | ||||||
|                     if field == 'S': |                     if field == "S": | ||||||
|                         field = '$' |                         field = "$" | ||||||
|                     parts.append(field) |                     parts.append(field) | ||||||
|                     append_field = False |                     append_field = False | ||||||
|                 else: |                 else: | ||||||
| @@ -224,7 +290,7 @@ def update(_doc_cls=None, **update): | |||||||
|                 if append_field: |                 if append_field: | ||||||
|                     appended_sub_field = False |                     appended_sub_field = False | ||||||
|                     cleaned_fields.append(field) |                     cleaned_fields.append(field) | ||||||
|                     if hasattr(field, 'field'): |                     if hasattr(field, "field"): | ||||||
|                         cleaned_fields.append(field.field) |                         cleaned_fields.append(field.field) | ||||||
|                         appended_sub_field = True |                         appended_sub_field = True | ||||||
|  |  | ||||||
| @@ -238,46 +304,58 @@ def update(_doc_cls=None, **update): | |||||||
|             if isinstance(field, GeoJsonBaseField): |             if isinstance(field, GeoJsonBaseField): | ||||||
|                 value = field.to_mongo(value) |                 value = field.to_mongo(value) | ||||||
|  |  | ||||||
|             if op in (None, 'set', 'push', 'pull'): |             if op == "pull": | ||||||
|  |                 if field.required or value is not None: | ||||||
|  |                     if match in ("in", "nin") and not isinstance(value, dict): | ||||||
|  |                         value = _prepare_query_for_iterable(field, op, value) | ||||||
|  |                     else: | ||||||
|  |                         value = field.prepare_query_value(op, value) | ||||||
|  |             elif op == "push" and isinstance(value, (list, tuple, set)): | ||||||
|  |                 value = [field.prepare_query_value(op, v) for v in value] | ||||||
|  |             elif op in (None, "set", "push"): | ||||||
|                 if field.required or value is not None: |                 if field.required or value is not None: | ||||||
|                     value = field.prepare_query_value(op, value) |                     value = field.prepare_query_value(op, value) | ||||||
|             elif op in ('pushAll', 'pullAll'): |             elif op in ("pushAll", "pullAll"): | ||||||
|                 value = [field.prepare_query_value(op, v) for v in value] |                 value = [field.prepare_query_value(op, v) for v in value] | ||||||
|             elif op in ('addToSet', 'setOnInsert'): |             elif op in ("addToSet", "setOnInsert"): | ||||||
|                 if isinstance(value, (list, tuple, set)): |                 if isinstance(value, (list, tuple, set)): | ||||||
|                     value = [field.prepare_query_value(op, v) for v in value] |                     value = [field.prepare_query_value(op, v) for v in value] | ||||||
|                 elif field.required or value is not None: |                 elif field.required or value is not None: | ||||||
|                     value = field.prepare_query_value(op, value) |                     value = field.prepare_query_value(op, value) | ||||||
|             elif op == "unset": |             elif op == "unset": | ||||||
|                 value = 1 |                 value = 1 | ||||||
|  |             elif op == "inc": | ||||||
|  |                 value = field.prepare_query_value(op, value) | ||||||
|  |  | ||||||
|         if match: |         if match: | ||||||
|             match = '$' + match |             match = "$" + match | ||||||
|             value = {match: value} |             value = {match: value} | ||||||
|  |  | ||||||
|         key = '.'.join(parts) |         key = ".".join(parts) | ||||||
|  |  | ||||||
|         if not op: |         if "pull" in op and "." in key: | ||||||
|             raise InvalidQueryError("Updates must supply an operation " |  | ||||||
|                                     "eg: set__FIELD=value") |  | ||||||
|  |  | ||||||
|         if 'pull' in op and '.' in key: |  | ||||||
|             # Dot operators don't work on pull operations |             # Dot operators don't work on pull operations | ||||||
|             # unless they point to a list field |             # unless they point to a list field | ||||||
|             # Otherwise it uses nested dict syntax |             # Otherwise it uses nested dict syntax | ||||||
|             if op == 'pullAll': |             if op == "pullAll": | ||||||
-                raise InvalidQueryError("pullAll operations only support "
-                                        "a single field depth")
+                raise InvalidQueryError(
+                    "pullAll operations only support a single field depth"
+                )
 
             # Look for the last list field and use dot notation until there
             field_classes = [c.__class__ for c in cleaned_fields]
             field_classes.reverse()
-            ListField = _import_class('ListField')
-            if ListField in field_classes:
-                # Join all fields via dot notation to the last ListField
+            ListField = _import_class("ListField")
+            EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField")
+            if ListField in field_classes or EmbeddedDocumentListField in field_classes:
+                # Join all fields via dot notation to the last ListField or EmbeddedDocumentListField
                 # Then process as normal
-                last_listField = len(
-                    cleaned_fields) - field_classes.index(ListField)
+                if ListField in field_classes:
+                    _check_field = ListField
+                else:
+                    _check_field = EmbeddedDocumentListField
+
+                last_listField = len(cleaned_fields) - field_classes.index(_check_field)
                 key = ".".join(parts[:last_listField])
                 parts = parts[last_listField:]
                 parts.insert(0, key)
@@ -285,12 +363,28 @@ def update(_doc_cls=None, **update):
             parts.reverse()
             for key in parts:
                 value = {key: value}
-        elif op == 'addToSet' and isinstance(value, list):
+        elif op == "addToSet" and isinstance(value, list):
             value = {key: {"$each": value}}
+        elif op in ("push", "pushAll"):
+            if parts[-1].isdigit():
+                key = ".".join(parts[0:-1])
+                position = int(parts[-1])
+                # $position expects an iterable. If pushing a single value,
+                # wrap it in a list.
+                if not isinstance(value, (set, tuple, list)):
+                    value = [value]
+                value = {key: {"$each": value, "$position": position}}
+            else:
+                if op == "pushAll":
+                    op = "push"  # convert to non-deprecated keyword
+                    if not isinstance(value, (set, tuple, list)):
+                        value = [value]
+                    value = {key: {"$each": value}}
+                else:
+                    value = {key: value}
         else:
             value = {key: value}
-        key = '$' + op
+        key = "$" + op
 
         if key not in mongo_update:
            mongo_update[key] = value
        elif key in mongo_update and isinstance(mongo_update[key], dict):
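
For context, a minimal hypothetical sketch (not part of the diff) of what the new push/pushAll branch produces; the BlogPost model, post_id and tag values are made up.

from mongoengine import Document, ListField, StringField


class BlogPost(Document):
    tags = ListField(StringField())


# Assuming an open connection, a positional push such as
#   BlogPost.objects(id=post_id).update(push__tags__0="python")
# is translated by the branch above into roughly
#   {"$push": {"tags": {"$each": ["python"], "$position": 0}}}
# while the deprecated push_all keyword is rewritten to $push/$each:
#   BlogPost.objects(id=post_id).update(push_all__tags=["a", "b"])
#   -> {"$push": {"tags": {"$each": ["a", "b"]}}}
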
@@ -300,25 +394,28 @@ def update(_doc_cls=None, **update):
 
 
 def _geo_operator(field, op, value):
-    """Helper to return the query for a given geo query"""
-    if field._geo_index == pymongo.GEO2D:
+    """Helper to return the query for a given geo query."""
+    if op == "max_distance":
+        value = {"$maxDistance": value}
+    elif op == "min_distance":
+        value = {"$minDistance": value}
+    elif field._geo_index == pymongo.GEO2D:
         if op == "within_distance":
-            value = {'$within': {'$center': value}}
+            value = {"$within": {"$center": value}}
         elif op == "within_spherical_distance":
-            value = {'$within': {'$centerSphere': value}}
+            value = {"$within": {"$centerSphere": value}}
         elif op == "within_polygon":
-            value = {'$within': {'$polygon': value}}
+            value = {"$within": {"$polygon": value}}
         elif op == "near":
-            value = {'$near': value}
+            value = {"$near": value}
         elif op == "near_sphere":
-            value = {'$nearSphere': value}
-        elif op == 'within_box':
-            value = {'$within': {'$box': value}}
-        elif op == "max_distance":
-            value = {'$maxDistance': value}
+            value = {"$nearSphere": value}
+        elif op == "within_box":
+            value = {"$within": {"$box": value}}
         else:
-            raise NotImplementedError("Geo method '%s' has not "
-                                      "been implemented for a GeoPointField" % op)
+            raise NotImplementedError(
+                'Geo method "%s" has not been ' "implemented for a GeoPointField" % op
+            )
     else:
         if op == "geo_within":
             value = {"$geoWithin": _infer_geometry(value)}
@@ -333,40 +430,73 @@ def _geo_operator(field, op, value):
         elif op == "geo_intersects":
             value = {"$geoIntersects": _infer_geometry(value)}
         elif op == "near":
-            value = {'$near': _infer_geometry(value)}
-        elif op == "max_distance":
-            value = {'$maxDistance': value}
+            value = {"$near": _infer_geometry(value)}
         else:
-            raise NotImplementedError("Geo method '%s' has not "
-                                      "been implemented for a %s " % (op, field._name))
+            raise NotImplementedError(
+                'Geo method "{}" has not been implemented for a {} '.format(
+                    op, field._name
+                )
+            )
     return value
 
 
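Purely illustrative (not part of the diff): how the geo operators above are spelled at the queryset level. The Place model and the coordinates are made up.

from mongoengine import Document, PointField


class Place(Document):
    location = PointField()


# With a connection open, a query such as
#   Place.objects(location__near=[-73.97, 40.77], location__max_distance=5000)
# is resolved through _geo_operator() into roughly
#   {"location": {"$near": {"$geometry": {"type": "Point",
#                                         "coordinates": [-73.97, 40.77]}},
#                 "$maxDistance": 5000}}
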
 def _infer_geometry(value):
-    """Helper method that tries to infer the $geometry shape for a given value"""
+    """Helper method that tries to infer the $geometry shape for a
+    given value.
+    """
     if isinstance(value, dict):
         if "$geometry" in value:
             return value
-        elif 'coordinates' in value and 'type' in value:
+        elif "coordinates" in value and "type" in value:
             return {"$geometry": value}
-        raise InvalidQueryError("Invalid $geometry dictionary should have "
-                                "type and coordinates keys")
+        raise InvalidQueryError(
+            "Invalid $geometry dictionary should have type and coordinates keys"
+        )
     elif isinstance(value, (list, set)):
+        # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?
+
         try:
             value[0][0][0]
             return {"$geometry": {"type": "Polygon", "coordinates": value}}
-        except:
+        except (TypeError, IndexError):
             pass
 
         try:
             value[0][0]
             return {"$geometry": {"type": "LineString", "coordinates": value}}
-        except:
+        except (TypeError, IndexError):
             pass
 
         try:
             value[0]
             return {"$geometry": {"type": "Point", "coordinates": value}}
-        except:
+        except (TypeError, IndexError):
             pass
 
-    raise InvalidQueryError("Invalid $geometry data. Can be either a dictionary "
-                            "or (nested) lists of coordinate(s)")
+    raise InvalidQueryError(
+        "Invalid $geometry data. Can be either a "
+        "dictionary or (nested) lists of coordinate(s)"
+    )
+
+
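A quick sketch of the inference order, exercising the private helper directly (illustration only; the coordinates are arbitrary):

from mongoengine.queryset.transform import _infer_geometry

# Nesting depth decides the GeoJSON type: a flat pair -> Point,
# a list of pairs -> LineString, a list of rings -> Polygon.
assert _infer_geometry([40, 5]) == {
    "$geometry": {"type": "Point", "coordinates": [40, 5]}
}
assert _infer_geometry([[40, 5], [41, 6]]) == {
    "$geometry": {"type": "LineString", "coordinates": [[40, 5], [41, 6]]}
}
assert _infer_geometry([[[40, 5], [41, 6], [40, 6], [40, 5]]]) == {
    "$geometry": {
        "type": "Polygon",
        "coordinates": [[[40, 5], [41, 6], [40, 6], [40, 5]]],
    }
}
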
+def _prepare_query_for_iterable(field, op, value):
+    # We need a special check for BaseDocument, because - although it's iterable - using
+    # it as such in the context of this method is most definitely a mistake.
+    BaseDocument = _import_class("BaseDocument")
+
+    if isinstance(value, BaseDocument):
+        raise TypeError(
+            "When using the `in`, `nin`, `all`, or "
+            "`near`-operators you can't use a "
+            "`Document`, you must wrap your object "
+            "in a list (object -> [object])."
+        )
+
+    if not hasattr(value, "__iter__"):
+        raise TypeError(
+            "The `in`, `nin`, `all`, or "
+            "`near`-operators must be applied to an "
+            "iterable (e.g. a list)."
+        )
+
+    return [field.prepare_query_value(op, v) for v in value]
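
Illustration only: the guard above turns a common mistake into a clear TypeError. The Author/Book models and some_author variable are hypothetical.

from mongoengine import Document, ReferenceField, StringField


class Author(Document):
    name = StringField()


class Book(Document):
    author = ReferenceField(Author)


# Wrong: passing a single Document to an `in` query now raises TypeError.
#   Book.objects(author__in=some_author)
# Right: wrap the document in a list.
#   Book.objects(author__in=[some_author])
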
mongoengine/queryset/visitor.py

@@ -1,26 +1,26 @@
 import copy
-from itertools import product
-from functools import reduce
+import warnings
 
 from mongoengine.errors import InvalidQueryError
 from mongoengine.queryset import transform
 
-__all__ = ('Q',)
+__all__ = ("Q", "QNode")
 
 
-class QNodeVisitor(object):
-    """Base visitor class for visiting Q-object nodes in a query tree.
-    """
+def warn_empty_is_deprecated():
+    msg = "'empty' property is deprecated in favour of using 'not bool(filter)'"
+    warnings.warn(msg, DeprecationWarning, stacklevel=2)
+
+
+class QNodeVisitor:
+    """Base visitor class for visiting Q-object nodes in a query tree."""
 
     def visit_combination(self, combination):
-        """Called by QCombination objects.
-        """
+        """Called by QCombination objects."""
         return combination
 
     def visit_query(self, query):
-        """Called by (New)Q objects.
-        """
+        """Called by (New)Q objects."""
         return query
 
 
@@ -46,8 +46,7 @@ class SimplificationVisitor(QNodeVisitor):
         return combination
 
     def _query_conjunction(self, queries):
-        """Merges query dicts - effectively &ing them together.
-        """
+        """Merges query dicts - effectively &ing them together."""
         query_ops = set()
         combined_query = {}
         for query in queries:
@@ -81,9 +80,8 @@ class QueryCompilerVisitor(QNodeVisitor):
         return transform.query(self.document, **query.query)
 
 
-class QNode(object):
-    """Base class for nodes in query trees.
-    """
+class QNode:
+    """Base class for nodes in query trees."""
 
     AND = 0
     OR = 1
@@ -97,18 +95,22 @@ class QNode(object):
         raise NotImplementedError
 
     def _combine(self, other, operation):
-        """Combine this node with another node into a QCombination object.
+        """Combine this node with another node into a QCombination
+        object.
         """
-        if getattr(other, 'empty', True):
+        # If the other Q() is empty, ignore it and just use `self`.
+        if not bool(other):
             return self
 
-        if self.empty:
+        # Or if this Q is empty, ignore it and just use `other`.
+        if not bool(self):
             return other
 
         return QCombination(operation, [self, other])
 
     @property
     def empty(self):
+        warn_empty_is_deprecated()
         return False
 
     def __or__(self, other):
@@ -119,8 +121,8 @@ class QNode(object):
 
 
 class QCombination(QNode):
-    """Represents the combination of several conditions by a given logical
-    operator.
+    """Represents the combination of several conditions by a given
+    logical operator.
     """
 
     def __init__(self, operation, children):
@@ -134,6 +136,13 @@ class QCombination(QNode):
             else:
                 self.children.append(node)
 
+    def __repr__(self):
+        op = " & " if self.operation is self.AND else " | "
+        return "(%s)" % op.join([repr(node) for node in self.children])
+
+    def __bool__(self):
+        return bool(self.children)
+
     def accept(self, visitor):
         for i in range(len(self.children)):
             if isinstance(self.children[i], QNode):
@@ -143,8 +152,16 @@ class QCombination(QNode):
 
     @property
     def empty(self):
+        warn_empty_is_deprecated()
         return not bool(self.children)
 
+    def __eq__(self, other):
+        return (
+            self.__class__ == other.__class__
+            and self.operation == other.operation
+            and self.children == other.children
+        )
+
 
 class Q(QNode):
     """A simple query object, used in a query tree to build up more complex
@@ -154,9 +171,19 @@ class Q(QNode):
     def __init__(self, **query):
         self.query = query
 
+    def __repr__(self):
+        return "Q(**%s)" % repr(self.query)
+
+    def __bool__(self):
+        return bool(self.query)
+
+    def __eq__(self, other):
+        return self.__class__ == other.__class__ and self.query == other.query
+
     def accept(self, visitor):
         return visitor.visit_query(self)
 
     @property
     def empty(self):
+        warn_empty_is_deprecated()
         return not bool(self.query)
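
A short, purely illustrative sketch of what the new __bool__, __eq__ and __repr__ hooks give you (the field names are made up):

from mongoengine.queryset.visitor import Q

assert not bool(Q())  # an empty Q is now falsy

# Empty operands are dropped by _combine(), and __eq__ makes this checkable.
assert (Q() | Q(name="John")) == Q(name="John")

both = Q(name="John") & Q(age__gte=18)
# repr(both) is roughly "(Q(**{'name': 'John'}) & Q(**{'age__gte': 18}))"
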
mongoengine/signals.py

@@ -1,18 +1,25 @@
-# -*- coding: utf-8 -*-
-
-__all__ = ['pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
-           'post_save', 'pre_delete', 'post_delete']
+__all__ = (
+    "pre_init",
+    "post_init",
+    "pre_save",
+    "pre_save_post_validation",
+    "post_save",
+    "pre_delete",
+    "post_delete",
+)
 
 signals_available = False
 try:
     from blinker import Namespace
+
     signals_available = True
 except ImportError:
-    class Namespace(object):
+
+    class Namespace:
         def signal(self, name, doc=None):
             return _FakeSignal(name, doc)
 
-    class _FakeSignal(object):
+    class _FakeSignal:
         """If blinker is unavailable, create a fake class with the same
         interface that allows sending of signals but will fail with an
         error on anything else.  Instead of doing anything on send, it
@@ -24,24 +31,29 @@ except ImportError:
             self.__doc__ = doc
 
         def _fail(self, *args, **kwargs):
-            raise RuntimeError('signalling support is unavailable '
-                               'because the blinker library is '
-                               'not installed.')
-        send = lambda *a, **kw: None
-        connect = disconnect = has_receivers_for = receivers_for = \
-            temporarily_connected_to = _fail
+            raise RuntimeError(
+                "signalling support is unavailable "
+                "because the blinker library is "
+                "not installed."
+            )
+
+        send = lambda *a, **kw: None  # noqa
+        connect = (
+            disconnect
+        ) = has_receivers_for = receivers_for = temporarily_connected_to = _fail
         del _fail
 
 
 # the namespace for code signals.  If you are not mongoengine code, do
 # not put signals in here.  Create your own namespace instead.
 _signals = Namespace()
 
-pre_init = _signals.signal('pre_init')
-post_init = _signals.signal('post_init')
-pre_save = _signals.signal('pre_save')
-pre_save_post_validation = _signals.signal('pre_save_post_validation')
-post_save = _signals.signal('post_save')
-pre_delete = _signals.signal('pre_delete')
-post_delete = _signals.signal('post_delete')
-pre_bulk_insert = _signals.signal('pre_bulk_insert')
-post_bulk_insert = _signals.signal('post_bulk_insert')
+pre_init = _signals.signal("pre_init")
+post_init = _signals.signal("post_init")
+pre_save = _signals.signal("pre_save")
+pre_save_post_validation = _signals.signal("pre_save_post_validation")
+post_save = _signals.signal("post_save")
+pre_delete = _signals.signal("pre_delete")
+post_delete = _signals.signal("post_delete")
+pre_bulk_insert = _signals.signal("pre_bulk_insert")
+post_bulk_insert = _signals.signal("post_bulk_insert")
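
For reference, a minimal sketch of wiring a handler to one of these signals; the User model and handler are made up, and blinker must be installed for connect() to do anything useful.

from mongoengine import Document, StringField, signals


class User(Document):
    name = StringField()


def audit_save(sender, document, **kwargs):
    # Runs after every successful save of a User document.
    print("saved", document.name, "created:", kwargs.get("created"))


signals.post_save.connect(audit_save, sender=User)
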
python-mongoengine.spec

@@ -51,4 +51,4 @@ rm -rf $RPM_BUILD_ROOT
 # %{python_sitearch}/*
 
 %changelog
 * See: http://docs.mongoengine.org/en/latest/changelog.html
requirements-dev.txt (new file, 7 lines)
@@ -0,0 +1,7 @@
+black
+flake8
+pre-commit
+pytest
+ipdb
+ipython
+tox

@@ -1,2 +0,0 @@
-pymongo>=2.7.1
-nose
setup.cfg (29 lines changed)
@@ -1,11 +1,18 @@
-[nosetests]
-verbosity = 3
-detailed-errors = 1
-#with-coverage = 1
-#cover-erase = 1
-#cover-html = 1
-#cover-html-dir = ../htmlcov
-#cover-package = mongoengine
-py3where = build
-where = tests
-#tests =  document/__init__.py
+[flake8]
+ignore=E501,F403,F405,I201,I202,W504,W605,W503,B007
+exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
+max-complexity=47
+
+[tool:pytest]
+# Limits the discovery to tests directory
+# avoids that it runs for instance the benchmark
+testpaths = tests
+
+[isort]
+known_first_party = mongoengine,tests
+default_section = THIRDPARTY
+multi_line_output = 3
+include_trailing_comma = True
+combine_as_imports = True
+line_length = 70
+ensure_newline_before_comments = 1
setup.py (175 lines changed)
@@ -1,83 +1,146 @@
 import os
 import sys
-from setuptools import setup, find_packages
+
+from pkg_resources import normalize_path
+from setuptools import find_packages, setup
+from setuptools.command.test import test as TestCommand
 
 # Hack to silence atexit traceback in newer python versions
 try:
-    import multiprocessing
+    import multiprocessing  # noqa: F401
 except ImportError:
     pass
 
-DESCRIPTION = 'MongoEngine is a Python Object-Document ' + \
-'Mapper for working with MongoDB.'
-LONG_DESCRIPTION = None
+DESCRIPTION = "MongoEngine is a Python Object-Document Mapper for working with MongoDB."
+
 try:
-    LONG_DESCRIPTION = open('README.rst').read()
-except:
-    pass
+    with open("README.rst") as fin:
+        LONG_DESCRIPTION = fin.read()
+except Exception:
+    LONG_DESCRIPTION = None
 
 
 def get_version(version_tuple):
-    if not isinstance(version_tuple[-1], int):
-        return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
-    return '.'.join(map(str, version_tuple))
+    """Return the version tuple as a string, e.g. for (0, 10, 7),
+    return '0.10.7'.
+    """
+    return ".".join(map(str, version_tuple))
+
+
+class PyTest(TestCommand):
+    """Will force pytest to search for tests inside the build directory
+    for 2to3 converted code (used by tox), instead of the current directory.
+    Required as long as we need 2to3
+
+    Known Limitation: https://tox.readthedocs.io/en/latest/example/pytest.html#known-issues-and-limitations
+    Source: https://www.hackzine.org/python-testing-with-pytest-and-2to3-plus-tox-and-travis-ci.html
+    """
+
+    # https://pytest.readthedocs.io/en/2.7.3/goodpractises.html#integration-with-setuptools-test-commands
+    # Allows to provide pytest command argument through the test runner command `python setup.py test`
+    # e.g: `python setup.py test -a "-k=test"`
+    # This only works for 1 argument though
+    user_options = [("pytest-args=", "a", "Arguments to pass to py.test")]
+
+    def initialize_options(self):
+        TestCommand.initialize_options(self)
+        self.pytest_args = ""
+
+    def finalize_options(self):
+        TestCommand.finalize_options(self)
+        self.test_args = ["tests"]
+        self.test_suite = True
+
+    def run_tests(self):
+        # import here, cause outside the eggs aren't loaded
+        import pytest
+        from pkg_resources import _namespace_packages
+
+        # Purge modules under test from sys.modules. The test loader will
+        # re-import them from the build location. Required when 2to3 is used
+        # with namespace packages.
+        if sys.version_info >= (3,) and getattr(self.distribution, "use_2to3", False):
+            module = self.test_args[-1].split(".")[0]
+            if module in _namespace_packages:
+                del_modules = []
+                if module in sys.modules:
+                    del_modules.append(module)
+                module += "."
+                for name in sys.modules:
+                    if name.startswith(module):
+                        del_modules.append(name)
+                map(sys.modules.__delitem__, del_modules)
+
+            # Run on the build directory for 2to3-built code
+            # This will prevent the old 2.x code from being found
+            # by py.test discovery mechanism, that apparently
+            # ignores sys.path..
+            ei_cmd = self.get_finalized_command("egg_info")
+            self.test_args = [normalize_path(ei_cmd.egg_base)]
+
+        cmd_args = self.test_args + ([self.pytest_args] if self.pytest_args else [])
+        errno = pytest.main(cmd_args)
+        sys.exit(errno)
 
 
 # Dirty hack to get version number from monogengine/__init__.py - we can't
 # import it as it depends on PyMongo and PyMongo isn't installed until this
 # file is read
-init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
-version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]
+init = os.path.join(os.path.dirname(__file__), "mongoengine", "__init__.py")
+version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0]
 
-VERSION = get_version(eval(version_line.split('=')[-1]))
-print(VERSION)
+VERSION = get_version(eval(version_line.split("=")[-1]))
 
 CLASSIFIERS = [
-    'Development Status :: 4 - Beta',
-    'Intended Audience :: Developers',
-    'License :: OSI Approved :: MIT License',
-    'Operating System :: OS Independent',
-    'Programming Language :: Python',
-    "Programming Language :: Python :: 2",
-    "Programming Language :: Python :: 2.6",
-    "Programming Language :: Python :: 2.7",
+    "Development Status :: 5 - Production/Stable",
+    "Intended Audience :: Developers",
+    "License :: OSI Approved :: MIT License",
+    "Operating System :: OS Independent",
+    "Programming Language :: Python",
     "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3.2",
-    "Programming Language :: Python :: 3.3",
-    "Programming Language :: Python :: 3.4",
+    "Programming Language :: Python :: 3.6",
+    "Programming Language :: Python :: 3.7",
+    "Programming Language :: Python :: 3.8",
     "Programming Language :: Python :: Implementation :: CPython",
     "Programming Language :: Python :: Implementation :: PyPy",
-    'Topic :: Database',
-    'Topic :: Software Development :: Libraries :: Python Modules',
+    "Topic :: Database",
+    "Topic :: Software Development :: Libraries :: Python Modules",
 ]
 
-extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])}
-if sys.version_info[0] == 3:
-    extra_opts['use_2to3'] = True
-    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'Pillow>=2.0.0', 'django>=1.5.1']
-    if "test" in sys.argv or "nosetests" in sys.argv:
-        extra_opts['packages'] = find_packages()
-        extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]}
-else:
-    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'Pillow>=2.0.0', 'jinja2>=2.6', 'python-dateutil']
-
-    if sys.version_info[0] == 2 and sys.version_info[1] == 6:
-        extra_opts['tests_require'].append('unittest2')
+extra_opts = {
+    "packages": find_packages(exclude=["tests", "tests.*"]),
+    "tests_require": [
+        "pytest<5.0",
+        "pytest-cov",
+        "coverage",
+        "blinker",
+        "Pillow>=7.0.0",
+    ],
+}
+
+if "test" in sys.argv:
+    extra_opts["packages"] = find_packages()
+    extra_opts["package_data"] = {
+        "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]
+    }
 
-setup(name='mongoengine',
-      version=VERSION,
-      author='Harry Marr',
-      author_email='harry.marr@{nospam}gmail.com',
-      maintainer="Ross Lawley",
-      maintainer_email="ross.lawley@{nospam}gmail.com",
-      url='http://mongoengine.org/',
-      download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
-      license='MIT',
-      include_package_data=True,
-      description=DESCRIPTION,
-      long_description=LONG_DESCRIPTION,
-      platforms=['any'],
-      classifiers=CLASSIFIERS,
-      install_requires=['pymongo>=2.7.1'],
-      test_suite='nose.collector',
-      **extra_opts
+setup(
+    name="mongoengine",
+    version=VERSION,
+    author="Harry Marr",
+    author_email="harry.marr@gmail.com",
+    maintainer="Stefan Wojcik",
+    maintainer_email="wojcikstefan@gmail.com",
+    url="http://mongoengine.org/",
+    download_url="https://github.com/MongoEngine/mongoengine/tarball/master",
+    license="MIT",
+    include_package_data=True,
+    description=DESCRIPTION,
+    long_description=LONG_DESCRIPTION,
+    platforms=["any"],
+    classifiers=CLASSIFIERS,
+    python_requires=">=3.6",
+    install_requires=["pymongo>=3.4, <4.0"],
+    cmdclass={"test": PyTest},
+    **extra_opts
 )
@@ -1,5 +0,0 @@
-from all_warnings import AllWarnings
-from document import *
-from queryset import *
-from fields import *
-from migration import *

@@ -1,44 +0,0 @@
-"""
-This test has been put into a module.  This is because it tests warnings that
-only get triggered on first hit.  This way we can ensure its imported into the
-top level and called first by the test suite.
-"""
-import sys
-sys.path[0:0] = [""]
-import unittest
-import warnings
-
-from mongoengine import *
-
-
-__all__ = ('AllWarnings', )
-
-
-class AllWarnings(unittest.TestCase):
-
-    def setUp(self):
-        connect(db='mongoenginetest')
-        self.warning_list = []
-        self.showwarning_default = warnings.showwarning
-        warnings.showwarning = self.append_to_warning_list
-
-    def append_to_warning_list(self, message, category, *args):
-        self.warning_list.append({"message": message,
-                                  "category": category})
-
-    def tearDown(self):
-        # restore default handling of warnings
-        warnings.showwarning = self.showwarning_default
-
-    def test_document_collection_syntax_warning(self):
-
-        class NonAbstractBase(Document):
-            meta = {'allow_inheritance': True}
-
-        class InheritedDocumentFailTest(NonAbstractBase):
-            meta = {'collection': 'fail'}
-
-        warning = self.warning_list[0]
-        self.assertEqual(SyntaxWarning, warning["category"])
-        self.assertEqual('non_abstract_base',
-                         InheritedDocumentFailTest._get_collection_name())
tests/all_warnings/test_warnings.py (new file, 35 lines)
@@ -0,0 +1,35 @@
+"""
+This test has been put into a module.  This is because it tests warnings that
+only get triggered on first hit.  This way we can ensure its imported into the
+top level and called first by the test suite.
+"""
+import unittest
+import warnings
+
+from mongoengine import *
+
+
+class TestAllWarnings(unittest.TestCase):
+    def setUp(self):
+        connect(db="mongoenginetest")
+        self.warning_list = []
+        self.showwarning_default = warnings.showwarning
+        warnings.showwarning = self.append_to_warning_list
+
+    def append_to_warning_list(self, message, category, *args):
+        self.warning_list.append({"message": message, "category": category})
+
+    def tearDown(self):
+        # restore default handling of warnings
+        warnings.showwarning = self.showwarning_default
+
+    def test_document_collection_syntax_warning(self):
+        class NonAbstractBase(Document):
+            meta = {"allow_inheritance": True}
+
+        class InheritedDocumentFailTest(NonAbstractBase):
+            meta = {"collection": "fail"}
+
+        warning = self.warning_list[0]
+        assert SyntaxWarning == warning["category"]
+        assert "non_abstract_base" == InheritedDocumentFailTest._get_collection_name()
@@ -1,15 +0,0 @@
-import sys
-sys.path[0:0] = [""]
-import unittest
-
-from class_methods import *
-from delta import *
-from dynamic import *
-from indexes import *
-from inheritance import *
-from instance import *
-from json_serialisation import *
-from validation import *
-
-if __name__ == '__main__':
-    unittest.main()
Some files were not shown because too many files have changed in this diff.