Compare commits
	
		
			609 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 0da694b845 | ||
|  | 6d5e7d9e81 | ||
|  | bc08bea284 | ||
|  | 0e5a0661e1 | ||
|  | a839bd428f | ||
|  | 0277062693 | ||
|  | 7affa5ab69 | ||
|  | ed22af4e73 | ||
|  | 63ebb6998e | ||
|  | 7914cd47ca | ||
|  | 708dbac70e | ||
|  | 1b62dd5c40 | ||
|  | 4911545843 | ||
|  | c5cc4b7867 | ||
|  | a02c820c2d | ||
|  | 516591fe88 | ||
|  | d2941a9110 | ||
|  | f7302f710b | ||
|  | 6a02ac7e80 | ||
|  | d1b86fdef5 | ||
|  | 57ac38ddca | ||
|  | 7a73a92074 | ||
|  | d1b30f4792 | ||
|  | 16dcf78cab | ||
|  | d868cfdeb0 | ||
|  | c074f4d925 | ||
|  | 453024c58d | ||
|  | fe8340617a | ||
|  | b024dd913d | ||
|  | a2a698ab0e | ||
|  | bb56f92213 | ||
|  | 8dcd998945 | ||
|  | bcbbbe4046 | ||
|  | 7200a8cb84 | ||
|  | 6925344807 | ||
|  | 60ceeb0ddd | ||
|  | 06caabf333 | ||
|  | 954131bd51 | ||
|  | 855efe7fe8 | ||
|  | d902a74ab0 | ||
|  | 499e11f730 | ||
|  | 6db59a9c31 | ||
|  | 6465726008 | ||
|  | 3a3b96e0be | ||
|  | 992c91dc0c | ||
|  | 809473c15c | ||
|  | d79a5ec3d6 | ||
|  | 237469ceaf | ||
|  | c28d9135d9 | ||
|  | 48a5679087 | ||
|  | 7c938712f2 | ||
|  | 4df12bebc2 | ||
|  | dfe8987aaa | ||
|  | 02dbe401d8 | ||
|  | c18f8c92e7 | ||
|  | 11d4f6499a | ||
|  | f2c25b4744 | ||
|  | 27b846717f | ||
|  | 9ed138f896 | ||
|  | 1978dc80eb | ||
|  | fc4b247f4f | ||
|  | ebf7056f4a | ||
|  | eb975d7e13 | ||
|  | a2dd8cb6b9 | ||
|  | 7c254c6136 | ||
|  | c8a33b83f1 | ||
|  | 1145c72b01 | ||
|  | 7fc45fb711 | ||
|  | e146262c38 | ||
|  | 6f808bd06e | ||
|  | 0b6ab49325 | ||
|  | 66d9182e50 | ||
|  | 654cca82a9 | ||
|  | 89785da1c5 | ||
|  | 2f9964e46e | ||
|  | 168ecd67b0 | ||
|  | bcbe740598 | ||
|  | 86c8929d77 | ||
|  | 6738a9433b | ||
|  | 23843ec86e | ||
|  | f4db0da585 | ||
|  | 9ee3b796cd | ||
|  | f57569f553 | ||
|  | fffd0e8990 | ||
|  | 200e52bab5 | ||
|  | a0ef649dd8 | ||
|  | 0dd01bda01 | ||
|  | a707598042 | ||
|  | 8a3171308a | ||
|  | 29c887f30b | ||
|  | 661398d891 | ||
|  | 2cd722d751 | ||
|  | 49f5b4fa5c | ||
|  | 67baf465f4 | ||
|  | ee7666ddea | ||
|  | 02fc41ff1c | ||
|  | d07a9d2ef8 | ||
|  | 3622ebfabd | ||
|  | 70b320633f | ||
|  | f30208f345 | ||
|  | 5bcc454678 | ||
|  | 473110568f | ||
|  | 88ca0f8196 | ||
|  | a171005010 | ||
|  | f56ad2fa58 | ||
|  | a0d255369a | ||
|  | 40b0a15b35 | ||
|  | b98b06ff79 | ||
|  | a448c9aebf | ||
|  | b3f462a39d | ||
|  | 7ce34ca019 | ||
|  | 719bb53c3a | ||
|  | 214415969f | ||
|  | 7431b1f123 | ||
|  | d8ffa843a9 | ||
|  | a69db231cc | ||
|  | c17f94422f | ||
|  | b4777f7f4f | ||
|  | a57d9a9303 | ||
|  | 5e70e1bcb2 | ||
|  | 0c43787996 | ||
|  | dc310b99f9 | ||
|  | e98c5e10bc | ||
|  | f1b1090263 | ||
|  | 6efd6faa3f | ||
|  | 1e4d48d371 | ||
|  | 93a2adb3e6 | ||
|  | a66d516777 | ||
|  | 7a97d42338 | ||
|  | b66cdc8fa0 | ||
|  | 67f43b2aad | ||
|  | d143e50238 | ||
|  | e27439be6a | ||
|  | 2ad5ffbda2 | ||
|  | dae9e662a5 | ||
|  | f22737d6a4 | ||
|  | a458d5a176 | ||
|  | d92ed04538 | ||
|  | 80b3df8953 | ||
|  | bcf83ec761 | ||
|  | e44e72bce3 | ||
|  | 35f2781518 | ||
|  | dc5512e403 | ||
|  | 48ef176e28 | ||
|  | 1aa2b86df3 | ||
|  | 73026047e9 | ||
|  | 6c2c33cac8 | ||
|  | d593f7e04b | ||
|  | 6c599ef506 | ||
|  | f48a0b7b7d | ||
|  | d9f538170b | ||
|  | 1785ced655 | ||
|  | e155e1fa86 | ||
|  | e28fab0550 | ||
|  | fb0dd2c1ca | ||
|  | 6e89e736b7 | ||
|  | 634b874c46 | ||
|  | 9d16364394 | ||
|  | daeecef59e | ||
|  | 8131f0a752 | ||
|  | f4ea1ad517 | ||
|  | f34e8a0ff6 | ||
|  | 4209d61b13 | ||
|  | fa83fba637 | ||
|  | af86aee970 | ||
|  | f26f1a526c | ||
|  | 7cb46d0761 | ||
|  | 0cb4070364 | ||
|  | bc008c2597 | ||
|  | a1d142d3a4 | ||
|  | aa00dc1031 | ||
|  | 592c654916 | ||
|  | 5021b10535 | ||
|  | 43d6e64cfa | ||
|  | 8d21e5f3c1 | ||
|  | fbe5df84c0 | ||
|  | caff44c663 | ||
|  | d6edef98c6 | ||
|  | e0d2fab3c3 | ||
|  | 9867e918fa | ||
|  | e6374ab425 | ||
|  | e116bb9227 | ||
|  | f1a1aa54d8 | ||
|  | 574f3c23d3 | ||
|  | c31d6a6898 | ||
|  | 44a2a164c0 | ||
|  | ede9fcfb00 | ||
|  | a3d43b77ca | ||
|  | e2b32b4bb3 | ||
|  | 025c16c95d | ||
|  | 000eff73cc | ||
|  | 254efdde79 | ||
|  | f0d4e76418 | ||
|  | ba7101ff92 | ||
|  | a2457df45e | ||
|  | 305540f0fd | ||
|  | c2928d8a57 | ||
|  | 7451244cd2 | ||
|  | d935b5764a | ||
|  | f3af76e38c | ||
|  | a7631223a3 | ||
|  | 8aae4f0ed0 | ||
|  | 542049f252 | ||
|  | 9f3394dc6d | ||
|  | 06f5dc6ad7 | ||
|  | dc3b09c218 | ||
|  | ad15781d8f | ||
|  | ea53612822 | ||
|  | c3a065dd33 | ||
|  | 5cb2812231 | ||
|  | f8904a5504 | ||
|  | eb1df23e68 | ||
|  | e5648a4af9 | ||
|  | a246154961 | ||
|  | ce44843e27 | ||
|  | 1a54dad643 | ||
|  | 940dfff625 | ||
|  | c2b15183cb | ||
|  | 27e8aa9c68 | ||
|  | e1d8c6516a | ||
|  | eba81e368b | ||
|  | 74a3fd7596 | ||
|  | eeb5a83e98 | ||
|  | d47134bbf1 | ||
|  | ee725354db | ||
|  | 985bfd22de | ||
|  | 0d35e3a3e9 | ||
|  | d94a191656 | ||
|  | 0eafa4acd8 | ||
|  | f27a53653b | ||
|  | 3b60adc8da | ||
|  | 626a3369b5 | ||
|  | 4244e7569b | ||
|  | ef4b32aca7 | ||
|  | dcd23a0b4d | ||
|  | 5447c6e947 | ||
|  | f1b97fbc8b | ||
|  | 4c8dfc3fc2 | ||
|  | ceece5a7e2 | ||
|  | 7e6b035ca2 | ||
|  | fbc46a52af | ||
|  | 8d2e7b4372 | ||
|  | e7da9144f5 | ||
|  | 2128e169f3 | ||
|  | 8410d64daa | ||
|  | b2f78fadd9 | ||
|  | 3656323f25 | ||
|  | 2fe1c20475 | ||
|  | 0fb976a80a | ||
|  | 3cf62de753 | ||
|  | 06119b306d | ||
|  | 0493bbbc76 | ||
|  | 4c9e90732e | ||
|  | 35f084ba76 | ||
|  | f28f336026 | ||
|  | 122d75f677 | ||
|  | 12f6a3f5a3 | ||
|  | 5d44e1d6ca | ||
|  | 04592c876b | ||
|  | c0571beec8 | ||
|  | 1302316eb0 | ||
|  | 18d8008b89 | ||
|  | 4670f09a67 | ||
|  | 159ef12ed7 | ||
|  | 7a760f5640 | ||
|  | 2b6c42a56c | ||
|  | ab4ff99105 | ||
|  | 774895ec8c | ||
|  | c5ce96c391 | ||
|  | b4a98a4000 | ||
|  | 5f0d86f509 | ||
|  | c96a1b00cf | ||
|  | 1eb6436682 | ||
|  | a84e1f17bb | ||
|  | 3ffc9dffc2 | ||
|  | 048c84ab95 | ||
|  | a7470360d2 | ||
|  | 50f1ca91d4 | ||
|  | 0d37e1cd98 | ||
|  | 9aa77bb3c9 | ||
|  | fd11244966 | ||
|  | d060da094f | ||
|  | 306f9c5ffd | ||
|  | 5ef5611682 | ||
|  | ebdd2d730c | ||
|  | 1ddf8b3159 | ||
|  | a6bc870815 | ||
|  | 56cd73823e | ||
|  | 6299015039 | ||
|  | 11b7cfb5ff | ||
|  | 367f49ce1c | ||
|  | 8165131419 | ||
|  | e402157b4d | ||
|  | 967da7944f | ||
|  | 89f1c21f20 | ||
|  | 7e706190a5 | ||
|  | 36a3770673 | ||
|  | bc92f78afb | ||
|  | f7e22d2b8b | ||
|  | 0b1e11ba1f | ||
|  | 10e0b1daec | ||
|  | 731d8fc6be | ||
|  | f6d0b53ae5 | ||
|  | 0efb90deb6 | ||
|  | b16eabd2b6 | ||
|  | f8350409ad | ||
|  | 5b498bd8d6 | ||
|  | 941042d0ba | ||
|  | 9251ce312b | ||
|  | 96a964a183 | ||
|  | 9e513e08ae | ||
|  | 9dfee83e68 | ||
|  | 7cde979736 | ||
|  | 870ff1d4d9 | ||
|  | 52c162a478 | ||
|  | ddd11c7ed2 | ||
|  | 2c119dea47 | ||
|  | ebd1561682 | ||
|  | 3ccc495c75 | ||
|  | 0eda7a5a3c | ||
|  | f2c16452c6 | ||
|  | a2c429a4a5 | ||
|  | 4a71c5b424 | ||
|  | 268dd80cd0 | ||
|  | 3002e79c98 | ||
|  | 5eab348e82 | ||
|  | 1cdbade761 | ||
|  | 8c9afbd278 | ||
|  | cd73654683 | ||
|  | 9654fe0d8d | ||
|  | 3d49c33c6a | ||
|  | e58b3390aa | ||
|  | 92a1f5736b | ||
|  | 00a57f6cea | ||
|  | 1c345edc49 | ||
|  | 7aa1f47378 | ||
|  | 473d5ead7b | ||
|  | 68f760b563 | ||
|  | 9c1cd81adb | ||
|  | 85b81fb12a | ||
|  | 5d7444c115 | ||
|  | b0c1ec04b5 | ||
|  | 5cfd8909a8 | ||
|  | 6e2d2f33de | ||
|  | 5e65d27832 | ||
|  | 36993097b4 | ||
|  | 2447349383 | ||
|  | 7765f272ac | ||
|  | 13d8dfdb5f | ||
|  | 5e94637adc | ||
|  | ac6e793bbe | ||
|  | d0d9c3ea26 | ||
|  | f7bc58a767 | ||
|  | bafdf0381a | ||
|  | 3fc5dc8523 | ||
|  | df4dc3492c | ||
|  | 10731b0fd8 | ||
|  | cb9166aba4 | ||
|  | fe62c3aacb | ||
|  | c60ea40828 | ||
|  | c59ea26845 | ||
|  | 9bd8b3e9a5 | ||
|  | 5271f3b4a0 | ||
|  | 8a7b619b77 | ||
|  | 88f96b0838 | ||
|  | 1e1e48732a | ||
|  | 3537897fc5 | ||
|  | 3653981416 | ||
|  | 94d1e566c0 | ||
|  | a692316293 | ||
|  | e2f3406e89 | ||
|  | 81c7007f80 | ||
|  | e4f38b5665 | ||
|  | 14b6c471cf | ||
|  | 0d0befe23e | ||
|  | efad628a87 | ||
|  | c16e6d74e6 | ||
|  | 80db9e7716 | ||
|  | 7cf2a3e978 | ||
|  | 681b74a41c | ||
|  | d39d10b9fb | ||
|  | dff44ef74e | ||
|  | 485047f20b | ||
|  | 6affbbe865 | ||
|  | e3600ef4de | ||
|  | f0eaec98c7 | ||
|  | 6dcd7006d0 | ||
|  | 5de4812477 | ||
|  | d5b28356bc | ||
|  | 76fddd0db0 | ||
|  | 1108586303 | ||
|  | 3f49923298 | ||
|  | c277be8b6b | ||
|  | 6e083fa6a1 | ||
|  | 073091a06e | ||
|  | 03bfd01862 | ||
|  | 539f01d08e | ||
|  | dcf3c86dce | ||
|  | ec639cd6e9 | ||
|  | 420376d036 | ||
|  | 51e50bf0a9 | ||
|  | c2d77f51bb | ||
|  | b4d87d9128 | ||
|  | 4401a309ee | ||
|  | b562e209d1 | ||
|  | 3a85422e8f | ||
|  | e45397c975 | ||
|  | 1f9ec0c888 | ||
|  | f8ee470e70 | ||
|  | d02de0798f | ||
|  | 6fe074fb13 | ||
|  | 4db339c5f4 | ||
|  | a525764359 | ||
|  | f970d5878a | ||
|  | cc0a2cbc6f | ||
|  | add0b463f5 | ||
|  | d80b1a7749 | ||
|  | 6186691259 | ||
|  | b451cc567d | ||
|  | 757ff31661 | ||
|  | 97a98f0045 | ||
|  | 8f05896bc9 | ||
|  | da7a8939df | ||
|  | b6977a88ea | ||
|  | eafbc7f20d | ||
|  | d92f992c01 | ||
|  | 20a5d9051d | ||
|  | c9a5710554 | ||
|  | f10e946896 | ||
|  | 2f19b22bb2 | ||
|  | d134e11c6d | ||
|  | 63edd16a92 | ||
|  | 37740dc010 | ||
|  | 04b85ddbf2 | ||
|  | 836dc96f67 | ||
|  | 49a7542b14 | ||
|  | a84ffce5a0 | ||
|  | 210b3e5192 | ||
|  | 5f1d5ea056 | ||
|  | 19a7372ff9 | ||
|  | cc5b60b004 | ||
|  | b06f9dbf8d | ||
|  | d9b8ee7895 | ||
|  | e9ff655b0e | ||
|  | d58341d7ae | ||
|  | 669d21a114 | ||
|  | 7e980a16d0 | ||
|  | 47df8deb58 | ||
|  | dd006a502e | ||
|  | 782d48594a | ||
|  | 07d3e52e6a | ||
|  | fc1ce6d39b | ||
|  | 32d5c0c946 | ||
|  | dfabfce01b | ||
|  | 74f3f4eb15 | ||
|  | 20cb0285f0 | ||
|  | faf840f924 | ||
|  | 165bea5bb9 | ||
|  | f7515cfca8 | ||
|  | a762a10dec | ||
|  | a192029901 | ||
|  | 67182713d9 | ||
|  | e9464e32db | ||
|  | 2d6ae16912 | ||
|  | f9cd8b1841 | ||
|  | 41a698b442 | ||
|  | 9f58bc9207 | ||
|  | d36f6e7f24 | ||
|  | eeb672feb9 | ||
|  | 063a162ce0 | ||
|  | 3e4a900279 | ||
|  | 43327ea4e1 | ||
|  | 0d2e84b16b | ||
|  | 3c78757778 | ||
|  | d0245bb5ba | ||
|  | 3477b0107a | ||
|  | 8df9ff90cb | ||
|  | d6b4ca7a98 | ||
|  | 2e18199eb2 | ||
|  | 025e17701b | ||
|  | 156ca44a13 | ||
|  | 39dac7d4db | ||
|  | 9ca632d518 | ||
|  | 4177fc6df2 | ||
|  | d90890c08e | ||
|  | 1ca098c402 | ||
|  | 3208a7f15d | ||
|  | 8eda52e8e0 | ||
|  | 5b161b7445 | ||
|  | 8c1f8e54cd | ||
|  | 03d3c26a99 | ||
|  | 0cbd3663e4 | ||
|  | f182daa85e | ||
|  | de2f774e85 | ||
|  | 9d9a4afee9 | ||
|  | 0ea363c7fc | ||
|  | d7ee47ee25 | ||
|  | eb1b6e34c7 | ||
|  | 621b2b3f72 | ||
|  | 83da08ef7d | ||
|  | 9f551121fb | ||
|  | ba48dfb4bf | ||
|  | ed2ea24b75 | ||
|  | eefbd3f597 | ||
|  | e38bf63be0 | ||
|  | e7ba5eb160 | ||
|  | fff27f9b87 | ||
|  | d58f594c17 | ||
|  | 9797d7a7fb | ||
|  | c8b65317ef | ||
|  | 3a6dc77d36 | ||
|  | 4f70c27b56 | ||
|  | ea46edf50a | ||
|  | e5e88d792e | ||
|  | 6d68ad735c | ||
|  | c44b98a7e1 | ||
|  | 445f9453c4 | ||
|  | 3364e040c8 | ||
|  | 692f00864d | ||
|  | 344dc64df8 | ||
|  | 473425a36a | ||
|  | 3ba58ebaae | ||
|  | 2c7b12c022 | ||
|  | 17eeeb7536 | ||
|  | de5fbfde2c | ||
|  | f5d02e1b10 | ||
|  | e508625935 | ||
|  | 0b177ec4c1 | ||
|  | 87c965edd3 | ||
|  | 72dd9daa23 | ||
|  | a68529fba8 | ||
|  | 06681a453f | ||
|  | 5907dde4a8 | ||
|  | 8e038dd563 | ||
|  | 50905ab459 | ||
|  | 7bb9c7d47f | ||
|  | 5c45eee817 | ||
|  | 0f9e4ef352 | ||
|  | 85173d188b | ||
|  | d9ed33d1b1 | ||
|  | e6ac8cab53 | ||
|  | f890ebd0f4 | ||
|  | e537369d98 | ||
|  | 9bbd8dbe62 | ||
|  | 09a5f5c8f3 | ||
|  | b9e0f52526 | ||
|  | 1cdf71b647 | ||
|  | 3aff461039 | ||
|  | bf74d7537c | ||
|  | 0c2fb6807e | ||
|  | b9c9d127a2 | ||
|  | 286beca6c5 | ||
|  | 3a1521a34e | ||
|  | c5b047d0cd | ||
|  | 485b811bd0 | ||
|  | f335591045 | ||
|  | 1c10f3020b | ||
|  | 3074dad293 | ||
|  | 42f506adc6 | ||
|  | 50b755db0c | ||
|  | 420c3e0073 | ||
|  | 4a57fc33e4 | ||
|  | 25cdf16cc0 | ||
|  | 7f732459a1 | ||
|  | 9cc02d4dbe | ||
|  | c528ac09d6 | ||
|  | 1a131ff120 | ||
|  | accdd82970 | ||
|  | 3e8f02c64b | ||
|  | 3425264077 | ||
|  | 148f8b8a3a | ||
|  | f6f7c12f0e | ||
|  | 219b28c97b | ||
|  | 3598fe0fb4 | ||
|  | f9dd051ec9 | ||
|  | 68e4a27aaf | ||
|  | b849c719a8 | ||
|  | 59e7617e82 | ||
|  | b5e868655e | ||
|  | 027b3d36de | ||
|  | 003454573c | ||
|  | aa5a9ff1f4 | ||
|  | 28ef54986d | ||
|  | dfdc0d92c3 | ||
|  | f265915aa2 | ||
|  | 4228d06934 | ||
|  | 1a93b9b226 | ||
|  | 363e50abbe | ||
|  | b8d53a6f0d | ||
|  | 4b45c0cd14 | ||
|  | e7c0da38c2 | ||
|  | 8706fbe461 | ||
|  | 9ca96e4e17 | ||
|  | 99fe1da345 | ||
|  | 1986e82783 | ||
|  | 7073b9d395 | ||
|  | f2049e9c18 | ||
|  | f0f1308465 | ||
|  | 7d90aa76ff | ||
|  | 3cc2c617fd | ||
|  | 3d5b6ae332 | ||
|  | 59826c8cfd | ||
|  | 6f29d12386 | ||
|  | 0a89899ad0 | ||
|  | e4af0e361a | ||
|  | 0bfc96e459 | ||
|  | 3425574ddc | ||
|  | 6a31736644 | ||
|  | 500eb920e4 | 
							
								
								
									
										30
									
								
								.travis.yml
									
									
									
									
									
								
							
							
						
						
									
										30
									
								
								.travis.yml
									
									
									
									
									
								
							| @@ -2,22 +2,27 @@ | ||||
| language: python | ||||
| services: mongodb | ||||
| python: | ||||
|     - 2.5 | ||||
|     - 2.6 | ||||
|     - 2.7 | ||||
|     - 3.1 | ||||
|     - 3.2 | ||||
|     - "2.6" | ||||
|     - "2.7" | ||||
|     - "3.2" | ||||
|     - "3.3" | ||||
| env: | ||||
|   - PYMONGO=dev | ||||
|   - PYMONGO=2.3 | ||||
|   - PYMONGO=2.2 | ||||
|   - PYMONGO=dev DJANGO=1.6 | ||||
|   - PYMONGO=dev DJANGO=1.5.5 | ||||
|   - PYMONGO=dev DJANGO=1.4.10 | ||||
|   - PYMONGO=2.5 DJANGO=1.6 | ||||
|   - PYMONGO=2.5 DJANGO=1.5.5 | ||||
|   - PYMONGO=2.5 DJANGO=1.4.10 | ||||
|   - PYMONGO=3.2 DJANGO=1.6 | ||||
|   - PYMONGO=3.2 DJANGO=1.5.5 | ||||
|   - PYMONGO=3.3 DJANGO=1.6 | ||||
|   - PYMONGO=3.3 DJANGO=1.5.5 | ||||
| install: | ||||
|     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install zlib1g zlib1g-dev; fi | ||||
|     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/; fi | ||||
|     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi | ||||
|     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi | ||||
|     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi | ||||
|     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi | ||||
|     - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi | ||||
|     - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi | ||||
|     - pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b | ||||
|     - python setup.py install | ||||
| script: | ||||
|     - python setup.py test | ||||
| @@ -26,4 +31,3 @@ notifications: | ||||
| branches: | ||||
|   only: | ||||
|     - master | ||||
|     - 0.8 | ||||
							
								
								
									
										69
									
								
								AUTHORS
									
									
									
									
									
								
							
							
						
						
									
										69
									
								
								AUTHORS
									
									
									
									
									
								
							| @@ -16,8 +16,6 @@ Dervived from the git logs, inevitably incomplete but all of whom and others | ||||
| have submitted patches, reported bugs and generally helped make MongoEngine | ||||
| that much better: | ||||
|  | ||||
|  * Harry Marr | ||||
|  * Ross Lawley | ||||
|  * blackbrrr | ||||
|  * Florian Schlachter | ||||
|  * Vincent Driessen | ||||
| @@ -25,7 +23,7 @@ that much better: | ||||
|  * flosch | ||||
|  * Deepak Thukral | ||||
|  * Colin Howe | ||||
|  * Wilson Júnior | ||||
|  * Wilson Júnior (https://github.com/wpjunior) | ||||
|  * Alistair Roche | ||||
|  * Dan Crosta | ||||
|  * Viktor Kerkez | ||||
| @@ -77,7 +75,7 @@ that much better: | ||||
|  * Adam Parrish | ||||
|  * jpfarias | ||||
|  * jonrscott | ||||
|  * Alice Zoë Bevan-McGregor | ||||
|  * Alice Zoë Bevan-McGregor (https://github.com/amcgregor/) | ||||
|  * Stephen Young | ||||
|  * tkloc | ||||
|  * aid | ||||
| @@ -124,7 +122,70 @@ that much better: | ||||
|  * Stefan Wójcik | ||||
|  * dimonb | ||||
|  * Garry Polley | ||||
|  * James Slagle | ||||
|  * Adrian Scott | ||||
|  * Peter Teichman | ||||
|  * Jakub Kot | ||||
|  * Jorge Bastida | ||||
|  * Aleksandr Sorokoumov | ||||
|  * Yohan Graterol | ||||
|  * bool-dev | ||||
|  * Russ Weeks | ||||
|  * Paul Swartz | ||||
|  * Sundar Raman | ||||
|  * Benoit Louy | ||||
|  * Loic Raucy (https://github.com/lraucy) | ||||
|  * hellysmile | ||||
|  * Jaepil Jeong | ||||
|  * Daniil Sharou | ||||
|  * Stefan Wójcik | ||||
|  * Pete Campton | ||||
|  * Martyn Smith | ||||
|  * Marcelo Anton | ||||
|  * Aleksey Porfirov | ||||
|  * Nicolas Trippar | ||||
|  * Manuel Hermann | ||||
|  * Gustavo Gawryszewski | ||||
|  * Max Countryman | ||||
|  * caitifbrito | ||||
|  * lcya86 刘春洋 | ||||
|  * Martin Alderete (https://github.com/malderete) | ||||
|  * Nick Joyce | ||||
|  * Jared Forsyth | ||||
|  * Kenneth Falck | ||||
|  * Lukasz Balcerzak | ||||
|  * Nicolas Cortot | ||||
|  * Alex (https://github.com/kelsta) | ||||
|  * Jin Zhang | ||||
|  * Daniel Axtens | ||||
|  * Leo-Naeka | ||||
|  * Ryan Witt (https://github.com/ryanwitt) | ||||
|  * Jiequan (https://github.com/Jiequan) | ||||
|  * hensom (https://github.com/hensom) | ||||
|  * zhy0216 (https://github.com/zhy0216) | ||||
|  * istinspring (https://github.com/istinspring) | ||||
|  * Massimo Santini (https://github.com/mapio) | ||||
|  * Nigel McNie (https://github.com/nigelmcnie) | ||||
|  * ygbourhis (https://github.com/ygbourhis) | ||||
|  * Bob Dickinson (https://github.com/BobDickinson) | ||||
|  * Michael Bartnett (https://github.com/michaelbartnett) | ||||
|  * Alon Horev (https://github.com/alonho) | ||||
|  * Kelvin Hammond (https://github.com/kelvinhammond) | ||||
|  * Jatin- (https://github.com/jatin-) | ||||
|  * Paul Uithol (https://github.com/PaulUithol) | ||||
|  * Thom Knowles (https://github.com/fleat) | ||||
|  * Paul (https://github.com/squamous) | ||||
|  * Olivier Cortès (https://github.com/Karmak23) | ||||
|  * crazyzubr (https://github.com/crazyzubr) | ||||
|  * FrankSomething (https://github.com/FrankSomething) | ||||
|  * Alexandr Morozov (https://github.com/LK4D4) | ||||
|  * mishudark (https://github.com/mishudark) | ||||
|  * Joe Friedl (https://github.com/grampajoe) | ||||
|  * Daniel Ward (https://github.com/danielward) | ||||
|  * Aniket Deshpande (https://github.com/anicake) | ||||
|  * rfkrocktk (https://github.com/rfkrocktk) | ||||
|  * Gustavo Andrés Angulo (https://github.com/woakas) | ||||
|  * Dmytro Popovych (https://github.com/drudim) | ||||
|  * Tom (https://github.com/tomprimozic) | ||||
|  * j0hnsmith (https://github.com/j0hnsmith) | ||||
|  * Damien Churchill (https://github.com/damoxc) | ||||
|   | ||||
| @@ -20,7 +20,7 @@ post to the `user group <http://groups.google.com/group/mongoengine-users>` | ||||
| Supported Interpreters | ||||
| ---------------------- | ||||
|  | ||||
| PyMongo supports CPython 2.5 and newer. Language | ||||
| MongoEngine supports CPython 2.6 and newer. Language | ||||
| features not supported by all interpreters can not be used. | ||||
| Please also ensure that your code is properly converted by | ||||
| `2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support. | ||||
| @@ -46,7 +46,7 @@ General Guidelines | ||||
| - Write tests and make sure they pass (make sure you have a mongod | ||||
|   running on the default port, then execute ``python setup.py test`` | ||||
|   from the cmd line to run the test suite). | ||||
| - Add yourself to AUTHORS.rst :) | ||||
| - Add yourself to AUTHORS :) | ||||
|  | ||||
| Documentation | ||||
| ------------- | ||||
|   | ||||
							
								
								
									
										2
									
								
								LICENSE
									
									
									
									
									
								
							
							
						
						
									
										2
									
								
								LICENSE
									
									
									
									
									
								
							| @@ -1,4 +1,4 @@ | ||||
| Copyright (c) 2009-2012 See AUTHORS | ||||
| Copyright (c) 2009 See AUTHORS | ||||
|  | ||||
| Permission is hereby granted, free of charge, to any person | ||||
| obtaining a copy of this software and associated documentation | ||||
|   | ||||
| @@ -26,7 +26,7 @@ setup.py install``. | ||||
|  | ||||
| Dependencies | ||||
| ============ | ||||
| - pymongo 2.1.1+ | ||||
| - pymongo 2.5+ | ||||
| - sphinx (optional - for documentation generation) | ||||
|  | ||||
| Examples | ||||
| @@ -92,4 +92,4 @@ Community | ||||
|  | ||||
| Contributing | ||||
| ============ | ||||
| We welcome contributions! see  the`Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_ | ||||
| We welcome contributions! see  the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_ | ||||
|   | ||||
							
								
								
									
										143
									
								
								benchmark.py
									
									
									
									
									
								
							
							
						
						
									
										143
									
								
								benchmark.py
									
									
									
									
									
								
							| @@ -86,17 +86,43 @@ def main(): | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, force=True | ||||
|     8.36906409264 | ||||
|     0.8.X | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - Pymongo | ||||
|     3.69964408875 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - Pymongo write_concern={"w": 0} | ||||
|     3.5526599884 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine | ||||
|     7.00959801674 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries without continual assign - MongoEngine | ||||
|     5.60943293571 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade=True | ||||
|     6.715102911 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True | ||||
|     5.50644683838 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False | ||||
|     4.69851183891 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False | ||||
|     4.68946313858 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     """ | ||||
|  | ||||
|     setup = """ | ||||
| from pymongo import Connection | ||||
| connection = Connection() | ||||
| from pymongo import MongoClient | ||||
| connection = MongoClient() | ||||
| connection.drop_database('timeit_test') | ||||
| """ | ||||
|  | ||||
|     stmt = """ | ||||
| from pymongo import Connection | ||||
| connection = Connection() | ||||
| from pymongo import MongoClient | ||||
| connection = MongoClient() | ||||
|  | ||||
| db = connection.timeit_test | ||||
| noddy = db.noddy | ||||
| @@ -106,7 +132,7 @@ for i in xrange(10000): | ||||
|     for j in range(20): | ||||
|         example['fields']["key"+str(j)] = "value "+str(j) | ||||
|  | ||||
|     noddy.insert(example) | ||||
|     noddy.save(example) | ||||
|  | ||||
| myNoddys = noddy.find() | ||||
| [n for n in myNoddys] # iterate | ||||
| @@ -117,9 +143,32 @@ myNoddys = noddy.find() | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|  | ||||
|     stmt = """ | ||||
| from pymongo import MongoClient | ||||
| connection = MongoClient() | ||||
|  | ||||
| db = connection.timeit_test | ||||
| noddy = db.noddy | ||||
|  | ||||
| for i in xrange(10000): | ||||
|     example = {'fields': {}} | ||||
|     for j in range(20): | ||||
|         example['fields']["key"+str(j)] = "value "+str(j) | ||||
|  | ||||
|     noddy.save(example, write_concern={"w": 0}) | ||||
|  | ||||
| myNoddys = noddy.find() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print "-" * 100 | ||||
|     print """Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""" | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|  | ||||
|     setup = """ | ||||
| from pymongo import Connection | ||||
| connection = Connection() | ||||
| from pymongo import MongoClient | ||||
| connection = MongoClient() | ||||
| connection.drop_database('timeit_test') | ||||
| connection.disconnect() | ||||
|  | ||||
| @@ -149,33 +198,18 @@ myNoddys = Noddy.objects() | ||||
|     stmt = """ | ||||
| for i in xrange(10000): | ||||
|     noddy = Noddy() | ||||
|     fields = {} | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.save(safe=False, validate=False) | ||||
|         fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.fields = fields | ||||
|     noddy.save() | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print "-" * 100 | ||||
|     print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False""" | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|  | ||||
|  | ||||
|     stmt = """ | ||||
| for i in xrange(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.save(safe=False, validate=False, cascade=False) | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print "-" * 100 | ||||
|     print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False""" | ||||
|     print """Creating 10000 dictionaries without continual assign - MongoEngine""" | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|  | ||||
| @@ -184,16 +218,65 @@ for i in xrange(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.save(force_insert=True, safe=False, validate=False, cascade=False) | ||||
|     noddy.save(write_concern={"w": 0}, cascade=True) | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print "-" * 100 | ||||
|     print """Creating 10000 dictionaries - MongoEngine, force=True""" | ||||
|     print """Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""" | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in xrange(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.save(write_concern={"w": 0}, validate=False, cascade=True) | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print "-" * 100 | ||||
|     print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""" | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in xrange(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.save(validate=False, write_concern={"w": 0}) | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print "-" * 100 | ||||
|     print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""" | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in xrange(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.save(force_insert=True, write_concern={"w": 0}, validate=False) | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print "-" * 100 | ||||
|     print """Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""" | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print t.timeit(1) | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
|     main() | ||||
							
								
								
									
										229
									
								
								docs/_themes/nature/static/nature.css_t
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										229
									
								
								docs/_themes/nature/static/nature.css_t
									
									
									
									
										vendored
									
									
								
							| @@ -1,229 +0,0 @@ | ||||
| /** | ||||
|  * Sphinx stylesheet -- default theme | ||||
|  * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||||
|  */ | ||||
|   | ||||
| @import url("basic.css"); | ||||
|   | ||||
| /* -- page layout ----------------------------------------------------------- */ | ||||
|   | ||||
| body { | ||||
|     font-family: Arial, sans-serif; | ||||
|     font-size: 100%; | ||||
|     background-color: #111; | ||||
|     color: #555; | ||||
|     margin: 0; | ||||
|     padding: 0; | ||||
| } | ||||
|  | ||||
| div.documentwrapper { | ||||
|     float: left; | ||||
|     width: 100%; | ||||
| } | ||||
|  | ||||
| div.bodywrapper { | ||||
|     margin: 0 0 0 230px; | ||||
| } | ||||
|  | ||||
| hr{ | ||||
|     border: 1px solid #B1B4B6; | ||||
| } | ||||
|   | ||||
| div.document { | ||||
|     background-color: #eee; | ||||
| } | ||||
|   | ||||
| div.body { | ||||
|     background-color: #ffffff; | ||||
|     color: #3E4349; | ||||
|     padding: 0 30px 30px 30px; | ||||
|     font-size: 0.8em; | ||||
| } | ||||
|   | ||||
| div.footer { | ||||
|     color: #555; | ||||
|     width: 100%; | ||||
|     padding: 13px 0; | ||||
|     text-align: center; | ||||
|     font-size: 75%; | ||||
| } | ||||
|   | ||||
| div.footer a { | ||||
|     color: #444; | ||||
|     text-decoration: underline; | ||||
| } | ||||
|   | ||||
| div.related { | ||||
|     background-color: #6BA81E; | ||||
|     line-height: 32px; | ||||
|     color: #fff; | ||||
|     text-shadow: 0px 1px 0 #444; | ||||
|     font-size: 0.80em; | ||||
| } | ||||
|   | ||||
| div.related a { | ||||
|     color: #E2F3CC; | ||||
| } | ||||
|   | ||||
| div.sphinxsidebar { | ||||
|     font-size: 0.75em; | ||||
|     line-height: 1.5em; | ||||
| } | ||||
|  | ||||
| div.sphinxsidebarwrapper{ | ||||
|     padding: 20px 0; | ||||
| } | ||||
|   | ||||
| div.sphinxsidebar h3, | ||||
| div.sphinxsidebar h4 { | ||||
|     font-family: Arial, sans-serif; | ||||
|     color: #222; | ||||
|     font-size: 1.2em; | ||||
|     font-weight: normal; | ||||
|     margin: 0; | ||||
|     padding: 5px 10px; | ||||
|     background-color: #ddd; | ||||
|     text-shadow: 1px 1px 0 white | ||||
| } | ||||
|  | ||||
| div.sphinxsidebar h4{ | ||||
|     font-size: 1.1em; | ||||
| } | ||||
|   | ||||
| div.sphinxsidebar h3 a { | ||||
|     color: #444; | ||||
| } | ||||
|   | ||||
|   | ||||
| div.sphinxsidebar p { | ||||
|     color: #888; | ||||
|     padding: 5px 20px; | ||||
| } | ||||
|   | ||||
| div.sphinxsidebar p.topless { | ||||
| } | ||||
|   | ||||
| div.sphinxsidebar ul { | ||||
|     margin: 10px 20px; | ||||
|     padding: 0; | ||||
|     color: #000; | ||||
| } | ||||
|   | ||||
| div.sphinxsidebar a { | ||||
|     color: #444; | ||||
| } | ||||
|   | ||||
| div.sphinxsidebar input { | ||||
|     border: 1px solid #ccc; | ||||
|     font-family: sans-serif; | ||||
|     font-size: 1em; | ||||
| } | ||||
|  | ||||
| div.sphinxsidebar input[type=text]{ | ||||
|     margin-left: 20px; | ||||
| } | ||||
|   | ||||
| /* -- body styles ----------------------------------------------------------- */ | ||||
|   | ||||
| a { | ||||
|     color: #005B81; | ||||
|     text-decoration: none; | ||||
| } | ||||
|   | ||||
| a:hover { | ||||
|     color: #E32E00; | ||||
|     text-decoration: underline; | ||||
| } | ||||
|   | ||||
| div.body h1, | ||||
| div.body h2, | ||||
| div.body h3, | ||||
| div.body h4, | ||||
| div.body h5, | ||||
| div.body h6 { | ||||
|     font-family: Arial, sans-serif; | ||||
|     background-color: #BED4EB; | ||||
|     font-weight: normal; | ||||
|     color: #212224; | ||||
|     margin: 30px 0px 10px 0px; | ||||
|     padding: 5px 0 5px 10px; | ||||
|     text-shadow: 0px 1px 0 white | ||||
| } | ||||
|   | ||||
| div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; } | ||||
| div.body h2 { font-size: 150%; background-color: #C8D5E3; } | ||||
| div.body h3 { font-size: 120%; background-color: #D8DEE3; } | ||||
| div.body h4 { font-size: 110%; background-color: #D8DEE3; } | ||||
| div.body h5 { font-size: 100%; background-color: #D8DEE3; } | ||||
| div.body h6 { font-size: 100%; background-color: #D8DEE3; } | ||||
|   | ||||
| a.headerlink { | ||||
|     color: #c60f0f; | ||||
|     font-size: 0.8em; | ||||
|     padding: 0 4px 0 4px; | ||||
|     text-decoration: none; | ||||
| } | ||||
|   | ||||
| a.headerlink:hover { | ||||
|     background-color: #c60f0f; | ||||
|     color: white; | ||||
| } | ||||
|   | ||||
| div.body p, div.body dd, div.body li { | ||||
|     line-height: 1.5em; | ||||
| } | ||||
|   | ||||
| div.admonition p.admonition-title + p { | ||||
|     display: inline; | ||||
| } | ||||
|  | ||||
| div.highlight{ | ||||
|     background-color: white; | ||||
| } | ||||
|  | ||||
| div.note { | ||||
|     background-color: #eee; | ||||
|     border: 1px solid #ccc; | ||||
| } | ||||
|   | ||||
| div.seealso { | ||||
|     background-color: #ffc; | ||||
|     border: 1px solid #ff6; | ||||
| } | ||||
|   | ||||
| div.topic { | ||||
|     background-color: #eee; | ||||
| } | ||||
|   | ||||
| div.warning { | ||||
|     background-color: #ffe4e4; | ||||
|     border: 1px solid #f66; | ||||
| } | ||||
|   | ||||
| p.admonition-title { | ||||
|     display: inline; | ||||
| } | ||||
|   | ||||
| p.admonition-title:after { | ||||
|     content: ":"; | ||||
| } | ||||
|   | ||||
| pre { | ||||
|     padding: 10px; | ||||
|     background-color: White; | ||||
|     color: #222; | ||||
|     line-height: 1.2em; | ||||
|     border: 1px solid #C6C9CB; | ||||
|     font-size: 1.2em; | ||||
|     margin: 1.5em 0 1.5em 0; | ||||
|     -webkit-box-shadow: 1px 1px 1px #d8d8d8; | ||||
|     -moz-box-shadow: 1px 1px 1px #d8d8d8; | ||||
| } | ||||
|   | ||||
| tt { | ||||
|     background-color: #ecf0f3; | ||||
|     color: #222; | ||||
|     padding: 1px 2px; | ||||
|     font-size: 1.2em; | ||||
|     font-family: monospace; | ||||
| } | ||||
							
								
								
									
										54
									
								
								docs/_themes/nature/static/pygments.css
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										54
									
								
								docs/_themes/nature/static/pygments.css
									
									
									
									
										vendored
									
									
								
							| @@ -1,54 +0,0 @@ | ||||
| .c { color: #999988; font-style: italic } /* Comment */ | ||||
| .k { font-weight: bold } /* Keyword */ | ||||
| .o { font-weight: bold } /* Operator */ | ||||
| .cm { color: #999988; font-style: italic } /* Comment.Multiline */ | ||||
| .cp { color: #999999; font-weight: bold } /* Comment.preproc */ | ||||
| .c1 { color: #999988; font-style: italic } /* Comment.Single */ | ||||
| .gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */ | ||||
| .ge { font-style: italic } /* Generic.Emph */ | ||||
| .gr { color: #aa0000 } /* Generic.Error */ | ||||
| .gh { color: #999999 } /* Generic.Heading */ | ||||
| .gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */ | ||||
| .go { color: #111 } /* Generic.Output */ | ||||
| .gp { color: #555555 } /* Generic.Prompt */ | ||||
| .gs { font-weight: bold } /* Generic.Strong */ | ||||
| .gu { color: #aaaaaa } /* Generic.Subheading */ | ||||
| .gt { color: #aa0000 } /* Generic.Traceback */ | ||||
| .kc { font-weight: bold } /* Keyword.Constant */ | ||||
| .kd { font-weight: bold } /* Keyword.Declaration */ | ||||
| .kp { font-weight: bold } /* Keyword.Pseudo */ | ||||
| .kr { font-weight: bold } /* Keyword.Reserved */ | ||||
| .kt { color: #445588; font-weight: bold } /* Keyword.Type */ | ||||
| .m { color: #009999 } /* Literal.Number */ | ||||
| .s { color: #bb8844 } /* Literal.String */ | ||||
| .na { color: #008080 } /* Name.Attribute */ | ||||
| .nb { color: #999999 } /* Name.Builtin */ | ||||
| .nc { color: #445588; font-weight: bold } /* Name.Class */ | ||||
| .no { color: #ff99ff } /* Name.Constant */ | ||||
| .ni { color: #800080 } /* Name.Entity */ | ||||
| .ne { color: #990000; font-weight: bold } /* Name.Exception */ | ||||
| .nf { color: #990000; font-weight: bold } /* Name.Function */ | ||||
| .nn { color: #555555 } /* Name.Namespace */ | ||||
| .nt { color: #000080 } /* Name.Tag */ | ||||
| .nv { color: purple } /* Name.Variable */ | ||||
| .ow { font-weight: bold } /* Operator.Word */ | ||||
| .mf { color: #009999 } /* Literal.Number.Float */ | ||||
| .mh { color: #009999 } /* Literal.Number.Hex */ | ||||
| .mi { color: #009999 } /* Literal.Number.Integer */ | ||||
| .mo { color: #009999 } /* Literal.Number.Oct */ | ||||
| .sb { color: #bb8844 } /* Literal.String.Backtick */ | ||||
| .sc { color: #bb8844 } /* Literal.String.Char */ | ||||
| .sd { color: #bb8844 } /* Literal.String.Doc */ | ||||
| .s2 { color: #bb8844 } /* Literal.String.Double */ | ||||
| .se { color: #bb8844 } /* Literal.String.Escape */ | ||||
| .sh { color: #bb8844 } /* Literal.String.Heredoc */ | ||||
| .si { color: #bb8844 } /* Literal.String.Interpol */ | ||||
| .sx { color: #bb8844 } /* Literal.String.Other */ | ||||
| .sr { color: #808000 } /* Literal.String.Regex */ | ||||
| .s1 { color: #bb8844 } /* Literal.String.Single */ | ||||
| .ss { color: #bb8844 } /* Literal.String.Symbol */ | ||||
| .bp { color: #999999 } /* Name.Builtin.Pseudo */ | ||||
| .vc { color: #ff99ff } /* Name.Variable.Class */ | ||||
| .vg { color: #ff99ff } /* Name.Variable.Global */ | ||||
| .vi { color: #ff99ff } /* Name.Variable.Instance */ | ||||
| .il { color: #009999 } /* Literal.Number.Integer.Long */ | ||||
							
								
								
									
										4
									
								
								docs/_themes/nature/theme.conf
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								docs/_themes/nature/theme.conf
									
									
									
									
										vendored
									
									
								
							| @@ -1,4 +0,0 @@ | ||||
| [theme] | ||||
| inherit = basic | ||||
| stylesheet = nature.css | ||||
| pygments_style = tango | ||||
							
								
								
									
										17
									
								
								docs/_themes/sphinx_rtd_theme/__init__.py
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										17
									
								
								docs/_themes/sphinx_rtd_theme/__init__.py
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,17 @@ | ||||
| """Sphinx ReadTheDocs theme. | ||||
|  | ||||
| From https://github.com/ryan-roemer/sphinx-bootstrap-theme. | ||||
|  | ||||
| """ | ||||
| import os | ||||
|  | ||||
| VERSION = (0, 1, 5) | ||||
|  | ||||
| __version__ = ".".join(str(v) for v in VERSION) | ||||
| __version_full__ = __version__ | ||||
|  | ||||
|  | ||||
| def get_html_theme_path(): | ||||
|     """Return list of HTML theme paths.""" | ||||
|     cur_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) | ||||
|     return cur_dir | ||||
							
								
								
									
										15
									
								
								docs/_themes/sphinx_rtd_theme/breadcrumbs.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										15
									
								
								docs/_themes/sphinx_rtd_theme/breadcrumbs.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,15 @@ | ||||
| <ul class="wy-breadcrumbs"> | ||||
|   <li><a href="{{ pathto(master_doc) }}">Docs</a> »</li> | ||||
|   <li><a href="">{{ title }}</a></li> | ||||
|     <li class="wy-breadcrumbs-aside"> | ||||
|       {% if display_github %} | ||||
|         <a href="https://github.com/{{ github_user }}/{{ github_repo }}/blob/{{ github_version }}{{ conf_py_path }}{{ pagename }}.rst" class="icon icon-github"> Edit on GitHub</a> | ||||
|       {% elif display_bitbucket %} | ||||
|         <a href="https://bitbucket.org/{{ bitbucket_user }}/{{ bitbucket_repo }}/src/{{ bitbucket_version}}{{ conf_py_path }}{{ pagename }}.rst'" class="icon icon-bitbucket"> Edit on Bitbucket</a> | ||||
|       {% elif show_source and has_source and sourcename %} | ||||
|         <a href="{{ pathto('_sources/' + sourcename, true)|e }}" rel="nofollow"> View page source</a> | ||||
|       {% endif %} | ||||
|     </li> | ||||
| </ul> | ||||
| <hr/> | ||||
|  | ||||
							
								
								
									
										30
									
								
								docs/_themes/sphinx_rtd_theme/footer.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										30
									
								
								docs/_themes/sphinx_rtd_theme/footer.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,30 @@ | ||||
| <footer> | ||||
|   {% if next or prev %} | ||||
|     <div class="rst-footer-buttons"> | ||||
|       {% if next %} | ||||
|         <a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}"/>Next <span class="icon icon-circle-arrow-right"></span></a> | ||||
|       {% endif %} | ||||
|       {% if prev %} | ||||
|         <a href="{{ prev.link|e }}" class="btn btn-neutral" title="{{ prev.title|striptags|e }}"><span class="icon icon-circle-arrow-left"></span> Previous</a> | ||||
|       {% endif %} | ||||
|     </div> | ||||
|   {% endif %} | ||||
|  | ||||
|   <hr/> | ||||
|  | ||||
|   <p> | ||||
|   {%- if show_copyright %} | ||||
|     {%- if hasdoc('copyright') %} | ||||
|       {% trans path=pathto('copyright'), copyright=copyright|e %}© <a href="{{ path }}">Copyright</a> {{ copyright }}.{% endtrans %} | ||||
|     {%- else %} | ||||
|       {% trans copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %} | ||||
|     {%- endif %} | ||||
|   {%- endif %} | ||||
|  | ||||
|   {%- if last_updated %} | ||||
|     {% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %} | ||||
|   {%- endif %} | ||||
|   </p> | ||||
|  | ||||
|   {% trans %}<a href="https://www.github.com/snide/sphinx_rtd_theme">Sphinx theme</a> provided by <a href="http://readthedocs.org">Read the Docs</a>{% endtrans %} | ||||
| </footer> | ||||
							
								
								
									
										142
									
								
								docs/_themes/sphinx_rtd_theme/layout.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										142
									
								
								docs/_themes/sphinx_rtd_theme/layout.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,142 @@ | ||||
| {# TEMPLATE VAR SETTINGS #} | ||||
| {%- set url_root = pathto('', 1) %} | ||||
| {%- if url_root == '#' %}{% set url_root = '' %}{% endif %} | ||||
| {%- if not embedded and docstitle %} | ||||
|   {%- set titlesuffix = " — "|safe + docstitle|e %} | ||||
| {%- else %} | ||||
|   {%- set titlesuffix = "" %} | ||||
| {%- endif %} | ||||
|  | ||||
| <!DOCTYPE html> | ||||
| <!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]--> | ||||
| <!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]--> | ||||
| <head> | ||||
|   <meta charset="utf-8"> | ||||
|   <meta name="viewport" content="width=device-width, initial-scale=1.0"> | ||||
|   {% block htmltitle %} | ||||
|   <title>{{ title|striptags|e }}{{ titlesuffix }}</title> | ||||
|   {% endblock %} | ||||
|  | ||||
|   {# FAVICON #} | ||||
|   {% if favicon %} | ||||
|     <link rel="shortcut icon" href="{{ pathto('_static/' + favicon, 1) }}"/> | ||||
|   {% endif %} | ||||
|   {# CANONICAL #} | ||||
|   {%- if theme_canonical_url %} | ||||
|     <link rel="canonical" href="{{ theme_canonical_url }}{{ pagename }}.html"/> | ||||
|   {%- endif %} | ||||
|  | ||||
|   {# CSS #} | ||||
|   <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'> | ||||
|  | ||||
|   {# JS #} | ||||
|   {% if not embedded %} | ||||
|  | ||||
|     <script type="text/javascript"> | ||||
|       var DOCUMENTATION_OPTIONS = { | ||||
|         URL_ROOT:'{{ url_root }}', | ||||
|         VERSION:'{{ release|e }}', | ||||
|         COLLAPSE_INDEX:false, | ||||
|         FILE_SUFFIX:'{{ '' if no_search_suffix else file_suffix }}', | ||||
|         HAS_SOURCE:  {{ has_source|lower }} | ||||
|       }; | ||||
|     </script> | ||||
|     {%- for scriptfile in script_files %} | ||||
|       <script type="text/javascript" src="{{ pathto(scriptfile, 1) }}"></script> | ||||
|     {%- endfor %} | ||||
|  | ||||
|     {% if use_opensearch %} | ||||
|       <link rel="search" type="application/opensearchdescription+xml" title="{% trans docstitle=docstitle|e %}Search within {{ docstitle }}{% endtrans %}" href="{{ pathto('_static/opensearch.xml', 1) }}"/> | ||||
|     {% endif %} | ||||
|  | ||||
|   {% endif %} | ||||
|  | ||||
|   {# RTD hosts these file themselves, so just load on non RTD builds #} | ||||
|   {% if not READTHEDOCS %} | ||||
|     <link rel="stylesheet" href="{{ pathto('_static/' + style, 1) }}" type="text/css" /> | ||||
|     <script type="text/javascript" src="_static/js/theme.js"></script> | ||||
|   {% endif %} | ||||
|  | ||||
|   {% for cssfile in css_files %} | ||||
|     <link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" /> | ||||
|   {% endfor %} | ||||
|  | ||||
|   {%- block linktags %} | ||||
|     {%- if hasdoc('about') %} | ||||
|         <link rel="author" title="{{ _('About these documents') }}" | ||||
|               href="{{ pathto('about') }}"/> | ||||
|     {%- endif %} | ||||
|     {%- if hasdoc('genindex') %} | ||||
|         <link rel="index" title="{{ _('Index') }}" | ||||
|               href="{{ pathto('genindex') }}"/> | ||||
|     {%- endif %} | ||||
|     {%- if hasdoc('search') %} | ||||
|         <link rel="search" title="{{ _('Search') }}" href="{{ pathto('search') }}"/> | ||||
|     {%- endif %} | ||||
|     {%- if hasdoc('copyright') %} | ||||
|         <link rel="copyright" title="{{ _('Copyright') }}" href="{{ pathto('copyright') }}"/> | ||||
|     {%- endif %} | ||||
|     <link rel="top" title="{{ docstitle|e }}" href="{{ pathto('index') }}"/> | ||||
|     {%- if parents %} | ||||
|         <link rel="up" title="{{ parents[-1].title|striptags|e }}" href="{{ parents[-1].link|e }}"/> | ||||
|     {%- endif %} | ||||
|     {%- if next %} | ||||
|         <link rel="next" title="{{ next.title|striptags|e }}" href="{{ next.link|e }}"/> | ||||
|     {%- endif %} | ||||
|     {%- if prev %} | ||||
|         <link rel="prev" title="{{ prev.title|striptags|e }}" href="{{ prev.link|e }}"/> | ||||
|     {%- endif %} | ||||
|   {%- endblock %} | ||||
|   {%- block extrahead %} {% endblock %} | ||||
|  | ||||
|   <script src="//cdnjs.cloudflare.com/ajax/libs/modernizr/2.6.2/modernizr.min.js"></script> | ||||
|  | ||||
| </head> | ||||
|  | ||||
| <body class="wy-body-for-nav"> | ||||
|  | ||||
|   <div class="wy-grid-for-nav"> | ||||
|  | ||||
|     {# SIDE NAV, TOGGLES ON MOBILE #} | ||||
|     <nav data-toggle="wy-nav-shift" class="wy-nav-side"> | ||||
|       <div class="wy-side-nav-search"> | ||||
|         <a href="{{ pathto(master_doc) }}" class="icon icon-home"> {{ project }}</a> | ||||
|         {% include "searchbox.html" %} | ||||
|       </div> | ||||
|  | ||||
|       <div class="wy-menu wy-menu-vertical" data-spy="affix"> | ||||
|         {% set toctree = toctree(maxdepth=2, collapse=False, includehidden=True) %} | ||||
|         {% if toctree %} | ||||
|             {{ toctree }} | ||||
|         {% else %} | ||||
|             <!-- Local TOC --> | ||||
|             <div class="local-toc">{{ toc }}</div> | ||||
|         {% endif %} | ||||
|       </div> | ||||
|         | ||||
|     </nav> | ||||
|  | ||||
|     <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap"> | ||||
|  | ||||
|       {# MOBILE NAV, TRIGGLES SIDE NAV ON TOGGLE #} | ||||
|       <nav class="wy-nav-top"> | ||||
|         <i data-toggle="wy-nav-top" class="icon icon-reorder"></i> | ||||
|         <a href="{{ pathto(master_doc) }}">{{ project }}</a> | ||||
|       </nav> | ||||
|  | ||||
|  | ||||
|       {# PAGE CONTENT #} | ||||
|       <div class="wy-nav-content"> | ||||
|         <div class="rst-content"> | ||||
|           {% include "breadcrumbs.html" %} | ||||
|           {% block body %}{% endblock %} | ||||
|           {% include "footer.html" %} | ||||
|         </div> | ||||
|       </div> | ||||
|  | ||||
|     </section> | ||||
|  | ||||
|   </div> | ||||
|   {% include "versions.html" %} | ||||
| </body> | ||||
| </html> | ||||
							
								
								
									
										205
									
								
								docs/_themes/sphinx_rtd_theme/layout_old.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										205
									
								
								docs/_themes/sphinx_rtd_theme/layout_old.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,205 @@ | ||||
| {# | ||||
|     basic/layout.html | ||||
|     ~~~~~~~~~~~~~~~~~ | ||||
|  | ||||
|     Master layout template for Sphinx themes. | ||||
|  | ||||
|     :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. | ||||
|     :license: BSD, see LICENSE for details. | ||||
| #} | ||||
| {%- block doctype -%} | ||||
| <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" | ||||
|   "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> | ||||
| {%- endblock %} | ||||
| {%- set reldelim1 = reldelim1 is not defined and ' »' or reldelim1 %} | ||||
| {%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %} | ||||
| {%- set render_sidebar = (not embedded) and (not theme_nosidebar|tobool) and | ||||
|                          (sidebars != []) %} | ||||
| {%- set url_root = pathto('', 1) %} | ||||
| {# XXX necessary? #} | ||||
| {%- if url_root == '#' %}{% set url_root = '' %}{% endif %} | ||||
| {%- if not embedded and docstitle %} | ||||
|   {%- set titlesuffix = " — "|safe + docstitle|e %} | ||||
| {%- else %} | ||||
|   {%- set titlesuffix = "" %} | ||||
| {%- endif %} | ||||
|  | ||||
| {%- macro relbar() %} | ||||
|     <div class="related"> | ||||
|       <h3>{{ _('Navigation') }}</h3> | ||||
|       <ul> | ||||
|         {%- for rellink in rellinks %} | ||||
|         <li class="right" {% if loop.first %}style="margin-right: 10px"{% endif %}> | ||||
|           <a href="{{ pathto(rellink[0]) }}" title="{{ rellink[1]|striptags|e }}" | ||||
|              {{ accesskey(rellink[2]) }}>{{ rellink[3] }}</a> | ||||
|           {%- if not loop.first %}{{ reldelim2 }}{% endif %}</li> | ||||
|         {%- endfor %} | ||||
|         {%- block rootrellink %} | ||||
|         <li><a href="{{ pathto(master_doc) }}">{{ shorttitle|e }}</a>{{ reldelim1 }}</li> | ||||
|         {%- endblock %} | ||||
|         {%- for parent in parents %} | ||||
|           <li><a href="{{ parent.link|e }}" {% if loop.last %}{{ accesskey("U") }}{% endif %}>{{ parent.title }}</a>{{ reldelim1 }}</li> | ||||
|         {%- endfor %} | ||||
|         {%- block relbaritems %} {% endblock %} | ||||
|       </ul> | ||||
|     </div> | ||||
| {%- endmacro %} | ||||
|  | ||||
| {%- macro sidebar() %} | ||||
|       {%- if render_sidebar %} | ||||
|       <div class="sphinxsidebar"> | ||||
|         <div class="sphinxsidebarwrapper"> | ||||
|           {%- block sidebarlogo %} | ||||
|           {%- if logo %} | ||||
|             <p class="logo"><a href="{{ pathto(master_doc) }}"> | ||||
|               <img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/> | ||||
|             </a></p> | ||||
|           {%- endif %} | ||||
|           {%- endblock %} | ||||
|           {%- if sidebars != None %} | ||||
|             {#- new style sidebar: explicitly include/exclude templates #} | ||||
|             {%- for sidebartemplate in sidebars %} | ||||
|             {%- include sidebartemplate %} | ||||
|             {%- endfor %} | ||||
|           {%- else %} | ||||
|             {#- old style sidebars: using blocks -- should be deprecated #} | ||||
|             {%- block sidebartoc %} | ||||
|             {%- include "localtoc.html" %} | ||||
|             {%- endblock %} | ||||
|             {%- block sidebarrel %} | ||||
|             {%- include "relations.html" %} | ||||
|             {%- endblock %} | ||||
|             {%- block sidebarsourcelink %} | ||||
|             {%- include "sourcelink.html" %} | ||||
|             {%- endblock %} | ||||
|             {%- if customsidebar %} | ||||
|             {%- include customsidebar %} | ||||
|             {%- endif %} | ||||
|             {%- block sidebarsearch %} | ||||
|             {%- include "searchbox.html" %} | ||||
|             {%- endblock %} | ||||
|           {%- endif %} | ||||
|         </div> | ||||
|       </div> | ||||
|       {%- endif %} | ||||
| {%- endmacro %} | ||||
|  | ||||
| {%- macro script() %} | ||||
|     <script type="text/javascript"> | ||||
|       var DOCUMENTATION_OPTIONS = { | ||||
|         URL_ROOT:    '{{ url_root }}', | ||||
|         VERSION:     '{{ release|e }}', | ||||
|         COLLAPSE_INDEX: false, | ||||
|         FILE_SUFFIX: '{{ '' if no_search_suffix else file_suffix }}', | ||||
|         HAS_SOURCE:  {{ has_source|lower }} | ||||
|       }; | ||||
|     </script> | ||||
|     {%- for scriptfile in script_files %} | ||||
|     <script type="text/javascript" src="{{ pathto(scriptfile, 1) }}"></script> | ||||
|     {%- endfor %} | ||||
| {%- endmacro %} | ||||
|  | ||||
| {%- macro css() %} | ||||
|     <link rel="stylesheet" href="{{ pathto('_static/' + style, 1) }}" type="text/css" /> | ||||
|     <link rel="stylesheet" href="{{ pathto('_static/pygments.css', 1) }}" type="text/css" /> | ||||
|     {%- for cssfile in css_files %} | ||||
|     <link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" /> | ||||
|     {%- endfor %} | ||||
| {%- endmacro %} | ||||
|  | ||||
| <html xmlns="http://www.w3.org/1999/xhtml"> | ||||
|   <head> | ||||
|     <meta http-equiv="Content-Type" content="text/html; charset={{ encoding }}" /> | ||||
|     {{ metatags }} | ||||
|     {%- block htmltitle %} | ||||
|     <title>{{ title|striptags|e }}{{ titlesuffix }}</title> | ||||
|     {%- endblock %} | ||||
|     {{ css() }} | ||||
|     {%- if not embedded %} | ||||
|     {{ script() }} | ||||
|     {%- if use_opensearch %} | ||||
|     <link rel="search" type="application/opensearchdescription+xml" | ||||
|           title="{% trans docstitle=docstitle|e %}Search within {{ docstitle }}{% endtrans %}" | ||||
|           href="{{ pathto('_static/opensearch.xml', 1) }}"/> | ||||
|     {%- endif %} | ||||
|     {%- if favicon %} | ||||
|     <link rel="shortcut icon" href="{{ pathto('_static/' + favicon, 1) }}"/> | ||||
|     {%- endif %} | ||||
|     {%- endif %} | ||||
| {%- block linktags %} | ||||
|     {%- if hasdoc('about') %} | ||||
|     <link rel="author" title="{{ _('About these documents') }}" href="{{ pathto('about') }}" /> | ||||
|     {%- endif %} | ||||
|     {%- if hasdoc('genindex') %} | ||||
|     <link rel="index" title="{{ _('Index') }}" href="{{ pathto('genindex') }}" /> | ||||
|     {%- endif %} | ||||
|     {%- if hasdoc('search') %} | ||||
|     <link rel="search" title="{{ _('Search') }}" href="{{ pathto('search') }}" /> | ||||
|     {%- endif %} | ||||
|     {%- if hasdoc('copyright') %} | ||||
|     <link rel="copyright" title="{{ _('Copyright') }}" href="{{ pathto('copyright') }}" /> | ||||
|     {%- endif %} | ||||
|     <link rel="top" title="{{ docstitle|e }}" href="{{ pathto('index') }}" /> | ||||
|     {%- if parents %} | ||||
|     <link rel="up" title="{{ parents[-1].title|striptags|e }}" href="{{ parents[-1].link|e }}" /> | ||||
|     {%- endif %} | ||||
|     {%- if next %} | ||||
|     <link rel="next" title="{{ next.title|striptags|e }}" href="{{ next.link|e }}" /> | ||||
|     {%- endif %} | ||||
|     {%- if prev %} | ||||
|     <link rel="prev" title="{{ prev.title|striptags|e }}" href="{{ prev.link|e }}" /> | ||||
|     {%- endif %} | ||||
| {%- endblock %} | ||||
| {%- block extrahead %} {% endblock %} | ||||
|   </head> | ||||
|   <body> | ||||
| {%- block header %}{% endblock %} | ||||
|  | ||||
| {%- block relbar1 %}{{ relbar() }}{% endblock %} | ||||
|  | ||||
| {%- block content %} | ||||
|   {%- block sidebar1 %} {# possible location for sidebar #} {% endblock %} | ||||
|  | ||||
|     <div class="document"> | ||||
|   {%- block document %} | ||||
|       <div class="documentwrapper"> | ||||
|       {%- if render_sidebar %} | ||||
|         <div class="bodywrapper"> | ||||
|       {%- endif %} | ||||
|           <div class="body"> | ||||
|             {% block body %} {% endblock %} | ||||
|           </div> | ||||
|       {%- if render_sidebar %} | ||||
|         </div> | ||||
|       {%- endif %} | ||||
|       </div> | ||||
|   {%- endblock %} | ||||
|  | ||||
|   {%- block sidebar2 %}{{ sidebar() }}{% endblock %} | ||||
|       <div class="clearer"></div> | ||||
|     </div> | ||||
| {%- endblock %} | ||||
|  | ||||
| {%- block relbar2 %}{{ relbar() }}{% endblock %} | ||||
|  | ||||
| {%- block footer %} | ||||
|     <div class="footer"> | ||||
|     {%- if show_copyright %} | ||||
|       {%- if hasdoc('copyright') %} | ||||
|         {% trans path=pathto('copyright'), copyright=copyright|e %}© <a href="{{ path }}">Copyright</a> {{ copyright }}.{% endtrans %} | ||||
|       {%- else %} | ||||
|         {% trans copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %} | ||||
|       {%- endif %} | ||||
|     {%- endif %} | ||||
|     {%- if last_updated %} | ||||
|       {% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %} | ||||
|     {%- endif %} | ||||
|     {%- if show_sphinx %} | ||||
|       {% trans sphinx_version=sphinx_version|e %}Created using <a href="http://sphinx-doc.org/">Sphinx</a> {{ sphinx_version }}.{% endtrans %} | ||||
|     {%- endif %} | ||||
|     </div> | ||||
|     <p>asdf asdf asdf asdf 22</p> | ||||
| {%- endblock %} | ||||
|   </body> | ||||
| </html> | ||||
|  | ||||
							
								
								
									
										50
									
								
								docs/_themes/sphinx_rtd_theme/search.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										50
									
								
								docs/_themes/sphinx_rtd_theme/search.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,50 @@ | ||||
| {# | ||||
|     basic/search.html | ||||
|     ~~~~~~~~~~~~~~~~~ | ||||
|  | ||||
|     Template for the search page. | ||||
|  | ||||
|     :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. | ||||
|     :license: BSD, see LICENSE for details. | ||||
| #} | ||||
| {%- extends "layout.html" %} | ||||
| {% set title = _('Search') %} | ||||
| {% set script_files = script_files + ['_static/searchtools.js'] %} | ||||
| {% block extrahead %} | ||||
|   <script type="text/javascript"> | ||||
|     jQuery(function() { Search.loadIndex("{{ pathto('searchindex.js', 1) }}"); }); | ||||
|   </script> | ||||
|   {# this is used when loading the search index using $.ajax fails, | ||||
|      such as on Chrome for documents on localhost #} | ||||
|   <script type="text/javascript" id="searchindexloader"></script> | ||||
|   {{ super() }} | ||||
| {% endblock %} | ||||
| {% block body %} | ||||
|   <noscript> | ||||
|   <div id="fallback" class="admonition warning"> | ||||
|     <p class="last"> | ||||
|       {% trans %}Please activate JavaScript to enable the search | ||||
|       functionality.{% endtrans %} | ||||
|     </p> | ||||
|   </div> | ||||
|   </noscript> | ||||
|  | ||||
|   {% if search_performed %} | ||||
|     <h2>{{ _('Search Results') }}</h2> | ||||
|     {% if not search_results %} | ||||
|       <p>{{ _('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.') }}</p> | ||||
|     {% endif %} | ||||
|   {% endif %} | ||||
|   <div id="search-results"> | ||||
|   {% if search_results %} | ||||
|     <ul> | ||||
|     {% for href, caption, context in search_results %} | ||||
|       <li> | ||||
|         <a href="{{ pathto(item.href) }}">{{ caption }}</a> | ||||
|         <p class="context">{{ context|e }}</p> | ||||
|       </li> | ||||
|     {% endfor %} | ||||
|     </ul> | ||||
|   {% endif %} | ||||
|   </div> | ||||
| {% endblock %} | ||||
							
								
								
									
										5
									
								
								docs/_themes/sphinx_rtd_theme/searchbox.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										5
									
								
								docs/_themes/sphinx_rtd_theme/searchbox.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,5 @@ | ||||
{# Read the Docs themed search box: submits a GET query ("q") to the
   project's search page resolved via pathto(). #}
{# FIX: normalized the malformed attribute ``id ="..."`` (stray space
   before "=") to standard ``id="..."``. #}
<form id="rtd-search-form" class="wy-form" action="{{ pathto('search') }}" method="get">
  <input type="text" name="q" placeholder="Search docs" />
  {# hidden fields kept as-is: consumed by the search backend's keyword
     checking and area filtering. #}
  <input type="hidden" name="check_keywords" value="yes" />
  <input type="hidden" name="area" value="default" />
</form>
							
								
								
									
										1
									
								
								docs/_themes/sphinx_rtd_theme/static/css/badge_only.css
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										1
									
								
								docs/_themes/sphinx_rtd_theme/static/css/badge_only.css
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1 @@ | ||||
/* Minified stylesheet for the Read the Docs "badge only" build of the
 * sphinx_rtd_theme: embeds the fontawesome-webfont @font-face, the .icon
 * glyph classes, and the fixed-position .rst-versions flyout (the
 * version-picker badge pinned to the bottom-left, collapsing to a small
 * badge and hiding below 768px).
 * NOTE(review): this is minifier output — do not hand-edit; regenerate
 * from the theme's SASS sources instead. The duplicated selectors
 * (e.g. "li .icon-large:before,li .icon-large:before") appear to be
 * generator artifacts — confirm against upstream before removing. */
.font-smooth,.icon:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:fontawesome-webfont;font-weight:normal;font-style:normal;src:url("../font/fontawesome_webfont.eot");src:url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"),url("../font/fontawesome_webfont.woff") format("woff"),url("../font/fontawesome_webfont.ttf") format("truetype"),url("../font/fontawesome_webfont.svg#fontawesome-webfont") format("svg")}.icon:before{display:inline-block;font-family:fontawesome-webfont;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .icon{display:inline-block;text-decoration:inherit}li .icon{display:inline-block}li .icon-large:before,li .icon-large:before{width:1.875em}ul.icons{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.icons li .icon{width:0.8em}ul.icons li .icon-large:before,ul.icons li .icon-large:before{vertical-align:baseline}.icon-book:before{content:"\f02d"}.icon-caret-down:before{content:"\f0d7"}.icon-caret-up:before{content:"\f0d8"}.icon-caret-left:before{content:"\f0d9"}.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .icon{color:#fcfcfc}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions 
.rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}img{width:100%;height:auto}}
							
								
								
									
										1
									
								
								docs/_themes/sphinx_rtd_theme/static/css/theme.css
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										1
									
								
								docs/_themes/sphinx_rtd_theme/static/css/theme.css
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
										
											
												File diff suppressed because one or more lines are too long
											
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/favicon.ico
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/favicon.ico
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 6.1 KiB | 
							
								
								
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.eot
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.eot
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										399
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.svg
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										399
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.svg
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,399 @@ | ||||
| <?xml version="1.0" standalone="no"?> | ||||
| <!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" > | ||||
| <svg xmlns="http://www.w3.org/2000/svg"> | ||||
| <metadata></metadata> | ||||
| <defs> | ||||
| <font id="fontawesomeregular" horiz-adv-x="1536" > | ||||
| <font-face units-per-em="1792" ascent="1536" descent="-256" /> | ||||
| <missing-glyph horiz-adv-x="448" /> | ||||
| <glyph unicode=" "  horiz-adv-x="448" /> | ||||
| <glyph unicode="	" horiz-adv-x="448" /> | ||||
| <glyph unicode=" " horiz-adv-x="448" /> | ||||
| <glyph unicode="¨" horiz-adv-x="1792" /> | ||||
| <glyph unicode="©" horiz-adv-x="1792" /> | ||||
| <glyph unicode="®" horiz-adv-x="1792" /> | ||||
| <glyph unicode="´" horiz-adv-x="1792" /> | ||||
| <glyph unicode="Æ" horiz-adv-x="1792" /> | ||||
| <glyph unicode=" " horiz-adv-x="768" /> | ||||
| <glyph unicode=" " /> | ||||
| <glyph unicode=" " horiz-adv-x="768" /> | ||||
| <glyph unicode=" " /> | ||||
| <glyph unicode=" " horiz-adv-x="512" /> | ||||
| <glyph unicode=" " horiz-adv-x="384" /> | ||||
| <glyph unicode=" " horiz-adv-x="256" /> | ||||
| <glyph unicode=" " horiz-adv-x="256" /> | ||||
| <glyph unicode=" " horiz-adv-x="192" /> | ||||
| <glyph unicode=" " horiz-adv-x="307" /> | ||||
| <glyph unicode=" " horiz-adv-x="85" /> | ||||
| <glyph unicode=" " horiz-adv-x="307" /> | ||||
| <glyph unicode=" " horiz-adv-x="384" /> | ||||
| <glyph unicode="™" horiz-adv-x="1792" /> | ||||
| <glyph unicode="∞" horiz-adv-x="1792" /> | ||||
| <glyph unicode="≠" horiz-adv-x="1792" /> | ||||
| <glyph unicode="" horiz-adv-x="500" d="M0 0z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1699 1350q0 -35 -43 -78l-632 -632v-768h320q26 0 45 -19t19 -45t-19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45t45 19h320v768l-632 632q-43 43 -43 78q0 23 18 36.5t38 17.5t43 4h1408q23 0 43 -4t38 -17.5t18 -36.5z" /> | ||||
| <glyph unicode="" d="M1536 1312v-1120q0 -50 -34 -89t-86 -60.5t-103.5 -32t-96.5 -10.5t-96.5 10.5t-103.5 32t-86 60.5t-34 89t34 89t86 60.5t103.5 32t96.5 10.5q105 0 192 -39v537l-768 -237v-709q0 -50 -34 -89t-86 -60.5t-103.5 -32t-96.5 -10.5t-96.5 10.5t-103.5 32t-86 60.5t-34 89 t34 89t86 60.5t103.5 32t96.5 10.5q105 0 192 -39v967q0 31 19 56.5t49 35.5l832 256q12 4 28 4q40 0 68 -28t28 -68z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5zM1664 -128q0 -52 -38 -90t-90 -38q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5 t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1664 32v768q-32 -36 -69 -66q-268 -206 -426 -338q-51 -43 -83 -67t-86.5 -48.5t-102.5 -24.5h-1h-1q-48 0 -102.5 24.5t-86.5 48.5t-83 67q-158 132 -426 338q-37 30 -69 66v-768q0 -13 9.5 -22.5t22.5 -9.5h1472q13 0 22.5 9.5t9.5 22.5zM1664 1083v11v13.5t-0.5 13 t-3 12.5t-5.5 9t-9 7.5t-14 2.5h-1472q-13 0 -22.5 -9.5t-9.5 -22.5q0 -168 147 -284q193 -152 401 -317q6 -5 35 -29.5t46 -37.5t44.5 -31.5t50.5 -27.5t43 -9h1h1q20 0 43 9t50.5 27.5t44.5 31.5t46 37.5t35 29.5q208 165 401 317q54 43 100.5 115.5t46.5 131.5z M1792 1120v-1088q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1472q66 0 113 -47t47 -113z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M896 -128q-26 0 -44 18l-624 602q-10 8 -27.5 26t-55.5 65.5t-68 97.5t-53.5 121t-23.5 138q0 220 127 344t351 124q62 0 126.5 -21.5t120 -58t95.5 -68.5t76 -68q36 36 76 68t95.5 68.5t120 58t126.5 21.5q224 0 351 -124t127 -344q0 -221 -229 -450l-623 -600 q-18 -18 -44 -18z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1664 889q0 -22 -26 -48l-363 -354l86 -500q1 -7 1 -20q0 -21 -10.5 -35.5t-30.5 -14.5q-19 0 -40 12l-449 236l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41t49 -41l225 -455 l502 -73q56 -9 56 -46z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1137 532l306 297l-422 62l-189 382l-189 -382l-422 -62l306 -297l-73 -421l378 199l377 -199zM1664 889q0 -22 -26 -48l-363 -354l86 -500q1 -7 1 -20q0 -50 -41 -50q-19 0 -40 12l-449 236l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500 l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41t49 -41l225 -455l502 -73q56 -9 56 -46z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1408 131q0 -120 -73 -189.5t-194 -69.5h-874q-121 0 -194 69.5t-73 189.5q0 53 3.5 103.5t14 109t26.5 108.5t43 97.5t62 81t85.5 53.5t111.5 20q9 0 42 -21.5t74.5 -48t108 -48t133.5 -21.5t133.5 21.5t108 48t74.5 48t42 21.5q61 0 111.5 -20t85.5 -53.5t62 -81 t43 -97.5t26.5 -108.5t14 -109t3.5 -103.5zM1088 1024q0 -159 -112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5t112.5 271.5t271.5 112.5t271.5 -112.5t112.5 -271.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M384 -64v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM384 320v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM384 704v128q0 26 -19 45t-45 19h-128 q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1408 -64v512q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-512q0 -26 19 -45t45 -19h768q26 0 45 19t19 45zM384 1088v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45 t45 -19h128q26 0 45 19t19 45zM1792 -64v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1408 704v512q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-512q0 -26 19 -45t45 -19h768q26 0 45 19t19 45zM1792 320v128 q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1792 704v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1792 1088v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19 t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1920 1248v-1344q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1344q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M768 512v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM768 1280v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM1664 512v-384q0 -52 -38 -90t-90 -38 h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM1664 1280v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M512 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 288v-192q0 -40 -28 -68t-68 -28h-320 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28 h320q40 0 68 -28t28 -68zM1792 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 800v-192 q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M512 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 288v-192q0 -40 -28 -68t-68 -28h-960 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h960q40 0 68 -28t28 -68zM512 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 800v-192q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v192q0 40 28 68t68 28 h960q40 0 68 -28t28 -68zM1792 1312v-192q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h960q40 0 68 -28t28 -68z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1671 970q0 -40 -28 -68l-724 -724l-136 -136q-28 -28 -68 -28t-68 28l-136 136l-362 362q-28 28 -28 68t28 68l136 136q28 28 68 28t68 -28l294 -295l656 657q28 28 68 28t68 -28l136 -136q28 -28 28 -68z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1298 214q0 -40 -28 -68l-136 -136q-28 -28 -68 -28t-68 28l-294 294l-294 -294q-28 -28 -68 -28t-68 28l-136 136q-28 28 -28 68t28 68l294 294l-294 294q-28 28 -28 68t28 68l136 136q28 28 68 28t68 -28l294 -294l294 294q28 28 68 28t68 -28l136 -136q28 -28 28 -68 t-28 -68l-294 -294l294 -294q28 -28 28 -68z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1024 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-224v-224q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v224h-224q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h224v224q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-224h224 q13 0 22.5 -9.5t9.5 -22.5zM1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5zM1664 -128q0 -53 -37.5 -90.5t-90.5 -37.5q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5 t-225 150t-150 225t-55.5 273.5t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1024 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-576q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h576q13 0 22.5 -9.5t9.5 -22.5zM1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5z M1664 -128q0 -53 -37.5 -90.5t-90.5 -37.5q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z " /> | ||||
| <glyph unicode="" d="M1536 640q0 -156 -61 -298t-164 -245t-245 -164t-298 -61t-298 61t-245 164t-164 245t-61 298q0 182 80.5 343t226.5 270q43 32 95.5 25t83.5 -50q32 -42 24.5 -94.5t-49.5 -84.5q-98 -74 -151.5 -181t-53.5 -228q0 -104 40.5 -198.5t109.5 -163.5t163.5 -109.5 t198.5 -40.5t198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5q0 121 -53.5 228t-151.5 181q-42 32 -49.5 84.5t24.5 94.5q31 43 84 50t95 -25q146 -109 226.5 -270t80.5 -343zM896 1408v-640q0 -52 -38 -90t-90 -38t-90 38t-38 90v640q0 52 38 90t90 38t90 -38t38 -90z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M256 96v-192q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM640 224v-320q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v320q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1024 480v-576q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23 v576q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1408 864v-960q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v960q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1792 1376v-1472q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v1472q0 14 9 23t23 9h192q14 0 23 -9t9 -23z" /> | ||||
| <glyph unicode="" d="M1024 640q0 106 -75 181t-181 75t-181 -75t-75 -181t75 -181t181 -75t181 75t75 181zM1536 749v-222q0 -12 -8 -23t-20 -13l-185 -28q-19 -54 -39 -91q35 -50 107 -138q10 -12 10 -25t-9 -23q-27 -37 -99 -108t-94 -71q-12 0 -26 9l-138 108q-44 -23 -91 -38 q-16 -136 -29 -186q-7 -28 -36 -28h-222q-14 0 -24.5 8.5t-11.5 21.5l-28 184q-49 16 -90 37l-141 -107q-10 -9 -25 -9q-14 0 -25 11q-126 114 -165 168q-7 10 -7 23q0 12 8 23q15 21 51 66.5t54 70.5q-27 50 -41 99l-183 27q-13 2 -21 12.5t-8 23.5v222q0 12 8 23t19 13 l186 28q14 46 39 92q-40 57 -107 138q-10 12 -10 24q0 10 9 23q26 36 98.5 107.5t94.5 71.5q13 0 26 -10l138 -107q44 23 91 38q16 136 29 186q7 28 36 28h222q14 0 24.5 -8.5t11.5 -21.5l28 -184q49 -16 90 -37l142 107q9 9 24 9q13 0 25 -10q129 -119 165 -170q7 -8 7 -22 q0 -12 -8 -23q-15 -21 -51 -66.5t-54 -70.5q26 -50 41 -98l183 -28q13 -2 21 -12.5t8 -23.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M512 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM768 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1024 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576 q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1152 76v948h-896v-948q0 -22 7 -40.5t14.5 -27t10.5 -8.5h832q3 0 10.5 8.5t14.5 27t7 40.5zM480 1152h448l-48 117q-7 9 -17 11h-317q-10 -2 -17 -11zM1408 1120v-64q0 -14 -9 -23t-23 -9h-96v-948q0 -83 -47 -143.5t-113 -60.5h-832 q-66 0 -113 58.5t-47 141.5v952h-96q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h309l70 167q15 37 54 63t79 26h320q40 0 79 -26t54 -63l70 -167h309q14 0 23 -9t9 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1408 544v-480q0 -26 -19 -45t-45 -19h-384v384h-256v-384h-384q-26 0 -45 19t-19 45v480q0 1 0.5 3t0.5 3l575 474l575 -474q1 -2 1 -6zM1631 613l-62 -74q-8 -9 -21 -11h-3q-13 0 -21 7l-692 577l-692 -577q-12 -8 -24 -7q-13 2 -21 11l-62 74q-8 10 -7 23.5t11 21.5 l719 599q32 26 76 26t76 -26l244 -204v195q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-408l219 -182q10 -8 11 -21.5t-7 -23.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1280" d="M128 0h1024v768h-416q-40 0 -68 28t-28 68v416h-512v-1280zM768 896h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376zM1280 864v-896q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h640q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88z " /> | ||||
| <glyph unicode="" d="M896 992v-448q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h224v352q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M1111 540v4l-24 320q-1 13 -11 22.5t-23 9.5h-186q-13 0 -23 -9.5t-11 -22.5l-24 -320v-4q-1 -12 8 -20t21 -8h244q12 0 21 8t8 20zM1870 73q0 -73 -46 -73h-704q13 0 22 9.5t8 22.5l-20 256q-1 13 -11 22.5t-23 9.5h-272q-13 0 -23 -9.5t-11 -22.5l-20 -256 q-1 -13 8 -22.5t22 -9.5h-704q-46 0 -46 73q0 54 26 116l417 1044q8 19 26 33t38 14h339q-13 0 -23 -9.5t-11 -22.5l-15 -192q-1 -14 8 -23t22 -9h166q13 0 22 9t8 23l-15 192q-1 13 -11 22.5t-23 9.5h339q20 0 38 -14t26 -33l417 -1044q26 -62 26 -116z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1280 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1536 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 416v-320q0 -40 -28 -68t-68 -28h-1472q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h465l135 -136 q58 -56 136 -56t136 56l136 136h464q40 0 68 -28t28 -68zM1339 985q17 -41 -14 -70l-448 -448q-18 -19 -45 -19t-45 19l-448 448q-31 29 -14 70q17 39 59 39h256v448q0 26 19 45t45 19h256q26 0 45 -19t19 -45v-448h256q42 0 59 -39z" /> | ||||
| <glyph unicode="" d="M1120 608q0 -12 -10 -24l-319 -319q-11 -9 -23 -9t-23 9l-320 320q-15 16 -7 35q8 20 30 20h192v352q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-352h192q14 0 23 -9t9 -23zM768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273 t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1118 660q-8 -20 -30 -20h-192v-352q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v352h-192q-14 0 -23 9t-9 23q0 12 10 24l319 319q11 9 23 9t23 -9l320 -320q15 -16 7 -35zM768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198 t73 273t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1023 576h316q-1 3 -2.5 8t-2.5 8l-212 496h-708l-212 -496q-1 -2 -2.5 -8t-2.5 -8h316l95 -192h320zM1536 546v-482q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v482q0 62 25 123l238 552q10 25 36.5 42t52.5 17h832q26 0 52.5 -17t36.5 -42l238 -552 q25 -61 25 -123z" /> | ||||
| <glyph unicode="" d="M1184 640q0 -37 -32 -55l-544 -320q-15 -9 -32 -9q-16 0 -32 8q-32 19 -32 56v640q0 37 32 56q33 18 64 -1l544 -320q32 -18 32 -55zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1536 1280v-448q0 -26 -19 -45t-45 -19h-448q-42 0 -59 40q-17 39 14 69l138 138q-148 137 -349 137q-104 0 -198.5 -40.5t-163.5 -109.5t-109.5 -163.5t-40.5 -198.5t40.5 -198.5t109.5 -163.5t163.5 -109.5t198.5 -40.5q119 0 225 52t179 147q7 10 23 12q14 0 25 -9 l137 -138q9 -8 9.5 -20.5t-7.5 -22.5q-109 -132 -264 -204.5t-327 -72.5q-156 0 -298 61t-245 164t-164 245t-61 298t61 298t164 245t245 164t298 61q147 0 284.5 -55.5t244.5 -156.5l130 129q29 31 70 14q39 -17 39 -59z" /> | ||||
| <glyph unicode="" d="M1511 480q0 -5 -1 -7q-64 -268 -268 -434.5t-478 -166.5q-146 0 -282.5 55t-243.5 157l-129 -129q-19 -19 -45 -19t-45 19t-19 45v448q0 26 19 45t45 19h448q26 0 45 -19t19 -45t-19 -45l-137 -137q71 -66 161 -102t187 -36q134 0 250 65t186 179q11 17 53 117 q8 23 30 23h192q13 0 22.5 -9.5t9.5 -22.5zM1536 1280v-448q0 -26 -19 -45t-45 -19h-448q-26 0 -45 19t-19 45t19 45l138 138q-148 137 -349 137q-134 0 -250 -65t-186 -179q-11 -17 -53 -117q-8 -23 -30 -23h-199q-13 0 -22.5 9.5t-9.5 22.5v7q65 268 270 434.5t480 166.5 q146 0 284 -55.5t245 -156.5l130 129q19 19 45 19t45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M384 352v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 608v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M384 864v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1536 352v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h960q13 0 22.5 -9.5t9.5 -22.5z M1536 608v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h960q13 0 22.5 -9.5t9.5 -22.5zM1536 864v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h960q13 0 22.5 -9.5 t9.5 -22.5zM1664 160v832q0 13 -9.5 22.5t-22.5 9.5h-1472q-13 0 -22.5 -9.5t-9.5 -22.5v-832q0 -13 9.5 -22.5t22.5 -9.5h1472q13 0 22.5 9.5t9.5 22.5zM1792 1248v-1088q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1472q66 0 113 -47 t47 -113z" /> | ||||
| <glyph unicode="" horiz-adv-x="1152" d="M320 768h512v192q0 106 -75 181t-181 75t-181 -75t-75 -181v-192zM1152 672v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v576q0 40 28 68t68 28h32v192q0 184 132 316t316 132t316 -132t132 -316v-192h32q40 0 68 -28t28 -68z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M320 1280q0 -72 -64 -110v-1266q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v1266q-64 38 -64 110q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -25 -12.5 -38.5t-39.5 -27.5q-215 -116 -369 -116q-61 0 -123.5 22t-108.5 48 t-115.5 48t-142.5 22q-192 0 -464 -146q-17 -9 -33 -9q-26 0 -45 19t-19 45v742q0 32 31 55q21 14 79 43q236 120 421 120q107 0 200 -29t219 -88q38 -19 88 -19q54 0 117.5 21t110 47t88 47t54.5 21q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1664 650q0 -166 -60 -314l-20 -49l-185 -33q-22 -83 -90.5 -136.5t-156.5 -53.5v-32q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-32q71 0 130 -35.5t93 -95.5l68 12q29 95 29 193q0 148 -88 279t-236.5 209t-315.5 78 t-315.5 -78t-236.5 -209t-88 -279q0 -98 29 -193l68 -12q34 60 93 95.5t130 35.5v32q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v32q-88 0 -156.5 53.5t-90.5 136.5l-185 33l-20 49q-60 148 -60 314q0 151 67 291t179 242.5 t266 163.5t320 61t320 -61t266 -163.5t179 -242.5t67 -291z" /> | ||||
| <glyph unicode="" horiz-adv-x="768" d="M768 1184v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1152" d="M768 1184v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45zM1152 640q0 -76 -42.5 -141.5t-112.5 -93.5q-10 -5 -25 -5q-26 0 -45 18.5t-19 45.5q0 21 12 35.5t29 25t34 23t29 35.5 t12 57t-12 57t-29 35.5t-34 23t-29 25t-12 35.5q0 27 19 45.5t45 18.5q15 0 25 -5q70 -27 112.5 -93t42.5 -142z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M768 1184v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45zM1152 640q0 -76 -42.5 -141.5t-112.5 -93.5q-10 -5 -25 -5q-26 0 -45 18.5t-19 45.5q0 21 12 35.5t29 25t34 23t29 35.5 t12 57t-12 57t-29 35.5t-34 23t-29 25t-12 35.5q0 27 19 45.5t45 18.5q15 0 25 -5q70 -27 112.5 -93t42.5 -142zM1408 640q0 -153 -85 -282.5t-225 -188.5q-13 -5 -25 -5q-27 0 -46 19t-19 45q0 39 39 59q56 29 76 44q74 54 115.5 135.5t41.5 173.5t-41.5 173.5 t-115.5 135.5q-20 15 -76 44q-39 20 -39 59q0 26 19 45t45 19q13 0 26 -5q140 -59 225 -188.5t85 -282.5zM1664 640q0 -230 -127 -422.5t-338 -283.5q-13 -5 -26 -5q-26 0 -45 19t-19 45q0 36 39 59q7 4 22.5 10.5t22.5 10.5q46 25 82 51q123 91 192 227t69 289t-69 289 t-192 227q-36 26 -82 51q-7 4 -22.5 10.5t-22.5 10.5q-39 23 -39 59q0 26 19 45t45 19q13 0 26 -5q211 -91 338 -283.5t127 -422.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M384 384v-128h-128v128h128zM384 1152v-128h-128v128h128zM1152 1152v-128h-128v128h128zM128 129h384v383h-384v-383zM128 896h384v384h-384v-384zM896 896h384v384h-384v-384zM640 640v-640h-640v640h640zM1152 128v-128h-128v128h128zM1408 128v-128h-128v128h128z M1408 640v-384h-384v128h-128v-384h-128v640h384v-128h128v128h128zM640 1408v-640h-640v640h640zM1408 1408v-640h-640v640h640z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M63 0h-63v1408h63v-1408zM126 1h-32v1407h32v-1407zM220 1h-31v1407h31v-1407zM377 1h-31v1407h31v-1407zM534 1h-62v1407h62v-1407zM660 1h-31v1407h31v-1407zM723 1h-31v1407h31v-1407zM786 1h-31v1407h31v-1407zM943 1h-63v1407h63v-1407zM1100 1h-63v1407h63v-1407z M1226 1h-63v1407h63v-1407zM1352 1h-63v1407h63v-1407zM1446 1h-63v1407h63v-1407zM1635 1h-94v1407h94v-1407zM1698 1h-32v1407h32v-1407zM1792 0h-63v1408h63v-1408z" /> | ||||
| <glyph unicode="" d="M448 1088q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1515 512q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-53 0 -90 37l-715 716q-38 37 -64.5 101t-26.5 117v416q0 52 38 90t90 38h416q53 0 117 -26.5t102 -64.5 l715 -714q37 -39 37 -91z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M448 1088q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1515 512q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-53 0 -90 37l-715 716q-38 37 -64.5 101t-26.5 117v416q0 52 38 90t90 38h416q53 0 117 -26.5t102 -64.5 l715 -714q37 -39 37 -91zM1899 512q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-36 0 -59 14t-53 45l470 470q37 37 37 90q0 52 -37 91l-715 714q-38 38 -102 64.5t-117 26.5h224q53 0 117 -26.5t102 -64.5l715 -714q37 -39 37 -91z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1639 1058q40 -57 18 -129l-275 -906q-19 -64 -76.5 -107.5t-122.5 -43.5h-923q-77 0 -148.5 53.5t-99.5 131.5q-24 67 -2 127q0 4 3 27t4 37q1 8 -3 21.5t-3 19.5q2 11 8 21t16.5 23.5t16.5 23.5q23 38 45 91.5t30 91.5q3 10 0.5 30t-0.5 28q3 11 17 28t17 23 q21 36 42 92t25 90q1 9 -2.5 32t0.5 28q4 13 22 30.5t22 22.5q19 26 42.5 84.5t27.5 96.5q1 8 -3 25.5t-2 26.5q2 8 9 18t18 23t17 21q8 12 16.5 30.5t15 35t16 36t19.5 32t26.5 23.5t36 11.5t47.5 -5.5l-1 -3q38 9 51 9h761q74 0 114 -56t18 -130l-274 -906 q-36 -119 -71.5 -153.5t-128.5 -34.5h-869q-27 0 -38 -15q-11 -16 -1 -43q24 -70 144 -70h923q29 0 56 15.5t35 41.5l300 987q7 22 5 57q38 -15 59 -43zM575 1056q-4 -13 2 -22.5t20 -9.5h608q13 0 25.5 9.5t16.5 22.5l21 64q4 13 -2 22.5t-20 9.5h-608q-13 0 -25.5 -9.5 t-16.5 -22.5zM492 800q-4 -13 2 -22.5t20 -9.5h608q13 0 25.5 9.5t16.5 22.5l21 64q4 13 -2 22.5t-20 9.5h-608q-13 0 -25.5 -9.5t-16.5 -22.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1280" d="M1164 1408q23 0 44 -9q33 -13 52.5 -41t19.5 -62v-1289q0 -34 -19.5 -62t-52.5 -41q-19 -8 -44 -8q-48 0 -83 32l-441 424l-441 -424q-36 -33 -83 -33q-23 0 -44 9q-33 13 -52.5 41t-19.5 62v1289q0 34 19.5 62t52.5 41q21 9 44 9h1048z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M384 0h896v256h-896v-256zM384 640h896v384h-160q-40 0 -68 28t-28 68v160h-640v-640zM1536 576q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 576v-416q0 -13 -9.5 -22.5t-22.5 -9.5h-224v-160q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68 v160h-224q-13 0 -22.5 9.5t-9.5 22.5v416q0 79 56.5 135.5t135.5 56.5h64v544q0 40 28 68t68 28h672q40 0 88 -20t76 -48l152 -152q28 -28 48 -76t20 -88v-256h64q79 0 135.5 -56.5t56.5 -135.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M960 864q119 0 203.5 -84.5t84.5 -203.5t-84.5 -203.5t-203.5 -84.5t-203.5 84.5t-84.5 203.5t84.5 203.5t203.5 84.5zM1664 1280q106 0 181 -75t75 -181v-896q0 -106 -75 -181t-181 -75h-1408q-106 0 -181 75t-75 181v896q0 106 75 181t181 75h224l51 136 q19 49 69.5 84.5t103.5 35.5h512q53 0 103.5 -35.5t69.5 -84.5l51 -136h224zM960 128q185 0 316.5 131.5t131.5 316.5t-131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M725 977l-170 -450q73 -1 153.5 -2t119 -1.5t52.5 -0.5l29 2q-32 95 -92 241q-53 132 -92 211zM21 -128h-21l2 79q22 7 80 18q89 16 110 31q20 16 48 68l237 616l280 724h75h53l11 -21l205 -480q103 -242 124 -297q39 -102 96 -235q26 -58 65 -164q24 -67 65 -149 q22 -49 35 -57q22 -19 69 -23q47 -6 103 -27q6 -39 6 -57q0 -14 -1 -26q-80 0 -192 8q-93 8 -189 8q-79 0 -135 -2l-200 -11l-58 -2q0 45 4 78l131 28q56 13 68 23q12 12 12 27t-6 32l-47 114l-92 228l-450 2q-29 -65 -104 -274q-23 -64 -23 -84q0 -31 17 -43 q26 -21 103 -32q3 0 13.5 -2t30 -5t40.5 -6q1 -28 1 -58q0 -17 -2 -27q-66 0 -349 20l-48 -8q-81 -14 -167 -14z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M555 15q76 -32 140 -32q131 0 216 41t122 113q38 70 38 181q0 114 -41 180q-58 94 -141 126q-80 32 -247 32q-74 0 -101 -10v-144l-1 -173l3 -270q0 -15 12 -44zM541 761q43 -7 109 -7q175 0 264 65t89 224q0 112 -85 187q-84 75 -255 75q-52 0 -130 -13q0 -44 2 -77 q7 -122 6 -279l-1 -98q0 -43 1 -77zM0 -128l2 94q45 9 68 12q77 12 123 31q17 27 21 51q9 66 9 194l-2 497q-5 256 -9 404q-1 87 -11 109q-1 4 -12 12q-18 12 -69 15q-30 2 -114 13l-4 83l260 6l380 13l45 1q5 0 14 0.5t14 0.5q1 0 21.5 -0.5t40.5 -0.5h74q88 0 191 -27 q43 -13 96 -39q57 -29 102 -76q44 -47 65 -104t21 -122q0 -70 -32 -128t-95 -105q-26 -20 -150 -77q177 -41 267 -146q92 -106 92 -236q0 -76 -29 -161q-21 -62 -71 -117q-66 -72 -140 -108q-73 -36 -203 -60q-82 -15 -198 -11l-197 4q-84 2 -298 -11q-33 -3 -272 -11z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M0 -126l17 85q4 1 77 20q76 19 116 39q29 37 41 101l27 139l56 268l12 64q8 44 17 84.5t16 67t12.5 46.5t9 30.5t3.5 11.5l29 157l16 63l22 135l8 50v38q-41 22 -144 28q-28 2 -38 4l19 103l317 -14q39 -2 73 -2q66 0 214 9q33 2 68 4.5t36 2.5q-2 -19 -6 -38 q-7 -29 -13 -51q-55 -19 -109 -31q-64 -16 -101 -31q-12 -31 -24 -88q-9 -44 -13 -82q-44 -199 -66 -306l-61 -311l-38 -158l-43 -235l-12 -45q-2 -7 1 -27q64 -15 119 -21q36 -5 66 -10q-1 -29 -7 -58q-7 -31 -9 -41q-18 0 -23 -1q-24 -2 -42 -2q-9 0 -28 3q-19 4 -145 17 l-198 2q-41 1 -174 -11q-74 -7 -98 -9z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M81 1407l54 -27q20 -5 211 -5h130l19 3l115 1l215 -1h293l34 -2q14 -1 28 7t21 16l7 8l42 1q15 0 28 -1v-104.5t1 -131.5l1 -100l-1 -58q0 -32 -4 -51q-39 -15 -68 -18q-25 43 -54 128q-8 24 -15.5 62.5t-11.5 65.5t-6 29q-13 15 -27 19q-7 2 -42.5 2t-103.5 -1t-111 -1 q-34 0 -67 -5q-10 -97 -8 -136l1 -152v-332l3 -359l-1 -147q-1 -46 11 -85q49 -25 89 -32q2 0 18 -5t44 -13t43 -12q30 -8 50 -18q5 -45 5 -50q0 -10 -3 -29q-14 -1 -34 -1q-110 0 -187 10q-72 8 -238 8q-88 0 -233 -14q-48 -4 -70 -4q-2 22 -2 26l-1 26v9q21 33 79 49 q139 38 159 50q9 21 12 56q8 192 6 433l-5 428q-1 62 -0.5 118.5t0.5 102.5t-2 57t-6 15q-6 5 -14 6q-38 6 -148 6q-43 0 -100 -13.5t-73 -24.5q-13 -9 -22 -33t-22 -75t-24 -84q-6 -19 -19.5 -32t-20.5 -13q-44 27 -56 44v297v86zM1744 128q33 0 42 -18.5t-11 -44.5 l-126 -162q-20 -26 -49 -26t-49 26l-126 162q-20 26 -11 44.5t42 18.5h80v1024h-80q-33 0 -42 18.5t11 44.5l126 162q20 26 49 26t49 -26l126 -162q20 -26 11 -44.5t-42 -18.5h-80v-1024h80z" /> | ||||
| <glyph unicode="" d="M81 1407l54 -27q20 -5 211 -5h130l19 3l115 1l446 -1h318l34 -2q14 -1 28 7t21 16l7 8l42 1q15 0 28 -1v-104.5t1 -131.5l1 -100l-1 -58q0 -32 -4 -51q-39 -15 -68 -18q-25 43 -54 128q-8 24 -15.5 62.5t-11.5 65.5t-6 29q-13 15 -27 19q-7 2 -58.5 2t-138.5 -1t-128 -1 q-94 0 -127 -5q-10 -97 -8 -136l1 -152v52l3 -359l-1 -147q-1 -46 11 -85q49 -25 89 -32q2 0 18 -5t44 -13t43 -12q30 -8 50 -18q5 -45 5 -50q0 -10 -3 -29q-14 -1 -34 -1q-110 0 -187 10q-72 8 -238 8q-82 0 -233 -13q-45 -5 -70 -5q-2 22 -2 26l-1 26v9q21 33 79 49 q139 38 159 50q9 21 12 56q6 137 6 433l-5 44q0 265 -2 278q-2 11 -6 15q-6 5 -14 6q-38 6 -148 6q-50 0 -168.5 -14t-132.5 -24q-13 -9 -22 -33t-22 -75t-24 -84q-6 -19 -19.5 -32t-20.5 -13q-44 27 -56 44v297v86zM1505 113q26 -20 26 -49t-26 -49l-162 -126 q-26 -20 -44.5 -11t-18.5 42v80h-1024v-80q0 -33 -18.5 -42t-44.5 11l-162 126q-26 20 -26 49t26 49l162 126q26 20 44.5 11t18.5 -42v-80h1024v80q0 33 18.5 42t44.5 -11z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1408 576v-128q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1280q26 0 45 -19t19 -45zM1664 960v-128q0 -26 -19 -45 t-45 -19h-1536q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1536q26 0 45 -19t19 -45zM1280 1344v-128q0 -26 -19 -45t-45 -19h-1152q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1408 576v-128q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h896q26 0 45 -19t19 -45zM1664 960v-128q0 -26 -19 -45t-45 -19 h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1280 1344v-128q0 -26 -19 -45t-45 -19h-640q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h640q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 576v-128q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1280q26 0 45 -19t19 -45zM1792 960v-128q0 -26 -19 -45 t-45 -19h-1536q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1536q26 0 45 -19t19 -45zM1792 1344v-128q0 -26 -19 -45t-45 -19h-1152q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 576v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 960v-128q0 -26 -19 -45 t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 1344v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M256 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5t9.5 -22.5zM256 608v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5 t9.5 -22.5zM256 992v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5t9.5 -22.5zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1344 q13 0 22.5 -9.5t9.5 -22.5zM256 1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5t9.5 -22.5zM1792 608v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5 t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5zM1792 992v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5zM1792 1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v192 q0 13 9.5 22.5t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M384 992v-576q0 -13 -9.5 -22.5t-22.5 -9.5q-14 0 -23 9l-288 288q-9 9 -9 23t9 23l288 288q9 9 23 9q13 0 22.5 -9.5t9.5 -22.5zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5 t9.5 -22.5zM1792 608v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1088q13 0 22.5 -9.5t9.5 -22.5zM1792 992v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1088 q13 0 22.5 -9.5t9.5 -22.5zM1792 1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5t9.5 -22.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M352 704q0 -14 -9 -23l-288 -288q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5v576q0 13 9.5 22.5t22.5 9.5q14 0 23 -9l288 -288q9 -9 9 -23zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5 t9.5 -22.5zM1792 608v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1088q13 0 22.5 -9.5t9.5 -22.5zM1792 992v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1088 q13 0 22.5 -9.5t9.5 -22.5zM1792 1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5t9.5 -22.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 1184v-1088q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-403 403v-166q0 -119 -84.5 -203.5t-203.5 -84.5h-704q-119 0 -203.5 84.5t-84.5 203.5v704q0 119 84.5 203.5t203.5 84.5h704q119 0 203.5 -84.5t84.5 -203.5v-165l403 402q18 19 45 19q12 0 25 -5 q39 -17 39 -59z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M640 960q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1664 576v-448h-1408v192l320 320l160 -160l512 512zM1760 1280h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-1216q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5v1216 q0 13 -9.5 22.5t-22.5 9.5zM1920 1248v-1216q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" /> | ||||
| <glyph unicode="" d="M363 0l91 91l-235 235l-91 -91v-107h128v-128h107zM886 928q0 22 -22 22q-10 0 -17 -7l-542 -542q-7 -7 -7 -17q0 -22 22 -22q10 0 17 7l542 542q7 7 7 17zM832 1120l416 -416l-832 -832h-416v416zM1515 1024q0 -53 -37 -90l-166 -166l-416 416l166 165q36 38 90 38 q53 0 91 -38l235 -234q37 -39 37 -91z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M768 896q0 106 -75 181t-181 75t-181 -75t-75 -181t75 -181t181 -75t181 75t75 181zM1024 896q0 -109 -33 -179l-364 -774q-16 -33 -47.5 -52t-67.5 -19t-67.5 19t-46.5 52l-365 774q-33 70 -33 179q0 212 150 362t362 150t362 -150t150 -362z" /> | ||||
| <glyph unicode="" d="M768 96v1088q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M512 384q0 36 -20 69q-1 1 -15.5 22.5t-25.5 38t-25 44t-21 50.5q-4 16 -21 16t-21 -16q-7 -23 -21 -50.5t-25 -44t-25.5 -38t-15.5 -22.5q-20 -33 -20 -69q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1024 512q0 -212 -150 -362t-362 -150t-362 150t-150 362 q0 145 81 275q6 9 62.5 90.5t101 151t99.5 178t83 201.5q9 30 34 47t51 17t51.5 -17t33.5 -47q28 -93 83 -201.5t99.5 -178t101 -151t62.5 -90.5q81 -127 81 -275z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M888 352l116 116l-152 152l-116 -116v-56h96v-96h56zM1328 1072q-16 16 -33 -1l-350 -350q-17 -17 -1 -33t33 1l350 350q17 17 1 33zM1408 478v-190q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832 q63 0 117 -25q15 -7 18 -23q3 -17 -9 -29l-49 -49q-14 -14 -32 -8q-23 6 -45 6h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v126q0 13 9 22l64 64q15 15 35 7t20 -29zM1312 1216l288 -288l-672 -672h-288v288zM1756 1084l-92 -92 l-288 288l92 92q28 28 68 28t68 -28l152 -152q28 -28 28 -68t-28 -68z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1408 547v-259q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h255v0q13 0 22.5 -9.5t9.5 -22.5q0 -27 -26 -32q-77 -26 -133 -60q-10 -4 -16 -4h-112q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832 q66 0 113 47t47 113v214q0 19 18 29q28 13 54 37q16 16 35 8q21 -9 21 -29zM1645 1043l-384 -384q-18 -19 -45 -19q-12 0 -25 5q-39 17 -39 59v192h-160q-323 0 -438 -131q-119 -137 -74 -473q3 -23 -20 -34q-8 -2 -12 -2q-16 0 -26 13q-10 14 -21 31t-39.5 68.5t-49.5 99.5 t-38.5 114t-17.5 122q0 49 3.5 91t14 90t28 88t47 81.5t68.5 74t94.5 61.5t124.5 48.5t159.5 30.5t196.5 11h160v192q0 42 39 59q13 5 25 5q26 0 45 -19l384 -384q19 -19 19 -45t-19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1408 606v-318q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832q63 0 117 -25q15 -7 18 -23q3 -17 -9 -29l-49 -49q-10 -10 -23 -10q-3 0 -9 2q-23 6 -45 6h-832q-66 0 -113 -47t-47 -113v-832 q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v254q0 13 9 22l64 64q10 10 23 10q6 0 12 -3q20 -8 20 -29zM1639 1095l-814 -814q-24 -24 -57 -24t-57 24l-430 430q-24 24 -24 57t24 57l110 110q24 24 57 24t57 -24l263 -263l647 647q24 24 57 24t57 -24l110 -110 q24 -24 24 -57t-24 -57z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 640q0 -26 -19 -45l-256 -256q-19 -19 -45 -19t-45 19t-19 45v128h-384v-384h128q26 0 45 -19t19 -45t-19 -45l-256 -256q-19 -19 -45 -19t-45 19l-256 256q-19 19 -19 45t19 45t45 19h128v384h-384v-128q0 -26 -19 -45t-45 -19t-45 19l-256 256q-19 19 -19 45 t19 45l256 256q19 19 45 19t45 -19t19 -45v-128h384v384h-128q-26 0 -45 19t-19 45t19 45l256 256q19 19 45 19t45 -19l256 -256q19 -19 19 -45t-19 -45t-45 -19h-128v-384h384v128q0 26 19 45t45 19t45 -19l256 -256q19 -19 19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M979 1395q19 19 32 13t13 -32v-1472q0 -26 -13 -32t-32 13l-710 710q-9 9 -13 19v-678q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-678q4 11 13 19z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1747 1395q19 19 32 13t13 -32v-1472q0 -26 -13 -32t-32 13l-710 710q-9 9 -13 19v-710q0 -26 -13 -32t-32 13l-710 710q-9 9 -13 19v-678q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-678q4 11 13 19l710 710 q19 19 32 13t13 -32v-710q4 11 13 19z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1619 1395q19 19 32 13t13 -32v-1472q0 -26 -13 -32t-32 13l-710 710q-8 9 -13 19v-710q0 -26 -13 -32t-32 13l-710 710q-19 19 -19 45t19 45l710 710q19 19 32 13t13 -32v-710q5 11 13 19z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1384 609l-1328 -738q-23 -13 -39.5 -3t-16.5 36v1472q0 26 16.5 36t39.5 -3l1328 -738q23 -13 23 -31t-23 -31z" /> | ||||
| <glyph unicode="" d="M1536 1344v-1408q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h512q26 0 45 -19t19 -45zM640 1344v-1408q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h512q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" d="M1536 1344v-1408q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h1408q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M45 -115q-19 -19 -32 -13t-13 32v1472q0 26 13 32t32 -13l710 -710q8 -8 13 -19v710q0 26 13 32t32 -13l710 -710q19 -19 19 -45t-19 -45l-710 -710q-19 -19 -32 -13t-13 32v710q-5 -10 -13 -19z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M45 -115q-19 -19 -32 -13t-13 32v1472q0 26 13 32t32 -13l710 -710q8 -8 13 -19v710q0 26 13 32t32 -13l710 -710q8 -8 13 -19v678q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-1408q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v678q-5 -10 -13 -19l-710 -710 q-19 -19 -32 -13t-13 32v710q-5 -10 -13 -19z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M45 -115q-19 -19 -32 -13t-13 32v1472q0 26 13 32t32 -13l710 -710q8 -8 13 -19v678q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-1408q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v678q-5 -10 -13 -19z" /> | ||||
| <glyph unicode="" horiz-adv-x="1538" d="M14 557l710 710q19 19 45 19t45 -19l710 -710q19 -19 13 -32t-32 -13h-1472q-26 0 -32 13t13 32zM1473 0h-1408q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1408q26 0 45 -19t19 -45v-256q0 -26 -19 -45t-45 -19z" /> | ||||
| <glyph unicode="" horiz-adv-x="1152" d="M742 -37l-652 651q-37 37 -37 90.5t37 90.5l652 651q37 37 90.5 37t90.5 -37l75 -75q37 -37 37 -90.5t-37 -90.5l-486 -486l486 -485q37 -38 37 -91t-37 -90l-75 -75q-37 -37 -90.5 -37t-90.5 37z" /> | ||||
| <glyph unicode="" horiz-adv-x="1152" d="M1099 704q0 -52 -37 -91l-652 -651q-37 -37 -90 -37t-90 37l-76 75q-37 39 -37 91q0 53 37 90l486 486l-486 485q-37 39 -37 91q0 53 37 90l76 75q36 38 90 38t90 -38l652 -651q37 -37 37 -90z" /> | ||||
| <glyph unicode="" d="M1216 576v128q0 26 -19 45t-45 19h-256v256q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-256h-256q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h256v-256q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v256h256q26 0 45 19t19 45zM1536 640q0 -209 -103 -385.5 t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1216 576v128q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h768q26 0 45 19t19 45zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5 t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1149 414q0 26 -19 45l-181 181l181 181q19 19 19 45q0 27 -19 46l-90 90q-19 19 -46 19q-26 0 -45 -19l-181 -181l-181 181q-19 19 -45 19q-27 0 -46 -19l-90 -90q-19 -19 -19 -46q0 -26 19 -45l181 -181l-181 -181q-19 -19 -19 -45q0 -27 19 -46l90 -90q19 -19 46 -19 q26 0 45 19l181 181l181 -181q19 -19 45 -19q27 0 46 19l90 90q19 19 19 46zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1284 802q0 28 -18 46l-91 90q-19 19 -45 19t-45 -19l-408 -407l-226 226q-19 19 -45 19t-45 -19l-91 -90q-18 -18 -18 -46q0 -27 18 -45l362 -362q19 -19 45 -19q27 0 46 19l543 543q18 18 18 45zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103 t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M896 160v192q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h192q14 0 23 9t9 23zM1152 832q0 88 -55.5 163t-138.5 116t-170 41q-243 0 -371 -213q-15 -24 8 -42l132 -100q7 -6 19 -6q16 0 25 12q53 68 86 92q34 24 86 24q48 0 85.5 -26t37.5 -59 q0 -38 -20 -61t-68 -45q-63 -28 -115.5 -86.5t-52.5 -125.5v-36q0 -14 9 -23t23 -9h192q14 0 23 9t9 23q0 19 21.5 49.5t54.5 49.5q32 18 49 28.5t46 35t44.5 48t28 60.5t12.5 81zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1024 160v160q0 14 -9 23t-23 9h-96v512q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-160q0 -14 9 -23t23 -9h96v-320h-96q-14 0 -23 -9t-9 -23v-160q0 -14 9 -23t23 -9h448q14 0 23 9t9 23zM896 1056v160q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-160q0 -14 9 -23 t23 -9h192q14 0 23 9t9 23zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1197 512h-109q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h109q-32 108 -112.5 188.5t-188.5 112.5v-109q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v109q-108 -32 -188.5 -112.5t-112.5 -188.5h109q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-109 q32 -108 112.5 -188.5t188.5 -112.5v109q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-109q108 32 188.5 112.5t112.5 188.5zM1536 704v-128q0 -26 -19 -45t-45 -19h-143q-37 -161 -154.5 -278.5t-278.5 -154.5v-143q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v143 q-161 37 -278.5 154.5t-154.5 278.5h-143q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h143q37 161 154.5 278.5t278.5 154.5v143q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-143q161 -37 278.5 -154.5t154.5 -278.5h143q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" d="M1097 457l-146 -146q-10 -10 -23 -10t-23 10l-137 137l-137 -137q-10 -10 -23 -10t-23 10l-146 146q-10 10 -10 23t10 23l137 137l-137 137q-10 10 -10 23t10 23l146 146q10 10 23 10t23 -10l137 -137l137 137q10 10 23 10t23 -10l146 -146q10 -10 10 -23t-10 -23 l-137 -137l137 -137q10 -10 10 -23t-10 -23zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5 t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1171 723l-422 -422q-19 -19 -45 -19t-45 19l-294 294q-19 19 -19 45t19 45l102 102q19 19 45 19t45 -19l147 -147l275 275q19 19 45 19t45 -19l102 -102q19 -19 19 -45t-19 -45zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198 t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1312 643q0 161 -87 295l-754 -753q137 -89 297 -89q111 0 211.5 43.5t173.5 116.5t116 174.5t43 212.5zM313 344l755 754q-135 91 -300 91q-148 0 -273 -73t-198 -199t-73 -274q0 -162 89 -299zM1536 643q0 -157 -61 -300t-163.5 -246t-245 -164t-298.5 -61t-298.5 61 t-245 164t-163.5 246t-61 300t61 299.5t163.5 245.5t245 164t298.5 61t298.5 -61t245 -164t163.5 -245.5t61 -299.5z" /> | ||||
| <glyph unicode="" d="M1536 640v-128q0 -53 -32.5 -90.5t-84.5 -37.5h-704l293 -294q38 -36 38 -90t-38 -90l-75 -76q-37 -37 -90 -37q-52 0 -91 37l-651 652q-37 37 -37 90q0 52 37 91l651 650q38 38 91 38q52 0 90 -38l75 -74q38 -38 38 -91t-38 -91l-293 -293h704q52 0 84.5 -37.5 t32.5 -90.5z" /> | ||||
| <glyph unicode="" d="M1472 576q0 -54 -37 -91l-651 -651q-39 -37 -91 -37q-51 0 -90 37l-75 75q-38 38 -38 91t38 91l293 293h-704q-52 0 -84.5 37.5t-32.5 90.5v128q0 53 32.5 90.5t84.5 37.5h704l-293 294q-38 36 -38 90t38 90l75 75q38 38 90 38q53 0 91 -38l651 -651q37 -35 37 -90z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1611 565q0 -51 -37 -90l-75 -75q-38 -38 -91 -38q-54 0 -90 38l-294 293v-704q0 -52 -37.5 -84.5t-90.5 -32.5h-128q-53 0 -90.5 32.5t-37.5 84.5v704l-294 -293q-36 -38 -90 -38t-90 38l-75 75q-38 38 -38 90q0 53 38 91l651 651q35 37 90 37q54 0 91 -37l651 -651 q37 -39 37 -91z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1611 704q0 -53 -37 -90l-651 -652q-39 -37 -91 -37q-53 0 -90 37l-651 652q-38 36 -38 90q0 53 38 91l74 75q39 37 91 37q53 0 90 -37l294 -294v704q0 52 38 90t90 38h128q52 0 90 -38t38 -90v-704l294 294q37 37 90 37q52 0 91 -37l75 -75q37 -39 37 -91z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 896q0 -26 -19 -45l-512 -512q-19 -19 -45 -19t-45 19t-19 45v256h-224q-98 0 -175.5 -6t-154 -21.5t-133 -42.5t-105.5 -69.5t-80 -101t-48.5 -138.5t-17.5 -181q0 -55 5 -123q0 -6 2.5 -23.5t2.5 -26.5q0 -15 -8.5 -25t-23.5 -10q-16 0 -28 17q-7 9 -13 22 t-13.5 30t-10.5 24q-127 285 -127 451q0 199 53 333q162 403 875 403h224v256q0 26 19 45t45 19t45 -19l512 -512q19 -19 19 -45z" /> | ||||
| <glyph unicode="" d="M755 480q0 -13 -10 -23l-332 -332l144 -144q19 -19 19 -45t-19 -45t-45 -19h-448q-26 0 -45 19t-19 45v448q0 26 19 45t45 19t45 -19l144 -144l332 332q10 10 23 10t23 -10l114 -114q10 -10 10 -23zM1536 1344v-448q0 -26 -19 -45t-45 -19t-45 19l-144 144l-332 -332 q-10 -10 -23 -10t-23 10l-114 114q-10 10 -10 23t10 23l332 332l-144 144q-19 19 -19 45t19 45t45 19h448q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" d="M768 576v-448q0 -26 -19 -45t-45 -19t-45 19l-144 144l-332 -332q-10 -10 -23 -10t-23 10l-114 114q-10 10 -10 23t10 23l332 332l-144 144q-19 19 -19 45t19 45t45 19h448q26 0 45 -19t19 -45zM1523 1248q0 -13 -10 -23l-332 -332l144 -144q19 -19 19 -45t-19 -45 t-45 -19h-448q-26 0 -45 19t-19 45v448q0 26 19 45t45 19t45 -19l144 -144l332 332q10 10 23 10t23 -10l114 -114q10 -10 10 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1408 800v-192q0 -40 -28 -68t-68 -28h-416v-416q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v416h-416q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h416v416q0 40 28 68t68 28h192q40 0 68 -28t28 -68v-416h416q40 0 68 -28t28 -68z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1408 800v-192q0 -40 -28 -68t-68 -28h-1216q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h1216q40 0 68 -28t28 -68z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1482 486q46 -26 59.5 -77.5t-12.5 -97.5l-64 -110q-26 -46 -77.5 -59.5t-97.5 12.5l-266 153v-307q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v307l-266 -153q-46 -26 -97.5 -12.5t-77.5 59.5l-64 110q-26 46 -12.5 97.5t59.5 77.5l266 154l-266 154 q-46 26 -59.5 77.5t12.5 97.5l64 110q26 46 77.5 59.5t97.5 -12.5l266 -153v307q0 52 38 90t90 38h128q52 0 90 -38t38 -90v-307l266 153q46 26 97.5 12.5t77.5 -59.5l64 -110q26 -46 12.5 -97.5t-59.5 -77.5l-266 -154z" /> | ||||
| <glyph unicode="" d="M768 1408q209 0 385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103zM896 161v190q0 14 -9 23.5t-22 9.5h-192q-13 0 -23 -10t-10 -23v-190q0 -13 10 -23t23 -10h192 q13 0 22 9.5t9 23.5zM894 505l18 621q0 12 -10 18q-10 8 -24 8h-220q-14 0 -24 -8q-10 -6 -10 -18l17 -621q0 -10 10 -17.5t24 -7.5h185q14 0 23.5 7.5t10.5 17.5z" /> | ||||
| <glyph unicode="" d="M928 180v56v468v192h-320v-192v-468v-56q0 -25 18 -38.5t46 -13.5h192q28 0 46 13.5t18 38.5zM472 1024h195l-126 161q-26 31 -69 31q-40 0 -68 -28t-28 -68t28 -68t68 -28zM1160 1120q0 40 -28 68t-68 28q-43 0 -69 -31l-125 -161h194q40 0 68 28t28 68zM1536 864v-320 q0 -14 -9 -23t-23 -9h-96v-416q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v416h-96q-14 0 -23 9t-9 23v320q0 14 9 23t23 9h440q-93 0 -158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5q107 0 168 -77l128 -165l128 165q61 77 168 77q93 0 158.5 -65.5t65.5 -158.5 t-65.5 -158.5t-158.5 -65.5h440q14 0 23 -9t9 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1280 832q0 26 -19 45t-45 19q-172 0 -318 -49.5t-259.5 -134t-235.5 -219.5q-19 -21 -19 -45q0 -26 19 -45t45 -19q24 0 45 19q27 24 74 71t67 66q137 124 268.5 176t313.5 52q26 0 45 19t19 45zM1792 1030q0 -95 -20 -193q-46 -224 -184.5 -383t-357.5 -268 q-214 -108 -438 -108q-148 0 -286 47q-15 5 -88 42t-96 37q-16 0 -39.5 -32t-45 -70t-52.5 -70t-60 -32q-30 0 -51 11t-31 24t-27 42q-2 4 -6 11t-5.5 10t-3 9.5t-1.5 13.5q0 35 31 73.5t68 65.5t68 56t31 48q0 4 -14 38t-16 44q-9 51 -9 104q0 115 43.5 220t119 184.5 t170.5 139t204 95.5q55 18 145 25.5t179.5 9t178.5 6t163.5 24t113.5 56.5l29.5 29.5t29.5 28t27 20t36.5 16t43.5 4.5q39 0 70.5 -46t47.5 -112t24 -124t8 -96z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1408 -160v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5zM1152 896q0 -78 -24.5 -144t-64 -112.5t-87.5 -88t-96 -77.5t-87.5 -72t-64 -81.5t-24.5 -96.5q0 -96 67 -224l-4 1l1 -1 q-90 41 -160 83t-138.5 100t-113.5 122.5t-72.5 150.5t-27.5 184q0 78 24.5 144t64 112.5t87.5 88t96 77.5t87.5 72t64 81.5t24.5 96.5q0 94 -66 224l3 -1l-1 1q90 -41 160 -83t138.5 -100t113.5 -122.5t72.5 -150.5t27.5 -184z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1664 576q-152 236 -381 353q61 -104 61 -225q0 -185 -131.5 -316.5t-316.5 -131.5t-316.5 131.5t-131.5 316.5q0 121 61 225q-229 -117 -381 -353q133 -205 333.5 -326.5t434.5 -121.5t434.5 121.5t333.5 326.5zM944 960q0 20 -14 34t-34 14q-125 0 -214.5 -89.5 t-89.5 -214.5q0 -20 14 -34t34 -14t34 14t14 34q0 86 61 147t147 61q20 0 34 14t14 34zM1792 576q0 -34 -20 -69q-140 -230 -376.5 -368.5t-499.5 -138.5t-499.5 139t-376.5 368q-20 35 -20 69t20 69q140 229 376.5 368t499.5 139t499.5 -139t376.5 -368q20 -35 20 -69z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M555 201l78 141q-87 63 -136 159t-49 203q0 121 61 225q-229 -117 -381 -353q167 -258 427 -375zM944 960q0 20 -14 34t-34 14q-125 0 -214.5 -89.5t-89.5 -214.5q0 -20 14 -34t34 -14t34 14t14 34q0 86 61 147t147 61q20 0 34 14t14 34zM1307 1151q0 -7 -1 -9 q-105 -188 -315 -566t-316 -567l-49 -89q-10 -16 -28 -16q-12 0 -134 70q-16 10 -16 28q0 12 44 87q-143 65 -263.5 173t-208.5 245q-20 31 -20 69t20 69q153 235 380 371t496 136q89 0 180 -17l54 97q10 16 28 16q5 0 18 -6t31 -15.5t33 -18.5t31.5 -18.5t19.5 -11.5 q16 -10 16 -27zM1344 704q0 -139 -79 -253.5t-209 -164.5l280 502q8 -45 8 -84zM1792 576q0 -35 -20 -69q-39 -64 -109 -145q-150 -172 -347.5 -267t-419.5 -95l74 132q212 18 392.5 137t301.5 307q-115 179 -282 294l63 112q95 -64 182.5 -153t144.5 -184q20 -34 20 -69z " /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1024 161v190q0 14 -9.5 23.5t-22.5 9.5h-192q-13 0 -22.5 -9.5t-9.5 -23.5v-190q0 -14 9.5 -23.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 23.5zM1022 535l18 459q0 12 -10 19q-13 11 -24 11h-220q-11 0 -24 -11q-10 -7 -10 -21l17 -457q0 -10 10 -16.5t24 -6.5h185 q14 0 23.5 6.5t10.5 16.5zM1008 1469l768 -1408q35 -63 -2 -126q-17 -29 -46.5 -46t-63.5 -17h-1536q-34 0 -63.5 17t-46.5 46q-37 63 -2 126l768 1408q17 31 47 49t65 18t65 -18t47 -49z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1376 1376q44 -52 12 -148t-108 -172l-161 -161l160 -696q5 -19 -12 -33l-128 -96q-7 -6 -19 -6q-4 0 -7 1q-15 3 -21 16l-279 508l-259 -259l53 -194q5 -17 -8 -31l-96 -96q-9 -9 -23 -9h-2q-15 2 -24 13l-189 252l-252 189q-11 7 -13 23q-1 13 9 25l96 97q9 9 23 9 q6 0 8 -1l194 -53l259 259l-508 279q-14 8 -17 24q-2 16 9 27l128 128q14 13 30 8l665 -159l160 160q76 76 172 108t148 -12z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M128 -128h288v288h-288v-288zM480 -128h320v288h-320v-288zM128 224h288v320h-288v-320zM480 224h320v320h-320v-320zM128 608h288v288h-288v-288zM864 -128h320v288h-320v-288zM480 608h320v288h-320v-288zM1248 -128h288v288h-288v-288zM864 224h320v320h-320v-320z M512 1088v288q0 13 -9.5 22.5t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-288q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5zM1248 224h288v320h-288v-320zM864 608h320v288h-320v-288zM1248 608h288v288h-288v-288zM1280 1088v288q0 13 -9.5 22.5t-22.5 9.5h-64 q-13 0 -22.5 -9.5t-9.5 -22.5v-288q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5zM1664 1152v-1280q0 -52 -38 -90t-90 -38h-1408q-52 0 -90 38t-38 90v1280q0 52 38 90t90 38h128v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h384v96q0 66 47 113t113 47 h64q66 0 113 -47t47 -113v-96h128q52 0 90 -38t38 -90z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M666 1055q-60 -92 -137 -273q-22 45 -37 72.5t-40.5 63.5t-51 56.5t-63 35t-81.5 14.5h-224q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h224q250 0 410 -225zM1792 256q0 -14 -9 -23l-320 -320q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5v192q-32 0 -85 -0.5t-81 -1t-73 1 t-71 5t-64 10.5t-63 18.5t-58 28.5t-59 40t-55 53.5t-56 69.5q59 93 136 273q22 -45 37 -72.5t40.5 -63.5t51 -56.5t63 -35t81.5 -14.5h256v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23zM1792 1152q0 -14 -9 -23l-320 -320q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5 v192h-256q-48 0 -87 -15t-69 -45t-51 -61.5t-45 -77.5q-32 -62 -78 -171q-29 -66 -49.5 -111t-54 -105t-64 -100t-74 -83t-90 -68.5t-106.5 -42t-128 -16.5h-224q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h224q48 0 87 15t69 45t51 61.5t45 77.5q32 62 78 171q29 66 49.5 111 t54 105t64 100t74 83t90 68.5t106.5 42t128 16.5h256v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 640q0 -174 -120 -321.5t-326 -233t-450 -85.5q-70 0 -145 8q-198 -175 -460 -242q-49 -14 -114 -22q-17 -2 -30.5 9t-17.5 29v1q-3 4 -0.5 12t2 10t4.5 9.5l6 9t7 8.5t8 9q7 8 31 34.5t34.5 38t31 39.5t32.5 51t27 59t26 76q-157 89 -247.5 220t-90.5 281 q0 130 71 248.5t191 204.5t286 136.5t348 50.5q244 0 450 -85.5t326 -233t120 -321.5z" /> | ||||
| <glyph unicode="" d="M1536 704v-128q0 -201 -98.5 -362t-274 -251.5t-395.5 -90.5t-395.5 90.5t-274 251.5t-98.5 362v128q0 26 19 45t45 19h384q26 0 45 -19t19 -45v-128q0 -52 23.5 -90t53.5 -57t71 -30t64 -13t44 -2t44 2t64 13t71 30t53.5 57t23.5 90v128q0 26 19 45t45 19h384 q26 0 45 -19t19 -45zM512 1344v-384q0 -26 -19 -45t-45 -19h-384q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h384q26 0 45 -19t19 -45zM1536 1344v-384q0 -26 -19 -45t-45 -19h-384q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h384q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1611 320q0 -53 -37 -90l-75 -75q-38 -38 -91 -38q-54 0 -90 38l-486 485l-486 -485q-36 -38 -90 -38t-90 38l-75 75q-38 36 -38 90q0 53 38 91l651 651q37 37 90 37q52 0 91 -37l650 -651q38 -38 38 -91z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1611 832q0 -53 -37 -90l-651 -651q-38 -38 -91 -38q-54 0 -90 38l-651 651q-38 36 -38 90q0 53 38 91l74 75q39 37 91 37q53 0 90 -37l486 -486l486 486q37 37 90 37q52 0 91 -37l75 -75q37 -39 37 -91z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M1280 32q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-8 0 -13.5 2t-9 7t-5.5 8t-3 11.5t-1 11.5v13v11v160v416h-192q-26 0 -45 19t-19 45q0 24 15 41l320 384q19 22 49 22t49 -22l320 -384q15 -17 15 -41q0 -26 -19 -45t-45 -19h-192v-384h576q16 0 25 -11l160 -192q7 -11 7 -21 zM1920 448q0 -24 -15 -41l-320 -384q-20 -23 -49 -23t-49 23l-320 384q-15 17 -15 41q0 26 19 45t45 19h192v384h-576q-16 0 -25 12l-160 192q-7 9 -7 20q0 13 9.5 22.5t22.5 9.5h960q8 0 13.5 -2t9 -7t5.5 -8t3 -11.5t1 -11.5v-13v-11v-160v-416h192q26 0 45 -19t19 -45z " /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M640 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1536 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1664 1088v-512q0 -24 -16 -42.5t-41 -21.5 l-1044 -122q1 -7 4.5 -21.5t6 -26.5t2.5 -22q0 -16 -24 -64h920q26 0 45 -19t19 -45t-19 -45t-45 -19h-1024q-26 0 -45 19t-19 45q0 14 11 39.5t29.5 59.5t20.5 38l-177 823h-204q-26 0 -45 19t-19 45t19 45t45 19h256q16 0 28.5 -6.5t20 -15.5t13 -24.5t7.5 -26.5 t5.5 -29.5t4.5 -25.5h1201q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1664 928v-704q0 -92 -66 -158t-158 -66h-1216q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h672q92 0 158 -66t66 -158z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M1879 584q0 -31 -31 -66l-336 -396q-43 -51 -120.5 -86.5t-143.5 -35.5h-1088q-34 0 -60.5 13t-26.5 43q0 31 31 66l336 396q43 51 120.5 86.5t143.5 35.5h1088q34 0 60.5 -13t26.5 -43zM1536 928v-160h-832q-94 0 -197 -47.5t-164 -119.5l-337 -396l-5 -6q0 4 -0.5 12.5 t-0.5 12.5v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h544q92 0 158 -66t66 -158z" /> | ||||
| <glyph unicode="" horiz-adv-x="768" d="M704 1216q0 -26 -19 -45t-45 -19h-128v-1024h128q26 0 45 -19t19 -45t-19 -45l-256 -256q-19 -19 -45 -19t-45 19l-256 256q-19 19 -19 45t19 45t45 19h128v1024h-128q-26 0 -45 19t-19 45t19 45l256 256q19 19 45 19t45 -19l256 -256q19 -19 19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 640q0 -26 -19 -45l-256 -256q-19 -19 -45 -19t-45 19t-19 45v128h-1024v-128q0 -26 -19 -45t-45 -19t-45 19l-256 256q-19 19 -19 45t19 45l256 256q19 19 45 19t45 -19t19 -45v-128h1024v128q0 26 19 45t45 19t45 -19l256 -256q19 -19 19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M512 512v-384h-256v384h256zM896 1024v-896h-256v896h256zM1280 768v-640h-256v640h256zM1664 1152v-1024h-256v1024h256zM1792 32v1216q0 13 -9.5 22.5t-22.5 9.5h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-1216q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5z M1920 1248v-1216q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" /> | ||||
| <glyph unicode="" d="M1280 926q-56 -25 -121 -34q68 40 93 117q-65 -38 -134 -51q-61 66 -153 66q-87 0 -148.5 -61.5t-61.5 -148.5q0 -29 5 -48q-129 7 -242 65t-192 155q-29 -50 -29 -106q0 -114 91 -175q-47 1 -100 26v-2q0 -75 50 -133.5t123 -72.5q-29 -8 -51 -8q-13 0 -39 4 q21 -63 74.5 -104t121.5 -42q-116 -90 -261 -90q-26 0 -50 3q148 -94 322 -94q112 0 210 35.5t168 95t120.5 137t75 162t24.5 168.5q0 18 -1 27q63 45 105 109zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5 t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" d="M1307 618l23 219h-198v109q0 49 15.5 68.5t71.5 19.5h110v219h-175q-152 0 -218 -72t-66 -213v-131h-131v-219h131v-635h262v635h175zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960 q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M928 704q0 14 -9 23t-23 9q-66 0 -113 -47t-47 -113q0 -14 9 -23t23 -9t23 9t9 23q0 40 28 68t68 28q14 0 23 9t9 23zM1152 574q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181zM128 0h1536v128h-1536v-128zM1280 574q0 159 -112.5 271.5 t-271.5 112.5t-271.5 -112.5t-112.5 -271.5t112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5zM256 1216h384v128h-384v-128zM128 1024h1536v118v138h-828l-64 -128h-644v-128zM1792 1280v-1280q0 -53 -37.5 -90.5t-90.5 -37.5h-1536q-53 0 -90.5 37.5t-37.5 90.5v1280 q0 53 37.5 90.5t90.5 37.5h1536q53 0 90.5 -37.5t37.5 -90.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M832 1024q0 80 -56 136t-136 56t-136 -56t-56 -136q0 -42 19 -83q-41 19 -83 19q-80 0 -136 -56t-56 -136t56 -136t136 -56t136 56t56 136q0 42 -19 83q41 -19 83 -19q80 0 136 56t56 136zM1683 320q0 -17 -49 -66t-66 -49q-9 0 -28.5 16t-36.5 33t-38.5 40t-24.5 26 l-96 -96l220 -220q28 -28 28 -68q0 -42 -39 -81t-81 -39q-40 0 -68 28l-671 671q-176 -131 -365 -131q-163 0 -265.5 102.5t-102.5 265.5q0 160 95 313t248 248t313 95q163 0 265.5 -102.5t102.5 -265.5q0 -189 -131 -365l355 -355l96 96q-3 3 -26 24.5t-40 38.5t-33 36.5 t-16 28.5q0 17 49 66t66 49q13 0 23 -10q6 -6 46 -44.5t82 -79.5t86.5 -86t73 -78t28.5 -41z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M896 640q0 106 -75 181t-181 75t-181 -75t-75 -181t75 -181t181 -75t181 75t75 181zM1664 128q0 52 -38 90t-90 38t-90 -38t-38 -90q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1664 1152q0 52 -38 90t-90 38t-90 -38t-38 -90q0 -53 37.5 -90.5t90.5 -37.5 t90.5 37.5t37.5 90.5zM1280 731v-185q0 -10 -7 -19.5t-16 -10.5l-155 -24q-11 -35 -32 -76q34 -48 90 -115q7 -10 7 -20q0 -12 -7 -19q-23 -30 -82.5 -89.5t-78.5 -59.5q-11 0 -21 7l-115 90q-37 -19 -77 -31q-11 -108 -23 -155q-7 -24 -30 -24h-186q-11 0 -20 7.5t-10 17.5 l-23 153q-34 10 -75 31l-118 -89q-7 -7 -20 -7q-11 0 -21 8q-144 133 -144 160q0 9 7 19q10 14 41 53t47 61q-23 44 -35 82l-152 24q-10 1 -17 9.5t-7 19.5v185q0 10 7 19.5t16 10.5l155 24q11 35 32 76q-34 48 -90 115q-7 11 -7 20q0 12 7 20q22 30 82 89t79 59q11 0 21 -7 l115 -90q34 18 77 32q11 108 23 154q7 24 30 24h186q11 0 20 -7.5t10 -17.5l23 -153q34 -10 75 -31l118 89q8 7 20 7q11 0 21 -8q144 -133 144 -160q0 -9 -7 -19q-12 -16 -42 -54t-45 -60q23 -48 34 -82l152 -23q10 -2 17 -10.5t7 -19.5zM1920 198v-140q0 -16 -149 -31 q-12 -27 -30 -52q51 -113 51 -138q0 -4 -4 -7q-122 -71 -124 -71q-8 0 -46 47t-52 68q-20 -2 -30 -2t-30 2q-14 -21 -52 -68t-46 -47q-2 0 -124 71q-4 3 -4 7q0 25 51 138q-18 25 -30 52q-149 15 -149 31v140q0 16 149 31q13 29 30 52q-51 113 -51 138q0 4 4 7q4 2 35 20 t59 34t30 16q8 0 46 -46.5t52 -67.5q20 2 30 2t30 -2q51 71 92 112l6 2q4 0 124 -70q4 -3 4 -7q0 -25 -51 -138q17 -23 30 -52q149 -15 149 -31zM1920 1222v-140q0 -16 -149 -31q-12 -27 -30 -52q51 -113 51 -138q0 -4 -4 -7q-122 -71 -124 -71q-8 0 -46 47t-52 68 q-20 -2 -30 -2t-30 2q-14 -21 -52 -68t-46 -47q-2 0 -124 71q-4 3 -4 7q0 25 51 138q-18 25 -30 52q-149 15 -149 31v140q0 16 149 31q13 29 30 52q-51 113 -51 138q0 4 4 7q4 2 35 20t59 34t30 16q8 0 46 -46.5t52 -67.5q20 2 30 2t30 -2q51 71 92 112l6 2q4 0 124 -70 q4 -3 4 -7q0 -25 -51 -138q17 -23 30 -52q149 -15 149 -31z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1408 768q0 -139 -94 -257t-256.5 -186.5t-353.5 -68.5q-86 0 -176 16q-124 -88 -278 -128q-36 -9 -86 -16h-3q-11 0 -20.5 8t-11.5 21q-1 3 -1 6.5t0.5 6.5t2 6l2.5 5t3.5 5.5t4 5t4.5 5t4 4.5q5 6 23 25t26 29.5t22.5 29t25 38.5t20.5 44q-124 72 -195 177t-71 224 q0 139 94 257t256.5 186.5t353.5 68.5t353.5 -68.5t256.5 -186.5t94 -257zM1792 512q0 -120 -71 -224.5t-195 -176.5q10 -24 20.5 -44t25 -38.5t22.5 -29t26 -29.5t23 -25q1 -1 4 -4.5t4.5 -5t4 -5t3.5 -5.5l2.5 -5t2 -6t0.5 -6.5t-1 -6.5q-3 -14 -13 -22t-22 -7 q-50 7 -86 16q-154 40 -278 128q-90 -16 -176 -16q-271 0 -472 132q58 -4 88 -4q161 0 309 45t264 129q125 92 192 212t67 254q0 77 -23 152q129 -71 204 -178t75 -230z" /> | ||||
| <glyph unicode="" d="M256 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 768q0 51 -39 89.5t-89 38.5h-352q0 58 48 159.5t48 160.5q0 98 -32 145t-128 47q-26 -26 -38 -85t-30.5 -125.5t-59.5 -109.5q-22 -23 -77 -91q-4 -5 -23 -30t-31.5 -41t-34.5 -42.5 t-40 -44t-38.5 -35.5t-40 -27t-35.5 -9h-32v-640h32q13 0 31.5 -3t33 -6.5t38 -11t35 -11.5t35.5 -12.5t29 -10.5q211 -73 342 -73h121q192 0 192 167q0 26 -5 56q30 16 47.5 52.5t17.5 73.5t-18 69q53 50 53 119q0 25 -10 55.5t-25 47.5q32 1 53.5 47t21.5 81zM1536 769 q0 -89 -49 -163q9 -33 9 -69q0 -77 -38 -144q3 -21 3 -43q0 -101 -60 -178q1 -139 -85 -219.5t-227 -80.5h-36h-93q-96 0 -189.5 22.5t-216.5 65.5q-116 40 -138 40h-288q-53 0 -90.5 37.5t-37.5 90.5v640q0 53 37.5 90.5t90.5 37.5h274q36 24 137 155q58 75 107 128 q24 25 35.5 85.5t30.5 126.5t62 108q39 37 90 37q84 0 151 -32.5t102 -101.5t35 -186q0 -93 -48 -192h176q104 0 180 -76t76 -179z" /> | ||||
| <glyph unicode="" d="M256 1088q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 512q0 35 -21.5 81t-53.5 47q15 17 25 47.5t10 55.5q0 69 -53 119q18 32 18 69t-17.5 73.5t-47.5 52.5q5 30 5 56q0 85 -49 126t-136 41h-128q-131 0 -342 -73q-5 -2 -29 -10.5 t-35.5 -12.5t-35 -11.5t-38 -11t-33 -6.5t-31.5 -3h-32v-640h32q16 0 35.5 -9t40 -27t38.5 -35.5t40 -44t34.5 -42.5t31.5 -41t23 -30q55 -68 77 -91q41 -43 59.5 -109.5t30.5 -125.5t38 -85q96 0 128 47t32 145q0 59 -48 160.5t-48 159.5h352q50 0 89 38.5t39 89.5z M1536 511q0 -103 -76 -179t-180 -76h-176q48 -99 48 -192q0 -118 -35 -186q-35 -69 -102 -101.5t-151 -32.5q-51 0 -90 37q-34 33 -54 82t-25.5 90.5t-17.5 84.5t-31 64q-48 50 -107 127q-101 131 -137 155h-274q-53 0 -90.5 37.5t-37.5 90.5v640q0 53 37.5 90.5t90.5 37.5 h288q22 0 138 40q128 44 223 66t200 22h112q140 0 226.5 -79t85.5 -216v-5q60 -77 60 -178q0 -22 -3 -43q38 -67 38 -144q0 -36 -9 -69q49 -74 49 -163z" /> | ||||
| <glyph unicode="" horiz-adv-x="896" d="M832 1504v-1339l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1664 940q0 81 -21.5 143t-55 98.5t-81.5 59.5t-94 31t-98 8t-112 -25.5t-110.5 -64t-86.5 -72t-60 -61.5q-18 -22 -49 -22t-49 22q-24 28 -60 61.5t-86.5 72t-110.5 64t-112 25.5t-98 -8t-94 -31t-81.5 -59.5t-55 -98.5t-21.5 -143q0 -168 187 -355l581 -560l580 559 q188 188 188 356zM1792 940q0 -221 -229 -450l-623 -600q-18 -18 -44 -18t-44 18l-624 602q-10 8 -27.5 26t-55.5 65.5t-68 97.5t-53.5 121t-23.5 138q0 220 127 344t351 124q62 0 126.5 -21.5t120 -58t95.5 -68.5t76 -68q36 36 76 68t95.5 68.5t120 58t126.5 21.5 q224 0 351 -124t127 -344z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M640 96q0 -4 1 -20t0.5 -26.5t-3 -23.5t-10 -19.5t-20.5 -6.5h-320q-119 0 -203.5 84.5t-84.5 203.5v704q0 119 84.5 203.5t203.5 84.5h320q13 0 22.5 -9.5t9.5 -22.5q0 -4 1 -20t0.5 -26.5t-3 -23.5t-10 -19.5t-20.5 -6.5h-320q-66 0 -113 -47t-47 -113v-704 q0 -66 47 -113t113 -47h288h11h13t11.5 -1t11.5 -3t8 -5.5t7 -9t2 -13.5zM1568 640q0 -26 -19 -45l-544 -544q-19 -19 -45 -19t-45 19t-19 45v288h-448q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h448v288q0 26 19 45t45 19t45 -19l544 -544q19 -19 19 -45z" /> | ||||
| <glyph unicode="" d="M237 122h231v694h-231v-694zM483 1030q-1 52 -36 86t-93 34t-94.5 -34t-36.5 -86q0 -51 35.5 -85.5t92.5 -34.5h1q59 0 95 34.5t36 85.5zM1068 122h231v398q0 154 -73 233t-193 79q-136 0 -209 -117h2v101h-231q3 -66 0 -694h231v388q0 38 7 56q15 35 45 59.5t74 24.5 q116 0 116 -157v-371zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1152" d="M480 672v448q0 14 -9 23t-23 9t-23 -9t-9 -23v-448q0 -14 9 -23t23 -9t23 9t9 23zM1152 320q0 -26 -19 -45t-45 -19h-429l-51 -483q-2 -12 -10.5 -20.5t-20.5 -8.5h-1q-27 0 -32 27l-76 485h-404q-26 0 -45 19t-19 45q0 123 78.5 221.5t177.5 98.5v512q-52 0 -90 38 t-38 90t38 90t90 38h640q52 0 90 -38t38 -90t-38 -90t-90 -38v-512q99 0 177.5 -98.5t78.5 -221.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1408 608v-320q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h704q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-704q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v320 q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1792 1472v-512q0 -26 -19 -45t-45 -19t-45 19l-176 176l-652 -652q-10 -10 -23 -10t-23 10l-114 114q-10 10 -10 23t10 23l652 652l-176 176q-19 19 -19 45t19 45t45 19h512q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" d="M1184 640q0 -26 -19 -45l-544 -544q-19 -19 -45 -19t-45 19t-19 45v288h-448q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h448v288q0 26 19 45t45 19t45 -19l544 -544q19 -19 19 -45zM1536 992v-704q0 -119 -84.5 -203.5t-203.5 -84.5h-320q-13 0 -22.5 9.5t-9.5 22.5 q0 4 -1 20t-0.5 26.5t3 23.5t10 19.5t20.5 6.5h320q66 0 113 47t47 113v704q0 66 -47 113t-113 47h-288h-11h-13t-11.5 1t-11.5 3t-8 5.5t-7 9t-2 13.5q0 4 -1 20t-0.5 26.5t3 23.5t10 19.5t20.5 6.5h320q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M458 653q-74 162 -74 371h-256v-96q0 -78 94.5 -162t235.5 -113zM1536 928v96h-256q0 -209 -74 -371q141 29 235.5 113t94.5 162zM1664 1056v-128q0 -71 -41.5 -143t-112 -130t-173 -97.5t-215.5 -44.5q-42 -54 -95 -95q-38 -34 -52.5 -72.5t-14.5 -89.5q0 -54 30.5 -91 t97.5 -37q75 0 133.5 -45.5t58.5 -114.5v-64q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v64q0 69 58.5 114.5t133.5 45.5q67 0 97.5 37t30.5 91q0 51 -14.5 89.5t-52.5 72.5q-53 41 -95 95q-113 5 -215.5 44.5t-173 97.5t-112 130t-41.5 143v128q0 40 28 68t68 28h288v96 q0 66 47 113t113 47h576q66 0 113 -47t47 -113v-96h288q40 0 68 -28t28 -68z" /> | ||||
| <glyph unicode="" d="M394 184q-8 -9 -20 3q-13 11 -4 19q8 9 20 -3q12 -11 4 -19zM352 245q9 -12 0 -19q-8 -6 -17 7t0 18q9 7 17 -6zM291 305q-5 -7 -13 -2q-10 5 -7 12q3 5 13 2q10 -5 7 -12zM322 271q-6 -7 -16 3q-9 11 -2 16q6 6 16 -3q9 -11 2 -16zM451 159q-4 -12 -19 -6q-17 4 -13 15 t19 7q16 -5 13 -16zM514 154q0 -11 -16 -11q-17 -2 -17 11q0 11 16 11q17 2 17 -11zM572 164q2 -10 -14 -14t-18 8t14 15q16 2 18 -9zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-224q-16 0 -24.5 1t-19.5 5t-16 14.5t-5 27.5v239q0 97 -52 142q57 6 102.5 18t94 39 t81 66.5t53 105t20.5 150.5q0 121 -79 206q37 91 -8 204q-28 9 -81 -11t-92 -44l-38 -24q-93 26 -192 26t-192 -26q-16 11 -42.5 27t-83.5 38.5t-86 13.5q-44 -113 -7 -204q-79 -85 -79 -206q0 -85 20.5 -150t52.5 -105t80.5 -67t94 -39t102.5 -18q-40 -36 -49 -103 q-21 -10 -45 -15t-57 -5t-65.5 21.5t-55.5 62.5q-19 32 -48.5 52t-49.5 24l-20 3q-21 0 -29 -4.5t-5 -11.5t9 -14t13 -12l7 -5q22 -10 43.5 -38t31.5 -51l10 -23q13 -38 44 -61.5t67 -30t69.5 -7t55.5 3.5l23 4q0 -38 0.5 -103t0.5 -68q0 -22 -11 -33.5t-22 -13t-33 -1.5 h-224q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1280 64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1536 64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 288v-320q0 -40 -28 -68t-68 -28h-1472q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h427q21 -56 70.5 -92 t110.5 -36h256q61 0 110.5 36t70.5 92h427q40 0 68 -28t28 -68zM1339 936q-17 -40 -59 -40h-256v-448q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v448h-256q-42 0 -59 40q-17 39 14 69l448 448q18 19 45 19t45 -19l448 -448q31 -30 14 -69z" /> | ||||
| <glyph unicode="" d="M1407 710q0 44 -7 113.5t-18 96.5q-12 30 -17 44t-9 36.5t-4 48.5q0 23 5 68.5t5 67.5q0 37 -10 55q-4 1 -13 1q-19 0 -58 -4.5t-59 -4.5q-60 0 -176 24t-175 24q-43 0 -94.5 -11.5t-85 -23.5t-89.5 -34q-137 -54 -202 -103q-96 -73 -159.5 -189.5t-88 -236t-24.5 -248.5 q0 -40 12.5 -120t12.5 -121q0 -23 -11 -66.5t-11 -65.5t12 -36.5t34 -14.5q24 0 72.5 11t73.5 11q57 0 169.5 -15.5t169.5 -15.5q181 0 284 36q129 45 235.5 152.5t166 245.5t59.5 275zM1535 712q0 -165 -70 -327.5t-196 -288t-281 -180.5q-124 -44 -326 -44 q-57 0 -170 14.5t-169 14.5q-24 0 -72.5 -14.5t-73.5 -14.5q-73 0 -123.5 55.5t-50.5 128.5q0 24 11 68t11 67q0 40 -12.5 120.5t-12.5 121.5q0 111 18 217.5t54.5 209.5t100.5 194t150 156q78 59 232 120q194 78 316 78q60 0 175.5 -24t173.5 -24q19 0 57 5t58 5 q81 0 118 -50.5t37 -134.5q0 -23 -5 -68t-5 -68q0 -10 1 -18.5t3 -17t4 -13.5t6.5 -16t6.5 -17q16 -40 25 -118.5t9 -136.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1408 296q0 -27 -10 -70.5t-21 -68.5q-21 -50 -122 -106q-94 -51 -186 -51q-27 0 -52.5 3.5t-57.5 12.5t-47.5 14.5t-55.5 20.5t-49 18q-98 35 -175 83q-128 79 -264.5 215.5t-215.5 264.5q-48 77 -83 175q-3 9 -18 49t-20.5 55.5t-14.5 47.5t-12.5 57.5t-3.5 52.5 q0 92 51 186q56 101 106 122q25 11 68.5 21t70.5 10q14 0 21 -3q18 -6 53 -76q11 -19 30 -54t35 -63.5t31 -53.5q3 -4 17.5 -25t21.5 -35.5t7 -28.5q0 -20 -28.5 -50t-62 -55t-62 -53t-28.5 -46q0 -9 5 -22.5t8.5 -20.5t14 -24t11.5 -19q76 -137 174 -235t235 -174 q2 -1 19 -11.5t24 -14t20.5 -8.5t22.5 -5q18 0 46 28.5t53 62t55 62t50 28.5q14 0 28.5 -7t35.5 -21.5t25 -17.5q25 -15 53.5 -31t63.5 -35t54 -30q70 -35 76 -53q3 -7 3 -21z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1120 1280h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v832q0 66 -47 113t-113 47zM1408 1120v-832q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832 q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1280" d="M1152 1280h-1024v-1242l423 406l89 85l89 -85l423 -406v1242zM1164 1408q23 0 44 -9q33 -13 52.5 -41t19.5 -62v-1289q0 -34 -19.5 -62t-52.5 -41q-19 -8 -44 -8q-48 0 -83 32l-441 424l-441 -424q-36 -33 -83 -33q-23 0 -44 9q-33 13 -52.5 41t-19.5 62v1289 q0 34 19.5 62t52.5 41q21 9 44 9h1048z" /> | ||||
| <glyph unicode="" d="M1280 343q0 11 -2 16q-3 8 -38.5 29.5t-88.5 49.5l-53 29q-5 3 -19 13t-25 15t-21 5q-18 0 -47 -32.5t-57 -65.5t-44 -33q-7 0 -16.5 3.5t-15.5 6.5t-17 9.5t-14 8.5q-99 55 -170.5 126.5t-126.5 170.5q-2 3 -8.5 14t-9.5 17t-6.5 15.5t-3.5 16.5q0 13 20.5 33.5t45 38.5 t45 39.5t20.5 36.5q0 10 -5 21t-15 25t-13 19q-3 6 -15 28.5t-25 45.5t-26.5 47.5t-25 40.5t-16.5 18t-16 2q-48 0 -101 -22q-46 -21 -80 -94.5t-34 -130.5q0 -16 2.5 -34t5 -30.5t9 -33t10 -29.5t12.5 -33t11 -30q60 -164 216.5 -320.5t320.5 -216.5q6 -2 30 -11t33 -12.5 t29.5 -10t33 -9t30.5 -5t34 -2.5q57 0 130.5 34t94.5 80q22 53 22 101zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1620 1128q-67 -98 -162 -167q1 -14 1 -42q0 -130 -38 -259.5t-115.5 -248.5t-184.5 -210.5t-258 -146t-323 -54.5q-271 0 -496 145q35 -4 78 -4q225 0 401 138q-105 2 -188 64.5t-114 159.5q33 -5 61 -5q43 0 85 11q-112 23 -185.5 111.5t-73.5 205.5v4q68 -38 146 -41 q-66 44 -105 115t-39 154q0 88 44 163q121 -149 294.5 -238.5t371.5 -99.5q-8 38 -8 74q0 134 94.5 228.5t228.5 94.5q140 0 236 -102q109 21 205 78q-37 -115 -142 -178q93 10 186 50z" /> | ||||
| <glyph unicode="" horiz-adv-x="768" d="M511 980h257l-30 -284h-227v-824h-341v824h-170v284h170v171q0 182 86 275.5t283 93.5h227v-284h-142q-39 0 -62.5 -6.5t-34 -23.5t-13.5 -34.5t-3 -49.5v-142z" /> | ||||
| <glyph unicode="" d="M1536 640q0 -251 -146.5 -451.5t-378.5 -277.5q-27 -5 -39.5 7t-12.5 30v211q0 97 -52 142q57 6 102.5 18t94 39t81 66.5t53 105t20.5 150.5q0 121 -79 206q37 91 -8 204q-28 9 -81 -11t-92 -44l-38 -24q-93 26 -192 26t-192 -26q-16 11 -42.5 27t-83.5 38.5t-86 13.5 q-44 -113 -7 -204q-79 -85 -79 -206q0 -85 20.5 -150t52.5 -105t80.5 -67t94 -39t102.5 -18q-40 -36 -49 -103q-21 -10 -45 -15t-57 -5t-65.5 21.5t-55.5 62.5q-19 32 -48.5 52t-49.5 24l-20 3q-21 0 -29 -4.5t-5 -11.5t9 -14t13 -12l7 -5q22 -10 43.5 -38t31.5 -51l10 -23 q13 -38 44 -61.5t67 -30t69.5 -7t55.5 3.5l23 4q0 -38 0.5 -89t0.5 -54q0 -18 -13 -30t-40 -7q-232 77 -378.5 277.5t-146.5 451.5q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1664 960v-256q0 -26 -19 -45t-45 -19h-64q-26 0 -45 19t-19 45v256q0 106 -75 181t-181 75t-181 -75t-75 -181v-192h96q40 0 68 -28t28 -68v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v576q0 40 28 68t68 28h672v192q0 185 131.5 316.5t316.5 131.5 t316.5 -131.5t131.5 -316.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M1760 1408q66 0 113 -47t47 -113v-1216q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1600zM160 1280q-13 0 -22.5 -9.5t-9.5 -22.5v-224h1664v224q0 13 -9.5 22.5t-22.5 9.5h-1600zM1760 0q13 0 22.5 9.5t9.5 22.5v608h-1664v-608 q0 -13 9.5 -22.5t22.5 -9.5h1600zM256 128v128h256v-128h-256zM640 128v128h384v-128h-384z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M384 192q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM896 69q2 -28 -17 -48q-18 -21 -47 -21h-135q-25 0 -43 16.5t-20 41.5q-22 229 -184.5 391.5t-391.5 184.5q-25 2 -41.5 20t-16.5 43v135q0 29 21 47q17 17 43 17h5q160 -13 306 -80.5 t259 -181.5q114 -113 181.5 -259t80.5 -306zM1408 67q2 -27 -18 -47q-18 -20 -46 -20h-143q-26 0 -44.5 17.5t-19.5 42.5q-12 215 -101 408.5t-231.5 336t-336 231.5t-408.5 102q-25 1 -42.5 19.5t-17.5 43.5v143q0 28 20 46q18 18 44 18h3q262 -13 501.5 -120t425.5 -294 q187 -186 294 -425.5t120 -501.5z" /> | ||||
| <glyph unicode="" d="M1040 320q0 -33 -23.5 -56.5t-56.5 -23.5t-56.5 23.5t-23.5 56.5t23.5 56.5t56.5 23.5t56.5 -23.5t23.5 -56.5zM1296 320q0 -33 -23.5 -56.5t-56.5 -23.5t-56.5 23.5t-23.5 56.5t23.5 56.5t56.5 23.5t56.5 -23.5t23.5 -56.5zM1408 160v320q0 13 -9.5 22.5t-22.5 9.5 h-1216q-13 0 -22.5 -9.5t-9.5 -22.5v-320q0 -13 9.5 -22.5t22.5 -9.5h1216q13 0 22.5 9.5t9.5 22.5zM178 640h1180l-157 482q-4 13 -16 21.5t-26 8.5h-782q-14 0 -26 -8.5t-16 -21.5zM1536 480v-320q0 -66 -47 -113t-113 -47h-1216q-66 0 -113 47t-47 113v320q0 25 16 75 l197 606q17 53 63 86t101 33h782q55 0 101 -33t63 -86l197 -606q16 -50 16 -75z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1664 896q53 0 90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5v-384q0 -52 -38 -90t-90 -38q-417 347 -812 380q-58 -19 -91 -66t-31 -100.5t40 -92.5q-20 -33 -23 -65.5t6 -58t33.5 -55t48 -50t61.5 -50.5q-29 -58 -111.5 -83t-168.5 -11.5t-132 55.5q-7 23 -29.5 87.5 t-32 94.5t-23 89t-15 101t3.5 98.5t22 110.5h-122q-66 0 -113 47t-47 113v192q0 66 47 113t113 47h480q435 0 896 384q52 0 90 -38t38 -90v-384zM1536 292v954q-394 -302 -768 -343v-270q377 -42 768 -341z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M848 -160q0 16 -16 16q-59 0 -101.5 42.5t-42.5 101.5q0 16 -16 16t-16 -16q0 -73 51.5 -124.5t124.5 -51.5q16 0 16 16zM183 128h1298q-164 181 -246.5 411.5t-82.5 484.5q0 256 -320 256t-320 -256q0 -254 -82.5 -484.5t-246.5 -411.5zM1664 128q0 -52 -38 -90t-90 -38 h-448q0 -106 -75 -181t-181 -75t-181 75t-75 181h-448q-52 0 -90 38t-38 90q190 161 287 397.5t97 498.5q0 165 96 262t264 117q-8 18 -8 37q0 40 28 68t68 28t68 -28t28 -68q0 -19 -8 -37q168 -20 264 -117t96 -262q0 -262 97 -498.5t287 -397.5z" /> | ||||
| <glyph unicode="" d="M1376 640l138 -135q30 -28 20 -70q-12 -41 -52 -51l-188 -48l53 -186q12 -41 -19 -70q-29 -31 -70 -19l-186 53l-48 -188q-10 -40 -51 -52q-12 -2 -19 -2q-31 0 -51 22l-135 138l-135 -138q-28 -30 -70 -20q-41 11 -51 52l-48 188l-186 -53q-41 -12 -70 19q-31 29 -19 70 l53 186l-188 48q-40 10 -52 51q-10 42 20 70l138 135l-138 135q-30 28 -20 70q12 41 52 51l188 48l-53 186q-12 41 19 70q29 31 70 19l186 -53l48 188q10 41 51 51q41 12 70 -19l135 -139l135 139q29 30 70 19q41 -10 51 -51l48 -188l186 53q41 12 70 -19q31 -29 19 -70 l-53 -186l188 -48q40 -10 52 -51q10 -42 -20 -70z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M256 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 768q0 51 -39 89.5t-89 38.5h-576q0 20 15 48.5t33 55t33 68t15 84.5q0 67 -44.5 97.5t-115.5 30.5q-24 0 -90 -139q-24 -44 -37 -65q-40 -64 -112 -145q-71 -81 -101 -106 q-69 -57 -140 -57h-32v-640h32q72 0 167 -32t193.5 -64t179.5 -32q189 0 189 167q0 26 -5 56q30 16 47.5 52.5t17.5 73.5t-18 69q53 50 53 119q0 25 -10 55.5t-25 47.5h331q52 0 90 38t38 90zM1792 769q0 -105 -75.5 -181t-180.5 -76h-169q-4 -62 -37 -119q3 -21 3 -43 q0 -101 -60 -178q1 -139 -85 -219.5t-227 -80.5q-133 0 -322 69q-164 59 -223 59h-288q-53 0 -90.5 37.5t-37.5 90.5v640q0 53 37.5 90.5t90.5 37.5h288q10 0 21.5 4.5t23.5 14t22.5 18t24 22.5t20.5 21.5t19 21.5t14 17q65 74 100 129q13 21 33 62t37 72t40.5 63t55 49.5 t69.5 17.5q125 0 206.5 -67t81.5 -189q0 -68 -22 -128h374q104 0 180 -76t76 -179z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1376 128h32v640h-32q-35 0 -67.5 12t-62.5 37t-50 46t-49 54q-2 3 -3.5 4.5t-4 4.5t-4.5 5q-72 81 -112 145q-14 22 -38 68q-1 3 -10.5 22.5t-18.5 36t-20 35.5t-21.5 30.5t-18.5 11.5q-71 0 -115.5 -30.5t-44.5 -97.5q0 -43 15 -84.5t33 -68t33 -55t15 -48.5h-576 q-50 0 -89 -38.5t-39 -89.5q0 -52 38 -90t90 -38h331q-15 -17 -25 -47.5t-10 -55.5q0 -69 53 -119q-18 -32 -18 -69t17.5 -73.5t47.5 -52.5q-4 -24 -4 -56q0 -85 48.5 -126t135.5 -41q84 0 183 32t194 64t167 32zM1664 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45 t45 -19t45 19t19 45zM1792 768v-640q0 -53 -37.5 -90.5t-90.5 -37.5h-288q-59 0 -223 -59q-190 -69 -317 -69q-142 0 -230 77.5t-87 217.5l1 5q-61 76 -61 178q0 22 3 43q-33 57 -37 119h-169q-105 0 -180.5 76t-75.5 181q0 103 76 179t180 76h374q-22 60 -22 128 q0 122 81.5 189t206.5 67q38 0 69.5 -17.5t55 -49.5t40.5 -63t37 -72t33 -62q35 -55 100 -129q2 -3 14 -17t19 -21.5t20.5 -21.5t24 -22.5t22.5 -18t23.5 -14t21.5 -4.5h288q53 0 90.5 -37.5t37.5 -90.5z" /> | ||||
| <glyph unicode="" d="M1280 -64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 700q0 189 -167 189q-26 0 -56 -5q-16 30 -52.5 47.5t-73.5 17.5t-69 -18q-50 53 -119 53q-25 0 -55.5 -10t-47.5 -25v331q0 52 -38 90t-90 38q-51 0 -89.5 -39t-38.5 -89v-576 q-20 0 -48.5 15t-55 33t-68 33t-84.5 15q-67 0 -97.5 -44.5t-30.5 -115.5q0 -24 139 -90q44 -24 65 -37q64 -40 145 -112q81 -71 106 -101q57 -69 57 -140v-32h640v32q0 72 32 167t64 193.5t32 179.5zM1536 705q0 -133 -69 -322q-59 -164 -59 -223v-288q0 -53 -37.5 -90.5 t-90.5 -37.5h-640q-53 0 -90.5 37.5t-37.5 90.5v288q0 10 -4.5 21.5t-14 23.5t-18 22.5t-22.5 24t-21.5 20.5t-21.5 19t-17 14q-74 65 -129 100q-21 13 -62 33t-72 37t-63 40.5t-49.5 55t-17.5 69.5q0 125 67 206.5t189 81.5q68 0 128 -22v374q0 104 76 180t179 76 q105 0 181 -75.5t76 -180.5v-169q62 -4 119 -37q21 3 43 3q101 0 178 -60q139 1 219.5 -85t80.5 -227z" /> | ||||
| <glyph unicode="" d="M1408 576q0 84 -32 183t-64 194t-32 167v32h-640v-32q0 -35 -12 -67.5t-37 -62.5t-46 -50t-54 -49q-9 -8 -14 -12q-81 -72 -145 -112q-22 -14 -68 -38q-3 -1 -22.5 -10.5t-36 -18.5t-35.5 -20t-30.5 -21.5t-11.5 -18.5q0 -71 30.5 -115.5t97.5 -44.5q43 0 84.5 15t68 33 t55 33t48.5 15v-576q0 -50 38.5 -89t89.5 -39q52 0 90 38t38 90v331q46 -35 103 -35q69 0 119 53q32 -18 69 -18t73.5 17.5t52.5 47.5q24 -4 56 -4q85 0 126 48.5t41 135.5zM1280 1344q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1536 580 q0 -142 -77.5 -230t-217.5 -87l-5 1q-76 -61 -178 -61q-22 0 -43 3q-54 -30 -119 -37v-169q0 -105 -76 -180.5t-181 -75.5q-103 0 -179 76t-76 180v374q-54 -22 -128 -22q-121 0 -188.5 81.5t-67.5 206.5q0 38 17.5 69.5t49.5 55t63 40.5t72 37t62 33q55 35 129 100 q3 2 17 14t21.5 19t21.5 20.5t22.5 24t18 22.5t14 23.5t4.5 21.5v288q0 53 37.5 90.5t90.5 37.5h640q53 0 90.5 -37.5t37.5 -90.5v-288q0 -59 59 -223q69 -190 69 -317z" /> | ||||
| <glyph unicode="" d="M1280 576v128q0 26 -19 45t-45 19h-502l189 189q19 19 19 45t-19 45l-91 91q-18 18 -45 18t-45 -18l-362 -362l-91 -91q-18 -18 -18 -45t18 -45l91 -91l362 -362q18 -18 45 -18t45 18l91 91q18 18 18 45t-18 45l-189 189h502q26 0 45 19t19 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1285 640q0 27 -18 45l-91 91l-362 362q-18 18 -45 18t-45 -18l-91 -91q-18 -18 -18 -45t18 -45l189 -189h-502q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h502l-189 -189q-19 -19 -19 -45t19 -45l91 -91q18 -18 45 -18t45 18l362 362l91 91q18 18 18 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1284 641q0 27 -18 45l-362 362l-91 91q-18 18 -45 18t-45 -18l-91 -91l-362 -362q-18 -18 -18 -45t18 -45l91 -91q18 -18 45 -18t45 18l189 189v-502q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v502l189 -189q19 -19 45 -19t45 19l91 91q18 18 18 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1284 639q0 27 -18 45l-91 91q-18 18 -45 18t-45 -18l-189 -189v502q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-502l-189 189q-19 19 -45 19t-45 -19l-91 -91q-18 -18 -18 -45t18 -45l362 -362l91 -91q18 -18 45 -18t45 18l91 91l362 362q18 18 18 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M768 1408q209 0 385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103zM1042 887q-2 -1 -9.5 -9.5t-13.5 -9.5q2 0 4.5 5t5 11t3.5 7q6 7 22 15q14 6 52 12q34 8 51 -11 q-2 2 9.5 13t14.5 12q3 2 15 4.5t15 7.5l2 22q-12 -1 -17.5 7t-6.5 21q0 -2 -6 -8q0 7 -4.5 8t-11.5 -1t-9 -1q-10 3 -15 7.5t-8 16.5t-4 15q-2 5 -9.5 10.5t-9.5 10.5q-1 2 -2.5 5.5t-3 6.5t-4 5.5t-5.5 2.5t-7 -5t-7.5 -10t-4.5 -5q-3 2 -6 1.5t-4.5 -1t-4.5 -3t-5 -3.5 q-3 -2 -8.5 -3t-8.5 -2q15 5 -1 11q-10 4 -16 3q9 4 7.5 12t-8.5 14h5q-1 4 -8.5 8.5t-17.5 8.5t-13 6q-8 5 -34 9.5t-33 0.5q-5 -6 -4.5 -10.5t4 -14t3.5 -12.5q1 -6 -5.5 -13t-6.5 -12q0 -7 14 -15.5t10 -21.5q-3 -8 -16 -16t-16 -12q-5 -8 -1.5 -18.5t10.5 -16.5 q2 -2 1.5 -4t-3.5 -4.5t-5.5 -4t-6.5 -3.5l-3 -2q-11 -5 -20.5 6t-13.5 26q-7 25 -16 30q-23 8 -29 -1q-5 13 -41 26q-25 9 -58 4q6 1 0 15q-7 15 -19 12q3 6 4 17.5t1 13.5q3 13 12 23q1 1 7 8.5t9.5 13.5t0.5 6q35 -4 50 11q5 5 11.5 17t10.5 17q9 6 14 5.5t14.5 -5.5 t14.5 -5q14 -1 15.5 11t-7.5 20q12 -1 3 17q-5 7 -8 9q-12 4 -27 -5q-8 -4 2 -8q-1 1 -9.5 -10.5t-16.5 -17.5t-16 5q-1 1 -5.5 13.5t-9.5 13.5q-8 0 -16 -15q3 8 -11 15t-24 8q19 12 -8 27q-7 4 -20.5 5t-19.5 -4q-5 -7 -5.5 -11.5t5 -8t10.5 -5.5t11.5 -4t8.5 -3 q14 -10 8 -14q-2 -1 -8.5 -3.5t-11.5 -4.5t-6 -4q-3 -4 0 -14t-2 -14q-5 5 -9 17.5t-7 16.5q7 -9 -25 -6l-10 1q-4 0 -16 -2t-20.5 -1t-13.5 8q-4 8 0 20q1 4 4 2q-4 3 -11 9.5t-10 8.5q-46 -15 -94 -41q6 -1 12 1q5 2 13 6.5t10 5.5q34 14 42 7l5 5q14 -16 20 -25 q-7 4 -30 1q-20 -6 -22 -12q7 -12 5 -18q-4 3 -11.5 10t-14.5 11t-15 5q-16 0 -22 -1q-146 -80 -235 -222q7 -7 12 -8q4 -1 5 -9t2.5 -11t11.5 3q9 -8 3 -19q1 1 44 -27q19 -17 21 -21q3 -11 -10 -18q-1 2 -9 9t-9 4q-3 -5 0.5 -18.5t10.5 -12.5q-7 0 -9.5 -16t-2.5 -35.5 t-1 -23.5l2 -1q-3 -12 5.5 -34.5t21.5 -19.5q-13 -3 20 -43q6 -8 8 -9q3 -2 12 -7.5t15 -10t10 -10.5q4 -5 10 -22.5t14 -23.5q-2 -6 9.5 -20t10.5 -23q-1 0 -2.5 -1t-2.5 -1q3 -7 15.5 -14t15.5 -13q1 -3 2 -10t3 -11t8 -2q2 20 -24 62q-15 25 -17 
29q-3 5 -5.5 15.5 t-4.5 14.5q2 0 6 -1.5t8.5 -3.5t7.5 -4t2 -3q-3 -7 2 -17.5t12 -18.5t17 -19t12 -13q6 -6 14 -19.5t0 -13.5q9 0 20 -10t17 -20q5 -8 8 -26t5 -24q2 -7 8.5 -13.5t12.5 -9.5l16 -8t13 -7q5 -2 18.5 -10.5t21.5 -11.5q10 -4 16 -4t14.5 2.5t13.5 3.5q15 2 29 -15t21 -21 q36 -19 55 -11q-2 -1 0.5 -7.5t8 -15.5t9 -14.5t5.5 -8.5q5 -6 18 -15t18 -15q6 4 7 9q-3 -8 7 -20t18 -10q14 3 14 32q-31 -15 -49 18q0 1 -2.5 5.5t-4 8.5t-2.5 8.5t0 7.5t5 3q9 0 10 3.5t-2 12.5t-4 13q-1 8 -11 20t-12 15q-5 -9 -16 -8t-16 9q0 -1 -1.5 -5.5t-1.5 -6.5 q-13 0 -15 1q1 3 2.5 17.5t3.5 22.5q1 4 5.5 12t7.5 14.5t4 12.5t-4.5 9.5t-17.5 2.5q-19 -1 -26 -20q-1 -3 -3 -10.5t-5 -11.5t-9 -7q-7 -3 -24 -2t-24 5q-13 8 -22.5 29t-9.5 37q0 10 2.5 26.5t3 25t-5.5 24.5q3 2 9 9.5t10 10.5q2 1 4.5 1.5t4.5 0t4 1.5t3 6q-1 1 -4 3 q-3 3 -4 3q7 -3 28.5 1.5t27.5 -1.5q15 -11 22 2q0 1 -2.5 9.5t-0.5 13.5q5 -27 29 -9q3 -3 15.5 -5t17.5 -5q3 -2 7 -5.5t5.5 -4.5t5 0.5t8.5 6.5q10 -14 12 -24q11 -40 19 -44q7 -3 11 -2t4.5 9.5t0 14t-1.5 12.5l-1 8v18l-1 8q-15 3 -18.5 12t1.5 18.5t15 18.5q1 1 8 3.5 t15.5 6.5t12.5 8q21 19 15 35q7 0 11 9q-1 0 -5 3t-7.5 5t-4.5 2q9 5 2 16q5 3 7.5 11t7.5 10q9 -12 21 -2q7 8 1 16q5 7 20.5 10.5t18.5 9.5q7 -2 8 2t1 12t3 12q4 5 15 9t13 5l17 11q3 4 0 4q18 -2 31 11q10 11 -6 20q3 6 -3 9.5t-15 5.5q3 1 11.5 0.5t10.5 1.5 q15 10 -7 16q-17 5 -43 -12zM879 10q206 36 351 189q-3 3 -12.5 4.5t-12.5 3.5q-18 7 -24 8q1 7 -2.5 13t-8 9t-12.5 8t-11 7q-2 2 -7 6t-7 5.5t-7.5 4.5t-8.5 2t-10 -1l-3 -1q-3 -1 -5.5 -2.5t-5.5 -3t-4 -3t0 -2.5q-21 17 -36 22q-5 1 -11 5.5t-10.5 7t-10 1.5t-11.5 -7 q-5 -5 -6 -15t-2 -13q-7 5 0 17.5t2 18.5q-3 6 -10.5 4.5t-12 -4.5t-11.5 -8.5t-9 -6.5t-8.5 -5.5t-8.5 -7.5q-3 -4 -6 -12t-5 -11q-2 4 -11.5 6.5t-9.5 5.5q2 -10 4 -35t5 -38q7 -31 -12 -48q-27 -25 -29 -40q-4 -22 12 -26q0 -7 -8 -20.5t-7 -21.5q0 -6 2 -16z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M384 64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1028 484l-682 -682q-37 -37 -90 -37q-52 0 -91 37l-106 108q-38 36 -38 90q0 53 38 91l681 681q39 -98 114.5 -173.5t173.5 -114.5zM1662 919q0 -39 -23 -106q-47 -134 -164.5 -217.5 t-258.5 -83.5q-185 0 -316.5 131.5t-131.5 316.5t131.5 316.5t316.5 131.5q58 0 121.5 -16.5t107.5 -46.5q16 -11 16 -28t-16 -28l-293 -169v-224l193 -107q5 3 79 48.5t135.5 81t70.5 35.5q15 0 23.5 -10t8.5 -25z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1024 128h640v128h-640v-128zM640 640h1024v128h-1024v-128zM1280 1152h384v128h-384v-128zM1792 320v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 832v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19 t-19 45v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 1344v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1403 1241q17 -41 -14 -70l-493 -493v-742q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-256 256q-19 19 -19 45v486l-493 493q-31 29 -14 70q17 39 59 39h1280q42 0 59 -39z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M640 1280h512v128h-512v-128zM1792 640v-480q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v480h672v-160q0 -26 19 -45t45 -19h320q26 0 45 19t19 45v160h672zM1024 640v-128h-256v128h256zM1792 1120v-384h-1792v384q0 66 47 113t113 47h352v160q0 40 28 68 t68 28h576q40 0 68 -28t28 -68v-160h352q66 0 113 -47t47 -113z" /> | ||||
| <glyph unicode="" d="M1283 995l-355 -355l355 -355l144 144q29 31 70 14q39 -17 39 -59v-448q0 -26 -19 -45t-45 -19h-448q-42 0 -59 40q-17 39 14 69l144 144l-355 355l-355 -355l144 -144q31 -30 14 -69q-17 -40 -59 -40h-448q-26 0 -45 19t-19 45v448q0 42 40 59q39 17 69 -14l144 -144 l355 355l-355 355l-144 -144q-19 -19 -45 -19q-12 0 -24 5q-40 17 -40 59v448q0 26 19 45t45 19h448q42 0 59 -40q17 -39 -14 -69l-144 -144l355 -355l355 355l-144 144q-31 30 -14 69q17 40 59 40h448q26 0 45 -19t19 -45v-448q0 -42 -39 -59q-13 -5 -25 -5q-26 0 -45 19z " /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M593 640q-162 -5 -265 -128h-134q-82 0 -138 40.5t-56 118.5q0 353 124 353q6 0 43.5 -21t97.5 -42.5t119 -21.5q67 0 133 23q-5 -37 -5 -66q0 -139 81 -256zM1664 3q0 -120 -73 -189.5t-194 -69.5h-874q-121 0 -194 69.5t-73 189.5q0 53 3.5 103.5t14 109t26.5 108.5 t43 97.5t62 81t85.5 53.5t111.5 20q10 0 43 -21.5t73 -48t107 -48t135 -21.5t135 21.5t107 48t73 48t43 21.5q61 0 111.5 -20t85.5 -53.5t62 -81t43 -97.5t26.5 -108.5t14 -109t3.5 -103.5zM640 1280q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75 t75 -181zM1344 896q0 -159 -112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5t112.5 271.5t271.5 112.5t271.5 -112.5t112.5 -271.5zM1920 671q0 -78 -56 -118.5t-138 -40.5h-134q-103 123 -265 128q81 117 81 256q0 29 -5 66q66 -23 133 -23q59 0 119 21.5t97.5 42.5 t43.5 21q124 0 124 -353zM1792 1280q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1456 320q0 40 -28 68l-208 208q-28 28 -68 28q-42 0 -72 -32q3 -3 19 -18.5t21.5 -21.5t15 -19t13 -25.5t3.5 -27.5q0 -40 -28 -68t-68 -28q-15 0 -27.5 3.5t-25.5 13t-19 15t-21.5 21.5t-18.5 19q-33 -31 -33 -73q0 -40 28 -68l206 -207q27 -27 68 -27q40 0 68 26 l147 146q28 28 28 67zM753 1025q0 40 -28 68l-206 207q-28 28 -68 28q-39 0 -68 -27l-147 -146q-28 -28 -28 -67q0 -40 28 -68l208 -208q27 -27 68 -27q42 0 72 31q-3 3 -19 18.5t-21.5 21.5t-15 19t-13 25.5t-3.5 27.5q0 40 28 68t68 28q15 0 27.5 -3.5t25.5 -13t19 -15 t21.5 -21.5t18.5 -19q33 31 33 73zM1648 320q0 -120 -85 -203l-147 -146q-83 -83 -203 -83q-121 0 -204 85l-206 207q-83 83 -83 203q0 123 88 209l-88 88q-86 -88 -208 -88q-120 0 -204 84l-208 208q-84 84 -84 204t85 203l147 146q83 83 203 83q121 0 204 -85l206 -207 q83 -83 83 -203q0 -123 -88 -209l88 -88q86 88 208 88q120 0 204 -84l208 -208q84 -84 84 -204z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088q-185 0 -316.5 131.5t-131.5 316.5q0 132 71 241.5t187 163.5q-2 28 -2 43q0 212 150 362t362 150q158 0 286.5 -88t187.5 -230q70 62 166 62q106 0 181 -75t75 -181q0 -75 -41 -138q129 -30 213 -134.5t84 -239.5z " /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1527 88q56 -89 21.5 -152.5t-140.5 -63.5h-1152q-106 0 -140.5 63.5t21.5 152.5l503 793v399h-64q-26 0 -45 19t-19 45t19 45t45 19h512q26 0 45 -19t19 -45t-19 -45t-45 -19h-64v-399zM748 813l-272 -429h712l-272 429l-20 31v37v399h-128v-399v-37z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M960 640q26 0 45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45t19 45t45 19zM1260 576l507 -398q28 -20 25 -56q-5 -35 -35 -51l-128 -64q-13 -7 -29 -7q-17 0 -31 8l-690 387l-110 -66q-8 -4 -12 -5q14 -49 10 -97q-7 -77 -56 -147.5t-132 -123.5q-132 -84 -277 -84 q-136 0 -222 78q-90 84 -79 207q7 76 56 147t131 124q132 84 278 84q83 0 151 -31q9 13 22 22l122 73l-122 73q-13 9 -22 22q-68 -31 -151 -31q-146 0 -278 84q-82 53 -131 124t-56 147q-5 59 15.5 113t63.5 93q85 79 222 79q145 0 277 -84q83 -52 132 -123t56 -148 q4 -48 -10 -97q4 -1 12 -5l110 -66l690 387q14 8 31 8q16 0 29 -7l128 -64q30 -16 35 -51q3 -36 -25 -56zM579 836q46 42 21 108t-106 117q-92 59 -192 59q-74 0 -113 -36q-46 -42 -21 -108t106 -117q92 -59 192 -59q74 0 113 36zM494 91q81 51 106 117t-21 108 q-39 36 -113 36q-100 0 -192 -59q-81 -51 -106 -117t21 -108q39 -36 113 -36q100 0 192 59zM672 704l96 -58v11q0 36 33 56l14 8l-79 47l-26 -26q-3 -3 -10 -11t-12 -12q-2 -2 -4 -3.5t-3 -2.5zM896 480l96 -32l736 576l-128 64l-768 -431v-113l-160 -96l9 -8q2 -2 7 -6 q4 -4 11 -12t11 -12l26 -26zM1600 64l128 64l-520 408l-177 -138q-2 -3 -13 -7z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1696 1152q40 0 68 -28t28 -68v-1216q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v288h-544q-40 0 -68 28t-28 68v672q0 40 20 88t48 76l408 408q28 28 76 48t88 20h416q40 0 68 -28t28 -68v-328q68 40 128 40h416zM1152 939l-299 -299h299v299zM512 1323l-299 -299 h299v299zM708 676l316 316v416h-384v-416q0 -40 -28 -68t-68 -28h-416v-640h512v256q0 40 20 88t48 76zM1664 -128v1152h-384v-416q0 -40 -28 -68t-68 -28h-416v-640h896z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1404 151q0 -117 -79 -196t-196 -79q-135 0 -235 100l-777 776q-113 115 -113 271q0 159 110 270t269 111q158 0 273 -113l605 -606q10 -10 10 -22q0 -16 -30.5 -46.5t-46.5 -30.5q-13 0 -23 10l-606 607q-79 77 -181 77q-106 0 -179 -75t-73 -181q0 -105 76 -181 l776 -777q63 -63 145 -63q64 0 106 42t42 106q0 82 -63 145l-581 581q-26 24 -60 24q-29 0 -48 -19t-19 -48q0 -32 25 -59l410 -410q10 -10 10 -22q0 -16 -31 -47t-47 -31q-12 0 -22 10l-410 410q-63 61 -63 149q0 82 57 139t139 57q88 0 149 -63l581 -581q100 -98 100 -235 z" /> | ||||
| <glyph unicode="" d="M384 0h768v384h-768v-384zM1280 0h128v896q0 14 -10 38.5t-20 34.5l-281 281q-10 10 -34 20t-39 10v-416q0 -40 -28 -68t-68 -28h-576q-40 0 -68 28t-28 68v416h-128v-1280h128v416q0 40 28 68t68 28h832q40 0 68 -28t28 -68v-416zM896 928v320q0 13 -9.5 22.5t-22.5 9.5 h-192q-13 0 -22.5 -9.5t-9.5 -22.5v-320q0 -13 9.5 -22.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 22.5zM1536 896v-928q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h928q40 0 88 -20t76 -48l280 -280q28 -28 48 -76t20 -88z" /> | ||||
| <glyph unicode="" d="M1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" d="M1536 192v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1536 704v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1536 1216v-128q0 -26 -19 -45 t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M384 128q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM384 640q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5 t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5zM384 1152q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1792 736v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5z M1792 1248v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M381 -84q0 -80 -54.5 -126t-135.5 -46q-106 0 -172 66l57 88q49 -45 106 -45q29 0 50.5 14.5t21.5 42.5q0 64 -105 56l-26 56q8 10 32.5 43.5t42.5 54t37 38.5v1q-16 0 -48.5 -1t-48.5 -1v-53h-106v152h333v-88l-95 -115q51 -12 81 -49t30 -88zM383 543v-159h-362 q-6 36 -6 54q0 51 23.5 93t56.5 68t66 47.5t56.5 43.5t23.5 45q0 25 -14.5 38.5t-39.5 13.5q-46 0 -81 -58l-85 59q24 51 71.5 79.5t105.5 28.5q73 0 123 -41.5t50 -112.5q0 -50 -34 -91.5t-75 -64.5t-75.5 -50.5t-35.5 -52.5h127v60h105zM1792 224v-192q0 -13 -9.5 -22.5 t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 14 9 23t23 9h1216q13 0 22.5 -9.5t9.5 -22.5zM384 1123v-99h-335v99h107q0 41 0.5 122t0.5 121v12h-2q-8 -17 -50 -54l-71 76l136 127h106v-404h108zM1792 736v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5 t-9.5 22.5v192q0 14 9 23t23 9h1216q13 0 22.5 -9.5t9.5 -22.5zM1792 1248v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1760 640q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-1728q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h1728zM483 704q-28 35 -51 80q-48 97 -48 188q0 181 134 309q133 127 393 127q50 0 167 -19q66 -12 177 -48q10 -38 21 -118q14 -123 14 -183q0 -18 -5 -45l-12 -3l-84 6 l-14 2q-50 149 -103 205q-88 91 -210 91q-114 0 -182 -59q-67 -58 -67 -146q0 -73 66 -140t279 -129q69 -20 173 -66q58 -28 95 -52h-743zM990 448h411q7 -39 7 -92q0 -111 -41 -212q-23 -55 -71 -104q-37 -35 -109 -81q-80 -48 -153 -66q-80 -21 -203 -21q-114 0 -195 23 l-140 40q-57 16 -72 28q-8 8 -8 22v13q0 108 -2 156q-1 30 0 68l2 37v44l102 2q15 -34 30 -71t22.5 -56t12.5 -27q35 -57 80 -94q43 -36 105 -57q59 -22 132 -22q64 0 139 27q77 26 122 86q47 61 47 129q0 84 -81 157q-34 29 -137 71z" /> | ||||
| <glyph unicode="" d="M48 1313q-37 2 -45 4l-3 88q13 1 40 1q60 0 112 -4q132 -7 166 -7q86 0 168 3q116 4 146 5q56 0 86 2l-1 -14l2 -64v-9q-60 -9 -124 -9q-60 0 -79 -25q-13 -14 -13 -132q0 -13 0.5 -32.5t0.5 -25.5l1 -229l14 -280q6 -124 51 -202q35 -59 96 -92q88 -47 177 -47 q104 0 191 28q56 18 99 51q48 36 65 64q36 56 53 114q21 73 21 229q0 79 -3.5 128t-11 122.5t-13.5 159.5l-4 59q-5 67 -24 88q-34 35 -77 34l-100 -2l-14 3l2 86h84l205 -10q76 -3 196 10l18 -2q6 -38 6 -51q0 -7 -4 -31q-45 -12 -84 -13q-73 -11 -79 -17q-15 -15 -15 -41 q0 -7 1.5 -27t1.5 -31q8 -19 22 -396q6 -195 -15 -304q-15 -76 -41 -122q-38 -65 -112 -123q-75 -57 -182 -89q-109 -33 -255 -33q-167 0 -284 46q-119 47 -179 122q-61 76 -83 195q-16 80 -16 237v333q0 188 -17 213q-25 36 -147 39zM1536 -96v64q0 14 -9 23t-23 9h-1472 q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h1472q14 0 23 9t9 23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M512 160v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM512 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 160v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23 v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM512 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1536 160v192 q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1536 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192 q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1536 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1664 1248v-1088q0 -66 -47 -113t-113 -47h-1344q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1344q66 0 113 -47t47 -113 z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1190 955l293 293l-107 107l-293 -293zM1637 1248q0 -27 -18 -45l-1286 -1286q-18 -18 -45 -18t-45 18l-198 198q-18 18 -18 45t18 45l1286 1286q18 18 45 18t45 -18l198 -198q18 -18 18 -45zM286 1438l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98zM636 1276 l196 -60l-196 -60l-60 -196l-60 196l-196 60l196 60l60 196zM1566 798l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98zM926 1438l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M640 128q0 52 -38 90t-90 38t-90 -38t-38 -90t38 -90t90 -38t90 38t38 90zM256 640h384v256h-158q-13 0 -22 -9l-195 -195q-9 -9 -9 -22v-30zM1536 128q0 52 -38 90t-90 38t-90 -38t-38 -90t38 -90t90 -38t90 38t38 90zM1792 1216v-1024q0 -15 -4 -26.5t-13.5 -18.5 t-16.5 -11.5t-23.5 -6t-22.5 -2t-25.5 0t-22.5 0.5q0 -106 -75 -181t-181 -75t-181 75t-75 181h-384q0 -106 -75 -181t-181 -75t-181 75t-75 181h-64q-3 0 -22.5 -0.5t-25.5 0t-22.5 2t-23.5 6t-16.5 11.5t-13.5 18.5t-4 26.5q0 26 19 45t45 19v320q0 8 -0.5 35t0 38 t2.5 34.5t6.5 37t14 30.5t22.5 30l198 198q19 19 50.5 32t58.5 13h160v192q0 26 19 45t45 19h1024q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" d="M1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103q-111 0 -218 32q59 93 78 164q9 34 54 211q20 -39 73 -67.5t114 -28.5q121 0 216 68.5t147 188.5t52 270q0 114 -59.5 214t-172.5 163t-255 63q-105 0 -196 -29t-154.5 -77t-109 -110.5t-67 -129.5t-21.5 -134 q0 -104 40 -183t117 -111q30 -12 38 20q2 7 8 31t8 30q6 23 -11 43q-51 61 -51 151q0 151 104.5 259.5t273.5 108.5q151 0 235.5 -82t84.5 -213q0 -170 -68.5 -289t-175.5 -119q-61 0 -98 43.5t-23 104.5q8 35 26.5 93.5t30 103t11.5 75.5q0 50 -27 83t-77 33 q-62 0 -105 -57t-43 -142q0 -73 25 -122l-99 -418q-17 -70 -13 -177q-206 91 -333 281t-127 423q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1248 1408q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-725q85 122 108 210q9 34 53 209q21 -39 73.5 -67t112.5 -28q181 0 295.5 147.5t114.5 373.5q0 84 -35 162.5t-96.5 139t-152.5 97t-197 36.5q-104 0 -194.5 -28.5t-153 -76.5 t-107.5 -109.5t-66.5 -128t-21.5 -132.5q0 -102 39.5 -180t116.5 -110q13 -5 23.5 0t14.5 19q10 44 15 61q6 23 -11 42q-50 62 -50 150q0 150 103.5 256.5t270.5 106.5q149 0 232.5 -81t83.5 -210q0 -168 -67.5 -286t-173.5 -118q-60 0 -97 43.5t-23 103.5q8 34 26.5 92.5 t29.5 102t11 74.5q0 49 -26.5 81.5t-75.5 32.5q-61 0 -103.5 -56.5t-42.5 -139.5q0 -72 24 -121l-98 -414q-24 -100 -7 -254h-183q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960z" /> | ||||
| <glyph unicode="" d="M678 -57q0 -38 -10 -71h-380q-95 0 -171.5 56.5t-103.5 147.5q24 45 69 77.5t100 49.5t107 24t107 7q32 0 49 -2q6 -4 30.5 -21t33 -23t31 -23t32 -25.5t27.5 -25.5t26.5 -29.5t21 -30.5t17.5 -34.5t9.5 -36t4.5 -40.5zM385 294q-234 -7 -385 -85v433q103 -118 273 -118 q32 0 70 5q-21 -61 -21 -86q0 -67 63 -149zM558 805q0 -100 -43.5 -160.5t-140.5 -60.5q-51 0 -97 26t-78 67.5t-56 93.5t-35.5 104t-11.5 99q0 96 51.5 165t144.5 69q66 0 119 -41t84 -104t47 -130t16 -128zM1536 896v-736q0 -119 -84.5 -203.5t-203.5 -84.5h-468 q39 73 39 157q0 66 -22 122.5t-55.5 93t-72 71t-72 59.5t-55.5 54.5t-22 59.5q0 36 23 68t56 61.5t65.5 64.5t55.5 93t23 131t-26.5 145.5t-75.5 118.5q-6 6 -14 11t-12.5 7.5t-10 9.5t-10.5 17h135l135 64h-437q-138 0 -244.5 -38.5t-182.5 -133.5q0 126 81 213t207 87h960 q119 0 203.5 -84.5t84.5 -203.5v-96h-256v256h-128v-256h-256v-128h256v-256h128v256h256z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M876 71q0 21 -4.5 40.5t-9.5 36t-17.5 34.5t-21 30.5t-26.5 29.5t-27.5 25.5t-32 25.5t-31 23t-33 23t-30.5 21q-17 2 -50 2q-54 0 -106 -7t-108 -25t-98 -46t-69 -75t-27 -107q0 -68 35.5 -121.5t93 -84t120.5 -45.5t127 -15q59 0 112.5 12.5t100.5 39t74.5 73.5 t27.5 110zM756 933q0 60 -16.5 127.5t-47 130.5t-84 104t-119.5 41q-93 0 -144 -69t-51 -165q0 -47 11.5 -99t35.5 -104t56 -93.5t78 -67.5t97 -26q97 0 140.5 60.5t43.5 160.5zM625 1408h437l-135 -79h-135q71 -45 110 -126t39 -169q0 -74 -23 -131.5t-56 -92.5t-66 -64.5 t-56 -61t-23 -67.5q0 -26 16.5 -51t43 -48t58.5 -48t64 -55.5t58.5 -66t43 -85t16.5 -106.5q0 -160 -140 -282q-152 -131 -420 -131q-59 0 -119.5 10t-122 33.5t-108.5 58t-77 89t-30 121.5q0 61 37 135q32 64 96 110.5t145 71t155 36t150 13.5q-64 83 -64 149q0 12 2 23.5 t5 19.5t8 21.5t7 21.5q-40 -5 -70 -5q-149 0 -255.5 98t-106.5 246q0 140 95 250.5t234 141.5q94 20 187 20zM1664 1152v-128h-256v-256h-128v256h-256v128h256v256h128v-256h256z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M768 384h384v96h-128v448h-114l-148 -137l77 -80q42 37 55 57h2v-288h-128v-96zM1280 640q0 -70 -21 -142t-59.5 -134t-101.5 -101t-138 -39t-138 39t-101.5 101t-59.5 134t-21 142t21 142t59.5 134t101.5 101t138 39t138 -39t101.5 -101t59.5 -134t21 -142zM1792 384 v512q-106 0 -181 75t-75 181h-1152q0 -106 -75 -181t-181 -75v-512q106 0 181 -75t75 -181h1152q0 106 75 181t181 75zM1920 1216v-1152q0 -26 -19 -45t-45 -19h-1792q-26 0 -45 19t-19 45v1152q0 26 19 45t45 19h1792q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M1024 832q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M1024 320q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="640" d="M640 1088v-896q0 -26 -19 -45t-45 -19t-45 19l-448 448q-19 19 -19 45t19 45l448 448q19 19 45 19t45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="640" d="M576 640q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19t-19 45v896q0 26 19 45t45 19t45 -19l448 -448q19 -19 19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M160 0h608v1152h-640v-1120q0 -13 9.5 -22.5t22.5 -9.5zM1536 32v1120h-640v-1152h608q13 0 22.5 9.5t9.5 22.5zM1664 1248v-1216q0 -66 -47 -113t-113 -47h-1344q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1344q66 0 113 -47t47 -113z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M1024 448q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45zM1024 832q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M1024 448q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M1024 832q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 826v-794q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v794q44 -49 101 -87q362 -246 497 -345q57 -42 92.5 -65.5t94.5 -48t110 -24.5h1h1q51 0 110 24.5t94.5 48t92.5 65.5q170 123 498 345q57 39 100 87zM1792 1120q0 -79 -49 -151t-122 -123 q-376 -261 -468 -325q-10 -7 -42.5 -30.5t-54 -38t-52 -32.5t-57.5 -27t-50 -9h-1h-1q-23 0 -50 9t-57.5 27t-52 32.5t-54 38t-42.5 30.5q-91 64 -262 182.5t-205 142.5q-62 42 -117 115.5t-55 136.5q0 78 41.5 130t118.5 52h1472q65 0 112.5 -47t47.5 -113z" /> | ||||
| <glyph unicode="" d="M349 911v-991h-330v991h330zM370 1217q1 -73 -50.5 -122t-135.5 -49h-2q-82 0 -132 49t-50 122q0 74 51.5 122.5t134.5 48.5t133 -48.5t51 -122.5zM1536 488v-568h-329v530q0 105 -40.5 164.5t-126.5 59.5q-63 0 -105.5 -34.5t-63.5 -85.5q-11 -30 -11 -81v-553h-329 q2 399 2 647t-1 296l-1 48h329v-144h-2q20 32 41 56t56.5 52t87 43.5t114.5 15.5q171 0 275 -113.5t104 -332.5z" /> | ||||
| <glyph unicode="" d="M1536 640q0 -156 -61 -298t-164 -245t-245 -164t-298 -61q-172 0 -327 72.5t-264 204.5q-7 10 -6.5 22.5t8.5 20.5l137 138q10 9 25 9q16 -2 23 -12q73 -95 179 -147t225 -52q104 0 198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5t-40.5 198.5t-109.5 163.5 t-163.5 109.5t-198.5 40.5q-98 0 -188 -35.5t-160 -101.5l137 -138q31 -30 14 -69q-17 -40 -59 -40h-448q-26 0 -45 19t-19 45v448q0 42 40 59q39 17 69 -14l130 -129q107 101 244.5 156.5t284.5 55.5q156 0 298 -61t245 -164t164 -245t61 -298z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1771 0q0 -53 -37 -90l-107 -108q-39 -37 -91 -37q-53 0 -90 37l-363 364q-38 36 -38 90q0 53 43 96l-256 256l-126 -126q-14 -14 -34 -14t-34 14q2 -2 12.5 -12t12.5 -13t10 -11.5t10 -13.5t6 -13.5t5.5 -16.5t1.5 -18q0 -38 -28 -68q-3 -3 -16.5 -18t-19 -20.5 t-18.5 -16.5t-22 -15.5t-22 -9t-26 -4.5q-40 0 -68 28l-408 408q-28 28 -28 68q0 13 4.5 26t9 22t15.5 22t16.5 18.5t20.5 19t18 16.5q30 28 68 28q10 0 18 -1.5t16.5 -5.5t13.5 -6t13.5 -10t11.5 -10t13 -12.5t12 -12.5q-14 14 -14 34t14 34l348 348q14 14 34 14t34 -14 q-2 2 -12.5 12t-12.5 13t-10 11.5t-10 13.5t-6 13.5t-5.5 16.5t-1.5 18q0 38 28 68q3 3 16.5 18t19 20.5t18.5 16.5t22 15.5t22 9t26 4.5q40 0 68 -28l408 -408q28 -28 28 -68q0 -13 -4.5 -26t-9 -22t-15.5 -22t-16.5 -18.5t-20.5 -19t-18 -16.5q-30 -28 -68 -28 q-10 0 -18 1.5t-16.5 5.5t-13.5 6t-13.5 10t-11.5 10t-13 12.5t-12 12.5q14 -14 14 -34t-14 -34l-126 -126l256 -256q43 43 96 43q52 0 91 -37l363 -363q37 -39 37 -91z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M384 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM576 832q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1004 351l101 382q6 26 -7.5 48.5t-38.5 29.5 t-48 -6.5t-30 -39.5l-101 -382q-60 -5 -107 -43.5t-63 -98.5q-20 -77 20 -146t117 -89t146 20t89 117q16 60 -6 117t-72 91zM1664 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1024 1024q0 53 -37.5 90.5 t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1472 832q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1792 384q0 -261 -141 -483q-19 -29 -54 -29h-1402q-35 0 -54 29 q-141 221 -141 483q0 182 71 348t191 286t286 191t348 71t348 -71t286 -191t191 -286t71 -348z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M896 1152q-204 0 -381.5 -69.5t-282 -187.5t-104.5 -255q0 -112 71.5 -213.5t201.5 -175.5l87 -50l-27 -96q-24 -91 -70 -172q152 63 275 171l43 38l57 -6q69 -8 130 -8q204 0 381.5 69.5t282 187.5t104.5 255t-104.5 255t-282 187.5t-381.5 69.5zM1792 640 q0 -174 -120 -321.5t-326 -233t-450 -85.5q-70 0 -145 8q-198 -175 -460 -242q-49 -14 -114 -22h-5q-15 0 -27 10.5t-16 27.5v1q-3 4 -0.5 12t2 10t4.5 9.5l6 9t7 8.5t8 9q7 8 31 34.5t34.5 38t31 39.5t32.5 51t27 59t26 76q-157 89 -247.5 220t-90.5 281q0 174 120 321.5 t326 233t450 85.5t450 -85.5t326 -233t120 -321.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M704 1152q-153 0 -286 -52t-211.5 -141t-78.5 -191q0 -82 53 -158t149 -132l97 -56l-35 -84q34 20 62 39l44 31l53 -10q78 -14 153 -14q153 0 286 52t211.5 141t78.5 191t-78.5 191t-211.5 141t-286 52zM704 1280q191 0 353.5 -68.5t256.5 -186.5t94 -257t-94 -257 t-256.5 -186.5t-353.5 -68.5q-86 0 -176 16q-124 -88 -278 -128q-36 -9 -86 -16h-3q-11 0 -20.5 8t-11.5 21q-1 3 -1 6.5t0.5 6.5t2 6l2.5 5t3.5 5.5t4 5t4.5 5t4 4.5q5 6 23 25t26 29.5t22.5 29t25 38.5t20.5 44q-124 72 -195 177t-71 224q0 139 94 257t256.5 186.5 t353.5 68.5zM1526 111q10 -24 20.5 -44t25 -38.5t22.5 -29t26 -29.5t23 -25q1 -1 4 -4.5t4.5 -5t4 -5t3.5 -5.5l2.5 -5t2 -6t0.5 -6.5t-1 -6.5q-3 -14 -13 -22t-22 -7q-50 7 -86 16q-154 40 -278 128q-90 -16 -176 -16q-271 0 -472 132q58 -4 88 -4q161 0 309 45t264 129 q125 92 192 212t67 254q0 77 -23 152q129 -71 204 -178t75 -230q0 -120 -71 -224.5t-195 -176.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="896" d="M885 970q18 -20 7 -44l-540 -1157q-13 -25 -42 -25q-4 0 -14 2q-17 5 -25.5 19t-4.5 30l197 808l-406 -101q-4 -1 -12 -1q-18 0 -31 11q-18 15 -13 39l201 825q4 14 16 23t28 9h328q19 0 32 -12.5t13 -29.5q0 -8 -5 -18l-171 -463l396 98q8 2 12 2q19 0 34 -15z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 288v-320q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192h-512v-192h96q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192h-512v-192h96q40 0 68 -28t28 -68v-320 q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192q0 52 38 90t90 38h512v192h-96q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-96v-192h512q52 0 90 -38t38 -90v-192h96q40 0 68 -28t28 -68 z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M896 708v-580q0 -104 -76 -180t-180 -76t-180 76t-76 180q0 26 19 45t45 19t45 -19t19 -45q0 -50 39 -89t89 -39t89 39t39 89v580q33 11 64 11t64 -11zM1664 681q0 -13 -9.5 -22.5t-22.5 -9.5q-11 0 -23 10q-49 46 -93 69t-102 23q-68 0 -128 -37t-103 -97 q-7 -10 -17.5 -28t-14.5 -24q-11 -17 -28 -17q-18 0 -29 17q-4 6 -14.5 24t-17.5 28q-43 60 -102.5 97t-127.5 37t-127.5 -37t-102.5 -97q-7 -10 -17.5 -28t-14.5 -24q-11 -17 -29 -17q-17 0 -28 17q-4 6 -14.5 24t-17.5 28q-43 60 -103 97t-128 37q-58 0 -102 -23t-93 -69 q-12 -10 -23 -10q-13 0 -22.5 9.5t-9.5 22.5q0 5 1 7q45 183 172.5 319.5t298 204.5t360.5 68q140 0 274.5 -40t246.5 -113.5t194.5 -187t115.5 -251.5q1 -2 1 -7zM896 1408v-98q-42 2 -64 2t-64 -2v98q0 26 19 45t45 19t45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M768 -128h896v640h-416q-40 0 -68 28t-28 68v416h-384v-1152zM1024 1312v64q0 13 -9.5 22.5t-22.5 9.5h-704q-13 0 -22.5 -9.5t-9.5 -22.5v-64q0 -13 9.5 -22.5t22.5 -9.5h704q13 0 22.5 9.5t9.5 22.5zM1280 640h299l-299 299v-299zM1792 512v-672q0 -40 -28 -68t-68 -28 h-960q-40 0 -68 28t-28 68v160h-544q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h1088q40 0 68 -28t28 -68v-328q21 -13 36 -28l408 -408q28 -28 48 -76t20 -88z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M736 960q0 -13 -9.5 -22.5t-22.5 -9.5t-22.5 9.5t-9.5 22.5q0 46 -54 71t-106 25q-13 0 -22.5 9.5t-9.5 22.5t9.5 22.5t22.5 9.5q50 0 99.5 -16t87 -54t37.5 -90zM896 960q0 72 -34.5 134t-90 101.5t-123 62t-136.5 22.5t-136.5 -22.5t-123 -62t-90 -101.5t-34.5 -134 q0 -101 68 -180q10 -11 30.5 -33t30.5 -33q128 -153 141 -298h228q13 145 141 298q10 11 30.5 33t30.5 33q68 79 68 180zM1024 960q0 -155 -103 -268q-45 -49 -74.5 -87t-59.5 -95.5t-34 -107.5q47 -28 47 -82q0 -37 -25 -64q25 -27 25 -64q0 -52 -45 -81q13 -23 13 -47 q0 -46 -31.5 -71t-77.5 -25q-20 -44 -60 -70t-87 -26t-87 26t-60 70q-46 0 -77.5 25t-31.5 71q0 24 13 47q-45 29 -45 81q0 37 25 64q-25 27 -25 64q0 54 47 82q-4 50 -34 107.5t-59.5 95.5t-74.5 87q-103 113 -103 268q0 99 44.5 184.5t117 142t164 89t186.5 32.5 t186.5 -32.5t164 -89t117 -142t44.5 -184.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 352v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5q-12 0 -24 10l-319 320q-9 9 -9 22q0 14 9 23l320 320q9 9 23 9q13 0 22.5 -9.5t9.5 -22.5v-192h1376q13 0 22.5 -9.5t9.5 -22.5zM1792 896q0 -14 -9 -23l-320 -320q-9 -9 -23 -9 q-13 0 -22.5 9.5t-9.5 22.5v192h-1376q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1376v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M1280 608q0 14 -9 23t-23 9h-224v352q0 13 -9.5 22.5t-22.5 9.5h-192q-13 0 -22.5 -9.5t-9.5 -22.5v-352h-224q-13 0 -22.5 -9.5t-9.5 -22.5q0 -14 9 -23l352 -352q9 -9 23 -9t23 9l351 351q10 12 10 24zM1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088 q-185 0 -316.5 131.5t-131.5 316.5q0 130 70 240t188 165q-2 30 -2 43q0 212 150 362t362 150q156 0 285.5 -87t188.5 -231q71 62 166 62q106 0 181 -75t75 -181q0 -76 -41 -138q130 -31 213.5 -135.5t83.5 -238.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M1280 672q0 14 -9 23l-352 352q-9 9 -23 9t-23 -9l-351 -351q-10 -12 -10 -24q0 -14 9 -23t23 -9h224v-352q0 -13 9.5 -22.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 22.5v352h224q13 0 22.5 9.5t9.5 22.5zM1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088 q-185 0 -316.5 131.5t-131.5 316.5q0 130 70 240t188 165q-2 30 -2 43q0 212 150 362t362 150q156 0 285.5 -87t188.5 -231q71 62 166 62q106 0 181 -75t75 -181q0 -76 -41 -138q130 -31 213.5 -135.5t83.5 -238.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M384 192q0 -26 -19 -45t-45 -19t-45 19t-19 45t19 45t45 19t45 -19t19 -45zM1408 131q0 -121 -73 -190t-194 -69h-874q-121 0 -194 69t-73 190q0 68 5.5 131t24 138t47.5 132.5t81 103t120 60.5q-22 -52 -22 -120v-203q-58 -20 -93 -70t-35 -111q0 -80 56 -136t136 -56 t136 56t56 136q0 61 -35.5 111t-92.5 70v203q0 62 25 93q132 -104 295 -104t295 104q25 -31 25 -93v-64q-106 0 -181 -75t-75 -181v-89q-32 -29 -32 -71q0 -40 28 -68t68 -28t68 28t28 68q0 42 -32 71v89q0 52 38 90t90 38t90 -38t38 -90v-89q-32 -29 -32 -71q0 -40 28 -68 t68 -28t68 28t28 68q0 42 -32 71v89q0 68 -34.5 127.5t-93.5 93.5q0 10 0.5 42.5t0 48t-2.5 41.5t-7 47t-13 40q68 -15 120 -60.5t81 -103t47.5 -132.5t24 -138t5.5 -131zM1088 1024q0 -159 -112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5t112.5 271.5t271.5 112.5 t271.5 -112.5t112.5 -271.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1280 832q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 832q0 -62 -35.5 -111t-92.5 -70v-395q0 -159 -131.5 -271.5t-316.5 -112.5t-316.5 112.5t-131.5 271.5v132q-164 20 -274 128t-110 252v512q0 26 19 45t45 19q6 0 16 -2q17 30 47 48 t65 18q53 0 90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5q-33 0 -64 18v-402q0 -106 94 -181t226 -75t226 75t94 181v402q-31 -18 -64 -18q-53 0 -90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5q35 0 65 -18t47 -48q10 2 16 2q26 0 45 -19t19 -45v-512q0 -144 -110 -252 t-274 -128v-132q0 -106 94 -181t226 -75t226 75t94 181v395q-57 21 -92.5 70t-35.5 111q0 80 56 136t136 56t136 -56t56 -136z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M640 1152h512v128h-512v-128zM288 1152v-1280h-64q-92 0 -158 66t-66 158v832q0 92 66 158t158 66h64zM1408 1152v-1280h-1024v1280h128v160q0 40 28 68t68 28h576q40 0 68 -28t28 -68v-160h128zM1792 928v-832q0 -92 -66 -158t-158 -66h-64v1280h64q92 0 158 -66 t66 -158z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M848 -160q0 16 -16 16q-59 0 -101.5 42.5t-42.5 101.5q0 16 -16 16t-16 -16q0 -73 51.5 -124.5t124.5 -51.5q16 0 16 16zM1664 128q0 -52 -38 -90t-90 -38h-448q0 -106 -75 -181t-181 -75t-181 75t-75 181h-448q-52 0 -90 38t-38 90q190 161 287 397.5t97 498.5 q0 165 96 262t264 117q-8 18 -8 37q0 40 28 68t68 28t68 -28t28 -68q0 -19 -8 -37q168 -20 264 -117t96 -262q0 -262 97 -498.5t287 -397.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M1664 896q0 80 -56 136t-136 56h-64v-384h64q80 0 136 56t56 136zM0 128h1792q0 -106 -75 -181t-181 -75h-1280q-106 0 -181 75t-75 181zM1856 896q0 -159 -112.5 -271.5t-271.5 -112.5h-64v-32q0 -92 -66 -158t-158 -66h-704q-92 0 -158 66t-66 158v736q0 26 19 45 t45 19h1152q159 0 271.5 -112.5t112.5 -271.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M640 1472v-640q0 -61 -35.5 -111t-92.5 -70v-779q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v779q-57 20 -92.5 70t-35.5 111v640q0 26 19 45t45 19t45 -19t19 -45v-416q0 -26 19 -45t45 -19t45 19t19 45v416q0 26 19 45t45 19t45 -19t19 -45v-416q0 -26 19 -45 t45 -19t45 19t19 45v416q0 26 19 45t45 19t45 -19t19 -45zM1408 1472v-1600q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v512h-224q-13 0 -22.5 9.5t-9.5 22.5v800q0 132 94 226t226 94h256q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1280" d="M1024 352v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h704q14 0 23 -9t9 -23zM1024 608v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h704q14 0 23 -9t9 -23zM128 0h1024v768h-416q-40 0 -68 28t-28 68v416h-512v-1280z M768 896h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376zM1280 864v-896q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h640q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M384 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 
22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 -128h384v1536h-1152v-1536h384v224q0 13 9.5 22.5t22.5 9.5h320q13 0 22.5 -9.5t9.5 -22.5v-224zM1408 1472v-1664q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v1664q0 26 19 45t45 19h1280q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M384 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 -128h384v1152h-256v-32q0 -40 -28 -68t-68 -28h-448q-40 0 -68 28t-28 68v32h-256v-1152h384v224q0 13 9.5 22.5t22.5 9.5h320q13 0 22.5 -9.5t9.5 -22.5v-224zM896 1056v320q0 13 -9.5 22.5t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-96h-128v96q0 13 -9.5 22.5 t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-320q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5v96h128v-96q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5zM1408 1088v-1280q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v1280q0 26 19 45t45 19h320 v288q0 40 28 68t68 28h448q40 0 68 -28t28 -68v-288h320q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M640 128q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM256 640h384v256h-158q-14 -2 -22 -9l-195 -195q-7 -12 -9 -22v-30zM1536 128q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5 t90.5 37.5t37.5 90.5zM1664 800v192q0 14 -9 23t-23 9h-224v224q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-224h-224q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h224v-224q0 -14 9 -23t23 -9h192q14 0 23 9t9 23v224h224q14 0 23 9t9 23zM1920 1344v-1152 q0 -26 -19 -45t-45 -19h-192q0 -106 -75 -181t-181 -75t-181 75t-75 181h-384q0 -106 -75 -181t-181 -75t-181 75t-75 181h-128q-26 0 -45 19t-19 45t19 45t45 19v416q0 26 13 58t32 51l198 198q19 19 51 32t58 13h160v320q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1280 416v192q0 14 -9 23t-23 9h-224v224q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-224h-224q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h224v-224q0 -14 9 -23t23 -9h192q14 0 23 9t9 23v224h224q14 0 23 9t9 23zM640 1152h512v128h-512v-128zM256 1152v-1280h-32 q-92 0 -158 66t-66 158v832q0 92 66 158t158 66h32zM1440 1152v-1280h-1088v1280h160v160q0 40 28 68t68 28h576q40 0 68 -28t28 -68v-160h160zM1792 928v-832q0 -92 -66 -158t-158 -66h-32v1280h32q92 0 158 -66t66 -158z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M1920 576q-1 -32 -288 -96l-352 -32l-224 -64h-64l-293 -352h69q26 0 45 -4.5t19 -11.5t-19 -11.5t-45 -4.5h-96h-160h-64v32h64v416h-160l-192 -224h-96l-32 32v192h32v32h128v8l-192 24v128l192 24v8h-128v32h-32v192l32 32h96l192 -224h160v416h-64v32h64h160h96 q26 0 45 -4.5t19 -11.5t-19 -11.5t-45 -4.5h-69l293 -352h64l224 -64l352 -32q261 -58 287 -93z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M640 640v384h-256v-256q0 -53 37.5 -90.5t90.5 -37.5h128zM1664 192v-192h-1152v192l128 192h-128q-159 0 -271.5 112.5t-112.5 271.5v320l-64 64l32 128h480l32 128h960l32 -192l-64 -32v-800z" /> | ||||
| <glyph unicode="" d="M1280 192v896q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-320h-512v320q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-896q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v320h512v-320q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1536 1120v-960 q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" d="M1280 576v128q0 26 -19 45t-45 19h-320v320q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-320h-320q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h320v-320q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v320h320q26 0 45 19t19 45zM1536 1120v-960 q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M627 160q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23zM1011 160q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23 t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M595 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23zM979 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23 l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1152" d="M1075 224q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23zM1075 608q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393 q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1152" d="M1075 672q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23zM1075 1056q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23 t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="640" d="M627 992q0 -13 -10 -23l-393 -393l393 -393q10 -10 10 -23t-10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="640" d="M595 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1152" d="M1075 352q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1152" d="M1075 800q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M1792 544v832q0 13 -9.5 22.5t-22.5 9.5h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-832q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5zM1920 1376v-1088q0 -66 -47 -113t-113 -47h-544q0 -37 16 -77.5t32 -71t16 -43.5q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19 t-19 45q0 14 16 44t32 70t16 78h-544q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M416 256q-66 0 -113 47t-47 113v704q0 66 47 113t113 47h1088q66 0 113 -47t47 -113v-704q0 -66 -47 -113t-113 -47h-1088zM384 1120v-704q0 -13 9.5 -22.5t22.5 -9.5h1088q13 0 22.5 9.5t9.5 22.5v704q0 13 -9.5 22.5t-22.5 9.5h-1088q-13 0 -22.5 -9.5t-9.5 -22.5z M1760 192h160v-96q0 -40 -47 -68t-113 -28h-1600q-66 0 -113 28t-47 68v96h160h1600zM1040 96q16 0 16 16t-16 16h-160q-16 0 -16 -16t16 -16h160z" /> | ||||
| <glyph unicode="" horiz-adv-x="1152" d="M640 128q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1024 288v960q0 13 -9.5 22.5t-22.5 9.5h-832q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h832q13 0 22.5 9.5t9.5 22.5zM1152 1248v-1088q0 -66 -47 -113t-113 -47h-832 q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h832q66 0 113 -47t47 -113z" /> | ||||
| <glyph unicode="" horiz-adv-x="768" d="M464 128q0 33 -23.5 56.5t-56.5 23.5t-56.5 -23.5t-23.5 -56.5t23.5 -56.5t56.5 -23.5t56.5 23.5t23.5 56.5zM672 288v704q0 13 -9.5 22.5t-22.5 9.5h-512q-13 0 -22.5 -9.5t-9.5 -22.5v-704q0 -13 9.5 -22.5t22.5 -9.5h512q13 0 22.5 9.5t9.5 22.5zM480 1136 q0 16 -16 16h-160q-16 0 -16 -16t16 -16h160q16 0 16 16zM768 1152v-1024q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v1024q0 52 38 90t90 38h512q52 0 90 -38t38 -90z" /> | ||||
| <glyph unicode="" d="M768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103 t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M768 576v-384q0 -80 -56 -136t-136 -56h-384q-80 0 -136 56t-56 136v704q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5h64q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-64q-106 0 -181 -75t-75 -181v-32q0 -40 28 -68t68 -28h224q80 0 136 -56t56 -136z M1664 576v-384q0 -80 -56 -136t-136 -56h-384q-80 0 -136 56t-56 136v704q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5h64q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-64q-106 0 -181 -75t-75 -181v-32q0 -40 28 -68t68 -28h224q80 0 136 -56t56 -136z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M768 1216v-704q0 -104 -40.5 -198.5t-109.5 -163.5t-163.5 -109.5t-198.5 -40.5h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64q106 0 181 75t75 181v32q0 40 -28 68t-68 28h-224q-80 0 -136 56t-56 136v384q0 80 56 136t136 56h384q80 0 136 -56t56 -136zM1664 1216 v-704q0 -104 -40.5 -198.5t-109.5 -163.5t-163.5 -109.5t-198.5 -40.5h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64q106 0 181 75t75 181v32q0 40 -28 68t-68 28h-224q-80 0 -136 56t-56 136v384q0 80 56 136t136 56h384q80 0 136 -56t56 -136z" /> | ||||
| <glyph unicode="" horiz-adv-x="1568" d="M496 192q0 -60 -42.5 -102t-101.5 -42q-60 0 -102 42t-42 102t42 102t102 42q59 0 101.5 -42t42.5 -102zM928 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM320 640q0 -66 -47 -113t-113 -47t-113 47t-47 113 t47 113t113 47t113 -47t47 -113zM1360 192q0 -46 -33 -79t-79 -33t-79 33t-33 79t33 79t79 33t79 -33t33 -79zM528 1088q0 -73 -51.5 -124.5t-124.5 -51.5t-124.5 51.5t-51.5 124.5t51.5 124.5t124.5 51.5t124.5 -51.5t51.5 -124.5zM992 1280q0 -80 -56 -136t-136 -56 t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1536 640q0 -40 -28 -68t-68 -28t-68 28t-28 68t28 68t68 28t68 -28t28 -68zM1328 1088q0 -33 -23.5 -56.5t-56.5 -23.5t-56.5 23.5t-23.5 56.5t23.5 56.5t56.5 23.5t56.5 -23.5t23.5 -56.5z" /> | ||||
| <glyph unicode="" d="M1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 416q0 -166 -127 -451q-3 -7 -10.5 -24t-13.5 -30t-13 -22q-12 -17 -28 -17q-15 0 -23.5 10t-8.5 25q0 9 2.5 26.5t2.5 23.5q5 68 5 123q0 101 -17.5 181t-48.5 138.5t-80 101t-105.5 69.5t-133 42.5t-154 21.5t-175.5 6h-224v-256q0 -26 -19 -45t-45 -19t-45 19 l-512 512q-19 19 -19 45t19 45l512 512q19 19 45 19t45 -19t19 -45v-256h224q713 0 875 -403q53 -134 53 -333z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M640 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1280 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1440 320 q0 120 -69 204t-187 84q-41 0 -195 -21q-71 -11 -157 -11t-157 11q-152 21 -195 21q-118 0 -187 -84t-69 -204q0 -88 32 -153.5t81 -103t122 -60t140 -29.5t149 -7h168q82 0 149 7t140 29.5t122 60t81 103t32 153.5zM1664 496q0 -207 -61 -331q-38 -77 -105.5 -133t-141 -86 t-170 -47.5t-171.5 -22t-167 -4.5q-78 0 -142 3t-147.5 12.5t-152.5 30t-137 51.5t-121 81t-86 115q-62 123 -62 331q0 237 136 396q-27 82 -27 170q0 116 51 218q108 0 190 -39.5t189 -123.5q147 35 309 35q148 0 280 -32q105 82 187 121t189 39q51 -102 51 -218 q0 -87 -27 -168q136 -160 136 -398z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1536 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68v-960q0 -40 28 -68t68 -28h1216q40 0 68 28t28 68zM1664 928v-704q0 -92 -66 -158t-158 -66h-1216q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320 q92 0 158 -66t66 -158v-32h672q92 0 158 -66t66 -158z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M1781 605q0 35 -53 35h-1088q-40 0 -85.5 -21.5t-71.5 -52.5l-294 -363q-18 -24 -18 -40q0 -35 53 -35h1088q40 0 86 22t71 53l294 363q18 22 18 39zM640 768h768v160q0 40 -28 68t-68 28h-576q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68 v-853l256 315q44 53 116 87.5t140 34.5zM1909 605q0 -62 -46 -120l-295 -363q-43 -53 -116 -87.5t-140 -34.5h-1088q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h544q92 0 158 -66t66 -158v-160h192q54 0 99 -24.5t67 -70.5q15 -32 15 -68z " /> | ||||
| <glyph unicode="" horiz-adv-x="1152" d="M896 608v-64q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v224h-224q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h224v224q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-224h224q14 0 23 -9t9 -23zM1024 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 -28 t-28 -68v-704q0 -40 28 -68t68 -28h704q40 0 68 28t28 68zM1152 928v-704q0 -92 -65.5 -158t-158.5 -66h-704q-93 0 -158.5 66t-65.5 158v704q0 93 65.5 158.5t158.5 65.5h704q93 0 158.5 -65.5t65.5 -158.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1152" d="M928 1152q93 0 158.5 -65.5t65.5 -158.5v-704q0 -92 -65.5 -158t-158.5 -66h-704q-93 0 -158.5 66t-65.5 158v704q0 93 65.5 158.5t158.5 65.5h704zM1024 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 -28t-28 -68v-704q0 -40 28 -68t68 -28h704q40 0 68 28t28 68z M864 640q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-576q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h576z" /> | ||||
| <glyph unicode="" d="M1134 461q-37 -121 -138 -195t-228 -74t-228 74t-138 195q-8 25 4 48.5t38 31.5q25 8 48.5 -4t31.5 -38q25 -80 92.5 -129.5t151.5 -49.5t151.5 49.5t92.5 129.5q8 26 32 38t49 4t37 -31.5t4 -48.5zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5 t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5 t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1134 307q8 -25 -4 -48.5t-37 -31.5t-49 4t-32 38q-25 80 -92.5 129.5t-151.5 49.5t-151.5 -49.5t-92.5 -129.5q-8 -26 -31.5 -38t-48.5 -4q-26 8 -38 31.5t-4 48.5q37 121 138 195t228 74t228 -74t138 -195zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204 t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1152 448q0 -26 -19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h640q26 0 45 -19t19 -45zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M832 448v128q0 14 -9 23t-23 9h-192v192q0 14 -9 23t-23 9h-128q-14 0 -23 -9t-9 -23v-192h-192q-14 0 -23 -9t-9 -23v-128q0 -14 9 -23t23 -9h192v-192q0 -14 9 -23t23 -9h128q14 0 23 9t9 23v192h192q14 0 23 9t9 23zM1408 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5 t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1664 640q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1920 512q0 -212 -150 -362t-362 -150q-192 0 -338 128h-220q-146 -128 -338 -128q-212 0 -362 150 t-150 362t150 362t362 150h896q212 0 362 -150t150 -362z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M384 368v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM512 624v-96q0 -16 -16 -16h-224q-16 0 -16 16v96q0 16 16 16h224q16 0 16 -16zM384 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1408 368v-96q0 -16 -16 -16 h-864q-16 0 -16 16v96q0 16 16 16h864q16 0 16 -16zM768 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM640 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1024 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16 h96q16 0 16 -16zM896 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1280 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1664 368v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1152 880v-96 q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1408 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1664 880v-352q0 -16 -16 -16h-224q-16 0 -16 16v96q0 16 16 16h112v240q0 16 16 16h96q16 0 16 -16zM1792 128v896h-1664v-896 h1664zM1920 1024v-896q0 -53 -37.5 -90.5t-90.5 -37.5h-1664q-53 0 -90.5 37.5t-37.5 90.5v896q0 53 37.5 90.5t90.5 37.5h1664q53 0 90.5 -37.5t37.5 -90.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1664 491v616q-169 -91 -306 -91q-82 0 -145 32q-100 49 -184 76.5t-178 27.5q-173 0 -403 -127v-599q245 113 433 113q55 0 103.5 -7.5t98 -26t77 -31t82.5 -39.5l28 -14q44 -22 101 -22q120 0 293 92zM320 1280q0 -35 -17.5 -64t-46.5 -46v-1266q0 -14 -9 -23t-23 -9 h-64q-14 0 -23 9t-9 23v1266q-29 17 -46.5 46t-17.5 64q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -39 -35 -57q-10 -5 -17 -9q-218 -116 -369 -116q-88 0 -158 35l-28 14q-64 33 -99 48t-91 29t-114 14q-102 0 -235.5 -44t-228.5 -102 q-15 -9 -33 -9q-16 0 -32 8q-32 19 -32 56v742q0 35 31 55q35 21 78.5 42.5t114 52t152.5 49.5t155 19q112 0 209 -31t209 -86q38 -19 89 -19q122 0 310 112q22 12 31 17q31 16 62 -2q31 -20 31 -55z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M832 536v192q-181 -16 -384 -117v-185q205 96 384 110zM832 954v197q-172 -8 -384 -126v-189q215 111 384 118zM1664 491v184q-235 -116 -384 -71v224q-20 6 -39 15q-5 3 -33 17t-34.5 17t-31.5 15t-34.5 15.5t-32.5 13t-36 12.5t-35 8.5t-39.5 7.5t-39.5 4t-44 2 q-23 0 -49 -3v-222h19q102 0 192.5 -29t197.5 -82q19 -9 39 -15v-188q42 -17 91 -17q120 0 293 92zM1664 918v189q-169 -91 -306 -91q-45 0 -78 8v-196q148 -42 384 90zM320 1280q0 -35 -17.5 -64t-46.5 -46v-1266q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v1266 q-29 17 -46.5 46t-17.5 64q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -39 -35 -57q-10 -5 -17 -9q-218 -116 -369 -116q-88 0 -158 35l-28 14q-64 33 -99 48t-91 29t-114 14q-102 0 -235.5 -44t-228.5 -102q-15 -9 -33 -9q-16 0 -32 8 q-32 19 -32 56v742q0 35 31 55q35 21 78.5 42.5t114 52t152.5 49.5t155 19q112 0 209 -31t209 -86q38 -19 89 -19q122 0 310 112q22 12 31 17q31 16 62 -2q31 -20 31 -55z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M585 553l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23zM1664 96v-64q0 -14 -9 -23t-23 -9h-960q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h960q14 0 23 -9 t9 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M617 137l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23t-10 -23zM1208 1204l-373 -1291q-4 -13 -15.5 -19.5t-23.5 -2.5l-62 17q-13 4 -19.5 15.5t-2.5 24.5 l373 1291q4 13 15.5 19.5t23.5 2.5l62 -17q13 -4 19.5 -15.5t2.5 -24.5zM1865 553l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M640 454v-70q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-512 512q-19 19 -19 45t19 45l512 512q29 31 70 14q39 -17 39 -59v-69l-397 -398q-19 -19 -19 -45t19 -45zM1792 416q0 -58 -17 -133.5t-38.5 -138t-48 -125t-40.5 -90.5l-20 -40q-8 -17 -28 -17q-6 0 -9 1 q-25 8 -23 34q43 400 -106 565q-64 71 -170.5 110.5t-267.5 52.5v-251q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-512 512q-19 19 -19 45t19 45l512 512q29 31 70 14q39 -17 39 -59v-262q411 -28 599 -221q169 -173 169 -509z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1186 579l257 250l-356 52l-66 10l-30 60l-159 322v-963l59 -31l318 -168l-60 355l-12 66zM1638 841l-363 -354l86 -500q5 -33 -6 -51.5t-34 -18.5q-17 0 -40 12l-449 236l-449 -236q-23 -12 -40 -12q-23 0 -34 18.5t-6 51.5l86 500l-364 354q-32 32 -23 59.5t54 34.5 l502 73l225 455q20 41 49 41q28 0 49 -41l225 -455l502 -73q45 -7 54 -34.5t-24 -59.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1401 1187l-640 -1280q-17 -35 -57 -35q-5 0 -15 2q-22 5 -35.5 22.5t-13.5 39.5v576h-576q-22 0 -39.5 13.5t-22.5 35.5t4 42t29 30l1280 640q13 7 29 7q27 0 45 -19q15 -14 18.5 -34.5t-6.5 -39.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M557 256h595v595zM512 301l595 595h-595v-595zM1664 224v-192q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v224h-864q-14 0 -23 9t-9 23v864h-224q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h224v224q0 14 9 23t23 9h192q14 0 23 -9t9 -23 v-224h851l246 247q10 9 23 9t23 -9q9 -10 9 -23t-9 -23l-247 -246v-851h224q14 0 23 -9t9 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M288 64q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM288 1216q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM928 1088q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM1024 1088q0 -52 -26 -96.5t-70 -69.5 q-2 -287 -226 -414q-68 -38 -203 -81q-128 -40 -169.5 -71t-41.5 -100v-26q44 -25 70 -69.5t26 -96.5q0 -80 -56 -136t-136 -56t-136 56t-56 136q0 52 26 96.5t70 69.5v820q-44 25 -70 69.5t-26 96.5q0 80 56 136t136 56t136 -56t56 -136q0 -52 -26 -96.5t-70 -69.5v-497 q54 26 154 57q55 17 87.5 29.5t70.5 31t59 39.5t40.5 51t28 69.5t8.5 91.5q-44 25 -70 69.5t-26 96.5q0 80 56 136t136 56t136 -56t56 -136z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M439 265l-256 -256q-10 -9 -23 -9q-12 0 -23 9q-9 10 -9 23t9 23l256 256q10 9 23 9t23 -9q9 -10 9 -23t-9 -23zM608 224v-320q0 -14 -9 -23t-23 -9t-23 9t-9 23v320q0 14 9 23t23 9t23 -9t9 -23zM384 448q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9t-9 23t9 23t23 9h320 q14 0 23 -9t9 -23zM1648 320q0 -120 -85 -203l-147 -146q-83 -83 -203 -83q-121 0 -204 85l-334 335q-21 21 -42 56l239 18l273 -274q27 -27 68 -27.5t68 26.5l147 146q28 28 28 67q0 40 -28 68l-274 275l18 239q35 -21 56 -42l336 -336q84 -86 84 -204zM1031 1044l-239 -18 l-273 274q-28 28 -68 28q-39 0 -68 -27l-147 -146q-28 -28 -28 -67q0 -40 28 -68l274 -274l-18 -240q-35 21 -56 42l-336 336q-84 86 -84 204q0 120 85 203l147 146q83 83 203 83q121 0 204 -85l334 -335q21 -21 42 -56zM1664 960q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9 t-9 23t9 23t23 9h320q14 0 23 -9t9 -23zM1120 1504v-320q0 -14 -9 -23t-23 -9t-23 9t-9 23v320q0 14 9 23t23 9t23 -9t9 -23zM1527 1353l-256 -256q-11 -9 -23 -9t-23 9q-9 10 -9 23t9 23l256 256q10 9 23 9t23 -9q9 -10 9 -23t-9 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M704 280v-240q0 -16 -12 -28t-28 -12h-240q-16 0 -28 12t-12 28v240q0 16 12 28t28 12h240q16 0 28 -12t12 -28zM1020 880q0 -54 -15.5 -101t-35 -76.5t-55 -59.5t-57.5 -43.5t-61 -35.5q-41 -23 -68.5 -65t-27.5 -67q0 -17 -12 -32.5t-28 -15.5h-240q-15 0 -25.5 18.5 t-10.5 37.5v45q0 83 65 156.5t143 108.5q59 27 84 56t25 76q0 42 -46.5 74t-107.5 32q-65 0 -108 -29q-35 -25 -107 -115q-13 -16 -31 -16q-12 0 -25 8l-164 125q-13 10 -15.5 25t5.5 28q160 266 464 266q80 0 161 -31t146 -83t106 -127.5t41 -158.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="640" d="M640 192v-128q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64v384h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h384q26 0 45 -19t19 -45v-576h64q26 0 45 -19t19 -45zM512 1344v-192q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v192 q0 26 19 45t45 19h256q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="640" d="M512 288v-224q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v224q0 26 19 45t45 19h256q26 0 45 -19t19 -45zM542 1344l-28 -768q-1 -26 -20.5 -45t-45.5 -19h-256q-26 0 -45.5 19t-20.5 45l-28 768q-1 26 17.5 45t44.5 19h320q26 0 44.5 -19t17.5 -45z" /> | ||||
| <glyph unicode="" d="M897 167v-167h-248l-159 252l-24 42q-8 9 -11 21h-3l-9 -21q-10 -20 -25 -44l-155 -250h-258v167h128l197 291l-185 272h-137v168h276l139 -228q2 -4 23 -42q8 -9 11 -21h3q3 9 11 21l25 42l140 228h257v-168h-125l-184 -267l204 -296h109zM1534 846v-206h-514l-3 27 q-4 28 -4 46q0 64 26 117t65 86.5t84 65t84 54.5t65 54t26 64q0 38 -29.5 62.5t-70.5 24.5q-51 0 -97 -39q-14 -11 -36 -38l-105 92q26 37 63 66q83 65 188 65q110 0 178 -59.5t68 -158.5q0 -56 -24.5 -103t-62 -76.5t-81.5 -58.5t-82 -50.5t-65.5 -51.5t-30.5 -63h232v80 h126z" /> | ||||
| <glyph unicode="" d="M897 167v-167h-248l-159 252l-24 42q-8 9 -11 21h-3l-9 -21q-10 -20 -25 -44l-155 -250h-258v167h128l197 291l-185 272h-137v168h276l139 -228q2 -4 23 -42q8 -9 11 -21h3q3 9 11 21l25 42l140 228h257v-168h-125l-184 -267l204 -296h109zM1536 -50v-206h-514l-4 27 q-3 45 -3 46q0 64 26 117t65 86.5t84 65t84 54.5t65 54t26 64q0 38 -29.5 62.5t-70.5 24.5q-51 0 -97 -39q-14 -11 -36 -38l-105 92q26 37 63 66q80 65 188 65q110 0 178 -59.5t68 -158.5q0 -66 -34.5 -118.5t-84 -86t-99.5 -62.5t-87 -63t-41 -73h232v80h126z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M896 128l336 384h-768l-336 -384h768zM1909 1205q15 -34 9.5 -71.5t-30.5 -65.5l-896 -1024q-38 -44 -96 -44h-768q-38 0 -69.5 20.5t-47.5 54.5q-15 34 -9.5 71.5t30.5 65.5l896 1024q38 44 96 44h768q38 0 69.5 -20.5t47.5 -54.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1664 438q0 -81 -44.5 -135t-123.5 -54q-41 0 -77.5 17.5t-59 38t-56.5 38t-71 17.5q-110 0 -110 -124q0 -39 16 -115t15 -115v-5q-22 0 -33 -1q-34 -3 -97.5 -11.5t-115.5 -13.5t-98 -5q-61 0 -103 26.5t-42 83.5q0 37 17.5 71t38 56.5t38 59t17.5 77.5q0 79 -54 123.5 t-135 44.5q-84 0 -143 -45.5t-59 -127.5q0 -43 15 -83t33.5 -64.5t33.5 -53t15 -50.5q0 -45 -46 -89q-37 -35 -117 -35q-95 0 -245 24q-9 2 -27.5 4t-27.5 4l-13 2q-1 0 -3 1q-2 0 -2 1v1024q2 -1 17.5 -3.5t34 -5t21.5 -3.5q150 -24 245 -24q80 0 117 35q46 44 46 89 q0 22 -15 50.5t-33.5 53t-33.5 64.5t-15 83q0 82 59 127.5t144 45.5q80 0 134 -44.5t54 -123.5q0 -41 -17.5 -77.5t-38 -59t-38 -56.5t-17.5 -71q0 -57 42 -83.5t103 -26.5q64 0 180 15t163 17v-2q-1 -2 -3.5 -17.5t-5 -34t-3.5 -21.5q-24 -150 -24 -245q0 -80 35 -117 q44 -46 89 -46q22 0 50.5 15t53 33.5t64.5 33.5t83 15q82 0 127.5 -59t45.5 -143z" /> | ||||
| <glyph unicode="" horiz-adv-x="1152" d="M1152 832v-128q0 -221 -147.5 -384.5t-364.5 -187.5v-132h256q26 0 45 -19t19 -45t-19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h256v132q-217 24 -364.5 187.5t-147.5 384.5v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -185 131.5 -316.5t316.5 -131.5 t316.5 131.5t131.5 316.5v128q0 26 19 45t45 19t45 -19t19 -45zM896 1216v-512q0 -132 -94 -226t-226 -94t-226 94t-94 226v512q0 132 94 226t226 94t226 -94t94 -226z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M271 591l-101 -101q-42 103 -42 214v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -53 15 -113zM1385 1193l-361 -361v-128q0 -132 -94 -226t-226 -94q-55 0 -109 19l-96 -96q97 -51 205 -51q185 0 316.5 131.5t131.5 316.5v128q0 26 19 45t45 19t45 -19t19 -45v-128 q0 -221 -147.5 -384.5t-364.5 -187.5v-132h256q26 0 45 -19t19 -45t-19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h256v132q-125 13 -235 81l-254 -254q-10 -10 -23 -10t-23 10l-82 82q-10 10 -10 23t10 23l1234 1234q10 10 23 10t23 -10l82 -82q10 -10 10 -23 t-10 -23zM1005 1325l-621 -621v512q0 132 94 226t226 94q102 0 184.5 -59t116.5 -152z" /> | ||||
| <glyph unicode="" horiz-adv-x="1280" d="M1088 576v640h-448v-1137q119 63 213 137q235 184 235 360zM1280 1344v-768q0 -86 -33.5 -170.5t-83 -150t-118 -127.5t-126.5 -103t-121 -77.5t-89.5 -49.5t-42.5 -20q-12 -6 -26 -6t-26 6q-16 7 -42.5 20t-89.5 49.5t-121 77.5t-126.5 103t-118 127.5t-83 150 t-33.5 170.5v768q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M128 -128h1408v1024h-1408v-1024zM512 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1280 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1664 1152v-1280 q0 -52 -38 -90t-90 -38h-1408q-52 0 -90 38t-38 90v1280q0 52 38 90t90 38h128v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h384v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h128q52 0 90 -38t38 -90z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M512 1344q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 1376v-320q0 -16 -12 -25q-8 -7 -20 -7q-4 0 -7 1l-448 96q-11 2 -18 11t-7 20h-256v-102q111 -23 183.5 -111t72.5 -203v-800q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v800 q0 106 62.5 190.5t161.5 114.5v111h-32q-59 0 -115 -23.5t-91.5 -53t-66 -66.5t-40.5 -53.5t-14 -24.5q-17 -35 -57 -35q-16 0 -29 7q-23 12 -31.5 37t3.5 49q5 10 14.5 26t37.5 53.5t60.5 70t85 67t108.5 52.5q-25 42 -25 86q0 66 47 113t113 47t113 -47t47 -113 q0 -33 -14 -64h302q0 11 7 20t18 11l448 96q3 1 7 1q12 0 20 -7q12 -9 12 -25z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1440 1088q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM1664 1376q0 -249 -75.5 -430.5t-253.5 -360.5q-81 -80 -195 -176l-20 -379q-2 -16 -16 -26l-384 -224q-7 -4 -16 -4q-12 0 -23 9l-64 64q-13 14 -8 32l85 276l-281 281l-276 -85q-3 -1 -9 -1 q-14 0 -23 9l-64 64q-17 19 -5 39l224 384q10 14 26 16l379 20q96 114 176 195q188 187 358 258t431 71q14 0 24 -9.5t10 -22.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1708 881l-188 -881h-304l181 849q4 21 1 43q-4 20 -16 35q-10 14 -28 24q-18 9 -40 9h-197l-205 -960h-303l204 960h-304l-205 -960h-304l272 1280h1139q157 0 245 -118q86 -116 52 -281z" /> | ||||
| <glyph unicode="" d="M909 141l102 102q19 19 19 45t-19 45l-307 307l307 307q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l454 -454q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M717 141l454 454q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l307 -307l-307 -307q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1165 397l102 102q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19l307 307l307 -307q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M813 237l454 454q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-307 -307l-307 307q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l454 -454q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1130 939l16 175h-884l47 -534h612l-22 -228l-197 -53l-196 53l-13 140h-175l22 -278l362 -100h4v1l359 99l50 544h-644l-15 181h674zM0 1408h1408l-128 -1438l-578 -162l-574 162z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M275 1408h1505l-266 -1333l-804 -267l-698 267l71 356h297l-29 -147l422 -161l486 161l68 339h-1208l58 297h1209l38 191h-1208z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M960 1280q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1792 352v-352q0 -22 -20 -30q-8 -2 -12 -2q-13 0 -23 9l-93 93q-119 -143 -318.5 -226.5t-429.5 -83.5t-429.5 83.5t-318.5 226.5l-93 -93q-9 -9 -23 -9q-4 0 -12 2q-20 8 -20 30v352 q0 14 9 23t23 9h352q22 0 30 -20q8 -19 -7 -35l-100 -100q67 -91 189.5 -153.5t271.5 -82.5v647h-192q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h192v163q-58 34 -93 92.5t-35 128.5q0 106 75 181t181 75t181 -75t75 -181q0 -70 -35 -128.5t-93 -92.5v-163h192q26 0 45 -19 t19 -45v-128q0 -26 -19 -45t-45 -19h-192v-647q149 20 271.5 82.5t189.5 153.5l-100 100q-15 16 -7 35q8 20 30 20h352q14 0 23 -9t9 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1152" d="M1056 768q40 0 68 -28t28 -68v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v576q0 40 28 68t68 28h32v320q0 185 131.5 316.5t316.5 131.5t316.5 -131.5t131.5 -316.5q0 -26 -19 -45t-45 -19h-64q-26 0 -45 19t-19 45q0 106 -75 181t-181 75t-181 -75t-75 -181 v-320h736z" /> | ||||
| <glyph unicode="" d="M1024 640q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181zM1152 640q0 159 -112.5 271.5t-271.5 112.5t-271.5 -112.5t-112.5 -271.5t112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5zM1280 640q0 -212 -150 -362t-362 -150t-362 150 t-150 362t150 362t362 150t362 -150t150 -362zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M384 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM896 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM1408 800v-192q0 -40 -28 -68t-68 -28h-192 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68z" /> | ||||
| <glyph unicode="" horiz-adv-x="384" d="M384 288v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM384 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM384 1312v-192q0 -40 -28 -68t-68 -28h-192 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68z" /> | ||||
| <glyph unicode="" d="M512 256q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM863 162q-13 232 -177 396t-396 177q-14 1 -24 -9t-10 -23v-128q0 -13 8.5 -22t21.5 -10q154 -11 264 -121t121 -264q1 -13 10 -21.5t22 -8.5h128q13 0 23 10 t9 24zM1247 161q-5 154 -56 297.5t-139.5 260t-205 205t-260 139.5t-297.5 56q-14 1 -23 -9q-10 -10 -10 -23v-128q0 -13 9 -22t22 -10q204 -7 378 -111.5t278.5 -278.5t111.5 -378q1 -13 10 -22t22 -9h128q13 0 23 10q11 9 9 23zM1536 1120v-960q0 -119 -84.5 -203.5 t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" d="M768 1408q209 0 385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103zM1152 585q32 18 32 55t-32 55l-544 320q-31 19 -64 1q-32 -19 -32 -56v-640q0 -37 32 -56 q16 -8 32 -8q17 0 32 9z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1024 1084l316 -316l-572 -572l-316 316zM813 105l618 618q19 19 19 45t-19 45l-362 362q-18 18 -45 18t-45 -18l-618 -618q-19 -19 -19 -45t19 -45l362 -362q18 -18 45 -18t45 18zM1702 742l-907 -908q-37 -37 -90.5 -37t-90.5 37l-126 126q56 56 56 136t-56 136 t-136 56t-136 -56l-125 126q-37 37 -37 90.5t37 90.5l907 906q37 37 90.5 37t90.5 -37l125 -125q-56 -56 -56 -136t56 -136t136 -56t136 56l126 -125q37 -37 37 -90.5t-37 -90.5z" /> | ||||
| <glyph unicode="" d="M1280 576v128q0 26 -19 45t-45 19h-896q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h896q26 0 45 19t19 45zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5 t84.5 -203.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1152 736v-64q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h832q14 0 23 -9t9 -23zM1280 288v832q0 66 -47 113t-113 47h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113zM1408 1120v-832q0 -119 -84.5 -203.5 t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M1018 933q-18 -37 -58 -37h-192v-864q0 -14 -9 -23t-23 -9h-704q-21 0 -29 18q-8 20 4 35l160 192q9 11 25 11h320v640h-192q-40 0 -58 37q-17 37 9 68l320 384q18 22 49 22t49 -22l320 -384q27 -32 9 -68z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M32 1280h704q13 0 22.5 -9.5t9.5 -23.5v-863h192q40 0 58 -37t-9 -69l-320 -384q-18 -22 -49 -22t-49 22l-320 384q-26 31 -9 69q18 37 58 37h192v640h-320q-14 0 -25 11l-160 192q-13 14 -4 34q9 19 29 19z" /> | ||||
| <glyph unicode="" d="M685 237l614 614q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-467 -467l-211 211q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l358 -358q19 -19 45 -19t45 19zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5 t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" d="M404 428l152 -152l-52 -52h-56v96h-96v56zM818 818q14 -13 -3 -30l-291 -291q-17 -17 -30 -3q-14 13 3 30l291 291q17 17 30 3zM544 128l544 544l-288 288l-544 -544v-288h288zM1152 736l92 92q28 28 28 68t-28 68l-152 152q-28 28 -68 28t-68 -28l-92 -92zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" d="M1280 608v480q0 26 -19 45t-45 19h-480q-42 0 -59 -39q-17 -41 14 -70l144 -144l-534 -534q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19l534 534l144 -144q18 -19 45 -19q12 0 25 5q39 17 39 59zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960 q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" d="M1005 435l352 352q19 19 19 45t-19 45l-352 352q-30 31 -69 14q-40 -17 -40 -59v-160q-119 0 -216 -19.5t-162.5 -51t-114 -79t-76.5 -95.5t-44.5 -109t-21.5 -111.5t-5 -110.5q0 -181 167 -404q10 -12 25 -12q7 0 13 3q22 9 19 33q-44 354 62 473q46 52 130 75.5 t224 23.5v-160q0 -42 40 -59q12 -5 24 -5q26 0 45 19zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" d="M640 448l256 128l-256 128v-256zM1024 1039v-542l-512 -256v542zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103 t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1145 861q18 -35 -5 -66l-320 -448q-19 -27 -52 -27t-52 27l-320 448q-23 31 -5 66q17 35 57 35h640q40 0 57 -35zM1280 160v960q0 13 -9.5 22.5t-22.5 9.5h-960q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" d="M1145 419q-17 -35 -57 -35h-640q-40 0 -57 35q-18 35 5 66l320 448q19 27 52 27t52 -27l320 -448q23 -31 5 -66zM1280 160v960q0 13 -9.5 22.5t-22.5 9.5h-960q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5zM1536 1120v-960 q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" d="M1088 640q0 -33 -27 -52l-448 -320q-31 -23 -66 -5q-35 17 -35 57v640q0 40 35 57q35 18 66 -5l448 -320q27 -19 27 -52zM1280 160v960q0 14 -9 23t-23 9h-960q-14 0 -23 -9t-9 -23v-960q0 -14 9 -23t23 -9h960q14 0 23 9t9 23zM1536 1120v-960q0 -119 -84.5 -203.5 t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M976 229l35 -159q3 -12 -3 -22.5t-17 -14.5l-5 -1q-4 -2 -10.5 -3.5t-16 -4.5t-21.5 -5.5t-25.5 -5t-30 -5t-33.5 -4.5t-36.5 -3t-38.5 -1q-234 0 -409 130.5t-238 351.5h-95q-13 0 -22.5 9.5t-9.5 22.5v113q0 13 9.5 22.5t22.5 9.5h66q-2 57 1 105h-67q-14 0 -23 9 t-9 23v114q0 14 9 23t23 9h98q67 210 243.5 338t400.5 128q102 0 194 -23q11 -3 20 -15q6 -11 3 -24l-43 -159q-3 -13 -14 -19.5t-24 -2.5l-4 1q-4 1 -11.5 2.5l-17.5 3.5t-22.5 3.5t-26 3t-29 2.5t-29.5 1q-126 0 -226 -64t-150 -176h468q16 0 25 -12q10 -12 7 -26 l-24 -114q-5 -26 -32 -26h-488q-3 -37 0 -105h459q15 0 25 -12q9 -12 6 -27l-24 -112q-2 -11 -11 -18.5t-20 -7.5h-387q48 -117 149.5 -185.5t228.5 -68.5q18 0 36 1.5t33.5 3.5t29.5 4.5t24.5 5t18.5 4.5l12 3l5 2q13 5 26 -2q12 -7 15 -21z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M1020 399v-367q0 -14 -9 -23t-23 -9h-956q-14 0 -23 9t-9 23v150q0 13 9.5 22.5t22.5 9.5h97v383h-95q-14 0 -23 9.5t-9 22.5v131q0 14 9 23t23 9h95v223q0 171 123.5 282t314.5 111q185 0 335 -125q9 -8 10 -20.5t-7 -22.5l-103 -127q-9 -11 -22 -12q-13 -2 -23 7 q-5 5 -26 19t-69 32t-93 18q-85 0 -137 -47t-52 -123v-215h305q13 0 22.5 -9t9.5 -23v-131q0 -13 -9.5 -22.5t-22.5 -9.5h-305v-379h414v181q0 13 9 22.5t23 9.5h162q14 0 23 -9.5t9 -22.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M978 351q0 -153 -99.5 -263.5t-258.5 -136.5v-175q0 -14 -9 -23t-23 -9h-135q-13 0 -22.5 9.5t-9.5 22.5v175q-66 9 -127.5 31t-101.5 44.5t-74 48t-46.5 37.5t-17.5 18q-17 21 -2 41l103 135q7 10 23 12q15 2 24 -9l2 -2q113 -99 243 -125q37 -8 74 -8q81 0 142.5 43 t61.5 122q0 28 -15 53t-33.5 42t-58.5 37.5t-66 32t-80 32.5q-39 16 -61.5 25t-61.5 26.5t-62.5 31t-56.5 35.5t-53.5 42.5t-43.5 49t-35.5 58t-21 66.5t-8.5 78q0 138 98 242t255 134v180q0 13 9.5 22.5t22.5 9.5h135q14 0 23 -9t9 -23v-176q57 -6 110.5 -23t87 -33.5 t63.5 -37.5t39 -29t15 -14q17 -18 5 -38l-81 -146q-8 -15 -23 -16q-14 -3 -27 7q-3 3 -14.5 12t-39 26.5t-58.5 32t-74.5 26t-85.5 11.5q-95 0 -155 -43t-60 -111q0 -26 8.5 -48t29.5 -41.5t39.5 -33t56 -31t60.5 -27t70 -27.5q53 -20 81 -31.5t76 -35t75.5 -42.5t62 -50 t53 -63.5t31.5 -76.5t13 -94z" /> | ||||
| <glyph unicode="" horiz-adv-x="898" d="M898 1066v-102q0 -14 -9 -23t-23 -9h-168q-23 -144 -129 -234t-276 -110q167 -178 459 -536q14 -16 4 -34q-8 -18 -29 -18h-195q-16 0 -25 12q-306 367 -498 571q-9 9 -9 22v127q0 13 9.5 22.5t22.5 9.5h112q132 0 212.5 43t102.5 125h-427q-14 0 -23 9t-9 23v102 q0 14 9 23t23 9h413q-57 113 -268 113h-145q-13 0 -22.5 9.5t-9.5 22.5v133q0 14 9 23t23 9h832q14 0 23 -9t9 -23v-102q0 -14 -9 -23t-23 -9h-233q47 -61 64 -144h171q14 0 23 -9t9 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1027" d="M603 0h-172q-13 0 -22.5 9t-9.5 23v330h-288q-13 0 -22.5 9t-9.5 23v103q0 13 9.5 22.5t22.5 9.5h288v85h-288q-13 0 -22.5 9t-9.5 23v104q0 13 9.5 22.5t22.5 9.5h214l-321 578q-8 16 0 32q10 16 28 16h194q19 0 29 -18l215 -425q19 -38 56 -125q10 24 30.5 68t27.5 61 l191 420q8 19 29 19h191q17 0 27 -16q9 -14 1 -31l-313 -579h215q13 0 22.5 -9.5t9.5 -22.5v-104q0 -14 -9.5 -23t-22.5 -9h-290v-85h290q13 0 22.5 -9.5t9.5 -22.5v-103q0 -14 -9.5 -23t-22.5 -9h-290v-330q0 -13 -9.5 -22.5t-22.5 -9.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1664 352v-32q0 -132 -94 -226t-226 -94h-128q-132 0 -226 94t-94 226v480h-224q-2 -102 -14.5 -190.5t-30.5 -156t-48.5 -126.5t-57 -99.5t-67.5 -77.5t-69.5 -58.5t-74 -44t-69 -32t-65.5 -25.5q-4 -2 -32 -13q-8 -2 -12 -2q-22 0 -30 20l-71 178q-5 13 0 25t17 17 q7 3 20 7.5t18 6.5q31 12 46.5 18.5t44.5 20t45.5 26t42 32.5t40.5 42.5t34.5 53.5t30.5 68.5t22.5 83.5t17 103t6.5 123h-256q-14 0 -23 9t-9 23v160q0 14 9 23t23 9h1216q14 0 23 -9t9 -23v-160q0 -14 -9 -23t-23 -9h-224v-512q0 -26 19 -45t45 -19h128q26 0 45 19t19 45 v64q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1280 1376v-160q0 -14 -9 -23t-23 -9h-960q-14 0 -23 9t-9 23v160q0 14 9 23t23 9h960q14 0 23 -9t9 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M514 341l81 299h-159l75 -300q1 -1 1 -3t1 -3q0 1 0.5 3.5t0.5 3.5zM630 768l35 128h-292l32 -128h225zM822 768h139l-35 128h-70zM1271 340l78 300h-162l81 -299q0 -1 0.5 -3.5t1.5 -3.5q0 1 0.5 3t0.5 3zM1382 768l33 128h-297l34 -128h230zM1792 736v-64q0 -14 -9 -23 t-23 -9h-213l-164 -616q-7 -24 -31 -24h-159q-24 0 -31 24l-166 616h-209l-167 -616q-7 -24 -31 -24h-159q-11 0 -19.5 7t-10.5 17l-160 616h-208q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h175l-33 128h-142q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h109l-89 344q-5 15 5 28 q10 12 26 12h137q26 0 31 -24l90 -360h359l97 360q7 24 31 24h126q24 0 31 -24l98 -360h365l93 360q5 24 31 24h137q16 0 26 -12q10 -13 5 -28l-91 -344h111q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-145l-34 -128h179q14 0 23 -9t9 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1280" d="M1167 896q18 -182 -131 -258q117 -28 175 -103t45 -214q-7 -71 -32.5 -125t-64.5 -89t-97 -58.5t-121.5 -34.5t-145.5 -15v-255h-154v251q-80 0 -122 1v-252h-154v255q-18 0 -54 0.5t-55 0.5h-200l31 183h111q50 0 58 51v402h16q-6 1 -16 1v287q-13 68 -89 68h-111v164 l212 -1q64 0 97 1v252h154v-247q82 2 122 2v245h154v-252q79 -7 140 -22.5t113 -45t82.5 -78t36.5 -114.5zM952 351q0 36 -15 64t-37 46t-57.5 30.5t-65.5 18.5t-74 9t-69 3t-64.5 -1t-47.5 -1v-338q8 0 37 -0.5t48 -0.5t53 1.5t58.5 4t57 8.5t55.5 14t47.5 21t39.5 30 t24.5 40t9.5 51zM881 827q0 33 -12.5 58.5t-30.5 42t-48 28t-55 16.5t-61.5 8t-58 2.5t-54 -1t-39.5 -0.5v-307q5 0 34.5 -0.5t46.5 0t50 2t55 5.5t51.5 11t48.5 18.5t37 27t27 38.5t9 51z" /> | ||||
| <glyph unicode="" horiz-adv-x="1280" d="M1280 768v-800q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h544v-544q0 -40 28 -68t68 -28h544zM1277 896h-509v509q82 -15 132 -65l312 -312q50 -50 65 -132z" /> | ||||
| <glyph unicode="" horiz-adv-x="1280" d="M1024 160v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h704q14 0 23 9t9 23zM1024 416v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h704q14 0 23 9t9 23zM1280 768v-800q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28 t-28 68v1344q0 40 28 68t68 28h544v-544q0 -40 28 -68t68 -28h544zM1277 896h-509v509q82 -15 132 -65l312 -312q50 -50 65 -132z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1191 1128h177l-72 218l-12 47q-2 16 -2 20h-4l-3 -20q0 -1 -3.5 -18t-7.5 -29zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1572 -23 v-233h-584v90l369 529q12 18 21 27l11 9v3q-2 0 -6.5 -0.5t-7.5 -0.5q-12 -3 -30 -3h-232v-115h-120v229h567v-89l-369 -530q-6 -8 -21 -26l-11 -11v-2l14 2q9 2 30 2h248v119h121zM1661 874v-106h-288v106h75l-47 144h-243l-47 -144h75v-106h-287v106h70l230 662h162 l230 -662h70z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1191 104h177l-72 218l-12 47q-2 16 -2 20h-4l-3 -20q0 -1 -3.5 -18t-7.5 -29zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1661 -150 v-106h-288v106h75l-47 144h-243l-47 -144h75v-106h-287v106h70l230 662h162l230 -662h70zM1572 1001v-233h-584v90l369 529q12 18 21 27l11 9v3q-2 0 -6.5 -0.5t-7.5 -0.5q-12 -3 -30 -3h-232v-115h-120v229h567v-89l-369 -530q-6 -8 -21 -26l-11 -10v-3l14 3q9 1 30 1h248 v119h121z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1792 -32v-192q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h832 q14 0 23 -9t9 -23zM1600 480v-192q0 -14 -9 -23t-23 -9h-640q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h640q14 0 23 -9t9 -23zM1408 992v-192q0 -14 -9 -23t-23 -9h-448q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h448q14 0 23 -9t9 -23zM1216 1504v-192q0 -14 -9 -23t-23 -9h-256 q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h256q14 0 23 -9t9 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1216 -32v-192q0 -14 -9 -23t-23 -9h-256q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h256q14 0 23 -9t9 -23zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192 q14 0 23 -9t9 -23zM1408 480v-192q0 -14 -9 -23t-23 -9h-448q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h448q14 0 23 -9t9 -23zM1600 992v-192q0 -14 -9 -23t-23 -9h-640q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h640q14 0 23 -9t9 -23zM1792 1504v-192q0 -14 -9 -23t-23 -9h-832 q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h832q14 0 23 -9t9 -23z" /> | ||||
| <glyph unicode="" d="M1346 223q0 63 -44 116t-103 53q-52 0 -83 -37t-31 -94t36.5 -95t104.5 -38q50 0 85 27t35 68zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23 zM1486 165q0 -62 -13 -121.5t-41 -114t-68 -95.5t-98.5 -65.5t-127.5 -24.5q-62 0 -108 16q-24 8 -42 15l39 113q15 -7 31 -11q37 -13 75 -13q84 0 134.5 58.5t66.5 145.5h-2q-21 -23 -61.5 -37t-84.5 -14q-106 0 -173 71.5t-67 172.5q0 105 72 178t181 73q123 0 205 -94.5 t82 -252.5zM1456 882v-114h-469v114h167v432q0 7 0.5 19t0.5 17v16h-2l-7 -12q-8 -13 -26 -31l-62 -58l-82 86l192 185h123v-654h165z" /> | ||||
| <glyph unicode="" d="M1346 1247q0 63 -44 116t-103 53q-52 0 -83 -37t-31 -94t36.5 -95t104.5 -38q50 0 85 27t35 68zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9 t9 -23zM1456 -142v-114h-469v114h167v432q0 7 0.5 19t0.5 17v16h-2l-7 -12q-8 -13 -26 -31l-62 -58l-82 86l192 185h123v-654h165zM1486 1189q0 -62 -13 -121.5t-41 -114t-68 -95.5t-98.5 -65.5t-127.5 -24.5q-62 0 -108 16q-24 8 -42 15l39 113q15 -7 31 -11q37 -13 75 -13 q84 0 134.5 58.5t66.5 145.5h-2q-21 -23 -61.5 -37t-84.5 -14q-106 0 -173 71.5t-67 172.5q0 105 72 178t181 73q123 0 205 -94.5t82 -252.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M256 192q0 26 -19 45t-45 19q-27 0 -45.5 -19t-18.5 -45q0 -27 18.5 -45.5t45.5 -18.5q26 0 45 18.5t19 45.5zM416 704v-640q0 -26 -19 -45t-45 -19h-288q-26 0 -45 19t-19 45v640q0 26 19 45t45 19h288q26 0 45 -19t19 -45zM1600 704q0 -86 -55 -149q15 -44 15 -76 q3 -76 -43 -137q17 -56 0 -117q-15 -57 -54 -94q9 -112 -49 -181q-64 -76 -197 -78h-36h-76h-17q-66 0 -144 15.5t-121.5 29t-120.5 39.5q-123 43 -158 44q-26 1 -45 19.5t-19 44.5v641q0 25 18 43.5t43 20.5q24 2 76 59t101 121q68 87 101 120q18 18 31 48t17.5 48.5 t13.5 60.5q7 39 12.5 61t19.5 52t34 50q19 19 45 19q46 0 82.5 -10.5t60 -26t40 -40.5t24 -45t12 -50t5 -45t0.5 -39q0 -38 -9.5 -76t-19 -60t-27.5 -56q-3 -6 -10 -18t-11 -22t-8 -24h277q78 0 135 -57t57 -135z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M256 960q0 -26 -19 -45t-45 -19q-27 0 -45.5 19t-18.5 45q0 27 18.5 45.5t45.5 18.5q26 0 45 -18.5t19 -45.5zM416 448v640q0 26 -19 45t-45 19h-288q-26 0 -45 -19t-19 -45v-640q0 -26 19 -45t45 -19h288q26 0 45 19t19 45zM1545 597q55 -61 55 -149q-1 -78 -57.5 -135 t-134.5 -57h-277q4 -14 8 -24t11 -22t10 -18q18 -37 27 -57t19 -58.5t10 -76.5q0 -24 -0.5 -39t-5 -45t-12 -50t-24 -45t-40 -40.5t-60 -26t-82.5 -10.5q-26 0 -45 19q-20 20 -34 50t-19.5 52t-12.5 61q-9 42 -13.5 60.5t-17.5 48.5t-31 48q-33 33 -101 120q-49 64 -101 121 t-76 59q-25 2 -43 20.5t-18 43.5v641q0 26 19 44.5t45 19.5q35 1 158 44q77 26 120.5 39.5t121.5 29t144 15.5h17h76h36q133 -2 197 -78q58 -69 49 -181q39 -37 54 -94q17 -61 0 -117q46 -61 43 -137q0 -32 -15 -76z" /> | ||||
| <glyph unicode="" d="M919 233v157q0 50 -29 50q-17 0 -33 -16v-224q16 -16 33 -16q29 0 29 49zM1103 355h66v34q0 51 -33 51t-33 -51v-34zM532 621v-70h-80v-423h-74v423h-78v70h232zM733 495v-367h-67v40q-39 -45 -76 -45q-33 0 -42 28q-6 16 -6 54v290h66v-270q0 -24 1 -26q1 -15 15 -15 q20 0 42 31v280h67zM985 384v-146q0 -52 -7 -73q-12 -42 -53 -42q-35 0 -68 41v-36h-67v493h67v-161q32 40 68 40q41 0 53 -42q7 -21 7 -74zM1236 255v-9q0 -29 -2 -43q-3 -22 -15 -40q-27 -40 -80 -40q-52 0 -81 38q-21 27 -21 86v129q0 59 20 86q29 38 80 38t78 -38 q21 -28 21 -86v-76h-133v-65q0 -51 34 -51q24 0 30 26q0 1 0.5 7t0.5 16.5v21.5h68zM785 1079v-156q0 -51 -32 -51t-32 51v156q0 52 32 52t32 -52zM1318 366q0 177 -19 260q-10 44 -43 73.5t-76 34.5q-136 15 -412 15q-275 0 -411 -15q-44 -5 -76.5 -34.5t-42.5 -73.5 q-20 -87 -20 -260q0 -176 20 -260q10 -43 42.5 -73t75.5 -35q137 -15 412 -15t412 15q43 5 75.5 35t42.5 73q20 84 20 260zM563 1017l90 296h-75l-51 -195l-53 195h-78l24 -69t23 -69q35 -103 46 -158v-201h74v201zM852 936v130q0 58 -21 87q-29 38 -78 38q-51 0 -78 -38 q-21 -29 -21 -87v-130q0 -58 21 -87q27 -38 78 -38q49 0 78 38q21 27 21 87zM1033 816h67v370h-67v-283q-22 -31 -42 -31q-15 0 -16 16q-1 2 -1 26v272h-67v-293q0 -37 6 -55q11 -27 43 -27q36 0 77 45v-40zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960 q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" d="M971 292v-211q0 -67 -39 -67q-23 0 -45 22v301q22 22 45 22q39 0 39 -67zM1309 291v-46h-90v46q0 68 45 68t45 -68zM343 509h107v94h-312v-94h105v-569h100v569zM631 -60h89v494h-89v-378q-30 -42 -57 -42q-18 0 -21 21q-1 3 -1 35v364h-89v-391q0 -49 8 -73 q12 -37 58 -37q48 0 102 61v-54zM1060 88v197q0 73 -9 99q-17 56 -71 56q-50 0 -93 -54v217h-89v-663h89v48q45 -55 93 -55q54 0 71 55q9 27 9 100zM1398 98v13h-91q0 -51 -2 -61q-7 -36 -40 -36q-46 0 -46 69v87h179v103q0 79 -27 116q-39 51 -106 51q-68 0 -107 -51 q-28 -37 -28 -116v-173q0 -79 29 -116q39 -51 108 -51q72 0 108 53q18 27 21 54q2 9 2 58zM790 1011v210q0 69 -43 69t-43 -69v-210q0 -70 43 -70t43 70zM1509 260q0 -234 -26 -350q-14 -59 -58 -99t-102 -46q-184 -21 -555 -21t-555 21q-58 6 -102.5 46t-57.5 99 q-26 112 -26 350q0 234 26 350q14 59 58 99t103 47q183 20 554 20t555 -20q58 -7 102.5 -47t57.5 -99q26 -112 26 -350zM511 1536h102l-121 -399v-271h-100v271q-14 74 -61 212q-37 103 -65 187h106l71 -263zM881 1203v-175q0 -81 -28 -118q-37 -51 -106 -51q-67 0 -105 51 q-28 38 -28 118v175q0 80 28 117q38 51 105 51q69 0 106 -51q28 -37 28 -117zM1216 1365v-499h-91v55q-53 -62 -103 -62q-46 0 -59 37q-8 24 -8 75v394h91v-367q0 -33 1 -35q3 -22 21 -22q27 0 57 43v381h91z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M597 869q-10 -18 -257 -456q-27 -46 -65 -46h-239q-21 0 -31 17t0 36l253 448q1 0 0 1l-161 279q-12 22 -1 37q9 15 32 15h239q40 0 66 -45zM1403 1511q11 -16 0 -37l-528 -934v-1l336 -615q11 -20 1 -37q-10 -15 -32 -15h-239q-42 0 -66 45l-339 622q18 32 531 942 q25 45 64 45h241q22 0 31 -15z" /> | ||||
| <glyph unicode="" d="M685 771q0 1 -126 222q-21 34 -52 34h-184q-18 0 -26 -11q-7 -12 1 -29l125 -216v-1l-196 -346q-9 -14 0 -28q8 -13 24 -13h185q31 0 50 36zM1309 1268q-7 12 -24 12h-187q-30 0 -49 -35l-411 -729q1 -2 262 -481q20 -35 52 -35h184q18 0 25 12q8 13 -1 28l-260 476v1 l409 723q8 16 0 28zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1280 640q0 37 -30 54l-512 320q-31 20 -65 2q-33 -18 -33 -56v-640q0 -38 33 -56q16 -8 31 -8q20 0 34 10l512 320q30 17 30 54zM1792 640q0 -96 -1 -150t-8.5 -136.5t-22.5 -147.5q-16 -73 -69 -123t-124 -58q-222 -25 -671 -25t-671 25q-71 8 -124.5 58t-69.5 123 q-14 65 -21.5 147.5t-8.5 136.5t-1 150t1 150t8.5 136.5t22.5 147.5q16 73 69 123t124 58q222 25 671 25t671 -25q71 -8 124.5 -58t69.5 -123q14 -65 21.5 -147.5t8.5 -136.5t1 -150z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M402 829l494 -305l-342 -285l-490 319zM1388 274v-108l-490 -293v-1l-1 1l-1 -1v1l-489 293v108l147 -96l342 284v2l1 -1l1 1v-2l343 -284zM554 1418l342 -285l-494 -304l-338 270zM1390 829l338 -271l-489 -319l-343 285zM1239 1418l489 -319l-338 -270l-494 304z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M928 135v-151l-707 -1v151zM1169 481v-701l-1 -35v-1h-1132l-35 1h-1v736h121v-618h928v618h120zM241 393l704 -65l-13 -150l-705 65zM309 709l683 -183l-39 -146l-683 183zM472 1058l609 -360l-77 -130l-609 360zM832 1389l398 -585l-124 -85l-399 584zM1285 1536 l121 -697l-149 -26l-121 697z" /> | ||||
| <glyph unicode="" d="M1362 110v648h-135q20 -63 20 -131q0 -126 -64 -232.5t-174 -168.5t-240 -62q-197 0 -337 135.5t-140 327.5q0 68 20 131h-141v-648q0 -26 17.5 -43.5t43.5 -17.5h1069q25 0 43 17.5t18 43.5zM1078 643q0 124 -90.5 211.5t-218.5 87.5q-127 0 -217.5 -87.5t-90.5 -211.5 t90.5 -211.5t217.5 -87.5q128 0 218.5 87.5t90.5 211.5zM1362 1003v165q0 28 -20 48.5t-49 20.5h-174q-29 0 -49 -20.5t-20 -48.5v-165q0 -29 20 -49t49 -20h174q29 0 49 20t20 49zM1536 1211v-1142q0 -81 -58 -139t-139 -58h-1142q-81 0 -139 58t-58 139v1142q0 81 58 139 t139 58h1142q81 0 139 -58t58 -139z" /> | ||||
| <glyph unicode="" d="M1248 1408q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960zM698 640q0 88 -62 150t-150 62t-150 -62t-62 -150t62 -150t150 -62t150 62t62 150zM1262 640q0 88 -62 150 t-150 62t-150 -62t-62 -150t62 -150t150 -62t150 62t62 150z" /> | ||||
| <glyph unicode="" d="M768 914l201 -306h-402zM1133 384h94l-459 691l-459 -691h94l104 160h522zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M815 677q8 -63 -50.5 -101t-111.5 -6q-39 17 -53.5 58t-0.5 82t52 58q36 18 72.5 12t64 -35.5t27.5 -67.5zM926 698q-14 107 -113 164t-197 13q-63 -28 -100.5 -88.5t-34.5 -129.5q4 -91 77.5 -155t165.5 -56q91 8 152 84t50 168zM1165 1240q-20 27 -56 44.5t-58 22 t-71 12.5q-291 47 -566 -2q-43 -7 -66 -12t-55 -22t-50 -43q30 -28 76 -45.5t73.5 -22t87.5 -11.5q228 -29 448 -1q63 8 89.5 12t72.5 21.5t75 46.5zM1222 205q-8 -26 -15.5 -76.5t-14 -84t-28.5 -70t-58 -56.5q-86 -48 -189.5 -71.5t-202 -22t-201.5 18.5q-46 8 -81.5 18 t-76.5 27t-73 43.5t-52 61.5q-25 96 -57 292l6 16l18 9q223 -148 506.5 -148t507.5 148q21 -6 24 -23t-5 -45t-8 -37zM1403 1166q-26 -167 -111 -655q-5 -30 -27 -56t-43.5 -40t-54.5 -31q-252 -126 -610 -88q-248 27 -394 139q-15 12 -25.5 26.5t-17 35t-9 34t-6 39.5 t-5.5 35q-9 50 -26.5 150t-28 161.5t-23.5 147.5t-22 158q3 26 17.5 48.5t31.5 37.5t45 30t46 22.5t48 18.5q125 46 313 64q379 37 676 -50q155 -46 215 -122q16 -20 16.5 -51t-5.5 -54z" /> | ||||
| <glyph unicode="" d="M848 666q0 43 -41 66t-77 1q-43 -20 -42.5 -72.5t43.5 -70.5q39 -23 81 4t36 72zM928 682q8 -66 -36 -121t-110 -61t-119 40t-56 113q-2 49 25.5 93t72.5 64q70 31 141.5 -10t81.5 -118zM1100 1073q-20 -21 -53.5 -34t-53 -16t-63.5 -8q-155 -20 -324 0q-44 6 -63 9.5 t-52.5 16t-54.5 32.5q13 19 36 31t40 15.5t47 8.5q198 35 408 1q33 -5 51 -8.5t43 -16t39 -31.5zM1142 327q0 7 5.5 26.5t3 32t-17.5 16.5q-161 -106 -365 -106t-366 106l-12 -6l-5 -12q26 -154 41 -210q47 -81 204 -108q249 -46 428 53q34 19 49 51.5t22.5 85.5t12.5 71z M1272 1020q9 53 -8 75q-43 55 -155 88q-216 63 -487 36q-132 -12 -226 -46q-38 -15 -59.5 -25t-47 -34t-29.5 -54q8 -68 19 -138t29 -171t24 -137q1 -5 5 -31t7 -36t12 -27t22 -28q105 -80 284 -100q259 -28 440 63q24 13 39.5 23t31 29t19.5 40q48 267 80 473zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M390 1408h219v-388h364v-241h-364v-394q0 -136 14 -172q13 -37 52 -60q50 -31 117 -31q117 0 232 76v-242q-102 -48 -178 -65q-77 -19 -173 -19q-105 0 -186 27q-78 25 -138 75q-58 51 -79 105q-22 54 -22 161v539h-170v217q91 30 155 84q64 55 103 132q39 78 54 196z " /> | ||||
| <glyph unicode="" d="M1123 127v181q-88 -56 -174 -56q-51 0 -88 23q-29 17 -39 45q-11 30 -11 129v295h274v181h-274v291h-164q-11 -90 -40 -147t-78 -99q-48 -40 -116 -63v-163h127v-404q0 -78 17 -121q17 -42 59 -78q43 -37 104 -57q62 -20 140 -20q67 0 129 14q57 13 134 49zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="768" d="M765 237q8 -19 -5 -35l-350 -384q-10 -10 -23 -10q-14 0 -24 10l-355 384q-13 16 -5 35q9 19 29 19h224v1248q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1248h224q21 0 29 -19z" /> | ||||
| <glyph unicode="" horiz-adv-x="768" d="M765 1043q-9 -19 -29 -19h-224v-1248q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v1248h-224q-21 0 -29 19t5 35l350 384q10 10 23 10q14 0 24 -10l355 -384q13 -16 5 -35z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1792 736v-192q0 -14 -9 -23t-23 -9h-1248v-224q0 -21 -19 -29t-35 5l-384 350q-10 10 -10 23q0 14 10 24l384 354q16 14 35 6q19 -9 19 -29v-224h1248q14 0 23 -9t9 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1728 643q0 -14 -10 -24l-384 -354q-16 -14 -35 -6q-19 9 -19 29v224h-1248q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h1248v224q0 21 19 29t35 -5l384 -350q10 -10 10 -23z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M1393 321q-39 -125 -123 -250q-129 -196 -257 -196q-49 0 -140 32q-86 32 -151 32q-61 0 -142 -33q-81 -34 -132 -34q-152 0 -301 259q-147 261 -147 503q0 228 113 374q112 144 284 144q72 0 177 -30q104 -30 138 -30q45 0 143 34q102 34 173 34q119 0 213 -65 q52 -36 104 -100q-79 -67 -114 -118q-65 -94 -65 -207q0 -124 69 -223t158 -126zM1017 1494q0 -61 -29 -136q-30 -75 -93 -138q-54 -54 -108 -72q-37 -11 -104 -17q3 149 78 257q74 107 250 148q1 -3 2.5 -11t2.5 -11q0 -4 0.5 -10t0.5 -10z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M682 530v-651l-682 94v557h682zM682 1273v-659h-682v565zM1664 530v-786l-907 125v661h907zM1664 1408v-794h-907v669z" /> | ||||
| <glyph unicode="" horiz-adv-x="1408" d="M493 1053q16 0 27.5 11.5t11.5 27.5t-11.5 27.5t-27.5 11.5t-27 -11.5t-11 -27.5t11 -27.5t27 -11.5zM915 1053q16 0 27 11.5t11 27.5t-11 27.5t-27 11.5t-27.5 -11.5t-11.5 -27.5t11.5 -27.5t27.5 -11.5zM103 869q42 0 72 -30t30 -72v-430q0 -43 -29.5 -73t-72.5 -30 t-73 30t-30 73v430q0 42 30 72t73 30zM1163 850v-666q0 -46 -32 -78t-77 -32h-75v-227q0 -43 -30 -73t-73 -30t-73 30t-30 73v227h-138v-227q0 -43 -30 -73t-73 -30q-42 0 -72 30t-30 73l-1 227h-74q-46 0 -78 32t-32 78v666h918zM931 1255q107 -55 171 -153.5t64 -215.5 h-925q0 117 64 215.5t172 153.5l-71 131q-7 13 5 20q13 6 20 -6l72 -132q95 42 201 42t201 -42l72 132q7 12 20 6q12 -7 5 -20zM1408 767v-430q0 -43 -30 -73t-73 -30q-42 0 -72 30t-30 73v430q0 43 30 72.5t72 29.5q43 0 73 -29.5t30 -72.5z" /> | ||||
| <glyph unicode="" d="M663 1125q-11 -1 -15.5 -10.5t-8.5 -9.5q-5 -1 -5 5q0 12 19 15h10zM750 1111q-4 -1 -11.5 6.5t-17.5 4.5q24 11 32 -2q3 -6 -3 -9zM399 684q-4 1 -6 -3t-4.5 -12.5t-5.5 -13.5t-10 -13q-7 -10 -1 -12q4 -1 12.5 7t12.5 18q1 3 2 7t2 6t1.5 4.5t0.5 4v3t-1 2.5t-3 2z M1254 325q0 18 -55 42q4 15 7.5 27.5t5 26t3 21.5t0.5 22.5t-1 19.5t-3.5 22t-4 20.5t-5 25t-5.5 26.5q-10 48 -47 103t-72 75q24 -20 57 -83q87 -162 54 -278q-11 -40 -50 -42q-31 -4 -38.5 18.5t-8 83.5t-11.5 107q-9 39 -19.5 69t-19.5 45.5t-15.5 24.5t-13 15t-7.5 7 q-14 62 -31 103t-29.5 56t-23.5 33t-15 40q-4 21 6 53.5t4.5 49.5t-44.5 25q-15 3 -44.5 18t-35.5 16q-8 1 -11 26t8 51t36 27q37 3 51 -30t4 -58q-11 -19 -2 -26.5t30 -0.5q13 4 13 36v37q-5 30 -13.5 50t-21 30.5t-23.5 15t-27 7.5q-107 -8 -89 -134q0 -15 -1 -15 q-9 9 -29.5 10.5t-33 -0.5t-15.5 5q1 57 -16 90t-45 34q-27 1 -41.5 -27.5t-16.5 -59.5q-1 -15 3.5 -37t13 -37.5t15.5 -13.5q10 3 16 14q4 9 -7 8q-7 0 -15.5 14.5t-9.5 33.5q-1 22 9 37t34 14q17 0 27 -21t9.5 -39t-1.5 -22q-22 -15 -31 -29q-8 -12 -27.5 -23.5 t-20.5 -12.5q-13 -14 -15.5 -27t7.5 -18q14 -8 25 -19.5t16 -19t18.5 -13t35.5 -6.5q47 -2 102 15q2 1 23 7t34.5 10.5t29.5 13t21 17.5q9 14 20 8q5 -3 6.5 -8.5t-3 -12t-16.5 -9.5q-20 -6 -56.5 -21.5t-45.5 -19.5q-44 -19 -70 -23q-25 -5 -79 2q-10 2 -9 -2t17 -19 q25 -23 67 -22q17 1 36 7t36 14t33.5 17.5t30 17t24.5 12t17.5 2.5t8.5 -11q0 -2 -1 -4.5t-4 -5t-6 -4.5t-8.5 -5t-9 -4.5t-10 -5t-9.5 -4.5q-28 -14 -67.5 -44t-66.5 -43t-49 -1q-21 11 -63 73q-22 31 -25 22q-1 -3 -1 -10q0 -25 -15 -56.5t-29.5 -55.5t-21 -58t11.5 -63 q-23 -6 -62.5 -90t-47.5 -141q-2 -18 -1.5 -69t-5.5 -59q-8 -24 -29 -3q-32 31 -36 94q-2 28 4 56q4 19 -1 18l-4 -5q-36 -65 10 -166q5 -12 25 -28t24 -20q20 -23 104 -90.5t93 -76.5q16 -15 17.5 -38t-14 -43t-45.5 -23q8 -15 29 -44.5t28 -54t7 -70.5q46 24 7 92 q-4 8 -10.5 16t-9.5 12t-2 6q3 5 13 9.5t20 -2.5q46 -52 166 -36q133 15 177 87q23 38 34 30q12 -6 10 -52q-1 -25 -23 -92q-9 -23 -6 -37.5t24 -15.5q3 19 14.5 77t13.5 90q2 21 -6.5 73.5t-7.5 97t23 70.5q15 18 51 18q1 37 34.5 53t72.5 10.5t60 
-22.5zM626 1152 q3 17 -2.5 30t-11.5 15q-9 2 -9 -7q2 -5 5 -6q10 0 7 -15q-3 -20 8 -20q3 0 3 3zM1045 955q-2 8 -6.5 11.5t-13 5t-14.5 5.5q-5 3 -9.5 8t-7 8t-5.5 6.5t-4 4t-4 -1.5q-14 -16 7 -43.5t39 -31.5q9 -1 14.5 8t3.5 20zM867 1168q0 11 -5 19.5t-11 12.5t-9 3q-14 -1 -7 -7l4 -2 q14 -4 18 -31q0 -3 8 2zM921 1401q0 2 -2.5 5t-9 7t-9.5 6q-15 15 -24 15q-9 -1 -11.5 -7.5t-1 -13t-0.5 -12.5q-1 -4 -6 -10.5t-6 -9t3 -8.5q4 -3 8 0t11 9t15 9q1 1 9 1t15 2t9 7zM1486 60q20 -12 31 -24.5t12 -24t-2.5 -22.5t-15.5 -22t-23.5 -19.5t-30 -18.5 t-31.5 -16.5t-32 -15.5t-27 -13q-38 -19 -85.5 -56t-75.5 -64q-17 -16 -68 -19.5t-89 14.5q-18 9 -29.5 23.5t-16.5 25.5t-22 19.5t-47 9.5q-44 1 -130 1q-19 0 -57 -1.5t-58 -2.5q-44 -1 -79.5 -15t-53.5 -30t-43.5 -28.5t-53.5 -11.5q-29 1 -111 31t-146 43q-19 4 -51 9.5 t-50 9t-39.5 9.5t-33.5 14.5t-17 19.5q-10 23 7 66.5t18 54.5q1 16 -4 40t-10 42.5t-4.5 36.5t10.5 27q14 12 57 14t60 12q30 18 42 35t12 51q21 -73 -32 -106q-32 -20 -83 -15q-34 3 -43 -10q-13 -15 5 -57q2 -6 8 -18t8.5 -18t4.5 -17t1 -22q0 -15 -17 -49t-14 -48 q3 -17 37 -26q20 -6 84.5 -18.5t99.5 -20.5q24 -6 74 -22t82.5 -23t55.5 -4q43 6 64.5 28t23 48t-7.5 58.5t-19 52t-20 36.5q-121 190 -169 242q-68 74 -113 40q-11 -9 -15 15q-3 16 -2 38q1 29 10 52t24 47t22 42q8 21 26.5 72t29.5 78t30 61t39 54q110 143 124 195 q-12 112 -16 310q-2 90 24 151.5t106 104.5q39 21 104 21q53 1 106 -13.5t89 -41.5q57 -42 91.5 -121.5t29.5 -147.5q-5 -95 30 -214q34 -113 133 -218q55 -59 99.5 -163t59.5 -191q8 -49 5 -84.5t-12 -55.5t-20 -22q-10 -2 -23.5 -19t-27 -35.5t-40.5 -33.5t-61 -14 q-18 1 -31.5 5t-22.5 13.5t-13.5 15.5t-11.5 20.5t-9 19.5q-22 37 -41 30t-28 -49t7 -97q20 -70 1 -195q-10 -65 18 -100.5t73 -33t85 35.5q59 49 89.5 66.5t103.5 42.5q53 18 77 36.5t18.5 34.5t-25 28.5t-51.5 23.5q-33 11 -49.5 48t-15 72.5t15.5 47.5q1 -31 8 -56.5 t14.5 -40.5t20.5 -28.5t21 -19t21.5 -13t16.5 -9.5z" /> | ||||
| <glyph unicode="" d="M1024 36q-42 241 -140 498h-2l-2 -1q-16 -6 -43 -16.5t-101 -49t-137 -82t-131 -114.5t-103 -148l-15 11q184 -150 418 -150q132 0 256 52zM839 643q-21 49 -53 111q-311 -93 -673 -93q-1 -7 -1 -21q0 -124 44 -236.5t124 -201.5q50 89 123.5 166.5t142.5 124.5t130.5 81 t99.5 48l37 13q4 1 13 3.5t13 4.5zM732 855q-120 213 -244 378q-138 -65 -234 -186t-128 -272q302 0 606 80zM1416 536q-210 60 -409 29q87 -239 128 -469q111 75 185 189.5t96 250.5zM611 1277q-1 0 -2 -1q1 1 2 1zM1201 1132q-185 164 -433 164q-76 0 -155 -19 q131 -170 246 -382q69 26 130 60.5t96.5 61.5t65.5 57t37.5 40.5zM1424 647q-3 232 -149 410l-1 -1q-9 -12 -19 -24.5t-43.5 -44.5t-71 -60.5t-100 -65t-131.5 -64.5q25 -53 44 -95q2 -6 6.5 -17.5t7.5 -16.5q36 5 74.5 7t73.5 2t69 -1.5t64 -4t56.5 -5.5t48 -6.5t36.5 -6 t25 -4.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" d="M1173 473q0 50 -19.5 91.5t-48.5 68.5t-73 49t-82.5 34t-87.5 23l-104 24q-30 7 -44 10.5t-35 11.5t-30 16t-16.5 21t-7.5 30q0 77 144 77q43 0 77 -12t54 -28.5t38 -33.5t40 -29t48 -12q47 0 75.5 32t28.5 77q0 55 -56 99.5t-142 67.5t-182 23q-68 0 -132 -15.5 t-119.5 -47t-89 -87t-33.5 -128.5q0 -61 19 -106.5t56 -75.5t80 -48.5t103 -32.5l146 -36q90 -22 112 -36q32 -20 32 -60q0 -39 -40 -64.5t-105 -25.5q-51 0 -91.5 16t-65 38.5t-45.5 45t-46 38.5t-54 16q-50 0 -75.5 -30t-25.5 -75q0 -92 122 -157.5t291 -65.5 q73 0 140 18.5t122.5 53.5t88.5 93.5t33 131.5zM1536 256q0 -159 -112.5 -271.5t-271.5 -112.5q-130 0 -234 80q-77 -16 -150 -16q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5q0 73 16 150q-80 104 -80 234q0 159 112.5 271.5t271.5 112.5q130 0 234 -80 q77 16 150 16q143 0 273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -73 -16 -150q80 -104 80 -234z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1483 512l-587 -587q-52 -53 -127.5 -53t-128.5 53l-587 587q-53 53 -53 128t53 128l587 587q53 53 128 53t128 -53l265 -265l-398 -399l-188 188q-42 42 -99 42q-59 0 -100 -41l-120 -121q-42 -40 -42 -99q0 -58 42 -100l406 -408q30 -28 67 -37l6 -4h28q60 0 99 41 l619 619l2 -3q53 -53 53 -128t-53 -128zM1406 1138l120 -120q14 -15 14 -36t-14 -36l-730 -730q-17 -15 -37 -15v0q-4 0 -6 1q-18 2 -30 14l-407 408q-14 15 -14 36t14 35l121 120q13 15 35 15t36 -15l252 -252l574 575q15 15 36 15t36 -15z" /> | ||||
| <glyph unicode="" d="M704 192v1024q0 14 -9 23t-23 9h-480q-14 0 -23 -9t-9 -23v-1024q0 -14 9 -23t23 -9h480q14 0 23 9t9 23zM1376 576v640q0 14 -9 23t-23 9h-480q-14 0 -23 -9t-9 -23v-640q0 -14 9 -23t23 -9h480q14 0 23 9t9 23zM1536 1344v-1408q0 -26 -19 -45t-45 -19h-1408 q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h1408q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1280" d="M1280 480q0 -40 -28 -68t-68 -28q-51 0 -80 43l-227 341h-45v-132l247 -411q9 -15 9 -33q0 -26 -19 -45t-45 -19h-192v-272q0 -46 -33 -79t-79 -33h-160q-46 0 -79 33t-33 79v272h-192q-26 0 -45 19t-19 45q0 18 9 33l247 411v132h-45l-227 -341q-29 -43 -80 -43 q-40 0 -68 28t-28 68q0 29 16 53l256 384q73 107 176 107h384q103 0 176 -107l256 -384q16 -24 16 -53zM864 1280q0 -93 -65.5 -158.5t-158.5 -65.5t-158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5t158.5 -65.5t65.5 -158.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1024" d="M1024 832v-416q0 -40 -28 -68t-68 -28t-68 28t-28 68v352h-64v-912q0 -46 -33 -79t-79 -33t-79 33t-33 79v464h-64v-464q0 -46 -33 -79t-79 -33t-79 33t-33 79v912h-64v-352q0 -40 -28 -68t-68 -28t-68 28t-28 68v416q0 80 56 136t136 56h640q80 0 136 -56t56 -136z M736 1280q0 -93 -65.5 -158.5t-158.5 -65.5t-158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5t158.5 -65.5t65.5 -158.5z" /> | ||||
| <glyph unicode="" d="M773 234l350 473q16 22 24.5 59t-6 85t-61.5 79q-40 26 -83 25.5t-73.5 -17.5t-54.5 -45q-36 -40 -96 -40q-59 0 -95 40q-24 28 -54.5 45t-73.5 17.5t-84 -25.5q-46 -31 -60.5 -79t-6 -85t24.5 -59zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103 t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1472 640q0 117 -45.5 223.5t-123 184t-184 123t-223.5 45.5t-223.5 -45.5t-184 -123t-123 -184t-45.5 -223.5t45.5 -223.5t123 -184t184 -123t223.5 -45.5t223.5 45.5t184 123t123 184t45.5 223.5zM1748 363q-4 -15 -20 -20l-292 -96v-306q0 -16 -13 -26q-15 -10 -29 -4 l-292 94l-180 -248q-10 -13 -26 -13t-26 13l-180 248l-292 -94q-14 -6 -29 4q-13 10 -13 26v306l-292 96q-16 5 -20 20q-5 17 4 29l180 248l-180 248q-9 13 -4 29q4 15 20 20l292 96v306q0 16 13 26q15 10 29 4l292 -94l180 248q9 12 26 12t26 -12l180 -248l292 94 q14 6 29 -4q13 -10 13 -26v-306l292 -96q16 -5 20 -20q5 -16 -4 -29l-180 -248l180 -248q9 -12 4 -29z" /> | ||||
| <glyph unicode="" d="M1262 233q-54 -9 -110 -9q-182 0 -337 90t-245 245t-90 337q0 192 104 357q-201 -60 -328.5 -229t-127.5 -384q0 -130 51 -248.5t136.5 -204t204 -136.5t248.5 -51q144 0 273.5 61.5t220.5 171.5zM1465 318q-94 -203 -283.5 -324.5t-413.5 -121.5q-156 0 -298 61 t-245 164t-164 245t-61 298q0 153 57.5 292.5t156 241.5t235.5 164.5t290 68.5q44 2 61 -39q18 -41 -15 -72q-86 -78 -131.5 -181.5t-45.5 -218.5q0 -148 73 -273t198 -198t273 -73q118 0 228 51q41 18 72 -13q14 -14 17.5 -34t-4.5 -38z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M1088 704q0 26 -19 45t-45 19h-256q-26 0 -45 -19t-19 -45t19 -45t45 -19h256q26 0 45 19t19 45zM1664 896v-960q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v960q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1728 1344v-256q0 -26 -19 -45t-45 -19h-1536 q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1536q26 0 45 -19t19 -45z" /> | ||||
| <glyph unicode="" horiz-adv-x="1664" d="M1632 576q0 -26 -19 -45t-45 -19h-224q0 -171 -67 -290l208 -209q19 -19 19 -45t-19 -45q-18 -19 -45 -19t-45 19l-198 197q-5 -5 -15 -13t-42 -28.5t-65 -36.5t-82 -29t-97 -13v896h-128v-896q-51 0 -101.5 13.5t-87 33t-66 39t-43.5 32.5l-15 14l-183 -207 q-20 -21 -48 -21q-24 0 -43 16q-19 18 -20.5 44.5t15.5 46.5l202 227q-58 114 -58 274h-224q-26 0 -45 19t-19 45t19 45t45 19h224v294l-173 173q-19 19 -19 45t19 45t45 19t45 -19l173 -173h844l173 173q19 19 45 19t45 -19t19 -45t-19 -45l-173 -173v-294h224q26 0 45 -19 t19 -45zM1152 1152h-640q0 133 93.5 226.5t226.5 93.5t226.5 -93.5t93.5 -226.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M1917 1016q23 -64 -150 -294q-24 -32 -65 -85q-78 -100 -90 -131q-17 -41 14 -81q17 -21 81 -82h1l1 -1l1 -1l2 -2q141 -131 191 -221q3 -5 6.5 -12.5t7 -26.5t-0.5 -34t-25 -27.5t-59 -12.5l-256 -4q-24 -5 -56 5t-52 22l-20 12q-30 21 -70 64t-68.5 77.5t-61 58 t-56.5 15.5q-3 -1 -8 -3.5t-17 -14.5t-21.5 -29.5t-17 -52t-6.5 -77.5q0 -15 -3.5 -27.5t-7.5 -18.5l-4 -5q-18 -19 -53 -22h-115q-71 -4 -146 16.5t-131.5 53t-103 66t-70.5 57.5l-25 24q-10 10 -27.5 30t-71.5 91t-106 151t-122.5 211t-130.5 272q-6 16 -6 27t3 16l4 6 q15 19 57 19l274 2q12 -2 23 -6.5t16 -8.5l5 -3q16 -11 24 -32q20 -50 46 -103.5t41 -81.5l16 -29q29 -60 56 -104t48.5 -68.5t41.5 -38.5t34 -14t27 5q2 1 5 5t12 22t13.5 47t9.5 81t0 125q-2 40 -9 73t-14 46l-6 12q-25 34 -85 43q-13 2 5 24q17 19 38 30q53 26 239 24 q82 -1 135 -13q20 -5 33.5 -13.5t20.5 -24t10.5 -32t3.5 -45.5t-1 -55t-2.5 -70.5t-1.5 -82.5q0 -11 -1 -42t-0.5 -48t3.5 -40.5t11.5 -39t22.5 -24.5q8 -2 17 -4t26 11t38 34.5t52 67t68 107.5q60 104 107 225q4 10 10 17.5t11 10.5l4 3l5 2.5t13 3t20 0.5l288 2 q39 5 64 -2.5t31 -16.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" d="M675 252q21 34 11 69t-45 50q-34 14 -73 1t-60 -46q-22 -34 -13 -68.5t43 -50.5t74.5 -2.5t62.5 47.5zM769 373q8 13 3.5 26.5t-17.5 18.5q-14 5 -28.5 -0.5t-21.5 -18.5q-17 -31 13 -45q14 -5 29 0.5t22 18.5zM943 266q-45 -102 -158 -150t-224 -12 q-107 34 -147.5 126.5t6.5 187.5q47 93 151.5 139t210.5 19q111 -29 158.5 -119.5t2.5 -190.5zM1255 426q-9 96 -89 170t-208.5 109t-274.5 21q-223 -23 -369.5 -141.5t-132.5 -264.5q9 -96 89 -170t208.5 -109t274.5 -21q223 23 369.5 141.5t132.5 264.5zM1563 422 q0 -68 -37 -139.5t-109 -137t-168.5 -117.5t-226 -83t-270.5 -31t-275 33.5t-240.5 93t-171.5 151t-65 199.5q0 115 69.5 245t197.5 258q169 169 341.5 236t246.5 -7q65 -64 20 -209q-4 -14 -1 -20t10 -7t14.5 0.5t13.5 3.5l6 2q139 59 246 59t153 -61q45 -63 0 -178 q-2 -13 -4.5 -20t4.5 -12.5t12 -7.5t17 -6q57 -18 103 -47t80 -81.5t34 -116.5zM1489 1046q42 -47 54.5 -108.5t-6.5 -117.5q-8 -23 -29.5 -34t-44.5 -4q-23 8 -34 29.5t-4 44.5q20 63 -24 111t-107 35q-24 -5 -45 8t-25 37q-5 24 8 44.5t37 25.5q60 13 119 -5.5t101 -65.5z M1670 1209q87 -96 112.5 -222.5t-13.5 -241.5q-9 -27 -34 -40t-52 -4t-40 34t-5 52q28 82 10 172t-80 158q-62 69 -148 95.5t-173 8.5q-28 -6 -52 9.5t-30 43.5t9.5 51.5t43.5 29.5q123 26 244 -11.5t208 -134.5z" /> | ||||
| <glyph unicode="" horiz-adv-x="1920" d="M805 163q-122 -67 -261 -67q-141 0 -261 67q98 61 167 149t94 191q25 -103 94 -191t167 -149zM453 1176v-344q0 -179 -89.5 -326t-234.5 -217q-129 152 -129 351q0 200 129.5 352t323.5 184zM958 991q-128 -152 -128 -351q0 -201 128 -351q-145 70 -234.5 218t-89.5 328 v341q196 -33 324 -185zM1638 163q-122 -67 -261 -67q-141 0 -261 67q98 61 167 149t94 191q25 -103 94 -191t167 -149zM1286 1176v-344q0 -179 -91 -326t-237 -217v0q133 154 133 351q0 195 -133 351q129 151 328 185zM1920 640q0 -201 -129 -351q-145 70 -234.5 218 t-89.5 328v341q194 -32 323.5 -184t129.5 -352z" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" /> | ||||
| <glyph unicode="" horiz-adv-x="1792" /> | ||||
| </font> | ||||
| </defs></svg>  | ||||
| After Width: | Height: | Size: 193 KiB | 
							
								
								
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.ttf
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.ttf
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.woff
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.woff
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										16
									
								
								docs/_themes/sphinx_rtd_theme/static/js/theme.js
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										16
									
								
								docs/_themes/sphinx_rtd_theme/static/js/theme.js
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,16 @@ | ||||
$( document ).ready(function() {
  // Shift nav in mobile when clicking the menu.
  $("[data-toggle='wy-nav-top']").click(function() {
    $("[data-toggle='wy-nav-shift']").toggleClass("shift");
    $("[data-toggle='rst-versions']").toggleClass("shift");
  });
  // Close menu when you click a link.
  $(".wy-menu-vertical .current ul li a").click(function() {
    $("[data-toggle='wy-nav-shift']").removeClass("shift");
    $("[data-toggle='rst-versions']").toggleClass("shift");
  });
  // Toggle the versions panel open/closed when the current-version bar is clicked.
  $("[data-toggle='rst-current-version']").click(function() {
    $("[data-toggle='rst-versions']").toggleClass("shift-up");
  });
  // Wrap docutils tables (except field lists) so wide tables can scroll
  // horizontally. Fix: the original selector was missing the closing ')'
  // in ':not(.field-list', which is an invalid selector expression —
  // jQuery throws on it, so no table was ever wrapped.
  $("table.docutils:not(.field-list)").wrap("<div class='wy-table-responsive'></div>");
});
							
								
								
									
										8
									
								
								docs/_themes/sphinx_rtd_theme/theme.conf
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										8
									
								
								docs/_themes/sphinx_rtd_theme/theme.conf
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,8 @@ | ||||
| [theme] | ||||
| inherit = basic | ||||
| stylesheet = css/theme.css | ||||
|  | ||||
| [options] | ||||
| typekit_id = hiw1hhg | ||||
| analytics_id = | ||||
| canonical_url = | ||||
							
								
								
									
										37
									
								
								docs/_themes/sphinx_rtd_theme/versions.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										37
									
								
								docs/_themes/sphinx_rtd_theme/versions.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,37 @@ | ||||
| {% if READTHEDOCS %} | ||||
| {# Add rst-badge after rst-versions for small badge style. #} | ||||
|   <div class="rst-versions" data-toggle="rst-versions"> | ||||
|     <span class="rst-current-version" data-toggle="rst-current-version"> | ||||
|       <span class="icon icon-book"> Read the Docs</span> | ||||
|       v: {{ current_version }}  | ||||
|       <span class="icon icon-caret-down"></span> | ||||
|     </span> | ||||
|     <div class="rst-other-versions"> | ||||
|       <dl> | ||||
|         <dt>Versions</dt> | ||||
|         {% for slug, url in versions %} | ||||
|           <dd><a href="{{ url }}">{{ slug }}</a></dd> | ||||
|         {% endfor %} | ||||
|       </dl> | ||||
|       <dl> | ||||
|         <dt>Downloads</dt> | ||||
|         {% for type, url in downloads %} | ||||
|           <dd><a href="{{ url }}">{{ type }}</a></dd> | ||||
|         {% endfor %} | ||||
|       </dl> | ||||
|       <dl> | ||||
|         <dt>On Read the Docs</dt> | ||||
|           <dd> | ||||
|             <a href="//{{ PRODUCTION_DOMAIN }}/projects/{{ slug }}/?fromdocs={{ slug }}">Project Home</a> | ||||
|           </dd> | ||||
|           <dd> | ||||
|             <a href="//{{ PRODUCTION_DOMAIN }}/builds/{{ slug }}/?fromdocs={{ slug }}">Builds</a> | ||||
|           </dd> | ||||
|       </dl> | ||||
|       <hr/> | ||||
|       Free document hosting provided by <a href="http://www.readthedocs.org">Read the Docs</a>. | ||||
|  | ||||
|     </div> | ||||
|   </div> | ||||
| {% endif %} | ||||
|  | ||||
| @@ -34,41 +34,72 @@ Documents | ||||
| .. autoclass:: mongoengine.ValidationError | ||||
|   :members: | ||||
|  | ||||
| Context Managers | ||||
| ================ | ||||
|  | ||||
| .. autoclass:: mongoengine.context_managers.switch_db | ||||
| .. autoclass:: mongoengine.context_managers.switch_collection | ||||
| .. autoclass:: mongoengine.context_managers.no_dereference | ||||
| .. autoclass:: mongoengine.context_managers.query_counter | ||||
|  | ||||
| Querying | ||||
| ======== | ||||
|  | ||||
| .. autoclass:: mongoengine.queryset.QuerySet | ||||
|    :members: | ||||
| .. automodule:: mongoengine.queryset | ||||
|     :synopsis: Queryset level operations | ||||
|  | ||||
|    .. automethod:: mongoengine.queryset.QuerySet.__call__ | ||||
|     .. autoclass:: mongoengine.queryset.QuerySet | ||||
|       :members: | ||||
|       :inherited-members: | ||||
|  | ||||
| .. autofunction:: mongoengine.queryset.queryset_manager | ||||
|       .. automethod:: QuerySet.__call__ | ||||
|  | ||||
|     .. autoclass:: mongoengine.queryset.QuerySetNoCache | ||||
|       :members: | ||||
|  | ||||
|        .. automethod:: mongoengine.queryset.QuerySetNoCache.__call__ | ||||
|  | ||||
|     .. autofunction:: mongoengine.queryset.queryset_manager | ||||
|  | ||||
| Fields | ||||
| ====== | ||||
|  | ||||
| .. autoclass:: mongoengine.BinaryField | ||||
| .. autoclass:: mongoengine.BooleanField | ||||
| .. autoclass:: mongoengine.ComplexDateTimeField | ||||
| .. autoclass:: mongoengine.DateTimeField | ||||
| .. autoclass:: mongoengine.DecimalField | ||||
| .. autoclass:: mongoengine.DictField | ||||
| .. autoclass:: mongoengine.DynamicField | ||||
| .. autoclass:: mongoengine.EmailField | ||||
| .. autoclass:: mongoengine.EmbeddedDocumentField | ||||
| .. autoclass:: mongoengine.FileField | ||||
| .. autoclass:: mongoengine.FloatField | ||||
| .. autoclass:: mongoengine.GenericEmbeddedDocumentField | ||||
| .. autoclass:: mongoengine.GenericReferenceField | ||||
| .. autoclass:: mongoengine.GeoPointField | ||||
| .. autoclass:: mongoengine.ImageField | ||||
| .. autoclass:: mongoengine.IntField | ||||
| .. autoclass:: mongoengine.ListField | ||||
| .. autoclass:: mongoengine.MapField | ||||
| .. autoclass:: mongoengine.ObjectIdField | ||||
| .. autoclass:: mongoengine.ReferenceField | ||||
| .. autoclass:: mongoengine.SequenceField | ||||
| .. autoclass:: mongoengine.SortedListField | ||||
| .. autoclass:: mongoengine.StringField | ||||
| .. autoclass:: mongoengine.URLField | ||||
| .. autoclass:: mongoengine.UUIDField | ||||
| .. autoclass:: mongoengine.base.fields.BaseField | ||||
| .. autoclass:: mongoengine.fields.StringField | ||||
| .. autoclass:: mongoengine.fields.URLField | ||||
| .. autoclass:: mongoengine.fields.EmailField | ||||
| .. autoclass:: mongoengine.fields.IntField | ||||
| .. autoclass:: mongoengine.fields.LongField | ||||
| .. autoclass:: mongoengine.fields.FloatField | ||||
| .. autoclass:: mongoengine.fields.DecimalField | ||||
| .. autoclass:: mongoengine.fields.BooleanField | ||||
| .. autoclass:: mongoengine.fields.DateTimeField | ||||
| .. autoclass:: mongoengine.fields.ComplexDateTimeField | ||||
| .. autoclass:: mongoengine.fields.EmbeddedDocumentField | ||||
| .. autoclass:: mongoengine.fields.GenericEmbeddedDocumentField | ||||
| .. autoclass:: mongoengine.fields.DynamicField | ||||
| .. autoclass:: mongoengine.fields.ListField | ||||
| .. autoclass:: mongoengine.fields.SortedListField | ||||
| .. autoclass:: mongoengine.fields.DictField | ||||
| .. autoclass:: mongoengine.fields.MapField | ||||
| .. autoclass:: mongoengine.fields.ReferenceField | ||||
| .. autoclass:: mongoengine.fields.GenericReferenceField | ||||
| .. autoclass:: mongoengine.fields.BinaryField | ||||
| .. autoclass:: mongoengine.fields.FileField | ||||
| .. autoclass:: mongoengine.fields.ImageField | ||||
| .. autoclass:: mongoengine.fields.SequenceField | ||||
| .. autoclass:: mongoengine.fields.ObjectIdField | ||||
| .. autoclass:: mongoengine.fields.UUIDField | ||||
| .. autoclass:: mongoengine.fields.GeoPointField | ||||
| .. autoclass:: mongoengine.fields.PointField | ||||
| .. autoclass:: mongoengine.fields.LineStringField | ||||
| .. autoclass:: mongoengine.fields.PolygonField | ||||
| .. autoclass:: mongoengine.fields.GridFSError | ||||
| .. autoclass:: mongoengine.fields.GridFSProxy | ||||
| .. autoclass:: mongoengine.fields.ImageGridFsProxy | ||||
| .. autoclass:: mongoengine.fields.ImproperlyConfigured | ||||
|  | ||||
| Misc | ||||
| ==== | ||||
|  | ||||
| .. autofunction:: mongoengine.common._import_class | ||||
|   | ||||
| @@ -2,6 +2,200 @@ | ||||
| Changelog | ||||
| ========= | ||||
|  | ||||
| Changes in 0.8.7 | ||||
| ================ | ||||
| - Calling reload on deleted / nonexistent documents raises DoesNotExist (#538) | ||||
| - Stop ensure_indexes running on secondaries (#555) | ||||
| - Fix circular import issue with django auth (#531) (#545) | ||||
|  | ||||
| Changes in 0.8.6 | ||||
| ================ | ||||
| - Fix django auth import (#531) | ||||
|  | ||||
| Changes in 0.8.5 | ||||
| ================ | ||||
| - Fix multi level nested fields getting marked as changed (#523) | ||||
| - Django 1.6 login fix (#522) (#527) | ||||
| - Django 1.6 session fix (#509) | ||||
| - EmbeddedDocument._instance is now set when setting the attribute (#506) | ||||
| - Fixed EmbeddedDocument with ReferenceField equality issue (#502) | ||||
| - Fixed GenericReferenceField serialization order (#499) | ||||
| - Fixed count and none bug (#498) | ||||
| - Fixed bug with .only() and DictField with digit keys (#496) | ||||
| - Added user_permissions to Django User object (#491, #492) | ||||
| - Fix updating Geo Location fields (#488) | ||||
| - Fix handling invalid dict field value (#485) | ||||
| - Added app_label to MongoUser (#484) | ||||
| - Use defaults when host and port are passed as None (#483) | ||||
| - Fixed distinct casting issue with ListField of EmbeddedDocuments (#470) | ||||
| - Fixed Django 1.6 sessions (#454, #480) | ||||
|  | ||||
| Changes in 0.8.4 | ||||
| ================ | ||||
| - Remove database name necessity in uri connection schema (#452) | ||||
| - Fixed "$pull" semantics for nested ListFields (#447) | ||||
| - Allow fields to be named the same as query operators (#445) | ||||
| - Updated field filter logic - can now exclude subclass fields (#443) | ||||
| - Fixed dereference issue with embedded listfield referencefields (#439) | ||||
| - Fixed slice when using inheritance causing fields to be excluded (#437) | ||||
| - Fixed ._get_db() attribute after a Document.switch_db() (#441) | ||||
| - Dynamic Fields store and recompose Embedded Documents / Documents correctly (#449) | ||||
| - Handle dynamic fieldnames that look like digits (#434) | ||||
| - Added get_user_document and improve mongo_auth module (#423) | ||||
| - Added str representation of GridFSProxy (#424) | ||||
| - Update transform to handle docs erroneously passed to unset (#416) | ||||
| - Fixed indexing - turn off _cls (#414) | ||||
| - Fixed dereference threading issue in ComplexField.__get__ (#412) | ||||
| - Fixed QuerySetNoCache.count() caching (#410) | ||||
| - Don't follow references in _get_changed_fields (#422, #417) | ||||
| - Allow args and kwargs to be passed through to_json (#420) | ||||
|  | ||||
| Changes in 0.8.3 | ||||
| ================ | ||||
| - Fixed EmbeddedDocuments with `id` also storing `_id` (#402) | ||||
| - Added get_proxy_object helper to filefields (#391) | ||||
| - Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365) | ||||
| - Fixed sum and average mapreduce dot notation support (#375, #376, #393) | ||||
| - Fixed as_pymongo to return the id (#386) | ||||
| - Document.select_related() now respects `db_alias` (#377) | ||||
| - Reload uses shard_key if applicable (#384) | ||||
| - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) | ||||
|  | ||||
|   **Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3 | ||||
|  | ||||
| - Fixed pickling dynamic documents `_dynamic_fields` (#387) | ||||
| - Fixed ListField setslice and delslice dirty tracking (#390) | ||||
| - Added Django 1.5 PY3 support (#392) | ||||
| - Added match ($elemMatch) support for EmbeddedDocuments (#379) | ||||
| - Fixed weakref being valid after reload (#374) | ||||
| - Fixed queryset.get() respecting no_dereference (#373) | ||||
| - Added full_result kwarg to update (#380) | ||||
|  | ||||
|  | ||||
|  | ||||
| Changes in 0.8.2 | ||||
| ================ | ||||
| - Added compare_indexes helper (#361) | ||||
| - Fixed cascading saves which weren't turned off as planned (#291) | ||||
| - Fixed Datastructures so instances are a Document or EmbeddedDocument (#363) | ||||
| - Improved cascading saves write performance (#361) | ||||
| - Fixed ambiguity and differing behaviour regarding field defaults (#349) | ||||
| - ImageFields now include PIL error messages if invalid error (#353) | ||||
| - Added lock when calling doc.Delete() for when signals have no sender (#350) | ||||
| - Reload forces read preference to be PRIMARY (#355) | ||||
| - Querysets are now less restrictive when querying duplicate fields (#332, #333) | ||||
| - FileField now honouring db_alias (#341) | ||||
| - Removed customised __set__ change tracking in ComplexBaseField (#344) | ||||
| - Removed unused var in _get_changed_fields (#347) | ||||
| - Added pre_save_post_validation signal (#345) | ||||
| - DateTimeField now auto converts valid datetime isostrings into dates (#343) | ||||
| - DateTimeField now uses dateutil for parsing if available (#343) | ||||
| - Fixed Doc.objects(read_preference=X) not setting read preference (#352) | ||||
| - Django session ttl index expiry fixed (#329) | ||||
| - Fixed pickle.loads (#342) | ||||
| - Documentation fixes | ||||
|  | ||||
| Changes in 0.8.1 | ||||
| ================ | ||||
| - Fixed Python 2.6 django auth importlib issue (#326) | ||||
| - Fixed pickle unsaved document regression (#327) | ||||
|  | ||||
| Changes in 0.8.0 | ||||
| ================ | ||||
| - Fixed querying ReferenceField custom_id (#317) | ||||
| - Fixed pickle issues with collections (#316) | ||||
| - Added `get_next_value` preview for SequenceFields (#319) | ||||
| - Added no_sub_classes context manager and queryset helper (#312) | ||||
| - Querysets now utilises a local cache | ||||
| - Changed __len__ behaviour in the queryset (#247, #311) | ||||
| - Fixed querying string versions of ObjectIds issue with ReferenceField (#307) | ||||
| - Added $setOnInsert support for upserts (#308) | ||||
| - Upserts now possible with just query parameters (#309) | ||||
| - Upserting is the only way to ensure docs are saved correctly (#306) | ||||
| - Fixed register_delete_rule inheritance issue | ||||
| - Fix cloning of sliced querysets (#303) | ||||
| - Fixed update_one write concern (#302) | ||||
| - Updated minimum requirement for pymongo to 2.5 | ||||
| - Add support for new geojson fields, indexes and queries (#299) | ||||
| - If values can't be compared, mark as changed (#287) | ||||
| - Ensure as_pymongo() and to_json honour only() and exclude() (#293) | ||||
| - Document serialization uses field order to ensure a strict order is set (#296) | ||||
| - DecimalField now stores as float not string (#289) | ||||
| - UUIDField now stores as a binary by default (#292) | ||||
| - Added Custom User Model for Django 1.5 (#285) | ||||
| - Cascading saves now default to off (#291) | ||||
| - ReferenceField now store ObjectId's by default rather than DBRef (#290) | ||||
| - Added ImageField support for inline replacements (#86) | ||||
| - Added SequenceField.set_next_value(value) helper (#159) | ||||
| - Updated .only() behaviour - now like exclude it is chainable (#202) | ||||
| - Added with_limit_and_skip support to count() (#235) | ||||
| - Objects queryset manager now inherited (#256) | ||||
| - Updated connection to use MongoClient (#262, #274) | ||||
| - Fixed db_alias and inherited Documents (#143) | ||||
| - Documentation update for document errors (#124) | ||||
| - Deprecated `get_or_create` (#35) | ||||
| - Updated inheritable objects created by upsert now contain _cls (#118) | ||||
| - Added support for creating documents with embedded documents in a single operation (#6) | ||||
| - Added to_json and from_json to Document (#1) | ||||
| - Added to_json and from_json to QuerySet (#131) | ||||
| - Updated index creation now tied to Document class (#102) | ||||
| - Added none() to queryset (#127) | ||||
| - Updated SequenceFields to allow post processing of the calculated counter value (#141) | ||||
| - Added clean method to documents for pre validation data cleaning (#60) | ||||
| - Added support for setting read preference at a query level (#157) | ||||
| - Added _instance to EmbeddedDocuments pointing to the parent (#139) | ||||
| - Inheritance is off by default (#122) | ||||
| - Remove _types and just use _cls for inheritance (#148) | ||||
| - Only allow QNode instances to be passed as query objects (#199) | ||||
| - Dynamic fields are now validated on save (#153) (#154) | ||||
| - Added support for multiple slices and made slicing chainable. (#170) (#190) (#191) | ||||
| - Fixed GridFSProxy __getattr__ behaviour (#196) | ||||
| - Fix Django timezone support (#151) | ||||
| - Simplified Q objects, removed QueryTreeTransformerVisitor (#98) (#171) | ||||
| - FileFields now copyable (#198) | ||||
| - Querysets now return clones and are no longer edit in place (#56) | ||||
| - Added support for $maxDistance (#179) | ||||
| - Uses getlasterror to test created on updated saves (#163) | ||||
| - Fixed inheritance and unique index creation (#140) | ||||
| - Fixed reverse delete rule with inheritance (#197) | ||||
| - Fixed validation for GenericReferences which haven't been dereferenced | ||||
| - Added switch_db context manager (#106) | ||||
| - Added switch_db method to document instances (#106) | ||||
| - Added no_dereference context manager (#82) (#61) | ||||
| - Added switch_collection context manager (#220) | ||||
| - Added switch_collection method to document instances (#220) | ||||
| - Added support for compound primary keys (#149) (#121) | ||||
| - Fixed overriding objects with custom manager (#58) | ||||
| - Added no_dereference method for querysets (#82) (#61) | ||||
| - Undefined data should not override instance methods (#49) | ||||
| - Added Django Group and Permission (#142) | ||||
| - Added Doc class and pk to Validation messages (#69) | ||||
| - Fixed Documents deleted via a queryset don't call any signals (#105) | ||||
| - Added the "get_decoded" method to the MongoSession class (#216) | ||||
| - Fixed invalid choices error bubbling (#214) | ||||
| - Updated Save so it calls $set and $unset in a single operation (#211) | ||||
| - Fixed inner queryset looping (#204) | ||||
|  | ||||
| Changes in 0.7.10 | ||||
| ================= | ||||
| - Fix UnicodeEncodeError for dbref (#278) | ||||
| - Allow construction using positional parameters (#268) | ||||
| - Updated EmailField length to support long domains (#243) | ||||
| - Added 64-bit integer support (#251) | ||||
| - Added Django sessions TTL support (#224) | ||||
| - Fixed issue with numerical keys in MapField(EmbeddedDocumentField()) (#240) | ||||
| - Fixed clearing _changed_fields for complex nested embedded documents (#237, #239, #242) | ||||
| - Added "id" back to _data dictionary (#255) | ||||
| - Only mark a field as changed if the value has changed (#258) | ||||
| - Explicitly check for Document instances when dereferencing (#261) | ||||
| - Fixed order_by chaining issue (#265) | ||||
| - Added dereference support for tuples (#250) | ||||
| - Resolve field name to db field name when using distinct(#260, #264, #269) | ||||
| - Added kwargs to doc.save to help interop with django (#223, #270) | ||||
| - Fixed cloning querysets in PY3 | ||||
| - Int fields no longer unset in save when changed to 0 (#272) | ||||
| - Fixed ReferenceField query chaining bug fixed (#254) | ||||
|  | ||||
| Changes in 0.7.9 | ||||
| ================ | ||||
| - Better fix handling for old style _types | ||||
| @@ -9,12 +203,12 @@ Changes in 0.7.9 | ||||
|  | ||||
| Changes in 0.7.8 | ||||
| ================ | ||||
| - Fix sequence fields in embedded documents (MongoEngine/mongoengine#166) | ||||
| - Fix query chaining with .order_by() (MongoEngine/mongoengine#176) | ||||
| - Added optional encoding and collection config for Django sessions (MongoEngine/mongoengine#180, MongoEngine/mongoengine#181, MongoEngine/mongoengine#183) | ||||
| - Fixed EmailField so can add extra validation (MongoEngine/mongoengine#173, MongoEngine/mongoengine#174, MongoEngine/mongoengine#187) | ||||
| - Fixed bulk inserts can now handle custom pk's (MongoEngine/mongoengine#192) | ||||
| - Added as_pymongo method to return raw or cast results from pymongo (MongoEngine/mongoengine#193) | ||||
| - Fix sequence fields in embedded documents (#166) | ||||
| - Fix query chaining with .order_by() (#176) | ||||
| - Added optional encoding and collection config for Django sessions (#180, #181, #183) | ||||
| - Fixed EmailField so can add extra validation (#173, #174, #187) | ||||
| - Fixed bulk inserts can now handle custom pk's (#192) | ||||
| - Added as_pymongo method to return raw or cast results from pymongo (#193) | ||||
|  | ||||
| Changes in 0.7.7 | ||||
| ================ | ||||
| @@ -22,70 +216,70 @@ Changes in 0.7.7 | ||||
|  | ||||
| Changes in 0.7.6 | ||||
| ================ | ||||
| - Unicode fix for repr (MongoEngine/mongoengine#133) | ||||
| - Allow updates with match operators (MongoEngine/mongoengine#144) | ||||
| - Updated URLField - now can have a override the regex (MongoEngine/mongoengine#136) | ||||
| - Unicode fix for repr (#133) | ||||
| - Allow updates with match operators (#144) | ||||
| - Updated URLField - now can override the regex (#136) | ||||
| - Allow Django AuthenticationBackends to work with Django user (hmarr/mongoengine#573) | ||||
| - Fixed reload issue with ReferenceField where dbref=False (MongoEngine/mongoengine#138) | ||||
| - Fixed reload issue with ReferenceField where dbref=False (#138) | ||||
|  | ||||
| Changes in 0.7.5 | ||||
| ================ | ||||
| - ReferenceFields with dbref=False use ObjectId instead of strings (MongoEngine/mongoengine#134) | ||||
|   See ticket for upgrade notes (https://github.com/MongoEngine/mongoengine/issues/134) | ||||
| - ReferenceFields with dbref=False use ObjectId instead of strings (#134) | ||||
|   See ticket for upgrade notes (#134) | ||||
|  | ||||
| Changes in 0.7.4 | ||||
| ================ | ||||
| - Fixed index inheritance issues - firmed up testcases (MongoEngine/mongoengine#123) (MongoEngine/mongoengine#125) | ||||
| - Fixed index inheritance issues - firmed up testcases (#123) (#125) | ||||
|  | ||||
| Changes in 0.7.3 | ||||
| ================ | ||||
| - Reverted EmbeddedDocuments meta handling - now can turn off inheritance (MongoEngine/mongoengine#119) | ||||
| - Reverted EmbeddedDocuments meta handling - now can turn off inheritance (#119) | ||||
|  | ||||
| Changes in 0.7.2 | ||||
| ================ | ||||
| - Update index spec generation so its not destructive (MongoEngine/mongoengine#113) | ||||
| - Update index spec generation so its not destructive (#113) | ||||
|  | ||||
| Changes in 0.7.1 | ||||
| ================= | ||||
| - Fixed index spec inheritance (MongoEngine/mongoengine#111) | ||||
| - Fixed index spec inheritance (#111) | ||||
|  | ||||
| Changes in 0.7.0 | ||||
| ================= | ||||
| - Updated queryset.delete so you can use with skip / limit (MongoEngine/mongoengine#107) | ||||
| - Updated index creation allows kwargs to be passed through refs (MongoEngine/mongoengine#104) | ||||
| - Fixed Q object merge edge case (MongoEngine/mongoengine#109) | ||||
| - Updated queryset.delete so you can use with skip / limit (#107) | ||||
| - Updated index creation allows kwargs to be passed through refs (#104) | ||||
| - Fixed Q object merge edge case (#109) | ||||
| - Fixed reloading on sharded documents (hmarr/mongoengine#569) | ||||
| - Added NotUniqueError for duplicate keys (MongoEngine/mongoengine#62) | ||||
| - Added custom collection / sequence naming for SequenceFields (MongoEngine/mongoengine#92) | ||||
| - Fixed UnboundLocalError in composite index with pk field (MongoEngine/mongoengine#88) | ||||
| - Added NotUniqueError for duplicate keys (#62) | ||||
| - Added custom collection / sequence naming for SequenceFields (#92) | ||||
| - Fixed UnboundLocalError in composite index with pk field (#88) | ||||
| - Updated ReferenceField's to optionally store ObjectId strings | ||||
|   this will become the default in 0.8 (MongoEngine/mongoengine#89) | ||||
|   this will become the default in 0.8 (#89) | ||||
| - Added FutureWarning - save will default to `cascade=False` in 0.8 | ||||
| - Added example of indexing embedded document fields (MongoEngine/mongoengine#75) | ||||
| - Fixed ImageField resizing when forcing size (MongoEngine/mongoengine#80) | ||||
| - Add flexibility for fields handling bad data (MongoEngine/mongoengine#78) | ||||
| - Added example of indexing embedded document fields (#75) | ||||
| - Fixed ImageField resizing when forcing size (#80) | ||||
| - Add flexibility for fields handling bad data (#78) | ||||
| - Embedded Documents no longer handle meta definitions | ||||
| - Use weakref proxies in base lists / dicts (MongoEngine/mongoengine#74) | ||||
| - Use weakref proxies in base lists / dicts (#74) | ||||
| - Improved queryset filtering (hmarr/mongoengine#554) | ||||
| - Fixed Dynamic Documents and Embedded Documents (hmarr/mongoengine#561) | ||||
| - Fixed abstract classes and shard keys (MongoEngine/mongoengine#64) | ||||
| - Fixed abstract classes and shard keys (#64) | ||||
| - Fixed Python 2.5 support | ||||
| - Added Python 3 support (thanks to Laine Heron) | ||||
|  | ||||
| Changes in 0.6.20 | ||||
| ================= | ||||
| - Added support for distinct and db_alias (MongoEngine/mongoengine#59) | ||||
| - Added support for distinct and db_alias (#59) | ||||
| - Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554) | ||||
| - Fixed BinaryField lookup re (MongoEngine/mongoengine#48) | ||||
| - Fixed BinaryField lookup re (#48) | ||||
|  | ||||
| Changes in 0.6.19 | ||||
| ================= | ||||
|  | ||||
| - Added Binary support to UUID (MongoEngine/mongoengine#47) | ||||
| - Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46) | ||||
| - Fixed BinaryField python value issue (MongoEngine/mongoengine#48) | ||||
| - Fixed SequenceField non numeric value lookup (MongoEngine/mongoengine#41) | ||||
| - Fixed queryset manager issue (MongoEngine/mongoengine#52) | ||||
| - Added Binary support to UUID (#47) | ||||
| - Fixed MapField lookup for fields without declared lookups (#46) | ||||
| - Fixed BinaryField python value issue (#48) | ||||
| - Fixed SequenceField non numeric value lookup (#41) | ||||
| - Fixed queryset manager issue (#52) | ||||
| - Fixed FileField comparision (hmarr/mongoengine#547) | ||||
|  | ||||
| Changes in 0.6.18 | ||||
|   | ||||
| @@ -45,7 +45,7 @@ print 'ALL POSTS' | ||||
| print | ||||
| for post in Post.objects: | ||||
|     print post.title | ||||
|     print '=' * len(post.title) | ||||
|     print '=' * post.title.count() | ||||
|  | ||||
|     if isinstance(post, TextPost): | ||||
|         print post.content | ||||
|   | ||||
							
								
								
									
										24
									
								
								docs/conf.py
									
									
									
									
									
								
							
							
						
						
									
										24
									
								
								docs/conf.py
									
									
									
									
									
								
							| @@ -16,7 +16,7 @@ import sys, os | ||||
| # If extensions (or modules to document with autodoc) are in another directory, | ||||
| # add these directories to sys.path here. If the directory is relative to the | ||||
| # documentation root, use os.path.abspath to make it absolute, like shown here. | ||||
| sys.path.append(os.path.abspath('..')) | ||||
| sys.path.insert(0, os.path.abspath('..')) | ||||
|  | ||||
| # -- General configuration ----------------------------------------------------- | ||||
|  | ||||
| @@ -38,7 +38,7 @@ master_doc = 'index' | ||||
|  | ||||
| # General information about the project. | ||||
| project = u'MongoEngine' | ||||
| copyright = u'2009-2012, MongoEngine Authors' | ||||
| copyright = u'2009, MongoEngine Authors' | ||||
|  | ||||
| # The version info for the project you're documenting, acts as replacement for | ||||
| # |version| and |release|, also used in various other places throughout the | ||||
| @@ -92,7 +92,7 @@ pygments_style = 'sphinx' | ||||
|  | ||||
| # The theme to use for HTML and HTML Help pages.  Major themes that come with | ||||
| # Sphinx are currently 'default' and 'sphinxdoc'. | ||||
| html_theme = 'nature' | ||||
| html_theme = 'sphinx_rtd_theme' | ||||
|  | ||||
| # Theme options are theme-specific and customize the look and feel of a theme | ||||
| # further.  For a list of options available for each theme, see the | ||||
| @@ -116,7 +116,7 @@ html_theme_path = ['_themes'] | ||||
| # The name of an image file (within the static path) to use as favicon of the | ||||
| # docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32 | ||||
| # pixels large. | ||||
| #html_favicon = None | ||||
| html_favicon = "favicon.ico" | ||||
|  | ||||
| # Add any paths that contain custom static files (such as style sheets) here, | ||||
| # relative to this directory. They are copied after the builtin static files, | ||||
| @@ -132,7 +132,11 @@ html_theme_path = ['_themes'] | ||||
| html_use_smartypants = True | ||||
|  | ||||
| # Custom sidebar templates, maps document names to template names. | ||||
| #html_sidebars = {} | ||||
| html_sidebars = { | ||||
|     'index': ['globaltoc.html', 'searchbox.html'], | ||||
|     '**': ['localtoc.html', 'relations.html', 'searchbox.html'] | ||||
| } | ||||
|  | ||||
|  | ||||
| # Additional templates that should be rendered to pages, maps page names to | ||||
| # template names. | ||||
| @@ -173,8 +177,8 @@ latex_paper_size = 'a4' | ||||
| # Grouping the document tree into LaTeX files. List of tuples | ||||
| # (source start file, target name, title, author, documentclass [howto/manual]). | ||||
| latex_documents = [ | ||||
|   ('index', 'MongoEngine.tex', u'MongoEngine Documentation', | ||||
|    u'Harry Marr', 'manual'), | ||||
|   ('index', 'MongoEngine.tex', 'MongoEngine Documentation', | ||||
|    'Ross Lawley', 'manual'), | ||||
| ] | ||||
|  | ||||
| # The name of an image file (relative to this directory) to place at the top of | ||||
| @@ -193,3 +197,9 @@ latex_documents = [ | ||||
|  | ||||
| # If false, no module index is generated. | ||||
| #latex_use_modindex = True | ||||
|  | ||||
| autoclass_content = 'both' | ||||
|  | ||||
| html_theme_options = dict( | ||||
|     canonical_url='http://docs.mongoengine.org/en/latest/' | ||||
| ) | ||||
|   | ||||
							
								
								
									
										107
									
								
								docs/django.rst
									
									
									
									
									
								
							
							
						
						
									
										107
									
								
								docs/django.rst
									
									
									
									
									
								
							| @@ -1,8 +1,8 @@ | ||||
| ============================= | ||||
| Using MongoEngine with Django | ||||
| ============================= | ||||
| ============== | ||||
| Django Support | ||||
| ============== | ||||
|  | ||||
| .. note :: Updated to support Django 1.4 | ||||
| .. note:: Updated to support Django 1.5 | ||||
|  | ||||
| Connecting | ||||
| ========== | ||||
| @@ -10,6 +10,16 @@ In your **settings.py** file, ignore the standard database settings (unless you | ||||
| also plan to use the ORM in your project), and instead call | ||||
| :func:`~mongoengine.connect` somewhere in the settings module. | ||||
|  | ||||
| .. note:: | ||||
|    If you are not using another Database backend you may need to add a dummy | ||||
|    database backend to ``settings.py`` eg:: | ||||
|  | ||||
|         DATABASES = { | ||||
|             'default': { | ||||
|                 'ENGINE': 'django.db.backends.dummy' | ||||
|             } | ||||
|         } | ||||
|  | ||||
| Authentication | ||||
| ============== | ||||
| MongoEngine includes a Django authentication backend, which uses MongoDB. The | ||||
| @@ -17,9 +27,9 @@ MongoEngine includes a Django authentication backend, which uses MongoDB. The | ||||
| :class:`~mongoengine.Document`, but implements most of the methods and | ||||
| attributes that the standard Django :class:`User` model does - so the two are | ||||
| moderately compatible. Using this backend will allow you to store users in | ||||
| MongoDB but still use many of the Django authentication infrastucture (such as | ||||
| MongoDB but still use much of the Django authentication infrastructure (such as | ||||
| the :func:`login_required` decorator and the :func:`authenticate` function). To | ||||
| enable the MongoEngine auth backend, add the following to you **settings.py** | ||||
| enable the MongoEngine auth backend, add the following to your **settings.py** | ||||
| file:: | ||||
|  | ||||
|     AUTHENTICATION_BACKENDS = ( | ||||
| @@ -32,24 +42,68 @@ The :mod:`~mongoengine.django.auth` module also contains a | ||||
|  | ||||
| .. versionadded:: 0.1.3 | ||||
|  | ||||
| Custom User model | ||||
| ================= | ||||
| Django 1.5 introduced `Custom user Models | ||||
| <https://docs.djangoproject.com/en/dev/topics/auth/customizing/#auth-custom-user>`_ | ||||
| which can be used as an alternative to the MongoEngine authentication backend. | ||||
|  | ||||
| The main advantage of this option is that other components relying on | ||||
| :mod:`django.contrib.auth` and supporting the new swappable user model are more | ||||
| likely to work. For example, you can use the ``createsuperuser`` management | ||||
| command as usual. | ||||
|  | ||||
| To enable the custom User model in Django, add ``'mongoengine.django.mongo_auth'`` | ||||
| in your ``INSTALLED_APPS`` and set ``'mongo_auth.MongoUser'`` as the custom | ||||
| user model to use. In your **settings.py** file you will have:: | ||||
|  | ||||
|     INSTALLED_APPS = ( | ||||
|         ... | ||||
|         'django.contrib.auth', | ||||
|         'mongoengine.django.mongo_auth', | ||||
|         ... | ||||
|     ) | ||||
|  | ||||
|     AUTH_USER_MODEL = 'mongo_auth.MongoUser' | ||||
|  | ||||
| An additional ``MONGOENGINE_USER_DOCUMENT`` setting enables you to replace the | ||||
| :class:`~mongoengine.django.auth.User` class with another class of your choice:: | ||||
|  | ||||
|     MONGOENGINE_USER_DOCUMENT = 'mongoengine.django.auth.User' | ||||
|  | ||||
| The custom :class:`User` must be a :class:`~mongoengine.Document` class, but | ||||
| otherwise has the same requirements as a standard custom user model, | ||||
| as specified in the `Django Documentation | ||||
| <https://docs.djangoproject.com/en/dev/topics/auth/customizing/>`_. | ||||
| In particular, the custom class must define :attr:`USERNAME_FIELD` and | ||||
| :attr:`REQUIRED_FIELDS` attributes. | ||||
|  | ||||
| Sessions | ||||
| ======== | ||||
| Django allows the use of different backend stores for its sessions. MongoEngine | ||||
| provides a MongoDB-based session backend for Django, which allows you to use | ||||
| sessions in you Django application with just MongoDB. To enable the MongoEngine | ||||
| sessions in your Django application with just MongoDB. To enable the MongoEngine | ||||
| session backend, ensure that your settings module has | ||||
| ``'django.contrib.sessions.middleware.SessionMiddleware'`` in the | ||||
| ``MIDDLEWARE_CLASSES`` field  and ``'django.contrib.sessions'`` in your | ||||
| ``INSTALLED_APPS``. From there, all you need to do is add the following line | ||||
| into you settings module:: | ||||
| into your settings module:: | ||||
|  | ||||
|     SESSION_ENGINE = 'mongoengine.django.sessions' | ||||
|     SESSION_SERIALIZER = 'mongoengine.django.sessions.BSONSerializer' | ||||
|  | ||||
| Django provides a session cookie, which expires after ``SESSION_COOKIE_AGE`` seconds, but doesn't delete the session at the backend, so ``'mongoengine.django.sessions'`` supports `mongodb TTL | ||||
| <http://docs.mongodb.org/manual/tutorial/expire-data/>`_. | ||||
|  | ||||
| .. note:: ``SESSION_SERIALIZER`` is only necessary in Django 1.6 as the default | ||||
|    serializer is based around JSON and doesn't know how to convert | ||||
|    ``bson.objectid.ObjectId`` instances to strings. | ||||
|  | ||||
| .. versionadded:: 0.2.1 | ||||
|  | ||||
| Storage | ||||
| ======= | ||||
| With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`, | ||||
| With MongoEngine's support for GridFS via the :class:`~mongoengine.fields.FileField`, | ||||
| it is useful to have a Django file storage backend that wraps this. The new | ||||
| storage module is called :class:`~mongoengine.django.storage.GridFSStorage`. | ||||
| Using it is very similar to using the default FileSystemStorage.:: | ||||
| @@ -79,7 +133,7 @@ appended to the filename until the generated filename doesn't exist. The | ||||
|     >>> fs.listdir() | ||||
|     ([], [u'hello.txt']) | ||||
|  | ||||
| All files will be saved and retrieved in GridFS via the :class::`FileDocument` | ||||
| All files will be saved and retrieved in GridFS via the :class:`FileDocument` | ||||
| document, allowing easy access to the files without the GridFSStorage | ||||
| backend.:: | ||||
|  | ||||
| @@ -88,3 +142,36 @@ backend.:: | ||||
|     [<FileDocument: FileDocument object>] | ||||
|  | ||||
| .. versionadded:: 0.4 | ||||
|  | ||||
| Shortcuts | ||||
| ========= | ||||
| Inspired by the `Django shortcut get_object_or_404 | ||||
| <https://docs.djangoproject.com/en/dev/topics/http/shortcuts/#get-object-or-404>`_, | ||||
| the :func:`~mongoengine.django.shortcuts.get_document_or_404` method returns  | ||||
| a document or raises an Http404 exception if the document does not exist:: | ||||
|  | ||||
|     from mongoengine.django.shortcuts import get_document_or_404 | ||||
|      | ||||
|     admin_user = get_document_or_404(User, username='root') | ||||
|  | ||||
| The first argument may be a Document or QuerySet object. All other passed arguments | ||||
| and keyword arguments are used in the query:: | ||||
|  | ||||
|     foo_email = get_document_or_404(User.objects.only('email'), username='foo', is_active=True).email | ||||
|  | ||||
| .. note:: Like with :func:`get`, a MultipleObjectsReturned will be raised if more than one | ||||
|     object is found. | ||||
|  | ||||
|  | ||||
| Also inspired by the `Django shortcut get_list_or_404 | ||||
| <https://docs.djangoproject.com/en/dev/topics/http/shortcuts/#get-list-or-404>`_, | ||||
| the :func:`~mongoengine.django.shortcuts.get_list_or_404` method returns a list of | ||||
| documents or raises an Http404 exception if the list is empty:: | ||||
|  | ||||
|     from mongoengine.django.shortcuts import get_list_or_404 | ||||
|      | ||||
|     active_users = get_list_or_404(User, is_active=True) | ||||
|  | ||||
| The first argument may be a Document or QuerySet object. All other passed | ||||
| arguments and keyword arguments are used to filter the query. | ||||
|  | ||||
|   | ||||
| @@ -6,33 +6,45 @@ Connecting to MongoDB | ||||
|  | ||||
| To connect to a running instance of :program:`mongod`, use the | ||||
| :func:`~mongoengine.connect` function. The first argument is the name of the | ||||
| database to connect to. If the database does not exist, it will be created. If | ||||
| the database requires authentication, :attr:`username` and :attr:`password` | ||||
| arguments may be provided:: | ||||
| database to connect to:: | ||||
|  | ||||
|     from mongoengine import connect | ||||
|     connect('project1', username='webapp', password='pwd123') | ||||
|     connect('project1') | ||||
|  | ||||
| By default, MongoEngine assumes that the :program:`mongod` instance is running | ||||
| on **localhost** on port **27017**. If MongoDB is running elsewhere, you may | ||||
| provide :attr:`host` and :attr:`port` arguments to | ||||
| on **localhost** on port **27017**. If MongoDB is running elsewhere, you should | ||||
| provide the :attr:`host` and :attr:`port` arguments to | ||||
| :func:`~mongoengine.connect`:: | ||||
|  | ||||
|     connect('project1', host='192.168.1.35', port=12345) | ||||
|  | ||||
| Uri style connections are also supported as long as you include the database | ||||
| name - just supply the uri as the :attr:`host` to | ||||
| If the database requires authentication, :attr:`username` and :attr:`password` | ||||
| arguments should be provided:: | ||||
|  | ||||
|     connect('project1', username='webapp', password='pwd123') | ||||
|  | ||||
| Uri style connections are also supported - just supply the uri as | ||||
| the :attr:`host` to | ||||
| :func:`~mongoengine.connect`:: | ||||
|  | ||||
|     connect('project1', host='mongodb://localhost/database_name') | ||||
|  | ||||
| Note that the database name from the uri has priority over the name | ||||
| in :func:`~mongoengine.connect`. | ||||
|  | ||||
| ReplicaSets | ||||
| =========== | ||||
|  | ||||
| MongoEngine now supports :func:`~pymongo.replica_set_connection.ReplicaSetConnection` | ||||
| MongoEngine supports :class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient` | ||||
| to use them please use a URI style connection and provide the `replicaSet` name in the | ||||
| connection kwargs. | ||||
|  | ||||
| Read preferences are supported through the connection or via individual | ||||
| queries by passing the read_preference :: | ||||
|  | ||||
|     Bar.objects().read_preference(ReadPreference.PRIMARY) | ||||
|     Bar.objects(read_preference=ReadPreference.PRIMARY) | ||||
|  | ||||
| Multiple Databases | ||||
| ================== | ||||
|  | ||||
| @@ -63,3 +75,43 @@ to point across databases and collections.  Below is an example schema, using | ||||
|             book = ReferenceField(Book) | ||||
|  | ||||
|             meta = {"db_alias": "users-books-db"} | ||||
|  | ||||
|  | ||||
| Switch Database Context Manager | ||||
| =============================== | ||||
|  | ||||
| Sometimes you may want to switch the database to query against for a class | ||||
| for example, archiving older data into a separate database for performance | ||||
| reasons. | ||||
|  | ||||
| The :class:`~mongoengine.context_managers.switch_db` context manager allows | ||||
| you to change the database alias for a given class allowing quick and easy | ||||
| access to the same User document across databases:: | ||||
|  | ||||
|         from mongoengine.context_managers import switch_db | ||||
|  | ||||
|         class User(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|             meta = {"db_alias": "user-db"} | ||||
|  | ||||
|         with switch_db(User, 'archive-user-db') as User: | ||||
|             User(name="Ross").save()  # Saves the 'archive-user-db' | ||||
|  | ||||
| .. note:: Make sure any aliases have been registered with | ||||
|     :func:`~mongoengine.register_connection` before using the context manager. | ||||
|  | ||||
| There is also a switch collection context manager as well.  The | ||||
| :class:`~mongoengine.context_managers.switch_collection` context manager allows | ||||
| you to change the collection for a given class allowing quick and easy | ||||
| access to the same Group document across collections:: | ||||
|  | ||||
|         from mongoengine.context_managers import switch_collection | ||||
|  | ||||
|         class Group(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         Group(name="test").save()  # Saves in the default db | ||||
|  | ||||
|         with switch_collection(Group, 'group2000') as Group: | ||||
|             Group(name="hello Group 2000 collection!").save()  # Saves in group2000 collection | ||||
|   | ||||
| @@ -24,6 +24,9 @@ objects** as class attributes to the document class:: | ||||
|         title = StringField(max_length=200, required=True) | ||||
|         date_modified = DateTimeField(default=datetime.datetime.now) | ||||
|  | ||||
| As BSON (the binary format for storing data in mongodb) is order dependent, | ||||
| documents are serialized based on their field order. | ||||
|  | ||||
| Dynamic document schemas | ||||
| ======================== | ||||
| One of the benefits of MongoDb is dynamic schemas for a collection, whilst data | ||||
| @@ -47,10 +50,11 @@ be saved :: | ||||
|     >>> Page.objects(tags='mongoengine').count() | ||||
|     >>> 1 | ||||
|  | ||||
| ..note:: | ||||
| .. note:: | ||||
|  | ||||
|    There is one caveat on Dynamic Documents: fields cannot start with `_` | ||||
|  | ||||
| Dynamic fields are stored in creation order *after* any declared fields. | ||||
|  | ||||
| Fields | ||||
| ====== | ||||
| @@ -62,31 +66,31 @@ not provided. Default values may optionally be a callable, which will be called | ||||
| to retrieve the value (such as in the above example). The field types available | ||||
| are as follows: | ||||
|  | ||||
| * :class:`~mongoengine.BinaryField` | ||||
| * :class:`~mongoengine.BooleanField` | ||||
| * :class:`~mongoengine.ComplexDateTimeField` | ||||
| * :class:`~mongoengine.DateTimeField` | ||||
| * :class:`~mongoengine.DecimalField` | ||||
| * :class:`~mongoengine.DictField` | ||||
| * :class:`~mongoengine.DynamicField` | ||||
| * :class:`~mongoengine.EmailField` | ||||
| * :class:`~mongoengine.EmbeddedDocumentField` | ||||
| * :class:`~mongoengine.FileField` | ||||
| * :class:`~mongoengine.FloatField` | ||||
| * :class:`~mongoengine.GenericEmbeddedDocumentField` | ||||
| * :class:`~mongoengine.GenericReferenceField` | ||||
| * :class:`~mongoengine.GeoPointField` | ||||
| * :class:`~mongoengine.ImageField` | ||||
| * :class:`~mongoengine.IntField` | ||||
| * :class:`~mongoengine.ListField` | ||||
| * :class:`~mongoengine.MapField` | ||||
| * :class:`~mongoengine.ObjectIdField` | ||||
| * :class:`~mongoengine.ReferenceField` | ||||
| * :class:`~mongoengine.SequenceField` | ||||
| * :class:`~mongoengine.SortedListField` | ||||
| * :class:`~mongoengine.StringField` | ||||
| * :class:`~mongoengine.URLField` | ||||
| * :class:`~mongoengine.UUIDField` | ||||
| * :class:`~mongoengine.fields.BinaryField` | ||||
| * :class:`~mongoengine.fields.BooleanField` | ||||
| * :class:`~mongoengine.fields.ComplexDateTimeField` | ||||
| * :class:`~mongoengine.fields.DateTimeField` | ||||
| * :class:`~mongoengine.fields.DecimalField` | ||||
| * :class:`~mongoengine.fields.DictField` | ||||
| * :class:`~mongoengine.fields.DynamicField` | ||||
| * :class:`~mongoengine.fields.EmailField` | ||||
| * :class:`~mongoengine.fields.EmbeddedDocumentField` | ||||
| * :class:`~mongoengine.fields.FileField` | ||||
| * :class:`~mongoengine.fields.FloatField` | ||||
| * :class:`~mongoengine.fields.GenericEmbeddedDocumentField` | ||||
| * :class:`~mongoengine.fields.GenericReferenceField` | ||||
| * :class:`~mongoengine.fields.GeoPointField` | ||||
| * :class:`~mongoengine.fields.ImageField` | ||||
| * :class:`~mongoengine.fields.IntField` | ||||
| * :class:`~mongoengine.fields.ListField` | ||||
| * :class:`~mongoengine.fields.MapField` | ||||
| * :class:`~mongoengine.fields.ObjectIdField` | ||||
| * :class:`~mongoengine.fields.ReferenceField` | ||||
| * :class:`~mongoengine.fields.SequenceField` | ||||
| * :class:`~mongoengine.fields.SortedListField` | ||||
| * :class:`~mongoengine.fields.StringField` | ||||
| * :class:`~mongoengine.fields.URLField` | ||||
| * :class:`~mongoengine.fields.UUIDField` | ||||
|  | ||||
| Field arguments | ||||
| --------------- | ||||
| @@ -96,9 +100,6 @@ arguments can be set on all fields: | ||||
| :attr:`db_field` (Default: None) | ||||
|     The MongoDB field name. | ||||
|  | ||||
| :attr:`name` (Default: None) | ||||
|     The mongoengine field name. | ||||
|  | ||||
| :attr:`required` (Default: False) | ||||
|     If set to True and the field is not set on the document instance, a | ||||
|     :class:`~mongoengine.ValidationError` will be raised when the document is | ||||
| @@ -110,7 +111,7 @@ arguments can be set on all fields: | ||||
|     The definion of default parameters follow `the general rules on Python | ||||
|     <http://docs.python.org/reference/compound_stmts.html#function-definitions>`__, | ||||
|     which means that some care should be taken when dealing with default mutable objects | ||||
|     (like in :class:`~mongoengine.ListField` or :class:`~mongoengine.DictField`):: | ||||
|     (like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`):: | ||||
|  | ||||
|         class ExampleFirst(Document): | ||||
|             # Default an empty list | ||||
| @@ -125,6 +126,7 @@ arguments can be set on all fields: | ||||
|             # instead to just an object | ||||
|             values = ListField(IntField(), default=[1,2,3]) | ||||
|  | ||||
|     .. note:: Unsetting a field with a default value will revert back to the default. | ||||
|  | ||||
| :attr:`unique` (Default: False) | ||||
|     When True, no documents in the collection will have the same value for this | ||||
| @@ -135,7 +137,8 @@ arguments can be set on all fields: | ||||
|     field, will not have two documents in the collection with the same value. | ||||
|  | ||||
| :attr:`primary_key` (Default: False) | ||||
|     When True, use this field as a primary key for the collection. | ||||
|     When True, use this field as a primary key for the collection.  `DictField` | ||||
|     and `EmbeddedDocuments` both support being the primary key for a document. | ||||
|  | ||||
| :attr:`choices` (Default: None) | ||||
|     An iterable (e.g. a list or tuple) of choices to which the value of this | ||||
| @@ -171,8 +174,8 @@ arguments can be set on all fields: | ||||
| List fields | ||||
| ----------- | ||||
| MongoDB allows the storage of lists of items. To add a list of items to a | ||||
| :class:`~mongoengine.Document`, use the :class:`~mongoengine.ListField` field | ||||
| type. :class:`~mongoengine.ListField` takes another field object as its first | ||||
| :class:`~mongoengine.Document`, use the :class:`~mongoengine.fields.ListField` field | ||||
| type. :class:`~mongoengine.fields.ListField` takes another field object as its first | ||||
| argument, which specifies which type elements may be stored within the list:: | ||||
|  | ||||
|     class Page(Document): | ||||
| @@ -190,7 +193,7 @@ inherit from :class:`~mongoengine.EmbeddedDocument` rather than | ||||
|         content = StringField() | ||||
|  | ||||
| To embed the document within another document, use the | ||||
| :class:`~mongoengine.EmbeddedDocumentField` field type, providing the embedded | ||||
| :class:`~mongoengine.fields.EmbeddedDocumentField` field type, providing the embedded | ||||
| document class as the first argument:: | ||||
|  | ||||
|     class Page(Document): | ||||
| @@ -205,7 +208,7 @@ Dictionary Fields | ||||
| Often, an embedded document may be used instead of a dictionary -- generally | ||||
| this is recommended as dictionaries don't support validation or custom field | ||||
| types. However, sometimes you will not know the structure of what you want to | ||||
| store; in this situation a :class:`~mongoengine.DictField` is appropriate:: | ||||
| store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate:: | ||||
|  | ||||
|     class SurveyResponse(Document): | ||||
|         date = DateTimeField() | ||||
| @@ -223,7 +226,7 @@ other objects, so are the most flexible field type available. | ||||
| Reference fields | ||||
| ---------------- | ||||
| References may be stored to other documents in the database using the | ||||
| :class:`~mongoengine.ReferenceField`. Pass in another document class as the | ||||
| :class:`~mongoengine.fields.ReferenceField`. Pass in another document class as the | ||||
| first argument to the constructor, then simply assign document objects to the | ||||
| field:: | ||||
|  | ||||
| @@ -244,9 +247,9 @@ field:: | ||||
| The :class:`User` object is automatically turned into a reference behind the | ||||
| scenes, and dereferenced when the :class:`Page` object is retrieved. | ||||
|  | ||||
| To add a :class:`~mongoengine.ReferenceField` that references the document | ||||
| To add a :class:`~mongoengine.fields.ReferenceField` that references the document | ||||
| being defined, use the string ``'self'`` in place of the document class as the | ||||
| argument to :class:`~mongoengine.ReferenceField`'s constructor. To reference a | ||||
| argument to :class:`~mongoengine.fields.ReferenceField`'s constructor. To reference a | ||||
| document that has not yet been defined, use the name of the undefined document | ||||
| as the constructor's argument:: | ||||
|  | ||||
| @@ -287,6 +290,12 @@ instance of the object to the query:: | ||||
|     # Find all pages that both Bob and John have authored | ||||
|     Page.objects(authors__all=[bob, john]) | ||||
|  | ||||
|     # Remove Bob from the authors for a page. | ||||
|     Page.objects(id='...').update_one(pull__authors=bob) | ||||
|  | ||||
|     # Add John to the authors for a page. | ||||
|     Page.objects(id='...').update_one(push__authors=john) | ||||
|  | ||||
|  | ||||
| Dealing with deletion of referred documents | ||||
| ''''''''''''''''''''''''''''''''''''''''''' | ||||
| @@ -324,7 +333,7 @@ Its value can take any of the following constants: | ||||
| :const:`mongoengine.PULL` | ||||
|   Removes the reference to the object (using MongoDB's "pull" operation) | ||||
|   from any object's fields of | ||||
|   :class:`~mongoengine.ListField` (:class:`~mongoengine.ReferenceField`). | ||||
|   :class:`~mongoengine.fields.ListField` (:class:`~mongoengine.fields.ReferenceField`). | ||||
|  | ||||
|  | ||||
| .. warning:: | ||||
| @@ -351,7 +360,7 @@ Its value can take any of the following constants: | ||||
| Generic reference fields | ||||
| '''''''''''''''''''''''' | ||||
| A second kind of reference field also exists, | ||||
| :class:`~mongoengine.GenericReferenceField`. This allows you to reference any | ||||
| :class:`~mongoengine.fields.GenericReferenceField`. This allows you to reference any | ||||
| kind of :class:`~mongoengine.Document`, and hence doesn't take a | ||||
| :class:`~mongoengine.Document` subclass as a constructor argument:: | ||||
|  | ||||
| @@ -375,15 +384,15 @@ kind of :class:`~mongoengine.Document`, and hence doesn't take a | ||||
|  | ||||
| .. note:: | ||||
|  | ||||
|    Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less | ||||
|    efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if | ||||
|    Using :class:`~mongoengine.fields.GenericReferenceField`\ s is slightly less | ||||
|    efficient than the standard :class:`~mongoengine.fields.ReferenceField`\ s, so if | ||||
|    you will only be referencing one document type, prefer the standard | ||||
|    :class:`~mongoengine.ReferenceField`. | ||||
|    :class:`~mongoengine.fields.ReferenceField`. | ||||
|  | ||||
| Uniqueness constraints | ||||
| ---------------------- | ||||
| MongoEngine allows you to specify that a field should be unique across a | ||||
| collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's | ||||
| collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's | ||||
| constructor. If you try to save a document that has the same value for a unique | ||||
| field as a document that is already in the database, a | ||||
| :class:`~mongoengine.OperationError` will be raised. You may also specify | ||||
| @@ -398,7 +407,7 @@ either a single field name, or a list or tuple of field names:: | ||||
| Skipping Document validation on save | ||||
| ------------------------------------ | ||||
| You can also skip the whole document validation process by setting | ||||
| ``validate=False`` when caling the :meth:`~mongoengine.document.Document.save` | ||||
| ``validate=False`` when calling the :meth:`~mongoengine.document.Document.save` | ||||
| method:: | ||||
|  | ||||
|     class Recipient(Document): | ||||
| @@ -439,15 +448,18 @@ The following example shows a :class:`Log` document that will be limited to | ||||
|         ip_address = StringField() | ||||
|         meta = {'max_documents': 1000, 'max_size': 2000000} | ||||
|  | ||||
| .. defining-indexes_ | ||||
|  | ||||
| Indexes | ||||
| ======= | ||||
|  | ||||
| You can specify indexes on collections to make querying faster. This is done | ||||
| by creating a list of index specifications called :attr:`indexes` in the | ||||
| :attr:`~mongoengine.Document.meta` dictionary, where an index specification may | ||||
| either be a single field name, a tuple containing multiple field names, or a | ||||
| dictionary containing a full index definition. A direction may be specified on | ||||
| fields by prefixing the field name with a **+** or a **-** sign. Note that | ||||
| direction only matters on multi-field indexes. :: | ||||
| fields by prefixing the field name with a **+** (for ascending) or a **-** sign | ||||
| (for descending). Note that direction only matters on multi-field indexes. :: | ||||
|  | ||||
|     class Page(Document): | ||||
|         title = StringField() | ||||
| @@ -461,9 +473,11 @@ If a dictionary is passed then the following options are available: | ||||
| :attr:`fields` (Default: None) | ||||
|     The fields to index. Specified in the same format as described above. | ||||
|  | ||||
| :attr:`types` (Default: True) | ||||
|     Whether the index should have the :attr:`_types` field added automatically | ||||
|     to the start of the index. | ||||
| :attr:`cls` (Default: True) | ||||
|     If you have polymorphic models that inherit and have | ||||
|     :attr:`allow_inheritance` turned on, you can configure whether the index | ||||
|     should have the :attr:`_cls` field added automatically to the start of the | ||||
|     index. | ||||
|  | ||||
| :attr:`sparse` (Default: False) | ||||
|     Whether the index should be sparse. | ||||
| @@ -471,26 +485,89 @@ If a dictionary is passed then the following options are available: | ||||
| :attr:`unique` (Default: False) | ||||
|     Whether the index should be unique. | ||||
|  | ||||
| .. note :: | ||||
| :attr:`expireAfterSeconds` (Optional) | ||||
|     Allows you to automatically expire data from a collection by setting the | ||||
    time in seconds after which to expire a field. | ||||
|  | ||||
|     To index embedded files / dictionary fields use 'dot' notation eg: | ||||
|     `rank.title` | ||||
| .. note:: | ||||
|  | ||||
| .. warning:: | ||||
|     Inheritance adds extra fields indices see: :ref:`document-inheritance`. | ||||
|  | ||||
|     Inheritance adds extra indices. | ||||
|     If don't need inheritance for a document turn inheritance off - | ||||
|     see :ref:`document-inheritance`. | ||||
| Global index default options | ||||
| ---------------------------- | ||||
|  | ||||
| There are a few top level defaults for all indexes that can be set:: | ||||
|  | ||||
|     class Page(Document): | ||||
|         title = StringField() | ||||
|         rating = StringField() | ||||
|         meta = { | ||||
|             'index_options': {}, | ||||
|             'index_background': True, | ||||
|             'index_drop_dups': True, | ||||
|             'index_cls': False | ||||
|         } | ||||
|  | ||||
|  | ||||
| :attr:`index_options` (Optional) | ||||
|     Set any default index options - see the `full options list <http://docs.mongodb.org/manual/reference/method/db.collection.ensureIndex/#db.collection.ensureIndex>`_ | ||||
|  | ||||
| :attr:`index_background` (Optional) | ||||
|     Set the default value for if an index should be indexed in the background | ||||
|  | ||||
| :attr:`index_drop_dups` (Optional) | ||||
|     Set the default value for if an index should drop duplicates | ||||
|  | ||||
| :attr:`index_cls` (Optional) | ||||
|     A way to turn off a specific index for _cls. | ||||
|  | ||||
|  | ||||
| Compound Indexes and Indexing sub documents | ||||
| ------------------------------------------- | ||||
|  | ||||
| Compound indexes can be created by adding the Embedded field or dictionary | ||||
| field name to the index definition. | ||||
|  | ||||
| Sometimes its more efficient to index parts of Embedded / dictionary fields, | ||||
| in this case use 'dot' notation to identify the value to index eg: `rank.title` | ||||
|  | ||||
| Geospatial indexes | ||||
| --------------------------- | ||||
| ------------------ | ||||
|  | ||||
| The best geo index for mongodb is the new "2dsphere", which has an improved | ||||
| spherical model and provides better performance and more options when querying. | ||||
| The following fields will explicitly add a "2dsphere" index: | ||||
|  | ||||
|     - :class:`~mongoengine.fields.PointField` | ||||
|     - :class:`~mongoengine.fields.LineStringField` | ||||
|     - :class:`~mongoengine.fields.PolygonField` | ||||
|  | ||||
| As "2dsphere" indexes can be part of a compound index, you may not want the | ||||
| automatic index but would prefer a compound index.  In this example we turn off | ||||
| auto indexing and explicitly declare a compound index on ``location`` and ``datetime``:: | ||||
|  | ||||
|     class Log(Document): | ||||
|         location = PointField(auto_index=False) | ||||
|         datetime = DateTimeField() | ||||
|  | ||||
|         meta = { | ||||
|             'indexes': [[("location", "2dsphere"), ("datetime", 1)]] | ||||
|         } | ||||
|  | ||||
|  | ||||
| Pre MongoDB 2.4 Geo | ||||
| ''''''''''''''''''' | ||||
|  | ||||
| .. note:: For MongoDB < 2.4 this is still current, however the new 2dsphere | ||||
|     index is a big improvement over the previous 2D model - so upgrading is | ||||
|     advised. | ||||
|  | ||||
| Geospatial indexes will be automatically created for all | ||||
| :class:`~mongoengine.GeoPointField`\ s | ||||
| :class:`~mongoengine.fields.GeoPointField`\ s | ||||
|  | ||||
| It is also possible to explicitly define geospatial indexes. This is | ||||
| useful if you need to define a geospatial index on a subfield of a | ||||
| :class:`~mongoengine.DictField` or a custom field that contains a | ||||
| :class:`~mongoengine.fields.DictField` or a custom field that contains a | ||||
| point. To create a geospatial index you must prefix the field with the | ||||
| ***** sign. :: | ||||
|  | ||||
| @@ -502,6 +579,35 @@ point. To create a geospatial index you must prefix the field with the | ||||
|             ], | ||||
|         } | ||||
|  | ||||
| Time To Live indexes | ||||
| -------------------- | ||||
|  | ||||
| A special index type that allows you to automatically expire data from a | ||||
| collection after a given period. See the official | ||||
| `ttl <http://docs.mongodb.org/manual/tutorial/expire-data/#expire-data-from-collections-by-setting-ttl>`_ | ||||
documentation for more information.  A common use case might be session data:: | ||||
|  | ||||
|     class Session(Document): | ||||
|         created = DateTimeField(default=datetime.now) | ||||
|         meta = { | ||||
|             'indexes': [ | ||||
|                 {'fields': ['created'], 'expireAfterSeconds': 3600} | ||||
|             ] | ||||
|         } | ||||
|  | ||||
| .. warning:: TTL indexes happen on the MongoDB server and not in the application | ||||
|     code, therefore no signals will be fired on document deletion. | ||||
|     If you need signals to be fired on deletion, then you must handle the | ||||
|     deletion of Documents in your application code. | ||||
|  | ||||
| Comparing Indexes | ||||
| ----------------- | ||||
|  | ||||
| Use :func:`mongoengine.Document.compare_indexes` to compare actual indexes in | ||||
| the database to those that your document definitions define.  This is useful | ||||
| for maintenance purposes and ensuring you have the correct indexes for your | ||||
| schema. | ||||
|  | ||||
| Ordering | ||||
| ======== | ||||
| A default ordering can be specified for your | ||||
| @@ -572,7 +678,9 @@ defined, you may subclass it and add any extra fields or methods you may need. | ||||
As this new class is not a direct subclass of | ||||
| :class:`~mongoengine.Document`, it will not be stored in its own collection; it | ||||
| will use the same collection as its superclass uses. This allows for more | ||||
| convenient and efficient retrieval of related documents:: | ||||
convenient and efficient retrieval of related documents - all you need to do is | ||||
set :attr:`allow_inheritance` to True in the :attr:`meta` data for a | ||||
document:: | ||||
|  | ||||
|     # Stored in a collection named 'page' | ||||
|     class Page(Document): | ||||
| @@ -584,25 +692,47 @@ convenient and efficient retrieval of related documents:: | ||||
|     class DatedPage(Page): | ||||
|         date = DateTimeField() | ||||
|  | ||||
| .. note:: From 0.7 onwards you must declare `allow_inheritance` in the document meta. | ||||
|  | ||||
| .. note:: From 0.8 onwards you must declare :attr:`allow_inheritance` defaults | ||||
|           to False, meaning you must set it to True to use inheritance. | ||||
|  | ||||
| Working with existing data | ||||
| -------------------------- | ||||
To enable correct retrieval of documents involved in this kind of hierarchy, | ||||
| two extra attributes are stored on each document in the database: :attr:`_cls` | ||||
| and :attr:`_types`. These are hidden from the user through the MongoEngine | ||||
| interface, but may not be present if you are trying to use MongoEngine with | ||||
| an existing database. For this reason, you may disable this inheritance | ||||
mechanism, removing the dependency of :attr:`_cls` and :attr:`_types`, enabling | ||||
| you to work with existing databases. To disable inheritance on a document | ||||
| class, set :attr:`allow_inheritance` to ``False`` in the :attr:`meta` | ||||
| dictionary:: | ||||
| As MongoEngine no longer defaults to needing :attr:`_cls` you can quickly and | ||||
| easily get working with existing data.  Just define the document to match | ||||
| the expected schema in your database :: | ||||
|  | ||||
|     # Will work with data in an existing collection named 'cmsPage' | ||||
|     class Page(Document): | ||||
|         title = StringField(max_length=200, required=True) | ||||
|         meta = { | ||||
|             'collection': 'cmsPage', | ||||
|             'allow_inheritance': False, | ||||
|             'collection': 'cmsPage' | ||||
|         } | ||||
|  | ||||
| If you have wildly varying schemas then using a | ||||
| :class:`~mongoengine.DynamicDocument` might be more appropriate, instead of | ||||
| defining all possible field types. | ||||
|  | ||||
| If you use :class:`~mongoengine.Document` and the database contains data that | ||||
| isn't defined then that data will be stored in the `document._data` dictionary. | ||||
|  | ||||
| Abstract classes | ||||
| ================ | ||||
|  | ||||
| If you want to add some extra functionality to a group of Document classes but | ||||
| you don't need or want the overhead of inheritance you can use the | ||||
:attr:`abstract` attribute of :attr:`~mongoengine.Document.meta`. | ||||
| This won't turn on :ref:`document-inheritance` but will allow you to keep your | ||||
| code DRY:: | ||||
|  | ||||
|         class BaseDocument(Document): | ||||
|             meta = { | ||||
|                 'abstract': True, | ||||
|             } | ||||
|             def check_permissions(self): | ||||
|                 ... | ||||
|  | ||||
|         class User(BaseDocument): | ||||
|            ... | ||||
|  | ||||
| Now the User class will have access to the inherited `check_permissions` method | ||||
| and won't store any of the extra `_cls` information. | ||||
|   | ||||
| @@ -30,21 +30,53 @@ already exist, then any changes will be updated atomically.  For example:: | ||||
|  | ||||
| .. note:: | ||||
|  | ||||
|     Changes to documents are tracked and on the whole perform `set` operations. | ||||
|     Changes to documents are tracked and on the whole perform ``set`` operations. | ||||
|  | ||||
|     * ``list_field.pop(0)`` - *sets* the resulting list | ||||
|     * ``list_field.push(0)`` - *sets* the resulting list | ||||
|     * ``del(list_field)``   - *unsets* whole list | ||||
|  | ||||
    With lists it is preferable to use ``Doc.update(push__list_field=0)`` as | ||||
|     this stops the whole list being updated - stopping any race conditions. | ||||
|  | ||||
| .. seealso:: | ||||
|     :ref:`guide-atomic-updates` | ||||
|  | ||||
| Pre save data validation and cleaning | ||||
| ------------------------------------- | ||||
| MongoEngine allows you to create custom cleaning rules for your documents when | ||||
| calling :meth:`~mongoengine.Document.save`.  By providing a custom | ||||
| :meth:`~mongoengine.Document.clean` method you can do any pre validation / data | ||||
| cleaning. | ||||
|  | ||||
| This might be useful if you want to ensure a default value based on other | ||||
| document values for example:: | ||||
|  | ||||
|     class Essay(Document): | ||||
|         status = StringField(choices=('Published', 'Draft'), required=True) | ||||
|         pub_date = DateTimeField() | ||||
|  | ||||
|         def clean(self): | ||||
|             """Ensures that only published essays have a `pub_date` and | ||||
|             automatically sets the pub_date if published and not set""" | ||||
|             if self.status == 'Draft' and self.pub_date is not None: | ||||
|                 msg = 'Draft entries should not have a publication date.' | ||||
|                 raise ValidationError(msg) | ||||
|             # Set the pub_date for published items if not set. | ||||
|             if self.status == 'Published' and self.pub_date is None: | ||||
|                 self.pub_date = datetime.now() | ||||
|  | ||||
| .. note:: | ||||
|     Cleaning is only called if validation is turned on and when calling | ||||
|     :meth:`~mongoengine.Document.save`. | ||||
|  | ||||
| Cascading Saves | ||||
| --------------- | ||||
| If your document contains :class:`~mongoengine.ReferenceField` or | ||||
| :class:`~mongoengine.GenericReferenceField` objects, then by default the | ||||
| :meth:`~mongoengine.Document.save` method will automatically save any changes to | ||||
| those objects as well.  If this is not desired passing :attr:`cascade` as False | ||||
| to the save method turns this feature off. | ||||
| If your document contains :class:`~mongoengine.fields.ReferenceField` or | ||||
| :class:`~mongoengine.fields.GenericReferenceField` objects, then by default the | ||||
| :meth:`~mongoengine.Document.save` method will not save any changes to | ||||
those objects.  If you want all references to also be saved, noting that each | ||||
| save is a separate query, then passing :attr:`cascade` as True | ||||
| to the save method will cascade any saves. | ||||
|  | ||||
| Deleting documents | ||||
| ------------------ | ||||
|   | ||||
| @@ -7,7 +7,7 @@ GridFS | ||||
| Writing | ||||
| ------- | ||||
|  | ||||
| GridFS support comes in the form of the :class:`~mongoengine.FileField` field | ||||
| GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field | ||||
| object. This field acts as a file-like object and provides a couple of | ||||
| different ways of inserting and retrieving data. Arbitrary metadata such as | ||||
| content type can also be stored alongside the files. In the following example, | ||||
| @@ -18,26 +18,16 @@ a document is created to store details about animals, including a photo:: | ||||
|         family = StringField() | ||||
|         photo = FileField() | ||||
|  | ||||
|     marmot = Animal('Marmota', 'Sciuridae') | ||||
|  | ||||
|     marmot_photo = open('marmot.jpg', 'r')      # Retrieve a photo from disk | ||||
|     marmot.photo = marmot_photo                 # Store photo in the document | ||||
|     marmot.photo.content_type = 'image/jpeg'    # Store metadata | ||||
|  | ||||
|     marmot.save() | ||||
|  | ||||
| Another way of writing to a :class:`~mongoengine.FileField` is to use the | ||||
| :func:`put` method. This allows for metadata to be stored in the same call as | ||||
| the file:: | ||||
|  | ||||
|     marmot.photo.put(marmot_photo, content_type='image/jpeg') | ||||
|     marmot = Animal(genus='Marmota', family='Sciuridae') | ||||
|  | ||||
|     marmot_photo = open('marmot.jpg', 'rb') | ||||
|     marmot.photo.put(marmot_photo, content_type = 'image/jpeg') | ||||
|     marmot.save() | ||||
|  | ||||
| Retrieval | ||||
| --------- | ||||
|  | ||||
| So using the :class:`~mongoengine.FileField` is just like using any other | ||||
| So using the :class:`~mongoengine.fields.FileField` is just like using any other | ||||
| field. The file can also be retrieved just as easily:: | ||||
|  | ||||
|     marmot = Animal.objects(genus='Marmota').first() | ||||
| @@ -47,7 +37,7 @@ field. The file can also be retrieved just as easily:: | ||||
| Streaming | ||||
| --------- | ||||
|  | ||||
| Streaming data into a :class:`~mongoengine.FileField` is achieved in a | ||||
| Streaming data into a :class:`~mongoengine.fields.FileField` is achieved in a | ||||
| slightly different manner.  First, a new file must be created by calling the | ||||
| :func:`new_file` method. Data can then be written using :func:`write`:: | ||||
|  | ||||
| @@ -80,5 +70,5 @@ Replacing files | ||||
| Files can be replaced with the :func:`replace` method. This works just like | ||||
| the :func:`put` method so even metadata can (and should) be replaced:: | ||||
|  | ||||
|     another_marmot = open('another_marmot.png', 'r') | ||||
|     another_marmot = open('another_marmot.png', 'rb') | ||||
|     marmot.photo.replace(another_marmot, content_type='image/png') | ||||
|   | ||||
| @@ -22,10 +22,10 @@ Alternatively, if you don't have setuptools installed, `download it from PyPi | ||||
|     $ python setup.py install | ||||
|  | ||||
| To use the bleeding-edge version of MongoEngine, you can get the source from | ||||
| `GitHub <http://github.com/hmarr/mongoengine/>`_ and install it as above: | ||||
| `GitHub <http://github.com/mongoengine/mongoengine/>`_ and install it as above: | ||||
|  | ||||
| .. code-block:: console | ||||
|  | ||||
|     $ git clone git://github.com/hmarr/mongoengine | ||||
|     $ git clone git://github.com/mongoengine/mongoengine | ||||
|     $ cd mongoengine | ||||
|     $ python setup.py install | ||||
|   | ||||
| @@ -15,11 +15,10 @@ fetch documents from the database:: | ||||
|  | ||||
| .. note:: | ||||
|  | ||||
|    Once the iteration finishes (when :class:`StopIteration` is raised), | ||||
|    :meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the | ||||
|    :class:`~mongoengine.queryset.QuerySet` may be iterated over again. The | ||||
|    results of the first iteration are *not* cached, so the database will be hit | ||||
|    each time the :class:`~mongoengine.queryset.QuerySet` is iterated over. | ||||
|     As of MongoEngine 0.8 the querysets utilise a local cache.  So iterating | ||||
|     it multiple times will only cause a single query.  If this is not the | ||||
|     desired behavour you can call :class:`~mongoengine.QuerySet.no_cache` | ||||
|     (version **0.8.3+**) to return a non-caching queryset. | ||||
|  | ||||
| Filtering queries | ||||
| ================= | ||||
| @@ -65,6 +64,9 @@ Available operators are as follows: | ||||
| * ``size`` -- the size of the array is | ||||
| * ``exists`` -- value for field exists | ||||
|  | ||||
| String queries | ||||
| -------------- | ||||
|  | ||||
| The following operators are available as shortcuts to querying with regular | ||||
| expressions: | ||||
|  | ||||
| @@ -78,8 +80,71 @@ expressions: | ||||
| * ``iendswith`` -- string field ends with value (case insensitive) | ||||
| * ``match``  -- performs an $elemMatch so you can match an entire document within an array | ||||
|  | ||||
| There are a few special operators for performing geographical queries, that | ||||
| may used with :class:`~mongoengine.GeoPointField`\ s: | ||||
|  | ||||
| Geo queries | ||||
| ----------- | ||||
|  | ||||
| There are a few special operators for performing geographical queries. The following | ||||
| were added in 0.8 for:  :class:`~mongoengine.fields.PointField`, | ||||
| :class:`~mongoengine.fields.LineStringField` and | ||||
| :class:`~mongoengine.fields.PolygonField`: | ||||
|  | ||||
| * ``geo_within`` -- Check if a geometry is within a polygon.  For ease of use | ||||
|     it accepts either a geojson geometry or just the polygon coordinates eg:: | ||||
|  | ||||
|         loc.objects(point__geo_within=[[[40, 5], [40, 6], [41, 6], [40, 5]]]) | ||||
|         loc.objects(point__geo_within={"type": "Polygon", | ||||
|                                  "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) | ||||
|  | ||||
| * ``geo_within_box`` - simplified geo_within searching with a box eg:: | ||||
|  | ||||
|         loc.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)]) | ||||
|         loc.objects(point__geo_within_box=[<bottom left coordinates>, <upper right coordinates>]) | ||||
|  | ||||
| * ``geo_within_polygon`` -- simplified geo_within searching within a simple polygon eg:: | ||||
|  | ||||
|         loc.objects(point__geo_within_polygon=[[40, 5], [40, 6], [41, 6], [40, 5]]) | ||||
|         loc.objects(point__geo_within_polygon=[ [ <x1> , <y1> ] , | ||||
|                                                 [ <x2> , <y2> ] , | ||||
|                                                 [ <x3> , <y3> ] ]) | ||||
|  | ||||
| * ``geo_within_center`` -- simplified geo_within the flat circle radius of a point eg:: | ||||
|  | ||||
|         loc.objects(point__geo_within_center=[(-125.0, 35.0), 1]) | ||||
|         loc.objects(point__geo_within_center=[ [ <x>, <y> ] , <radius> ]) | ||||
|  | ||||
| * ``geo_within_sphere`` -- simplified geo_within the spherical circle radius of a point eg:: | ||||
|  | ||||
|         loc.objects(point__geo_within_sphere=[(-125.0, 35.0), 1]) | ||||
|         loc.objects(point__geo_within_sphere=[ [ <x>, <y> ] , <radius> ]) | ||||
|  | ||||
| * ``geo_intersects`` -- selects all locations that intersect with a geometry eg:: | ||||
|  | ||||
|         # Inferred from provided points lists: | ||||
|         loc.objects(poly__geo_intersects=[40, 6]) | ||||
|         loc.objects(poly__geo_intersects=[[40, 5], [40, 6]]) | ||||
|         loc.objects(poly__geo_intersects=[[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]) | ||||
|  | ||||
|         # With geoJson style objects | ||||
|         loc.objects(poly__geo_intersects={"type": "Point", "coordinates": [40, 6]}) | ||||
|         loc.objects(poly__geo_intersects={"type": "LineString", | ||||
|                                           "coordinates": [[40, 5], [40, 6]]}) | ||||
|         loc.objects(poly__geo_intersects={"type": "Polygon", | ||||
|                                           "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}) | ||||
|  | ||||
| * ``near`` -- Find all the locations near a given point:: | ||||
|  | ||||
|         loc.objects(point__near=[40, 5]) | ||||
|         loc.objects(point__near={"type": "Point", "coordinates": [40, 5]}) | ||||
|  | ||||
|  | ||||
|     You can also set the maximum distance in meters as well:: | ||||
|  | ||||
|         loc.objects(point__near=[40, 5], point__max_distance=1000) | ||||
|  | ||||
|  | ||||
| The older 2D indexes are still supported with the | ||||
| :class:`~mongoengine.fields.GeoPointField`: | ||||
|  | ||||
| * ``within_distance`` -- provide a list containing a point and a maximum | ||||
|   distance (e.g. [(41.342, -87.653), 5]) | ||||
| @@ -91,14 +156,18 @@ may used with :class:`~mongoengine.GeoPointField`\ s: | ||||
|   [(35.0, -125.0), (40.0, -100.0)]) | ||||
| * ``within_polygon`` -- filter documents to those within a given polygon (e.g. | ||||
|   [(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]). | ||||
|  | ||||
|   .. note:: Requires Mongo Server 2.0 | ||||
|  | ||||
| * ``max_distance`` -- can be added to your location queries to set a maximum | ||||
|   distance. | ||||
|  | ||||
|  | ||||
| Querying lists | ||||
| -------------- | ||||
| On most fields, this syntax will look up documents where the field specified | ||||
| matches the given value exactly, but when the field refers to a | ||||
| :class:`~mongoengine.ListField`, a single item may be provided, in which case | ||||
| :class:`~mongoengine.fields.ListField`, a single item may be provided, in which case | ||||
| lists that contain that item will be matched:: | ||||
|  | ||||
|     class Page(Document): | ||||
| @@ -179,9 +248,11 @@ Retrieving unique results | ||||
| ------------------------- | ||||
| To retrieve a result that should be unique in the collection, use | ||||
| :meth:`~mongoengine.queryset.QuerySet.get`. This will raise | ||||
| :class:`~mongoengine.queryset.DoesNotExist` if no document matches the query, | ||||
| and :class:`~mongoengine.queryset.MultipleObjectsReturned` if more than one | ||||
| document matched the query. | ||||
| :class:`~mongoengine.queryset.DoesNotExist` if | ||||
| no document matches the query, and | ||||
| :class:`~mongoengine.queryset.MultipleObjectsReturned` | ||||
| if more than one document matched the query.  These exceptions are merged into | ||||
your document definitions eg: `MyDoc.DoesNotExist` | ||||
|  | ||||
| A variation of this method exists, | ||||
| :meth:`~mongoengine.queryset.Queryset.get_or_create`, that will create a new | ||||
| @@ -315,7 +386,7 @@ Retrieving a subset of fields | ||||
| Sometimes a subset of fields on a :class:`~mongoengine.Document` is required, | ||||
| and for efficiency only these should be retrieved from the database. This issue | ||||
| is especially important for MongoDB, as fields may often be extremely large | ||||
| (e.g. a :class:`~mongoengine.ListField` of | ||||
| (e.g. a :class:`~mongoengine.fields.ListField` of | ||||
| :class:`~mongoengine.EmbeddedDocument`\ s, which represent the comments on a | ||||
| blog post. To select only a subset of fields, use | ||||
| :meth:`~mongoengine.queryset.QuerySet.only`, specifying the fields you want to | ||||
| @@ -347,14 +418,14 @@ If you later need the missing fields, just call | ||||
| Getting related data | ||||
| -------------------- | ||||
|  | ||||
| When iterating the results of :class:`~mongoengine.ListField` or | ||||
| :class:`~mongoengine.DictField` we automatically dereference any | ||||
| When iterating the results of :class:`~mongoengine.fields.ListField` or | ||||
| :class:`~mongoengine.fields.DictField` we automatically dereference any | ||||
| :class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the | ||||
| number the queries to mongo. | ||||
|  | ||||
| There are times when that efficiency is not enough, documents that have | ||||
| :class:`~mongoengine.ReferenceField` objects or | ||||
| :class:`~mongoengine.GenericReferenceField` objects at the top level are | ||||
| :class:`~mongoengine.fields.ReferenceField` objects or | ||||
| :class:`~mongoengine.fields.GenericReferenceField` objects at the top level are | ||||
| expensive as the number of queries to MongoDB can quickly rise. | ||||
|  | ||||
| To limit the number of queries use | ||||
| @@ -365,8 +436,30 @@ references to the depth of 1 level.  If you have more complicated documents and | ||||
| want to dereference more of the object at once then increasing the :attr:`max_depth` | ||||
| will dereference more levels of the document. | ||||
|  | ||||
| Turning off dereferencing | ||||
| ------------------------- | ||||
|  | ||||
| Sometimes for performance reasons you don't want to automatically dereference | ||||
| data. To turn off dereferencing of the results of a query use | ||||
| :func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so:: | ||||
|  | ||||
|     post = Post.objects.no_dereference().first() | ||||
|     assert(isinstance(post.author, ObjectId)) | ||||
|  | ||||
| You can also turn off all dereferencing for a fixed period by using the | ||||
| :class:`~mongoengine.context_managers.no_dereference` context manager:: | ||||
|  | ||||
|     with no_dereference(Post) as Post: | ||||
|         post = Post.objects.first() | ||||
|         assert(isinstance(post.author, ObjectId)) | ||||
|  | ||||
|     # Outside the context manager dereferencing occurs. | ||||
|     assert(isinstance(post.author, User)) | ||||
|  | ||||
|  | ||||
| Advanced queries | ||||
| ================ | ||||
|  | ||||
| Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword | ||||
| arguments can't fully express the query you want to use -- for example if you | ||||
| need to combine a number of constraints using *and* and *or*. This is made | ||||
| @@ -385,6 +478,11 @@ calling it with keyword arguments:: | ||||
|     # Get top posts | ||||
|     Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000)) | ||||
|  | ||||
| .. warning:: You have to use bitwise operators.  You cannot use ``or``, ``and`` | ||||
|     to combine queries as ``Q(a=a) or Q(b=b)`` is not the same as | ||||
|     ``Q(a=a) | Q(b=b)``. As ``Q(a=a)`` equates to true ``Q(a=a) or Q(b=b)`` is | ||||
|     the same as ``Q(a=a)``. | ||||
|  | ||||
| .. _guide-atomic-updates: | ||||
|  | ||||
| Atomic updates | ||||
| @@ -399,7 +497,6 @@ that you may use with these methods: | ||||
| * ``unset`` -- delete a particular value (since MongoDB v1.3+) | ||||
| * ``inc`` -- increment a value by a given amount | ||||
| * ``dec`` -- decrement a value by a given amount | ||||
| * ``pop`` -- remove the last item from a list | ||||
| * ``push`` -- append a value to a list | ||||
| * ``push_all`` -- append several values to a list | ||||
| * ``pop`` -- remove the first or last element of a list | ||||
| @@ -425,7 +522,7 @@ modifier comes before the field, not after it:: | ||||
|     >>> post.tags | ||||
|     ['database', 'nosql'] | ||||
|  | ||||
| .. note :: | ||||
| .. note:: | ||||
|  | ||||
|     In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates | ||||
|     on changed documents by tracking changes to that document. | ||||
| @@ -441,7 +538,7 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`:: | ||||
|     >>> post.tags | ||||
|     ['database', 'mongodb'] | ||||
|  | ||||
| .. note :: | ||||
| .. note:: | ||||
|     Currently only top level lists are handled, future versions of mongodb / | ||||
|     pymongo plan to support nested positional operators.  See `The $ positional | ||||
|     operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_. | ||||
| @@ -510,7 +607,7 @@ Javascript code. When accessing a field on a collection object, use | ||||
| square-bracket notation, and prefix the MongoEngine field name with a tilde. | ||||
| The field name that follows the tilde will be translated to the name used in | ||||
| the database. Note that when referring to fields on embedded documents, | ||||
| the name of the :class:`~mongoengine.EmbeddedDocumentField`, followed by a dot, | ||||
| the name of the :class:`~mongoengine.fields.EmbeddedDocumentField`, followed by a dot, | ||||
| should be used before the name of the field on the embedded document. The | ||||
| following example shows how the substitutions are made:: | ||||
|  | ||||
|   | ||||
| @@ -1,5 +1,6 @@ | ||||
| .. _signals: | ||||
|  | ||||
| ======= | ||||
| Signals | ||||
| ======= | ||||
|  | ||||
| @@ -7,32 +8,95 @@ Signals | ||||
|  | ||||
| .. note:: | ||||
|  | ||||
|   Signal support is provided by the excellent `blinker`_ library and | ||||
|   will gracefully fall back if it is not available. | ||||
|   Signal support is provided by the excellent `blinker`_ library. If you wish | ||||
|   to enable signal support this library must be installed, though it is not | ||||
|   required for MongoEngine to function. | ||||
|  | ||||
| Overview | ||||
| -------- | ||||
|  | ||||
| The following document signals exist in MongoEngine and are pretty self-explanatory: | ||||
| Signals are found within the `mongoengine.signals` module.  Unless | ||||
| specified, signals receive no additional arguments beyond the `sender` class and | ||||
| `document` instance.  Post-signals are only called if there were no exceptions | ||||
| raised during the processing of their related function. | ||||
|  | ||||
|   * `mongoengine.signals.pre_init` | ||||
|   * `mongoengine.signals.post_init` | ||||
|   * `mongoengine.signals.pre_save` | ||||
|   * `mongoengine.signals.post_save` | ||||
|   * `mongoengine.signals.pre_delete` | ||||
|   * `mongoengine.signals.post_delete` | ||||
|   * `mongoengine.signals.pre_bulk_insert` | ||||
|   * `mongoengine.signals.post_bulk_insert` | ||||
| Available signals include: | ||||
|  | ||||
| Example usage:: | ||||
| `pre_init` | ||||
|   Called during the creation of a new :class:`~mongoengine.Document` or | ||||
|   :class:`~mongoengine.EmbeddedDocument` instance, after the constructor | ||||
|   arguments have been collected but before any additional processing has been | ||||
|   done to them.  (I.e. assignment of default values.)  Handlers for this signal | ||||
|   are passed the dictionary of arguments using the `values` keyword argument | ||||
|   and may modify this dictionary prior to returning. | ||||
|  | ||||
| `post_init` | ||||
|   Called after all processing of a new :class:`~mongoengine.Document` or | ||||
|   :class:`~mongoengine.EmbeddedDocument` instance has been completed. | ||||
|  | ||||
| `pre_save` | ||||
|   Called within :meth:`~mongoengine.document.Document.save` prior to performing | ||||
|   any actions. | ||||
|  | ||||
| `pre_save_post_validation` | ||||
|   Called within :meth:`~mongoengine.document.Document.save` after validation | ||||
|   has taken place but before saving. | ||||
|  | ||||
| `post_save` | ||||
|   Called within :meth:`~mongoengine.document.Document.save` after all actions | ||||
|   (validation, insert/update, cascades, clearing dirty flags) have completed | ||||
|   successfully.  Passed the additional boolean keyword argument `created` to | ||||
|   indicate if the save was an insert or an update. | ||||
|  | ||||
| `pre_delete` | ||||
|   Called within :meth:`~mongoengine.document.Document.delete` prior to | ||||
|   attempting the delete operation. | ||||
|  | ||||
| `post_delete` | ||||
|   Called within :meth:`~mongoengine.document.Document.delete` upon successful | ||||
|   deletion of the record. | ||||
|  | ||||
| `pre_bulk_insert` | ||||
|   Called after validation of the documents to insert, but prior to any data | ||||
|   being written. In this case, the `document` argument is replaced by a | ||||
|   `documents` argument representing the list of documents being inserted. | ||||
|  | ||||
| `post_bulk_insert` | ||||
|   Called after a successful bulk insert operation.  As per `pre_bulk_insert`, | ||||
|   the `document` argument is omitted and replaced with a `documents` argument. | ||||
|   An additional boolean argument, `loaded`, identifies the contents of | ||||
|   `documents` as either :class:`~mongoengine.Document` instances when `True` or | ||||
|   simply a list of primary key values for the inserted records if `False`. | ||||
|  | ||||
| Attaching Events | ||||
| ---------------- | ||||
|  | ||||
| After writing a handler function like the following:: | ||||
|  | ||||
|     import logging | ||||
|     from datetime import datetime | ||||
|  | ||||
|     from mongoengine import * | ||||
|     from mongoengine import signals | ||||
|  | ||||
|     def update_modified(sender, document): | ||||
|         document.modified = datetime.utcnow() | ||||
|  | ||||
| You attach the event handler to your :class:`~mongoengine.Document` or | ||||
| :class:`~mongoengine.EmbeddedDocument` subclass:: | ||||
|  | ||||
|     class Record(Document): | ||||
|         modified = DateTimeField() | ||||
|  | ||||
|     signals.pre_save.connect(update_modified) | ||||
|  | ||||
| While this is not the most elaborate document model, it does demonstrate the | ||||
| concepts involved.  As a more complete demonstration you can also define your | ||||
| handlers within your subclass:: | ||||
|  | ||||
|     class Author(Document): | ||||
|         name = StringField() | ||||
|  | ||||
|         def __unicode__(self): | ||||
|             return self.name | ||||
|  | ||||
|         @classmethod | ||||
|         def pre_save(cls, sender, document, **kwargs): | ||||
|             logging.debug("Pre Save: %s" % document.name) | ||||
| @@ -49,12 +113,40 @@ Example usage:: | ||||
|     signals.pre_save.connect(Author.pre_save, sender=Author) | ||||
|     signals.post_save.connect(Author.post_save, sender=Author) | ||||
|  | ||||
| Finally, you can also use this small decorator to quickly create a number of | ||||
| signals and attach them to your :class:`~mongoengine.Document` or | ||||
| :class:`~mongoengine.EmbeddedDocument` subclasses as class decorators:: | ||||
|  | ||||
| ReferenceFields and signals | ||||
|     def handler(event): | ||||
|         """Signal decorator to allow use of callback functions as class decorators.""" | ||||
|  | ||||
|         def decorator(fn): | ||||
|             def apply(cls): | ||||
|                 event.connect(fn, sender=cls) | ||||
|                 return cls | ||||
|  | ||||
|             fn.apply = apply | ||||
|             return fn | ||||
|  | ||||
|         return decorator | ||||
|  | ||||
| Using the first example of updating a modification time the code is now much | ||||
| cleaner looking while still allowing manual execution of the callback:: | ||||
|  | ||||
|     @handler(signals.pre_save) | ||||
|     def update_modified(sender, document): | ||||
|         document.modified = datetime.utcnow() | ||||
|  | ||||
|     @update_modified.apply | ||||
|     class Record(Document): | ||||
|         modified = DateTimeField() | ||||
|  | ||||
|  | ||||
| ReferenceFields and Signals | ||||
| --------------------------- | ||||
|  | ||||
| Currently `reverse_delete_rules` do not trigger signals on the other part of | ||||
| the relationship.  If this is required you must manually handled the | ||||
| the relationship.  If this is required you must manually handle the | ||||
| reverse deletion. | ||||
|  | ||||
| .. _blinker: http://pypi.python.org/pypi/blinker | ||||
|   | ||||
| @@ -7,16 +7,18 @@ MongoDB. To install it, simply run | ||||
|  | ||||
| .. code-block:: console | ||||
|  | ||||
|     # pip install -U mongoengine | ||||
|     $ pip install -U mongoengine | ||||
|  | ||||
| :doc:`tutorial` | ||||
|   Start here for a quick overview. | ||||
|   A quick tutorial building a tumblelog to get you up and running with | ||||
|   MongoEngine. | ||||
|  | ||||
| :doc:`guide/index` | ||||
|   The Full guide to MongoEngine | ||||
|   The Full guide to MongoEngine - from modeling documents to storing files, | ||||
|   from querying for data to firing signals and *everything* between. | ||||
|  | ||||
| :doc:`apireference` | ||||
|   The complete API documentation. | ||||
|   The complete API documentation --- the innards of documents, querysets and fields. | ||||
|  | ||||
| :doc:`upgrade` | ||||
|   How to upgrade MongoEngine. | ||||
| @@ -28,35 +30,50 @@ Community | ||||
| --------- | ||||
|  | ||||
| To get help with using MongoEngine, use the `MongoEngine Users mailing list | ||||
| <http://groups.google.com/group/mongoengine-users>`_ or come chat on the | ||||
| `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_. | ||||
| <http://groups.google.com/group/mongoengine-users>`_ or the ever popular | ||||
| `stackoverflow <http://www.stackoverflow.com>`_. | ||||
|  | ||||
| Contributing | ||||
| ------------ | ||||
|  | ||||
| The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ and | ||||
| contributions are always encouraged. Contributions can be as simple as | ||||
| minor tweaks to this documentation. To contribute, fork the project on | ||||
| **Yes please!**  We are always looking for contributions, additions and improvements. | ||||
|  | ||||
| The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ | ||||
| and contributions are always encouraged. Contributions can be as simple as | ||||
| minor tweaks to this documentation, the website or the core. | ||||
|  | ||||
| To contribute, fork the project on | ||||
| `GitHub <http://github.com/MongoEngine/mongoengine>`_ and send a | ||||
| pull request. | ||||
|  | ||||
| Also, you can join the developers' `mailing list | ||||
| <http://groups.google.com/group/mongoengine-dev>`_. | ||||
|  | ||||
| Changes | ||||
| ------- | ||||
|  | ||||
| See the :doc:`changelog` for a full list of changes to MongoEngine and | ||||
| :doc:`upgrade` for upgrade information. | ||||
|  | ||||
| .. toctree:: | ||||
|    :hidden: | ||||
| .. note::  Always read and test the `upgrade <upgrade>`_ documentation before | ||||
|     putting updates live in production **;)** | ||||
|  | ||||
|    tutorial | ||||
|    guide/index | ||||
|    apireference | ||||
|    django | ||||
|    changelog | ||||
|    upgrade | ||||
| Offline Reading | ||||
| --------------- | ||||
|  | ||||
| Download the docs in `pdf <https://media.readthedocs.org/pdf/mongoengine-odm/latest/mongoengine-odm.pdf>`_ | ||||
| or `epub <https://media.readthedocs.org/epub/mongoengine-odm/latest/mongoengine-odm.epub>`_ | ||||
| formats for offline reading. | ||||
|  | ||||
|  | ||||
| .. toctree:: | ||||
|     :maxdepth: 1 | ||||
|     :numbered: | ||||
|     :hidden: | ||||
|  | ||||
|     tutorial | ||||
|     guide/index | ||||
|     apireference | ||||
|     changelog | ||||
|     upgrade | ||||
|     django | ||||
|  | ||||
| Indices and tables | ||||
| ------------------ | ||||
|   | ||||
| @@ -1,6 +1,7 @@ | ||||
| ======== | ||||
| Tutorial | ||||
| ======== | ||||
|  | ||||
| This tutorial introduces **MongoEngine** by means of example --- we will walk | ||||
| through how to create a simple **Tumblelog** application. A Tumblelog is a type | ||||
| of blog where posts are not constrained to being conventional text-based posts. | ||||
| @@ -12,23 +13,29 @@ interface. | ||||
|  | ||||
| Getting started | ||||
| =============== | ||||
|  | ||||
| Before we start, make sure that a copy of MongoDB is running in an accessible | ||||
| location --- running it locally will be easier, but if that is not an option | ||||
| then it may be run on a remote server. | ||||
| then it may be run on a remote server. If you haven't installed MongoEngine, | ||||
| simply use pip to install it like so:: | ||||
|  | ||||
|     $ pip install mongoengine | ||||
|  | ||||
| Before we can start using MongoEngine, we need to tell it how to connect to our | ||||
| instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect` | ||||
| function. The only argument we need to provide is the name of the MongoDB | ||||
| database to use:: | ||||
| function. If running locally the only argument we need to provide is the name | ||||
| of the MongoDB database to use:: | ||||
|  | ||||
|     from mongoengine import * | ||||
|  | ||||
|     connect('tumblelog') | ||||
|  | ||||
| For more information about connecting to MongoDB see :ref:`guide-connecting`. | ||||
| There are lots of options for connecting to MongoDB, for more information about | ||||
| them see the :ref:`guide-connecting` guide. | ||||
|  | ||||
| Defining our documents | ||||
| ====================== | ||||
|  | ||||
| MongoDB is *schemaless*, which means that no schema is enforced by the database | ||||
| --- we may add and remove fields however we want and MongoDB won't complain. | ||||
| This makes life a lot easier in many regards, especially when there is a change | ||||
| @@ -39,17 +46,19 @@ define utility methods on our documents in the same way that traditional | ||||
|  | ||||
| In our Tumblelog application we need to store several different types of | ||||
| information.  We will need to have a collection of **users**, so that we may | ||||
| link posts to an individual. We also need to store our different types | ||||
| **posts** (text, image and link) in the database. To aid navigation of our | ||||
| link posts to an individual. We also need to store our different types of | ||||
| **posts** (e.g. text, image and link) in the database. To aid navigation of our | ||||
| Tumblelog, posts may have **tags** associated with them, so that the list of | ||||
| posts shown to the user may be limited to posts that have been assigned a | ||||
| specified tag.  Finally, it would be nice if **comments** could be added to | ||||
| posts. We'll start with **users**, as the others are slightly more involved. | ||||
| specific tag.  Finally, it would be nice if **comments** could be added to | ||||
| posts. We'll start with **users**, as the other document models are slightly | ||||
| more involved. | ||||
|  | ||||
| Users | ||||
| ----- | ||||
|  | ||||
| Just as if we were using a relational database with an ORM, we need to define | ||||
| which fields a :class:`User` may have, and what their types will be:: | ||||
| which fields a :class:`User` may have, and what types of data they might store:: | ||||
|  | ||||
|     class User(Document): | ||||
|         email = StringField(required=True) | ||||
| @@ -58,11 +67,13 @@ which fields a :class:`User` may have, and what their types will be:: | ||||
|  | ||||
| This looks similar to how a the structure of a table would be defined in a | ||||
| regular ORM. The key difference is that this schema will never be passed on to | ||||
| MongoDB --- this will only be enforced at the application level. Also, the User | ||||
| documents will be stored in a MongoDB *collection* rather than a table. | ||||
| MongoDB --- this will only be enforced at the application level, making future | ||||
| changes easy to manage. Also, the User documents will be stored in a | ||||
| MongoDB *collection* rather than a table. | ||||
|  | ||||
| Posts, Comments and Tags | ||||
| ------------------------ | ||||
|  | ||||
| Now we'll think about how to store the rest of the information. If we were | ||||
| using a relational database, we would most likely have a table of **posts**, a | ||||
| table of **comments** and a table of **tags**.  To associate the comments with | ||||
| @@ -75,21 +86,25 @@ of them stand out as particularly intuitive solutions. | ||||
|  | ||||
| Posts | ||||
| ^^^^^ | ||||
| But MongoDB *isn't* a relational database, so we're not going to do it that | ||||
|  | ||||
| Happily, MongoDB *isn't* a relational database, so we're not going to do it that | ||||
| way. As it turns out, we can use MongoDB's schemaless nature to provide us with | ||||
| a much nicer solution. We will store all of the posts in *one collection* --- | ||||
| each post type will just have the fields it needs. If we later want to add | ||||
| a much nicer solution. We will store all of the posts in *one collection* and | ||||
| each post type will only store the fields it needs. If we later want to add | ||||
| video posts, we don't have to modify the collection at all, we just *start | ||||
| using* the new fields we need to support video posts. This fits with the | ||||
| Object-Oriented principle of *inheritance* nicely. We can think of | ||||
| :class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and | ||||
| :class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports | ||||
| this kind of modelling out of the box:: | ||||
| this kind of modelling out of the box --- all you need do is turn on inheritance | ||||
| by setting :attr:`allow_inheritance` to True in the :attr:`meta`:: | ||||
|  | ||||
|     class Post(Document): | ||||
|         title = StringField(max_length=120, required=True) | ||||
|         author = ReferenceField(User) | ||||
|  | ||||
|         meta = {'allow_inheritance': True} | ||||
|  | ||||
|     class TextPost(Post): | ||||
|         content = StringField() | ||||
|  | ||||
| @@ -100,12 +115,13 @@ this kind of modelling out of the box:: | ||||
|         link_url = StringField() | ||||
|  | ||||
| We are storing a reference to the author of the posts using a | ||||
| :class:`~mongoengine.ReferenceField` object. These are similar to foreign key | ||||
| :class:`~mongoengine.fields.ReferenceField` object. These are similar to foreign key | ||||
| fields in traditional ORMs, and are automatically translated into references | ||||
| when they are saved, and dereferenced when they are loaded. | ||||
|  | ||||
| Tags | ||||
| ^^^^ | ||||
|  | ||||
| Now that we have our Post models figured out, how will we attach tags to them? | ||||
| MongoDB allows us to store lists of items natively, so rather than having a | ||||
| link table, we can just store a list of tags in each post. So, for both | ||||
| @@ -121,13 +137,16 @@ size of our database. So let's take a look that the code our modified | ||||
|         author = ReferenceField(User) | ||||
|         tags = ListField(StringField(max_length=30)) | ||||
|  | ||||
| The :class:`~mongoengine.ListField` object that is used to define a Post's tags | ||||
| The :class:`~mongoengine.fields.ListField` object that is used to define a Post's tags | ||||
| takes a field object as its first argument --- this means that you can have | ||||
| lists of any type of field (including lists). Note that we don't need to | ||||
| modify the specialised post types as they all inherit from :class:`Post`. | ||||
| lists of any type of field (including lists). | ||||
|  | ||||
| .. note:: We don't need to modify the specialised post types as they all | ||||
|     inherit from :class:`Post`. | ||||
|  | ||||
| Comments | ||||
| ^^^^^^^^ | ||||
|  | ||||
| A comment is typically associated with *one* post. In a relational database, to | ||||
| display a post with its comments, we would have to retrieve the post from the | ||||
| database, then query the database again for the comments associated with the | ||||
| @@ -155,7 +174,7 @@ We can then store a list of comment documents in our post document:: | ||||
| Handling deletions of references | ||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ||||
|  | ||||
| The :class:`~mongoengine.ReferenceField` object takes a keyword | ||||
| The :class:`~mongoengine.fields.ReferenceField` object takes a keyword | ||||
| `reverse_delete_rule` for handling deletion rules if the reference is deleted. | ||||
| To delete all the posts if a user is deleted set the rule:: | ||||
|  | ||||
| @@ -165,9 +184,9 @@ To delete all the posts if a user is deleted set the rule:: | ||||
|         tags = ListField(StringField(max_length=30)) | ||||
|         comments = ListField(EmbeddedDocumentField(Comment)) | ||||
|  | ||||
| See :class:`~mongoengine.ReferenceField` for more information. | ||||
| See :class:`~mongoengine.fields.ReferenceField` for more information. | ||||
|  | ||||
| ..note:: | ||||
| .. note:: | ||||
|     MapFields and DictFields currently don't support automatic handling of | ||||
|     deleted references | ||||
|  | ||||
| @@ -178,15 +197,15 @@ Now that we've defined how our documents will be structured, let's start adding | ||||
| some documents to the database. Firstly, we'll need to create a :class:`User` | ||||
| object:: | ||||
|  | ||||
|     john = User(email='jdoe@example.com', first_name='John', last_name='Doe') | ||||
|     john.save() | ||||
|     ross = User(email='ross@example.com', first_name='Ross', last_name='Lawley').save() | ||||
|  | ||||
| Note that we could have also defined our user using attribute syntax:: | ||||
| .. note:: | ||||
|     We could have also defined our user using attribute syntax:: | ||||
|  | ||||
|     john = User(email='jdoe@example.com') | ||||
|     john.first_name = 'John' | ||||
|     john.last_name = 'Doe' | ||||
|     john.save() | ||||
|         ross = User(email='ross@example.com') | ||||
|         ross.first_name = 'Ross' | ||||
|         ross.last_name = 'Lawley' | ||||
|         ross.save() | ||||
|  | ||||
| Now that we've got our user in the database, let's add a couple of posts:: | ||||
|  | ||||
| @@ -195,16 +214,17 @@ Now that we've got our user in the database, let's add a couple of posts:: | ||||
|     post1.tags = ['mongodb', 'mongoengine'] | ||||
|     post1.save() | ||||
|  | ||||
|     post2 = LinkPost(title='MongoEngine Documentation', author=john) | ||||
|     post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs' | ||||
|     post2 = LinkPost(title='MongoEngine Documentation', author=ross) | ||||
|     post2.link_url = 'http://docs.mongoengine.com/' | ||||
|     post2.tags = ['mongoengine'] | ||||
|     post2.save() | ||||
|  | ||||
| Note that if you change a field on a object that has already been saved, then | ||||
| call :meth:`save` again, the document will be updated. | ||||
| .. note:: If you change a field on an object that has already been saved, then | ||||
|     call :meth:`save` again, the document will be updated. | ||||
|  | ||||
| Accessing our data | ||||
| ================== | ||||
|  | ||||
| So now we've got a couple of posts in our database, how do we display them? | ||||
| Each document class (i.e. any class that inherits either directly or indirectly | ||||
| from :class:`~mongoengine.Document`) has an :attr:`objects` attribute, which is | ||||
| @@ -216,6 +236,7 @@ class. So let's see how we can get our posts' titles:: | ||||
|  | ||||
| Retrieving type-specific information | ||||
| ------------------------------------ | ||||
|  | ||||
| This will print the titles of our posts, one on each line. But what if we want | ||||
| to access the type-specific data (link_url, content, etc.)? One way is simply | ||||
| to use the :attr:`objects` attribute of a subclass of :class:`Post`:: | ||||
| @@ -254,6 +275,7 @@ text post, and "Link: <url>" if it was a link post. | ||||
|  | ||||
| Searching our posts by tag | ||||
| -------------------------- | ||||
|  | ||||
| The :attr:`objects` attribute of a :class:`~mongoengine.Document` is actually a | ||||
| :class:`~mongoengine.queryset.QuerySet` object. This lazily queries the | ||||
| database only when you need the data. It may also be filtered to narrow down | ||||
| @@ -272,3 +294,9 @@ used on :class:`~mongoengine.queryset.QuerySet` objects:: | ||||
|     num_posts = Post.objects(tags='mongodb').count() | ||||
|     print 'Found %d posts with tag "mongodb"' % num_posts | ||||
|  | ||||
| Learning more about MongoEngine | ||||
| ------------------------------- | ||||
|  | ||||
| If you got this far you've made a great start, so well done!  The next step on | ||||
| your mongoengine journey is the `full user guide <guide/index.html>`_, where you | ||||
| can learn in depth about how to use MongoEngine and MongoDB. | ||||
|   | ||||
							
								
								
									
										389
									
								
								docs/upgrade.rst
									
									
									
									
									
								
							
							
						
						
									
										389
									
								
								docs/upgrade.rst
									
									
									
									
									
								
							| @@ -1,12 +1,361 @@ | ||||
| ========= | ||||
| ######### | ||||
| Upgrading | ||||
| ========= | ||||
| ######### | ||||
|  | ||||
| 0.6 to 0.7 | ||||
| 0.8.7 | ||||
| ***** | ||||
|  | ||||
| Calling reload on deleted / nonexistent documents now raises a DoesNotExist | ||||
| exception. | ||||
|  | ||||
|  | ||||
| 0.8.2 to 0.8.3 | ||||
| ************** | ||||
|  | ||||
| Minor change that may impact users: | ||||
|  | ||||
| DynamicDocument fields are now stored in creation order after any declared | ||||
| fields.  Previously they were stored alphabetically. | ||||
|  | ||||
|  | ||||
| 0.7 to 0.8 | ||||
| ********** | ||||
|  | ||||
| There have been numerous backwards breaking changes in 0.8.  The reasons for | ||||
| these are to ensure that MongoEngine has sane defaults going forward and that it | ||||
| performs the best it can out of the box.  Where possible there have been | ||||
| FutureWarnings to help get you ready for the change, but that hasn't been | ||||
| possible for the whole of the release. | ||||
|  | ||||
| .. warning:: Breaking changes - test upgrading on a test system before putting | ||||
| live. There may be multiple manual steps in migrating and these are best honed | ||||
|     on a staging / test system. | ||||
|  | ||||
| Python and PyMongo | ||||
| ================== | ||||
|  | ||||
| MongoEngine requires python 2.6 (or above) and pymongo 2.5 (or above) | ||||
|  | ||||
| Data Model | ||||
| ========== | ||||
|  | ||||
| Inheritance | ||||
| ----------- | ||||
|  | ||||
| The inheritance model has changed: we no longer need to store an array of | ||||
| :attr:`types` with the model; we can just use the class name in :attr:`_cls`. | ||||
| This means that you will have to update your indexes for each of your | ||||
| inherited classes like so: :: | ||||
|  | ||||
|     # 1. Declaration of the class | ||||
|     class Animal(Document): | ||||
|         name = StringField() | ||||
|         meta = { | ||||
|             'allow_inheritance': True, | ||||
|             'indexes': ['name'] | ||||
|         } | ||||
|  | ||||
|     # 2. Remove _types | ||||
|     collection = Animal._get_collection() | ||||
|     collection.update({}, {"$unset": {"_types": 1}}, multi=True) | ||||
|  | ||||
|     # 3. Confirm extra data is removed | ||||
|     count = collection.find({'_types': {"$exists": True}}).count() | ||||
|     assert count == 0 | ||||
|  | ||||
|     # 4. Remove indexes | ||||
|     info = collection.index_information() | ||||
|     indexes_to_drop = [key for key, value in info.iteritems() | ||||
|                        if '_types' in dict(value['key'])] | ||||
|     for index in indexes_to_drop: | ||||
|         collection.drop_index(index) | ||||
|  | ||||
|     # 5. Recreate indexes | ||||
|     Animal.ensure_indexes() | ||||
|  | ||||
|  | ||||
| Document Definition | ||||
| ------------------- | ||||
|  | ||||
| The default for inheritance has changed - it is now off by default and | ||||
| :attr:`_cls` will not be stored automatically with the class.  So if you extend | ||||
| your :class:`~mongoengine.Document` or :class:`~mongoengine.EmbeddedDocument` | ||||
| you will need to declare :attr:`allow_inheritance` in the meta data like so: :: | ||||
|  | ||||
|     class Animal(Document): | ||||
|         name = StringField() | ||||
|  | ||||
|         meta = {'allow_inheritance': True} | ||||
|  | ||||
| Previously, if you had data in the database that wasn't defined in the Document | ||||
| definition, it would set it as an attribute on the document.  This is no longer | ||||
| the case and the data is set only in the ``document._data`` dictionary: :: | ||||
|  | ||||
|     >>> from mongoengine import * | ||||
|     >>> class Animal(Document): | ||||
|     ...    name = StringField() | ||||
|     ... | ||||
|     >>> cat = Animal(name="kit", size="small") | ||||
|  | ||||
|     # 0.7 | ||||
|     >>> cat.size | ||||
|     u'small' | ||||
|  | ||||
|     # 0.8 | ||||
|     >>> cat.size | ||||
|     Traceback (most recent call last): | ||||
|       File "<stdin>", line 1, in <module> | ||||
|     AttributeError: 'Animal' object has no attribute 'size' | ||||
|  | ||||
| The Document class has introduced a reserved function `clean()`, which will be | ||||
| called before saving the document. If your document class happens to have a method | ||||
| with the same name, please try to rename it: :: | ||||
|  | ||||
|     def clean(self): | ||||
|         pass | ||||
|  | ||||
| ReferenceField | ||||
| -------------- | ||||
|  | ||||
| ReferenceFields now store ObjectIds by default - this is more efficient than | ||||
| DBRefs as we already know what Document types they reference:: | ||||
|  | ||||
|     # Old code | ||||
|     class Animal(Document): | ||||
|         name = ReferenceField('self') | ||||
|  | ||||
|     # New code to keep dbrefs | ||||
|     class Animal(Document): | ||||
|         name = ReferenceField('self', dbref=True) | ||||
|  | ||||
| To migrate all the references you need to touch each object and mark it as dirty | ||||
| eg:: | ||||
|  | ||||
|     # Doc definition | ||||
|     class Person(Document): | ||||
|         name = StringField() | ||||
|         parent = ReferenceField('self') | ||||
|         friends = ListField(ReferenceField('self')) | ||||
|  | ||||
|     # Mark all ReferenceFields as dirty and save | ||||
|     for p in Person.objects: | ||||
|         p._mark_as_changed('parent') | ||||
|         p._mark_as_changed('friends') | ||||
|         p.save() | ||||
|  | ||||
| `An example test migration for ReferenceFields is available on github | ||||
| <https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/refrencefield_dbref_to_object_id.py>`_. | ||||
|  | ||||
| .. Note:: Internally mongoengine handles ReferenceFields the same, so they are | ||||
|    converted to DBRef on loading and ObjectIds or DBRefs depending on settings | ||||
|    on storage. | ||||
|  | ||||
| UUIDField | ||||
| --------- | ||||
|  | ||||
| UUIDFields now default to storing binary values:: | ||||
|  | ||||
|     # Old code | ||||
|     class Animal(Document): | ||||
|         uuid = UUIDField() | ||||
|  | ||||
|     # New code | ||||
|     class Animal(Document): | ||||
|         uuid = UUIDField(binary=False) | ||||
|  | ||||
| To migrate all the uuids you need to touch each object and mark it as dirty | ||||
| eg:: | ||||
|  | ||||
|     # Doc definition | ||||
|     class Animal(Document): | ||||
|         uuid = UUIDField() | ||||
|  | ||||
|     # Mark all UUIDFields as dirty and save | ||||
|     for a in Animal.objects: | ||||
|         a._mark_as_changed('uuid') | ||||
|         a.save() | ||||
|  | ||||
| `An example test migration for UUIDFields is available on github | ||||
| <https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/uuidfield_to_binary.py>`_. | ||||
|  | ||||
| DecimalField | ||||
| ------------ | ||||
|  | ||||
| DecimalFields now store floats - previously it was storing strings and that | ||||
| made it impossible to do comparisons when querying correctly:: | ||||
|  | ||||
|     # Old code | ||||
|     class Person(Document): | ||||
|         balance = DecimalField() | ||||
|  | ||||
|     # New code | ||||
|     class Person(Document): | ||||
|         balance = DecimalField(force_string=True) | ||||
|  | ||||
| To migrate all the DecimalFields you need to touch each object and mark it as dirty | ||||
| eg:: | ||||
|  | ||||
|     # Doc definition | ||||
|     class Person(Document): | ||||
|         balance = DecimalField() | ||||
|  | ||||
|     # Mark all DecimalField's as dirty and save | ||||
|     for p in Person.objects: | ||||
|         p._mark_as_changed('balance') | ||||
|         p.save() | ||||
|  | ||||
| .. note:: DecimalFields have also been improved with the addition of precision | ||||
|     and rounding.  See :class:`~mongoengine.fields.DecimalField` for more information. | ||||
|  | ||||
| `An example test migration for DecimalFields is available on github | ||||
| <https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/decimalfield_as_float.py>`_. | ||||
|  | ||||
| Cascading Saves | ||||
| --------------- | ||||
| To improve performance document saves will no longer automatically cascade. | ||||
| Any changes to a Document's references will either have to be saved manually or | ||||
| you will have to explicitly tell it to cascade on save:: | ||||
|  | ||||
|     # At the class level: | ||||
|     class Person(Document): | ||||
|         meta = {'cascade': True} | ||||
|  | ||||
|     # Or on save: | ||||
|     my_document.save(cascade=True) | ||||
|  | ||||
| Storage | ||||
| ------- | ||||
|  | ||||
| Document and Embedded Documents are now serialized based on declared field order. | ||||
| Previously, the data was passed to mongodb as a dictionary, which meant that | ||||
| order wasn't guaranteed - so things like ``$addToSet`` operations on | ||||
| :class:`~mongoengine.EmbeddedDocument` could potentially fail in unexpected | ||||
| ways. | ||||
|  | ||||
| If this impacts you, you may want to rewrite the objects using the | ||||
| ``doc._mark_as_changed('field')`` pattern described above.  If you are using a | ||||
| compound primary key then you will need to ensure the order is fixed and match | ||||
| your EmbeddedDocument to that order. | ||||
|  | ||||
| Querysets | ||||
| ========= | ||||
|  | ||||
| Attack of the clones | ||||
| -------------------- | ||||
|  | ||||
| Querysets now return clones and should no longer be considered editable in | ||||
| place.  This brings us in line with how Django's querysets work and removes a | ||||
| long running gotcha.  If you edit your querysets inplace you will have to | ||||
| update your code like so: :: | ||||
|  | ||||
|     # Old code: | ||||
|     mammals = Animal.objects(type="mammal") | ||||
|     mammals.filter(order="Carnivora")       # Returns a cloned queryset that isn't assigned to anything - so this will break in 0.8 | ||||
|     [m for m in mammals]                    # This will return all mammals in 0.8 as the 2nd filter returned a new queryset | ||||
|  | ||||
|     # Update example a) assign queryset after a change: | ||||
|     mammals = Animal.objects(type="mammal") | ||||
|     carnivores = mammals.filter(order="Carnivora") # Reassign the new queryset so filter can be applied | ||||
|     [m for m in carnivores]                        # This will return all carnivores | ||||
|  | ||||
|     # Update example b) chain the queryset: | ||||
|     mammals = Animal.objects(type="mammal").filter(order="Carnivora")  # The final queryset is assigned to mammals | ||||
|     [m for m in mammals]                                               # This will return all carnivores | ||||
|  | ||||
| Len iterates the queryset | ||||
| -------------------------- | ||||
|  | ||||
| If you ever did `len(queryset)` it previously did a `count()` under the covers, | ||||
| this caused some unusual issues.  As `len(queryset)` is most often used by | ||||
| `list(queryset)` we now cache the queryset results and use that for the length. | ||||
|  | ||||
| This isn't as performant as a `count()` and if you aren't iterating the | ||||
| queryset you should upgrade to use count:: | ||||
|  | ||||
|     # Old code | ||||
|     len(Animal.objects(type="mammal")) | ||||
|  | ||||
|     # New code | ||||
|     Animal.objects(type="mammal").count() | ||||
|  | ||||
|  | ||||
| .only() now inline with .exclude() | ||||
| ---------------------------------- | ||||
|  | ||||
| The behaviour of `.only()` was highly ambiguous, now it works in mirror fashion | ||||
| to `.exclude()`.  Chaining `.only()` calls will increase the fields required:: | ||||
|  | ||||
|     # Old code | ||||
|     Animal.objects().only(['type', 'name']).only('name', 'order')  # Would have returned just `name` | ||||
|  | ||||
|     # New code | ||||
|     Animal.objects().only('name') | ||||
|  | ||||
|     # Note: | ||||
|     Animal.objects().only(['name']).only('order')  # Now returns `name` *and* `order` | ||||
|  | ||||
|  | ||||
| Client | ||||
| ====== | ||||
| PyMongo 2.4 came with a new connection client; MongoClient_ and started the | ||||
| deprecation of the old :class:`~pymongo.connection.Connection`. MongoEngine | ||||
| now uses the latest `MongoClient` for connections.  By default operations were | ||||
| `safe` but if you turned them off or used the connection directly this will | ||||
| impact your queries. | ||||
|  | ||||
| Querysets | ||||
| --------- | ||||
|  | ||||
| Safe | ||||
| ^^^^ | ||||
|  | ||||
| `safe` has been deprecated in the new MongoClient connection.  Please use | ||||
| `write_concern` instead.  As `safe` always defaulted to `True` normally no code | ||||
| change is required. To disable confirmation of the write just pass `{"w": 0}` | ||||
| eg: :: | ||||
|  | ||||
|    # Old | ||||
|    Animal(name="Dinosaur").save(safe=False) | ||||
|  | ||||
|    # new code: | ||||
|    Animal(name="Dinosaur").save(write_concern={"w": 0}) | ||||
|  | ||||
| Write Concern | ||||
| ^^^^^^^^^^^^^ | ||||
|  | ||||
| `write_options` has been replaced with `write_concern` to bring it inline with | ||||
| pymongo. To upgrade simply rename any instances where you used the `write_option` | ||||
| keyword  to `write_concern` like so:: | ||||
|  | ||||
|    # Old code: | ||||
|    Animal(name="Dinosaur").save(write_options={"w": 2}) | ||||
|  | ||||
|    # new code: | ||||
|    Animal(name="Dinosaur").save(write_concern={"w": 2}) | ||||
|  | ||||
|  | ||||
| Indexes | ||||
| ======= | ||||
|  | ||||
| Index methods are no longer tied to querysets but rather to the document class. | ||||
| Although `QuerySet._ensure_indexes` and `QuerySet.ensure_index` still exist. | ||||
| They should be replaced with :func:`~mongoengine.Document.ensure_indexes` / | ||||
| :func:`~mongoengine.Document.ensure_index`. | ||||
|  | ||||
| SequenceFields | ||||
| ============== | ||||
|  | ||||
| :class:`~mongoengine.fields.SequenceField` now inherits from `BaseField` to | ||||
| allow flexible storage of the calculated value.  As such MIN and MAX settings | ||||
| are no longer handled. | ||||
|  | ||||
| .. _MongoClient: http://blog.mongodb.org/post/36666163412/introducing-mongoclient | ||||
|  | ||||
| 0.6 to 0.7 | ||||
| ********** | ||||
|  | ||||
| Cascade saves | ||||
| ------------- | ||||
| ============= | ||||
|  | ||||
| Saves will raise a `FutureWarning` if they cascade and cascade hasn't been set | ||||
| to True.  This is because in 0.8 it will default to False.  If you require | ||||
| @@ -20,11 +369,11 @@ via `save` eg :: | ||||
|     # Or in code: | ||||
|     my_document.save(cascade=True) | ||||
|  | ||||
| .. note :: | ||||
| .. note:: | ||||
|     Remember: cascading saves **do not** cascade through lists. | ||||
|  | ||||
| ReferenceFields | ||||
| --------------- | ||||
| =============== | ||||
|  | ||||
| ReferenceFields now can store references as ObjectId strings instead of DBRefs. | ||||
| This will become the default in 0.8 and if `dbref` is not set a `FutureWarning` | ||||
| @@ -53,7 +402,7 @@ migrate :: | ||||
|  | ||||
|  | ||||
| item_frequencies | ||||
| ---------------- | ||||
| ================ | ||||
|  | ||||
| In the 0.6 series we added support for null / zero / false values in | ||||
| item_frequencies.  A side effect was to return keys in the value they are | ||||
| @@ -62,14 +411,14 @@ updated to handle native types rather than strings keys for the results of | ||||
| item frequency queries. | ||||
|  | ||||
| BinaryFields | ||||
| ------------ | ||||
| ============ | ||||
|  | ||||
| Binary fields have been updated so that they are native binary types.  If you | ||||
| previously were doing `str` comparisons with binary field values you will have | ||||
| to update and wrap the value in a `str`. | ||||
|  | ||||
| 0.5 to 0.6 | ||||
| ========== | ||||
| ********** | ||||
|  | ||||
| Embedded Documents - if you had a `pk` field you will have to rename it from | ||||
| `_id` to `pk` as pk is no longer a property of Embedded Documents. | ||||
| @@ -84,26 +433,26 @@ Document.objects.with_id - now raises an InvalidQueryError if used with a | ||||
| filter. | ||||
|  | ||||
| FutureWarning - A future warning has been added to all inherited classes that | ||||
| don't define `allow_inheritance` in their meta. | ||||
| don't define :attr:`allow_inheritance` in their meta. | ||||
|  | ||||
| You may need to update pyMongo to 2.0 for use with Sharding. | ||||
|  | ||||
| 0.4 to 0.5 | ||||
| =========== | ||||
| ********** | ||||
|  | ||||
| There have been the following backwards incompatibilities from 0.4 to 0.5.  The | ||||
| main areas of changed are: choices in fields, map_reduce and collection names. | ||||
|  | ||||
| Choice options: | ||||
| --------------- | ||||
| =============== | ||||
|  | ||||
| Are now expected to be an iterable of tuples, with  the first element in each | ||||
| Are now expected to be an iterable of tuples, with the first element in each | ||||
| tuple being the actual value to be stored. The second element is the | ||||
| human-readable name for the option. | ||||
|  | ||||
|  | ||||
| PyMongo / MongoDB | ||||
| ----------------- | ||||
| ================= | ||||
|  | ||||
| map reduce now requires pymongo 1.11+. The pymongo `merge_output` and | ||||
| `reduce_output` parameters have been deprecated. | ||||
| @@ -117,10 +466,10 @@ such the following have been changed: | ||||
|  | ||||
|  | ||||
| Default collection naming | ||||
| ------------------------- | ||||
| ========================= | ||||
|  | ||||
| Previously it was just lowercase, its now much more pythonic and readable as | ||||
| its lowercase and underscores, previously :: | ||||
| Previously it was just lowercase, it's now much more pythonic and readable as | ||||
| it's lowercase and underscores, previously :: | ||||
|  | ||||
|     class MyAceDocument(Document): | ||||
|         pass | ||||
| @@ -183,3 +532,9 @@ Alternatively, you can rename your collections eg :: | ||||
|         else: | ||||
|             print "Upgraded collection names" | ||||
|  | ||||
|  | ||||
| mongodb 1.8 > 2.0 + | ||||
| =================== | ||||
|  | ||||
| It's been reported that indexes may need to be recreated to the newer version of indexes. | ||||
| To do this drop indexes and call ``ensure_indexes`` on each model. | ||||
|   | ||||
| @@ -8,11 +8,14 @@ import queryset | ||||
| from queryset import * | ||||
| import signals | ||||
| from signals import * | ||||
| from errors import * | ||||
| import errors | ||||
| import django | ||||
|  | ||||
| __all__ = (document.__all__ + fields.__all__ + connection.__all__ + | ||||
|            queryset.__all__ + signals.__all__) | ||||
| __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + | ||||
|            list(queryset.__all__) + signals.__all__ + list(errors.__all__)) | ||||
|  | ||||
| VERSION = (0, 7, 9) | ||||
| VERSION = (0, 8, 7) | ||||
|  | ||||
|  | ||||
| def get_version(): | ||||
|   | ||||
							
								
								
									
										1524
									
								
								mongoengine/base.py
									
									
									
									
									
								
							
							
						
						
									
										1524
									
								
								mongoengine/base.py
									
									
									
									
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										8
									
								
								mongoengine/base/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										8
									
								
								mongoengine/base/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,8 @@ | ||||
| from mongoengine.base.common import * | ||||
| from mongoengine.base.datastructures import * | ||||
| from mongoengine.base.document import * | ||||
| from mongoengine.base.fields import * | ||||
| from mongoengine.base.metaclasses import * | ||||
|  | ||||
| # Help with backwards compatibility | ||||
| from mongoengine.errors import * | ||||
							
								
								
									
										26
									
								
								mongoengine/base/common.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										26
									
								
								mongoengine/base/common.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,26 @@ | ||||
from mongoengine.errors import NotRegistered

__all__ = ('ALLOW_INHERITANCE', 'get_document', '_document_registry')

# Module-wide default for whether documents allow inheritance.
ALLOW_INHERITANCE = False

# Registry mapping document class names (possibly dotted / namespaced)
# to their classes; consulted by get_document() below.
_document_registry = {}
|  | ||||
|  | ||||
def get_document(name):
    """Return the Document class registered under ``name``.

    When no exact registry entry exists, fall back to old-style naming:
    a registry key matches if it equals the last dotted component of
    ``name`` or ends with ``'.<component>'``.  The fallback is only used
    when exactly one key qualifies; otherwise ``NotRegistered`` is raised.
    """
    found = _document_registry.get(name, None)

    if not found:
        # Exact lookup failed -- the caller may be using an old style name,
        # so try to match on the trailing dotted component alone.
        tail = name.split('.')[-1]
        dotted_tail = '.%s' % tail
        candidates = [key for key in _document_registry
                      if key == tail or key.endswith(dotted_tail)]
        if len(candidates) == 1:
            found = _document_registry.get(candidates[0], None)

    if not found:
        raise NotRegistered("""
            `%s` has not been registered in the document registry.
            Importing the document class automatically registers it, has it
            been imported?
        """.strip() % name)
    return found
							
								
								
									
										158
									
								
								mongoengine/base/datastructures.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										158
									
								
								mongoengine/base/datastructures.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,158 @@ | ||||
| import weakref | ||||
| from mongoengine.common import _import_class | ||||
|  | ||||
| __all__ = ("BaseDict", "BaseList") | ||||
|  | ||||
|  | ||||
class BaseDict(dict):
    """A dict subclass that notifies its owning document of any mutation.

    Holds a weakref proxy to the owning document in ``_instance`` and the
    owning field name in ``_name``; every mutating operation calls
    ``_mark_as_changed`` so the document's change tracking stays accurate.
    """

    _dereferenced = False
    _instance = None
    _name = None

    def __init__(self, dict_items, instance, name):
        """Wrap ``dict_items``, remembering ``instance`` (the owning
        document, held weakly) and ``name`` (the field name)."""
        Document = _import_class('Document')
        EmbeddedDocument = _import_class('EmbeddedDocument')

        # Only keep a (weak) owner reference for real documents; any other
        # owner leaves the class-level ``_instance = None`` in place.
        if isinstance(instance, (Document, EmbeddedDocument)):
            self._instance = weakref.proxy(instance)
        self._name = name
        return super(BaseDict, self).__init__(dict_items)

    def __getitem__(self, *args, **kwargs):
        value = super(BaseDict, self).__getitem__(*args, **kwargs)

        # Re-attach the owner to embedded documents fetched out of the dict
        # so nested changes propagate upwards.
        EmbeddedDocument = _import_class('EmbeddedDocument')
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = self._instance
        return value

    def __setitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).__setitem__(*args, **kwargs)

    def __delete__(self, *args, **kwargs):
        # NOTE(review): ``__delete__`` is the descriptor protocol, not a
        # dict method -- dict defines no ``__delete__`` so the super() call
        # would fail if this were ever invoked.  Kept for compatibility.
        self._mark_as_changed()
        return super(BaseDict, self).__delete__(*args, **kwargs)

    def __delitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).__delitem__(*args, **kwargs)

    def __delattr__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).__delattr__(*args, **kwargs)

    def __getstate__(self):
        # Drop the weakref proxy before pickling (proxies can't be pickled).
        # BUGFIX: previously assigned ``self.instance = None`` -- a brand
        # new attribute -- leaving the unpicklable proxy in ``_instance``.
        self._instance = None
        self._dereferenced = False
        return self

    def __setstate__(self, state):
        self = state
        return self

    def clear(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).clear(*args, **kwargs)

    def pop(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).pop(*args, **kwargs)

    def popitem(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).popitem(*args, **kwargs)

    def update(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).update(*args, **kwargs)

    def _mark_as_changed(self):
        # The owner may be absent or already collected (weakref); only
        # notify when it can actually record the change.
        if hasattr(self._instance, '_mark_as_changed'):
            self._instance._mark_as_changed(self._name)
|  | ||||
|  | ||||
class BaseList(list):
    """A list subclass that notifies its owning document of any mutation.

    Holds a weakref proxy to the owning document in ``_instance`` and the
    owning field name in ``_name``; every mutating operation calls
    ``_mark_as_changed`` so the document's change tracking stays accurate.
    """

    _dereferenced = False
    _instance = None
    _name = None

    def __init__(self, list_items, instance, name):
        """Wrap ``list_items``, remembering ``instance`` (the owning
        document, held weakly) and ``name`` (the field name)."""
        Document = _import_class('Document')
        EmbeddedDocument = _import_class('EmbeddedDocument')

        # Only keep a (weak) owner reference for real documents; any other
        # owner leaves the class-level ``_instance = None`` in place.
        if isinstance(instance, (Document, EmbeddedDocument)):
            self._instance = weakref.proxy(instance)
        self._name = name
        return super(BaseList, self).__init__(list_items)

    def __getitem__(self, *args, **kwargs):
        value = super(BaseList, self).__getitem__(*args, **kwargs)

        # Re-attach the owner to embedded documents fetched out of the list
        # so nested changes propagate upwards.
        EmbeddedDocument = _import_class('EmbeddedDocument')
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = self._instance
        return value

    def __setitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).__setitem__(*args, **kwargs)

    def __delitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).__delitem__(*args, **kwargs)

    def __setslice__(self, *args, **kwargs):
        # Python 2 slice-assignment hook; Python 3 routes through
        # __setitem__ with a slice object instead.
        self._mark_as_changed()
        return super(BaseList, self).__setslice__(*args, **kwargs)

    def __delslice__(self, *args, **kwargs):
        # Python 2 slice-deletion hook; Python 3 routes through __delitem__.
        self._mark_as_changed()
        return super(BaseList, self).__delslice__(*args, **kwargs)

    def __getstate__(self):
        # Drop the weakref proxy before pickling (proxies can't be pickled).
        # BUGFIX: previously assigned ``self.instance = None`` -- a brand
        # new attribute -- leaving the unpicklable proxy in ``_instance``.
        self._instance = None
        self._dereferenced = False
        return self

    def __setstate__(self, state):
        self = state
        return self

    def append(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).append(*args, **kwargs)

    def extend(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).extend(*args, **kwargs)

    def insert(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).insert(*args, **kwargs)

    def pop(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).pop(*args, **kwargs)

    def remove(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).remove(*args, **kwargs)

    def reverse(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).reverse(*args, **kwargs)

    def sort(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).sort(*args, **kwargs)

    def _mark_as_changed(self):
        # The owner may be absent or already collected (weakref); only
        # notify when it can actually record the change.
        if hasattr(self._instance, '_mark_as_changed'):
            self._instance._mark_as_changed(self._name)
							
								
								
									
										837
									
								
								mongoengine/base/document.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										837
									
								
								mongoengine/base/document.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,837 @@ | ||||
| import copy | ||||
| import operator | ||||
| import numbers | ||||
| from collections import Hashable | ||||
| from functools import partial | ||||
|  | ||||
| import pymongo | ||||
| from bson import json_util, ObjectId | ||||
| from bson.dbref import DBRef | ||||
| from bson.son import SON | ||||
|  | ||||
| from mongoengine import signals | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import (ValidationError, InvalidDocumentError, | ||||
|                                 LookUpError) | ||||
| from mongoengine.python_support import (PY3, UNICODE_KWARGS, txt_type, | ||||
|                                         to_str_keys_recursive) | ||||
|  | ||||
| from mongoengine.base.common import get_document, ALLOW_INHERITANCE | ||||
| from mongoengine.base.datastructures import BaseDict, BaseList | ||||
| from mongoengine.base.fields import ComplexBaseField | ||||
|  | ||||
__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')

# Sentinel key for document-level validation errors -- presumably groups
# errors not attached to a single field; TODO confirm against
# ValidationError usage elsewhere in the package.
NON_FIELD_ERRORS = '__all__'
|  | ||||
|  | ||||
| class BaseDocument(object): | ||||
|  | ||||
|     _dynamic = False | ||||
|     _created = True | ||||
|     _dynamic_lock = True | ||||
|     _initialised = False | ||||
|  | ||||
|     def __init__(self, *args, **values): | ||||
|         """ | ||||
|         Initialise a document or embedded document | ||||
|  | ||||
|         :param __auto_convert: Try and will cast python objects to Object types | ||||
|         :param values: A dictionary of values for the document | ||||
|         """ | ||||
|         if args: | ||||
|             # Combine positional arguments with named arguments. | ||||
|             # We only want named arguments. | ||||
|             field = iter(self._fields_ordered) | ||||
|             # If its an automatic id field then skip to the first defined field | ||||
|             if self._auto_id_field: | ||||
|                 next(field) | ||||
|             for value in args: | ||||
|                 name = next(field) | ||||
|                 if name in values: | ||||
|                     raise TypeError("Multiple values for keyword argument '" + name + "'") | ||||
|                 values[name] = value | ||||
|         __auto_convert = values.pop("__auto_convert", True) | ||||
|         signals.pre_init.send(self.__class__, document=self, values=values) | ||||
|  | ||||
|         self._data = {} | ||||
|         self._dynamic_fields = SON() | ||||
|  | ||||
|         # Assign default values to instance | ||||
|         for key, field in self._fields.iteritems(): | ||||
|             if self._db_field_map.get(key, key) in values: | ||||
|                 continue | ||||
|             value = getattr(self, key, None) | ||||
|             setattr(self, key, value) | ||||
|  | ||||
|         # Set passed values after initialisation | ||||
|         if self._dynamic: | ||||
|             dynamic_data = {} | ||||
|             for key, value in values.iteritems(): | ||||
|                 if key in self._fields or key == '_id': | ||||
|                     setattr(self, key, value) | ||||
|                 elif self._dynamic: | ||||
|                     dynamic_data[key] = value | ||||
|         else: | ||||
|             FileField = _import_class('FileField') | ||||
|             for key, value in values.iteritems(): | ||||
|                 if key == '__auto_convert': | ||||
|                     continue | ||||
|                 key = self._reverse_db_field_map.get(key, key) | ||||
|                 if key in self._fields or key in ('id', 'pk', '_cls'): | ||||
|                     if __auto_convert and value is not None: | ||||
|                         field = self._fields.get(key) | ||||
|                         if field and not isinstance(field, FileField): | ||||
|                             value = field.to_python(value) | ||||
|                     setattr(self, key, value) | ||||
|                 else: | ||||
|                     self._data[key] = value | ||||
|  | ||||
|         # Set any get_fieldname_display methods | ||||
|         self.__set_field_display() | ||||
|  | ||||
|         if self._dynamic: | ||||
|             self._dynamic_lock = False | ||||
|             for key, value in dynamic_data.iteritems(): | ||||
|                 setattr(self, key, value) | ||||
|  | ||||
|         # Flag initialised | ||||
|         self._initialised = True | ||||
|         signals.post_init.send(self.__class__, document=self) | ||||
|  | ||||
|     def __delattr__(self, *args, **kwargs): | ||||
|         """Handle deletions of fields""" | ||||
|         field_name = args[0] | ||||
|         if field_name in self._fields: | ||||
|             default = self._fields[field_name].default | ||||
|             if callable(default): | ||||
|                 default = default() | ||||
|             setattr(self, field_name, default) | ||||
|         else: | ||||
|             super(BaseDocument, self).__delattr__(*args, **kwargs) | ||||
|  | ||||
    def __setattr__(self, name, value):
        """Assign *value* to *name*, with document bookkeeping.

        For initialised dynamic documents, unknown public attributes get a
        DynamicField created on the fly; shard-key fields are guarded
        against mutation after save; assigning the id field on a saved
        document clears the ``_created`` flag.
        """
        # Handle dynamic data only if an initialised dynamic document
        if self._dynamic and not self._dynamic_lock:

            field = None
            if not hasattr(self, name) and not name.startswith('_'):
                # First assignment of a new public attribute: register a
                # DynamicField so the value is persisted like a real field.
                DynamicField = _import_class("DynamicField")
                field = DynamicField(db_field=name)
                field.name = name
                self._dynamic_fields[name] = field
                self._fields_ordered += (name,)

            if not name.startswith('_'):
                # Coerce dicts/lists into trackable BaseDict/BaseList etc.
                value = self.__expand_dynamic_values(name, value)

            # Handle marking data as changed
            if name in self._dynamic_fields:
                self._data[name] = value
                if hasattr(self, '_changed_fields'):
                    self._mark_as_changed(name)

        # Shard keys are immutable once the document exists in the DB.
        if (self._is_document and not self._created and
           name in self._meta.get('shard_key', tuple()) and
           self._data.get(name) != value):
            OperationError = _import_class('OperationError')
            msg = "Shard Keys are immutable. Tried to update %s" % name
            raise OperationError(msg)

        # Check if the user has created a new instance of a class
        if (self._is_document and self._initialised
           and self._created and name == self._meta['id_field']):
                super(BaseDocument, self).__setattr__('_created', False)

        super(BaseDocument, self).__setattr__(name, value)
|  | ||||
|     def __getstate__(self): | ||||
|         data = {} | ||||
|         for k in ('_changed_fields', '_initialised', '_created', | ||||
|                   '_dynamic_fields', '_fields_ordered'): | ||||
|             if hasattr(self, k): | ||||
|                 data[k] = getattr(self, k) | ||||
|         data['_data'] = self.to_mongo() | ||||
|         return data | ||||
|  | ||||
|     def __setstate__(self, data): | ||||
|         if isinstance(data["_data"], SON): | ||||
|             data["_data"] = self.__class__._from_son(data["_data"])._data | ||||
|         for k in ('_changed_fields', '_initialised', '_created', '_data', | ||||
|                   '_fields_ordered', '_dynamic_fields'): | ||||
|             if k in data: | ||||
|                 setattr(self, k, data[k]) | ||||
|         dynamic_fields = data.get('_dynamic_fields') or SON() | ||||
|         for k in dynamic_fields.keys(): | ||||
|             setattr(self, k, data["_data"].get(k)) | ||||
|  | ||||
|     def __iter__(self): | ||||
|         return iter(self._fields_ordered) | ||||
|  | ||||
|     def __getitem__(self, name): | ||||
|         """Dictionary-style field access, return a field's value if present. | ||||
|         """ | ||||
|         try: | ||||
|             if name in self._fields_ordered: | ||||
|                 return getattr(self, name) | ||||
|         except AttributeError: | ||||
|             pass | ||||
|         raise KeyError(name) | ||||
|  | ||||
|     def __setitem__(self, name, value): | ||||
|         """Dictionary-style field access, set a field's value. | ||||
|         """ | ||||
|         # Ensure that the field exists before settings its value | ||||
|         if name not in self._fields: | ||||
|             raise KeyError(name) | ||||
|         return setattr(self, name, value) | ||||
|  | ||||
|     def __contains__(self, name): | ||||
|         try: | ||||
|             val = getattr(self, name) | ||||
|             return val is not None | ||||
|         except AttributeError: | ||||
|             return False | ||||
|  | ||||
|     def __len__(self): | ||||
|         return len(self._data) | ||||
|  | ||||
|     def __repr__(self): | ||||
|         try: | ||||
|             u = self.__str__() | ||||
|         except (UnicodeEncodeError, UnicodeDecodeError): | ||||
|             u = '[Bad Unicode data]' | ||||
|         repr_type = type(u) | ||||
|         return repr_type('<%s: %s>' % (self.__class__.__name__, u)) | ||||
|  | ||||
|     def __str__(self): | ||||
|         if hasattr(self, '__unicode__'): | ||||
|             if PY3: | ||||
|                 return self.__unicode__() | ||||
|             else: | ||||
|                 return unicode(self).encode('utf-8') | ||||
|         return txt_type('%s object' % self.__class__.__name__) | ||||
|  | ||||
|     def __eq__(self, other): | ||||
|         if isinstance(other, self.__class__) and hasattr(other, 'id'): | ||||
|             if self.id == other.id: | ||||
|                 return True | ||||
|         return False | ||||
|  | ||||
    def __ne__(self, other):
        # Inverse of __eq__ (Python 2 does not derive __ne__ automatically).
        return not self.__eq__(other)
|  | ||||
|     def __hash__(self): | ||||
|         if getattr(self, 'pk', None) is None: | ||||
|             # For new object | ||||
|             return super(BaseDocument, self).__hash__() | ||||
|         else: | ||||
|             return hash(self.pk) | ||||
|  | ||||
    def clean(self):
        """
        Hook for doing document level data cleaning before validation is run.

        Any ValidationError raised by this method will not be associated with
        a particular field; it will have a special-case association with the
        field defined by NON_FIELD_ERRORS.
        """
        # Intentionally a no-op; subclasses override to add cleaning logic.
        pass
|  | ||||
    def to_mongo(self):
        """Return as SON data ready for use with MongoDB.

        Translates field names to db_field names, runs each field's
        to_mongo(), fills in auto-generated values (writing them back to
        self._data), and strips a None ``_id`` and, when inheritance is
        disallowed, the ``_cls`` marker.
        """
        data = SON()
        data["_id"] = None
        data['_cls'] = self._class_name

        for field_name in self:
            value = self._data.get(field_name, None)
            field = self._fields.get(field_name)
            if field is None and self._dynamic:
                # Dynamic attributes live in _dynamic_fields, not _fields.
                field = self._dynamic_fields.get(field_name)

            if value is not None:
                value = field.to_mongo(value)

            # Handle self generating fields
            if value is None and field._auto_gen:
                value = field.generate()
                self._data[field_name] = value

            if value is not None:
                data[field.db_field] = value

        # If "_id" has not been set, then try and set it
        Document = _import_class("Document")
        if isinstance(self, Document):
            if data["_id"] is None:
                data["_id"] = self._data.get("id", None)

        if data['_id'] is None:
            data.pop('_id')

        # Only add _cls if allow_inheritance is True
        if (not hasattr(self, '_meta') or
           not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
            data.pop('_cls')

        return data
|  | ||||
    def validate(self, clean=True):
        """Ensure that all fields' values are valid and that required fields
        are present.

        :param clean: when True, run :meth:`clean` first and pass the flag
            down into embedded-document validation.
        :raises ValidationError: aggregating all per-field errors (clean()
            failures are keyed under NON_FIELD_ERRORS).
        """
        # Ensure that each field is matched to a valid value
        errors = {}
        if clean:
            try:
                self.clean()
            except ValidationError, error:
                errors[NON_FIELD_ERRORS] = error

        # Get a list of tuples of field names and their current values
        fields = [(self._fields.get(name, self._dynamic_fields.get(name)),
                   self._data.get(name)) for name in self._fields_ordered]

        EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
        GenericEmbeddedDocumentField = _import_class("GenericEmbeddedDocumentField")

        for field, value in fields:
            if value is not None:
                try:
                    # Embedded documents get the clean flag threaded through.
                    if isinstance(field, (EmbeddedDocumentField,
                                          GenericEmbeddedDocumentField)):
                        field._validate(value, clean=clean)
                    else:
                        field._validate(value)
                except ValidationError, error:
                    errors[field.name] = error.errors or error
                except (ValueError, AttributeError, AssertionError), error:
                    errors[field.name] = error
            elif field.required and not getattr(field, '_auto_gen', False):
                errors[field.name] = ValidationError('Field is required',
                                                     field_name=field.name)

        if errors:
            pk = "None"
            if hasattr(self, 'pk'):
                pk = self.pk
            elif self._instance:
                # Embedded documents report their owner's pk.
                pk = self._instance.pk
            message = "ValidationError (%s:%s) " % (self._class_name, pk)
            raise ValidationError(message, errors=errors)
|  | ||||
|     def to_json(self, *args, **kwargs): | ||||
|         """Converts a document to JSON""" | ||||
|         return json_util.dumps(self.to_mongo(),  *args, **kwargs) | ||||
|  | ||||
|     @classmethod | ||||
|     def from_json(cls, json_data): | ||||
|         """Converts json data to an unsaved document instance""" | ||||
|         return cls._from_son(json_util.loads(json_data)) | ||||
|  | ||||
    def __expand_dynamic_values(self, name, value):
        """expand any dynamic values to their correct types / values

        Recursively converts raw dicts/lists assigned to dynamic fields:
        a dict carrying ``_cls`` becomes a document instance, and plain
        containers become BaseDict/BaseList so changes can be tracked.
        """
        if not isinstance(value, (dict, list, tuple)):
            return value

        is_list = False
        if not hasattr(value, 'items'):
            # Treat sequences as {index: item} so one recursion path works.
            is_list = True
            value = dict([(k, v) for k, v in enumerate(value)])

        if not is_list and '_cls' in value:
            # A serialized document: rebuild it via its registered class.
            cls = get_document(value['_cls'])
            return cls(**value)

        data = {}
        for k, v in value.items():
            key = name if is_list else k
            data[k] = self.__expand_dynamic_values(key, v)

        if is_list:  # Convert back to a list
            data_items = sorted(data.items(), key=operator.itemgetter(0))
            value = [v for k, v in data_items]
        else:
            value = data

        # Convert lists / values so we can watch for any changes on them
        if (isinstance(value, (list, tuple)) and
           not isinstance(value, BaseList)):
            value = BaseList(value, self, name)
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            value = BaseDict(value, self, name)

        return value
|  | ||||
|     def _mark_as_changed(self, key): | ||||
|         """Marks a key as explicitly changed by the user | ||||
|         """ | ||||
|         if not key: | ||||
|             return | ||||
|         key = self._db_field_map.get(key, key) | ||||
|         if (hasattr(self, '_changed_fields') and | ||||
|            key not in self._changed_fields): | ||||
|             self._changed_fields.append(key) | ||||
|  | ||||
    def _clear_changed_fields(self):
        """Using get_changed_fields iterate and remove any fields that are
        marked as changed

        Walks each dotted change path down through lists/dicts/attributes,
        clearing the _changed_fields list of every trackable container or
        embedded document along the way, then clears this document's own
        list.
        """
        for changed in self._get_changed_fields():
            parts = changed.split(".")
            data = self
            for part in parts:
                if isinstance(data, list):
                    try:
                        data = data[int(part)]
                    except IndexError:
                        # Element no longer present; nothing left to clear.
                        data = None
                elif isinstance(data, dict):
                    data = data.get(part, None)
                else:
                    data = getattr(data, part, None)
                # Clear nested trackers at every level of the path.
                if hasattr(data, "_changed_fields"):
                    data._changed_fields = []
        self._changed_fields = []
|  | ||||
    def _nestable_types_changed_fields(self, changed_fields, key, data, inspected):
        """Append change paths found inside a list/dict *data* to
        *changed_fields* (mutated in place), prefixing each with *key*.

        Recurses into nested containers and into any value exposing
        _get_changed_fields (embedded documents).
        """
        # Loop list / dict fields as they contain documents
        # Determine the iterator to use
        if not hasattr(data, 'items'):
            iterator = enumerate(data)
        else:
            iterator = data.iteritems()

        for index, value in iterator:
            list_key = "%s%s." % (key, index)
            if hasattr(value, '_get_changed_fields'):
                changed = value._get_changed_fields(inspected)
                changed_fields += ["%s%s" % (list_key, k)
                                    for k in changed if k]
            elif isinstance(value, (list, tuple, dict)):
                self._nestable_types_changed_fields(changed_fields, list_key, value, inspected)
|  | ||||
    def _get_changed_fields(self, inspected=None):
        """Returns a list of all fields that have explicitly been changed.

        Recurses into embedded documents and nested containers, using
        *inspected* (a shared set of ids) as a cycle guard. References are
        skipped: their changes belong to the referenced document.
        """
        EmbeddedDocument = _import_class("EmbeddedDocument")
        DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument")
        ReferenceField = _import_class("ReferenceField")
        changed_fields = []
        changed_fields += getattr(self, '_changed_fields', [])
        inspected = inspected or set()
        if hasattr(self, 'id') and isinstance(self.id, Hashable):
            if self.id in inspected:
                # Already visited this document in the recursion.
                return changed_fields
            inspected.add(self.id)

        for field_name in self._fields_ordered:
            db_field_name = self._db_field_map.get(field_name, field_name)
            key = '%s.' % db_field_name
            data = self._data.get(field_name, None)
            field = self._fields.get(field_name)

            if hasattr(data, 'id'):
                if data.id in inspected:
                    continue
                inspected.add(data.id)
            if isinstance(field, ReferenceField):
                continue
            elif (isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument))
               and db_field_name not in changed_fields):
                # Find all embedded fields that have been changed
                changed = data._get_changed_fields(inspected)
                changed_fields += ["%s%s" % (key, k) for k in changed if k]
            elif (isinstance(data, (list, tuple, dict)) and
                    db_field_name not in changed_fields):
                # Skip containers of references for the same reason as above.
                if (hasattr(field, 'field') and
                    isinstance(field.field, ReferenceField)):
                    continue
                self._nestable_types_changed_fields(changed_fields, key, data, inspected)
        return changed_fields
|  | ||||
    def _delta(self):
        """Returns the delta (set, unset) of the changes for a document.
        Gets any values that have been explicitly changed.

        :returns: ``(set_data, unset_data)`` — dicts keyed by dotted db
            paths, suitable for building $set / $unset update operations.
        """
        # Handles cases where not loaded from_son but has _id
        doc = self.to_mongo()

        set_fields = self._get_changed_fields()
        unset_data = {}
        parts = []
        if hasattr(self, '_changed_fields'):
            set_data = {}
            # Fetch each set item from its path
            for path in set_fields:
                parts = path.split('.')
                d = doc
                new_path = []
                for p in parts:
                    if isinstance(d, (ObjectId, DBRef)):
                        # Stop descending at references/ids; set the whole ref.
                        break
                    elif isinstance(d, list) and p.isdigit():
                        d = d[int(p)]
                    elif hasattr(d, 'get'):
                        d = d.get(p)
                    new_path.append(p)
                path = '.'.join(new_path)
                set_data[path] = d
        else:
            # No change tracking available: set the whole document.
            set_data = doc
            if '_id' in set_data:
                del(set_data['_id'])

        # Determine if any changed items were actually unset.
        for path, value in set_data.items():
            # Keep falsy-but-meaningful values (0, False) as $set, not $unset.
            if value or isinstance(value, (numbers.Number, bool)):
                continue

            # If we've set a value that ain't the default value dont unset it.
            default = None
            # NOTE(review): `parts` here still holds the split of the LAST
            # path processed by the loop above (or [] if it never ran) —
            # it is not derived from this `path`. Looks fragile; confirm
            # this is the intended behaviour before touching it.
            if (self._dynamic and len(parts) and parts[0] in
               self._dynamic_fields):
                del(set_data[path])
                unset_data[path] = 1
                continue
            elif path in self._fields:
                default = self._fields[path].default
            else:  # Perform a full lookup for lists / embedded lookups
                d = self
                parts = path.split('.')
                db_field_name = parts.pop()
                for p in parts:
                    if isinstance(d, list) and p.isdigit():
                        d = d[int(p)]
                    elif (hasattr(d, '__getattribute__') and
                          not isinstance(d, dict)):
                        # Translate the db name back to the attribute name.
                        real_path = d._reverse_db_field_map.get(p, p)
                        d = getattr(d, real_path)
                    else:
                        d = d.get(p)

                if hasattr(d, '_fields'):
                    field_name = d._reverse_db_field_map.get(db_field_name,
                                                             db_field_name)
                    if field_name in d._fields:
                        default = d._fields.get(field_name).default
                    else:
                        default = None

            if default is not None:
                if callable(default):
                    default = default()

            # A value differing from its default stays as a $set.
            if default != value:
                continue

            del(set_data[path])
            unset_data[path] = 1
        return set_data, unset_data
|  | ||||
|     @classmethod | ||||
|     def _get_collection_name(cls): | ||||
|         """Returns the collection name for this class. | ||||
|         """ | ||||
|         return cls._meta.get('collection', None) | ||||
|  | ||||
    @classmethod
    def _from_son(cls, son, _auto_dereference=True):
        """Create an instance of a Document (subclass) from a PyMongo SON.

        :param son: the raw SON/dict as returned by PyMongo.
        :param _auto_dereference: propagated onto each field; when False a
            private copy of the fields dict is attached to the instance.
        :raises InvalidDocumentError: if any field's to_python fails.
        """

        # get the class name from the document, falling back to the given
        # class if unavailable
        class_name = son.get('_cls', cls._class_name)
        data = dict(("%s" % key, value) for key, value in son.iteritems())
        if not UNICODE_KWARGS:
            # python 2.6.4 and lower cannot handle unicode keys
            # passed to class constructor example: cls(**data)
            to_str_keys_recursive(data)

        # Return correct subclass for document type
        if class_name != cls._class_name:
            cls = get_document(class_name)

        changed_fields = []
        errors_dict = {}

        fields = cls._fields
        if not _auto_dereference:
            # Copy so the dereference flag does not leak to other instances.
            fields = copy.copy(fields)

        for field_name, field in fields.iteritems():
            field._auto_dereference = _auto_dereference
            if field.db_field in data:
                value = data[field.db_field]
                try:
                    # Convert to Python values; rekey db_field -> field name.
                    data[field_name] = (value if value is None
                                        else field.to_python(value))
                    if field_name != field.db_field:
                        del data[field.db_field]
                except (AttributeError, ValueError), e:
                    errors_dict[field_name] = e
            elif field.default:
                default = field.default
                if callable(default):
                    default = default()
                if isinstance(default, BaseDocument):
                    # Document defaults count as changed so they get saved.
                    changed_fields.append(field_name)

        if errors_dict:
            errors = "\n".join(["%s - %s" % (k, v)
                     for k, v in errors_dict.items()])
            msg = ("Invalid data to create a `%s` instance.\n%s"
                   % (cls._class_name, errors))
            raise InvalidDocumentError(msg)

        obj = cls(__auto_convert=False, **data)
        obj._changed_fields = changed_fields
        obj._created = False
        if not _auto_dereference:
            obj._fields = fields
        return obj
|  | ||||
|     @classmethod | ||||
|     def _build_index_specs(cls, meta_indexes): | ||||
|         """Generate and merge the full index specs | ||||
|         """ | ||||
|  | ||||
|         geo_indices = cls._geo_indices() | ||||
|         unique_indices = cls._unique_with_indexes() | ||||
|         index_specs = [cls._build_index_spec(spec) | ||||
|                        for spec in meta_indexes] | ||||
|  | ||||
|         def merge_index_specs(index_specs, indices): | ||||
|             if not indices: | ||||
|                 return index_specs | ||||
|  | ||||
|             spec_fields = [v['fields'] | ||||
|                            for k, v in enumerate(index_specs)] | ||||
|             # Merge unqiue_indexes with existing specs | ||||
|             for k, v in enumerate(indices): | ||||
|                 if v['fields'] in spec_fields: | ||||
|                     index_specs[spec_fields.index(v['fields'])].update(v) | ||||
|                 else: | ||||
|                     index_specs.append(v) | ||||
|             return index_specs | ||||
|  | ||||
|         index_specs = merge_index_specs(index_specs, geo_indices) | ||||
|         index_specs = merge_index_specs(index_specs, unique_indices) | ||||
|         return index_specs | ||||
|  | ||||
    @classmethod
    def _build_index_spec(cls, spec):
        """Build a PyMongo index spec from a MongoEngine index spec.

        Accepts a field name, a list/tuple of names, or a dict spec.
        Direction prefixes: ``+``/none ascending, ``-`` descending,
        ``*`` 2d geo. ``_cls`` is prepended for inheriting, non-sparse
        indexes unless the spec sets ``cls: False``.
        """
        if isinstance(spec, basestring):
            spec = {'fields': [spec]}
        elif isinstance(spec, (list, tuple)):
            spec = {'fields': list(spec)}
        elif isinstance(spec, dict):
            # Copy so the caller's spec dict is not mutated below.
            spec = dict(spec)

        index_list = []
        direction = None

        # Check to see if we need to include _cls
        allow_inheritance = cls._meta.get('allow_inheritance',
                                          ALLOW_INHERITANCE)
        include_cls = (allow_inheritance and not spec.get('sparse', False) and
                       spec.get('cls',  True))
        if "cls" in spec:
            spec.pop('cls')
        for key in spec['fields']:
            # If inherited spec continue
            if isinstance(key, (list, tuple)):
                continue

            # ASCENDING from +,
            # DESCENDING from -
            # GEO2D from *
            direction = pymongo.ASCENDING
            if key.startswith("-"):
                direction = pymongo.DESCENDING
            elif key.startswith("*"):
                direction = pymongo.GEO2D
            if key.startswith(("+", "-", "*")):
                key = key[1:]

            # Use real field name, do it manually because we need field
            # objects for the next part (list field checking)
            parts = key.split('.')
            if parts in (['pk'], ['id'], ['_id']):
                key = '_id'
                fields = []
            else:
                fields = cls._lookup_field(parts)
                parts = [field if field == '_id' else field.db_field
                         for field in fields]
                key = '.'.join(parts)
            index_list.append((key, direction))

        # Don't add cls to a geo index
        if include_cls and direction is not pymongo.GEO2D:
            index_list.insert(0, ('_cls', 1))

        if index_list:
            spec['fields'] = index_list
        if spec.get('sparse', False) and len(spec['fields']) > 1:
            raise ValueError(
                'Sparse indexes can only have one field in them. '
                'See https://jira.mongodb.org/browse/SERVER-2193')

        return spec
|  | ||||
    @classmethod
    def _unique_with_indexes(cls, namespace=""):
        """
        Find and set unique indexes

        Builds index specs for fields marked ``unique`` (including their
        ``unique_with`` companions) and recurses into embedded documents,
        prefixing their fields with *namespace*. Side effect: marks the
        involved fields as required.
        """
        unique_indexes = []
        for field_name, field in cls._fields.items():
            sparse = False
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')
                        # Lookup real name
                        parts = cls._lookup_field(parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append('.'.join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                        # NOTE(review): sparse ends up True only when the
                        # companion field is not declared directly on this
                        # class — confirm this is the intended rule.
                        sparse = (not sparse and
                                  parts[-1].name not in cls.__dict__)
                    unique_fields += unique_with

                # Add the new index to the list
                fields = [("%s%s" % (namespace, f), pymongo.ASCENDING)
                          for f in unique_fields]
                index = {'fields': fields, 'unique': True, 'sparse': sparse}
                unique_indexes.append(index)

            # Grab any embedded document field unique indexes
            if (field.__class__.__name__ == "EmbeddedDocumentField" and
               field.document_type != cls):
                field_namespace = "%s." % field_name
                doc_cls = field.document_type
                unique_indexes += doc_cls._unique_with_indexes(field_namespace)

        return unique_indexes
|  | ||||
|     @classmethod | ||||
|     def _geo_indices(cls, inspected=None, parent_field=None): | ||||
|         inspected = inspected or [] | ||||
|         geo_indices = [] | ||||
|         inspected.append(cls) | ||||
|  | ||||
|         geo_field_type_names = ["EmbeddedDocumentField", "GeoPointField", | ||||
|                                 "PointField", "LineStringField", "PolygonField"] | ||||
|  | ||||
|         geo_field_types = tuple([_import_class(field) for field in geo_field_type_names]) | ||||
|  | ||||
|         for field in cls._fields.values(): | ||||
|             if not isinstance(field, geo_field_types): | ||||
|                 continue | ||||
|             if hasattr(field, 'document_type'): | ||||
|                 field_cls = field.document_type | ||||
|                 if field_cls in inspected: | ||||
|                     continue | ||||
|                 if hasattr(field_cls, '_geo_indices'): | ||||
|                     geo_indices += field_cls._geo_indices(inspected, parent_field=field.db_field) | ||||
|             elif field._geo_index: | ||||
|                 field_name = field.db_field | ||||
|                 if parent_field: | ||||
|                     field_name = "%s.%s" % (parent_field, field_name) | ||||
|                 geo_indices.append({'fields': | ||||
|                                    [(field_name, field._geo_index)]}) | ||||
|         return geo_indices | ||||
|  | ||||
|     @classmethod | ||||
|     def _lookup_field(cls, parts): | ||||
|         """Lookup a field based on its attribute and return a list containing | ||||
|         the field's parents and the field. | ||||
|         """ | ||||
|  | ||||
|         ListField = _import_class("ListField") | ||||
|  | ||||
|         if not isinstance(parts, (list, tuple)): | ||||
|             parts = [parts] | ||||
|         fields = [] | ||||
|         field = None | ||||
|  | ||||
|         for field_name in parts: | ||||
|             # Handle ListField indexing: | ||||
|             if field_name.isdigit() and isinstance(field, ListField): | ||||
|                 new_field = field.field | ||||
|                 fields.append(field_name) | ||||
|                 continue | ||||
|  | ||||
|             if field is None: | ||||
|                 # Look up first field from the document | ||||
|                 if field_name == 'pk': | ||||
|                     # Deal with "primary key" alias | ||||
|                     field_name = cls._meta['id_field'] | ||||
|                 if field_name in cls._fields: | ||||
|                     field = cls._fields[field_name] | ||||
|                 elif cls._dynamic: | ||||
|                     DynamicField = _import_class('DynamicField') | ||||
|                     field = DynamicField(db_field=field_name) | ||||
|                 else: | ||||
|                     raise LookUpError('Cannot resolve field "%s"' | ||||
|                                       % field_name) | ||||
|             else: | ||||
|                 ReferenceField = _import_class('ReferenceField') | ||||
|                 GenericReferenceField = _import_class('GenericReferenceField') | ||||
|                 if isinstance(field, (ReferenceField, GenericReferenceField)): | ||||
|                     raise LookUpError('Cannot perform join in mongoDB: %s' % | ||||
|                                       '__'.join(parts)) | ||||
|                 if hasattr(getattr(field, 'field', None), 'lookup_member'): | ||||
|                     new_field = field.field.lookup_member(field_name) | ||||
|                 else: | ||||
|                    # Look up subfield on the previous field | ||||
|                     new_field = field.lookup_member(field_name) | ||||
|                 if not new_field and isinstance(field, ComplexBaseField): | ||||
|                     fields.append(field_name) | ||||
|                     continue | ||||
|                 elif not new_field: | ||||
|                     raise LookUpError('Cannot resolve field "%s"' | ||||
|                                       % field_name) | ||||
|                 field = new_field  # update field to the new field type | ||||
|             fields.append(field) | ||||
|         return fields | ||||
|  | ||||
|     @classmethod | ||||
|     def _translate_field_name(cls, field, sep='.'): | ||||
|         """Translate a field attribute name to a database field name. | ||||
|         """ | ||||
|         parts = field.split(sep) | ||||
|         parts = [f.db_field for f in cls._lookup_field(parts)] | ||||
|         return '.'.join(parts) | ||||
|  | ||||
|     def __set_field_display(self): | ||||
|         """Dynamically set the display value for a field with choices""" | ||||
|         for attr_name, field in self._fields.items(): | ||||
|             if field.choices: | ||||
|                 setattr(self, | ||||
|                         'get_%s_display' % attr_name, | ||||
|                         partial(self.__get_field_display, field=field)) | ||||
|  | ||||
|     def __get_field_display(self, field): | ||||
|         """Returns the display value for a choice field""" | ||||
|         value = getattr(self, field.name) | ||||
|         if field.choices and isinstance(field.choices[0], (list, tuple)): | ||||
|             return dict(field.choices).get(value, value) | ||||
|         return value | ||||
							
								
								
									
										508
									
								
								mongoengine/base/fields.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										508
									
								
								mongoengine/base/fields.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,508 @@ | ||||
| import operator | ||||
| import warnings | ||||
| import weakref | ||||
|  | ||||
| from bson import DBRef, ObjectId, SON | ||||
| import pymongo | ||||
|  | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import ValidationError | ||||
|  | ||||
| from mongoengine.base.common import ALLOW_INHERITANCE | ||||
| from mongoengine.base.datastructures import BaseDict, BaseList | ||||
|  | ||||
| __all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField") | ||||
|  | ||||
|  | ||||
class BaseField(object):
    """A base class for fields in a MongoDB document. Instances of this class
    may be added to subclasses of `Document` to define a document's schema.

    Acts as a data descriptor: reads and writes go through ``instance._data``
    keyed by the field's attribute name (set by the document metaclass).

    .. versionchanged:: 0.5 - added verbose and help text
    """

    name = None  # Attribute name on the document; assigned by the metaclass
    _geo_index = False  # Overridden by geo fields that want a geo index
    _auto_gen = False  # Call `generate` to generate a value
    _auto_dereference = True

    # These track each time a Field instance is created. Used to retain order.
    # The auto_creation_counter is used for fields that MongoEngine implicitly
    # creates, creation_counter is used for all user-specified fields.
    creation_counter = 0
    auto_creation_counter = -1

    def __init__(self, db_field=None, name=None, required=False, default=None,
                 unique=False, unique_with=None, primary_key=False,
                 validation=None, choices=None, verbose_name=None,
                 help_text=None):
        """
        :param db_field: The database field to store this field in
            (defaults to the name of the field)
        :param name: Deprecated - use db_field
        :param required: If the field is required. Whether it has to have a
            value or not. Defaults to False.
        :param default: (optional) The default value for this field if no value
            has been set (or if the value has been unset).  It Can be a
            callable.
        :param unique: Is the field value unique or not.  Defaults to False.
        :param unique_with: (optional) The other field this field should be
            unique with.
        :param primary_key: Mark this field as the primary key. Defaults to False.
        :param validation: (optional) A callable to validate the value of the
            field.  Generally this is deprecated in favour of the
            `FIELD.validate` method
        :param choices: (optional) The valid choices
        :param verbose_name: (optional)  The verbose name for the field.
            Designed to be human readable and is often used when generating
            model forms from the document model.
        :param help_text: (optional) The help text for this field and is often
            used when generating model forms from the document model.
        """
        # A primary key is always stored under MongoDB's reserved '_id' key.
        self.db_field = (db_field or name) if not primary_key else '_id'
        if name:
            msg = "Fields' 'name' attribute deprecated in favour of 'db_field'"
            warnings.warn(msg, DeprecationWarning)
        # Primary keys are implicitly required.
        self.required = required or primary_key
        self.default = default
        # unique_with implies the field participates in a unique index.
        self.unique = bool(unique or unique_with)
        self.unique_with = unique_with
        self.primary_key = primary_key
        self.validation = validation
        self.choices = choices
        self.verbose_name = verbose_name
        self.help_text = help_text

        # Adjust the appropriate creation counter, and save our local copy.
        # Implicit '_id' fields count downwards so they always sort before
        # user-declared fields when ordering by creation_counter.
        if self.db_field == '_id':
            self.creation_counter = BaseField.auto_creation_counter
            BaseField.auto_creation_counter -= 1
        else:
            self.creation_counter = BaseField.creation_counter
            BaseField.creation_counter += 1

    def __get__(self, instance, owner):
        """Descriptor for retrieving a value from a field in a document.
        """
        if instance is None:
            # Document class being used rather than a document object
            return self

        # Get value from document instance if available
        return instance._data.get(self.name)

    def __set__(self, instance, value):
        """Descriptor for assigning a value to a field in a document.
        """

        # If setting to None and theres a default
        # Then set the value to the default value
        if value is None and self.default is not None:
            value = self.default
            if callable(value):
                value = value()

        if instance._initialised:
            try:
                if (self.name not in instance._data or
                   instance._data[self.name] != value):
                    instance._mark_as_changed(self.name)
            except Exception:
                # Narrowed from a bare ``except`` so KeyboardInterrupt /
                # SystemExit are not swallowed.  Values can't always be
                # compared (e.g. naive vs tz-aware datetimes), so
                # conservatively mark the field as changed.
                instance._mark_as_changed(self.name)

        # Embedded documents keep a weak back-reference to their parent so
        # changes can propagate without creating a reference cycle.
        EmbeddedDocument = _import_class('EmbeddedDocument')
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = weakref.proxy(instance)
        instance._data[self.name] = value

    def error(self, message="", errors=None, field_name=None):
        """Raise a :class:`ValidationError` attributed to this field.
        """
        field_name = field_name if field_name else self.name
        raise ValidationError(message, errors=errors, field_name=field_name)

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type.
        """
        return value

    def to_mongo(self, value):
        """Convert a Python type to a MongoDB-compatible type.
        """
        return self.to_python(value)

    def prepare_query_value(self, op, value):
        """Prepare a value that is being used in a query for PyMongo.
        """
        return value

    def validate(self, value, clean=True):
        """Perform validation on a value.  Subclasses override this hook.
        """
        pass

    def _validate(self, value, **kwargs):
        """Run choice, custom and subclass validation for *value*.

        Raises :class:`ValidationError` (via :meth:`error`) on failure.
        """
        Document = _import_class('Document')
        EmbeddedDocument = _import_class('EmbeddedDocument')
        # check choices
        if self.choices:
            # For document values the *class* must match the choice list.
            is_cls = isinstance(value, (Document, EmbeddedDocument))
            value_to_check = value.__class__ if is_cls else value
            err_msg = 'an instance' if is_cls else 'one'
            if isinstance(self.choices[0], (list, tuple)):
                # Choices given as (value, label) pairs - validate the value.
                option_keys = [k for k, v in self.choices]
                if value_to_check not in option_keys:
                    msg = ('Value must be %s of %s' %
                           (err_msg, unicode(option_keys)))
                    self.error(msg)
            elif value_to_check not in self.choices:
                msg = ('Value must be %s of %s' %
                       (err_msg, unicode(self.choices)))
                self.error(msg)

        # check validation argument
        if self.validation is not None:
            if callable(self.validation):
                if not self.validation(value):
                    self.error('Value does not match custom validation method')
            else:
                raise ValueError('validation argument for "%s" must be a '
                                 'callable.' % self.name)

        self.validate(value, **kwargs)
|  | ||||
|  | ||||
class ComplexBaseField(BaseField):
    """Handles complex fields, such as lists / dictionaries.

    Allows for nesting of embedded documents inside complex types.
    Handles the lazy dereferencing of a queryset by lazily dereferencing all
    items in a list / dict rather than one at a time.

    .. versionadded:: 0.5
    """

    # Optional wrapped field used to convert / validate each contained item.
    field = None

    def __get__(self, instance, owner):
        """Descriptor to automatically dereference references.
        """
        if instance is None:
            # Document class being used rather than a document object
            return self

        ReferenceField = _import_class('ReferenceField')
        GenericReferenceField = _import_class('GenericReferenceField')
        # Only dereference when items may contain references (untyped
        # containers, or containers of (Generic)ReferenceField).
        dereference = (self._auto_dereference and
                       (self.field is None or isinstance(self.field,
                        (GenericReferenceField, ReferenceField))))

        _dereference = _import_class("DeReference")()

        self._auto_dereference = instance._fields[self.name]._auto_dereference
        if instance._initialised and dereference and instance._data.get(self.name):
            instance._data[self.name] = _dereference(
                instance._data.get(self.name), max_depth=1, instance=instance,
                name=self.name
            )

        value = super(ComplexBaseField, self).__get__(instance, owner)

        # Convert lists / values so we can watch for any changes on them
        if (isinstance(value, (list, tuple)) and
           not isinstance(value, BaseList)):
            value = BaseList(value, instance, self.name)
            instance._data[self.name] = value
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            value = BaseDict(value, instance, self.name)
            instance._data[self.name] = value

        # Second-stage dereference for change-tracking containers that have
        # not yet been dereferenced.
        if (self._auto_dereference and instance._initialised and
           isinstance(value, (BaseList, BaseDict))
           and not value._dereferenced):
            value = _dereference(
                value, max_depth=1, instance=instance, name=self.name
            )
            value._dereferenced = True
            instance._data[self.name] = value

        return value

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type.
        """
        Document = _import_class('Document')

        if isinstance(value, basestring):
            return value

        if hasattr(value, 'to_python'):
            return value.to_python()

        # Normalise sequences into an index-keyed dict so lists and dicts
        # share one conversion path; converted back to a list at the end.
        is_list = False
        if not hasattr(value, 'items'):
            try:
                is_list = True
                value = dict([(k, v) for k, v in enumerate(value)])
            except TypeError:  # Not iterable return the value
                return value

        if self.field:
            value_dict = dict([(key, self.field.to_python(item))
                               for key, item in value.items()])
        else:
            value_dict = {}
            for k, v in value.items():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        self.error('You can only reference documents once they'
                                   ' have been saved to the database')
                    collection = v._get_collection_name()
                    value_dict[k] = DBRef(collection, v.pk)
                elif hasattr(v, 'to_python'):
                    value_dict[k] = v.to_python()
                else:
                    # Recurse for nested containers.
                    value_dict[k] = self.to_python(v)

        if is_list:  # Convert back to a list
            return [v for k, v in sorted(value_dict.items(),
                                         key=operator.itemgetter(0))]
        return value_dict

    def to_mongo(self, value):
        """Convert a Python type to a MongoDB-compatible type.
        """
        Document = _import_class("Document")
        EmbeddedDocument = _import_class("EmbeddedDocument")
        GenericReferenceField = _import_class("GenericReferenceField")

        if isinstance(value, basestring):
            return value

        if hasattr(value, 'to_mongo'):
            if isinstance(value, Document):
                return GenericReferenceField().to_mongo(value)
            cls = value.__class__
            val = value.to_mongo()
            # Embedded documents record their class so they can be rebuilt.
            if (isinstance(value, EmbeddedDocument)):
                val['_cls'] = cls.__name__
            return val

        # Normalise sequences into an index-keyed dict (see to_python).
        is_list = False
        if not hasattr(value, 'items'):
            try:
                is_list = True
                value = dict([(k, v) for k, v in enumerate(value)])
            except TypeError:  # Not iterable return the value
                return value

        if self.field:
            value_dict = dict([(key, self.field.to_mongo(item))
                               for key, item in value.iteritems()])
        else:
            value_dict = {}
            for k, v in value.iteritems():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        self.error('You can only reference documents once they'
                                   ' have been saved to the database')

                    # If its a document that is not inheritable it won't have
                    # any _cls data so make it a generic reference allows
                    # us to dereference
                    meta = getattr(v, '_meta', {})
                    allow_inheritance = (
                        meta.get('allow_inheritance', ALLOW_INHERITANCE)
                        is True)
                    if not allow_inheritance and not self.field:
                        value_dict[k] = GenericReferenceField().to_mongo(v)
                    else:
                        collection = v._get_collection_name()
                        value_dict[k] = DBRef(collection, v.pk)
                elif hasattr(v, 'to_mongo'):
                    cls = v.__class__
                    val = v.to_mongo()
                    # Documents record their class so they can be rebuilt.
                    if (isinstance(v, (Document, EmbeddedDocument))):
                        val['_cls'] = cls.__name__
                    value_dict[k] = val
                else:
                    value_dict[k] = self.to_mongo(v)

        if is_list:  # Convert back to a list
            return [v for k, v in sorted(value_dict.items(),
                                         key=operator.itemgetter(0))]
        return value_dict

    def validate(self, value):
        """If field is provided ensure each contained value is valid.

        Collects per-item errors keyed by index / key and reports them in a
        single :class:`ValidationError`.
        """
        errors = {}
        if self.field:
            # Fixed: the original tested ``iteritems`` OR ``items`` but then
            # always called ``iteritems()``, breaking for mappings that only
            # expose ``items``.
            if hasattr(value, 'iteritems'):
                sequence = value.iteritems()
            elif hasattr(value, 'items'):
                sequence = value.items()
            else:
                sequence = enumerate(value)
            for k, v in sequence:
                try:
                    self.field._validate(v)
                except ValidationError as error:
                    errors[k] = error.errors or error
                except (ValueError, AssertionError) as error:
                    errors[k] = error

            if errors:
                field_class = self.field.__class__.__name__
                self.error('Invalid %s item (%s)' % (field_class, value),
                           errors=errors)
        # Don't allow empty values if required
        if self.required and not value:
            self.error('Field is required and cannot be empty')

    def prepare_query_value(self, op, value):
        return self.to_mongo(value)

    def lookup_member(self, member_name):
        """Delegate sub-field lookup to the wrapped field, if any."""
        if self.field:
            return self.field.lookup_member(member_name)
        return None

    def _set_owner_document(self, owner_document):
        # Propagate ownership to the wrapped item field as well.
        if self.field:
            self.field.owner_document = owner_document
        self._owner_document = owner_document

    def _get_owner_document(self):
        # Fixed: the original getter took an extra ``owner_document``
        # argument and assigned instead of returning, so reading the
        # ``owner_document`` property raised TypeError.
        return self._owner_document

    owner_document = property(_get_owner_document, _set_owner_document)
|  | ||||
|  | ||||
class ObjectIdField(BaseField):
    """A field wrapper around MongoDB's ObjectIds.
    """

    def to_python(self, value):
        """Coerce *value* to an :class:`~bson.ObjectId` (raises on bad input)."""
        if not isinstance(value, ObjectId):
            value = ObjectId(value)
        return value

    def to_mongo(self, value):
        """Convert to an ObjectId, reporting a ValidationError on failure."""
        if not isinstance(value, ObjectId):
            try:
                return ObjectId(unicode(value))
            except Exception as e:
                # e.message attribute has been deprecated since Python 2.6
                self.error(unicode(e))
        return value

    def prepare_query_value(self, op, value):
        return self.to_mongo(value)

    def validate(self, value):
        """Check that *value* is convertible to an ObjectId."""
        try:
            ObjectId(unicode(value))
        except Exception:
            # Narrowed from a bare ``except`` so KeyboardInterrupt /
            # SystemExit are not swallowed.
            self.error('Invalid Object ID')
|  | ||||
|  | ||||
class GeoJsonBaseField(BaseField):
    """A geo json field storing a geojson style object.

    Subclasses set ``_type`` (e.g. "Point", "LineString", "Polygon") and the
    matching ``_validate_<type>`` method is dispatched to at validation time.

    .. versionadded:: 0.8
    """

    _geo_index = pymongo.GEOSPHERE
    _type = "GeoBase"

    def __init__(self, auto_index=True, *args, **kwargs):
        """
        :param auto_index: Automatically create a "2dsphere" index. Defaults
            to `True`.
        """
        self._name = "%sField" % self._type
        if not auto_index:
            self._geo_index = False
        super(GeoJsonBaseField, self).__init__(*args, **kwargs)

    def validate(self, value):
        """Validate the GeoJson object based on its type
        """
        if isinstance(value, dict):
            # Full GeoJSON dict: check the type tag, then recurse on the
            # bare coordinates.
            if set(value.keys()) == set(['type', 'coordinates']):
                if value['type'] != self._type:
                    self.error('%s type must be "%s"' % (self._name, self._type))
                return self.validate(value['coordinates'])
            else:
                self.error('%s can only accept a valid GeoJson dictionary'
                           ' or lists of (x, y)' % self._name)
                return
        elif not isinstance(value, (list, tuple)):
            self.error('%s can only accept lists of [x, y]' % self._name)
            return

        # Dispatch to the type-specific validator, e.g. _validate_point.
        validate = getattr(self, "_validate_%s" % self._type.lower())
        error = validate(value)
        if error:
            self.error(error)

    def _validate_polygon(self, value):
        """Return an error string for an invalid polygon, else None."""
        if not isinstance(value, (list, tuple)):
            return 'Polygons must contain list of linestrings'

        # Quick and dirty validator: probe three levels of nesting.
        try:
            value[0][0][0]
        except (TypeError, IndexError, KeyError):
            # Narrowed from a bare ``except``: only the errors the probe
            # can raise for malformed input.
            return "Invalid Polygon must contain at least one valid linestring"

        errors = []
        for val in value:
            error = self._validate_linestring(val, False)
            # GeoJSON polygon rings must be closed.
            if not error and val[0] != val[-1]:
                error = 'LineStrings must start and end at the same point'
            if error and error not in errors:
                errors.append(error)
        if errors:
            return "Invalid Polygon:\n%s" % ", ".join(errors)

    def _validate_linestring(self, value, top_level=True):
        """Validates a linestring; returns an error string or None."""
        if not isinstance(value, (list, tuple)):
            return 'LineStrings must contain list of coordinate pairs'

        # Quick and dirty validator: probe two levels of nesting.
        try:
            value[0][0]
        except (TypeError, IndexError, KeyError):
            # Narrowed from a bare ``except`` (see _validate_polygon).
            return "Invalid LineString must contain at least one valid point"

        errors = []
        for val in value:
            error = self._validate_point(val)
            if error and error not in errors:
                errors.append(error)
        if errors:
            if top_level:
                return "Invalid LineString:\n%s" % ", ".join(errors)
            else:
                return "%s" % ", ".join(errors)

    def _validate_point(self, value):
        """Validate each set of coords; returns an error string or None."""
        if not isinstance(value, (list, tuple)):
            return 'Points must be a list of coordinate pairs'
        elif not len(value) == 2:
            return "Value (%s) must be a two-dimensional point" % repr(value)
        elif (not isinstance(value[0], (float, int)) or
              not isinstance(value[1], (float, int))):
            return "Both values (%s) in point must be float or int" % repr(value)

    def to_mongo(self, value):
        """Wrap bare coordinates in a GeoJSON SON document."""
        if isinstance(value, dict):
            return value
        return SON([("type", self._type), ("coordinates", value)])
							
								
								
									
										404
									
								
								mongoengine/base/metaclasses.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										404
									
								
								mongoengine/base/metaclasses.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,404 @@ | ||||
| import warnings | ||||
|  | ||||
| import pymongo | ||||
|  | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import InvalidDocumentError | ||||
| from mongoengine.python_support import PY3 | ||||
| from mongoengine.queryset import (DO_NOTHING, DoesNotExist, | ||||
|                                   MultipleObjectsReturned, | ||||
|                                   QuerySet, QuerySetManager) | ||||
|  | ||||
| from mongoengine.base.common import _document_registry, ALLOW_INHERITANCE | ||||
| from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField | ||||
|  | ||||
| __all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass') | ||||
|  | ||||
|  | ||||
| class DocumentMetaclass(type): | ||||
|     """Metaclass for all documents. | ||||
|     """ | ||||
|  | ||||
|     def __new__(cls, name, bases, attrs): | ||||
|         flattened_bases = cls._get_bases(bases) | ||||
|         super_new = super(DocumentMetaclass, cls).__new__ | ||||
|  | ||||
|         # If a base class just call super | ||||
|         metaclass = attrs.get('my_metaclass') | ||||
|         if metaclass and issubclass(metaclass, DocumentMetaclass): | ||||
|             return super_new(cls, name, bases, attrs) | ||||
|  | ||||
|         attrs['_is_document'] = attrs.get('_is_document', False) | ||||
|  | ||||
|         # EmbeddedDocuments could have meta data for inheritance | ||||
|         if 'meta' in attrs: | ||||
|             attrs['_meta'] = attrs.pop('meta') | ||||
|  | ||||
|         # EmbeddedDocuments should inherit meta data | ||||
|         if '_meta' not in attrs: | ||||
|             meta = MetaDict() | ||||
|             for base in flattened_bases[::-1]: | ||||
|                 # Add any mixin metadata from plain objects | ||||
|                 if hasattr(base, 'meta'): | ||||
|                     meta.merge(base.meta) | ||||
|                 elif hasattr(base, '_meta'): | ||||
|                     meta.merge(base._meta) | ||||
|             attrs['_meta'] = meta | ||||
|  | ||||
|         # Handle document Fields | ||||
|  | ||||
|         # Merge all fields from subclasses | ||||
|         doc_fields = {} | ||||
|         for base in flattened_bases[::-1]: | ||||
|             if hasattr(base, '_fields'): | ||||
|                 doc_fields.update(base._fields) | ||||
|  | ||||
|             # Standard object mixin - merge in any Fields | ||||
|             if not hasattr(base, '_meta'): | ||||
|                 base_fields = {} | ||||
|                 for attr_name, attr_value in base.__dict__.iteritems(): | ||||
|                     if not isinstance(attr_value, BaseField): | ||||
|                         continue | ||||
|                     attr_value.name = attr_name | ||||
|                     if not attr_value.db_field: | ||||
|                         attr_value.db_field = attr_name | ||||
|                     base_fields[attr_name] = attr_value | ||||
|  | ||||
|                 doc_fields.update(base_fields) | ||||
|  | ||||
|         # Discover any document fields | ||||
|         field_names = {} | ||||
|         for attr_name, attr_value in attrs.iteritems(): | ||||
|             if not isinstance(attr_value, BaseField): | ||||
|                 continue | ||||
|             attr_value.name = attr_name | ||||
|             if not attr_value.db_field: | ||||
|                 attr_value.db_field = attr_name | ||||
|             doc_fields[attr_name] = attr_value | ||||
|  | ||||
|             # Count names to ensure no db_field redefinitions | ||||
|             field_names[attr_value.db_field] = field_names.get( | ||||
|                 attr_value.db_field, 0) + 1 | ||||
|  | ||||
|         # Ensure no duplicate db_fields | ||||
|         duplicate_db_fields = [k for k, v in field_names.items() if v > 1] | ||||
|         if duplicate_db_fields: | ||||
|             msg = ("Multiple db_fields defined for: %s " % | ||||
|                    ", ".join(duplicate_db_fields)) | ||||
|             raise InvalidDocumentError(msg) | ||||
|  | ||||
|         # Set _fields and db_field maps | ||||
|         attrs['_fields'] = doc_fields | ||||
|         attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k)) | ||||
|                                       for k, v in doc_fields.iteritems()]) | ||||
|         attrs['_reverse_db_field_map'] = dict( | ||||
|             (v, k) for k, v in attrs['_db_field_map'].iteritems()) | ||||
|  | ||||
|         attrs['_fields_ordered'] = tuple(i[1] for i in sorted( | ||||
|                                          (v.creation_counter, v.name) | ||||
|                                          for v in doc_fields.itervalues())) | ||||
|  | ||||
|         # | ||||
|         # Set document hierarchy | ||||
|         # | ||||
|         superclasses = () | ||||
|         class_name = [name] | ||||
|         for base in flattened_bases: | ||||
|             if (not getattr(base, '_is_base_cls', True) and | ||||
|                not getattr(base, '_meta', {}).get('abstract', True)): | ||||
|                 # Collate heirarchy for _cls and _subclasses | ||||
|                 class_name.append(base.__name__) | ||||
|  | ||||
|             if hasattr(base, '_meta'): | ||||
|                 # Warn if allow_inheritance isn't set and prevent | ||||
|                 # inheritance of classes where inheritance is set to False | ||||
|                 allow_inheritance = base._meta.get('allow_inheritance', | ||||
|                                                    ALLOW_INHERITANCE) | ||||
|                 if (allow_inheritance is not True and | ||||
|                    not base._meta.get('abstract')): | ||||
|                     raise ValueError('Document %s may not be subclassed' % | ||||
|                                      base.__name__) | ||||
|  | ||||
|         # Get superclasses from last base superclass | ||||
|         document_bases = [b for b in flattened_bases | ||||
|                           if hasattr(b, '_class_name')] | ||||
|         if document_bases: | ||||
|             superclasses = document_bases[0]._superclasses | ||||
|             superclasses += (document_bases[0]._class_name, ) | ||||
|  | ||||
|         _cls = '.'.join(reversed(class_name)) | ||||
|         attrs['_class_name'] = _cls | ||||
|         attrs['_superclasses'] = superclasses | ||||
|         attrs['_subclasses'] = (_cls, ) | ||||
|         attrs['_types'] = attrs['_subclasses']  # TODO depreciate _types | ||||
|  | ||||
|         # Create the new_class | ||||
|         new_class = super_new(cls, name, bases, attrs) | ||||
|  | ||||
|         # Set _subclasses | ||||
|         for base in document_bases: | ||||
|             if _cls not in base._subclasses: | ||||
|                 base._subclasses += (_cls,) | ||||
|             base._types = base._subclasses   # TODO depreciate _types | ||||
|  | ||||
|         Document, EmbeddedDocument, DictField = cls._import_classes() | ||||
|  | ||||
|         if issubclass(new_class, Document): | ||||
|             new_class._collection = None | ||||
|  | ||||
|         # Add class to the _document_registry | ||||
|         _document_registry[new_class._class_name] = new_class | ||||
|  | ||||
|         # In Python 2, User-defined methods objects have special read-only | ||||
|         # attributes 'im_func' and 'im_self' which contain the function obj | ||||
|         # and class instance object respectively.  With Python 3 these special | ||||
|         # attributes have been replaced by __func__ and __self__.  The Blinker | ||||
|         # module continues to use im_func and im_self, so the code below | ||||
|         # copies __func__ into im_func and __self__ into im_self for | ||||
|         # classmethod objects in Document derived classes. | ||||
|         if PY3: | ||||
|             for key, val in new_class.__dict__.items(): | ||||
|                 if isinstance(val, classmethod): | ||||
|                     f = val.__get__(new_class) | ||||
|                     if hasattr(f, '__func__') and not hasattr(f, 'im_func'): | ||||
|                         f.__dict__.update({'im_func': getattr(f, '__func__')}) | ||||
|                     if hasattr(f, '__self__') and not hasattr(f, 'im_self'): | ||||
|                         f.__dict__.update({'im_self': getattr(f, '__self__')}) | ||||
|  | ||||
|         # Handle delete rules | ||||
|         for field in new_class._fields.itervalues(): | ||||
|             f = field | ||||
|             f.owner_document = new_class | ||||
|             delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING) | ||||
|             if isinstance(f, ComplexBaseField) and hasattr(f, 'field'): | ||||
|                 delete_rule = getattr(f.field, | ||||
|                                       'reverse_delete_rule', | ||||
|                                       DO_NOTHING) | ||||
|                 if isinstance(f, DictField) and delete_rule != DO_NOTHING: | ||||
|                     msg = ("Reverse delete rules are not supported " | ||||
|                            "for %s (field: %s)" % | ||||
|                            (field.__class__.__name__, field.name)) | ||||
|                     raise InvalidDocumentError(msg) | ||||
|  | ||||
|                 f = field.field | ||||
|  | ||||
|             if delete_rule != DO_NOTHING: | ||||
|                 if issubclass(new_class, EmbeddedDocument): | ||||
|                     msg = ("Reverse delete rules are not supported for " | ||||
|                            "EmbeddedDocuments (field: %s)" % field.name) | ||||
|                     raise InvalidDocumentError(msg) | ||||
|                 f.document_type.register_delete_rule(new_class, | ||||
|                                                      field.name, delete_rule) | ||||
|  | ||||
|             if (field.name and hasattr(Document, field.name) and | ||||
|                EmbeddedDocument not in new_class.mro()): | ||||
|                 msg = ("%s is a document method and not a valid " | ||||
|                        "field name" % field.name) | ||||
|                 raise InvalidDocumentError(msg) | ||||
|  | ||||
|         return new_class | ||||
|  | ||||
    def add_to_class(self, name, value):
        """Attach *value* to this class under attribute *name*.

        Thin wrapper around :func:`setattr`.
        """
        setattr(self, name, value)
|  | ||||
|     @classmethod | ||||
|     def _get_bases(cls, bases): | ||||
|         if isinstance(bases, BasesTuple): | ||||
|             return bases | ||||
|         seen = [] | ||||
|         bases = cls.__get_bases(bases) | ||||
|         unique_bases = (b for b in bases if not (b in seen or seen.append(b))) | ||||
|         return BasesTuple(unique_bases) | ||||
|  | ||||
|     @classmethod | ||||
|     def __get_bases(cls, bases): | ||||
|         for base in bases: | ||||
|             if base is object: | ||||
|                 continue | ||||
|             yield base | ||||
|             for child_base in cls.__get_bases(base.__bases__): | ||||
|                 yield child_base | ||||
|  | ||||
|     @classmethod | ||||
|     def _import_classes(cls): | ||||
|         Document = _import_class('Document') | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|         DictField = _import_class('DictField') | ||||
|         return (Document, EmbeddedDocument, DictField) | ||||
|  | ||||
|  | ||||
class TopLevelDocumentMetaclass(DocumentMetaclass):
    """Metaclass for top-level documents (i.e. documents that have their own
    collection in the database).
    """

    def __new__(cls, name, bases, attrs):
        """Build a collection-backed document class.

        Resolves ``meta`` options (merging them down the base-class chain),
        enforces abstract/inheritance rules, picks the collection name,
        wires up the primary-key field and attaches per-class exception
        types (``DoesNotExist``, ``MultipleObjectsReturned``).
        """
        flattened_bases = cls._get_bases(bases)
        super_new = super(TopLevelDocumentMetaclass, cls).__new__

        # Set default _meta data if base class, otherwise get user defined meta
        if (attrs.get('my_metaclass') == TopLevelDocumentMetaclass):
            # defaults
            attrs['_meta'] = {
                'abstract': True,
                'max_documents': None,
                'max_size': None,
                'ordering': [],  # default ordering applied at runtime
                'indexes': [],  # indexes to be ensured at runtime
                'id_field': None,
                'index_background': False,
                'index_drop_dups': False,
                'index_opts': None,
                'delete_rules': None,
                'allow_inheritance': None,
            }
            attrs['_is_base_cls'] = True
            attrs['_meta'].update(attrs.get('meta', {}))
        else:
            attrs['_meta'] = attrs.get('meta', {})
            # Explicitly set abstract to false unless set
            attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False)
            attrs['_is_base_cls'] = False

        # Set flag marking as document class - as opposed to an object mixin
        attrs['_is_document'] = True

        # Ensure queryset_class is inherited
        if 'objects' in attrs:
            manager = attrs['objects']
            if hasattr(manager, 'queryset_class'):
                attrs['_meta']['queryset_class'] = manager.queryset_class

        # Clean up top level meta
        if 'meta' in attrs:
            del(attrs['meta'])

        # Find the parent document class
        parent_doc_cls = [b for b in flattened_bases
                        if b.__class__ == TopLevelDocumentMetaclass]
        parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0]

        # Prevent classes setting collection different to their parents
        # If parent wasn't an abstract class
        if (parent_doc_cls and 'collection' in attrs.get('_meta', {})
            and not parent_doc_cls._meta.get('abstract', True)):
                msg = "Trying to set a collection on a subclass (%s)" % name
                warnings.warn(msg, SyntaxWarning)
                del(attrs['_meta']['collection'])

        # Ensure abstract documents have abstract bases
        if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
            if (parent_doc_cls and
                not parent_doc_cls._meta.get('abstract', False)):
                msg = "Abstract document cannot have non-abstract base"
                raise ValueError(msg)
            # Abstract/base classes need no further processing.
            return super_new(cls, name, bases, attrs)

        # Merge base class metas.
        # Uses a special MetaDict that handles various merging rules
        meta = MetaDict()
        for base in flattened_bases[::-1]:
            # Add any mixin metadata from plain objects
            if hasattr(base, 'meta'):
                meta.merge(base.meta)
            elif hasattr(base, '_meta'):
                meta.merge(base._meta)

            # Set collection in the meta if its callable
            if (getattr(base, '_is_document', False) and
                not base._meta.get('abstract')):
                collection = meta.get('collection', None)
                if callable(collection):
                    meta['collection'] = collection(base)

        meta.merge(attrs.get('_meta', {}))  # Top level meta

        # Only simple classes (direct subclasses of Document)
        # may set allow_inheritance to False
        simple_class = all([b._meta.get('abstract')
                            for b in flattened_bases if hasattr(b, '_meta')])
        if (not simple_class and meta['allow_inheritance'] is False and
           not meta['abstract']):
            raise ValueError('Only direct subclasses of Document may set '
                             '"allow_inheritance" to False')

        # Set default collection name (CamelCase class name -> snake_case)
        if 'collection' not in meta:
            meta['collection'] = ''.join('_%s' % c if c.isupper() else c
                                         for c in name).strip('_').lower()
        attrs['_meta'] = meta

        # Call super and get the new class
        new_class = super_new(cls, name, bases, attrs)

        meta = new_class._meta

        # Set index specifications
        meta['index_specs'] = new_class._build_index_specs(meta['indexes'])

        # If collection is a callable - call it and set the value
        collection = meta.get('collection')
        if callable(collection):
            new_class._meta['collection'] = collection(new_class)

        # Provide a default queryset unless exists or one has been set
        if 'objects' not in dir(new_class):
            new_class.objects = QuerySetManager()

        # Validate the fields and set primary key if needed
        for field_name, field in new_class._fields.iteritems():
            if field.primary_key:
                # Ensure only one primary key is set
                current_pk = new_class._meta.get('id_field')
                if current_pk and current_pk != field_name:
                    raise ValueError('Cannot override primary key field')

                # Set primary key
                if not current_pk:
                    new_class._meta['id_field'] = field_name
                    new_class.id = field

        # Set primary key if not defined by the document
        new_class._auto_id_field = False
        if not new_class._meta.get('id_field'):
            new_class._auto_id_field = True
            new_class._meta['id_field'] = 'id'
            # '_id' is the MongoDB-reserved document id field
            new_class._fields['id'] = ObjectIdField(db_field='_id')
            new_class._fields['id'].name = 'id'
            new_class.id = new_class._fields['id']

        # Prepend id field to _fields_ordered
        if 'id' in new_class._fields and 'id' not in new_class._fields_ordered:
            new_class._fields_ordered = ('id', ) + new_class._fields_ordered

        # Merge in exceptions with parent hierarchy so subclass exceptions
        # are caught by handlers written against the parent's exceptions
        exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
        module = attrs.get('__module__')
        for exc in exceptions_to_merge:
            name = exc.__name__
            parents = tuple(getattr(base, name) for base in flattened_bases
                         if hasattr(base, name)) or (exc,)
            # Create new exception and set to new_class
            exception = type(name, parents, {'__module__': module})
            setattr(new_class, name, exception)

        return new_class
|  | ||||
|  | ||||
class MetaDict(dict):
    """Custom dictionary for meta classes.

    Handles the merging of set indexes: keys listed in ``_merge_options``
    are list-concatenated onto any existing value instead of replacing it.
    """
    # Keys whose values accumulate across merges rather than overwrite.
    _merge_options = ('indexes',)

    def merge(self, new_options):
        """Merge *new_options* into this dict in place.

        Keys named in ``_merge_options`` extend the existing list;
        every other key simply overwrites.

        :param new_options: a mapping of meta options to fold in
        """
        # .items() instead of .iteritems(): identical on Python 2 and also
        # works on Python 3, which this module supports (see the PY3 branch
        # elsewhere in the file).
        for k, v in new_options.items():
            if k in self._merge_options:
                self[k] = self.get(k, []) + v
            else:
                self[k] = v
|  | ||||
|  | ||||
class BasesTuple(tuple):
    """Special class to handle introspection of bases tuple in __new__.

    A plain ``tuple`` subclass used purely as a marker: ``_get_bases``
    checks for it to avoid re-flattening an already-flattened bases tuple.
    """
    pass
							
								
								
									
										50
									
								
								mongoengine/common.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										50
									
								
								mongoengine/common.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,50 @@ | ||||
| _class_registry_cache = {} | ||||
|  | ||||
|  | ||||
| def _import_class(cls_name): | ||||
|     """Cache mechanism for imports. | ||||
|  | ||||
|     Due to complications of circular imports mongoengine needs to do lots of | ||||
|     inline imports in functions.  This is inefficient as classes are | ||||
|     imported repeated throughout the mongoengine code.  This is | ||||
|     compounded by some recursive functions requiring inline imports. | ||||
|  | ||||
|     :mod:`mongoengine.common` provides a single point to import all these | ||||
|     classes.  Circular imports aren't an issue as it dynamically imports the | ||||
|     class when first needed.  Subsequent calls to the | ||||
|     :func:`~mongoengine.common._import_class` can then directly retrieve the | ||||
|     class from the :data:`mongoengine.common._class_registry_cache`. | ||||
|     """ | ||||
|     if cls_name in _class_registry_cache: | ||||
|         return _class_registry_cache.get(cls_name) | ||||
|  | ||||
|     doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument', | ||||
|                    'MapReduceDocument') | ||||
|     field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField', | ||||
|                      'FileField', 'GenericReferenceField', | ||||
|                      'GenericEmbeddedDocumentField', 'GeoPointField', | ||||
|                      'PointField', 'LineStringField', 'ListField', | ||||
|                      'PolygonField', 'ReferenceField', 'StringField', | ||||
|                      'ComplexBaseField', 'GeoJsonBaseField') | ||||
|     queryset_classes = ('OperationError',) | ||||
|     deref_classes = ('DeReference',) | ||||
|  | ||||
|     if cls_name in doc_classes: | ||||
|         from mongoengine import document as module | ||||
|         import_classes = doc_classes | ||||
|     elif cls_name in field_classes: | ||||
|         from mongoengine import fields as module | ||||
|         import_classes = field_classes | ||||
|     elif cls_name in queryset_classes: | ||||
|         from mongoengine import queryset as module | ||||
|         import_classes = queryset_classes | ||||
|     elif cls_name in deref_classes: | ||||
|         from mongoengine import dereference as module | ||||
|         import_classes = deref_classes | ||||
|     else: | ||||
|         raise ValueError('No import set for: ' % cls_name) | ||||
|  | ||||
|     for cls in import_classes: | ||||
|         _class_registry_cache[cls] = getattr(module, cls) | ||||
|  | ||||
|     return _class_registry_cache.get(cls_name) | ||||
| @@ -1,5 +1,5 @@ | ||||
| import pymongo | ||||
| from pymongo import Connection, ReplicaSetConnection, uri_parser | ||||
| from pymongo import MongoClient, MongoReplicaSetClient, uri_parser | ||||
|  | ||||
|  | ||||
| __all__ = ['ConnectionError', 'connect', 'register_connection', | ||||
| @@ -18,7 +18,7 @@ _connections = {} | ||||
| _dbs = {} | ||||
|  | ||||
|  | ||||
| def register_connection(alias, name, host='localhost', port=27017, | ||||
| def register_connection(alias, name, host=None, port=None, | ||||
|                         is_slave=False, read_preference=False, slaves=None, | ||||
|                         username=None, password=None, **kwargs): | ||||
|     """Add a connection. | ||||
| @@ -28,8 +28,10 @@ def register_connection(alias, name, host='localhost', port=27017, | ||||
|     :param name: the name of the specific database to use | ||||
|     :param host: the host name of the :program:`mongod` instance to connect to | ||||
|     :param port: the port that the :program:`mongod` instance is running on | ||||
|     :param is_slave: whether the connection can act as a slave ** Depreciated pymongo 2.0.1+ | ||||
|     :param read_preference: The read preference for the collection ** Added pymongo 2.1 | ||||
|     :param is_slave: whether the connection can act as a slave | ||||
|       ** Depreciated pymongo 2.0.1+ | ||||
|     :param read_preference: The read preference for the collection | ||||
|        ** Added pymongo 2.1 | ||||
|     :param slaves: a list of aliases of slave connections; each of these must | ||||
|         be a registered connection that has :attr:`is_slave` set to ``True`` | ||||
|     :param username: username to authenticate with | ||||
| @@ -41,8 +43,8 @@ def register_connection(alias, name, host='localhost', port=27017, | ||||
|  | ||||
|     conn_settings = { | ||||
|         'name': name, | ||||
|         'host': host, | ||||
|         'port': port, | ||||
|         'host': host or 'localhost', | ||||
|         'port': port or 27017, | ||||
|         'is_slave': is_slave, | ||||
|         'slaves': slaves or [], | ||||
|         'username': username, | ||||
| @@ -51,19 +53,15 @@ def register_connection(alias, name, host='localhost', port=27017, | ||||
|     } | ||||
|  | ||||
|     # Handle uri style connections | ||||
|     if "://" in host: | ||||
|         uri_dict = uri_parser.parse_uri(host) | ||||
|         if uri_dict.get('database') is None: | ||||
|             raise ConnectionError("If using URI style connection include "\ | ||||
|                                   "database name in string") | ||||
|     if "://" in conn_settings['host']: | ||||
|         uri_dict = uri_parser.parse_uri(conn_settings['host']) | ||||
|         conn_settings.update({ | ||||
|             'host': host, | ||||
|             'name': uri_dict.get('database'), | ||||
|             'name': uri_dict.get('database') or name, | ||||
|             'username': uri_dict.get('username'), | ||||
|             'password': uri_dict.get('password'), | ||||
|             'read_preference': read_preference, | ||||
|         }) | ||||
|         if "replicaSet" in host: | ||||
|         if "replicaSet" in conn_settings['host']: | ||||
|             conn_settings['replicaSet'] = True | ||||
|  | ||||
|     conn_settings.update(kwargs) | ||||
| @@ -110,15 +108,15 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||
|                 conn_settings['slaves'] = slaves | ||||
|                 conn_settings.pop('read_preference', None) | ||||
|  | ||||
|         connection_class = Connection | ||||
|         connection_class = MongoClient | ||||
|         if 'replicaSet' in conn_settings: | ||||
|             conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) | ||||
|             # Discard port since it can't be used on ReplicaSetConnection | ||||
|             # Discard port since it can't be used on MongoReplicaSetClient | ||||
|             conn_settings.pop('port', None) | ||||
|             # Discard replicaSet if not base string | ||||
|             if not isinstance(conn_settings['replicaSet'], basestring): | ||||
|                 conn_settings.pop('replicaSet', None) | ||||
|             connection_class = ReplicaSetConnection | ||||
|             connection_class = MongoReplicaSetClient | ||||
|  | ||||
|         try: | ||||
|             _connections[alias] = connection_class(**conn_settings) | ||||
| @@ -135,11 +133,12 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||
|     if alias not in _dbs: | ||||
|         conn = get_connection(alias) | ||||
|         conn_settings = _connection_settings[alias] | ||||
|         _dbs[alias] = conn[conn_settings['name']] | ||||
|         db = conn[conn_settings['name']] | ||||
|         # Authenticate if necessary | ||||
|         if conn_settings['username'] and conn_settings['password']: | ||||
|             _dbs[alias].authenticate(conn_settings['username'], | ||||
|                                      conn_settings['password']) | ||||
|             db.authenticate(conn_settings['username'], | ||||
|                             conn_settings['password']) | ||||
|         _dbs[alias] = db | ||||
|     return _dbs[alias] | ||||
|  | ||||
|  | ||||
| @@ -161,6 +160,7 @@ def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs): | ||||
|  | ||||
|     return get_connection(alias) | ||||
|  | ||||
|  | ||||
| # Support old naming convention | ||||
| _get_connection = get_connection | ||||
| _get_db = get_db | ||||
|   | ||||
							
								
								
									
										228
									
								
								mongoengine/context_managers.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										228
									
								
								mongoengine/context_managers.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,228 @@ | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||
| from mongoengine.queryset import QuerySet | ||||
|  | ||||
|  | ||||
| __all__ = ("switch_db", "switch_collection", "no_dereference", | ||||
|            "no_sub_classes", "query_counter") | ||||
|  | ||||
|  | ||||
class switch_db(object):
    """Context manager that temporarily rebinds a Document class to a
    different registered database alias.

    Example ::

        # Register connections
        register_connection('default', 'mongoenginetest')
        register_connection('testdb-1', 'mongoenginetest2')

        class Group(Document):
            name = StringField()

        Group(name="test").save()  # Saves in the default db

        with switch_db(Group, 'testdb-1') as Group:
            Group(name="hello testdb!").save()  # Saves in testdb-1

    """

    def __init__(self, cls, db_alias):
        """Snapshot the class's current alias and cached collection so
        they can be restored on exit.

        :param cls: the class to change the registered db for
        :param db_alias: the alias of the database to use in the block
        """
        self.cls = cls
        self.db_alias = db_alias
        self.collection = cls._get_collection()
        self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)

    def __enter__(self):
        """Install the new alias and invalidate the cached collection so
        it is re-resolved lazily against the new database."""
        self.cls._meta["db_alias"] = self.db_alias
        self.cls._collection = None
        return self.cls

    def __exit__(self, t, value, traceback):
        """Restore the original alias and cached collection."""
        self.cls._meta["db_alias"] = self.ori_db_alias
        self.cls._collection = self.collection
|  | ||||
|  | ||||
class switch_collection(object):
    """Context manager that temporarily redirects a Document class to a
    different collection name.

    Example ::

        class Group(Document):
            name = StringField()

        Group(name="test").save()  # Saves in the default db

        with switch_collection(Group, 'group1') as Group:
            Group(name="hello testdb!").save()  # Saves in group1 collection

    """

    def __init__(self, cls, collection_name):
        """Snapshot the class's collection and name resolver so they can
        be restored on exit.

        :param cls: the class to change the collection for
        :param collection_name: the name of the collection to use
        """
        self.cls = cls
        self.collection_name = collection_name
        self.ori_collection = cls._get_collection()
        self.ori_get_collection_name = cls._get_collection_name

    def __enter__(self):
        """Override ``_get_collection_name`` with a resolver bound to the
        requested name, and drop the cached collection."""
        self.cls._get_collection_name = classmethod(
            lambda _cls: self.collection_name)
        self.cls._collection = None
        return self.cls

    def __exit__(self, t, value, traceback):
        """Restore the original name resolver and cached collection."""
        self.cls._get_collection_name = self.ori_get_collection_name
        self.cls._collection = self.ori_collection
|  | ||||
|  | ||||
class no_dereference(object):
    """Context manager that turns off all dereferencing in Documents for
    the duration of the block::

        with no_dereference(Group) as Group:
            Group.objects.find()

    """

    def __init__(self, cls):
        """Collect the names of every dereferencing field on *cls*.

        :param cls: the class to turn dereferencing off on
        """
        self.cls = cls

        deref_types = (_import_class('ReferenceField'),
                       _import_class('GenericReferenceField'),
                       _import_class('ComplexBaseField'))

        self.deref_fields = [name for name, field
                             in self.cls._fields.iteritems()
                             if isinstance(field, deref_types)]

    def __enter__(self):
        """Disable auto-dereferencing on the collected fields."""
        self._set_auto_dereference(False)
        return self.cls

    def __exit__(self, t, value, traceback):
        """Re-enable auto-dereferencing on the collected fields."""
        self._set_auto_dereference(True)
        return self.cls

    def _set_auto_dereference(self, enabled):
        """Apply *enabled* to the ``_auto_dereference`` flag of each
        collected field."""
        for name in self.deref_fields:
            self.cls._fields[name]._auto_dereference = enabled
|  | ||||
|  | ||||
class no_sub_classes(object):
    """Context manager that restricts queries to instances of *cls* only,
    excluding inherited subclasses::

        with no_sub_classes(Group) as Group:
            Group.objects.find()

    """

    def __init__(self, cls):
        """:param cls: the class whose subclasses should be hidden"""
        self.cls = cls

    def __enter__(self):
        """Stash the real ``_subclasses`` tuple and narrow it to the
        class itself."""
        target = self.cls
        target._all_subclasses = target._subclasses
        target._subclasses = (target,)
        return target

    def __exit__(self, t, value, traceback):
        """Put the stashed ``_subclasses`` back and drop the stash."""
        target = self.cls
        target._subclasses = target._all_subclasses
        del target._all_subclasses
        return target
|  | ||||
|  | ||||
class QuerySetNoDeRef(QuerySet):
    """Special no_dereference QuerySet.

    NOTE(review): ``__dereference`` takes ``items`` in the ``self`` slot,
    so the first positional argument plays the instance role, and the name
    is mangled to ``_QuerySetNoDeRef__dereference`` — presumably intended
    to shadow the parent's private dereference hook and return items
    untouched; confirm the mangled name is the one QuerySet actually calls.
    """
    def __dereference(items, max_depth=1, instance=None, name=None):
            return items
|  | ||||
|  | ||||
class query_counter(object):
    """Context manager that reports how many queries were issued while it
    was active, by reading MongoDB's profiling collection.  Instances
    compare directly against integers: ``assert q == 2``."""

    def __init__(self):
        """Bind to the default database and zero the self-query counter."""
        self.counter = 0
        self.db = get_db()

    def __enter__(self):
        """Drop old profile data and enable full profiling so that only
        queries issued inside the block are counted."""
        self.db.set_profiling_level(0)
        self.db.system.profile.drop()
        self.db.set_profiling_level(2)
        return self

    def __exit__(self, t, value, traceback):
        """Turn profiling back off."""
        self.db.set_profiling_level(0)

    def __eq__(self, value):
        """== comparison against the current query count."""
        return self._get_count() == value

    def __ne__(self, value):
        """!= comparison against the current query count."""
        return self._get_count() != value

    def __lt__(self, value):
        """< comparison against the current query count."""
        return self._get_count() < value

    def __le__(self, value):
        """<= comparison against the current query count."""
        return self._get_count() <= value

    def __gt__(self, value):
        """> comparison against the current query count."""
        return self._get_count() > value

    def __ge__(self, value):
        """>= comparison against the current query count."""
        return self._get_count() >= value

    def __int__(self):
        """int() conversion: the current query count."""
        return self._get_count()

    def __repr__(self):
        """Render as the current query count."""
        return u"%s" % self._get_count()

    def _get_count(self):
        """Number of profiled queries so far, excluding index queries and
        compensating for the profile lookups this counter itself makes."""
        ignore_query = {"ns": {"$ne": "%s.system.indexes" % self.db.name}}
        profiled = self.db.system.profile.find(ignore_query).count()
        count = profiled - self.counter
        # The find() above is itself profiled; discount it next time.
        self.counter += 1
        return count
| @@ -4,7 +4,7 @@ from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document) | ||||
| from fields import (ReferenceField, ListField, DictField, MapField) | ||||
| from connection import get_db | ||||
| from queryset import QuerySet | ||||
| from document import Document | ||||
| from document import Document, EmbeddedDocument | ||||
|  | ||||
|  | ||||
| class DeReference(object): | ||||
| @@ -33,7 +33,8 @@ class DeReference(object): | ||||
|         self.max_depth = max_depth | ||||
|         doc_type = None | ||||
|  | ||||
|         if instance and instance._fields: | ||||
|         if instance and isinstance(instance, (Document, EmbeddedDocument, | ||||
|                                               TopLevelDocumentMetaclass)): | ||||
|             doc_type = instance._fields.get(name) | ||||
|             if hasattr(doc_type, 'field'): | ||||
|                 doc_type = doc_type.field | ||||
| @@ -84,7 +85,7 @@ class DeReference(object): | ||||
|         # Recursively find dbreferences | ||||
|         depth += 1 | ||||
|         for k, item in iterator: | ||||
|             if hasattr(item, '_fields'): | ||||
|             if isinstance(item, Document): | ||||
|                 for field_name, field in item._fields.iteritems(): | ||||
|                     v = item._data.get(field_name, None) | ||||
|                     if isinstance(v, (DBRef)): | ||||
| @@ -115,13 +116,16 @@ class DeReference(object): | ||||
|         object_map = {} | ||||
|         for col, dbrefs in self.reference_map.iteritems(): | ||||
|             keys = object_map.keys() | ||||
|             refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys])) | ||||
|             refs = list(set([dbref for dbref in dbrefs if unicode(dbref).encode('utf-8') not in keys])) | ||||
|             if hasattr(col, 'objects'):  # We have a document class for the refs | ||||
|                 references = col.objects.in_bulk(refs) | ||||
|                 for key, doc in references.iteritems(): | ||||
|                     object_map[key] = doc | ||||
|             else:  # Generic reference: use the refs data to convert to document | ||||
|                 if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,) ): | ||||
|                 if isinstance(doc_type, (ListField, DictField, MapField,)): | ||||
|                     continue | ||||
|  | ||||
|                 if doc_type: | ||||
|                     references = doc_type._get_db()[col].find({'_id': {'$in': refs}}) | ||||
|                     for ref in references: | ||||
|                         doc = doc_type._from_son(ref) | ||||
| @@ -164,13 +168,14 @@ class DeReference(object): | ||||
|         if isinstance(items, (dict, SON)): | ||||
|             if '_ref' in items: | ||||
|                 return self.object_map.get(items['_ref'].id, items) | ||||
|             elif '_types' in items and '_cls' in items: | ||||
|             elif '_cls' in items: | ||||
|                 doc = get_document(items['_cls'])._from_son(items) | ||||
|                 doc._data = self._attach_objects(doc._data, depth, doc, None) | ||||
|                 return doc | ||||
|  | ||||
|         if not hasattr(items, 'items'): | ||||
|             is_list = True | ||||
|             as_tuple = isinstance(items, tuple) | ||||
|             iterator = enumerate(items) | ||||
|             data = [] | ||||
|         else: | ||||
| @@ -187,7 +192,7 @@ class DeReference(object): | ||||
|  | ||||
|             if k in self.object_map and not is_list: | ||||
|                 data[k] = self.object_map[k] | ||||
|             elif hasattr(v, '_fields'): | ||||
|             elif isinstance(v, Document): | ||||
|                 for field_name, field in v._fields.iteritems(): | ||||
|                     v = data[k]._data.get(field_name, None) | ||||
|                     if isinstance(v, (DBRef)): | ||||
| @@ -205,7 +210,7 @@ class DeReference(object): | ||||
|  | ||||
|         if instance and name: | ||||
|             if is_list: | ||||
|                 return BaseList(data, instance, name) | ||||
|                 return tuple(data) if as_tuple else BaseList(data, instance, name) | ||||
|             return BaseDict(data, instance, name) | ||||
|         depth += 1 | ||||
|         return data | ||||
|   | ||||
| @@ -1,13 +1,17 @@ | ||||
| import datetime | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
| from django.utils.encoding import smart_str | ||||
| from django.contrib.auth.models import _user_get_all_permissions | ||||
| from django.contrib.auth.models import _user_has_perm | ||||
| from django.contrib.auth.models import _user_has_perm, _user_get_all_permissions, _user_has_module_perms | ||||
| from django.db import models | ||||
| from django.contrib.contenttypes.models import ContentTypeManager | ||||
| from django.contrib import auth | ||||
| from django.contrib.auth.models import AnonymousUser | ||||
| from django.utils.translation import ugettext_lazy as _ | ||||
|  | ||||
| from .utils import datetime_now | ||||
|  | ||||
| REDIRECT_FIELD_NAME = 'next' | ||||
|  | ||||
| try: | ||||
|     from django.contrib.auth.hashers import check_password, make_password | ||||
| except ImportError: | ||||
| @@ -34,7 +38,166 @@ except ImportError: | ||||
|         return '%s$%s$%s' % (algo, salt, hash) | ||||
|  | ||||
|  | ||||
| REDIRECT_FIELD_NAME = 'next' | ||||
class ContentType(Document):
    """MongoEngine stand-in for django.contrib.contenttypes' ContentType.

    Stores the (app_label, model) pair identifying a Django model so that
    Permission documents can reference it.
    """
    name = StringField(max_length=100)
    app_label = StringField(max_length=100)
    model = StringField(max_length=100, verbose_name=_('python model class name'),
                        unique_with='app_label')
    objects = ContentTypeManager()

    class Meta:
        verbose_name = _('content type')
        verbose_name_plural = _('content types')
        # db_table = 'django_content_type'
        # ordering = ('name',)
        # unique_together = (('app_label', 'model'),)

    def __unicode__(self):
        return self.name

    def model_class(self):
        """Return the Python model class for this type of content."""
        # Imported lazily to avoid importing Django's ORM at module load.
        from django.db import models
        return models.get_model(self.app_label, self.model)

    def get_object_for_this_type(self, **kwargs):
        """
        Returns an object of this type for the keyword arguments given.
        Basically, this is a proxy around this object_type's get_object() model
        method. The ObjectNotExist exception, if thrown, will not be caught,
        so code that calls this method should catch it.
        """
        return self.model_class()._default_manager.using(self._state.db).get(**kwargs)

    def natural_key(self):
        # Natural key used by Django's serialization framework.
        return (self.app_label, self.model)
|  | ||||
|  | ||||
class SiteProfileNotAvailable(Exception):
    """Raised by User.get_profile() when AUTH_PROFILE_MODULE is unset or invalid."""
    pass
|  | ||||
|  | ||||
class PermissionManager(models.Manager):
    """Manager resolving Permission objects by their Django "natural key"."""

    def get_by_natural_key(self, codename, app_label, model):
        # Natural key = (codename, app_label, model); the ContentType is
        # resolved first and used to disambiguate the codename.
        return self.get(
            codename=codename,
            content_type=ContentType.objects.get_by_natural_key(app_label, model)
        )
|  | ||||
|  | ||||
class Permission(Document):
    """The permissions system provides a way to assign permissions to specific
    users and groups of users.

    The permission system is used by the Django admin site, but may also be
    useful in your own code. The Django admin site uses permissions as follows:

        - The "add" permission limits the user's ability to view the "add"
          form and add an object.
        - The "change" permission limits a user's ability to view the change
          list, view the "change" form and change an object.
        - The "delete" permission limits the ability to delete an object.

    Permissions are set globally per type of object, not per specific object
    instance. It is possible to say "Mary may change news stories," but it's
    not currently possible to say "Mary may change news stories, but only the
    ones she created herself" or "Mary may only change news stories that have
    a certain status or publication date."

    Three basic permissions -- add, change and delete -- are automatically
    created for each Django model.
    """
    # NOTE(review): verbose_name _('username') looks like a copy-paste slip;
    # presumably it should be _('name') -- confirm before changing.
    name = StringField(max_length=50, verbose_name=_('username'))
    content_type = ReferenceField(ContentType)
    codename = StringField(max_length=100, verbose_name=_('codename'))
        # FIXME: don't access field of the other class
        # unique_with=['content_type__app_label', 'content_type__model'])

    objects = PermissionManager()

    class Meta:
        verbose_name = _('permission')
        verbose_name_plural = _('permissions')
        # unique_together = (('content_type', 'codename'),)
        # ordering = ('content_type__app_label', 'content_type__model', 'codename')

    def __unicode__(self):
        return u"%s | %s | %s" % (
            unicode(self.content_type.app_label),
            unicode(self.content_type),
            unicode(self.name))

    def natural_key(self):
        # (codename, app_label, model) -- mirrors Django's Permission.
        return (self.codename,) + self.content_type.natural_key()
    natural_key.dependencies = ['contenttypes.contenttype']
|  | ||||
|  | ||||
class Group(Document):
    """Groups are a generic way of categorizing users to apply permissions,
    or some other label, to those users. A user can belong to any number of
    groups.

    A user in a group automatically has all the permissions granted to that
    group. For example, if the group Site editors has the permission
    can_edit_home_page, any user in that group will have that permission.

    Beyond permissions, groups are a convenient way to categorize users to
    apply some label, or extended functionality, to them. For example, you
    could create a group 'Special users', and you could write code that would
    do special things to those users -- such as giving them access to a
    members-only portion of your site, or sending them members-only
    e-mail messages.
    """
    name = StringField(max_length=80, unique=True, verbose_name=_('name'))
    # References, not embeds: permissions are shared between groups/users.
    permissions = ListField(ReferenceField(Permission, verbose_name=_('permissions'), required=False))

    class Meta:
        verbose_name = _('group')
        verbose_name_plural = _('groups')

    def __unicode__(self):
        return self.name
|  | ||||
|  | ||||
class UserManager(models.Manager):
    """Manager providing Django-style user creation helpers.

    Mirrors django.contrib.auth.models.UserManager so calling code can use
    create_user / create_superuser / make_random_password unchanged.
    """

    def create_user(self, username, email, password=None):
        """
        Creates and saves a User with the given username, e-mail and password.
        """
        now = datetime_now()

        # Normalize the address by lowercasing the domain part of the email
        # address.
        try:
            email_name, domain_part = email.strip().split('@', 1)
        except ValueError:
            # No '@' present; store the address as given.
            pass
        else:
            email = '@'.join([email_name, domain_part.lower()])

        user = self.model(username=username, email=email, is_staff=False,
                          is_active=True, is_superuser=False, last_login=now,
                          date_joined=now)

        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_superuser(self, username, email, password):
        """Create and save a superuser (staff, active, all permissions)."""
        u = self.create_user(username, email, password)
        u.is_staff = True
        u.is_active = True
        u.is_superuser = True
        u.save(using=self._db)
        return u

    def make_random_password(self, length=10, allowed_chars='abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'):
        """Generate a random password with the given length and allowed_chars.

        Note that the default value of allowed_chars does not have "I" or
        letters that look like it -- just to avoid confusion.
        """
        # Use the OS-backed CSPRNG instead of the seedable Mersenne Twister:
        # generated passwords must not be predictable.
        from random import SystemRandom
        choice = SystemRandom().choice
        return ''.join(choice(allowed_chars) for _ in range(length))
|  | ||||
|  | ||||
| class User(Document): | ||||
|     """A User document that aims to mirror most of the API specified by Django | ||||
| @@ -62,15 +225,21 @@ class User(Document): | ||||
|     is_superuser = BooleanField(default=False, | ||||
|                                 verbose_name=_('superuser status'), | ||||
|                                 help_text=_("Designates that this user has all permissions without explicitly assigning them.")) | ||||
|     last_login = DateTimeField(default=datetime.datetime.now, | ||||
|     last_login = DateTimeField(default=datetime_now, | ||||
|                                verbose_name=_('last login')) | ||||
|     date_joined = DateTimeField(default=datetime.datetime.now, | ||||
|     date_joined = DateTimeField(default=datetime_now, | ||||
|                                 verbose_name=_('date joined')) | ||||
|  | ||||
|     user_permissions = ListField(ReferenceField(Permission), verbose_name=_('user permissions'), | ||||
|                                                 help_text=_('Permissions for the user.')) | ||||
|  | ||||
|     USERNAME_FIELD = 'username' | ||||
|     REQUIRED_FIELDS = ['email'] | ||||
|  | ||||
|     meta = { | ||||
|         'allow_inheritance': True, | ||||
|         'indexes': [ | ||||
|             {'fields': ['username'], 'unique': True} | ||||
|             {'fields': ['username'], 'unique': True, 'sparse': True} | ||||
|         ] | ||||
|     } | ||||
|  | ||||
| @@ -106,6 +275,40 @@ class User(Document): | ||||
|         """ | ||||
|         return check_password(raw_password, self.password) | ||||
|  | ||||
|     @classmethod | ||||
|     def create_user(cls, username, password, email=None): | ||||
|         """Create (and save) a new user with the given username, password and | ||||
|         email address. | ||||
|         """ | ||||
|         now = datetime_now() | ||||
|  | ||||
|         # Normalize the address by lowercasing the domain part of the email | ||||
|         # address. | ||||
|         if email is not None: | ||||
|             try: | ||||
|                 email_name, domain_part = email.strip().split('@', 1) | ||||
|             except ValueError: | ||||
|                 pass | ||||
|             else: | ||||
|                 email = '@'.join([email_name, domain_part.lower()]) | ||||
|  | ||||
|         user = cls(username=username, email=email, date_joined=now) | ||||
|         user.set_password(password) | ||||
|         user.save() | ||||
|         return user | ||||
|  | ||||
|     def get_group_permissions(self, obj=None): | ||||
|         """ | ||||
|         Returns a list of permission strings that this user has through his/her | ||||
|         groups. This method queries all available auth backends. If an object | ||||
|         is passed in, only permissions matching this object are returned. | ||||
|         """ | ||||
|         permissions = set() | ||||
|         for backend in auth.get_backends(): | ||||
|             if hasattr(backend, "get_group_permissions"): | ||||
|                 permissions.update(backend.get_group_permissions(self, obj)) | ||||
|         return permissions | ||||
|  | ||||
|     def get_all_permissions(self, obj=None): | ||||
|         return _user_get_all_permissions(self, obj) | ||||
|  | ||||
| @@ -125,30 +328,50 @@ class User(Document): | ||||
|         # Otherwise we need to check the backends. | ||||
|         return _user_has_perm(self, perm, obj) | ||||
|  | ||||
|     @classmethod | ||||
|     def create_user(cls, username, password, email=None): | ||||
|         """Create (and save) a new user with the given username, password and | ||||
|         email address. | ||||
|     def has_module_perms(self, app_label): | ||||
|         """ | ||||
|         now = datetime.datetime.now() | ||||
|         Returns True if the user has any permissions in the given app label. | ||||
|         Uses pretty much the same logic as has_perm, above. | ||||
|         """ | ||||
|         # Active superusers have all permissions. | ||||
|         if self.is_active and self.is_superuser: | ||||
|             return True | ||||
|  | ||||
|         # Normalize the address by lowercasing the domain part of the email | ||||
|         # address. | ||||
|         if email is not None: | ||||
|         return _user_has_module_perms(self, app_label) | ||||
|  | ||||
|     def email_user(self, subject, message, from_email=None): | ||||
|         "Sends an e-mail to this User." | ||||
|         from django.core.mail import send_mail | ||||
|         send_mail(subject, message, from_email, [self.email]) | ||||
|  | ||||
|     def get_profile(self): | ||||
|         """ | ||||
|         Returns site-specific profile for this user. Raises | ||||
|         SiteProfileNotAvailable if this site does not allow profiles. | ||||
|         """ | ||||
|         if not hasattr(self, '_profile_cache'): | ||||
|             from django.conf import settings | ||||
|             if not getattr(settings, 'AUTH_PROFILE_MODULE', False): | ||||
|                 raise SiteProfileNotAvailable('You need to set AUTH_PROFILE_MO' | ||||
|                                               'DULE in your project settings') | ||||
|             try: | ||||
|                 email_name, domain_part = email.strip().split('@', 1) | ||||
|                 app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.') | ||||
|             except ValueError: | ||||
|                 pass | ||||
|             else: | ||||
|                 email = '@'.join([email_name, domain_part.lower()]) | ||||
|                 raise SiteProfileNotAvailable('app_label and model_name should' | ||||
|                         ' be separated by a dot in the AUTH_PROFILE_MODULE set' | ||||
|                         'ting') | ||||
|  | ||||
|         user = cls(username=username, email=email, date_joined=now) | ||||
|         user.set_password(password) | ||||
|         user.save() | ||||
|         return user | ||||
|  | ||||
|     def get_and_delete_messages(self): | ||||
|         return [] | ||||
|             try: | ||||
|                 model = models.get_model(app_label, model_name) | ||||
|                 if model is None: | ||||
|                     raise SiteProfileNotAvailable('Unable to load the profile ' | ||||
|                         'model, check AUTH_PROFILE_MODULE in your project sett' | ||||
|                         'ings') | ||||
|                 self._profile_cache = model._default_manager.using(self._state.db).get(user__id__exact=self.id) | ||||
|                 self._profile_cache.user = self | ||||
|             except (ImportError, ImproperlyConfigured): | ||||
|                 raise SiteProfileNotAvailable | ||||
|         return self._profile_cache | ||||
|  | ||||
|  | ||||
| class MongoEngineBackend(object): | ||||
| @@ -158,17 +381,26 @@ class MongoEngineBackend(object): | ||||
|     supports_object_permissions = False | ||||
|     supports_anonymous_user = False | ||||
|     supports_inactive_user = False | ||||
|     _user_doc = False | ||||
|  | ||||
|     def authenticate(self, username=None, password=None): | ||||
|         user = User.objects(username=username).first() | ||||
|         user = self.user_document.objects(username=username).first() | ||||
|         if user: | ||||
|             if password and user.check_password(password): | ||||
|                 backend = auth.get_backends()[0] | ||||
|                 user.backend = "%s.%s" % (backend.__module__, backend.__class__.__name__) | ||||
|                 return user | ||||
|         return None | ||||
|  | ||||
|     def get_user(self, user_id): | ||||
|         return User.objects.with_id(user_id) | ||||
|         return self.user_document.objects.with_id(user_id) | ||||
|  | ||||
|     @property | ||||
|     def user_document(self): | ||||
|         if self._user_doc is False: | ||||
|             from .mongo_auth.models import get_user_document | ||||
|             self._user_doc = get_user_document() | ||||
|         return self._user_doc | ||||
|  | ||||
| def get_user(userid): | ||||
|     """Returns a User object from an id (User.id). Django's equivalent takes | ||||
|   | ||||
							
								
								
									
										0
									
								
								mongoengine/django/mongo_auth/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										0
									
								
								mongoengine/django/mongo_auth/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
								
								
									
										115
									
								
								mongoengine/django/mongo_auth/models.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										115
									
								
								mongoengine/django/mongo_auth/models.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,115 @@ | ||||
| from django.conf import settings | ||||
| from django.contrib.auth.hashers import make_password | ||||
| from django.contrib.auth.models import UserManager | ||||
| from django.core.exceptions import ImproperlyConfigured | ||||
| from django.db import models | ||||
| from django.utils.importlib import import_module | ||||
| from django.utils.translation import ugettext_lazy as _ | ||||
|  | ||||
|  | ||||
| __all__ = ( | ||||
|     'get_user_document', | ||||
| ) | ||||
|  | ||||
|  | ||||
| MONGOENGINE_USER_DOCUMENT = getattr( | ||||
|     settings, 'MONGOENGINE_USER_DOCUMENT', 'mongoengine.django.auth.User') | ||||
|  | ||||
|  | ||||
def get_user_document():
    """Return the document class used for authentication.

    Resolves the dotted path held in MONGOENGINE_USER_DOCUMENT (taken from
    settings, defaulting to ``mongoengine.django.auth.User``) to a class.
    """

    path = MONGOENGINE_USER_DOCUMENT
    split_at = path.rindex('.')
    module_name, attr_name = path[:split_at], path[split_at + 1:]
    return getattr(import_module(module_name), attr_name)
|  | ||||
|  | ||||
class MongoUserManager(UserManager):
    """A User manager which allows the use of MongoEngine documents in Django.

    To use the manager, you must tell django.contrib.auth to use MongoUser as
    the user model. In your settings.py, you need:

        INSTALLED_APPS = (
            ...
            'django.contrib.auth',
            'mongoengine.django.mongo_auth',
            ...
        )
        AUTH_USER_MODEL = 'mongo_auth.MongoUser'

    Django will use the model object to access the custom Manager, which will
    replace the original queryset with MongoEngine querysets.

    By default, mongoengine.django.auth.User will be used to store users. You
    can specify another document class in MONGOENGINE_USER_DOCUMENT in your
    settings.py.

    The User Document class has the same requirements as a standard custom user
    model: https://docs.djangoproject.com/en/dev/topics/auth/customizing/

    In particular, the User Document class must define USERNAME_FIELD and
    REQUIRED_FIELDS.

    `AUTH_USER_MODEL` has been added in Django 1.5.

    """

    def contribute_to_class(self, model, name):
        super(MongoUserManager, self).contribute_to_class(model, name)
        # Keep the dummy Django model around (for DoesNotExist etc.) but
        # route all data access through the MongoEngine document class.
        self.dj_model = self.model
        self.model = get_user_document()

        # Mirror the document's USERNAME_FIELD / REQUIRED_FIELDS onto the
        # dummy Django model so django.contrib.auth's machinery accepts it.
        self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD
        username = models.CharField(_('username'), max_length=30, unique=True)
        username.contribute_to_class(self.dj_model, self.dj_model.USERNAME_FIELD)

        self.dj_model.REQUIRED_FIELDS = self.model.REQUIRED_FIELDS
        for name in self.dj_model.REQUIRED_FIELDS:
            field = models.CharField(_(name), max_length=30)
            field.contribute_to_class(self.dj_model, name)


    def get(self, *args, **kwargs):
        try:
            return self.get_query_set().get(*args, **kwargs)
        except self.model.DoesNotExist:
            # ModelBackend expects this exception
            raise self.dj_model.DoesNotExist

    @property
    def db(self):
        # Relational database routing makes no sense for MongoEngine.
        raise NotImplementedError

    def get_empty_query_set(self):
        return self.model.objects.none()

    def get_query_set(self):
        # The MongoEngine document's manager stands in for a Django queryset.
        return self.model.objects
|  | ||||
|  | ||||
class MongoUser(models.Model):
    """Dummy user model for Django.

    MongoUser is used to replace Django's UserManager with MongoUserManager.
    The actual user document class is mongoengine.django.auth.User or any
    other document class specified in MONGOENGINE_USER_DOCUMENT.

    To get the user document class, use `get_user_document()`.

    """

    # All data access is delegated to the MongoEngine document via this manager.
    objects = MongoUserManager()

    class Meta:
        app_label = 'mongo_auth'

    def set_password(self, password):
        """Doesn't do anything, but works around the issue with Django 1.6."""
        make_password(password)
| @@ -1,15 +1,19 @@ | ||||
| from datetime import datetime | ||||
|  | ||||
| from bson import json_util | ||||
| from django.conf import settings | ||||
| from django.contrib.sessions.backends.base import SessionBase, CreateError | ||||
| from django.core.exceptions import SuspiciousOperation | ||||
| from django.utils.encoding import force_unicode | ||||
| try: | ||||
|     from django.utils.encoding import force_unicode | ||||
| except ImportError: | ||||
|     from django.utils.encoding import force_text as force_unicode | ||||
|  | ||||
| from mongoengine.document import Document | ||||
| from mongoengine import fields | ||||
| from mongoengine.queryset import OperationError | ||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME | ||||
|  | ||||
| from .utils import datetime_now | ||||
|  | ||||
|  | ||||
| MONGOENGINE_SESSION_DB_ALIAS = getattr( | ||||
|     settings, 'MONGOENGINE_SESSION_DB_ALIAS', | ||||
| @@ -25,25 +29,43 @@ MONGOENGINE_SESSION_DATA_ENCODE = getattr( | ||||
|     settings, 'MONGOENGINE_SESSION_DATA_ENCODE', | ||||
|     True) | ||||
|  | ||||
|  | ||||
| class MongoSession(Document): | ||||
|     session_key = fields.StringField(primary_key=True, max_length=40) | ||||
|     session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE \ | ||||
|                                         else fields.DictField() | ||||
|     expire_date = fields.DateTimeField() | ||||
|  | ||||
|     meta = {'collection': MONGOENGINE_SESSION_COLLECTION, | ||||
|             'db_alias': MONGOENGINE_SESSION_DB_ALIAS, | ||||
|             'allow_inheritance': False} | ||||
|     meta = { | ||||
|         'collection': MONGOENGINE_SESSION_COLLECTION, | ||||
|         'db_alias': MONGOENGINE_SESSION_DB_ALIAS, | ||||
|         'allow_inheritance': False, | ||||
|         'indexes': [ | ||||
|             { | ||||
|                 'fields': ['expire_date'], | ||||
|                 'expireAfterSeconds': 0 | ||||
|             } | ||||
|         ] | ||||
|     } | ||||
|  | ||||
|     def get_decoded(self): | ||||
|         return SessionStore().decode(self.session_data) | ||||
|  | ||||
|  | ||||
| class SessionStore(SessionBase): | ||||
|     """A MongoEngine-based session store for Django. | ||||
|     """ | ||||
|  | ||||
|     def _get_session(self, *args, **kwargs): | ||||
|         sess = super(SessionStore, self)._get_session(*args, **kwargs) | ||||
|         if sess.get('_auth_user_id', None): | ||||
|             sess['_auth_user_id'] = str(sess.get('_auth_user_id')) | ||||
|         return sess | ||||
|  | ||||
|     def load(self): | ||||
|         try: | ||||
|             s = MongoSession.objects(session_key=self.session_key, | ||||
|                                      expire_date__gt=datetime.now())[0] | ||||
|                                      expire_date__gt=datetime_now)[0] | ||||
|             if MONGOENGINE_SESSION_DATA_ENCODE: | ||||
|                 return self.decode(force_unicode(s.session_data)) | ||||
|             else: | ||||
| @@ -76,7 +98,7 @@ class SessionStore(SessionBase): | ||||
|             s.session_data = self._get_session(no_load=must_create) | ||||
|         s.expire_date = self.get_expiry_date() | ||||
|         try: | ||||
|             s.save(force_insert=must_create, safe=True) | ||||
|             s.save(force_insert=must_create) | ||||
|         except OperationError: | ||||
|             if must_create: | ||||
|                 raise CreateError | ||||
| @@ -88,3 +110,15 @@ class SessionStore(SessionBase): | ||||
|                 return | ||||
|             session_key = self.session_key | ||||
|         MongoSession.objects(session_key=session_key).delete() | ||||
|  | ||||
|  | ||||
class BSONSerializer(object):
    """
    Serializer that can handle BSON types (eg ObjectId).

    Uses bson.json_util so BSON extended-JSON values round-trip through the
    session store.
    """
    def dumps(self, obj):
        # Compact separators keep the payload small; encoding to ASCII keeps
        # the serialized form byte-safe.
        return json_util.dumps(obj, separators=(',', ':')).encode('ascii')

    def loads(self, data):
        return json_util.loads(data.decode('ascii'))
|  | ||||
|   | ||||
| @@ -1,6 +1,6 @@ | ||||
| from mongoengine.queryset import QuerySet | ||||
| from mongoengine.base import BaseDocument | ||||
| from mongoengine.base import ValidationError | ||||
| from mongoengine.errors import ValidationError | ||||
|  | ||||
| def _get_queryset(cls): | ||||
|     """Inspired by django.shortcuts.*""" | ||||
|   | ||||
| @@ -76,7 +76,7 @@ class GridFSStorage(Storage): | ||||
|         """Find the documents in the store with the given name | ||||
|         """ | ||||
|         docs = self.document.objects | ||||
|         doc = [d for d in docs if getattr(d, self.field).name == name] | ||||
|         doc = [d for d in docs if hasattr(getattr(d, self.field), 'name') and getattr(d, self.field).name == name] | ||||
|         if doc: | ||||
|             return doc[0] | ||||
|         else: | ||||
|   | ||||
							
								
								
									
										6
									
								
								mongoengine/django/utils.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								mongoengine/django/utils.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | ||||
| try: | ||||
|     # django >= 1.4 | ||||
|     from django.utils.timezone import now as datetime_now | ||||
| except ImportError: | ||||
|     from datetime import datetime | ||||
|     datetime_now = datetime.now | ||||
| @@ -1,19 +1,38 @@ | ||||
| import warnings | ||||
|  | ||||
| import hashlib | ||||
| import pymongo | ||||
| import re | ||||
|  | ||||
| from pymongo.read_preferences import ReadPreference | ||||
| from bson import ObjectId | ||||
| from bson.dbref import DBRef | ||||
| from mongoengine import signals, queryset | ||||
| from mongoengine import signals | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.base import (DocumentMetaclass, TopLevelDocumentMetaclass, | ||||
|                               BaseDocument, BaseDict, BaseList, | ||||
|                               ALLOW_INHERITANCE, get_document) | ||||
| from mongoengine.errors import ValidationError | ||||
| from mongoengine.queryset import OperationError, NotUniqueError, QuerySet | ||||
| from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME | ||||
| from mongoengine.context_managers import switch_db, switch_collection | ||||
|  | ||||
| from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, | ||||
|                   BaseDict, BaseList) | ||||
| from queryset import OperationError, NotUniqueError | ||||
| from connection import get_db, DEFAULT_CONNECTION_NAME | ||||
|  | ||||
| __all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument', | ||||
| __all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument', | ||||
|            'DynamicEmbeddedDocument', 'OperationError', | ||||
|            'InvalidCollectionError', 'NotUniqueError'] | ||||
|            'InvalidCollectionError', 'NotUniqueError', 'MapReduceDocument') | ||||
|  | ||||
|  | ||||
| def includes_cls(fields): | ||||
|     """ Helper function used for ensuring and comparing indexes | ||||
|     """ | ||||
|  | ||||
|     first_field = None | ||||
|     if len(fields): | ||||
|         if isinstance(fields[0], basestring): | ||||
|             first_field = fields[0] | ||||
|         elif isinstance(fields[0], (list, tuple)) and len(fields[0]): | ||||
|             first_field = fields[0][0] | ||||
|     return first_field == '_cls' | ||||
|  | ||||
|  | ||||
| class InvalidCollectionError(Exception): | ||||
| @@ -28,11 +47,11 @@ class EmbeddedDocument(BaseDocument): | ||||
|  | ||||
|     A :class:`~mongoengine.EmbeddedDocument` subclass may be itself subclassed, | ||||
|     to create a specialised version of the embedded document that will be | ||||
|     stored in the same collection. To facilitate this behaviour, `_cls` and | ||||
|     `_types` fields are added to documents (hidden though the MongoEngine | ||||
|     interface though). To disable this behaviour and remove the dependence on | ||||
|     the presence of `_cls` and `_types`, set :attr:`allow_inheritance` to | ||||
|     ``False`` in the :attr:`meta` dictionary. | ||||
|     stored in the same collection. To facilitate this behaviour a `_cls` | ||||
|     field is added to documents (hidden though the MongoEngine interface). | ||||
|     To disable this behaviour and remove the dependence on the presence of | ||||
|     `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta` | ||||
|     dictionary. | ||||
|     """ | ||||
|  | ||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||
| @@ -40,26 +59,20 @@ class EmbeddedDocument(BaseDocument): | ||||
|     my_metaclass  = DocumentMetaclass | ||||
|     __metaclass__ = DocumentMetaclass | ||||
|  | ||||
|     _instance = None | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         super(EmbeddedDocument, self).__init__(*args, **kwargs) | ||||
|         self._changed_fields = [] | ||||
|  | ||||
|     def __delattr__(self, *args, **kwargs): | ||||
|         """Handle deletions of fields""" | ||||
|         field_name = args[0] | ||||
|         if field_name in self._fields: | ||||
|             default = self._fields[field_name].default | ||||
|             if callable(default): | ||||
|                 default = default() | ||||
|             setattr(self, field_name, default) | ||||
|         else: | ||||
|             super(EmbeddedDocument, self).__delattr__(*args, **kwargs) | ||||
|  | ||||
|     def __eq__(self, other): | ||||
|         if isinstance(other, self.__class__): | ||||
|             return self._data == other._data | ||||
|             return self.to_mongo() == other.to_mongo() | ||||
|         return False | ||||
|  | ||||
|     def __ne__(self, other): | ||||
|         return not self.__eq__(other) | ||||
|  | ||||
|  | ||||
| class Document(BaseDocument): | ||||
|     """The base class used for defining the structure and properties of | ||||
| @@ -76,11 +89,11 @@ class Document(BaseDocument): | ||||
|  | ||||
|     A :class:`~mongoengine.Document` subclass may be itself subclassed, to | ||||
|     create a specialised version of the document that will be stored in the | ||||
|     same collection. To facilitate this behaviour, `_cls` and `_types` | ||||
|     fields are added to documents (hidden though the MongoEngine interface | ||||
|     though). To disable this behaviour and remove the dependence on the | ||||
|     presence of `_cls` and `_types`, set :attr:`allow_inheritance` to | ||||
|     ``False`` in the :attr:`meta` dictionary. | ||||
|     same collection. To facilitate this behaviour a `_cls` | ||||
|     field is added to documents (hidden though the MongoEngine interface). | ||||
|     To disable this behaviour and remove the dependence on the presence of | ||||
|     `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta` | ||||
|     dictionary. | ||||
|  | ||||
|     A :class:`~mongoengine.Document` may use a **Capped Collection** by | ||||
|     specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` | ||||
| @@ -98,13 +111,13 @@ class Document(BaseDocument): | ||||
|     Automatic index creation can be disabled by specifying | ||||
|     attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to | ||||
|     False then indexes will not be created by MongoEngine.  This is useful in | ||||
|     production systems where index creation is performed as part of a deployment | ||||
|     system. | ||||
|     production systems where index creation is performed as part of a | ||||
|     deployment system. | ||||
|  | ||||
|     By default, _types will be added to the start of every index (that | ||||
|     By default, _cls will be added to the start of every index (that | ||||
|     doesn't contain a list) if allow_inheritance is True. This can be | ||||
|     disabled by either setting types to False on the specific index or | ||||
|     by setting index_types to False on the meta dictionary for the document. | ||||
|     disabled by either setting cls to False on the specific index or | ||||
|     by setting index_cls to False on the meta dictionary for the document. | ||||
|     """ | ||||
|  | ||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||
| @@ -117,6 +130,7 @@ class Document(BaseDocument): | ||||
|         """ | ||||
|         def fget(self): | ||||
|             return getattr(self, self._meta['id_field']) | ||||
|  | ||||
|         def fset(self, value): | ||||
|             return setattr(self, self._meta['id_field'], value) | ||||
|         return property(fget, fset) | ||||
| @@ -125,7 +139,7 @@ class Document(BaseDocument): | ||||
|     @classmethod | ||||
|     def _get_db(cls): | ||||
|         """Some Model using other db_alias""" | ||||
|         return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME )) | ||||
|         return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)) | ||||
|  | ||||
|     @classmethod | ||||
|     def _get_collection(cls): | ||||
| @@ -148,7 +162,7 @@ class Document(BaseDocument): | ||||
|                        options.get('size') != max_size: | ||||
|                         msg = (('Cannot create collection "%s" as a capped ' | ||||
|                                'collection as it already exists') | ||||
|                                 % cls._collection) | ||||
|                                % cls._collection) | ||||
|                         raise InvalidCollectionError(msg) | ||||
|                 else: | ||||
|                     # Create the collection as a capped collection | ||||
| @@ -160,34 +174,34 @@ class Document(BaseDocument): | ||||
|                     ) | ||||
|             else: | ||||
|                 cls._collection = db[collection_name] | ||||
|             if cls._meta.get('auto_create_index', True): | ||||
|                 cls.ensure_indexes() | ||||
|         return cls._collection | ||||
|  | ||||
|     def save(self, safe=True, force_insert=False, validate=True, | ||||
|              write_options=None,  cascade=None, cascade_kwargs=None, | ||||
|              _refs=None): | ||||
|     def save(self, force_insert=False, validate=True, clean=True, | ||||
|              write_concern=None,  cascade=None, cascade_kwargs=None, | ||||
|              _refs=None, **kwargs): | ||||
|         """Save the :class:`~mongoengine.Document` to the database. If the | ||||
|         document already exists, it will be updated, otherwise it will be | ||||
|         created. | ||||
|  | ||||
|         If ``safe=True`` and the operation is unsuccessful, an | ||||
|         :class:`~mongoengine.OperationError` will be raised. | ||||
|  | ||||
|         :param safe: check if the operation succeeded before returning | ||||
|         :param force_insert: only try to create a new document, don't allow | ||||
|             updates of existing documents | ||||
|         :param validate: validates the document; set to ``False`` to skip. | ||||
|         :param write_options: Extra keyword arguments are passed down to | ||||
|         :param clean: call the document clean method, requires `validate` to be | ||||
|             True. | ||||
|         :param write_concern: Extra keyword arguments are passed down to | ||||
|             :meth:`~pymongo.collection.Collection.save` OR | ||||
|             :meth:`~pymongo.collection.Collection.insert` | ||||
|             which will be used as options for the resultant | ||||
|             ``getLastError`` command.  For example, | ||||
|             ``save(..., write_options={w: 2, fsync: True}, ...)`` will | ||||
|             ``save(..., write_concern={w: 2, fsync: True}, ...)`` will | ||||
|             wait until at least two servers have recorded the write and | ||||
|             will force an fsync on the primary server. | ||||
|         :param cascade: Sets the flag for cascading saves.  You can set a | ||||
|             default by setting "cascade" in the document __meta__ | ||||
|         :param cascade_kwargs: optional kwargs dictionary to be passed throw | ||||
|             to cascading saves | ||||
|         :param cascade_kwargs: (optional) kwargs dictionary to be passed throw | ||||
|             to cascading saves.  Implies ``cascade=True``. | ||||
|         :param _refs: A list of processed references used in cascading saves | ||||
|  | ||||
|         .. versionchanged:: 0.5 | ||||
| @@ -196,33 +210,35 @@ class Document(BaseDocument): | ||||
|             :class:`~bson.dbref.DBRef` objects that have changes are | ||||
|             saved as well. | ||||
|         .. versionchanged:: 0.6 | ||||
|             Cascade saves are optional = defaults to True, if you want | ||||
|             Added cascading saves | ||||
|         .. versionchanged:: 0.8 | ||||
|             Cascade saves are optional and default to False.  If you want | ||||
|             fine grain control then you can turn off using document | ||||
|             meta['cascade'] = False  Also you can pass different kwargs to | ||||
|             meta['cascade'] = True.  Also you can pass different kwargs to | ||||
|             the cascade save using cascade_kwargs which overwrites the | ||||
|             existing kwargs with custom values | ||||
|             existing kwargs with custom values. | ||||
|         """ | ||||
|         signals.pre_save.send(self.__class__, document=self) | ||||
|  | ||||
|         if validate: | ||||
|             self.validate() | ||||
|             self.validate(clean=clean) | ||||
|  | ||||
|         if not write_options: | ||||
|             write_options = {} | ||||
|         if write_concern is None: | ||||
|             write_concern = {"w": 1} | ||||
|  | ||||
|         doc = self.to_mongo() | ||||
|  | ||||
|         created = force_insert or '_id' not in doc | ||||
|         created = ('_id' not in doc or self._created or force_insert) | ||||
|  | ||||
|         signals.pre_save_post_validation.send(self.__class__, document=self, created=created) | ||||
|  | ||||
|         try: | ||||
|             collection = self.__class__.objects._collection | ||||
|             collection = self._get_collection() | ||||
|             if created: | ||||
|                 if force_insert: | ||||
|                     object_id = collection.insert(doc, safe=safe, | ||||
|                                                   **write_options) | ||||
|                     object_id = collection.insert(doc, **write_concern) | ||||
|                 else: | ||||
|                     object_id = collection.save(doc, safe=safe, | ||||
|                                                 **write_options) | ||||
|                     object_id = collection.save(doc, **write_concern) | ||||
|             else: | ||||
|                 object_id = doc['_id'] | ||||
|                 updates, removals = self._delta() | ||||
| @@ -233,30 +249,41 @@ class Document(BaseDocument): | ||||
|                     actual_key = self._db_field_map.get(k, k) | ||||
|                     select_dict[actual_key] = doc[actual_key] | ||||
|  | ||||
|                 upsert = self._created | ||||
|                 if updates: | ||||
|                     collection.update(select_dict, {"$set": updates}, | ||||
|                         upsert=upsert, safe=safe, **write_options) | ||||
|                 if removals: | ||||
|                     collection.update(select_dict, {"$unset": removals}, | ||||
|                         upsert=upsert, safe=safe, **write_options) | ||||
|                 def is_new_object(last_error): | ||||
|                     if last_error is not None: | ||||
|                         updated = last_error.get("updatedExisting") | ||||
|                         if updated is not None: | ||||
|                             return not updated | ||||
|                     return created | ||||
|  | ||||
|                 update_query = {} | ||||
|  | ||||
|                 if updates: | ||||
|                     update_query["$set"] = updates | ||||
|                 if removals: | ||||
|                     update_query["$unset"] = removals | ||||
|                 if updates or removals: | ||||
|                     last_error = collection.update(select_dict, update_query, | ||||
|                                                    upsert=True, **write_concern) | ||||
|                     created = is_new_object(last_error) | ||||
|  | ||||
|             if cascade is None: | ||||
|                 cascade = self._meta.get('cascade', False) or cascade_kwargs is not None | ||||
|  | ||||
|             warn_cascade = not cascade and 'cascade' not in self._meta | ||||
|             cascade = (self._meta.get('cascade', True) | ||||
|                        if cascade is None else cascade) | ||||
|             if cascade: | ||||
|                 kwargs = { | ||||
|                     "safe": safe, | ||||
|                     "force_insert": force_insert, | ||||
|                     "validate": validate, | ||||
|                     "write_options": write_options, | ||||
|                     "write_concern": write_concern, | ||||
|                     "cascade": cascade | ||||
|                 } | ||||
|                 if cascade_kwargs:  # Allow granular control over cascades | ||||
|                     kwargs.update(cascade_kwargs) | ||||
|                 kwargs['_refs'] = _refs | ||||
|                 self.cascade_save(warn_cascade=warn_cascade, **kwargs) | ||||
|  | ||||
|                 self.cascade_save(**kwargs) | ||||
|         except pymongo.errors.DuplicateKeyError, err: | ||||
|             message = u'Tried to save duplicate unique keys (%s)' | ||||
|             raise NotUniqueError(message % unicode(err)) | ||||
|         except pymongo.errors.OperationFailure, err: | ||||
|             message = 'Could not save document (%s)' | ||||
|             if re.match('^E1100[01] duplicate key', unicode(err)): | ||||
| @@ -269,23 +296,25 @@ class Document(BaseDocument): | ||||
|         if id_field not in self._meta.get('shard_key', []): | ||||
|             self[id_field] = self._fields[id_field].to_python(object_id) | ||||
|  | ||||
|         self._changed_fields = [] | ||||
|         self._clear_changed_fields() | ||||
|         self._created = False | ||||
|         signals.post_save.send(self.__class__, document=self, created=created) | ||||
|         return self | ||||
|  | ||||
|     def cascade_save(self, warn_cascade=None, *args, **kwargs): | ||||
|     def cascade_save(self, *args, **kwargs): | ||||
|         """Recursively saves any references / | ||||
|            generic references on an objects""" | ||||
|         import fields | ||||
|         _refs = kwargs.get('_refs', []) or [] | ||||
|  | ||||
|         ReferenceField = _import_class('ReferenceField') | ||||
|         GenericReferenceField = _import_class('GenericReferenceField') | ||||
|  | ||||
|         for name, cls in self._fields.items(): | ||||
|             if not isinstance(cls, (fields.ReferenceField, | ||||
|                                     fields.GenericReferenceField)): | ||||
|             if not isinstance(cls, (ReferenceField, | ||||
|                                     GenericReferenceField)): | ||||
|                 continue | ||||
|  | ||||
|             ref = getattr(self, name) | ||||
|             ref = self._data.get(name) | ||||
|             if not ref or isinstance(ref, DBRef): | ||||
|                 continue | ||||
|  | ||||
| @@ -294,15 +323,20 @@ class Document(BaseDocument): | ||||
|  | ||||
|             ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) | ||||
|             if ref and ref_id not in _refs: | ||||
|                 if warn_cascade: | ||||
|                     msg = ("Cascading saves will default to off in 0.8, " | ||||
|                           "please  explicitly set `.save(cascade=True)`") | ||||
|                     warnings.warn(msg, FutureWarning) | ||||
|                 _refs.append(ref_id) | ||||
|                 kwargs["_refs"] = _refs | ||||
|                 ref.save(**kwargs) | ||||
|                 ref._changed_fields = [] | ||||
|  | ||||
|     @property | ||||
|     def _qs(self): | ||||
|         """ | ||||
|         Returns the queryset to use for updating / reloading / deletions | ||||
|         """ | ||||
|         if not hasattr(self, '__objects'): | ||||
|             self.__objects = QuerySet(self, self._get_collection()) | ||||
|         return self.__objects | ||||
|  | ||||
|     @property | ||||
|     def _object_key(self): | ||||
|         """Dict to identify object in collection | ||||
| @@ -321,35 +355,96 @@ class Document(BaseDocument): | ||||
|         been saved. | ||||
|         """ | ||||
|         if not self.pk: | ||||
|             raise OperationError('attempt to update a document not yet saved') | ||||
|             if kwargs.get('upsert', False): | ||||
|                 query = self.to_mongo() | ||||
|                 if "_cls" in query: | ||||
|                     del(query["_cls"]) | ||||
|                 return self._qs.filter(**query).update_one(**kwargs) | ||||
|             else: | ||||
|                 raise OperationError('attempt to update a document not yet saved') | ||||
|  | ||||
|         # Need to add shard key to query, or you get an error | ||||
|         return self.__class__.objects(**self._object_key).update_one(**kwargs) | ||||
|         return self._qs.filter(**self._object_key).update_one(**kwargs) | ||||
|  | ||||
|     def delete(self, safe=False): | ||||
|     def delete(self, **write_concern): | ||||
|         """Delete the :class:`~mongoengine.Document` from the database. This | ||||
|         will only take effect if the document has been previously saved. | ||||
|  | ||||
|         :param safe: check if the operation succeeded before returning | ||||
|         :param write_concern: Extra keyword arguments are passed down which | ||||
|             will be used as options for the resultant | ||||
|             ``getLastError`` command.  For example, | ||||
|             ``save(..., write_concern={w: 2, fsync: True}, ...)`` will | ||||
|             wait until at least two servers have recorded the write and | ||||
|             will force an fsync on the primary server. | ||||
|         """ | ||||
|         signals.pre_delete.send(self.__class__, document=self) | ||||
|  | ||||
|         try: | ||||
|             self.__class__.objects(**self._object_key).delete(safe=safe) | ||||
|             self._qs.filter(**self._object_key).delete(write_concern=write_concern, _from_doc_delete=True) | ||||
|         except pymongo.errors.OperationFailure, err: | ||||
|             message = u'Could not delete document (%s)' % err.message | ||||
|             raise OperationError(message) | ||||
|  | ||||
|         signals.post_delete.send(self.__class__, document=self) | ||||
|  | ||||
|     def switch_db(self, db_alias): | ||||
|         """ | ||||
|         Temporarily switch the database for a document instance. | ||||
|  | ||||
|         Only really useful for archiving off data and calling `save()`:: | ||||
|  | ||||
|             user = User.objects.get(id=user_id) | ||||
|             user.switch_db('archive-db') | ||||
|             user.save() | ||||
|  | ||||
|         If you need to read from another database see | ||||
|         :class:`~mongoengine.context_managers.switch_db` | ||||
|  | ||||
|         :param db_alias: The database alias to use for saving the document | ||||
|         """ | ||||
|         with switch_db(self.__class__, db_alias) as cls: | ||||
|             collection = cls._get_collection() | ||||
|             db = cls._get_db() | ||||
|         self._get_collection = lambda: collection | ||||
|         self._get_db = lambda: db | ||||
|         self._collection = collection | ||||
|         self._created = True | ||||
|         self.__objects = self._qs | ||||
|         self.__objects._collection_obj = collection | ||||
|         return self | ||||
|  | ||||
|     def switch_collection(self, collection_name): | ||||
|         """ | ||||
|         Temporarily switch the collection for a document instance. | ||||
|  | ||||
|         Only really useful for archiving off data and calling `save()`:: | ||||
|  | ||||
|             user = User.objects.get(id=user_id) | ||||
|             user.switch_collection('old-users') | ||||
|             user.save() | ||||
|  | ||||
|         If you need to read from another database see | ||||
|         :class:`~mongoengine.context_managers.switch_db` | ||||
|  | ||||
|         :param collection_name: The database alias to use for saving the | ||||
|             document | ||||
|         """ | ||||
|         with switch_collection(self.__class__, collection_name) as cls: | ||||
|             collection = cls._get_collection() | ||||
|         self._get_collection = lambda: collection | ||||
|         self._collection = collection | ||||
|         self._created = True | ||||
|         self.__objects = self._qs | ||||
|         self.__objects._collection_obj = collection | ||||
|         return self | ||||
|  | ||||
|     def select_related(self, max_depth=1): | ||||
|         """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to | ||||
|         a maximum depth in order to cut down the number queries to mongodb. | ||||
|  | ||||
|         .. versionadded:: 0.5 | ||||
|         """ | ||||
|         import dereference | ||||
|         self._data = dereference.DeReference()(self._data, max_depth) | ||||
|         DeReference = _import_class('DeReference') | ||||
|         DeReference()([self], max_depth + 1) | ||||
|         return self | ||||
|  | ||||
|     def reload(self, max_depth=1): | ||||
| @@ -358,21 +453,20 @@ class Document(BaseDocument): | ||||
|         .. versionadded:: 0.1.2 | ||||
|         .. versionchanged:: 0.6  Now chainable | ||||
|         """ | ||||
|         id_field = self._meta['id_field'] | ||||
|         obj = self.__class__.objects( | ||||
|                 **{id_field: self[id_field]} | ||||
|               ).limit(1).select_related(max_depth=max_depth) | ||||
|         if not self.pk: | ||||
|             raise self.DoesNotExist("Document does not exist") | ||||
|         obj = self._qs.read_preference(ReadPreference.PRIMARY).filter( | ||||
|                     **self._object_key).limit(1).select_related(max_depth=max_depth) | ||||
|  | ||||
|  | ||||
|         if obj: | ||||
|             obj = obj[0] | ||||
|         else: | ||||
|             msg = "Reloaded document has been deleted" | ||||
|             raise OperationError(msg) | ||||
|         for field in self._fields: | ||||
|             raise self.DoesNotExist("Document does not exist") | ||||
|         for field in self._fields_ordered: | ||||
|             setattr(self, field, self._reload(field, obj[field])) | ||||
|         if self._dynamic: | ||||
|             for name in self._dynamic_fields.keys(): | ||||
|                 setattr(self, name, self._reload(name, obj._data[name])) | ||||
|         self._changed_fields = obj._changed_fields | ||||
|         self._created = False | ||||
|         return obj | ||||
|  | ||||
|     def _reload(self, key, value): | ||||
| @@ -386,6 +480,7 @@ class Document(BaseDocument): | ||||
|             value = [self._reload(key, v) for v in value] | ||||
|             value = BaseList(value, self, key) | ||||
|         elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)): | ||||
|             value._instance = None | ||||
|             value._changed_fields = [] | ||||
|         return value | ||||
|  | ||||
| @@ -402,18 +497,172 @@ class Document(BaseDocument): | ||||
|         """This method registers the delete rules to apply when removing this | ||||
|         object. | ||||
|         """ | ||||
|         delete_rules = cls._meta.get('delete_rules') or {} | ||||
|         delete_rules[(document_cls, field_name)] = rule | ||||
|         cls._meta['delete_rules'] = delete_rules | ||||
|         classes = [get_document(class_name) | ||||
|                     for class_name in cls._subclasses | ||||
|                     if class_name != cls.__name__] + [cls] | ||||
|         documents = [get_document(class_name) | ||||
|                      for class_name in document_cls._subclasses | ||||
|                      if class_name != document_cls.__name__] + [document_cls] | ||||
|  | ||||
|         for cls in classes: | ||||
|             for document_cls in documents: | ||||
|                 delete_rules = cls._meta.get('delete_rules') or {} | ||||
|                 delete_rules[(document_cls, field_name)] = rule | ||||
|                 cls._meta['delete_rules'] = delete_rules | ||||
|  | ||||
|     @classmethod | ||||
|     def drop_collection(cls): | ||||
|         """Drops the entire collection associated with this | ||||
|         :class:`~mongoengine.Document` type from the database. | ||||
|         """ | ||||
|         cls._collection = None | ||||
|         db = cls._get_db() | ||||
|         db.drop_collection(cls._get_collection_name()) | ||||
|         queryset.QuerySet._reset_already_indexed(cls) | ||||
|  | ||||
|     @classmethod | ||||
|     def ensure_index(cls, key_or_list, drop_dups=False, background=False, | ||||
|         **kwargs): | ||||
|         """Ensure that the given indexes are in place. | ||||
|  | ||||
|         :param key_or_list: a single index key or a list of index keys (to | ||||
|             construct a multi-field index); keys may be prefixed with a **+** | ||||
|             or a **-** to determine the index ordering | ||||
|         """ | ||||
|         index_spec = cls._build_index_spec(key_or_list) | ||||
|         index_spec = index_spec.copy() | ||||
|         fields = index_spec.pop('fields') | ||||
|         index_spec['drop_dups'] = drop_dups | ||||
|         index_spec['background'] = background | ||||
|         index_spec.update(kwargs) | ||||
|  | ||||
|         return cls._get_collection().ensure_index(fields, **index_spec) | ||||
|  | ||||
|     @classmethod | ||||
|     def ensure_indexes(cls): | ||||
|         """Checks the document meta data and ensures all the indexes exist. | ||||
|  | ||||
|         Global defaults can be set in the meta - see :doc:`guide/defining-documents` | ||||
|  | ||||
|         .. note:: You can disable automatic index creation by setting | ||||
|                   `auto_create_index` to False in the documents meta data | ||||
|         """ | ||||
|         background = cls._meta.get('index_background', False) | ||||
|         drop_dups = cls._meta.get('index_drop_dups', False) | ||||
|         index_opts = cls._meta.get('index_opts') or {} | ||||
|         index_cls = cls._meta.get('index_cls', True) | ||||
|  | ||||
|         collection = cls._get_collection() | ||||
|         if collection.read_preference > 1: | ||||
|             return | ||||
|  | ||||
|         # determine if an index which we are creating includes | ||||
|         # _cls as its first field; if so, we can avoid creating | ||||
|         # an extra index on _cls, as mongodb will use the existing | ||||
|         # index to service queries against _cls | ||||
|         cls_indexed = False | ||||
|  | ||||
|         # Ensure document-defined indexes are created | ||||
|         if cls._meta['index_specs']: | ||||
|             index_spec = cls._meta['index_specs'] | ||||
|             for spec in index_spec: | ||||
|                 spec = spec.copy() | ||||
|                 fields = spec.pop('fields') | ||||
|                 cls_indexed = cls_indexed or includes_cls(fields) | ||||
|                 opts = index_opts.copy() | ||||
|                 opts.update(spec) | ||||
|                 collection.ensure_index(fields, background=background, | ||||
|                                         drop_dups=drop_dups, **opts) | ||||
|  | ||||
|         # If _cls is being used (for polymorphism), it needs an index, | ||||
|         # only if another index doesn't begin with _cls | ||||
|         if (index_cls and not cls_indexed and | ||||
|            cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True): | ||||
|             collection.ensure_index('_cls', background=background, | ||||
|                                     **index_opts) | ||||
|  | ||||
|     @classmethod | ||||
|     def list_indexes(cls, go_up=True, go_down=True): | ||||
|         """ Lists all of the indexes that should be created for given | ||||
|         collection. It includes all the indexes from super- and sub-classes. | ||||
|         """ | ||||
|  | ||||
|         if cls._meta.get('abstract'): | ||||
|             return [] | ||||
|  | ||||
|         # get all the base classes, subclasses and sieblings | ||||
|         classes = [] | ||||
|         def get_classes(cls): | ||||
|  | ||||
|             if (cls not in classes and | ||||
|                isinstance(cls, TopLevelDocumentMetaclass)): | ||||
|                 classes.append(cls) | ||||
|  | ||||
|             for base_cls in cls.__bases__: | ||||
|                 if (isinstance(base_cls, TopLevelDocumentMetaclass) and | ||||
|                    base_cls != Document and | ||||
|                    not base_cls._meta.get('abstract') and | ||||
|                    base_cls._get_collection().full_name == cls._get_collection().full_name and | ||||
|                    base_cls not in classes): | ||||
|                     classes.append(base_cls) | ||||
|                     get_classes(base_cls) | ||||
|             for subclass in cls.__subclasses__(): | ||||
|                 if (isinstance(base_cls, TopLevelDocumentMetaclass) and | ||||
|                    subclass._get_collection().full_name == cls._get_collection().full_name and | ||||
|                    subclass not in classes): | ||||
|                     classes.append(subclass) | ||||
|                     get_classes(subclass) | ||||
|  | ||||
|         get_classes(cls) | ||||
|  | ||||
|         # get the indexes spec for all of the gathered classes | ||||
|         def get_indexes_spec(cls): | ||||
|             indexes = [] | ||||
|  | ||||
|             if cls._meta['index_specs']: | ||||
|                 index_spec = cls._meta['index_specs'] | ||||
|                 for spec in index_spec: | ||||
|                     spec = spec.copy() | ||||
|                     fields = spec.pop('fields') | ||||
|                     indexes.append(fields) | ||||
|             return indexes | ||||
|  | ||||
|         indexes = [] | ||||
|         for cls in classes: | ||||
|             for index in get_indexes_spec(cls): | ||||
|                 if index not in indexes: | ||||
|                     indexes.append(index) | ||||
|  | ||||
|         # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed | ||||
|         if [(u'_id', 1)] not in indexes: | ||||
|             indexes.append([(u'_id', 1)]) | ||||
|         if (cls._meta.get('index_cls', True) and | ||||
|            cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True): | ||||
|              indexes.append([(u'_cls', 1)]) | ||||
|  | ||||
|         return indexes | ||||
|  | ||||
|     @classmethod | ||||
|     def compare_indexes(cls): | ||||
|         """ Compares the indexes defined in MongoEngine with the ones existing | ||||
|         in the database. Returns any missing/extra indexes. | ||||
|         """ | ||||
|  | ||||
|         required = cls.list_indexes() | ||||
|         existing = [info['key'] for info in cls._get_collection().index_information().values()] | ||||
|         missing = [index for index in required if index not in existing] | ||||
|         extra = [index for index in existing if index not in required] | ||||
|  | ||||
|         # if { _cls: 1 } is missing, make sure it's *really* necessary | ||||
|         if [(u'_cls', 1)] in missing: | ||||
|             cls_obsolete = False | ||||
|             for index in existing: | ||||
|                 if includes_cls(index) and index not in extra: | ||||
|                     cls_obsolete = True | ||||
|                     break | ||||
|             if cls_obsolete: | ||||
|                 missing.remove([(u'_cls', 1)]) | ||||
|  | ||||
|         return {'missing': missing, 'extra': extra} | ||||
|  | ||||
|  | ||||
| class DynamicDocument(Document): | ||||
| @@ -422,7 +671,7 @@ class DynamicDocument(Document): | ||||
|     way as an ordinary document but has expando style properties.  Any data | ||||
|     passed or set against the :class:`~mongoengine.DynamicDocument` that is | ||||
|     not a field is automatically converted into a | ||||
|     :class:`~mongoengine.DynamicField` and data can be attributed to that | ||||
|     :class:`~mongoengine.fields.DynamicField` and data can be attributed to that | ||||
|     field. | ||||
|  | ||||
|     .. note:: | ||||
| @@ -464,7 +713,13 @@ class DynamicEmbeddedDocument(EmbeddedDocument): | ||||
|         """Deletes the attribute by setting to None and allowing _delta to unset | ||||
|         it""" | ||||
|         field_name = args[0] | ||||
|         setattr(self, field_name, None) | ||||
|         if field_name in self._fields: | ||||
|             default = self._fields[field_name].default | ||||
|             if callable(default): | ||||
|                 default = default() | ||||
|             setattr(self, field_name, default) | ||||
|         else: | ||||
|             setattr(self, field_name, None) | ||||
|  | ||||
|  | ||||
| class MapReduceDocument(object): | ||||
|   | ||||
							
								
								
									
										126
									
								
								mongoengine/errors.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										126
									
								
								mongoengine/errors.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,126 @@ | ||||
| from collections import defaultdict | ||||
|  | ||||
| from mongoengine.python_support import txt_type | ||||
|  | ||||
|  | ||||
| __all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError', | ||||
|            'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError', | ||||
|            'OperationError', 'NotUniqueError', 'ValidationError') | ||||
|  | ||||
|  | ||||
class NotRegistered(Exception):
    """Raised when a looked-up document class has not been registered."""
    pass


class InvalidDocumentError(Exception):
    """Raised when a document definition or instance is invalid."""
    pass


class LookUpError(AttributeError):
    """Raised when a field/attribute lookup on a document fails."""
    pass


class DoesNotExist(Exception):
    """Raised when a query expected to match a document matches none."""
    pass


class MultipleObjectsReturned(Exception):
    """Raised when a query expected to match one document matches several."""
    pass


class InvalidQueryError(Exception):
    """Raised when a query cannot be translated / executed."""
    pass


class OperationError(Exception):
    """Raised when a database operation (save, update, delete, ...) fails."""
    pass


class NotUniqueError(OperationError):
    """Raised when an operation violates a unique constraint.

    Subclass of :class:`OperationError`, so callers catching the broader
    error still work.
    """
    pass
|  | ||||
|  | ||||
class ValidationError(AssertionError):
    """Validation exception.

    May represent an error validating a field or a
    document containing fields with validation errors.

    :ivar errors: A dictionary of errors for fields within this
        document or list, or None if the error is for an
        individual field.
    """

    # Class-level defaults; instances get their own values in __init__.
    errors = {}
    field_name = None
    _message = None

    def __init__(self, message="", **kwargs):
        # `errors` maps field names / list indices to nested error values;
        # `field_name` identifies the single offending field, when known.
        self.errors = kwargs.get('errors', {})
        self.field_name = kwargs.get('field_name')
        self.message = message

    def __str__(self):
        return txt_type(self.message)

    def __repr__(self):
        return '%s(%s,)' % (self.__class__.__name__, self.message)

    def __getattribute__(self, name):
        # Intercept reads of `message` so the returned text is decorated
        # with a summary of any nested per-field errors.
        message = super(ValidationError, self).__getattribute__(name)
        if name == 'message':
            if self.field_name:
                # NOTE(review): '%s' % message is a no-op for strings;
                # presumably a placeholder for field-name prefixing — confirm.
                message = '%s' % message
            if self.errors:
                message = '%s(%s)' % (message, self._format_errors())
        return message

    def _get_message(self):
        # Returns the raw stored message; reading `_message` directly avoids
        # re-triggering the `message` decoration in __getattribute__.
        return self._message

    def _set_message(self, message):
        self._message = message

    # Property backing `message` so assignment in __init__ stores to
    # `_message` while reads go through __getattribute__'s decoration.
    message = property(_get_message, _set_message)

    def to_dict(self):
        """Returns a dictionary of all errors within a document

        Keys are field names or list indices and values are the
        validation error messages, or a nested dictionary of
        errors for an embedded document or list.
        """

        # Recursively flatten nested ValidationErrors / dicts into plain
        # dicts of unicode messages.
        def build_dict(source):
            errors_dict = {}
            if not source:
                return errors_dict
            if isinstance(source, dict):
                for field_name, error in source.iteritems():
                    errors_dict[field_name] = build_dict(error)
            elif isinstance(source, ValidationError) and source.errors:
                return build_dict(source.errors)
            else:
                # Leaf value: coerce the error (message or exception) to text.
                return unicode(source)
            return errors_dict
        if not self.errors:
            return {}
        return build_dict(self.errors)

    def _format_errors(self):
        """Returns a string listing all errors within a document"""

        # Build a dotted key path for a nested error value (lists/dicts are
        # flattened into space-separated segments).
        def generate_key(value, prefix=''):
            if isinstance(value, list):
                value = ' '.join([generate_key(k) for k in value])
            if isinstance(value, dict):
                value = ' '.join(
                        [generate_key(v, k) for k, v in value.iteritems()])

            results = "%s.%s" % (prefix, value) if prefix else value
            return results

        # Group field names by their (formatted) error message.
        error_dict = defaultdict(list)
        for k, v in self.to_dict().iteritems():
            error_dict[generate_key(v)].append(k)
        return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()])
| @@ -4,36 +4,47 @@ import itertools | ||||
| import re | ||||
| import time | ||||
| import urllib2 | ||||
| import urlparse | ||||
| import uuid | ||||
| import warnings | ||||
| from operator import itemgetter | ||||
|  | ||||
| try: | ||||
|     import dateutil | ||||
| except ImportError: | ||||
|     dateutil = None | ||||
| else: | ||||
|     import dateutil.parser | ||||
|  | ||||
| import pymongo | ||||
| import gridfs | ||||
| from bson import Binary, DBRef, SON, ObjectId | ||||
|  | ||||
| from mongoengine.errors import ValidationError | ||||
| from mongoengine.python_support import (PY3, bin_type, txt_type, | ||||
|                                         str_types, StringIO) | ||||
| from base import (BaseField, ComplexBaseField, ObjectIdField, | ||||
|                   ValidationError, get_document, BaseDocument) | ||||
| from base import (BaseField, ComplexBaseField, ObjectIdField, GeoJsonBaseField, | ||||
|                   get_document, BaseDocument) | ||||
| from queryset import DO_NOTHING, QuerySet | ||||
| from document import Document, EmbeddedDocument | ||||
| from connection import get_db, DEFAULT_CONNECTION_NAME | ||||
|  | ||||
|  | ||||
| try: | ||||
|     from PIL import Image, ImageOps | ||||
| except ImportError: | ||||
|     Image = None | ||||
|     ImageOps = None | ||||
|  | ||||
| __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', | ||||
|            'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField', | ||||
|            'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField', | ||||
|            'DecimalField', 'ComplexDateTimeField', 'URLField', 'DynamicField', | ||||
|            'GenericReferenceField', 'FileField', 'BinaryField', | ||||
|            'SortedListField', 'EmailField', 'GeoPointField', 'ImageField', | ||||
|            'SequenceField', 'UUIDField', 'GenericEmbeddedDocumentField'] | ||||
| __all__ = ['StringField',  'URLField',  'EmailField',  'IntField',  'LongField', | ||||
|            'FloatField',  'DecimalField',  'BooleanField',  'DateTimeField', | ||||
|            'ComplexDateTimeField',  'EmbeddedDocumentField', 'ObjectIdField', | ||||
|            'GenericEmbeddedDocumentField',  'DynamicField',  'ListField', | ||||
|            'SortedListField',  'DictField',  'MapField',  'ReferenceField', | ||||
|            'GenericReferenceField',  'BinaryField',  'GridFSError', | ||||
|            'GridFSProxy',  'FileField',  'ImageGridFsProxy', | ||||
|            'ImproperlyConfigured',  'ImageField',  'GeoPointField', 'PointField', | ||||
|            'LineStringField', 'PolygonField', 'SequenceField',  'UUIDField', | ||||
|            'GeoJsonBaseField'] | ||||
|  | ||||
|  | ||||
| RECURSIVE_REFERENCE_CONSTANT = 'self' | ||||
|  | ||||
| @@ -104,11 +115,11 @@ class URLField(StringField): | ||||
|     """ | ||||
|  | ||||
|     _URL_REGEX = re.compile( | ||||
|         r'^(?:http|ftp)s?://' # http:// or https:// | ||||
|         r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain... | ||||
|         r'localhost|' #localhost... | ||||
|         r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip | ||||
|         r'(?::\d+)?' # optional port | ||||
|         r'^(?:http|ftp)s?://'  # http:// or https:// | ||||
|         r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain... | ||||
|         r'localhost|'  # localhost... | ||||
|         r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip | ||||
|         r'(?::\d+)?'  # optional port | ||||
|         r'(?:/?|[/?]\S+)$', re.IGNORECASE) | ||||
|  | ||||
|     def __init__(self, verify_exists=False, url_regex=None, **kwargs): | ||||
| @@ -125,8 +136,7 @@ class URLField(StringField): | ||||
|             warnings.warn( | ||||
|                 "The URLField verify_exists argument has intractable security " | ||||
|                 "and performance issues. Accordingly, it has been deprecated.", | ||||
|             DeprecationWarning | ||||
|             ) | ||||
|                 DeprecationWarning) | ||||
|             try: | ||||
|                 request = urllib2.Request(value) | ||||
|                 urllib2.urlopen(request) | ||||
| @@ -143,7 +153,7 @@ class EmailField(StringField): | ||||
|     EMAIL_REGEX = re.compile( | ||||
|         r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*"  # dot-atom | ||||
|         r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"'  # quoted-string | ||||
|         r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE  # domain | ||||
|         r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,253}[A-Z0-9])?\.)+[A-Z]{2,6}$', re.IGNORECASE  # domain | ||||
|     ) | ||||
|  | ||||
|     def validate(self, value): | ||||
| @@ -153,7 +163,7 @@ class EmailField(StringField): | ||||
|  | ||||
|  | ||||
| class IntField(BaseField): | ||||
|     """An integer field. | ||||
|     """An 32-bit integer field. | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, min_value=None, max_value=None, **kwargs): | ||||
| @@ -186,6 +196,40 @@ class IntField(BaseField): | ||||
|         return int(value) | ||||
|  | ||||
|  | ||||
class LongField(BaseField):
    """A 64-bit integer field.

    :param min_value: smallest value accepted by :meth:`validate`.
    :param max_value: largest value accepted by :meth:`validate`.
    """

    def __init__(self, min_value=None, max_value=None, **kwargs):
        self.min_value, self.max_value = min_value, max_value
        super(LongField, self).__init__(**kwargs)

    def to_python(self, value):
        # Best-effort coercion: leave the value untouched when it cannot be
        # converted — validate() will report the error later.
        try:
            value = long(value)
        except (TypeError, ValueError):
            pass
        return value

    def validate(self, value):
        try:
            value = long(value)
        except (TypeError, ValueError):
            # Narrowed from a bare ``except:`` so non-conversion errors
            # (e.g. KeyboardInterrupt, SystemExit) are not swallowed.
            self.error('%s could not be converted to long' % value)

        if self.min_value is not None and value < self.min_value:
            self.error('Long value is too small')

        if self.max_value is not None and value > self.max_value:
            self.error('Long value is too large')

    def prepare_query_value(self, op, value):
        # None passes through so unset queries remain expressible.
        if value is None:
            return value

        return long(value)
|  | ||||
|  | ||||
| class FloatField(BaseField): | ||||
|     """An floating point number field. | ||||
|     """ | ||||
| @@ -223,30 +267,61 @@ class FloatField(BaseField): | ||||
| class DecimalField(BaseField): | ||||
|     """A fixed-point decimal number field. | ||||
|  | ||||
|     .. versionchanged:: 0.8 | ||||
|     .. versionadded:: 0.3 | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, min_value=None, max_value=None, **kwargs): | ||||
|         self.min_value, self.max_value = min_value, max_value | ||||
|     def __init__(self, min_value=None, max_value=None, force_string=False, | ||||
|                  precision=2, rounding=decimal.ROUND_HALF_UP, **kwargs): | ||||
|         """ | ||||
|         :param min_value: Validation rule for the minimum acceptable value. | ||||
|         :param max_value: Validation rule for the maximum acceptable value. | ||||
|         :param force_string: Store as a string. | ||||
|         :param precision: Number of decimal places to store. | ||||
|         :param rounding: The rounding rule from the python decimal libary: | ||||
|  | ||||
|             - decimal.ROUND_CEILING (towards Infinity) | ||||
|             - decimal.ROUND_DOWN (towards zero) | ||||
|             - decimal.ROUND_FLOOR (towards -Infinity) | ||||
|             - decimal.ROUND_HALF_DOWN (to nearest with ties going towards zero) | ||||
|             - decimal.ROUND_HALF_EVEN (to nearest with ties going to nearest even integer) | ||||
|             - decimal.ROUND_HALF_UP (to nearest with ties going away from zero) | ||||
|             - decimal.ROUND_UP (away from zero) | ||||
|             - decimal.ROUND_05UP (away from zero if last digit after rounding towards zero would have been 0 or 5; otherwise towards zero) | ||||
|  | ||||
|             Defaults to: ``decimal.ROUND_HALF_UP`` | ||||
|  | ||||
|         """ | ||||
|         self.min_value = min_value | ||||
|         self.max_value = max_value | ||||
|         self.force_string = force_string | ||||
|         self.precision = decimal.Decimal(".%s" % ("0" * precision)) | ||||
|         self.rounding = rounding | ||||
|  | ||||
|         super(DecimalField, self).__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         original_value = value | ||||
|         if not isinstance(value, basestring): | ||||
|             value = unicode(value) | ||||
|         if value is None: | ||||
|             return value | ||||
|  | ||||
|         # Convert to string for python 2.6 before casting to Decimal | ||||
|         try: | ||||
|             value = decimal.Decimal(value) | ||||
|         except ValueError: | ||||
|             return original_value | ||||
|         return value | ||||
|             value = decimal.Decimal("%s" % value) | ||||
|         except decimal.InvalidOperation: | ||||
|             return value | ||||
|         return value.quantize(self.precision, rounding=self.rounding) | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         return unicode(value) | ||||
|         if value is None: | ||||
|             return value | ||||
|         if self.force_string: | ||||
|             return unicode(value) | ||||
|         return float(self.to_python(value)) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, decimal.Decimal): | ||||
|             if not isinstance(value, basestring): | ||||
|                 value = str(value) | ||||
|                 value = unicode(value) | ||||
|             try: | ||||
|                 value = decimal.Decimal(value) | ||||
|             except Exception, exc: | ||||
| @@ -258,6 +333,9 @@ class DecimalField(BaseField): | ||||
|         if self.max_value is not None and value > self.max_value: | ||||
|             self.error('Decimal value is too large') | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         return self.to_mongo(value) | ||||
|  | ||||
|  | ||||
| class BooleanField(BaseField): | ||||
|     """A boolean field type. | ||||
| @@ -280,6 +358,11 @@ class BooleanField(BaseField): | ||||
| class DateTimeField(BaseField): | ||||
|     """A datetime field. | ||||
|  | ||||
|     Uses the python-dateutil library if available alternatively use time.strptime | ||||
|     to parse the dates.  Note: python-dateutil's parser is fully featured and when | ||||
|     installed you can utilise it to convert varing types of date formats into valid | ||||
|     python datetime objects. | ||||
|  | ||||
|     Note: Microseconds are rounded to the nearest millisecond. | ||||
|       Pre UTC microsecond support is effecively broken. | ||||
|       Use :class:`~mongoengine.fields.ComplexDateTimeField` if you | ||||
| @@ -287,22 +370,30 @@ class DateTimeField(BaseField): | ||||
|     """ | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, (datetime.datetime, datetime.date)): | ||||
|         new_value = self.to_mongo(value) | ||||
|         if not isinstance(new_value, (datetime.datetime, datetime.date)): | ||||
|             self.error(u'cannot parse date "%s"' % value) | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         return self.prepare_query_value(None, value) | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
|             return value | ||||
|         if isinstance(value, datetime.datetime): | ||||
|             return value | ||||
|         if isinstance(value, datetime.date): | ||||
|             return datetime.datetime(value.year, value.month, value.day) | ||||
|         if callable(value): | ||||
|             return value() | ||||
|  | ||||
|         if not isinstance(value, basestring): | ||||
|             return None | ||||
|  | ||||
|         # Attempt to parse a datetime: | ||||
|         # value = smart_str(value) | ||||
|         if dateutil: | ||||
|             try: | ||||
|                 return dateutil.parser.parse(value) | ||||
|             except ValueError: | ||||
|                 return None | ||||
|  | ||||
|         # split usecs, because they are not recognized by strptime. | ||||
|         if '.' in value: | ||||
|             try: | ||||
| @@ -314,19 +405,22 @@ class DateTimeField(BaseField): | ||||
|             usecs = 0 | ||||
|         kwargs = {'microsecond': usecs} | ||||
|         try:  # Seconds are optional, so try converting seconds first. | ||||
|             return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M:%S')[:6], | ||||
|                                      **kwargs) | ||||
|             return datetime.datetime(*time.strptime(value, | ||||
|                                      '%Y-%m-%d %H:%M:%S')[:6], **kwargs) | ||||
|         except ValueError: | ||||
|             try:  # Try without seconds. | ||||
|                 return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M')[:5], | ||||
|                                          **kwargs) | ||||
|                 return datetime.datetime(*time.strptime(value, | ||||
|                                          '%Y-%m-%d %H:%M')[:5], **kwargs) | ||||
|             except ValueError:  # Try without hour/minutes/seconds. | ||||
|                 try: | ||||
|                     return datetime.datetime(*time.strptime(value, '%Y-%m-%d')[:3], | ||||
|                                              **kwargs) | ||||
|                     return datetime.datetime(*time.strptime(value, | ||||
|                                              '%Y-%m-%d')[:3], **kwargs) | ||||
|                 except ValueError: | ||||
|                     return None | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         return self.to_mongo(value) | ||||
|  | ||||
|  | ||||
| class ComplexDateTimeField(StringField): | ||||
|     """ | ||||
| @@ -399,7 +493,7 @@ class ComplexDateTimeField(StringField): | ||||
|  | ||||
|     def __get__(self, instance, owner): | ||||
|         data = super(ComplexDateTimeField, self).__get__(instance, owner) | ||||
|         if data == None: | ||||
|         if data is None: | ||||
|             return datetime.datetime.now() | ||||
|         if isinstance(data, datetime.datetime): | ||||
|             return data | ||||
| @@ -410,6 +504,7 @@ class ComplexDateTimeField(StringField): | ||||
|         return super(ComplexDateTimeField, self).__set__(instance, value) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         value = self.to_python(value) | ||||
|         if not isinstance(value, datetime.datetime): | ||||
|             self.error('Only datetime objects may used in a ' | ||||
|                        'ComplexDateTimeField') | ||||
| @@ -422,6 +517,7 @@ class ComplexDateTimeField(StringField): | ||||
|             return original_value | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         value = self.to_python(value) | ||||
|         return self._convert_from_datetime(value) | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
| @@ -460,7 +556,7 @@ class EmbeddedDocumentField(BaseField): | ||||
|             return value | ||||
|         return self.document_type.to_mongo(value) | ||||
|  | ||||
|     def validate(self, value): | ||||
|     def validate(self, value, clean=True): | ||||
|         """Make sure that the document instance is an instance of the | ||||
|         EmbeddedDocument subclass provided when the document was defined. | ||||
|         """ | ||||
| @@ -468,7 +564,7 @@ class EmbeddedDocumentField(BaseField): | ||||
|         if not isinstance(value, self.document_type): | ||||
|             self.error('Invalid embedded document instance provided to an ' | ||||
|                        'EmbeddedDocumentField') | ||||
|         self.document_type.validate(value) | ||||
|         self.document_type.validate(value, clean) | ||||
|  | ||||
|     def lookup_member(self, member_name): | ||||
|         return self.document_type._fields.get(member_name) | ||||
| @@ -498,12 +594,12 @@ class GenericEmbeddedDocumentField(BaseField): | ||||
|  | ||||
|         return value | ||||
|  | ||||
|     def validate(self, value): | ||||
|     def validate(self, value, clean=True): | ||||
|         if not isinstance(value, EmbeddedDocument): | ||||
|             self.error('Invalid embedded document instance provided to an ' | ||||
|                        'GenericEmbeddedDocumentField') | ||||
|  | ||||
|         value.validate() | ||||
|         value.validate(clean=clean) | ||||
|  | ||||
|     def to_mongo(self, document): | ||||
|         if document is None: | ||||
| @@ -529,7 +625,14 @@ class DynamicField(BaseField): | ||||
|             return value | ||||
|  | ||||
|         if hasattr(value, 'to_mongo'): | ||||
|             return value.to_mongo() | ||||
|             cls = value.__class__ | ||||
|             val = value.to_mongo() | ||||
|             # If we its a document thats not inherited add _cls | ||||
|             if (isinstance(value, Document)): | ||||
|                 val = {"_ref": value.to_dbref(), "_cls": cls.__name__} | ||||
|             if (isinstance(value, EmbeddedDocument)): | ||||
|                 val['_cls'] = cls.__name__ | ||||
|             return val | ||||
|  | ||||
|         if not isinstance(value, (dict, list, tuple)): | ||||
|             return value | ||||
| @@ -540,15 +643,23 @@ class DynamicField(BaseField): | ||||
|             value = dict([(k, v) for k, v in enumerate(value)]) | ||||
|  | ||||
|         data = {} | ||||
|         for k, v in value.items(): | ||||
|         for k, v in value.iteritems(): | ||||
|             data[k] = self.to_mongo(v) | ||||
|  | ||||
|         value = data | ||||
|         if is_list:  # Convert back to a list | ||||
|             value = [v for k, v in sorted(data.items(), key=itemgetter(0))] | ||||
|         else: | ||||
|             value = data | ||||
|             value = [v for k, v in sorted(data.iteritems(), key=itemgetter(0))] | ||||
|         return value | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         if isinstance(value, dict) and '_cls' in value: | ||||
|             doc_cls = get_document(value['_cls']) | ||||
|             if '_ref' in value: | ||||
|                 value = doc_cls._get_db().dereference(value['_ref']) | ||||
|             return doc_cls._from_son(value) | ||||
|  | ||||
|         return super(DynamicField, self).to_python(value) | ||||
|  | ||||
|     def lookup_member(self, member_name): | ||||
|         return member_name | ||||
|  | ||||
| @@ -558,6 +669,10 @@ class DynamicField(BaseField): | ||||
|             return StringField().prepare_query_value(op, value) | ||||
|         return self.to_mongo(value) | ||||
|  | ||||
|     def validate(self, value, clean=True): | ||||
|         if hasattr(value, "validate"): | ||||
|             value.validate(clean=clean) | ||||
|  | ||||
|  | ||||
| class ListField(ComplexBaseField): | ||||
|     """A list field that wraps a standard field, allowing multiple instances | ||||
| @@ -569,9 +684,6 @@ class ListField(ComplexBaseField): | ||||
|         Required means it cannot be empty - as the default for ListFields is [] | ||||
|     """ | ||||
|  | ||||
|     # ListFields cannot be indexed with _types - MongoDB doesn't support this | ||||
|     _index_with_types = False | ||||
|  | ||||
|     def __init__(self, field=None, **kwargs): | ||||
|         self.field = field | ||||
|         kwargs.setdefault('default', lambda: []) | ||||
| @@ -581,15 +693,15 @@ class ListField(ComplexBaseField): | ||||
|         """Make sure that a list of valid fields is being used. | ||||
|         """ | ||||
|         if (not isinstance(value, (list, tuple, QuerySet)) or | ||||
|             isinstance(value, basestring)): | ||||
|            isinstance(value, basestring)): | ||||
|             self.error('Only lists and tuples may be used in a list field') | ||||
|         super(ListField, self).validate(value) | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if self.field: | ||||
|             if op in ('set', 'unset') and (not isinstance(value, basestring) | ||||
|                 and not isinstance(value, BaseDocument) | ||||
|                 and hasattr(value, '__iter__')): | ||||
|                and not isinstance(value, BaseDocument) | ||||
|                and hasattr(value, '__iter__')): | ||||
|                 return [self.field.prepare_query_value(op, v) for v in value] | ||||
|             return self.field.prepare_query_value(op, value) | ||||
|         return super(ListField, self).prepare_query_value(op, value) | ||||
| @@ -623,9 +735,25 @@ class SortedListField(ListField): | ||||
|     def to_mongo(self, value): | ||||
|         value = super(SortedListField, self).to_mongo(value) | ||||
|         if self._ordering is not None: | ||||
|             return sorted(value, key=itemgetter(self._ordering), reverse=self._order_reverse) | ||||
|             return sorted(value, key=itemgetter(self._ordering), | ||||
|                           reverse=self._order_reverse) | ||||
|         return sorted(value, reverse=self._order_reverse) | ||||
|  | ||||
def key_not_string(d):
    """ Helper function to recursively determine if any key in a dictionary is
    not a string.

    Returns True as soon as a non-string key is found at any nesting depth,
    False otherwise (the original fell off the end and returned None, which
    is falsy but less explicit).
    """
    for k, v in d.items():
        if not isinstance(k, basestring) or (isinstance(v, dict) and key_not_string(v)):
            return True
    return False
|  | ||||
| def key_has_dot_or_dollar(d): | ||||
|     """ Helper function to recursively determine if any key in a dictionary | ||||
|     contains a dot or a dollar sign. | ||||
|     """ | ||||
|     for k, v in d.items(): | ||||
|         if ('.' in k or '$' in k) or (isinstance(v, dict) and key_has_dot_or_dollar(v)): | ||||
|             return True | ||||
|  | ||||
| class DictField(ComplexBaseField): | ||||
|     """A dictionary field that wraps a standard Python dictionary. This is | ||||
| @@ -652,9 +780,11 @@ class DictField(ComplexBaseField): | ||||
|         if not isinstance(value, dict): | ||||
|             self.error('Only dictionaries may be used in a DictField') | ||||
|  | ||||
|         if any(k for k in value.keys() if not isinstance(k, basestring)): | ||||
|             self.error('Invalid dictionary key - documents must have only string keys') | ||||
|         if any(('.' in k or '$' in k) for k in value.keys()): | ||||
|         if key_not_string(value): | ||||
|             msg = ("Invalid dictionary key - documents must " | ||||
|                    "have only string keys") | ||||
|             self.error(msg) | ||||
|         if key_has_dot_or_dollar(value): | ||||
|             self.error('Invalid dictionary key name - keys may not contain "."' | ||||
|                        ' or "$" characters') | ||||
|         super(DictField, self).validate(value) | ||||
| @@ -670,6 +800,9 @@ class DictField(ComplexBaseField): | ||||
|         if op in match_operators and isinstance(value, basestring): | ||||
|             return StringField().prepare_query_value(op, value) | ||||
|  | ||||
|         if hasattr(self.field, 'field'): | ||||
|             return self.field.prepare_query_value(op, value) | ||||
|  | ||||
|         return super(DictField, self).prepare_query_value(op, value) | ||||
|  | ||||
|  | ||||
| @@ -703,7 +836,7 @@ class ReferenceField(BaseField): | ||||
|       * NULLIFY     - Updates the reference to null. | ||||
|       * CASCADE     - Deletes the documents associated with the reference. | ||||
|       * DENY        - Prevent the deletion of the reference object. | ||||
|       * PULL        - Pull the reference from a :class:`~mongoengine.ListField` | ||||
|       * PULL        - Pull the reference from a :class:`~mongoengine.fields.ListField` | ||||
|                       of references | ||||
|  | ||||
|     Alternative syntax for registering delete rules (useful when implementing | ||||
| @@ -724,7 +857,7 @@ class ReferenceField(BaseField): | ||||
|     .. versionchanged:: 0.5 added `reverse_delete_rule` | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, document_type, dbref=None, | ||||
|     def __init__(self, document_type, dbref=False, | ||||
|                  reverse_delete_rule=DO_NOTHING, **kwargs): | ||||
|         """Initialises the Reference Field. | ||||
|  | ||||
| @@ -738,12 +871,7 @@ class ReferenceField(BaseField): | ||||
|                 self.error('Argument to ReferenceField constructor must be a ' | ||||
|                            'document class or a string') | ||||
|  | ||||
|         if dbref is None: | ||||
|             msg = ("ReferenceFields will default to using ObjectId " | ||||
|                    " strings in 0.8, set DBRef=True if this isn't desired") | ||||
|             warnings.warn(msg, FutureWarning) | ||||
|  | ||||
|         self.dbref = dbref if dbref is not None else True  # To change in 0.8 | ||||
|         self.dbref = dbref | ||||
|         self.document_type_obj = document_type | ||||
|         self.reverse_delete_rule = reverse_delete_rule | ||||
|         super(ReferenceField, self).__init__(**kwargs) | ||||
| @@ -766,9 +894,9 @@ class ReferenceField(BaseField): | ||||
|  | ||||
|         # Get value from document instance if available | ||||
|         value = instance._data.get(self.name) | ||||
|  | ||||
|         self._auto_dereference = instance._fields[self.name]._auto_dereference | ||||
|         # Dereference DBRefs | ||||
|         if isinstance(value, DBRef): | ||||
|         if self._auto_dereference and isinstance(value, DBRef): | ||||
|             value = self.document_type._get_db().dereference(value) | ||||
|             if value is not None: | ||||
|                 instance._data[self.name] = self.document_type._from_son(value) | ||||
| @@ -780,8 +908,6 @@ class ReferenceField(BaseField): | ||||
|             if not self.dbref: | ||||
|                 return document.id | ||||
|             return document | ||||
|         elif not self.dbref and isinstance(document, basestring): | ||||
|             return document | ||||
|  | ||||
|         id_field_name = self.document_type._meta['id_field'] | ||||
|         id_field = self.document_type._fields[id_field_name] | ||||
| @@ -806,7 +932,7 @@ class ReferenceField(BaseField): | ||||
|         """Convert a MongoDB-compatible type to a Python type. | ||||
|         """ | ||||
|         if (not self.dbref and | ||||
|             not isinstance(value, (DBRef, Document, EmbeddedDocument))): | ||||
|            not isinstance(value, (DBRef, Document, EmbeddedDocument))): | ||||
|             collection = self.document_type._get_collection_name() | ||||
|             value = DBRef(collection, self.document_type.id.to_python(value)) | ||||
|         return value | ||||
| @@ -848,17 +974,22 @@ class GenericReferenceField(BaseField): | ||||
|             return self | ||||
|  | ||||
|         value = instance._data.get(self.name) | ||||
|         if isinstance(value, (dict, SON)): | ||||
|         self._auto_dereference = instance._fields[self.name]._auto_dereference | ||||
|         if self._auto_dereference and isinstance(value, (dict, SON)): | ||||
|             instance._data[self.name] = self.dereference(value) | ||||
|  | ||||
|         return super(GenericReferenceField, self).__get__(instance, owner) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, (Document, DBRef)): | ||||
|         if not isinstance(value, (Document, DBRef, dict, SON)): | ||||
|             self.error('GenericReferences can only contain documents') | ||||
|  | ||||
|         if isinstance(value, (dict, SON)): | ||||
|             if '_ref' not in value or '_cls' not in value: | ||||
|                 self.error('GenericReferences can only contain documents') | ||||
|  | ||||
|         # We need the id from the saved object to create the DBRef | ||||
|         if isinstance(value, Document) and value.id is None: | ||||
|         elif isinstance(value, Document) and value.id is None: | ||||
|             self.error('You can only reference documents once they have been' | ||||
|                        ' saved to the database') | ||||
|  | ||||
| @@ -892,7 +1023,10 @@ class GenericReferenceField(BaseField): | ||||
|         id_ = id_field.to_mongo(id_) | ||||
|         collection = document._get_collection_name() | ||||
|         ref = DBRef(collection, id_) | ||||
|         return {'_cls': document._class_name, '_ref': ref} | ||||
|         return SON(( | ||||
|             ('_cls', document._class_name), | ||||
|             ('_ref', ref) | ||||
|         )) | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
| @@ -922,7 +1056,7 @@ class BinaryField(BaseField): | ||||
|         if not isinstance(value, (bin_type, txt_type, Binary)): | ||||
|             self.error("BinaryField only accepts instances of " | ||||
|                        "(%s, %s, Binary)" % ( | ||||
|                         bin_type.__name__, txt_type.__name__)) | ||||
|                        bin_type.__name__, txt_type.__name__)) | ||||
|  | ||||
|         if self.max_bytes is not None and len(value) > self.max_bytes: | ||||
|             self.error('Binary value is too long') | ||||
| @@ -960,7 +1094,7 @@ class GridFSProxy(object): | ||||
|         if name in attrs: | ||||
|             return self.__getattribute__(name) | ||||
|         obj = self.get() | ||||
|         if name in dir(obj): | ||||
|         if hasattr(obj, name): | ||||
|             return getattr(obj, name) | ||||
|         raise AttributeError | ||||
|  | ||||
| @@ -975,14 +1109,26 @@ class GridFSProxy(object): | ||||
|         self_dict['_fs'] = None | ||||
|         return self_dict | ||||
|  | ||||
|     def __copy__(self): | ||||
|         copied = GridFSProxy() | ||||
|         copied.__dict__.update(self.__getstate__()) | ||||
|         return copied | ||||
|  | ||||
|     def __deepcopy__(self, memo): | ||||
|         return self.__copy__() | ||||
|  | ||||
|     def __repr__(self): | ||||
|         return '<%s: %s>' % (self.__class__.__name__, self.grid_id) | ||||
|  | ||||
|     def __str__(self): | ||||
|         name = getattr(self.get(), 'filename', self.grid_id) if self.get() else '(no file)' | ||||
|         return '<%s: %s>' % (self.__class__.__name__, name) | ||||
|  | ||||
|     def __eq__(self, other): | ||||
|         if isinstance(other, GridFSProxy): | ||||
|             return  ((self.grid_id == other.grid_id) and | ||||
|                      (self.collection_name == other.collection_name) and | ||||
|                      (self.db_alias == other.db_alias)) | ||||
|             return ((self.grid_id == other.grid_id) and | ||||
|                     (self.collection_name == other.collection_name) and | ||||
|                     (self.db_alias == other.db_alias)) | ||||
|         else: | ||||
|             return False | ||||
|  | ||||
| @@ -1085,9 +1231,7 @@ class FileField(BaseField): | ||||
|         # Check if a file already exists for this model | ||||
|         grid_file = instance._data.get(self.name) | ||||
|         if not isinstance(grid_file, self.proxy_class): | ||||
|             grid_file = self.proxy_class(key=self.name, instance=instance, | ||||
|                                          db_alias=self.db_alias, | ||||
|                                          collection_name=self.collection_name) | ||||
|             grid_file = self.get_proxy_obj(key=self.name, instance=instance) | ||||
|             instance._data[self.name] = grid_file | ||||
|  | ||||
|         if not grid_file.key: | ||||
| @@ -1107,18 +1251,25 @@ class FileField(BaseField): | ||||
|                     grid_file.delete() | ||||
|                 except: | ||||
|                     pass | ||||
|                 # Create a new file with the new data | ||||
|                 grid_file.put(value) | ||||
|             else: | ||||
|                 # Create a new proxy object as we don't already have one | ||||
|                 instance._data[key] = self.proxy_class(key=key, instance=instance, | ||||
|                                                        collection_name=self.collection_name) | ||||
|                 instance._data[key].put(value) | ||||
|  | ||||
|             # Create a new proxy object as we don't already have one | ||||
|             instance._data[key] = self.get_proxy_obj(key=key, instance=instance) | ||||
|             instance._data[key].put(value) | ||||
|         else: | ||||
|             instance._data[key] = value | ||||
|  | ||||
|         instance._mark_as_changed(key) | ||||
|  | ||||
|     def get_proxy_obj(self, key, instance, db_alias=None, collection_name=None): | ||||
|         if db_alias is None: | ||||
|             db_alias = self.db_alias | ||||
|         if collection_name is None: | ||||
|             collection_name = self.collection_name | ||||
|  | ||||
|         return self.proxy_class(key=key, instance=instance, | ||||
|                                 db_alias=db_alias, | ||||
|                                 collection_name=collection_name) | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         # Store the GridFS file id in MongoDB | ||||
|         if isinstance(value, self.proxy_class) and value.grid_id is not None: | ||||
| @@ -1151,12 +1302,15 @@ class ImageGridFsProxy(GridFSProxy): | ||||
|         applying field properties (size, thumbnail_size) | ||||
|         """ | ||||
|         field = self.instance._fields[self.key] | ||||
|         # Handle nested fields | ||||
|         if hasattr(field, 'field') and isinstance(field.field, FileField): | ||||
|             field = field.field | ||||
|  | ||||
|         try: | ||||
|             img = Image.open(file_obj) | ||||
|             img_format = img.format | ||||
|         except: | ||||
|             raise ValidationError('Invalid image') | ||||
|         except Exception, e: | ||||
|             raise ValidationError('Invalid image: %s' % e) | ||||
|  | ||||
|         if (field.size and (img.size[0] > field.size['width'] or | ||||
|                             img.size[1] > field.size['height'])): | ||||
| @@ -1177,10 +1331,7 @@ class ImageGridFsProxy(GridFSProxy): | ||||
|             size = field.thumbnail_size | ||||
|  | ||||
|             if size['force']: | ||||
|                 thumbnail = ImageOps.fit(img, | ||||
|                                    (size['width'], | ||||
|                                     size['height']), | ||||
|                                    Image.ANTIALIAS) | ||||
|                 thumbnail = ImageOps.fit(img, (size['width'], size['height']), Image.ANTIALIAS) | ||||
|             else: | ||||
|                 thumbnail = img.copy() | ||||
|                 thumbnail.thumbnail((size['width'], | ||||
| @@ -1188,8 +1339,7 @@ class ImageGridFsProxy(GridFSProxy): | ||||
|                                     Image.ANTIALIAS) | ||||
|  | ||||
|         if thumbnail: | ||||
|             thumb_id = self._put_thumbnail(thumbnail, | ||||
|                                           img_format) | ||||
|             thumb_id = self._put_thumbnail(thumbnail, img_format) | ||||
|         else: | ||||
|             thumb_id = None | ||||
|  | ||||
| @@ -1225,6 +1375,7 @@ class ImageGridFsProxy(GridFSProxy): | ||||
|                            height=h, | ||||
|                            format=format, | ||||
|                            **kwargs) | ||||
|  | ||||
|     @property | ||||
|     def size(self): | ||||
|         """ | ||||
| @@ -1292,7 +1443,7 @@ class ImageField(FileField): | ||||
|             if isinstance(att, (tuple, list)): | ||||
|                 if PY3: | ||||
|                     value = dict(itertools.zip_longest(params_size, att, | ||||
|                                                         fillvalue=None)) | ||||
|                                                        fillvalue=None)) | ||||
|                 else: | ||||
|                     value = dict(map(None, params_size, att)) | ||||
|  | ||||
| @@ -1303,30 +1454,9 @@ class ImageField(FileField): | ||||
|             **kwargs) | ||||
|  | ||||
|  | ||||
| class GeoPointField(BaseField): | ||||
|     """A list storing a latitude and longitude. | ||||
|  | ||||
|     .. versionadded:: 0.4 | ||||
|     """ | ||||
|  | ||||
|     _geo_index = True | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """Make sure that a geo-value is of type (x, y) | ||||
|         """ | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             self.error('GeoPointField can only accept tuples or lists ' | ||||
|                        'of (x, y)') | ||||
|  | ||||
|         if not len(value) == 2: | ||||
|             self.error('Value must be a two-dimensional point') | ||||
|         if (not isinstance(value[0], (float, int)) and | ||||
|             not isinstance(value[1], (float, int))): | ||||
|             self.error('Both values in point must be float or int') | ||||
|  | ||||
|  | ||||
| class SequenceField(IntField): | ||||
|     """Provides a sequental counter (see http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers) | ||||
| class SequenceField(BaseField): | ||||
|     """Provides a sequental counter see: | ||||
|      http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers | ||||
|  | ||||
|     .. note:: | ||||
|  | ||||
| @@ -1336,15 +1466,29 @@ class SequenceField(IntField): | ||||
|              cluster of machines, it is easier to create an object ID than have | ||||
|              global, uniformly increasing sequence numbers. | ||||
|  | ||||
|     Use any callable as `value_decorator` to transform calculated counter into | ||||
|     any value suitable for your needs, e.g. string or hexadecimal | ||||
|     representation of the default integer counter value. | ||||
|  | ||||
|     .. versionadded:: 0.5 | ||||
|  | ||||
|     .. versionchanged:: 0.8 added `value_decorator` | ||||
|     """ | ||||
|     def __init__(self, collection_name=None, db_alias=None, sequence_name=None, *args, **kwargs): | ||||
|         self.collection_name = collection_name or 'mongoengine.counters' | ||||
|  | ||||
|     _auto_gen = True | ||||
|     COLLECTION_NAME = 'mongoengine.counters' | ||||
|     VALUE_DECORATOR = int | ||||
|  | ||||
|     def __init__(self, collection_name=None, db_alias=None, sequence_name=None, | ||||
|                  value_decorator=None, *args, **kwargs): | ||||
|         self.collection_name = collection_name or self.COLLECTION_NAME | ||||
|         self.db_alias = db_alias or DEFAULT_CONNECTION_NAME | ||||
|         self.sequence_name = sequence_name | ||||
|         self.value_decorator = (callable(value_decorator) and | ||||
|                                 value_decorator or self.VALUE_DECORATOR) | ||||
|         return super(SequenceField, self).__init__(*args, **kwargs) | ||||
|  | ||||
|     def generate_new_value(self): | ||||
|     def generate(self): | ||||
|         """ | ||||
|         Generate and Increment the counter | ||||
|         """ | ||||
| @@ -1355,7 +1499,34 @@ class SequenceField(IntField): | ||||
|                                              update={"$inc": {"next": 1}}, | ||||
|                                              new=True, | ||||
|                                              upsert=True) | ||||
|         return counter['next'] | ||||
|         return self.value_decorator(counter['next']) | ||||
|  | ||||
|     def set_next_value(self, value): | ||||
|         """Helper method to set the next sequence value""" | ||||
|         sequence_name = self.get_sequence_name() | ||||
|         sequence_id = "%s.%s" % (sequence_name, self.name) | ||||
|         collection = get_db(alias=self.db_alias)[self.collection_name] | ||||
|         counter = collection.find_and_modify(query={"_id": sequence_id}, | ||||
|                                              update={"$set": {"next": value}}, | ||||
|                                              new=True, | ||||
|                                              upsert=True) | ||||
|         return self.value_decorator(counter['next']) | ||||
|  | ||||
|     def get_next_value(self): | ||||
|         """Helper method to get the next value for previewing. | ||||
|  | ||||
|         .. warning:: There is no guarantee this will be the next value | ||||
|         as it is only fixed on set. | ||||
|         """ | ||||
|         sequence_name = self.get_sequence_name() | ||||
|         sequence_id = "%s.%s" % (sequence_name, self.name) | ||||
|         collection = get_db(alias=self.db_alias)[self.collection_name] | ||||
|         data = collection.find_one({"_id": sequence_id}) | ||||
|  | ||||
|         if data: | ||||
|             return self.value_decorator(data['next']+1) | ||||
|  | ||||
|         return self.value_decorator(1) | ||||
|  | ||||
|     def get_sequence_name(self): | ||||
|         if self.sequence_name: | ||||
| @@ -1365,35 +1536,27 @@ class SequenceField(IntField): | ||||
|             return owner._get_collection_name() | ||||
|         else: | ||||
|             return ''.join('_%s' % c if c.isupper() else c | ||||
|                             for c in owner._class_name).strip('_').lower() | ||||
|                            for c in owner._class_name).strip('_').lower() | ||||
|  | ||||
|     def __get__(self, instance, owner): | ||||
|  | ||||
|         if instance is None: | ||||
|             return self | ||||
|  | ||||
|         if not instance._data: | ||||
|             return | ||||
|  | ||||
|         value = instance._data.get(self.name) | ||||
|  | ||||
|         if not value and instance._initialised: | ||||
|             value = self.generate_new_value() | ||||
|         value = super(SequenceField, self).__get__(instance, owner) | ||||
|         if value is None and instance._initialised: | ||||
|             value = self.generate() | ||||
|             instance._data[self.name] = value | ||||
|             instance._mark_as_changed(self.name) | ||||
|  | ||||
|         return int(value) if value else None | ||||
|         return value | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|  | ||||
|         if value is None and instance._initialised: | ||||
|             value = self.generate_new_value() | ||||
|             value = self.generate() | ||||
|  | ||||
|         return super(SequenceField, self).__set__(instance, value) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         if value is None: | ||||
|             value = self.generate_new_value() | ||||
|             value = self.generate() | ||||
|         return value | ||||
|  | ||||
|  | ||||
| @@ -1404,19 +1567,15 @@ class UUIDField(BaseField): | ||||
|     """ | ||||
|     _binary = None | ||||
|  | ||||
|     def __init__(self, binary=None, **kwargs): | ||||
|     def __init__(self, binary=True, **kwargs): | ||||
|         """ | ||||
|         Store UUID data in the database | ||||
|  | ||||
|         :param binary: (optional) boolean store as binary. | ||||
|         :param binary: if False store as a string. | ||||
|  | ||||
|         .. versionchanged:: 0.8.0 | ||||
|         .. versionchanged:: 0.6.19 | ||||
|         """ | ||||
|         if binary is None: | ||||
|             binary = False | ||||
|             msg = ("UUIDFields will soon default to store as binary, please " | ||||
|                   "configure binary=False if you wish to store as a string") | ||||
|             warnings.warn(msg, FutureWarning) | ||||
|         self._binary = binary | ||||
|         super(UUIDField, self).__init__(**kwargs) | ||||
|  | ||||
| @@ -1434,6 +1593,8 @@ class UUIDField(BaseField): | ||||
|     def to_mongo(self, value): | ||||
|         if not self._binary: | ||||
|             return unicode(value) | ||||
|         elif isinstance(value, basestring): | ||||
|             return uuid.UUID(value) | ||||
|         return value | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
| @@ -1449,3 +1610,83 @@ class UUIDField(BaseField): | ||||
|                 value = uuid.UUID(value) | ||||
|             except Exception, exc: | ||||
|                 self.error('Could not convert to UUID: %s' % exc) | ||||
|  | ||||
|  | ||||
| class GeoPointField(BaseField): | ||||
|     """A list storing a latitude and longitude. | ||||
|  | ||||
|     .. versionadded:: 0.4 | ||||
|     """ | ||||
|  | ||||
|     _geo_index = pymongo.GEO2D | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """Make sure that a geo-value is of type (x, y) | ||||
|         """ | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             self.error('GeoPointField can only accept tuples or lists ' | ||||
|                        'of (x, y)') | ||||
|  | ||||
|         if not len(value) == 2: | ||||
|             self.error("Value (%s) must be a two-dimensional point" % repr(value)) | ||||
|         elif (not isinstance(value[0], (float, int)) or | ||||
|               not isinstance(value[1], (float, int))): | ||||
|             self.error("Both values (%s) in point must be float or int" % repr(value)) | ||||
|  | ||||
|  | ||||
| class PointField(GeoJsonBaseField): | ||||
|     """A geo json field storing a latitude and longitude. | ||||
|  | ||||
|     The data is represented as: | ||||
|  | ||||
|     .. code-block:: js | ||||
|  | ||||
|         { "type" : "Point" , | ||||
|           "coordinates" : [x, y]} | ||||
|  | ||||
|     You can either pass a dict with the full information or a list | ||||
|     to set the value. | ||||
|  | ||||
|     Requires mongodb >= 2.4 | ||||
|     .. versionadded:: 0.8 | ||||
|     """ | ||||
|     _type = "Point" | ||||
|  | ||||
|  | ||||
| class LineStringField(GeoJsonBaseField): | ||||
|     """A geo json field storing a line of latitude and longitude coordinates. | ||||
|  | ||||
|     The data is represented as: | ||||
|  | ||||
|     .. code-block:: js | ||||
|  | ||||
|         { "type" : "LineString" , | ||||
|           "coordinates" : [[x1, y1], [x1, y1] ... [xn, yn]]} | ||||
|  | ||||
|     You can either pass a dict with the full information or a list of points. | ||||
|  | ||||
|     Requires mongodb >= 2.4 | ||||
|     .. versionadded:: 0.8 | ||||
|     """ | ||||
|     _type = "LineString" | ||||
|  | ||||
|  | ||||
| class PolygonField(GeoJsonBaseField): | ||||
|     """A geo json field storing a polygon of latitude and longitude coordinates. | ||||
|  | ||||
|     The data is represented as: | ||||
|  | ||||
|     .. code-block:: js | ||||
|  | ||||
|         { "type" : "Polygon" , | ||||
|           "coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]], | ||||
|                            [[x1, y1], [x1, y1] ... [xn, yn]]} | ||||
|  | ||||
|     You can either pass a dict with the full information or a list | ||||
|     of LineStrings. The first LineString being the outside and the rest being | ||||
|     holes. | ||||
|  | ||||
|     Requires mongodb >= 2.4 | ||||
|     .. versionadded:: 0.8 | ||||
|     """ | ||||
|     _type = "Polygon" | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										11
									
								
								mongoengine/queryset/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										11
									
								
								mongoengine/queryset/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,11 @@ | ||||
| from mongoengine.errors import (DoesNotExist, MultipleObjectsReturned, | ||||
|                                 InvalidQueryError, OperationError, | ||||
|                                 NotUniqueError) | ||||
| from mongoengine.queryset.field_list import * | ||||
| from mongoengine.queryset.manager import * | ||||
| from mongoengine.queryset.queryset import * | ||||
| from mongoengine.queryset.transform import * | ||||
| from mongoengine.queryset.visitor import * | ||||
|  | ||||
| __all__ = (field_list.__all__ + manager.__all__ + queryset.__all__ + | ||||
|            transform.__all__ + visitor.__all__) | ||||
							
								
								
									
										1504
									
								
								mongoengine/queryset/base.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1504
									
								
								mongoengine/queryset/base.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										86
									
								
								mongoengine/queryset/field_list.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										86
									
								
								mongoengine/queryset/field_list.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,86 @@ | ||||
|  | ||||
| __all__ = ('QueryFieldList',) | ||||
|  | ||||
|  | ||||
| class QueryFieldList(object): | ||||
|     """Object that handles combinations of .only() and .exclude() calls""" | ||||
|     ONLY = 1 | ||||
|     EXCLUDE = 0 | ||||
|  | ||||
|     def __init__(self, fields=None, value=ONLY, always_include=None, _only_called=False): | ||||
|         """The QueryFieldList builder | ||||
|  | ||||
|         :param fields: A list of fields used in `.only()` or `.exclude()` | ||||
|         :param value: How to handle the fields; either `ONLY` or `EXCLUDE` | ||||
|         :param always_include: Any fields to always_include eg `_cls` | ||||
|         :param _only_called: Has `.only()` been called?  If so its a set of fields | ||||
|            otherwise it performs a union. | ||||
|         """ | ||||
|         self.value = value | ||||
|         self.fields = set(fields or []) | ||||
|         self.always_include = set(always_include or []) | ||||
|         self._id = None | ||||
|         self._only_called = _only_called | ||||
|         self.slice = {} | ||||
|  | ||||
|     def __add__(self, f): | ||||
|         if isinstance(f.value, dict): | ||||
|             for field in f.fields: | ||||
|                 self.slice[field] = f.value | ||||
|             if not self.fields: | ||||
|                 self.fields = f.fields | ||||
|         elif not self.fields: | ||||
|             self.fields = f.fields | ||||
|             self.value = f.value | ||||
|             self.slice = {} | ||||
|         elif self.value is self.ONLY and f.value is self.ONLY: | ||||
|             self._clean_slice() | ||||
|             if self._only_called: | ||||
|                 self.fields = self.fields.union(f.fields) | ||||
|             else: | ||||
|                 self.fields = f.fields | ||||
|         elif self.value is self.EXCLUDE and f.value is self.EXCLUDE: | ||||
|             self.fields = self.fields.union(f.fields) | ||||
|             self._clean_slice() | ||||
|         elif self.value is self.ONLY and f.value is self.EXCLUDE: | ||||
|             self.fields -= f.fields | ||||
|             self._clean_slice() | ||||
|         elif self.value is self.EXCLUDE and f.value is self.ONLY: | ||||
|             self.value = self.ONLY | ||||
|             self.fields = f.fields - self.fields | ||||
|             self._clean_slice() | ||||
|  | ||||
|         if '_id' in f.fields: | ||||
|             self._id = f.value | ||||
|  | ||||
|         if self.always_include: | ||||
|             if self.value is self.ONLY and self.fields: | ||||
|                 if sorted(self.slice.keys()) != sorted(self.fields): | ||||
|                     self.fields = self.fields.union(self.always_include) | ||||
|             else: | ||||
|                 self.fields -= self.always_include | ||||
|  | ||||
|         if getattr(f, '_only_called', False): | ||||
|             self._only_called = True | ||||
|         return self | ||||
|  | ||||
|     def __nonzero__(self): | ||||
|         return bool(self.fields) | ||||
|  | ||||
|     def as_dict(self): | ||||
|         field_list = dict((field, self.value) for field in self.fields) | ||||
|         if self.slice: | ||||
|             field_list.update(self.slice) | ||||
|         if self._id is not None: | ||||
|             field_list['_id'] = self._id | ||||
|         return field_list | ||||
|  | ||||
|     def reset(self): | ||||
|         self.fields = set([]) | ||||
|         self.slice = {} | ||||
|         self.value = self.ONLY | ||||
|  | ||||
|     def _clean_slice(self): | ||||
|         if self.slice: | ||||
|             for field in set(self.slice.keys()) - self.fields: | ||||
|                 del self.slice[field] | ||||
							
								
								
									
										57
									
								
								mongoengine/queryset/manager.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										57
									
								
								mongoengine/queryset/manager.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,57 @@ | ||||
| from functools import partial | ||||
| from mongoengine.queryset.queryset import QuerySet | ||||
|  | ||||
| __all__ = ('queryset_manager', 'QuerySetManager') | ||||
|  | ||||
|  | ||||
class QuerySetManager(object):
    """
    The default QuerySet Manager.

    Custom QuerySet Manager functions can extend this class and users can
    add extra queryset functionality.  Any custom manager methods must accept a
    :class:`~mongoengine.Document` class as its first argument, and a
    :class:`~mongoengine.queryset.QuerySet` as its second argument.

    The method function should return a :class:`~mongoengine.queryset.QuerySet`
    , probably the same one that was passed in, but modified in some way.
    """

    # Optional callable that post-processes each new queryset; set per
    # instance by __init__ when a custom function is supplied.
    get_queryset = None
    # QuerySet class used when the document's meta names no 'queryset_class'.
    default = QuerySet

    def __init__(self, queryset_func=None):
        """Wrap an optional custom queryset function (see queryset_manager)."""
        # A falsy queryset_func leaves the class-level default (None) intact.
        if queryset_func:
            self.get_queryset = queryset_func

    def __get__(self, instance, owner):
        """Descriptor for instantiating a new QuerySet object when
        Document.objects is accessed.
        """
        if instance is not None:
            # Accessed via a document instance rather than the class; managers
            # only operate at class level, so return the descriptor itself.
            return self

        # owner is the document that contains the QuerySetManager
        queryset_class = owner._meta.get('queryset_class', self.default)
        queryset = queryset_class(owner, owner._get_collection())
        if self.get_queryset:
            # Py2 function introspection: dispatch on the custom function's
            # arity (1 arg = queryset only, 2 args = owner + queryset).
            arg_count = self.get_queryset.func_code.co_argcount
            if arg_count == 1:
                queryset = self.get_queryset(queryset)
            elif arg_count == 2:
                queryset = self.get_queryset(owner, queryset)
            else:
                # More parameters: defer the call with owner/queryset pre-bound.
                queryset = partial(self.get_queryset, owner, queryset)
        return queryset
|  | ||||
|  | ||||
def queryset_manager(func):
    """Decorator turning a function into a custom QuerySet manager.

    The decorated function must accept a :class:`~mongoengine.Document` class
    as its first argument and a :class:`~mongoengine.queryset.QuerySet` as its
    second, and should return a :class:`~mongoengine.queryset.QuerySet` -
    typically the one it was given, adjusted in some way.
    """
    return QuerySetManager(queryset_func=func)
							
								
								
									
										157
									
								
								mongoengine/queryset/queryset.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										157
									
								
								mongoengine/queryset/queryset.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,157 @@ | ||||
| from mongoengine.errors import OperationError | ||||
| from mongoengine.queryset.base import (BaseQuerySet, DO_NOTHING, NULLIFY, | ||||
|                                        CASCADE, DENY, PULL) | ||||
|  | ||||
| __all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE', | ||||
|            'DENY', 'PULL') | ||||
|  | ||||
| # The maximum number of items to display in a QuerySet.__repr__ | ||||
| REPR_OUTPUT_SIZE = 20 | ||||
| ITER_CHUNK_SIZE = 100 | ||||
|  | ||||
|  | ||||
class QuerySet(BaseQuerySet):
    """The default queryset, that builds queries and handles a set of results
    returned from a query.

    Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as
    the results.

    Results are memoised in ``_result_cache`` in batches of
    ``ITER_CHUNK_SIZE`` so repeated iteration or ``len()`` does not re-run
    the query.
    """

    # True while the underlying cursor may still yield more documents.
    _has_more = True
    # Memoised result of __len__/count(); None until first computed.
    _len = None
    # Documents fetched so far; None until the first fetch populates it.
    _result_cache = None

    def __iter__(self):
        """Iteration utilises a results cache which iterates the cursor
        in batches of ``ITER_CHUNK_SIZE``.

        If ``self._has_more`` the cursor hasn't been exhausted so cache then
        batch.  Otherwise iterate the result_cache.
        """
        self._iter = True
        if self._has_more:
            return self._iter_results()

        # iterating over the cache.
        return iter(self._result_cache)

    def __len__(self):
        """Since __len__ is called quite frequently (for example, as part of
        list(qs) we populate the result cache and cache the length.
        """
        if self._len is not None:
            return self._len
        if self._has_more:
            # populate the cache
            list(self._iter_results())

        self._len = len(self._result_cache)
        return self._len

    def __repr__(self):
        """Provides the string representation of the QuerySet
        """
        if self._iter:
            return '.. queryset mid-iteration ..'

        self._populate_cache()
        # One element past the display limit is kept so truncation can be
        # signalled without consuming the whole cursor.
        data = self._result_cache[:REPR_OUTPUT_SIZE + 1]
        if len(data) > REPR_OUTPUT_SIZE:
            data[-1] = "...(remaining elements truncated)..."
        return repr(data)


    def _iter_results(self):
        """A generator for iterating over the result cache.

        Also populates the cache if there are more possible results to yield.
        Raises StopIteration when there are no more results"""
        if self._result_cache is None:
            self._result_cache = []
        pos = 0
        while True:
            upper = len(self._result_cache)
            while pos < upper:
                yield self._result_cache[pos]
                pos = pos + 1
            if not self._has_more:
                # NOTE(review): explicit StopIteration inside a generator is a
                # Py2 idiom; under PEP 479 (Py3.7+) this must become `return`.
                raise StopIteration
            if len(self._result_cache) <= pos:
                self._populate_cache()

    def _populate_cache(self):
        """
        Populates the result cache with ``ITER_CHUNK_SIZE`` more entries
        (until the cursor is exhausted).
        """
        if self._result_cache is None:
            self._result_cache = []
        if self._has_more:
            try:
                # self.next() advances the wrapped cursor (Py2 iterator API,
                # inherited from BaseQuerySet).
                for i in xrange(ITER_CHUNK_SIZE):
                    self._result_cache.append(self.next())
            except StopIteration:
                self._has_more = False

    def count(self, with_limit_and_skip=True):
        """Count the selected elements in the query.

        :param with_limit_and_skip (optional): take any :meth:`limit` or
            :meth:`skip` that has been applied to this cursor into account when
            getting the count
        """
        if with_limit_and_skip is False:
            # Unrestricted counts bypass the cached length entirely.
            return super(QuerySet, self).count(with_limit_and_skip)

        if self._len is None:
            self._len = super(QuerySet, self).count(with_limit_and_skip)

        return self._len

    def no_cache(self):
        """Convert to a non_caching queryset

        Raises OperationError if results have already been cached.

        .. versionadded:: 0.8.3 Convert to non caching queryset
        """
        if self._result_cache is not None:
            raise OperationError("QuerySet already cached")
        return self.clone_into(QuerySetNoCache(self._document, self._collection))
|  | ||||
|  | ||||
class QuerySetNoCache(BaseQuerySet):
    """A non caching QuerySet

    Results are read straight from the cursor on each pass; no result cache
    is kept, so iterating twice issues two reads of the cursor.
    """

    def cache(self):
        """Convert to a caching queryset

        .. versionadded:: 0.8.3 Convert to caching queryset
        """
        return self.clone_into(QuerySet(self._document, self._collection))

    def __repr__(self):
        """Provides the string representation of the QuerySet

        .. versionchanged:: 0.6.13 Now doesnt modify the cursor
        """
        if self._iter:
            return '.. queryset mid-iteration ..'

        data = []
        # Pull one element past the display limit so truncation is detectable.
        for i in xrange(REPR_OUTPUT_SIZE + 1):
            try:
                data.append(self.next())
            except StopIteration:
                break
        if len(data) > REPR_OUTPUT_SIZE:
            data[-1] = "...(remaining elements truncated)..."

        # Reset the cursor so the caller can still iterate from the start.
        self.rewind()
        return repr(data)

    def __iter__(self):
        # If iteration has already started, iterate a clone so concurrent
        # loops don't interfere; always restart from the beginning.
        queryset = self
        if queryset._iter:
            queryset = self.clone()
        queryset.rewind()
        return queryset
							
								
								
									
										343
									
								
								mongoengine/queryset/transform.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										343
									
								
								mongoengine/queryset/transform.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,343 @@ | ||||
| from collections import defaultdict | ||||
|  | ||||
| import pymongo | ||||
| from bson import SON | ||||
|  | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import InvalidQueryError, LookUpError | ||||
|  | ||||
| __all__ = ('query', 'update') | ||||
|  | ||||
|  | ||||
# Operators comparing a field against one or more values.
COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
                        'all', 'size', 'exists', 'not')
# Geospatial operators, both legacy 2d ($within/$near family) and
# 2dsphere/GeoJSON ($geoWithin family) - see _geo_operator below.
GEO_OPERATORS        = ('within_distance', 'within_spherical_distance',
                        'within_box', 'within_polygon', 'near', 'near_sphere',
                        'max_distance', 'geo_within', 'geo_within_box',
                        'geo_within_polygon', 'geo_within_center',
                        'geo_within_sphere', 'geo_intersects')
# String-matching operators, prepared via StringField.prepare_query_value.
STRING_OPERATORS     = ('contains', 'icontains', 'startswith',
                        'istartswith', 'endswith', 'iendswith',
                        'exact', 'iexact')
# Operators with bespoke handling in query() (currently only $elemMatch).
CUSTOM_OPERATORS     = ('match',)
# Every operator recognised at the tail of a query key.
MATCH_OPERATORS      = (COMPARISON_OPERATORS + GEO_OPERATORS +
                        STRING_OPERATORS + CUSTOM_OPERATORS)

# Update-spec prefixes mapped onto mongo $-operators by update() below.
UPDATE_OPERATORS     = ('set', 'unset', 'inc', 'dec', 'pop', 'push',
                        'push_all', 'pull', 'pull_all', 'add_to_set',
                        'set_on_insert')
|  | ||||
|  | ||||
def query(_doc_cls=None, _field_operation=False, **query):
    """Transform a query from Django-style format to Mongo format.

    Keyword keys look like ``<field>[__<field>...][__not][__<operator>]``
    (e.g. ``age__not__gt``); the result is a dict suitable for pymongo.

    :param _doc_cls: optional document class used to resolve field names and
        coerce values via each field's ``prepare_query_value``.
    :param _field_operation: accepted for interface compatibility; unused here.
    :raises InvalidQueryError: when a field lookup on ``_doc_cls`` fails.
    :raises NotImplementedError: for an unhandled custom operator.
    """
    mongo_query = {}
    merge_query = defaultdict(list)
    for key, value in sorted(query.items()):
        if key == "__raw__":
            # Raw mongo syntax is merged in untouched.
            mongo_query.update(value)
            continue

        parts = key.split('__')
        # Remember positional (digit) parts so they can be re-inserted after
        # field-name resolution, e.g. ``tags__0__name``.
        indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
        parts = [part for part in parts if not part.isdigit()]
        # Check for an operator and transform to mongo-style if there is
        op = None
        if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
            op = parts.pop()

        # A trailing ``not`` negates the operator, e.g. ``age__not__gt``.
        negate = False
        if len(parts) > 1 and parts[-1] == 'not':
            parts.pop()
            negate = True

        if _doc_cls:
            # Switch field names to proper names [set in Field(name='foo')]
            try:
                fields = _doc_cls._lookup_field(parts)
            except Exception as e:
                raise InvalidQueryError(e)
            parts = []

            cleaned_fields = []
            for field in fields:
                append_field = True
                if isinstance(field, basestring):
                    parts.append(field)
                    append_field = False
                else:
                    parts.append(field.db_field)
                if append_field:
                    cleaned_fields.append(field)

            # Convert value to proper value
            field = cleaned_fields[-1]

            singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not']
            singular_ops += STRING_OPERATORS
            if op in singular_ops:
                if isinstance(field, basestring):
                    if (op in STRING_OPERATORS and
                       isinstance(value, basestring)):
                        StringField = _import_class('StringField')
                        value = StringField.prepare_query_value(op, value)
                    else:
                        # NOTE(review): assigns the field *name* as the value;
                        # looks intentional for positional lookups - confirm.
                        value = field
                else:
                    value = field.prepare_query_value(op, value)
            elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
                # 'in', 'nin' and 'all' require a list of values
                value = [field.prepare_query_value(op, v) for v in value]

        if op:
            if op in GEO_OPERATORS:
                value = _geo_operator(field, op, value)
            elif op in CUSTOM_OPERATORS:
                if op == 'match':
                    value = field.prepare_query_value(op, value)
                    value = {"$elemMatch": value}
                else:
                    # Fixed: this exception used to be constructed but never
                    # raised, silently ignoring unknown custom operators.
                    raise NotImplementedError("Custom method '%s' has not "
                                              "been implemented" % op)
            elif op not in STRING_OPERATORS:
                value = {'$' + op: value}

        if negate:
            value = {'$not': value}

        # Re-insert any positional indices, then build the dotted key.
        for i, part in indices:
            parts.insert(i, part)
        key = '.'.join(parts)
        if op is None or key not in mongo_query:
            mongo_query[key] = value
        elif isinstance(mongo_query[key], dict):
            # Several operators on the same field merge into one sub-document.
            mongo_query[key].update(value)
            # $maxDistance needs to come last - convert to SON
            if '$maxDistance' in mongo_query[key]:
                value_dict = mongo_query[key]
                value_son = SON()
                for k, v in value_dict.items():
                    if k == '$maxDistance':
                        continue
                    value_son[k] = v
                value_son['$maxDistance'] = value_dict['$maxDistance']
                mongo_query[key] = value_son
        else:
            # Non-dict condition clashing on the same key: store it for the
            # manual $and merge below.
            merge_query[key].append(value)

    # The queryset has been filtered in such a way we must manually merge
    for k, v in merge_query.items():
        merge_query[k].append(mongo_query[k])
        del mongo_query[k]
        if isinstance(v, list):
            # NOTE(review): appending a *list* into an existing $and nests it;
            # extend() may be intended - confirm against the query engine.
            value = [{k: val} for val in v]
            if '$and' in mongo_query:
                mongo_query['$and'].append(value)
            else:
                mongo_query['$and'] = value

    return mongo_query
|  | ||||
|  | ||||
def update(_doc_cls=None, **update):
    """Transform an update spec from Django-style format to Mongo format.

    Keys look like ``<op>__<field>[__<subfield>...]`` (e.g. ``inc__count``);
    the result maps mongo update operators (``$set``, ``$inc``, ...) to
    field/value documents.

    :param _doc_cls: optional document class used to resolve field names and
        coerce values via each field's ``prepare_query_value``.
    :raises InvalidQueryError: if no operator is supplied, a field lookup
        fails, or ``pull_all`` is used with a dotted path.
    """
    mongo_update = {}
    for key, value in update.items():
        if key == "__raw__":
            # Raw mongo update syntax is merged in untouched.
            mongo_update.update(value)
            continue
        parts = key.split('__')
        # Check for an operator and transform to mongo-style if there is
        op = None
        if parts[0] in UPDATE_OPERATORS:
            op = parts.pop(0)
            # Convert Pythonic names to Mongo equivalents
            if op in ('push_all', 'pull_all'):
                op = op.replace('_all', 'All')
            elif op == 'dec':
                # Support decrement by flipping a positive value's sign
                # and using 'inc'
                op = 'inc'
                if value > 0:
                    value = -value
            elif op == 'add_to_set':
                op = 'addToSet'
            elif op == 'set_on_insert':
                op = "setOnInsert"

        # A trailing comparison operator becomes a match condition on the
        # value, e.g. ``pull__sizes__gt=5`` -> {'sizes': {'$gt': 5}}.
        match = None
        if parts[-1] in COMPARISON_OPERATORS:
            match = parts.pop()

        if _doc_cls:
            # Switch field names to proper names [set in Field(name='foo')]
            try:
                fields = _doc_cls._lookup_field(parts)
            except Exception, e:
                raise InvalidQueryError(e)
            parts = []

            cleaned_fields = []
            appended_sub_field = False
            for field in fields:
                append_field = True
                if isinstance(field, basestring):
                    # Convert the S operator to $
                    if field == 'S':
                        field = '$'
                    parts.append(field)
                    append_field = False
                else:
                    parts.append(field.db_field)
                if append_field:
                    appended_sub_field = False
                    cleaned_fields.append(field)
                    if hasattr(field, 'field'):
                        # Container fields expose their item type as .field;
                        # track it so values coerce against the right type.
                        cleaned_fields.append(field.field)
                        appended_sub_field = True

            # Convert value to proper value
            if appended_sub_field:
                field = cleaned_fields[-2]
            else:
                field = cleaned_fields[-1]

            GeoJsonBaseField = _import_class("GeoJsonBaseField")
            if isinstance(field, GeoJsonBaseField):
                value = field.to_mongo(value)

            if op in (None, 'set', 'push', 'pull'):
                if field.required or value is not None:
                    value = field.prepare_query_value(op, value)
            elif op in ('pushAll', 'pullAll'):
                value = [field.prepare_query_value(op, v) for v in value]
            elif op in ('addToSet', 'setOnInsert'):
                if isinstance(value, (list, tuple, set)):
                    value = [field.prepare_query_value(op, v) for v in value]
                elif field.required or value is not None:
                    value = field.prepare_query_value(op, value)
            elif op == "unset":
                # $unset ignores the supplied value; mongo convention is 1.
                value = 1

        if match:
            match = '$' + match
            value = {match: value}

        key = '.'.join(parts)

        if not op:
            raise InvalidQueryError("Updates must supply an operation "
                                    "eg: set__FIELD=value")

        if 'pull' in op and '.' in key:
            # Dot operators don't work on pull operations
            # unless they point to a list field
            # Otherwise it uses nested dict syntax
            if op == 'pullAll':
                raise InvalidQueryError("pullAll operations only support "
                                        "a single field depth")

            # Look for the last list field and use dot notation until there
            # NOTE(review): cleaned_fields is only bound when _doc_cls is
            # given - a dotted pull without _doc_cls would NameError; confirm
            # callers always pass the document class.
            field_classes = [c.__class__ for c in cleaned_fields]
            field_classes.reverse()
            ListField = _import_class('ListField')
            if ListField in field_classes:
                # Join all fields via dot notation to the last ListField
                # Then process as normal
                last_listField = len(cleaned_fields) - field_classes.index(ListField)
                key = ".".join(parts[:last_listField])
                parts = parts[last_listField:]
                parts.insert(0, key)

            # Wrap the value in nested dicts, innermost path element first.
            parts.reverse()
            for key in parts:
                value = {key: value}
        elif op == 'addToSet' and isinstance(value, list):
            # Adding several values at once requires $each.
            value = {key: {"$each": value}}
        else:
            value = {key: value}
        key = '$' + op

        if key not in mongo_update:
            mongo_update[key] = value
        elif key in mongo_update and isinstance(mongo_update[key], dict):
            # Multiple specs under the same operator merge into one document.
            mongo_update[key].update(value)

    return mongo_update
|  | ||||
|  | ||||
def _geo_operator(field, op, value):
    """Helper to return the query for a given geo query"""
    # Legacy coordinate-pair (2d) indexes use the $within/$near family.
    if field._geo_index == pymongo.GEO2D:
        within_shapes = {
            'within_distance': '$center',
            'within_spherical_distance': '$centerSphere',
            'within_polygon': '$polygon',
            'within_box': '$box',
        }
        if op in within_shapes:
            return {'$within': {within_shapes[op]: value}}
        if op == 'near':
            return {'$near': value}
        if op == 'near_sphere':
            return {'$nearSphere': value}
        if op == 'max_distance':
            return {'$maxDistance': value}
        raise NotImplementedError("Geo method '%s' has not "
                                  "been implemented for a GeoPointField" % op)

    # Everything else is a 2dsphere-style index: $geoWithin family plus
    # GeoJSON geometries inferred from the raw value.
    geo_within_shapes = {
        'geo_within_box': '$box',
        'geo_within_polygon': '$polygon',
        'geo_within_center': '$center',
        'geo_within_sphere': '$centerSphere',
    }
    if op in geo_within_shapes:
        return {"$geoWithin": {geo_within_shapes[op]: value}}
    if op == 'geo_within':
        return {"$geoWithin": _infer_geometry(value)}
    if op == 'geo_intersects':
        return {"$geoIntersects": _infer_geometry(value)}
    if op == 'near':
        return {'$near': _infer_geometry(value)}
    if op == 'max_distance':
        return {'$maxDistance': value}
    raise NotImplementedError("Geo method '%s' has not "
                              "been implemented for a %s " % (op, field._name))
|  | ||||
|  | ||||
| def _infer_geometry(value): | ||||
|     """Helper method that tries to infer the $geometry shape for a given value""" | ||||
|     if isinstance(value, dict): | ||||
|         if "$geometry" in value: | ||||
|             return value | ||||
|         elif 'coordinates' in value and 'type' in value: | ||||
|             return {"$geometry": value} | ||||
|         raise InvalidQueryError("Invalid $geometry dictionary should have " | ||||
|                                 "type and coordinates keys") | ||||
|     elif isinstance(value, (list, set)): | ||||
|         try: | ||||
|             value[0][0][0] | ||||
|             return {"$geometry": {"type": "Polygon", "coordinates": value}} | ||||
|         except: | ||||
|             pass | ||||
|         try: | ||||
|             value[0][0] | ||||
|             return {"$geometry": {"type": "LineString", "coordinates": value}} | ||||
|         except: | ||||
|             pass | ||||
|         try: | ||||
|             value[0] | ||||
|             return {"$geometry": {"type": "Point", "coordinates": value}} | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
|     raise InvalidQueryError("Invalid $geometry data. Can be either a dictionary " | ||||
|                             "or (nested) lists of coordinate(s)") | ||||
							
								
								
									
										161
									
								
								mongoengine/queryset/visitor.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										161
									
								
								mongoengine/queryset/visitor.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,161 @@ | ||||
| import copy | ||||
|  | ||||
| from mongoengine.errors import InvalidQueryError | ||||
| from mongoengine.python_support import product, reduce | ||||
|  | ||||
| from mongoengine.queryset import transform | ||||
|  | ||||
| __all__ = ('Q',) | ||||
|  | ||||
|  | ||||
class QNodeVisitor(object):
    """Base visitor for walking Q-object query trees.

    Subclasses override whichever hooks they care about; the defaults are
    identity transforms, returning each node unchanged.
    """

    def visit_query(self, query):
        """Hook invoked for (New)Q leaf nodes."""
        return query

    def visit_combination(self, combination):
        """Hook invoked for QCombination nodes."""
        return combination
|  | ||||
|  | ||||
class DuplicateQueryConditionsError(InvalidQueryError):
    """Raised when ANDed query dicts apply the same key twice, meaning they
    cannot be flattened into a single query."""
    pass
|  | ||||
|  | ||||
class SimplificationVisitor(QNodeVisitor):
    """Simplifies query trees by collapsing an 'and' combination of plain
    Q objects into a single merged Q object.
    """

    def visit_combination(self, combination):
        """Try to flatten an AND of simple Q nodes into one Q."""
        if combination.operation != combination.AND:
            return combination
        # The simplification only applies when every child is a plain Q.
        if not all(isinstance(child, Q) for child in combination.children):
            return combination
        try:
            merged = self._query_conjunction(
                [child.query for child in combination.children])
        except DuplicateQueryConditionsError:
            # A key clash means the children cannot be merged.
            return combination
        return Q(**merged)

    def _query_conjunction(self, queries):
        """AND together a list of query dicts, refusing duplicate keys."""
        seen_ops = set()
        combined = {}
        for query in queries:
            keys = set(query)
            # The same operation may not target a single field twice.
            if keys & seen_ops:
                raise DuplicateQueryConditionsError()
            seen_ops |= keys
            combined.update(copy.deepcopy(query))
        return combined
|  | ||||
|  | ||||
class QueryCompilerVisitor(QNodeVisitor):
    """Compiles the nodes in a query tree to a PyMongo-compatible query
    dictionary.
    """

    def __init__(self, document):
        # Document class used to resolve field names during transformation.
        self.document = document

    def visit_combination(self, combination):
        """Render an AND/OR node as a $and/$or clause over its children."""
        if combination.operation == combination.OR:
            return {"$or": combination.children}
        return {"$and": combination.children}

    def visit_query(self, query):
        """Translate a leaf Q node's keyword conditions into mongo syntax."""
        return transform.query(self.document, **query.query)
|  | ||||
|  | ||||
class QNode(object):
    """Base class for nodes in query trees."""

    AND = 0
    OR = 1

    def to_query(self, document):
        """Simplify this tree, then compile it against *document*."""
        simplified = self.accept(SimplificationVisitor())
        return simplified.accept(QueryCompilerVisitor(document))

    def accept(self, visitor):
        """Dispatch *visitor* over this node; subclasses must implement."""
        raise NotImplementedError

    def _combine(self, other, operation):
        """Join this node with *other* under *operation*, dropping empties."""
        if getattr(other, 'empty', True):
            # Nothing meaningful on the right-hand side - keep this node.
            return self
        if self.empty:
            return other
        return QCombination(operation, [self, other])

    @property
    def empty(self):
        # Plain nodes always contribute to the query.
        return False

    def __and__(self, other):
        return self._combine(other, self.AND)

    def __or__(self, other):
        return self._combine(other, self.OR)
|  | ||||
|  | ||||
class QCombination(QNode):
    """A boolean combination (AND/OR) of several query-tree nodes."""

    def __init__(self, operation, children):
        self.operation = operation
        self.children = []
        for child in children:
            # Same-operation combinations are flattened into this node so
            # the tree stays shallow.
            if isinstance(child, QCombination) and child.operation == operation:
                self.children.extend(child.children)
            else:
                self.children.append(child)

    def accept(self, visitor):
        """Visit every child node in place, then this combination itself."""
        for idx, child in enumerate(self.children):
            if isinstance(child, QNode):
                self.children[idx] = child.accept(visitor)

        return visitor.visit_combination(self)

    @property
    def empty(self):
        # A combination with no children contributes nothing to the query.
        return len(self.children) == 0
|  | ||||
|  | ||||
class Q(QNode):
    """A simple query object, used in a query tree to build up more complex
    query structures.
    """

    def __init__(self, **query):
        # Store the raw keyword-argument query; a visitor compiles it later.
        self.query = query

    def accept(self, visitor):
        return visitor.visit_query(self)

    @property
    def empty(self):
        """True when no query keyword arguments were supplied."""
        return len(self.query) == 0
| @@ -1,7 +1,7 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| __all__ = ['pre_init', 'post_init', 'pre_save', 'post_save', | ||||
|            'pre_delete', 'post_delete'] | ||||
| __all__ = ['pre_init', 'post_init', 'pre_save', 'pre_save_post_validation', | ||||
|            'post_save', 'pre_delete', 'post_delete'] | ||||
|  | ||||
| signals_available = False | ||||
| try: | ||||
| @@ -39,6 +39,7 @@ _signals = Namespace() | ||||
| pre_init = _signals.signal('pre_init') | ||||
| post_init = _signals.signal('post_init') | ||||
| pre_save = _signals.signal('pre_save') | ||||
| pre_save_post_validation = _signals.signal('pre_save_post_validation') | ||||
| post_save = _signals.signal('post_save') | ||||
| pre_delete = _signals.signal('pre_delete') | ||||
| post_delete = _signals.signal('post_delete') | ||||
|   | ||||
| @@ -1,59 +0,0 @@ | ||||
| from mongoengine.connection import get_db | ||||
|  | ||||
|  | ||||
class query_counter(object):
    """ Query_counter contextmanager to get the number of queries. """

    def __init__(self):
        """ Construct the query_counter. """
        # Running offset subtracted from the raw profile count; starts at
        # zero before any count is taken (see _get_count).
        self.counter = 0
        self.db = get_db()

    def __enter__(self):
        """ On every with block we need to drop the profile collection. """
        # Profiling must be disabled (level 0) before system.profile can be
        # dropped; level 2 then records every subsequent operation.
        self.db.set_profiling_level(0)
        self.db.system.profile.drop()
        self.db.set_profiling_level(2)
        return self

    def __exit__(self, t, value, traceback):
        """ Reset the profiling level. """
        self.db.set_profiling_level(0)

    def __eq__(self, value):
        """ == Compare querycounter. """
        return value == self._get_count()

    def __ne__(self, value):
        """ != Compare querycounter. """
        return not self.__eq__(value)

    def __lt__(self, value):
        """ < Compare querycounter. """
        return self._get_count() < value

    def __le__(self, value):
        """ <= Compare querycounter. """
        return self._get_count() <= value

    def __gt__(self, value):
        """ > Compare querycounter. """
        return self._get_count() > value

    def __ge__(self, value):
        """ >= Compare querycounter. """
        return self._get_count() >= value

    def __int__(self):
        """ int representation. """
        return self._get_count()

    def __repr__(self):
        """ repr query_counter as the number of queries. """
        return u"%s" % self._get_count()

    def _get_count(self):
        """ Get the number of queries. """
        # NOTE(review): the offset is bumped after every read — presumably
        # because the find() issued here is itself recorded in the profile,
        # so later reads must subtract the counter's own queries. Confirm
        # against the MongoDB profiler behavior.
        count = self.db.system.profile.find().count() - self.counter
        self.counter += 1
        return count
| @@ -5,7 +5,7 @@ | ||||
| %define srcname mongoengine | ||||
|  | ||||
| Name:           python-%{srcname} | ||||
| Version:        0.7.9 | ||||
| Version:        0.8.7 | ||||
| Release:        1%{?dist} | ||||
| Summary:        A Python Document-Object Mapper for working with MongoDB | ||||
|  | ||||
| @@ -51,4 +51,4 @@ rm -rf $RPM_BUILD_ROOT | ||||
| # %{python_sitearch}/* | ||||
|  | ||||
| %changelog | ||||
| * See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html | ||||
| * See: http://docs.mongoengine.org/en/latest/changelog.html | ||||
| @@ -8,4 +8,4 @@ detailed-errors = 1 | ||||
| #cover-package = mongoengine | ||||
| py3where = build | ||||
| where = tests | ||||
| #tests =  test_bugfix.py | ||||
| #tests =  document/__init__.py | ||||
							
								
								
									
										19
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										19
									
								
								setup.py
									
									
									
									
									
								
							| @@ -8,8 +8,8 @@ try: | ||||
| except ImportError: | ||||
|     pass | ||||
|  | ||||
| DESCRIPTION = """MongoEngine is a Python Object-Document | ||||
| Mapper for working with MongoDB.""" | ||||
| DESCRIPTION = 'MongoEngine is a Python Object-Document ' + \ | ||||
| 'Mapper for working with MongoDB.' | ||||
| LONG_DESCRIPTION = None | ||||
| try: | ||||
|     LONG_DESCRIPTION = open('README.rst').read() | ||||
| @@ -38,7 +38,6 @@ CLASSIFIERS = [ | ||||
|     'Operating System :: OS Independent', | ||||
|     'Programming Language :: Python', | ||||
|     "Programming Language :: Python :: 2", | ||||
|     "Programming Language :: Python :: 2.5", | ||||
|     "Programming Language :: Python :: 2.6", | ||||
|     "Programming Language :: Python :: 2.7", | ||||
|     "Programming Language :: Python :: 3", | ||||
| @@ -49,17 +48,15 @@ CLASSIFIERS = [ | ||||
|     'Topic :: Software Development :: Libraries :: Python Modules', | ||||
| ] | ||||
|  | ||||
| extra_opts = {} | ||||
| extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])} | ||||
| if sys.version_info[0] == 3: | ||||
|     extra_opts['use_2to3'] = True | ||||
|     extra_opts['tests_require'] = ['nose', 'coverage', 'blinker'] | ||||
|     extra_opts['packages'] = find_packages(exclude=('tests',)) | ||||
|     extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'django>=1.5.1'] | ||||
|     if "test" in sys.argv or "nosetests" in sys.argv: | ||||
|         extra_opts['packages'].append("tests") | ||||
|         extra_opts['package_data'] = {"tests": ["mongoengine.png"]} | ||||
|         extra_opts['packages'] = find_packages() | ||||
|         extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]} | ||||
| else: | ||||
|     extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL'] | ||||
|     extra_opts['packages'] = find_packages(exclude=('tests',)) | ||||
|     extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2>=2.6', 'python-dateutil'] | ||||
|  | ||||
| setup(name='mongoengine', | ||||
|       version=VERSION, | ||||
| @@ -75,7 +72,7 @@ setup(name='mongoengine', | ||||
|       long_description=LONG_DESCRIPTION, | ||||
|       platforms=['any'], | ||||
|       classifiers=CLASSIFIERS, | ||||
|       install_requires=['pymongo'], | ||||
|       install_requires=['pymongo>=2.5'], | ||||
|       test_suite='nose.collector', | ||||
|       **extra_opts | ||||
| ) | ||||
|   | ||||
| @@ -0,0 +1,5 @@ | ||||
| from all_warnings import AllWarnings | ||||
| from document import * | ||||
| from queryset import * | ||||
| from fields import * | ||||
| from migration import * | ||||
|   | ||||
							
								
								
									
										44
									
								
								tests/all_warnings/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										44
									
								
								tests/all_warnings/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,44 @@ | ||||
| """ | ||||
| This test has been put into a module.  This is because it tests warnings that | ||||
only get triggered on first hit.  This way we can ensure it's imported into the
| top level and called first by the test suite. | ||||
| """ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
| import warnings | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
|  | ||||
| __all__ = ('AllWarnings', ) | ||||
|  | ||||
|  | ||||
class AllWarnings(unittest.TestCase):

    def setUp(self):
        # Capture warnings instead of printing them, so the test can
        # inspect what was emitted while the classes below are defined.
        connect(db='mongoenginetest')
        self.warning_list = []
        self.showwarning_default = warnings.showwarning
        warnings.showwarning = self.append_to_warning_list

    def append_to_warning_list(self, message, category, *args):
        # Replacement for warnings.showwarning: record only the message
        # and category; extra positional arguments are ignored.
        self.warning_list.append({"message": message,
                                  "category": category})

    def tearDown(self):
        # restore default handling of warnings
        warnings.showwarning = self.showwarning_default

    def test_document_collection_syntax_warning(self):
        # Declaring a custom collection on a subclass of a non-abstract
        # base should emit a SyntaxWarning and keep the base's collection
        # name ('non_abstract_base') rather than using 'fail'.

        class NonAbstractBase(Document):
            meta = {'allow_inheritance': True}

        class InheritedDocumentFailTest(NonAbstractBase):
            meta = {'collection': 'fail'}

        warning = self.warning_list[0]
        self.assertEqual(SyntaxWarning, warning["category"])
        self.assertEqual('non_abstract_base',
                         InheritedDocumentFailTest._get_collection_name())
							
								
								
									
										15
									
								
								tests/document/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										15
									
								
								tests/document/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,15 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
|  | ||||
| from class_methods import * | ||||
| from delta import * | ||||
| from dynamic import * | ||||
| from indexes import * | ||||
| from inheritance import * | ||||
| from instance import * | ||||
| from json_serialisation import * | ||||
| from validation import * | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
							
								
								
									
										352
									
								
								tests/document/class_methods.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										352
									
								
								tests/document/class_methods.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,352 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
| from mongoengine.queryset import NULLIFY, PULL | ||||
| from mongoengine.connection import get_db | ||||
|  | ||||
| __all__ = ("ClassMethodsTest", ) | ||||
|  | ||||
|  | ||||
class ClassMethodsTest(unittest.TestCase):
    """Tests for Document class-level helpers: db/collection lookup and
    naming, index comparison/listing, and delete-rule registration."""

    def setUp(self):
        # Connect to the test database and define a simple inheritable
        # Person document used by most of the tests below.
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        # Drop every collection a test created, leaving the database's own
        # system.* collections untouched.
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_definition(self):
        """Ensure that document may be defined using fields.
        """
        self.assertEqual(['age', 'id', 'name'],
                         sorted(self.Person._fields.keys()))
        self.assertEqual(["IntField", "ObjectIdField", "StringField"],
                        sorted([x.__class__.__name__ for x in
                                self.Person._fields.values()]))

    def test_get_db(self):
        """Ensure that get_db returns the expected db.
        """
        db = self.Person._get_db()
        self.assertEqual(self.db, db)

    def test_get_collection_name(self):
        """Ensure that get_collection_name returns the expected collection
        name.
        """
        collection_name = 'person'
        self.assertEqual(collection_name, self.Person._get_collection_name())

    def test_get_collection(self):
        """Ensure that get_collection returns the expected collection.
        """
        collection_name = 'person'
        collection = self.Person._get_collection()
        self.assertEqual(self.db[collection_name], collection)

    def test_drop_collection(self):
        """Ensure that the collection may be dropped from the database.
        """
        collection_name = 'person'
        self.Person(name='Test').save()
        self.assertTrue(collection_name in self.db.collection_names())

        self.Person.drop_collection()
        self.assertFalse(collection_name in self.db.collection_names())

    def test_register_delete_rule(self):
        """Ensure that register delete rule adds a delete rule to the document
        meta.
        """
        class Job(Document):
            employee = ReferenceField(self.Person)

        self.assertEqual(self.Person._meta.get('delete_rules'), None)

        self.Person.register_delete_rule(Job, 'employee', NULLIFY)
        self.assertEqual(self.Person._meta['delete_rules'],
                         {(Job, 'employee'): NULLIFY})

    def test_compare_indexes(self):
        """ Ensure that the indexes are properly created and that
        compare_indexes identifies the missing/extra indexes
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()
            tags = StringField()

            meta = {
                'indexes': [('author', 'title')]
            }

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })

        # An index created outside of meta shows up as 'extra'.
        BlogPost.ensure_index(['author', 'description'])
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('author', 1), ('description', 1)]] })

        BlogPost._get_collection().drop_index('author_1_description_1')
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })

        # Dropping an index declared in meta makes it 'missing'.
        BlogPost._get_collection().drop_index('author_1_title_1')
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('author', 1), ('title', 1)]], 'extra': [] })

    def test_compare_indexes_inheritance(self):
        """ Ensure that the indexes are properly created and that
        compare_indexes identifies the missing/extra indexes for subclassed
        documents (_cls included)
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {
                'allow_inheritance': True
            }

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {
                'indexes': [('author', 'tags')]
            }

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })

        # Subclass indexes are prefixed with the _cls discriminator key.
        BlogPostWithTags.ensure_index(['author', 'tag_list'])
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]] })

        BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1')
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })

        BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1')
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': [] })

    def test_compare_indexes_multiple_subclasses(self):
        """ Ensure that compare_indexes behaves correctly if called from a
        class, which base class has multiple subclasses
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {
                'allow_inheritance': True
            }

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {
                'indexes': [('author', 'tags')]
            }

        class BlogPostWithCustomField(BlogPost):
            custom = DictField()

            meta = {
                'indexes': [('author', 'custom')]
            }

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        BlogPostWithCustomField.ensure_indexes()

        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
        self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] })
        self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] })

    def test_list_indexes_inheritance(self):
        """ ensure that all of the indexes are listed regardless of the super-
        or sub-class that we call it from
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {
                'allow_inheritance': True
            }

        class BlogPostWithTags(BlogPost):
            tags = StringField()

            meta = {
                'indexes': [('author', 'tags')]
            }

        class BlogPostWithTagsAndExtraText(BlogPostWithTags):
            extra_text = StringField()

            meta = {
                'indexes': [('author', 'tags', 'extra_text')]
            }

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        BlogPostWithTagsAndExtraText.ensure_indexes()

        # list_indexes must return the same set from any class in the
        # hierarchy, since they all share one collection.
        self.assertEqual(BlogPost.list_indexes(),
                         BlogPostWithTags.list_indexes())
        self.assertEqual(BlogPost.list_indexes(),
                         BlogPostWithTagsAndExtraText.list_indexes())
        self.assertEqual(BlogPost.list_indexes(),
                         [[('_cls', 1), ('author', 1), ('tags', 1)],
                         [('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)],
                         [(u'_id', 1)], [('_cls', 1)]])

    def test_register_delete_rule_inherited(self):
        # The PULL delete rule declared on Animal.vaccine_made must be
        # registered for the subclass Cat as well.

        class Vaccine(Document):
            name = StringField(required=True)

            meta = {"indexes": ["name"]}

        class Animal(Document):
            family = StringField(required=True)
            vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL))

            meta = {"allow_inheritance": True, "indexes": ["family"]}

        class Cat(Animal):
            name = StringField(required=True)

        self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL)
        self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL)

    def test_collection_naming(self):
        """Ensure that a collection with a specified name may be used.
        """

        class DefaultNamingTest(Document):
            pass
        self.assertEqual('default_naming_test',
                         DefaultNamingTest._get_collection_name())

        class CustomNamingTest(Document):
            meta = {'collection': 'pimp_my_collection'}

        self.assertEqual('pimp_my_collection',
                         CustomNamingTest._get_collection_name())

        class DynamicNamingTest(Document):
            meta = {'collection': lambda c: "DYNAMO"}
        self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name())

        # Use Abstract class to handle backwards compatibility
        class BaseDocument(Document):
            meta = {
                'abstract': True,
                'collection': lambda c: c.__name__.lower()
            }

        class OldNamingConvention(BaseDocument):
            pass
        self.assertEqual('oldnamingconvention',
                         OldNamingConvention._get_collection_name())

        class InheritedAbstractNamingTest(BaseDocument):
            meta = {'collection': 'wibble'}
        self.assertEqual('wibble',
                         InheritedAbstractNamingTest._get_collection_name())

        # Mixin tests
        class BaseMixin(object):
            meta = {
                'collection': lambda c: c.__name__.lower()
            }

        class OldMixinNamingConvention(Document, BaseMixin):
            pass
        self.assertEqual('oldmixinnamingconvention',
                          OldMixinNamingConvention._get_collection_name())

        class BaseMixin(object):
            meta = {
                'collection': lambda c: c.__name__.lower()
            }

        class BaseDocument(Document, BaseMixin):
            meta = {'allow_inheritance': True}

        class MyDocument(BaseDocument):
            pass

        # A concrete (non-abstract) base fixes the collection name for all
        # of its subclasses.
        self.assertEqual('basedocument', MyDocument._get_collection_name())

    def test_custom_collection_name_operations(self):
        """Ensure that a collection with a specified name is used as expected.
        """
        collection_name = 'personCollTest'

        class Person(Document):
            name = StringField()
            meta = {'collection': collection_name}

        Person(name="Test User").save()
        self.assertTrue(collection_name in self.db.collection_names())

        user_obj = self.db[collection_name].find_one()
        self.assertEqual(user_obj['name'], "Test User")

        user_obj = Person.objects[0]
        self.assertEqual(user_obj.name, "Test User")

        Person.drop_collection()
        self.assertFalse(collection_name in self.db.collection_names())

    def test_collection_name_and_primary(self):
        """Ensure that a collection with a specified name may be used.
        """

        class Person(Document):
            name = StringField(primary_key=True)
            meta = {'collection': 'app'}

        Person(name="Test User").save()

        user_obj = Person.objects.first()
        self.assertEqual(user_obj.name, "Test User")

        Person.drop_collection()
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
							
								
								
									
										739
									
								
								tests/document/delta.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										739
									
								
								tests/document/delta.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,739 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
|  | ||||
| from bson import SON | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
|  | ||||
| __all__ = ("DeltaTest",) | ||||
|  | ||||
|  | ||||
| class DeltaTest(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|  | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             age = IntField() | ||||
|  | ||||
|             non_field = True | ||||
|  | ||||
|             meta = {"allow_inheritance": True} | ||||
|  | ||||
|         self.Person = Person | ||||
|  | ||||
|     def tearDown(self): | ||||
|         for collection in self.db.collection_names(): | ||||
|             if 'system.' in collection: | ||||
|                 continue | ||||
|             self.db.drop_collection(collection) | ||||
|  | ||||
|     def test_delta(self): | ||||
|         self.delta(Document) | ||||
|         self.delta(DynamicDocument) | ||||
|  | ||||
|     def delta(self, DocClass): | ||||
|  | ||||
|         class Doc(DocClass): | ||||
|             string_field = StringField() | ||||
|             int_field = IntField() | ||||
|             dict_field = DictField() | ||||
|             list_field = ListField() | ||||
|  | ||||
|         Doc.drop_collection() | ||||
|         doc = Doc() | ||||
|         doc.save() | ||||
|  | ||||
|         doc = Doc.objects.first() | ||||
|         self.assertEqual(doc._get_changed_fields(), []) | ||||
|         self.assertEqual(doc._delta(), ({}, {})) | ||||
|  | ||||
|         doc.string_field = 'hello' | ||||
|         self.assertEqual(doc._get_changed_fields(), ['string_field']) | ||||
|         self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {})) | ||||
|  | ||||
|         doc._changed_fields = [] | ||||
|         doc.int_field = 1 | ||||
|         self.assertEqual(doc._get_changed_fields(), ['int_field']) | ||||
|         self.assertEqual(doc._delta(), ({'int_field': 1}, {})) | ||||
|  | ||||
|         doc._changed_fields = [] | ||||
|         dict_value = {'hello': 'world', 'ping': 'pong'} | ||||
|         doc.dict_field = dict_value | ||||
|         self.assertEqual(doc._get_changed_fields(), ['dict_field']) | ||||
|         self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {})) | ||||
|  | ||||
|         doc._changed_fields = [] | ||||
|         list_value = ['1', 2, {'hello': 'world'}] | ||||
|         doc.list_field = list_value | ||||
|         self.assertEqual(doc._get_changed_fields(), ['list_field']) | ||||
|         self.assertEqual(doc._delta(), ({'list_field': list_value}, {})) | ||||
|  | ||||
|         # Test unsetting | ||||
|         doc._changed_fields = [] | ||||
|         doc.dict_field = {} | ||||
|         self.assertEqual(doc._get_changed_fields(), ['dict_field']) | ||||
|         self.assertEqual(doc._delta(), ({}, {'dict_field': 1})) | ||||
|  | ||||
|         doc._changed_fields = [] | ||||
|         doc.list_field = [] | ||||
|         self.assertEqual(doc._get_changed_fields(), ['list_field']) | ||||
|         self.assertEqual(doc._delta(), ({}, {'list_field': 1})) | ||||
|  | ||||
|     def test_delta_recursive(self): | ||||
|         self.delta_recursive(Document, EmbeddedDocument) | ||||
|         self.delta_recursive(DynamicDocument, EmbeddedDocument) | ||||
|         self.delta_recursive(Document, DynamicEmbeddedDocument) | ||||
|         self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument) | ||||
|  | ||||
    def delta_recursive(self, DocClass, EmbeddedClass):
        """Verify ``_delta()`` tracking through an embedded document.

        ``DocClass``/``EmbeddedClass`` are any (Dynamic)Document /
        (Dynamic)EmbeddedDocument pairing; the test checks that sets,
        unsets, nested list/dict edits and native list methods all yield
        the expected ``(sets, unsets)`` delta pairs.
        """

        class Embedded(EmbeddedClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()
            embedded_field = EmbeddedDocumentField(Embedded)

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # A freshly loaded document reports no pending changes.
        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        # Assigning a whole embedded doc marks only the top-level field.
        self.assertEqual(doc._get_changed_fields(), ['embedded_field'])

        embedded_delta = {
            'string_field': 'hello',
            'int_field': 1,
            'dict_field': {'hello': 'world'},
            'list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
        self.assertEqual(doc._delta(),
                         ({'embedded_field': embedded_delta}, {}))

        doc.save()
        doc = doc.reload(10)

        # Emptying a dict field is reported as an unset, not a set-to-{}.
        doc.embedded_field.dict_field = {}
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.dict_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
        self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.dict_field, {})

        # Same for an emptied list field.
        doc.embedded_field.list_field = []
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1}))
        self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field, [])

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        # An embedded document inside a list serialises with its '_cls'.
        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field'])

        self.assertEqual(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEqual(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEqual(doc.embedded_field.list_field[2][k],
                             embedded_2[k])

        # Editing a field of a list member yields a dotted, indexed path.
        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field.2.string_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({'list_field.2.string_field': 'world'}, {}))
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.string_field': 'world'}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'world')

        # Test multiple assignments: re-assigning the member collapses the
        # change back to the whole-list path.
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
            '_cls': 'Embedded',
            'string_field': 'hello world',
            'int_field': 1,
            'list_field': ['1', 2, {'hello': 'world'}],
            'dict_field': {'hello': 'world'}}]}, {}))
        self.assertEqual(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}
            ]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'hello world')

        # Test list native methods (pop/append/sort) mark the list changed.
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.list_field':
                          [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.list_field':
                          [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [1, 2, {'hello': 'world'}])

        # Deleting a dict key inside a nested list rewrites the whole list.
        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
        doc.save()
        doc = doc.reload(10)

        # Deleting the attribute unsets the field at its dotted path.
        del(doc.embedded_field.list_field[2].list_field)
        self.assertEqual(doc._delta(),
                         ({}, {'embedded_field.list_field.2.list_field': 1}))

        doc.save()
        doc = doc.reload(10)

        doc.dict_field['Embedded'] = embedded_1
        doc.save()
        doc = doc.reload(10)

        # Embedded documents stored inside a DictField track changes too.
        doc.dict_field['Embedded'].string_field = 'Hello World'
        self.assertEqual(doc._get_changed_fields(),
                         ['dict_field.Embedded.string_field'])
        self.assertEqual(doc._delta(),
                         ({'dict_field.Embedded.string_field': 'Hello World'}, {}))
|  | ||||
|     def test_circular_reference_deltas(self): | ||||
|         self.circular_reference_deltas(Document, Document) | ||||
|         self.circular_reference_deltas(Document, DynamicDocument) | ||||
|         self.circular_reference_deltas(DynamicDocument, Document) | ||||
|         self.circular_reference_deltas(DynamicDocument, DynamicDocument) | ||||
|  | ||||
|     def circular_reference_deltas(self, DocClass1, DocClass2): | ||||
|  | ||||
|         class Person(DocClass1): | ||||
|             name = StringField() | ||||
|             owns = ListField(ReferenceField('Organization')) | ||||
|  | ||||
|         class Organization(DocClass2): | ||||
|             name = StringField() | ||||
|             owner = ReferenceField('Person') | ||||
|  | ||||
|         Person.drop_collection() | ||||
|         Organization.drop_collection() | ||||
|  | ||||
|         person = Person(name="owner").save() | ||||
|         organization = Organization(name="company").save() | ||||
|  | ||||
|         person.owns.append(organization) | ||||
|         organization.owner = person | ||||
|  | ||||
|         person.save() | ||||
|         organization.save() | ||||
|  | ||||
|         p = Person.objects[0].select_related() | ||||
|         o = Organization.objects.first() | ||||
|         self.assertEqual(p.owns[0], o) | ||||
|         self.assertEqual(o.owner, p) | ||||
|  | ||||
|     def test_circular_reference_deltas_2(self): | ||||
|         self.circular_reference_deltas_2(Document, Document) | ||||
|         self.circular_reference_deltas_2(Document, DynamicDocument) | ||||
|         self.circular_reference_deltas_2(DynamicDocument, Document) | ||||
|         self.circular_reference_deltas_2(DynamicDocument, DynamicDocument) | ||||
|  | ||||
|     def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True): | ||||
|  | ||||
|         class Person(DocClass1): | ||||
|             name = StringField() | ||||
|             owns = ListField(ReferenceField('Organization', dbref=dbref)) | ||||
|             employer = ReferenceField('Organization', dbref=dbref) | ||||
|  | ||||
|         class Organization(DocClass2): | ||||
|             name = StringField() | ||||
|             owner = ReferenceField('Person', dbref=dbref) | ||||
|             employees = ListField(ReferenceField('Person', dbref=dbref)) | ||||
|  | ||||
|         Person.drop_collection() | ||||
|         Organization.drop_collection() | ||||
|  | ||||
|         person = Person(name="owner").save() | ||||
|         employee = Person(name="employee").save() | ||||
|         organization = Organization(name="company").save() | ||||
|  | ||||
|         person.owns.append(organization) | ||||
|         organization.owner = person | ||||
|  | ||||
|         organization.employees.append(employee) | ||||
|         employee.employer = organization | ||||
|  | ||||
|         person.save() | ||||
|         organization.save() | ||||
|         employee.save() | ||||
|  | ||||
|         p = Person.objects.get(name="owner") | ||||
|         e = Person.objects.get(name="employee") | ||||
|         o = Organization.objects.first() | ||||
|  | ||||
|         self.assertEqual(p.owns[0], o) | ||||
|         self.assertEqual(o.owner, p) | ||||
|         self.assertEqual(e.employer, o) | ||||
|  | ||||
|         return person, organization, employee | ||||
|  | ||||
|     def test_delta_db_field(self): | ||||
|         self.delta_db_field(Document) | ||||
|         self.delta_db_field(DynamicDocument) | ||||
|  | ||||
    def delta_db_field(self, DocClass):
        """Deltas must use the ``db_field`` names, not the attribute names."""

        class Doc(DocClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # A freshly loaded document reports no pending changes.
        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['db_string_field'])
        self.assertEqual(doc._delta(), ({'db_string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['db_int_field'])
        self.assertEqual(doc._delta(), ({'db_int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEqual(doc._delta(), ({'db_dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
        self.assertEqual(doc._delta(), ({'db_list_field': list_value}, {}))

        # Test unsetting: empty containers become unsets under the db name.
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEqual(doc._delta(), ({}, {'db_dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
        self.assertEqual(doc._delta(), ({}, {'db_list_field': 1}))

        # Test it saves that data (full round trip through the database).
        doc = Doc()
        doc.save()

        doc.string_field = 'hello'
        doc.int_field = 1
        doc.dict_field = {'hello': 'world'}
        doc.list_field = ['1', 2, {'hello': 'world'}]
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.string_field, 'hello')
        self.assertEqual(doc.int_field, 1)
        self.assertEqual(doc.dict_field, {'hello': 'world'})
        self.assertEqual(doc.list_field, ['1', 2, {'hello': 'world'}])
|  | ||||
|     def test_delta_recursive_db_field(self): | ||||
|         self.delta_recursive_db_field(Document, EmbeddedDocument) | ||||
|         self.delta_recursive_db_field(Document, DynamicEmbeddedDocument) | ||||
|         self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument) | ||||
|         self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument) | ||||
|  | ||||
    def delta_recursive_db_field(self, DocClass, EmbeddedClass):
        """Recursive delta tracking with custom ``db_field`` names throughout.

        Mirrors ``delta_recursive`` but every field maps to a ``db_*`` name,
        so all expected paths are expressed in db names.
        """

        class Embedded(EmbeddedClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')

        class Doc(DocClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')
            embedded_field = EmbeddedDocumentField(Embedded,
                                    db_field='db_embedded_field')

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # A freshly loaded document reports no pending changes.
        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc._get_changed_fields(), ['db_embedded_field'])

        # The delta is expressed entirely in db_field names.
        embedded_delta = {
            'db_string_field': 'hello',
            'db_int_field': 1,
            'db_dict_field': {'hello': 'world'},
            'db_list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
        self.assertEqual(doc._delta(),
            ({'db_embedded_field': embedded_delta}, {}))

        doc.save()
        doc = doc.reload(10)

        # Emptying a dict field is reported as an unset.
        doc.embedded_field.dict_field = {}
        self.assertEqual(doc._get_changed_fields(),
            ['db_embedded_field.db_dict_field'])
        self.assertEqual(doc.embedded_field._delta(),
            ({}, {'db_dict_field': 1}))
        self.assertEqual(doc._delta(),
            ({}, {'db_embedded_field.db_dict_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.dict_field, {})

        # Same for an emptied list field.
        doc.embedded_field.list_field = []
        self.assertEqual(doc._get_changed_fields(),
            ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(),
            ({}, {'db_list_field': 1}))
        self.assertEqual(doc._delta(),
            ({}, {'db_embedded_field.db_list_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field, [])

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        # Embedded documents in a list serialise with '_cls' and db names.
        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEqual(doc._get_changed_fields(),
            ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello',
                'db_dict_field': {'hello': 'world'},
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEqual(doc._delta(), ({
            'db_embedded_field.db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello',
                'db_dict_field': {'hello': 'world'},
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEqual(doc.embedded_field.list_field[2][k],
                             embedded_2[k])

        # Editing a list member's field yields a dotted, indexed db path.
        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEqual(doc._get_changed_fields(),
            ['db_embedded_field.db_list_field.2.db_string_field'])
        self.assertEqual(doc.embedded_field._delta(),
            ({'db_list_field.2.db_string_field': 'world'}, {}))
        self.assertEqual(doc._delta(),
            ({'db_embedded_field.db_list_field.2.db_string_field': 'world'},
             {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                        'world')

        # Test multiple assignments: re-assigning the member collapses the
        # change back to the whole-list path.
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(),
            ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'db_list_field': ['1', 2, {
            '_cls': 'Embedded',
            'db_string_field': 'hello world',
            'db_int_field': 1,
            'db_list_field': ['1', 2, {'hello': 'world'}],
            'db_dict_field': {'hello': 'world'}}]}, {}))
        self.assertEqual(doc._delta(), ({
            'db_embedded_field.db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello world',
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
                'db_dict_field': {'hello': 'world'}}
            ]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                        'hello world')

        # Test list native methods (pop/append/sort) mark the list changed.
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEqual(doc._delta(),
            ({'db_embedded_field.db_list_field.2.db_list_field':
                [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEqual(doc._delta(),
            ({'db_embedded_field.db_list_field.2.db_list_field':
                [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
            [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
            [1, 2, {'hello': 'world'}])

        # Deleting a dict key inside a nested list rewrites the whole list.
        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEqual(doc._delta(),
            ({'db_embedded_field.db_list_field.2.db_list_field':
                [1, 2, {}]}, {}))
        doc.save()
        doc = doc.reload(10)

        # Deleting the attribute unsets the field at its dotted db path.
        del(doc.embedded_field.list_field[2].list_field)
        self.assertEqual(doc._delta(), ({},
            {'db_embedded_field.db_list_field.2.db_list_field': 1}))
|  | ||||
    def test_delta_for_dynamic_documents(self):
        """Deltas on a DynamicDocument include dynamic fields (and ``_cls``)."""
        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        # An unsaved document's delta is its full SON, in field order.
        p = Person(name="James", age=34)
        self.assertEqual(p._delta(), (
            SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

        # Setting then deleting an unsaved dynamic field leaves no trace.
        p.doc = 123
        del(p.doc)
        self.assertEqual(p._delta(), (
            SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

        p = Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        # After a save only the modified dynamic field appears in the delta.
        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        # The same applies to a document fetched back from the database.
        p = Person.objects(age=22).get()
        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p.save()
        self.assertEqual(1, Person.objects(age=24).count())
|  | ||||
|     def test_dynamic_delta(self): | ||||
|  | ||||
|         class Doc(DynamicDocument): | ||||
|             pass | ||||
|  | ||||
|         Doc.drop_collection() | ||||
|         doc = Doc() | ||||
|         doc.save() | ||||
|  | ||||
|         doc = Doc.objects.first() | ||||
|         self.assertEqual(doc._get_changed_fields(), []) | ||||
|         self.assertEqual(doc._delta(), ({}, {})) | ||||
|  | ||||
|         doc.string_field = 'hello' | ||||
|         self.assertEqual(doc._get_changed_fields(), ['string_field']) | ||||
|         self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {})) | ||||
|  | ||||
|         doc._changed_fields = [] | ||||
|         doc.int_field = 1 | ||||
|         self.assertEqual(doc._get_changed_fields(), ['int_field']) | ||||
|         self.assertEqual(doc._delta(), ({'int_field': 1}, {})) | ||||
|  | ||||
|         doc._changed_fields = [] | ||||
|         dict_value = {'hello': 'world', 'ping': 'pong'} | ||||
|         doc.dict_field = dict_value | ||||
|         self.assertEqual(doc._get_changed_fields(), ['dict_field']) | ||||
|         self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {})) | ||||
|  | ||||
|         doc._changed_fields = [] | ||||
|         list_value = ['1', 2, {'hello': 'world'}] | ||||
|         doc.list_field = list_value | ||||
|         self.assertEqual(doc._get_changed_fields(), ['list_field']) | ||||
|         self.assertEqual(doc._delta(), ({'list_field': list_value}, {})) | ||||
|  | ||||
|         # Test unsetting | ||||
|         doc._changed_fields = [] | ||||
|         doc.dict_field = {} | ||||
|         self.assertEqual(doc._get_changed_fields(), ['dict_field']) | ||||
|         self.assertEqual(doc._delta(), ({}, {'dict_field': 1})) | ||||
|  | ||||
|         doc._changed_fields = [] | ||||
|         doc.list_field = [] | ||||
|         self.assertEqual(doc._get_changed_fields(), ['list_field']) | ||||
|         self.assertEqual(doc._delta(), ({}, {'list_field': 1})) | ||||
|  | ||||
|     def test_delta_with_dbref_true(self): | ||||
|         person, organization, employee = self.circular_reference_deltas_2(Document, Document, True) | ||||
|         employee.name = 'test' | ||||
|  | ||||
|         self.assertEqual(organization._get_changed_fields(), []) | ||||
|  | ||||
|         updates, removals = organization._delta() | ||||
|         self.assertEqual({}, removals) | ||||
|         self.assertEqual({}, updates) | ||||
|  | ||||
|         organization.employees.append(person) | ||||
|         updates, removals = organization._delta() | ||||
|         self.assertEqual({}, removals) | ||||
|         self.assertTrue('employees' in updates) | ||||
|  | ||||
|     def test_delta_with_dbref_false(self): | ||||
|         person, organization, employee = self.circular_reference_deltas_2(Document, Document, False) | ||||
|         employee.name = 'test' | ||||
|  | ||||
|         self.assertEqual(organization._get_changed_fields(), []) | ||||
|  | ||||
|         updates, removals = organization._delta() | ||||
|         self.assertEqual({}, removals) | ||||
|         self.assertEqual({}, updates) | ||||
|  | ||||
|         organization.employees.append(person) | ||||
|         updates, removals = organization._delta() | ||||
|         self.assertEqual({}, removals) | ||||
|         self.assertTrue('employees' in updates) | ||||
|  | ||||
|     def test_nested_nested_fields_mark_as_changed(self): | ||||
|         class EmbeddedDoc(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         class MyDoc(Document): | ||||
|             subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc))) | ||||
|             name = StringField() | ||||
|  | ||||
|         MyDoc.drop_collection() | ||||
|  | ||||
|         mydoc = MyDoc(name='testcase1', subs={'a': {'b': EmbeddedDoc(name='foo')}}).save() | ||||
|  | ||||
|         mydoc = MyDoc.objects.first() | ||||
|         subdoc = mydoc.subs['a']['b'] | ||||
|         subdoc.name = 'bar' | ||||
|  | ||||
|         self.assertEqual(["name"], subdoc._get_changed_fields()) | ||||
|         self.assertEqual(["subs.a.b.name"], mydoc._get_changed_fields()) | ||||
|  | ||||
|         mydoc._clear_changed_fields() | ||||
|         self.assertEqual([], mydoc._get_changed_fields()) | ||||
|  | ||||
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
							
								
								
									
										297
									
								
								tests/document/dynamic.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										297
									
								
								tests/document/dynamic.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,297 @@ | ||||
| import unittest | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
|  | ||||
| __all__ = ("DynamicTest", ) | ||||
|  | ||||
|  | ||||
| class DynamicTest(unittest.TestCase): | ||||
|  | ||||
    def setUp(self):
        """Connect to the test database and register a fresh ``Person`` model."""
        connect(db='mongoenginetest')
        self.db = get_db()

        # Inheritance is enabled so '_cls' is stored with every document.
        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        self.Person = Person
|  | ||||
|     def test_simple_dynamic_document(self): | ||||
|         """Ensures simple dynamic documents are saved correctly""" | ||||
|  | ||||
|         p = self.Person() | ||||
|         p.name = "James" | ||||
|         p.age = 34 | ||||
|  | ||||
|         self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James", | ||||
|                                         "age": 34}) | ||||
|         self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"]) | ||||
|         p.save() | ||||
|         self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"]) | ||||
|  | ||||
|         self.assertEqual(self.Person.objects.first().age, 34) | ||||
|  | ||||
|         # Confirm no changes to self.Person | ||||
|         self.assertFalse(hasattr(self.Person, 'age')) | ||||
|  | ||||
|     def test_change_scope_of_variable(self): | ||||
|         """Test changing the scope of a dynamic field has no adverse effects""" | ||||
|         p = self.Person() | ||||
|         p.name = "Dean" | ||||
|         p.misc = 22 | ||||
|         p.save() | ||||
|  | ||||
|         p = self.Person.objects.get() | ||||
|         p.misc = {'hello': 'world'} | ||||
|         p.save() | ||||
|  | ||||
|         p = self.Person.objects.get() | ||||
|         self.assertEqual(p.misc, {'hello': 'world'}) | ||||
|  | ||||
|     def test_delete_dynamic_field(self): | ||||
|         """Test deleting a dynamic field works""" | ||||
|         self.Person.drop_collection() | ||||
|         p = self.Person() | ||||
|         p.name = "Dean" | ||||
|         p.misc = 22 | ||||
|         p.save() | ||||
|  | ||||
|         p = self.Person.objects.get() | ||||
|         p.misc = {'hello': 'world'} | ||||
|         p.save() | ||||
|  | ||||
|         p = self.Person.objects.get() | ||||
|         self.assertEqual(p.misc, {'hello': 'world'}) | ||||
|         collection = self.db[self.Person._get_collection_name()] | ||||
|         obj = collection.find_one() | ||||
|         self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name']) | ||||
|  | ||||
|         del(p.misc) | ||||
|         p.save() | ||||
|  | ||||
|         p = self.Person.objects.get() | ||||
|         self.assertFalse(hasattr(p, 'misc')) | ||||
|  | ||||
|         obj = collection.find_one() | ||||
|         self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name']) | ||||
|  | ||||
|     def test_dynamic_document_queries(self): | ||||
|         """Ensure we can query dynamic fields""" | ||||
|         p = self.Person() | ||||
|         p.name = "Dean" | ||||
|         p.age = 22 | ||||
|         p.save() | ||||
|  | ||||
|         self.assertEqual(1, self.Person.objects(age=22).count()) | ||||
|         p = self.Person.objects(age=22) | ||||
|         p = p.get() | ||||
|         self.assertEqual(22, p.age) | ||||
|  | ||||
|     def test_complex_dynamic_document_queries(self): | ||||
|         class Person(DynamicDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         p = Person(name="test") | ||||
|         p.age = "ten" | ||||
|         p.save() | ||||
|  | ||||
|         p1 = Person(name="test1") | ||||
|         p1.age = "less then ten and a half" | ||||
|         p1.save() | ||||
|  | ||||
|         p2 = Person(name="test2") | ||||
|         p2.age = 10 | ||||
|         p2.save() | ||||
|  | ||||
|         self.assertEqual(Person.objects(age__icontains='ten').count(), 2) | ||||
|         self.assertEqual(Person.objects(age__gte=10).count(), 1) | ||||
|  | ||||
|     def test_complex_data_lookups(self): | ||||
|         """Ensure you can query dynamic document dynamic fields""" | ||||
|         p = self.Person() | ||||
|         p.misc = {'hello': 'world'} | ||||
|         p.save() | ||||
|  | ||||
|         self.assertEqual(1, self.Person.objects(misc__hello='world').count()) | ||||
|  | ||||
|     def test_complex_embedded_document_validation(self): | ||||
|         """Ensure embedded dynamic documents may be validated""" | ||||
|         class Embedded(DynamicEmbeddedDocument): | ||||
|             content = URLField() | ||||
|  | ||||
|         class Doc(DynamicDocument): | ||||
|             pass | ||||
|  | ||||
|         Doc.drop_collection() | ||||
|         doc = Doc() | ||||
|  | ||||
|         embedded_doc_1 = Embedded(content='http://mongoengine.org') | ||||
|         embedded_doc_1.validate() | ||||
|  | ||||
|         embedded_doc_2 = Embedded(content='this is not a url') | ||||
|         self.assertRaises(ValidationError, embedded_doc_2.validate) | ||||
|  | ||||
|         doc.embedded_field_1 = embedded_doc_1 | ||||
|         doc.embedded_field_2 = embedded_doc_2 | ||||
|         self.assertRaises(ValidationError, doc.validate) | ||||
|  | ||||
|     def test_inheritance(self): | ||||
|         """Ensure that dynamic document plays nice with inheritance""" | ||||
|         class Employee(self.Person): | ||||
|             salary = IntField() | ||||
|  | ||||
|         Employee.drop_collection() | ||||
|  | ||||
|         self.assertTrue('name' in Employee._fields) | ||||
|         self.assertTrue('salary' in Employee._fields) | ||||
|         self.assertEqual(Employee._get_collection_name(), | ||||
|                          self.Person._get_collection_name()) | ||||
|  | ||||
|         joe_bloggs = Employee() | ||||
|         joe_bloggs.name = "Joe Bloggs" | ||||
|         joe_bloggs.salary = 10 | ||||
|         joe_bloggs.age = 20 | ||||
|         joe_bloggs.save() | ||||
|  | ||||
|         self.assertEqual(1, self.Person.objects(age=20).count()) | ||||
|         self.assertEqual(1, Employee.objects(age=20).count()) | ||||
|  | ||||
|         joe_bloggs = self.Person.objects.first() | ||||
|         self.assertTrue(isinstance(joe_bloggs, Employee)) | ||||
|  | ||||
    def test_embedded_dynamic_document(self):
        """Test dynamic embedded documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        # Populate the embedded document purely via dynamic attributes.
        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        # to_mongo() serialises the embedded doc with its _cls marker and
        # all dynamic values.
        self.assertEqual(doc.to_mongo(), {
            "embedded_field": {
                "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2, {'hello': 'world'}]
            }
        })
        doc.save()

        # Reloading restores the embedded class and every dynamic value.
        doc = Doc.objects.first()
        self.assertEqual(doc.embedded_field.__class__, Embedded)
        self.assertEqual(doc.embedded_field.string_field, "hello")
        self.assertEqual(doc.embedded_field.int_field, 1)
        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(doc.embedded_field.list_field,
                            ['1', 2, {'hello': 'world'}])
|  | ||||
    def test_complex_embedded_documents(self):
        """Test complex dynamic embedded documents setups"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        # Nest the second embedded doc inside the first one's list field.
        embedded_1.list_field = ['1', 2, embedded_2]
        doc.embedded_field = embedded_1

        # Serialisation must recurse: the nested embedded doc appears
        # inside the outer list with its own _cls marker.
        self.assertEqual(doc.to_mongo(), {
            "embedded_field": {
                "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2,
                    {"_cls": "Embedded",
                    "string_field": "hello",
                    "int_field": 1,
                    "dict_field": {"hello": "world"},
                    "list_field": ['1', 2, {'hello': 'world'}]}
                ]
            }
        })
        doc.save()
        doc = Doc.objects.first()
        self.assertEqual(doc.embedded_field.__class__, Embedded)
        self.assertEqual(doc.embedded_field.string_field, "hello")
        self.assertEqual(doc.embedded_field.int_field, 1)
        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)

        # The nested element is deserialised back to an Embedded instance.
        embedded_field = doc.embedded_field.list_field[2]

        self.assertEqual(embedded_field.__class__, Embedded)
        self.assertEqual(embedded_field.string_field, "hello")
        self.assertEqual(embedded_field.int_field, 1)
        self.assertEqual(embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(embedded_field.list_field, ['1', 2,
                                                        {'hello': 'world'}])
|  | ||||
|     def test_dynamic_and_embedded(self): | ||||
|         """Ensure embedded documents play nicely""" | ||||
|  | ||||
|         class Address(EmbeddedDocument): | ||||
|             city = StringField() | ||||
|  | ||||
|         class Person(DynamicDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         Person(name="Ross", address=Address(city="London")).save() | ||||
|  | ||||
|         person = Person.objects.first() | ||||
|         person.address.city = "Lundenne" | ||||
|         person.save() | ||||
|  | ||||
|         self.assertEqual(Person.objects.first().address.city, "Lundenne") | ||||
|  | ||||
|         person = Person.objects.first() | ||||
|         person.address = Address(city="Londinium") | ||||
|         person.save() | ||||
|  | ||||
|         self.assertEqual(Person.objects.first().address.city, "Londinium") | ||||
|  | ||||
|         person = Person.objects.first() | ||||
|         person.age = 35 | ||||
|         person.save() | ||||
|         self.assertEqual(Person.objects.first().age, 35) | ||||
|  | ||||
|  | ||||
# Allow this test module to be run directly as a script.
if __name__ == '__main__':
    unittest.main()
							
								
								
									
										731
									
								
								tests/document/indexes.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										731
									
								
								tests/document/indexes.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,731 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import os | ||||
| import pymongo | ||||
|  | ||||
| from nose.plugins.skip import SkipTest | ||||
| from datetime import datetime | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db, get_connection | ||||
|  | ||||
| __all__ = ("IndexesTest", ) | ||||
|  | ||||
|  | ||||
| class IndexesTest(unittest.TestCase): | ||||
|  | ||||
    def setUp(self):
        # Each test talks to a real local MongoDB instance.
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            # Plain class attribute; must not be picked up as a field.
            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person
|  | ||||
|     def tearDown(self): | ||||
|         for collection in self.db.collection_names(): | ||||
|             if 'system.' in collection: | ||||
|                 continue | ||||
|             self.db.drop_collection(collection) | ||||
|  | ||||
    def test_indexes_document(self):
        """Ensure that indexes are used when meta['indexes'] is specified
        for Documents.
        """
        # Shared assertions live in _index_test.
        self._index_test(Document)
|  | ||||
    def test_indexes_dynamic_document(self):
        """Ensure that indexes are used when meta['indexes'] is specified
        for DynamicDocuments.
        """
        # Shared assertions live in _index_test.
        self._index_test(DynamicDocument)
|  | ||||
    def _index_test(self, InheritFrom):
        # Shared body for the Document / DynamicDocument index tests:
        # declares three indexes in meta and checks both the computed
        # specs and the indexes actually created in MongoDB.

        class BlogPost(InheritFrom):
            date = DateTimeField(db_field='addDate', default=datetime.now)
            category = StringField()
            tags = ListField(StringField())
            meta = {
                'indexes': [
                    '-date',
                    'tags',
                    ('category', '-date')
                ]
            }

        # Specs are translated to db_field names ('date' -> 'addDate').
        expected_specs = [{'fields': [('addDate', -1)]},
                          {'fields': [('tags', 1)]},
                          {'fields': [('category', 1), ('addDate', -1)]}]
        self.assertEqual(expected_specs, BlogPost._meta['index_specs'])

        BlogPost.ensure_indexes()
        info = BlogPost.objects._collection.index_information()
        # _id, '-date', 'tags', ('cat', 'date')
        self.assertEqual(len(info), 4)
        info = [value['key'] for key, value in info.iteritems()]
        for expected in expected_specs:
            self.assertTrue(expected['fields'] in info)
|  | ||||
    def _index_test_inheritance(self, InheritFrom):
        # Shared body for the inheritance index tests: with
        # allow_inheritance enabled, every declared index gets the _cls
        # discriminator prepended, and subclasses inherit parent indexes.

        class BlogPost(InheritFrom):
            date = DateTimeField(db_field='addDate', default=datetime.now)
            category = StringField()
            tags = ListField(StringField())
            meta = {
                'indexes': [
                    '-date',
                    'tags',
                    ('category', '-date')
                ],
                'allow_inheritance': True
            }

        # _cls is prepended to every spec; 'date' maps to its db_field.
        expected_specs = [{'fields': [('_cls', 1), ('addDate', -1)]},
                          {'fields': [('_cls', 1), ('tags', 1)]},
                          {'fields': [('_cls', 1), ('category', 1),
                                      ('addDate', -1)]}]
        self.assertEqual(expected_specs, BlogPost._meta['index_specs'])

        BlogPost.ensure_indexes()
        info = BlogPost.objects._collection.index_information()
        # _id, '-date', 'tags', ('cat', 'date')
        # NB: there is no index on _cls by itself, since
        # the indices on -date and tags will both contain
        # _cls as first element in the key
        self.assertEqual(len(info), 4)
        info = [value['key'] for key, value in info.iteritems()]
        for expected in expected_specs:
            self.assertTrue(expected['fields'] in info)

        # A subclass inherits the parent's specs plus its own new index.
        class ExtendedBlogPost(BlogPost):
            title = StringField()
            meta = {'indexes': ['title']}

        expected_specs.append({'fields': [('_cls', 1), ('title', 1)]})
        self.assertEqual(expected_specs, ExtendedBlogPost._meta['index_specs'])

        BlogPost.drop_collection()

        ExtendedBlogPost.ensure_indexes()
        info = ExtendedBlogPost.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        for expected in expected_specs:
            self.assertTrue(expected['fields'] in info)
|  | ||||
    def test_indexes_document_inheritance(self):
        """Ensure that inherited indexes are used when meta['indexes'] is
        specified for Documents.
        """
        # Shared assertions live in _index_test_inheritance.
        self._index_test_inheritance(Document)
|  | ||||
    def test_indexes_dynamic_document_inheritance(self):
        """Ensure that inherited indexes are used when meta['indexes'] is
        specified for DynamicDocuments.
        """
        # Shared assertions live in _index_test_inheritance.
        self._index_test_inheritance(DynamicDocument)
|  | ||||
|     def test_inherited_index(self): | ||||
|         """Ensure index specs are inhertited correctly""" | ||||
|  | ||||
|         class A(Document): | ||||
|             title = StringField() | ||||
|             meta = { | ||||
|                 'indexes': [ | ||||
|                         { | ||||
|                         'fields': ('title',), | ||||
|                         }, | ||||
|                 ], | ||||
|                 'allow_inheritance': True, | ||||
|                 } | ||||
|  | ||||
|         class B(A): | ||||
|             description = StringField() | ||||
|  | ||||
|         self.assertEqual(A._meta['index_specs'], B._meta['index_specs']) | ||||
|         self.assertEqual([{'fields': [('_cls', 1), ('title', 1)]}], | ||||
|                          A._meta['index_specs']) | ||||
|  | ||||
|     def test_index_no_cls(self): | ||||
|         """Ensure index specs are inhertited correctly""" | ||||
|  | ||||
|         class A(Document): | ||||
|             title = StringField() | ||||
|             meta = { | ||||
|                 'indexes': [ | ||||
|                         {'fields': ('title',), 'cls': False}, | ||||
|                 ], | ||||
|                 'allow_inheritance': True, | ||||
|                 'index_cls': False | ||||
|                 } | ||||
|  | ||||
|         self.assertEqual([('title', 1)], A._meta['index_specs'][0]['fields']) | ||||
|         A._get_collection().drop_indexes() | ||||
|         A.ensure_indexes() | ||||
|         info = A._get_collection().index_information() | ||||
|         self.assertEqual(len(info.keys()), 2) | ||||
|  | ||||
|     def test_build_index_spec_is_not_destructive(self): | ||||
|  | ||||
|         class MyDoc(Document): | ||||
|             keywords = StringField() | ||||
|  | ||||
|             meta = { | ||||
|                 'indexes': ['keywords'], | ||||
|                 'allow_inheritance': False | ||||
|             } | ||||
|  | ||||
|         self.assertEqual(MyDoc._meta['index_specs'], | ||||
|                          [{'fields': [('keywords', 1)]}]) | ||||
|  | ||||
|         # Force index creation | ||||
|         MyDoc.ensure_indexes() | ||||
|  | ||||
|         self.assertEqual(MyDoc._meta['index_specs'], | ||||
|                         [{'fields': [('keywords', 1)]}]) | ||||
|  | ||||
    def test_embedded_document_index_meta(self):
        """Ensure that embedded document indexes are created explicitly
        """
        class Rank(EmbeddedDocument):
            title = StringField(required=True)

        class Person(Document):
            name = StringField(required=True)
            rank = EmbeddedDocumentField(Rank, required=False)

            meta = {
                'indexes': [
                    'rank.title',
                ],
                'allow_inheritance': False
            }

        # The dotted path into the embedded document is kept as-is.
        self.assertEqual([{'fields': [('rank.title', 1)]}],
                        Person._meta['index_specs'])

        Person.drop_collection()

        # Indexes are lazy so use list() to perform query
        list(Person.objects)
        info = Person.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('rank.title', 1)] in info)
|  | ||||
|     def test_explicit_geo2d_index(self): | ||||
|         """Ensure that geo2d indexes work when created via meta[indexes] | ||||
|         """ | ||||
|         class Place(Document): | ||||
|             location = DictField() | ||||
|             meta = { | ||||
|                 'allow_inheritance': True, | ||||
|                 'indexes': [ | ||||
|                     '*location.point', | ||||
|                 ] | ||||
|             } | ||||
|  | ||||
|         self.assertEqual([{'fields': [('location.point', '2d')]}], | ||||
|                          Place._meta['index_specs']) | ||||
|  | ||||
|         Place.ensure_indexes() | ||||
|         info = Place._get_collection().index_information() | ||||
|         info = [value['key'] for key, value in info.iteritems()] | ||||
|         self.assertTrue([('location.point', '2d')] in info) | ||||
|  | ||||
|     def test_explicit_geo2d_index_embedded(self): | ||||
|         """Ensure that geo2d indexes work when created via meta[indexes] | ||||
|         """ | ||||
|         class EmbeddedLocation(EmbeddedDocument): | ||||
|             location = DictField() | ||||
|  | ||||
|         class Place(Document): | ||||
|             current = DictField(field=EmbeddedDocumentField('EmbeddedLocation')) | ||||
|             meta = { | ||||
|                 'allow_inheritance': True, | ||||
|                 'indexes': [ | ||||
|                     '*current.location.point', | ||||
|                 ] | ||||
|             } | ||||
|  | ||||
|         self.assertEqual([{'fields': [('current.location.point', '2d')]}], | ||||
|                          Place._meta['index_specs']) | ||||
|  | ||||
|         Place.ensure_indexes() | ||||
|         info = Place._get_collection().index_information() | ||||
|         info = [value['key'] for key, value in info.iteritems()] | ||||
|         self.assertTrue([('current.location.point', '2d')] in info) | ||||
|  | ||||
    def test_dictionary_indexes(self):
        """Ensure that indexes are used when meta[indexes] contains
        dictionaries instead of lists.
        """
        class BlogPost(Document):
            date = DateTimeField(db_field='addDate', default=datetime.now)
            category = StringField()
            tags = ListField(StringField())
            meta = {
                'indexes': [
                    {'fields': ['-date'], 'unique': True, 'sparse': True},
                ],
            }

        # Extra options (unique/sparse) survive into the computed spec.
        self.assertEqual([{'fields': [('addDate', -1)], 'unique': True,
                          'sparse': True}],
                         BlogPost._meta['index_specs'])

        BlogPost.drop_collection()

        info = BlogPost.objects._collection.index_information()
        # _id, '-date'
        self.assertEqual(len(info), 2)

        # Indexes are lazy so use list() to perform query
        list(BlogPost.objects)
        info = BlogPost.objects._collection.index_information()
        # Collect (key, unique, sparse) triples for each created index.
        info = [(value['key'],
                 value.get('unique', False),
                 value.get('sparse', False))
                for key, value in info.iteritems()]
        self.assertTrue(([('addDate', -1)], True, True) in info)

        BlogPost.drop_collection()
|  | ||||
|     def test_abstract_index_inheritance(self): | ||||
|  | ||||
|         class UserBase(Document): | ||||
|             user_guid = StringField(required=True) | ||||
|             meta = { | ||||
|                 'abstract': True, | ||||
|                 'indexes': ['user_guid'], | ||||
|                 'allow_inheritance': True | ||||
|             } | ||||
|  | ||||
|         class Person(UserBase): | ||||
|             name = StringField() | ||||
|  | ||||
|             meta = { | ||||
|                 'indexes': ['name'], | ||||
|             } | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         Person(name="test", user_guid='123').save() | ||||
|  | ||||
|         self.assertEqual(1, Person.objects.count()) | ||||
|         info = Person.objects._collection.index_information() | ||||
|         self.assertEqual(sorted(info.keys()), | ||||
|                          ['_cls_1_name_1', '_cls_1_user_guid_1', '_id_']) | ||||
|  | ||||
    def test_disable_index_creation(self):
        """Tests setting auto_create_index to False on the connection will
        disable any index generation.
        """
        class User(Document):
            meta = {
                'allow_inheritance': True,
                'indexes': ['user_guid'],
                'auto_create_index': False
            }
            user_guid = StringField(required=True)

        class MongoUser(User):
            pass

        User.drop_collection()

        User(user_guid='123').save()
        MongoUser(user_guid='123').save()

        # Saves worked, but no index beyond _id was created automatically.
        self.assertEqual(2, User.objects.count())
        info = User.objects._collection.index_information()
        self.assertEqual(info.keys(), ['_id_'])

        # An explicit ensure_indexes() still creates the declared index.
        User.ensure_indexes()
        info = User.objects._collection.index_information()
        self.assertEqual(sorted(info.keys()), ['_cls_1_user_guid_1', '_id_'])
        User.drop_collection()
|  | ||||
|     def test_embedded_document_index(self): | ||||
|         """Tests settings an index on an embedded document | ||||
|         """ | ||||
|         class Date(EmbeddedDocument): | ||||
|             year = IntField(db_field='yr') | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             title = StringField() | ||||
|             date = EmbeddedDocumentField(Date) | ||||
|  | ||||
|             meta = { | ||||
|                 'indexes': [ | ||||
|                     '-date.year' | ||||
|                 ], | ||||
|             } | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         info = BlogPost.objects._collection.index_information() | ||||
|         self.assertEqual(sorted(info.keys()), ['_id_', 'date.yr_-1']) | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|     def test_list_embedded_document_index(self): | ||||
|         """Ensure list embedded documents can be indexed | ||||
|         """ | ||||
|         class Tag(EmbeddedDocument): | ||||
|             name = StringField(db_field='tag') | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             title = StringField() | ||||
|             tags = ListField(EmbeddedDocumentField(Tag)) | ||||
|  | ||||
|             meta = { | ||||
|                 'indexes': [ | ||||
|                     'tags.name' | ||||
|                 ] | ||||
|             } | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         info = BlogPost.objects._collection.index_information() | ||||
|         # we don't use _cls in with list fields by default | ||||
|         self.assertEqual(sorted(info.keys()), ['_id_', 'tags.tag_1']) | ||||
|  | ||||
|         post1 = BlogPost(title="Embedded Indexes tests in place", | ||||
|                          tags=[Tag(name="about"), Tag(name="time")]) | ||||
|         post1.save() | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|     def test_recursive_embedded_objects_dont_break_indexes(self): | ||||
|  | ||||
|         class RecursiveObject(EmbeddedDocument): | ||||
|             obj = EmbeddedDocumentField('self') | ||||
|  | ||||
|         class RecursiveDocument(Document): | ||||
|             recursive_obj = EmbeddedDocumentField(RecursiveObject) | ||||
|             meta = {'allow_inheritance': True} | ||||
|  | ||||
|         RecursiveDocument.ensure_indexes() | ||||
|         info = RecursiveDocument._get_collection().index_information() | ||||
|         self.assertEqual(sorted(info.keys()), ['_cls_1', '_id_']) | ||||
|  | ||||
    def test_covered_index(self):
        """Ensure that covered indexes can be used
        """

        class Test(Document):
            a = IntField()

            meta = {
                'indexes': ['a'],
                'allow_inheritance': False
            }

        Test.drop_collection()

        obj = Test(a=1)
        obj.save()

        # Need to be explicit about covered indexes as mongoDB doesn't know if
        # the documents returned might have more keys in that here.
        query_plan = Test.objects(id=obj.id).exclude('a').explain()
        self.assertFalse(query_plan['indexOnly'])

        # Projecting only indexed fields lets the query be index-covered.
        query_plan = Test.objects(id=obj.id).only('id').explain()
        self.assertTrue(query_plan['indexOnly'])

        query_plan = Test.objects(a=1).only('a').exclude('id').explain()
        self.assertTrue(query_plan['indexOnly'])
|  | ||||
    def test_index_on_id(self):
        # A compound index may include the primary key; 'id' is translated
        # to the '_id' db field in the created index.

        class BlogPost(Document):
            meta = {
                'indexes': [
                    ['categories', 'id']
                ]
            }

            title = StringField(required=True)
            description = StringField(required=True)
            categories = ListField()

        BlogPost.drop_collection()

        indexes = BlogPost.objects._collection.index_information()
        self.assertEqual(indexes['categories_1__id_1']['key'],
                                 [('categories', 1), ('_id', 1)])
|  | ||||
|     def test_hint(self): | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             tags = ListField(StringField()) | ||||
|             meta = { | ||||
|                 'indexes': [ | ||||
|                     'tags', | ||||
|                 ], | ||||
|             } | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         for i in xrange(0, 10): | ||||
|             tags = [("tag %i" % n) for n in xrange(0, i % 2)] | ||||
|             BlogPost(tags=tags).save() | ||||
|  | ||||
|         self.assertEqual(BlogPost.objects.count(), 10) | ||||
|         self.assertEqual(BlogPost.objects.hint().count(), 10) | ||||
|         self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10) | ||||
|  | ||||
|         self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10) | ||||
|  | ||||
|         def invalid_index(): | ||||
|             BlogPost.objects.hint('tags') | ||||
|         self.assertRaises(TypeError, invalid_index) | ||||
|  | ||||
|         def invalid_index_2(): | ||||
|             return BlogPost.objects.hint(('tags', 1)) | ||||
|         self.assertRaises(Exception, invalid_index_2) | ||||
|  | ||||
|     def test_unique(self): | ||||
|         """Ensure that uniqueness constraints are applied to fields. | ||||
|         """ | ||||
|         class BlogPost(Document): | ||||
|             title = StringField() | ||||
|             slug = StringField(unique=True) | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         post1 = BlogPost(title='test1', slug='test') | ||||
|         post1.save() | ||||
|  | ||||
|         # Two posts with the same slug is not allowed | ||||
|         post2 = BlogPost(title='test2', slug='test') | ||||
|         self.assertRaises(NotUniqueError, post2.save) | ||||
|  | ||||
|         # Ensure backwards compatibilty for errors | ||||
|         self.assertRaises(OperationError, post2.save) | ||||
|  | ||||
    def test_unique_with(self):
        """Ensure that unique_with constraints are applied to fields.
        """
        class Date(EmbeddedDocument):
            year = IntField(db_field='yr')

        class BlogPost(Document):
            title = StringField()
            date = EmbeddedDocumentField(Date)
            slug = StringField(unique_with='date.year')

        BlogPost.drop_collection()

        post1 = BlogPost(title='test1', date=Date(year=2009), slug='test')
        post1.save()

        # year is different so won't raise exception
        post2 = BlogPost(title='test2', date=Date(year=2010), slug='test')
        post2.save()

        # Now there will be two docs with the same slug and the same year: fail
        post3 = BlogPost(title='test3', date=Date(year=2010), slug='test')
        self.assertRaises(OperationError, post3.save)

        BlogPost.drop_collection()
|  | ||||
    def test_unique_embedded_document(self):
        """Ensure that uniqueness constraints are applied to fields on embedded documents.
        """
        class SubDocument(EmbeddedDocument):
            year = IntField(db_field='yr')
            slug = StringField(unique=True)

        class BlogPost(Document):
            title = StringField()
            sub = EmbeddedDocumentField(SubDocument)

        BlogPost.drop_collection()

        post1 = BlogPost(title='test1',
                         sub=SubDocument(year=2009, slug="test"))
        post1.save()

        # sub.slug is different so won't raise exception
        post2 = BlogPost(title='test2',
                         sub=SubDocument(year=2010, slug='another-slug'))
        post2.save()

        # Now there will be two docs with the same sub.slug
        post3 = BlogPost(title='test3',
                         sub=SubDocument(year=2010, slug='test'))
        self.assertRaises(NotUniqueError, post3.save)

        BlogPost.drop_collection()
|  | ||||
|     def test_unique_with_embedded_document_and_embedded_unique(self): | ||||
|         """Ensure that uniqueness constraints are applied to fields on | ||||
|         embedded documents.  And work with unique_with as well. | ||||
|         """ | ||||
|         class SubDocument(EmbeddedDocument): | ||||
|             year = IntField(db_field='yr') | ||||
|             slug = StringField(unique=True) | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             title = StringField(unique_with='sub.year') | ||||
|             sub = EmbeddedDocumentField(SubDocument) | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         post1 = BlogPost(title='test1', | ||||
|                          sub=SubDocument(year=2009, slug="test")) | ||||
|         post1.save() | ||||
|  | ||||
|         # sub.slug is different so won't raise exception | ||||
|         post2 = BlogPost(title='test2', | ||||
|                          sub=SubDocument(year=2010, slug='another-slug')) | ||||
|         post2.save() | ||||
|  | ||||
|         # Now there will be two docs with the same sub.slug | ||||
|         post3 = BlogPost(title='test3', | ||||
|                          sub=SubDocument(year=2010, slug='test')) | ||||
|         self.assertRaises(NotUniqueError, post3.save) | ||||
|  | ||||
|         # Now there will be two docs with the same title and year | ||||
|         post3 = BlogPost(title='test1', | ||||
|                          sub=SubDocument(year=2009, slug='test-1')) | ||||
|         self.assertRaises(NotUniqueError, post3.save) | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|     def test_ttl_indexes(self): | ||||
|  | ||||
|         class Log(Document): | ||||
|             created = DateTimeField(default=datetime.now) | ||||
|             meta = { | ||||
|                 'indexes': [ | ||||
|                     {'fields': ['created'], 'expireAfterSeconds': 3600} | ||||
|                 ] | ||||
|             } | ||||
|  | ||||
|         Log.drop_collection() | ||||
|  | ||||
|         if pymongo.version_tuple[0] < 2 and pymongo.version_tuple[1] < 3: | ||||
|             raise SkipTest('pymongo needs to be 2.3 or higher for this test') | ||||
|  | ||||
|         connection = get_connection() | ||||
|         version_array = connection.server_info()['versionArray'] | ||||
|         if version_array[0] < 2 and version_array[1] < 2: | ||||
|             raise SkipTest('MongoDB needs to be 2.2 or higher for this test') | ||||
|  | ||||
|         # Indexes are lazy so use list() to perform query | ||||
|         list(Log.objects) | ||||
|         info = Log.objects._collection.index_information() | ||||
|         self.assertEqual(3600, | ||||
|                          info['created_1']['expireAfterSeconds']) | ||||
|  | ||||
|     def test_unique_and_indexes(self): | ||||
|         """Ensure that 'unique' constraints aren't overridden by | ||||
|         meta.indexes. | ||||
|         """ | ||||
|         class Customer(Document): | ||||
|             cust_id = IntField(unique=True, required=True) | ||||
|             meta = { | ||||
|                 'indexes': ['cust_id'], | ||||
|                 'allow_inheritance': False, | ||||
|             } | ||||
|  | ||||
|         Customer.drop_collection() | ||||
|         cust = Customer(cust_id=1) | ||||
|         cust.save() | ||||
|  | ||||
|         cust_dupe = Customer(cust_id=1) | ||||
|         try: | ||||
|             cust_dupe.save() | ||||
|             raise AssertionError("We saved a dupe!") | ||||
|         except NotUniqueError: | ||||
|             pass | ||||
|         Customer.drop_collection() | ||||
|  | ||||
|     def test_unique_and_primary(self): | ||||
|         """If you set a field as primary, then unexpected behaviour can occur. | ||||
|         You won't create a duplicate but you will update an existing document. | ||||
|         """ | ||||
|  | ||||
|         class User(Document): | ||||
|             name = StringField(primary_key=True, unique=True) | ||||
|             password = StringField() | ||||
|  | ||||
|         User.drop_collection() | ||||
|  | ||||
|         user = User(name='huangz', password='secret') | ||||
|         user.save() | ||||
|  | ||||
|         user = User(name='huangz', password='secret2') | ||||
|         user.save() | ||||
|  | ||||
|         self.assertEqual(User.objects.count(), 1) | ||||
|         self.assertEqual(User.objects.get().password, 'secret2') | ||||
|  | ||||
|         User.drop_collection() | ||||
|  | ||||
|     def test_index_with_pk(self): | ||||
|         """Ensure you can use `pk` as part of a query""" | ||||
|  | ||||
|         class Comment(EmbeddedDocument): | ||||
|             comment_id = IntField(required=True) | ||||
|  | ||||
|         try: | ||||
|             class BlogPost(Document): | ||||
|                 comments = EmbeddedDocumentField(Comment) | ||||
|                 meta = {'indexes': [ | ||||
|                             {'fields': ['pk', 'comments.comment_id'], | ||||
|                              'unique': True}]} | ||||
|         except UnboundLocalError: | ||||
|             self.fail('Unbound local error at index + pk definition') | ||||
|  | ||||
|         info = BlogPost.objects._collection.index_information() | ||||
|         info = [value['key'] for key, value in info.iteritems()] | ||||
|         index_item = [('_id', 1), ('comments.comment_id', 1)] | ||||
|         self.assertTrue(index_item in info) | ||||
|  | ||||
|     def test_compound_key_embedded(self): | ||||
|  | ||||
|         class CompoundKey(EmbeddedDocument): | ||||
|             name = StringField(required=True) | ||||
|             term = StringField(required=True) | ||||
|  | ||||
|         class Report(Document): | ||||
|             key = EmbeddedDocumentField(CompoundKey, primary_key=True) | ||||
|             text = StringField() | ||||
|  | ||||
|         Report.drop_collection() | ||||
|  | ||||
|         my_key = CompoundKey(name="n", term="ok") | ||||
|         report = Report(text="OK", key=my_key).save() | ||||
|  | ||||
|         self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}}, | ||||
|                          report.to_mongo()) | ||||
|         self.assertEqual(report, Report.objects.get(pk=my_key)) | ||||
|  | ||||
|     def test_compound_key_dictfield(self): | ||||
|  | ||||
|         class Report(Document): | ||||
|             key = DictField(primary_key=True) | ||||
|             text = StringField() | ||||
|  | ||||
|         Report.drop_collection() | ||||
|  | ||||
|         my_key = {"name": "n", "term": "ok"} | ||||
|         report = Report(text="OK", key=my_key).save() | ||||
|  | ||||
|         self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}}, | ||||
|                          report.to_mongo()) | ||||
|         self.assertEqual(report, Report.objects.get(pk=my_key)) | ||||
|  | ||||
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
							
								
								
									
										449
									
								
								tests/document/inheritance.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										449
									
								
								tests/document/inheritance.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,449 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
| import warnings | ||||
|  | ||||
| from datetime import datetime | ||||
|  | ||||
| from tests.fixtures import Base | ||||
|  | ||||
| from mongoengine import Document, EmbeddedDocument, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import (BooleanField, GenericReferenceField, | ||||
|                                 IntField, StringField) | ||||
|  | ||||
| __all__ = ('InheritanceTest', ) | ||||
|  | ||||
|  | ||||
class InheritanceTest(unittest.TestCase):
    """Tests for document inheritance.

    Covers the ``_superclasses`` / ``_subclasses`` bookkeeping, the
    ``allow_inheritance`` and ``abstract`` meta options, collection
    sharing between parent and child documents, and polymorphic queries.
    """

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        # Drop every non-system collection so each test starts clean.
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_superclasses(self):
        """Ensure that the correct list of superclasses is assembled.
        """
        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Fish._superclasses, ('Animal',))
        self.assertEqual(Guppy._superclasses, ('Animal', 'Animal.Fish'))
        self.assertEqual(Mammal._superclasses, ('Animal',))
        self.assertEqual(Dog._superclasses, ('Animal', 'Animal.Mammal'))
        self.assertEqual(Human._superclasses, ('Animal', 'Animal.Mammal'))

    def test_external_superclasses(self):
        """Ensure that the correct list of super classes is assembled when
        importing part of the model.
        """
        # Base comes from tests.fixtures, so class paths are rooted there.
        class Animal(Base): pass
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._superclasses, ('Base', ))
        self.assertEqual(Fish._superclasses, ('Base', 'Base.Animal',))
        self.assertEqual(Guppy._superclasses, ('Base', 'Base.Animal',
                                               'Base.Animal.Fish'))
        self.assertEqual(Mammal._superclasses, ('Base', 'Base.Animal',))
        self.assertEqual(Dog._superclasses, ('Base', 'Base.Animal',
                                             'Base.Animal.Mammal'))
        self.assertEqual(Human._superclasses, ('Base', 'Base.Animal',
                                               'Base.Animal.Mammal'))

    def test_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled.
        """
        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._subclasses, ('Animal',
                                         'Animal.Fish',
                                         'Animal.Fish.Guppy',
                                         'Animal.Mammal',
                                         'Animal.Mammal.Dog',
                                         'Animal.Mammal.Human'))
        self.assertEqual(Fish._subclasses, ('Animal.Fish',
                                       'Animal.Fish.Guppy',))
        self.assertEqual(Guppy._subclasses, ('Animal.Fish.Guppy',))
        self.assertEqual(Mammal._subclasses, ('Animal.Mammal',
                                         'Animal.Mammal.Dog',
                                         'Animal.Mammal.Human'))
        self.assertEqual(Human._subclasses, ('Animal.Mammal.Human',))

    def test_external_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled when importing part of the model.
        """
        class Animal(Base): pass
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._subclasses, ('Base.Animal',
                                              'Base.Animal.Fish',
                                              'Base.Animal.Fish.Guppy',
                                              'Base.Animal.Mammal',
                                              'Base.Animal.Mammal.Dog',
                                              'Base.Animal.Mammal.Human'))
        self.assertEqual(Fish._subclasses, ('Base.Animal.Fish',
                                            'Base.Animal.Fish.Guppy',))
        self.assertEqual(Guppy._subclasses, ('Base.Animal.Fish.Guppy',))
        self.assertEqual(Mammal._subclasses, ('Base.Animal.Mammal',
                                              'Base.Animal.Mammal.Dog',
                                              'Base.Animal.Mammal.Human'))
        self.assertEqual(Human._subclasses, ('Base.Animal.Mammal.Human',))

    def test_dynamic_declarations(self):
        """Test that declaring an extra class updates meta data"""

        class Animal(Document):
            meta = {'allow_inheritance': True}

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Animal._subclasses, ('Animal',))

        # Test dynamically adding a class changes the meta data
        class Fish(Animal):
            pass

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish'))

        self.assertEqual(Fish._superclasses, ('Animal', ))
        self.assertEqual(Fish._subclasses, ('Animal.Fish',))

        # Test dynamically adding an inherited class changes the meta data
        class Pike(Fish):
            pass

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish',
                                              'Animal.Fish.Pike'))

        self.assertEqual(Fish._superclasses, ('Animal', ))
        self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike'))

        self.assertEqual(Pike._superclasses, ('Animal', 'Animal.Fish'))
        self.assertEqual(Pike._subclasses, ('Animal.Fish.Pike',))

    def test_inheritance_meta_data(self):
        """Ensure that document may inherit fields from a superclass document.
        """
        class Person(Document):
            name = StringField()
            age = IntField()

            meta = {'allow_inheritance': True}

        class Employee(Person):
            salary = IntField()

        # Subclass sees inherited fields plus its own, and shares the
        # parent's collection.
        self.assertEqual(['age', 'id', 'name', 'salary'],
                         sorted(Employee._fields.keys()))
        self.assertEqual(Employee._get_collection_name(),
                         Person._get_collection_name())

    def test_inheritance_to_mongo_keys(self):
        """Ensure that document may inherit fields from a superclass document.
        """
        class Person(Document):
            name = StringField()
            age = IntField()

            meta = {'allow_inheritance': True}

        class Employee(Person):
            salary = IntField()

        self.assertEqual(['age', 'id', 'name', 'salary'],
                         sorted(Employee._fields.keys()))
        # _cls is prepended so documents can be resolved polymorphically.
        self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
                         ['_cls', 'name', 'age'])
        self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
                         ['_cls', 'name', 'age', 'salary'])
        self.assertEqual(Employee._get_collection_name(),
                         Person._get_collection_name())

    def test_indexes_and_multiple_inheritance(self):
        """ Ensure that all of the indexes are created for a document with
        multiple inheritance.
        """

        class A(Document):
            a = StringField()

            meta = {
                'allow_inheritance': True,
                'indexes': ['a']
            }

        class B(Document):
            b = StringField()

            meta = {
                'allow_inheritance': True,
                'indexes': ['b']
            }

        class C(A, B):
            pass

        A.drop_collection()
        B.drop_collection()
        C.drop_collection()

        C.ensure_indexes()

        # C must carry both parents' indexes (each prefixed with _cls)
        # plus the default _id index.
        self.assertEqual(
            sorted([idx['key'] for idx in C._get_collection().index_information().values()]),
            sorted([[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]])
        )

    def test_polymorphic_queries(self):
        """Ensure that the correct subclasses are returned from a query
        """

        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        Animal.drop_collection()

        Animal().save()
        Fish().save()
        Mammal().save()
        Dog().save()
        Human().save()

        # Querying a class yields instances of it and all its subclasses.
        classes = [obj.__class__ for obj in Animal.objects]
        self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human])

        classes = [obj.__class__ for obj in Mammal.objects]
        self.assertEqual(classes, [Mammal, Dog, Human])

        classes = [obj.__class__ for obj in Human.objects]
        self.assertEqual(classes, [Human])

    def test_allow_inheritance(self):
        """Ensure that inheritance may be disabled on simple classes and that
        _cls and _subclasses will not be used.
        """

        class Animal(Document):
            name = StringField()

        def create_dog_class():
            class Dog(Animal):
                pass

        # Subclassing without allow_inheritance=True must be rejected.
        self.assertRaises(ValueError, create_dog_class)

        # Check that _cls etc aren't present on simple documents
        dog = Animal(name='dog').save()
        self.assertEqual(dog.to_mongo().keys(), ['_id', 'name'])

        collection = self.db[Animal._get_collection_name()]
        obj = collection.find_one()
        self.assertFalse('_cls' in obj)

    def test_cant_turn_off_inheritance_on_subclass(self):
        """Ensure if inheritance is on in a subclass you cant turn it off
        """

        class Animal(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        def create_mammal_class():
            class Mammal(Animal):
                meta = {'allow_inheritance': False}
        self.assertRaises(ValueError, create_mammal_class)

    def test_allow_inheritance_abstract_document(self):
        """Ensure that abstract documents can set inheritance rules and that
        _cls will not be used.
        """
        class FinalDocument(Document):
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class Animal(FinalDocument):
            name = StringField()

        def create_mammal_class():
            class Mammal(Animal):
                pass
        self.assertRaises(ValueError, create_mammal_class)

        # Check that _cls isn't present in simple documents
        doc = Animal(name='dog')
        self.assertFalse('_cls' in doc.to_mongo())

    def test_allow_inheritance_embedded_document(self):
        """Ensure embedded documents respect inheritance
        """

        class Comment(EmbeddedDocument):
            content = StringField()

        def create_special_comment():
            class SpecialComment(Comment):
                pass

        self.assertRaises(ValueError, create_special_comment)

        doc = Comment(content='test')
        self.assertFalse('_cls' in doc.to_mongo())

        # Redefining Comment with inheritance enabled turns _cls back on.
        class Comment(EmbeddedDocument):
            content = StringField()
            meta = {'allow_inheritance': True}

        doc = Comment(content='test')
        self.assertTrue('_cls' in doc.to_mongo())

    def test_document_inheritance(self):
        """Ensure mutliple inheritance of abstract documents
        """
        class DateCreatedDocument(Document):
            meta = {
                'allow_inheritance': True,
                'abstract': True,
            }

        class DateUpdatedDocument(Document):
            meta = {
                'allow_inheritance': True,
                'abstract': True,
            }

        try:
            class MyDocument(DateCreatedDocument, DateUpdatedDocument):
                pass
        # NOTE(review): bare except — this also swallows KeyboardInterrupt
        # and SystemExit; `except Exception` would be the safer form.
        except:
            self.assertTrue(False, "Couldn't create MyDocument class")

    def test_abstract_documents(self):
        """Ensure that a document superclass can be marked as abstract
        thereby not using it as the name for the collection."""

        # Meta options that concrete subclasses are expected to inherit
        # from the abstract base.
        defaults = {'index_background': True,
                    'index_drop_dups': True,
                    'index_opts': {'hello': 'world'},
                    'allow_inheritance': True,
                    'queryset_class': 'QuerySet',
                    'db_alias': 'myDB',
                    'shard_key': ('hello', 'world')}

        meta_settings = {'abstract': True}
        meta_settings.update(defaults)

        class Animal(Document):
            name = StringField()
            meta = meta_settings

        class Fish(Animal): pass
        class Guppy(Fish): pass

        class Mammal(Animal):
            meta = {'abstract': True}
        class Human(Mammal): pass

        for k, v in defaults.iteritems():
            for cls in [Animal, Fish, Guppy]:
                self.assertEqual(cls._meta[k], v)

        # Abstract classes never get a collection of their own.
        self.assertFalse('collection' in Animal._meta)
        self.assertFalse('collection' in Mammal._meta)

        self.assertEqual(Animal._get_collection_name(), None)
        self.assertEqual(Mammal._get_collection_name(), None)

        # The first concrete class in the hierarchy names the collection.
        self.assertEqual(Fish._get_collection_name(), 'fish')
        self.assertEqual(Guppy._get_collection_name(), 'fish')
        self.assertEqual(Human._get_collection_name(), 'human')

        def create_bad_abstract():
            class EvilHuman(Human):
                evil = BooleanField(default=True)
                meta = {'abstract': True}
        # A concrete class cannot be re-declared abstract further down.
        self.assertRaises(ValueError, create_bad_abstract)

    def test_inherited_collections(self):
        """Ensure that subclassed documents don't override parents'
        collections
        """

        class Drink(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        class Drinker(Document):
            drink = GenericReferenceField()

        try:
            warnings.simplefilter("error")

            # NOTE(review): 'AcloholicDrink' is misspelt, but the test
            # still works because this definition is expected to raise a
            # SyntaxWarning (escalated to an error above), and the except
            # branch below defines the correctly named class that is
            # actually used later.
            class AcloholicDrink(Drink):
                meta = {'collection': 'booze'}

        except SyntaxWarning:
            warnings.simplefilter("ignore")

            class AlcoholicDrink(Drink):
                meta = {'collection': 'booze'}

        else:
            raise AssertionError("SyntaxWarning should be triggered")

        warnings.resetwarnings()

        Drink.drop_collection()
        AlcoholicDrink.drop_collection()
        Drinker.drop_collection()

        red_bull = Drink(name='Red Bull')
        red_bull.save()

        programmer = Drinker(drink=red_bull)
        programmer.save()

        beer = AlcoholicDrink(name='Beer')
        beer.save()
        real_person = Drinker(drink=beer)
        real_person.save()

        # GenericReferenceField resolves each reference to the class it
        # was saved as, despite the subclass using its own collection.
        self.assertEqual(Drinker.objects[0].drink.name, red_bull.name)
        self.assertEqual(Drinker.objects[1].drink.name, beer.name)
|  | ||||
|  | ||||
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
							
								
								
									
										2456
									
								
								tests/document/instance.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										2456
									
								
								tests/document/instance.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										85
									
								
								tests/document/json_serialisation.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										85
									
								
								tests/document/json_serialisation.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,85 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import unittest | ||||
| import uuid | ||||
|  | ||||
| from nose.plugins.skip import SkipTest | ||||
| from datetime import datetime | ||||
| from bson import ObjectId | ||||
|  | ||||
| import pymongo | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
| __all__ = ("TestJson",) | ||||
|  | ||||
|  | ||||
class TestJson(unittest.TestCase):
    """Round-trip tests for Document JSON serialisation
    (``to_json`` / ``from_json``).
    """

    def setUp(self):
        connect(db='mongoenginetest')

    def test_json_simple(self):
        """A document with an embedded document serialises to plain JSON
        and survives a to_json/from_json round trip.
        """
        class Embedded(EmbeddedDocument):
            string = StringField()

        class Doc(Document):
            string = StringField()
            embedded_field = EmbeddedDocumentField(Embedded)

        doc = Doc(string="Hi", embedded_field=Embedded(string="Hi"))

        # sort_keys + compact separators make the output deterministic.
        doc_json = doc.to_json(sort_keys=True, separators=(',', ':'))
        expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}"""
        self.assertEqual(doc_json, expected_json)

        self.assertEqual(doc, Doc.from_json(doc.to_json()))

    def test_json_complex(self):
        """A document exercising every field type survives a JSON round
        trip.
        """
        # Compare full version tuples.  The original element-wise check
        # (major <= 2 AND minor <= 3) wrongly let e.g. pymongo 1.9
        # through, since its minor component is > 3 even though 1.9
        # predates the 2.4 DBRef fix.
        if pymongo.version_tuple[:2] < (2, 4):
            raise SkipTest("Need pymongo 2.4 as has a fix for DBRefs")

        class EmbeddedDoc(EmbeddedDocument):
            pass

        class Simple(Document):
            pass

        class Doc(Document):
            string_field = StringField(default='1')
            int_field = IntField(default=1)
            float_field = FloatField(default=1.1)
            boolean_field = BooleanField(default=True)
            datetime_field = DateTimeField(default=datetime.now)
            embedded_document_field = EmbeddedDocumentField(EmbeddedDoc,
                                        default=lambda: EmbeddedDoc())
            list_field = ListField(default=lambda: [1, 2, 3])
            dict_field = DictField(default=lambda: {"hello": "world"})
            objectid_field = ObjectIdField(default=ObjectId)
            reference_field = ReferenceField(Simple, default=lambda:
                                                        Simple().save())
            map_field = MapField(IntField(), default=lambda: {"simple": 1})
            decimal_field = DecimalField(default=1.0)
            complex_datetime_field = ComplexDateTimeField(default=datetime.now)
            url_field = URLField(default="http://mongoengine.org")
            dynamic_field = DynamicField(default=1)
            generic_reference_field = GenericReferenceField(
                                            default=lambda: Simple().save())
            sorted_list_field = SortedListField(IntField(),
                                                default=lambda: [1, 2, 3])
            email_field = EmailField(default="ross@example.com")
            geo_point_field = GeoPointField(default=lambda: [1, 2])
            sequence_field = SequenceField()
            uuid_field = UUIDField(default=uuid.uuid4)
            generic_embedded_document_field = GenericEmbeddedDocumentField(
                                        default=lambda: EmbeddedDoc())

        doc = Doc()
        self.assertEqual(doc, Doc.from_json(doc.to_json()))
|  | ||||
|  | ||||
if __name__ == '__main__':
    # Allow running this test module directly via unittest's CLI runner.
    unittest.main()
							
								
								
									
										146
									
								
								tests/document/validation.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										146
									
								
								tests/document/validation.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,146 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import unittest | ||||
| from datetime import datetime | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
| __all__ = ("ValidatorErrorTest",) | ||||
|  | ||||
|  | ||||
class ValidatorErrorTest(unittest.TestCase):
    """Tests for ValidationError aggregation, formatting and raising."""

    def setUp(self):
        connect(db='mongoenginetest')

    def test_to_dict(self):
        """Ensure a ValidationError handles error to_dict correctly.
        """
        error = ValidationError('root')
        # No nested errors yet -> empty dict.
        self.assertEqual(error.to_dict(), {})

        # 1st level error schema
        error.errors = {'1st': ValidationError('bad 1st'), }
        self.assertTrue('1st' in error.to_dict())
        self.assertEqual(error.to_dict()['1st'], 'bad 1st')

        # 2nd level error schema
        error.errors = {'1st': ValidationError('bad 1st', errors={
            '2nd': ValidationError('bad 2nd'),
        })}
        self.assertTrue('1st' in error.to_dict())
        self.assertTrue(isinstance(error.to_dict()['1st'], dict))
        self.assertTrue('2nd' in error.to_dict()['1st'])
        self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd')

        # moar levels
        error.errors = {'1st': ValidationError('bad 1st', errors={
            '2nd': ValidationError('bad 2nd', errors={
                '3rd': ValidationError('bad 3rd', errors={
                    '4th': ValidationError('Inception'),
                }),
            }),
        })}
        self.assertTrue('1st' in error.to_dict())
        self.assertTrue('2nd' in error.to_dict()['1st'])
        self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
        self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
        self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'],
                         'Inception')

        self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])")

    def test_model_validation(self):
        """validate()/save() report the failing document and per-field map."""

        class User(Document):
            username = StringField(primary_key=True)
            name = StringField(required=True)

        try:
            User().validate()
            # BUG FIX: previously the test passed silently if no error
            # was raised; a missing required field must fail validation.
            self.fail("ValidationError was not raised")
        except ValidationError as e:
            self.assertTrue("User:None" in e.message)
            self.assertEqual(e.to_dict(), {
                'username': 'Field is required',
                'name': 'Field is required'})

        user = User(username="RossC0", name="Ross").save()
        user.name = None
        try:
            user.save()
            self.fail("ValidationError was not raised")
        except ValidationError as e:
            self.assertTrue("User:RossC0" in e.message)
            self.assertEqual(e.to_dict(), {
                'name': 'Field is required'})

    def test_fields_rewrite(self):
        """A subclass may tighten an inherited field (e.g. make it required)."""
        class BasePerson(Document):
            name = StringField()
            age = IntField()
            meta = {'abstract': True}

        class Person(BasePerson):
            name = StringField(required=True)

        p = Person(age=15)
        self.assertRaises(ValidationError, p.validate)

    def test_embedded_document_validation(self):
        """Ensure that embedded documents may be validated.
        """
        class Comment(EmbeddedDocument):
            date = DateTimeField()
            content = StringField(required=True)

        comment = Comment()
        self.assertRaises(ValidationError, comment.validate)

        comment.content = 'test'
        comment.validate()

        comment.date = 4
        self.assertRaises(ValidationError, comment.validate)

        comment.date = datetime.now()
        comment.validate()
        # A free-standing embedded document has no owning document.
        self.assertEqual(comment._instance, None)

    def test_embedded_db_field_validate(self):
        """Embedded-field errors are keyed by the python field name ('e'),
        not the db_field ('eb'), and carry the embedded document identity.
        """

        class SubDoc(EmbeddedDocument):
            val = IntField(required=True)

        class Doc(Document):
            id = StringField(primary_key=True)
            e = EmbeddedDocumentField(SubDoc, db_field='eb')

        try:
            Doc(id="bad").validate()
            # NOTE(review): 'e' is optional here, so validate() may not
            # raise at all and this except block may be dead code (its
            # expected message looks copied from the save() case below)
            # -- confirm before tightening with self.fail().
        except ValidationError as e:
            self.assertTrue("SubDoc:None" in e.message)
            self.assertEqual(e.to_dict(), {
                "e": {'val': 'OK could not be converted to int'}})

        Doc.drop_collection()

        Doc(id="test", e=SubDoc(val=15)).save()

        doc = Doc.objects.first()
        keys = doc._data.keys()
        self.assertEqual(2, len(keys))
        self.assertTrue('e' in keys)
        self.assertTrue('id' in keys)

        doc.e.val = "OK"
        try:
            doc.save()
            # BUG FIX: silent pass before; a non-int value must fail.
            self.fail("ValidationError was not raised")
        except ValidationError as e:
            self.assertTrue("Doc:test" in e.message)
            self.assertEqual(e.to_dict(), {
                "e": {'val': 'OK could not be converted to int'}})
|  | ||||
|  | ||||
if __name__ == '__main__':
    # Allow running this test module directly via unittest's CLI runner.
    unittest.main()
							
								
								
									
										3
									
								
								tests/fields/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								tests/fields/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | ||||
| from fields import * | ||||
| from file_tests import * | ||||
| from geo import * | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										486
									
								
								tests/fields/file_tests.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										486
									
								
								tests/fields/file_tests.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,486 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import copy | ||||
| import os | ||||
| import unittest | ||||
| import tempfile | ||||
|  | ||||
| import gridfs | ||||
|  | ||||
| from nose.plugins.skip import SkipTest | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.python_support import PY3, b, StringIO | ||||
|  | ||||
try:
    from PIL import Image
    HAS_PIL = True
except ImportError:
    # PIL/Pillow is optional: image-field tests skip themselves when absent.
    HAS_PIL = False

# Fixture images shipped next to this test module (used by the image tests).
TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png')
|  | ||||
|  | ||||
class FileTest(unittest.TestCase):
    """Tests for FileField / ImageField storage and retrieval via GridFS."""

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        # Drop the GridFS backing collections so each test starts clean.
        self.db.drop_collection('fs.files')
        self.db.drop_collection('fs.chunks')

    def test_file_field_optional(self):
        # Make sure FileField is optional and not required
        class DemoFile(Document):
            the_file = FileField()
        DemoFile.objects.create()

    def test_file_fields(self):
        """Ensure that file fields can be written to and their data retrieved
        """

        class PutFile(Document):
            the_file = FileField()

        PutFile.drop_collection()

        text = b('Hello, World!')
        content_type = 'text/plain'

        putfile = PutFile()
        putfile.the_file.put(text, content_type=content_type, filename="hello")
        putfile.save()

        result = PutFile.objects.first()
        self.assertTrue(putfile == result)
        self.assertEqual("%s" % result.the_file, "<GridFSProxy: hello>")
        self.assertEqual(result.the_file.read(), text)
        self.assertEqual(result.the_file.content_type, content_type)
        result.the_file.delete()  # Remove file from GridFS
        PutFile.objects.delete()

        # Ensure file-like objects are stored
        PutFile.drop_collection()

        putfile = PutFile()
        putstring = StringIO()
        putstring.write(text)
        putstring.seek(0)
        putfile.the_file.put(putstring, content_type=content_type)
        putfile.save()

        result = PutFile.objects.first()
        self.assertTrue(putfile == result)
        self.assertEqual(result.the_file.read(), text)
        self.assertEqual(result.the_file.content_type, content_type)
        result.the_file.delete()

    def test_file_fields_stream(self):
        """Ensure that file fields can be written to and their data retrieved
        """
        class StreamFile(Document):
            the_file = FileField()

        StreamFile.drop_collection()

        text = b('Hello, World!')
        more_text = b('Foo Bar')
        content_type = 'text/plain'

        streamfile = StreamFile()
        streamfile.the_file.new_file(content_type=content_type)
        streamfile.the_file.write(text)
        streamfile.the_file.write(more_text)
        streamfile.the_file.close()
        streamfile.save()

        result = StreamFile.objects.first()
        self.assertTrue(streamfile == result)
        self.assertEqual(result.the_file.read(), text + more_text)
        self.assertEqual(result.the_file.content_type, content_type)
        result.the_file.seek(0)
        self.assertEqual(result.the_file.tell(), 0)
        self.assertEqual(result.the_file.read(len(text)), text)
        self.assertEqual(result.the_file.tell(), len(text))
        self.assertEqual(result.the_file.read(len(more_text)), more_text)
        self.assertEqual(result.the_file.tell(), len(text + more_text))
        result.the_file.delete()

        # Ensure deleted file returns None (idiom fix: `is None`, not `== None`)
        self.assertTrue(result.the_file.read() is None)

    def test_file_fields_set(self):
        """Assigning raw bytes to a FileField stores and replaces content."""

        class SetFile(Document):
            the_file = FileField()

        text = b('Hello, World!')
        more_text = b('Foo Bar')

        SetFile.drop_collection()

        setfile = SetFile()
        setfile.the_file = text
        setfile.save()

        result = SetFile.objects.first()
        self.assertTrue(setfile == result)
        self.assertEqual(result.the_file.read(), text)

        # Try replacing file with new one
        result.the_file.replace(more_text)
        result.save()

        result = SetFile.objects.first()
        self.assertTrue(setfile == result)
        self.assertEqual(result.the_file.read(), more_text)
        result.the_file.delete()

    def test_file_field_no_default(self):
        """Documents saved without file content can have it attached later."""

        class GridDocument(Document):
            the_file = FileField()

        GridDocument.drop_collection()

        with tempfile.TemporaryFile() as f:
            f.write(b("Hello World!"))
            f.flush()

            # Test without default
            doc_a = GridDocument()
            doc_a.save()

            doc_b = GridDocument.objects.with_id(doc_a.id)
            doc_b.the_file.replace(f, filename='doc_b')
            doc_b.save()
            self.assertNotEqual(doc_b.the_file.grid_id, None)

            # Test it matches
            doc_c = GridDocument.objects.with_id(doc_b.id)
            self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id)

            # Test with default
            doc_d = GridDocument(the_file=b(''))
            doc_d.save()

            doc_e = GridDocument.objects.with_id(doc_d.id)
            self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id)

            doc_e.the_file.replace(f, filename='doc_e')
            doc_e.save()

            doc_f = GridDocument.objects.with_id(doc_e.id)
            self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id)

        db = GridDocument._get_db()
        grid_fs = gridfs.GridFS(db)
        self.assertEqual(['doc_b', 'doc_e'], grid_fs.list())

    def test_file_uniqueness(self):
        """Ensure that each instance of a FileField is unique
        """
        class TestFile(Document):
            name = StringField()
            the_file = FileField()

        # First instance
        test_file = TestFile()
        test_file.name = "Hello, World!"
        test_file.the_file.put(b('Hello, World!'))
        test_file.save()

        # Second instance
        test_file_dupe = TestFile()
        data = test_file_dupe.the_file.read()  # Should be None

        self.assertTrue(test_file.name != test_file_dupe.name)
        self.assertTrue(test_file.the_file.read() != data)

        TestFile.drop_collection()

    def test_file_saving(self):
        """Ensure you can add meta data to file"""

        class Animal(Document):
            genus = StringField()
            family = StringField()
            photo = FileField()

        Animal.drop_collection()
        marmot = Animal(genus='Marmota', family='Sciuridae')

        marmot_photo = open(TEST_IMAGE_PATH, 'rb')  # Retrieve a photo from disk
        marmot.photo.put(marmot_photo, content_type='image/jpeg', foo='bar')
        marmot.photo.close()
        marmot.save()

        marmot = Animal.objects.get()
        self.assertEqual(marmot.photo.content_type, 'image/jpeg')
        self.assertEqual(marmot.photo.foo, 'bar')

    def test_file_reassigning(self):
        """Assigning a new file over an existing one replaces the content."""
        class TestFile(Document):
            the_file = FileField()
        TestFile.drop_collection()

        test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save()
        self.assertEqual(test_file.the_file.get().length, 8313)

        test_file = TestFile.objects.first()
        test_file.the_file = open(TEST_IMAGE2_PATH, 'rb')
        test_file.save()
        self.assertEqual(test_file.the_file.get().length, 4971)

    def test_file_boolean(self):
        """Ensure that a boolean test of a FileField indicates its presence
        """
        class TestFile(Document):
            the_file = FileField()
        TestFile.drop_collection()

        test_file = TestFile()
        self.assertFalse(bool(test_file.the_file))
        test_file.the_file.put(b('Hello, World!'), content_type='text/plain')
        test_file.save()
        self.assertTrue(bool(test_file.the_file))

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.content_type, "text/plain")

    def test_file_cmp(self):
        """Test comparing against other types"""
        class TestFile(Document):
            the_file = FileField()

        test_file = TestFile()
        self.assertFalse(test_file.the_file in [{"test": 1}])

    def test_image_field(self):
        """ImageField rejects non-image data and stores real images."""
        if not HAS_PIL:
            raise SkipTest('PIL not installed')

        class TestImage(Document):
            image = ImageField()

        TestImage.drop_collection()

        with tempfile.TemporaryFile() as f:
            f.write(b("Hello World!"))
            f.flush()

            t = TestImage()
            try:
                t.image.put(f)
                self.fail("Should have raised an invalidation error")
            except ValidationError as e:
                self.assertEqual("%s" % e, "Invalid image: cannot identify image file")

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, 'PNG')

        w, h = t.image.size
        self.assertEqual(w, 371)
        self.assertEqual(h, 76)

        t.image.delete()

    def test_image_field_reassigning(self):
        """Replacing the stored image updates the reported size."""
        if not HAS_PIL:
            raise SkipTest('PIL not installed')

        class TestFile(Document):
            the_file = ImageField()
        TestFile.drop_collection()

        test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save()
        self.assertEqual(test_file.the_file.size, (371, 76))

        test_file = TestFile.objects.first()
        test_file.the_file = open(TEST_IMAGE2_PATH, 'rb')
        test_file.save()
        self.assertEqual(test_file.the_file.size, (45, 101))

    def test_image_field_resize(self):
        """`size` constrains the stored image's dimensions."""
        if not HAS_PIL:
            raise SkipTest('PIL not installed')

        class TestImage(Document):
            image = ImageField(size=(185, 37))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, 'PNG')
        w, h = t.image.size

        self.assertEqual(w, 185)
        self.assertEqual(h, 37)

        t.image.delete()

    def test_image_field_resize_force(self):
        """`size` with the force flag rescales to the exact dimensions."""
        if not HAS_PIL:
            raise SkipTest('PIL not installed')

        class TestImage(Document):
            image = ImageField(size=(185, 37, True))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, 'PNG')
        w, h = t.image.size

        self.assertEqual(w, 185)
        self.assertEqual(h, 37)

        t.image.delete()

    def test_image_field_thumbnail(self):
        """`thumbnail_size` stores a thumbnail alongside the main image."""
        if not HAS_PIL:
            raise SkipTest('PIL not installed')

        class TestImage(Document):
            image = ImageField(thumbnail_size=(92, 18))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.thumbnail.format, 'PNG')
        self.assertEqual(t.image.thumbnail.width, 92)
        self.assertEqual(t.image.thumbnail.height, 18)

        t.image.delete()

    def test_file_multidb(self):
        """FileField honours a custom db_alias and collection_name."""
        register_connection('test_files', 'test_files')

        class TestFile(Document):
            name = StringField()
            the_file = FileField(db_alias="test_files",
                                 collection_name="macumba")

        TestFile.drop_collection()

        # delete old filesystem
        get_db("test_files").macumba.files.drop()
        get_db("test_files").macumba.chunks.drop()

        # First instance
        test_file = TestFile()
        test_file.name = "Hello, World!"
        test_file.the_file.put(b('Hello, World!'),
                          name="hello.txt")
        test_file.save()

        data = get_db("test_files").macumba.files.find_one()
        self.assertEqual(data.get('name'), 'hello.txt')

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.read(),
                          b('Hello, World!'))

        test_file = TestFile.objects.first()
        test_file.the_file = b('HELLO, WORLD!')
        test_file.save()

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.read(),
                          b('HELLO, WORLD!'))

    def test_copyable(self):
        """Documents holding GridFS proxies support copy and deepcopy."""
        class PutFile(Document):
            the_file = FileField()

        PutFile.drop_collection()

        text = b('Hello, World!')
        content_type = 'text/plain'

        putfile = PutFile()
        putfile.the_file.put(text, content_type=content_type)
        putfile.save()

        class TestFile(Document):
            name = StringField()

        self.assertEqual(putfile, copy.copy(putfile))
        self.assertEqual(putfile, copy.deepcopy(putfile))

    def test_get_image_by_grid_id(self):
        """Images can be looked up by their raw grid_id."""

        if not HAS_PIL:
            raise SkipTest('PIL not installed')

        class TestImage(Document):

            image1 = ImageField()
            image2 = ImageField()

        TestImage.drop_collection()

        t = TestImage()
        t.image1.put(open(TEST_IMAGE_PATH, 'rb'))
        t.image2.put(open(TEST_IMAGE2_PATH, 'rb'))
        t.save()

        test = TestImage.objects.first()
        grid_id = test.image1.grid_id

        # BUG FIX: `Q(...) or Q(...)` short-circuits to the first (truthy)
        # Q object, so only image1 was ever queried; `|` builds the
        # intended OR query across both fields.
        self.assertEqual(1, TestImage.objects(Q(image1=grid_id) |
                                              Q(image2=grid_id)).count())

    def test_complex_field_filefield(self):
        """Ensure you can add meta data to file"""

        class Animal(Document):
            genus = StringField()
            family = StringField()
            photos = ListField(FileField())

        Animal.drop_collection()
        marmot = Animal(genus='Marmota', family='Sciuridae')

        marmot_photo = open(TEST_IMAGE_PATH, 'rb')  # Retrieve a photo from disk

        photos_field = marmot._fields['photos'].field
        new_proxy = photos_field.get_proxy_obj('photos', marmot)
        new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar')
        marmot_photo.close()

        marmot.photos.append(new_proxy)
        marmot.save()

        marmot = Animal.objects.get()
        self.assertEqual(marmot.photos[0].content_type, 'image/jpeg')
        self.assertEqual(marmot.photos[0].foo, 'bar')
        self.assertEqual(marmot.photos[0].get().length, 8313)
|  | ||||
if __name__ == '__main__':
    # Allow running this test module directly via unittest's CLI runner.
    unittest.main()
							
								
								
									
										280
									
								
								tests/fields/geo.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										280
									
								
								tests/fields/geo.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,280 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
|  | ||||
| __all__ = ("GeoFieldTest", ) | ||||
|  | ||||
|  | ||||
| class GeoFieldTest(unittest.TestCase): | ||||
|  | ||||
    def setUp(self):
        # Connect once per test; get_db() returns the default-alias database.
        connect(db='mongoenginetest')
        self.db = get_db()
|  | ||||
|     def _test_for_expected_error(self, Cls, loc, expected): | ||||
|         try: | ||||
|             Cls(loc=loc).validate() | ||||
|             self.fail() | ||||
|         except ValidationError, e: | ||||
|             self.assertEqual(expected, e.to_dict()['loc']) | ||||
|  | ||||
|     def test_geopoint_validation(self): | ||||
|         class Location(Document): | ||||
|             loc = GeoPointField() | ||||
|  | ||||
|         invalid_coords = [{"x": 1, "y": 2}, 5, "a"] | ||||
|         expected = 'GeoPointField can only accept tuples or lists of (x, y)' | ||||
|  | ||||
|         for coord in invalid_coords: | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
|  | ||||
|         invalid_coords = [[], [1], [1, 2, 3]] | ||||
|         for coord in invalid_coords: | ||||
|             expected = "Value (%s) must be a two-dimensional point" % repr(coord) | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
|  | ||||
|         invalid_coords = [[{}, {}], ("a", "b")] | ||||
|         for coord in invalid_coords: | ||||
|             expected = "Both values (%s) in point must be float or int" % repr(coord) | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
|  | ||||
|     def test_point_validation(self): | ||||
|         class Location(Document): | ||||
|             loc = PointField() | ||||
|  | ||||
|         invalid_coords = {"x": 1, "y": 2} | ||||
|         expected = 'PointField can only accept a valid GeoJson dictionary or lists of (x, y)' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = {"type": "MadeUp", "coordinates": []} | ||||
|         expected = 'PointField type must be "Point"' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = {"type": "Point", "coordinates": [1, 2, 3]} | ||||
|         expected = "Value ([1, 2, 3]) must be a two-dimensional point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [5, "a"] | ||||
|         expected = "PointField can only accept lists of [x, y]" | ||||
|         for coord in invalid_coords: | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
|  | ||||
|         invalid_coords = [[], [1], [1, 2, 3]] | ||||
|         for coord in invalid_coords: | ||||
|             expected = "Value (%s) must be a two-dimensional point" % repr(coord) | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
|  | ||||
|         invalid_coords = [[{}, {}], ("a", "b")] | ||||
|         for coord in invalid_coords: | ||||
|             expected = "Both values (%s) in point must be float or int" % repr(coord) | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
|  | ||||
|         Location(loc=[1, 2]).validate() | ||||
|         Location(loc={ | ||||
|             "type": "Point", | ||||
|             "coordinates": [ | ||||
|               81.4471435546875, | ||||
|               23.61432859499169 | ||||
|             ]}).validate() | ||||
|  | ||||
|     def test_linestring_validation(self): | ||||
|         class Location(Document): | ||||
|             loc = LineStringField() | ||||
|  | ||||
|         invalid_coords = {"x": 1, "y": 2} | ||||
|         expected = 'LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} | ||||
|         expected = 'LineStringField type must be "LineString"' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]} | ||||
|         expected = "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [5, "a"] | ||||
|         expected = "Invalid LineString must contain at least one valid point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[1]] | ||||
|         expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0]) | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[1, 2, 3]] | ||||
|         expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0]) | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[[{}, {}]], [("a", "b")]] | ||||
|         for coord in invalid_coords: | ||||
|             expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0]) | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
|  | ||||
|         Location(loc=[[1, 2], [3, 4], [5, 6], [1,2]]).validate() | ||||
|  | ||||
|     def test_polygon_validation(self): | ||||
|         class Location(Document): | ||||
|             loc = PolygonField() | ||||
|  | ||||
|         invalid_coords = {"x": 1, "y": 2} | ||||
|         expected = 'PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} | ||||
|         expected = 'PolygonField type must be "Polygon"' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = {"type": "Polygon", "coordinates": [[[1, 2, 3]]]} | ||||
|         expected = "Invalid Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[[5, "a"]]] | ||||
|         expected = "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[[]]] | ||||
|         expected = "Invalid Polygon must contain at least one valid linestring" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[[1, 2, 3]]] | ||||
|         expected = "Invalid Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[[{}, {}]], [("a", "b")]] | ||||
|         expected = "Invalid Polygon:\nBoth values ([{}, {}]) in point must be float or int, Both values (('a', 'b')) in point must be float or int" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         invalid_coords = [[[1, 2], [3, 4]]] | ||||
|         expected = "Invalid Polygon:\nLineStrings must start and end at the same point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
|  | ||||
|         Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate() | ||||
|  | ||||
|     def test_indexes_geopoint(self): | ||||
|         """Ensure that indexes are created automatically for GeoPointFields. | ||||
|         """ | ||||
|         class Event(Document): | ||||
|             title = StringField() | ||||
|             location = GeoPointField() | ||||
|  | ||||
|         geo_indicies = Event._geo_indices() | ||||
|         self.assertEqual(geo_indicies, [{'fields': [('location', '2d')]}]) | ||||
|  | ||||
|     def test_geopoint_embedded_indexes(self): | ||||
|         """Ensure that indexes are created automatically for GeoPointFields on | ||||
|         embedded documents. | ||||
|         """ | ||||
|         class Venue(EmbeddedDocument): | ||||
|             location = GeoPointField() | ||||
|             name = StringField() | ||||
|  | ||||
|         class Event(Document): | ||||
|             title = StringField() | ||||
|             venue = EmbeddedDocumentField(Venue) | ||||
|  | ||||
|         geo_indicies = Event._geo_indices() | ||||
|         self.assertEqual(geo_indicies, [{'fields': [('venue.location', '2d')]}]) | ||||
|  | ||||
|     def test_indexes_2dsphere(self): | ||||
|         """Ensure that indexes are created automatically for GeoPointFields. | ||||
|         """ | ||||
|         class Event(Document): | ||||
|             title = StringField() | ||||
|             point = PointField() | ||||
|             line = LineStringField() | ||||
|             polygon = PolygonField() | ||||
|  | ||||
|         geo_indicies = Event._geo_indices() | ||||
|         self.assertTrue({'fields': [('line', '2dsphere')]} in geo_indicies) | ||||
|         self.assertTrue({'fields': [('polygon', '2dsphere')]} in geo_indicies) | ||||
|         self.assertTrue({'fields': [('point', '2dsphere')]} in geo_indicies) | ||||
|  | ||||
|     def test_indexes_2dsphere_embedded(self): | ||||
|         """Ensure that indexes are created automatically for GeoPointFields. | ||||
|         """ | ||||
|         class Venue(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|             point = PointField() | ||||
|             line = LineStringField() | ||||
|             polygon = PolygonField() | ||||
|  | ||||
|         class Event(Document): | ||||
|             title = StringField() | ||||
|             venue = EmbeddedDocumentField(Venue) | ||||
|  | ||||
|         geo_indicies = Event._geo_indices() | ||||
|         self.assertTrue({'fields': [('venue.line', '2dsphere')]} in geo_indicies) | ||||
|         self.assertTrue({'fields': [('venue.polygon', '2dsphere')]} in geo_indicies) | ||||
|         self.assertTrue({'fields': [('venue.point', '2dsphere')]} in geo_indicies) | ||||
|  | ||||
|     def test_geo_indexes_recursion(self): | ||||
|  | ||||
|         class Location(Document): | ||||
|             name = StringField() | ||||
|             location = GeoPointField() | ||||
|  | ||||
|         class Parent(Document): | ||||
|             name = StringField() | ||||
|             location = ReferenceField(Location) | ||||
|  | ||||
|         Location.drop_collection() | ||||
|         Parent.drop_collection() | ||||
|  | ||||
|         list(Parent.objects) | ||||
|  | ||||
|         collection = Parent._get_collection() | ||||
|         info = collection.index_information() | ||||
|  | ||||
|         self.assertFalse('location_2d' in info) | ||||
|  | ||||
|         self.assertEqual(len(Parent._geo_indices()), 0) | ||||
|         self.assertEqual(len(Location._geo_indices()), 1) | ||||
|  | ||||
|     def test_geo_indexes_auto_index(self): | ||||
|  | ||||
|         # Test just listing the fields | ||||
|         class Log(Document): | ||||
|             location = PointField(auto_index=False) | ||||
|             datetime = DateTimeField() | ||||
|  | ||||
|             meta = { | ||||
|                 'indexes': [[("location", "2dsphere"), ("datetime", 1)]] | ||||
|             } | ||||
|  | ||||
|         self.assertEqual([], Log._geo_indices()) | ||||
|  | ||||
|         Log.drop_collection() | ||||
|         Log.ensure_indexes() | ||||
|  | ||||
|         info = Log._get_collection().index_information() | ||||
|         self.assertEqual(info["location_2dsphere_datetime_1"]["key"], | ||||
|                          [('location', '2dsphere'), ('datetime', 1)]) | ||||
|  | ||||
|         # Test listing explicitly | ||||
|         class Log(Document): | ||||
|             location = PointField(auto_index=False) | ||||
|             datetime = DateTimeField() | ||||
|  | ||||
|             meta = { | ||||
|                 'indexes': [ | ||||
|                     {'fields': [("location", "2dsphere"), ("datetime", 1)]} | ||||
|                 ] | ||||
|             } | ||||
|  | ||||
|         self.assertEqual([], Log._geo_indices()) | ||||
|  | ||||
|         Log.drop_collection() | ||||
|         Log.ensure_indexes() | ||||
|  | ||||
|         info = Log._get_collection().index_information() | ||||
|         self.assertEqual(info["location_2dsphere_datetime_1"]["key"], | ||||
|                          [('location', '2dsphere'), ('datetime', 1)]) | ||||
|  | ||||
|  | ||||
# Allow running this test module directly (e.g. ``python tests/fields/...``).
if __name__ == '__main__':
    unittest.main()
							
								
								
									
										
											BIN
										
									
								
								tests/fields/mongodb_leaf.png
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tests/fields/mongodb_leaf.png
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 4.9 KiB | 
| Before Width: | Height: | Size: 8.1 KiB After Width: | Height: | Size: 8.1 KiB | 
| @@ -1,6 +1,8 @@ | ||||
| import pickle | ||||
| from datetime import datetime | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine import signals | ||||
|  | ||||
|  | ||||
| class PickleEmbedded(EmbeddedDocument): | ||||
| @@ -15,6 +17,32 @@ class PickleTest(Document): | ||||
|     photo = FileField() | ||||
|  | ||||
|  | ||||
class PickleDyanmicEmbedded(DynamicEmbeddedDocument):
    """Dynamic embedded document fixture for the pickling tests.

    NOTE(review): the class name misspells "Dynamic"; it is left as-is
    because pickle resolves classes by name, so renaming would break any
    existing pickles and the tests that reference this name.
    """
    date = DateTimeField(default=datetime.now)
|  | ||||
|  | ||||
class PickleDynamicTest(DynamicDocument):
    """Dynamic document fixture for the pickling tests."""
    number = IntField()
|  | ||||
|  | ||||
class PickleSignalsTest(Document):
    """Document whose signal handlers pickle the document, verifying that
    documents remain picklable from within signal callbacks.

    The ``pickle.dumps`` call itself is the assertion: it must not raise.
    """
    number = IntField()
    string = StringField(choices=(('One', '1'), ('Two', '2')))
    embedded = EmbeddedDocumentField(PickleEmbedded)
    lists = ListField(StringField())

    # Fix: the first parameter of a @classmethod is the class, so it is
    # conventionally named ``cls``, not ``self``; the unused ``pickled``
    # local is dropped.
    @classmethod
    def post_save(cls, sender, document, created, **kwargs):
        pickle.dumps(document)

    @classmethod
    def post_delete(cls, sender, document, **kwargs):
        pickle.dumps(document)

signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest)
signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest)
|  | ||||
|  | ||||
class Mixin(object):
    # Plain (non-Document) mixin contributing a shared ``name`` field.
    name = StringField()
|  | ||||
|   | ||||
							
								
								
									
										8
									
								
								tests/migration/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										8
									
								
								tests/migration/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,8 @@ | ||||
"""Aggregates the migration example test cases for discovery.

Fix: ``unittest`` was used below but never imported. Each submodule
restricts its exports with ``__all__`` to its TestCase class, so the
star imports do not bring ``unittest`` into scope and running this
module directly raised NameError.
"""
import unittest

from convert_to_new_inheritance_model import *
from decimalfield_as_float import *
from refrencefield_dbref_to_object_id import *
from turn_off_inheritance import *
from uuidfield_to_binary import *

if __name__ == '__main__':
    unittest.main()
							
								
								
									
										51
									
								
								tests/migration/convert_to_new_inheritance_model.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										51
									
								
								tests/migration/convert_to_new_inheritance_model.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,51 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import Document, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import StringField | ||||
|  | ||||
| __all__ = ('ConvertToNewInheritanceModel', ) | ||||
|  | ||||
|  | ||||
class ConvertToNewInheritanceModel(unittest.TestCase):
    """Documents the 0.7 -> 0.8 migration to the new inheritance model."""

    def setUp(self):
        # Every migration example runs against a scratch database.
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        # Drop every non-system collection so test runs stay independent.
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_how_to_convert_to_the_new_inheritance_model(self):
        """Demonstrates migrating from 0.7 to 0.8
        """

        # 1. Declaration of the class
        class Animal(Document):
            name = StringField()
            meta = {
                'allow_inheritance': True,
                'indexes': ['name']
            }

        # 2. Remove _types (no longer written by 0.8)
        collection = Animal._get_collection()
        collection.update({}, {"$unset": {"_types": 1}}, multi=True)

        # 3. Confirm extra data is removed
        count = collection.find({'_types': {"$exists": True}}).count()
        self.assertEqual(0, count)

        # 4. Remove indexes that still reference the dropped _types key.
        # NOTE: iteritems() is Python 2 only, consistent with this test suite.
        info = collection.index_information()
        indexes_to_drop = [key for key, value in info.iteritems()
                           if '_types' in dict(value['key'])]
        for index in indexes_to_drop:
            collection.drop_index(index)

        # 5. Recreate indexes
        Animal.ensure_indexes()
							
								
								
									
										50
									
								
								tests/migration/decimalfield_as_float.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										50
									
								
								tests/migration/decimalfield_as_float.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,50 @@ | ||||
|  # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
| import decimal | ||||
| from decimal import Decimal | ||||
|  | ||||
| from mongoengine import Document, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import StringField, DecimalField, ListField | ||||
|  | ||||
| __all__ = ('ConvertDecimalField', ) | ||||
|  | ||||
|  | ||||
class ConvertDecimalField(unittest.TestCase):
    """Documents the 0.7 -> 0.8 migration of string-backed DecimalFields
    to floats stored in MongoDB."""

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    # Fix: added for consistency with the sibling migration TestCases;
    # previously this class left its collections behind after the run.
    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_how_to_convert_decimal_fields(self):
        """Demonstrates migrating from 0.7 to 0.8
        """

        # 1. Old definition - decimals stored as strings
        class Person(Document):
            name = StringField()
            money = DecimalField(force_string=True)
            monies = ListField(DecimalField(force_string=True))

        Person.drop_collection()
        Person(name="Wilson Jr", money=Decimal("2.50"),
               monies=[Decimal("2.10"), Decimal("5.00")]).save()

        # 2. Start the migration by changing the schema
        # Change DecimalField - add precision and rounding settings
        class Person(Document):
            name = StringField()
            money = DecimalField(precision=2, rounding=decimal.ROUND_HALF_UP)
            monies = ListField(DecimalField(precision=2,
                                            rounding=decimal.ROUND_HALF_UP))

        # 3. Loop all the objects and mark the decimal fields as changed
        for p in Person.objects:
            p._mark_as_changed('money')
            p._mark_as_changed('monies')
            p.save()

        # 4. Confirmation of the fix!
        wilson = Person.objects(name="Wilson Jr").as_pymongo()[0]
        self.assertTrue(isinstance(wilson['money'], float))
        self.assertTrue(all([isinstance(m, float) for m in wilson['monies']]))
							
								
								
									
										52
									
								
								tests/migration/refrencefield_dbref_to_object_id.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										52
									
								
								tests/migration/refrencefield_dbref_to_object_id.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,52 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import Document, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import StringField, ReferenceField, ListField | ||||
|  | ||||
| __all__ = ('ConvertToObjectIdsModel', ) | ||||
|  | ||||
|  | ||||
class ConvertToObjectIdsModel(unittest.TestCase):
    """Documents the 0.7 -> 0.8 migration of DBRef references to plain
    ObjectIds."""

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    # Fix: added for consistency with the sibling migration TestCases;
    # previously this class left its collections behind after the run.
    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_how_to_convert_to_object_id_reference_fields(self):
        """Demonstrates migrating from 0.7 to 0.8
        """

        # 1. Old definition - using dbrefs
        class Person(Document):
            name = StringField()
            parent = ReferenceField('self', dbref=True)
            friends = ListField(ReferenceField('self', dbref=True))

        Person.drop_collection()

        p1 = Person(name="Wilson", parent=None).save()
        f1 = Person(name="John", parent=None).save()
        f2 = Person(name="Paul", parent=None).save()
        f3 = Person(name="George", parent=None).save()
        f4 = Person(name="Ringo", parent=None).save()
        Person(name="Wilson Jr", parent=p1, friends=[f1, f2, f3, f4]).save()

        # 2. Start the migration by changing the schema
        # Change ReferenceField as now dbref defaults to False
        class Person(Document):
            name = StringField()
            parent = ReferenceField('self')
            friends = ListField(ReferenceField('self'))

        # 3. Loop all the objects and mark the reference fields as changed
        for p in Person.objects:
            p._mark_as_changed('parent')
            p._mark_as_changed('friends')
            p.save()

        # 4. Confirmation of the fix!
        wilson = Person.objects(name="Wilson Jr").as_pymongo()[0]
        self.assertEqual(p1.id, wilson['parent'])
        self.assertEqual([f1.id, f2.id, f3.id, f4.id], wilson['friends'])
							
								
								
									
										62
									
								
								tests/migration/turn_off_inheritance.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										62
									
								
								tests/migration/turn_off_inheritance.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,62 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import Document, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import StringField | ||||
|  | ||||
| __all__ = ('TurnOffInheritanceTest', ) | ||||
|  | ||||
|  | ||||
class TurnOffInheritanceTest(unittest.TestCase):
    """Documents migrating a collection from allow_inheritance=True to
    False."""

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        # Drop every non-system collection so test runs stay independent.
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_how_to_turn_off_inheritance(self):
        """Demonstrates migrating from allow_inheritance = True to False.
        """

        # 1. Old declaration of the class

        class Animal(Document):
            name = StringField()
            meta = {
                'allow_inheritance': True,
                'indexes': ['name']
            }

        # 2. Turn off inheritance
        class Animal(Document):
            name = StringField()
            meta = {
                'allow_inheritance': False,
                'indexes': ['name']
            }

        # 3. Remove _types and _cls
        collection = Animal._get_collection()
        collection.update({}, {"$unset": {"_types": 1, "_cls": 1}}, multi=True)

        # 4. Confirm extra data is removed
        # Fix: use a unittest assertion instead of a bare ``assert`` so the
        # check survives running under ``python -O`` and matches the style
        # of the other migration tests.  (Step numbering also de-duplicated.)
        count = collection.find({"$or": [{'_types': {"$exists": True}},
                                         {'_cls': {"$exists": True}}]}).count()
        self.assertEqual(0, count)

        # 5. Remove indexes that reference the dropped keys
        info = collection.index_information()
        indexes_to_drop = [key for key, value in info.iteritems()
                           if '_types' in dict(value['key'])
                              or '_cls' in dict(value['key'])]
        for index in indexes_to_drop:
            collection.drop_index(index)

        # 6. Recreate indexes
        Animal.ensure_indexes()
							
								
								
									
										48
									
								
								tests/migration/uuidfield_to_binary.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										48
									
								
								tests/migration/uuidfield_to_binary.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,48 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
| import uuid | ||||
|  | ||||
| from mongoengine import Document, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import StringField, UUIDField, ListField | ||||
|  | ||||
| __all__ = ('ConvertToBinaryUUID', ) | ||||
|  | ||||
|  | ||||
class ConvertToBinaryUUID(unittest.TestCase):
    """Documents the 0.7 -> 0.8 migration of string UUIDs to binary
    storage."""

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    # Fix: added for consistency with the sibling migration TestCases;
    # previously this class left its collections behind after the run.
    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_how_to_convert_to_binary_uuid_fields(self):
        """Demonstrates migrating from 0.7 to 0.8
        """

        # 1. Old definition - UUIDs stored as strings
        class Person(Document):
            name = StringField()
            uuid = UUIDField(binary=False)
            uuids = ListField(UUIDField(binary=False))

        Person.drop_collection()
        Person(name="Wilson Jr", uuid=uuid.uuid4(),
               uuids=[uuid.uuid4(), uuid.uuid4()]).save()

        # 2. Start the migration by changing the schema
        # Change UUIDField as now binary defaults to True
        class Person(Document):
            name = StringField()
            uuid = UUIDField()
            uuids = ListField(UUIDField())

        # 3. Loop all the objects and mark the uuid fields as changed
        for p in Person.objects:
            p._mark_as_changed('uuid')
            p._mark_as_changed('uuids')
            p.save()

        # 4. Confirmation of the fix!
        wilson = Person.objects(name="Wilson Jr").as_pymongo()[0]
        self.assertTrue(isinstance(wilson['uuid'], uuid.UUID))
        self.assertTrue(all([isinstance(u, uuid.UUID) for u in wilson['uuids']]))
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user