Compare commits
1833 Commits
[Commit listing: abbreviated SHA1 hashes from 824ec42005 through 3360b72531; the author, date, and commit message columns were not captured in this view.]
.gitignore (vendored, 6 changes)

@@ -13,4 +13,8 @@ env/
 .settings
 .project
 .pydevproject
-tests/bugfix.py
+tests/test_bugfix.py
+htmlcov/
+venv
+venv3
+scratchpad
.install_mongodb_on_travis.sh (new file, 23 additions)

@@ -0,0 +1,23 @@
+#!/bin/bash
+
+sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
+
+if [ "$MONGODB" = "2.4" ]; then
+  echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list
+  sudo apt-get update
+  sudo apt-get install mongodb-10gen=2.4.14
+  sudo service mongodb start
+elif [ "$MONGODB" = "2.6" ]; then
+  echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list
+  sudo apt-get update
+  sudo apt-get install mongodb-org-server=2.6.12
+  # service should be started automatically
+elif [ "$MONGODB" = "3.0" ]; then
+  echo "deb http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list
+  sudo apt-get update
+  sudo apt-get install mongodb-org-server=3.0.14
+  # service should be started automatically
+else
+  echo "Invalid MongoDB version, expected 2.4, 2.6, or 3.0."
+  exit 1
+fi;
.landscape.yml (new file, 22 additions)

@@ -0,0 +1,22 @@
+pylint:
+  disable:
+    # We use this a lot (e.g. via document._meta)
+    - protected-access
+
+  options:
+    additional-builtins:
+      # add xrange and long as valid built-ins. In Python 3, xrange is
+      # translated into range and long is translated into int via 2to3 (see
+      # "use_2to3" in setup.py). This should be removed when we drop Python
+      # 2 support (which probably won't happen any time soon).
+      - xrange
+      - long
+
+pyflakes:
+  disable:
+    # undefined variables are already covered by pylint (and exclude
+    # xrange & long)
+    - F821
+
+ignore-paths:
+  - benchmark.py
.travis.yml (new file, 101 additions)

@@ -0,0 +1,101 @@
+# For full coverage, we'd have to test all supported Python, MongoDB, and
+# PyMongo combinations. However, that would result in an overly long build
+# with a very large number of jobs, hence we only test a subset of all the
+# combinations:
+# * MongoDB v2.4 & v3.0 are only tested against Python v2.7 & v3.5.
+# * MongoDB v2.4 is tested against PyMongo v2.7 & v3.x.
+# * MongoDB v3.0 is tested against PyMongo v3.x.
+# * MongoDB v2.6 is currently the "main" version tested against Python v2.7,
+#   v3.5, PyPy & PyPy3, and PyMongo v2.7, v2.8 & v3.x.
+#
+# Reminder: Update README.rst if you change MongoDB versions we test.
+
+language: python
+
+python:
+- 2.7
+- 3.5
+- pypy
+- pypy3
+
+env:
+- MONGODB=2.6 PYMONGO=2.7
+- MONGODB=2.6 PYMONGO=2.8
+- MONGODB=2.6 PYMONGO=3.0
+
+matrix:
+  # Finish the build as soon as one job fails
+  fast_finish: true
+
+  include:
+  - python: 2.7
+    env: MONGODB=2.4 PYMONGO=2.7
+  - python: 2.7
+    env: MONGODB=2.4 PYMONGO=3.0
+  - python: 2.7
+    env: MONGODB=3.0 PYMONGO=3.0
+  - python: 3.5
+    env: MONGODB=2.4 PYMONGO=2.7
+  - python: 3.5
+    env: MONGODB=2.4 PYMONGO=3.0
+  - python: 3.5
+    env: MONGODB=3.0 PYMONGO=3.0
+
+before_install:
+- bash .install_mongodb_on_travis.sh
+
+install:
+- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev
+  libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev
+  python-tk
+- travis_retry pip install --upgrade pip
+- travis_retry pip install coveralls
+- travis_retry pip install flake8 flake8-import-order
+- travis_retry pip install tox>=1.9
+- travis_retry pip install "virtualenv<14.0.0" # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
+- travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test
+
+# Cache dependencies installed via pip
+cache: pip
+
+# Run flake8 for py27
+before_script:
+- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi
+
+script:
+- tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage
+
+# For now only submit coveralls for Python v2.7. Python v3.x currently shows
+# 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible
+# code in a separate dir and runs tests on that.
+after_success:
+- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi
+
+notifications:
+  irc: irc.freenode.org#mongoengine
+
+# Only run builds on the master branch and GitHub releases (tagged as vX.Y.Z)
+branches:
+  only:
+  - master
+  - /^v.*$/
+
+# Whenever a new release is created via GitHub, publish it on PyPI.
+deploy:
+  provider: pypi
+  user: the_drow
+  password:
+    secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek=
+
+  # create a source distribution and a pure python wheel for faster installs
+  distributions: "sdist bdist_wheel"
+
+  # only deploy on tagged commits (aka GitHub releases) and only for the
+  # parent repo's builds running Python 2.7 along with dev PyMongo (we run
+  # Travis against many different Python and PyMongo versions and we don't
+  # want the deploy to occur multiple times).
+  on:
+    tags: true
+    repo: MongoEngine/mongoengine
+    condition: "$PYMONGO = 3.0"
+    python: 2.7
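The `install` and `script` steps in the new `.travis.yml` above derive the tox environment name from `$TRAVIS_PYTHON_VERSION` and `$PYMONGO` by stripping dots and rewriting `pypypy` back to `pypy` for the PyPy jobs. A minimal Python sketch of that mapping (the `tox_env` helper and the example values are illustrative only; Travis runs the shell pipeline shown in the config, not this function):

```python
# Sketch of the env-name mangling performed by:
#   $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/')
# Illustrative helper; not part of the repository.
def tox_env(travis_python_version, pymongo):
    name = "py{}-mg{}".format(travis_python_version, pymongo)
    name = name.replace(".", "")            # tr -d .
    return name.replace("pypypy", "pypy")   # sed -e 's/pypypy/pypy/'

assert tox_env("2.7", "3.0") == "py27-mg30"    # CPython job
assert tox_env("3.5", "2.8") == "py35-mg28"
assert tox_env("pypy", "2.7") == "pypy-mg27"   # "pypypy" collapsed back to "pypy"
assert tox_env("pypy3", "3.0") == "pypy3-mg30"
```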
AUTHORS (150 changes)

@@ -8,15 +8,14 @@ Florian Schlachter <flori@n-schlachter.de>
 Steve Challis <steve@stevechallis.com>
 Wilson Júnior <wilsonpjunior@gmail.com>
 Dan Crosta https://github.com/dcrosta
+Laine Herron https://github.com/LaineHerron
 
 CONTRIBUTORS
 
-Dervived from the git logs, inevitably incomplete but all of whom and others
+Derived from the git logs, inevitably incomplete but all of whom and others
 have submitted patches, reported bugs and generally helped make MongoEngine
 that much better:
 
-* Harry Marr
-* Ross Lawley
 * blackbrrr
 * Florian Schlachter
 * Vincent Driessen
@@ -24,7 +23,7 @@ that much better:
 * flosch
 * Deepak Thukral
 * Colin Howe
-* Wilson Júnior
+* Wilson Júnior (https://github.com/wpjunior)
 * Alistair Roche
 * Dan Crosta
 * Viktor Kerkez
@@ -76,7 +75,7 @@ that much better:
 * Adam Parrish
 * jpfarias
 * jonrscott
-* Alice Zoë Bevan-McGregor
+* Alice Zoë Bevan-McGregor (https://github.com/amcgregor/)
 * Stephen Young
 * tkloc
 * aid
@@ -103,3 +102,144 @@ that much better:
 * Greg Banks
 * swashbuckler
 * Adam Reeve
+* Anthony Nemitz
+* deignacio
+* Shaun Duncan
+* Meir Kriheli
+* Andrey Fedoseev
+* aparajita
+* Tristan Escalada
+* Alexander Koshelev
+* Jaime Irurzun
+* Alexandre González
+* Thomas Steinacher
+* Tommi Komulainen
+* Peter Landry
+* biszkoptwielki
+* Anton Kolechkin
+* Sergey Nikitin
+* psychogenic
+* Stefan Wójcik (https://github.com/wojcikstefan)
+* dimonb
+* Garry Polley
+* James Slagle
+* Adrian Scott
+* Peter Teichman
+* Jakub Kot
+* Jorge Bastida
+* Aleksandr Sorokoumov
+* Yohan Graterol
+* bool-dev
+* Russ Weeks
+* Paul Swartz
+* Sundar Raman
+* Benoit Louy
+* Loic Raucy (https://github.com/lraucy)
+* hellysmile
+* Jaepil Jeong
+* Daniil Sharou
+* Pete Campton
+* Martyn Smith
+* Marcelo Anton
+* Aleksey Porfirov (https://github.com/lexqt)
+* Nicolas Trippar
+* Manuel Hermann
+* Gustavo Gawryszewski
+* Max Countryman
+* caitifbrito
+* lcya86 刘春洋
+* Martin Alderete (https://github.com/malderete)
+* Nick Joyce
+* Jared Forsyth
+* Kenneth Falck
+* Lukasz Balcerzak
+* Nicolas Cortot
+* Alex (https://github.com/kelsta)
+* Jin Zhang
+* Daniel Axtens
+* Leo-Naeka
+* Ryan Witt (https://github.com/ryanwitt)
+* Jiequan (https://github.com/Jiequan)
+* hensom (https://github.com/hensom)
+* zhy0216 (https://github.com/zhy0216)
+* istinspring (https://github.com/istinspring)
+* Massimo Santini (https://github.com/mapio)
+* Nigel McNie (https://github.com/nigelmcnie)
+* ygbourhis (https://github.com/ygbourhis)
+* Bob Dickinson (https://github.com/BobDickinson)
+* Michael Bartnett (https://github.com/michaelbartnett)
+* Alon Horev (https://github.com/alonho)
+* Kelvin Hammond (https://github.com/kelvinhammond)
+* Jatin Chopra (https://github.com/jatin)
+* Paul Uithol (https://github.com/PaulUithol)
+* Thom Knowles (https://github.com/fleat)
+* Paul (https://github.com/squamous)
+* Olivier Cortès (https://github.com/Karmak23)
+* crazyzubr (https://github.com/crazyzubr)
+* FrankSomething (https://github.com/FrankSomething)
+* Alexandr Morozov (https://github.com/LK4D4)
+* mishudark (https://github.com/mishudark)
+* Joe Friedl (https://github.com/grampajoe)
+* Daniel Ward (https://github.com/danielward)
+* Aniket Deshpande (https://github.com/anicake)
+* rfkrocktk (https://github.com/rfkrocktk)
+* Gustavo Andrés Angulo (https://github.com/woakas)
+* Dmytro Popovych (https://github.com/drudim)
+* Tom (https://github.com/tomprimozic)
+* j0hnsmith (https://github.com/j0hnsmith)
+* Damien Churchill (https://github.com/damoxc)
+* Jonathan Simon Prates (https://github.com/jonathansp)
+* Thiago Papageorgiou (https://github.com/tmpapageorgiou)
+* Omer Katz (https://github.com/thedrow)
+* Falcon Dai (https://github.com/falcondai)
+* Polyrabbit (https://github.com/polyrabbit)
+* Sagiv Malihi (https://github.com/sagivmalihi)
+* Dmitry Konishchev (https://github.com/KonishchevDmitry)
+* Martyn Smith (https://github.com/martynsmith)
+* Andrei Zbikowski (https://github.com/b1naryth1ef)
+* Ronald van Rij (https://github.com/ronaldvanrij)
+* François Schmidts (https://github.com/jaesivsm)
+* Eric Plumb (https://github.com/professorplumb)
+* Damien Churchill (https://github.com/damoxc)
+* Aleksandr Sorokoumov (https://github.com/Gerrrr)
+* Clay McClure (https://github.com/claymation)
+* Bruno Rocha (https://github.com/rochacbruno)
+* Norberto Leite (https://github.com/nleite)
+* Bob Cribbs (https://github.com/bocribbz)
+* Jay Shirley (https://github.com/jshirley)
+* David Bordeynik (https://github.com/DavidBord)
+* Axel Haustant (https://github.com/noirbizarre)
+* David Czarnecki (https://github.com/czarneckid)
+* Vyacheslav Murashkin (https://github.com/a4tunado)
+* André Ericson https://github.com/aericson)
+* Mikhail Moshnogorsky (https://github.com/mikhailmoshnogorsky)
+* Diego Berrocal (https://github.com/cestdiego)
+* Matthew Ellison (https://github.com/seglberg)
+* Jimmy Shen (https://github.com/jimmyshen)
+* J. Fernando Sánchez (https://github.com/balkian)
+* Michael Chase (https://github.com/rxsegrxup)
+* Eremeev Danil (https://github.com/elephanter)
+* Catstyle Lee (https://github.com/Catstyle)
+* Kiryl Yermakou (https://github.com/rma4ok)
+* Matthieu Rigal (https://github.com/MRigal)
+* Charanpal Dhanjal (https://github.com/charanpald)
+* Emmanuel Leblond (https://github.com/touilleMan)
+* Breeze.Kay (https://github.com/9nix00)
+* Vicki Donchenko (https://github.com/kivistein)
+* Emile Caron (https://github.com/emilecaron)
+* Amit Lichtenberg (https://github.com/amitlicht)
+* Gang Li (https://github.com/iici-gli)
+* Lars Butler (https://github.com/larsbutler)
+* George Macon (https://github.com/gmacon)
+* Ashley Whetter (https://github.com/AWhetter)
+* Paul-Armand Verhaegen (https://github.com/paularmand)
+* Steven Rossiter (https://github.com/BeardedSteve)
+* Luo Peng (https://github.com/RussellLuo)
+* Bryan Bennett (https://github.com/bbenne10)
+* Gilb's Gilb's (https://github.com/gilbsgilbs)
+* Joshua Nedrud (https://github.com/Neurostack)
+* Shu Shen (https://github.com/shushen)
+* xiaost7 (https://github.com/xiaost7)
+* Victor Varvaryuk
+* Stanislav Kaledin (https://github.com/sallyruthstruik)
+* Dmitry Yantsen (https://github.com/mrTable)
CONTRIBUTING.rst (new file, 80 additions)

@@ -0,0 +1,80 @@
+Contributing to MongoEngine
+===========================
+
+MongoEngine has a large `community
+<https://raw.github.com/MongoEngine/mongoengine/master/AUTHORS>`_ and
+contributions are always encouraged. Contributions can be as simple as
+minor tweaks to the documentation. Please read these guidelines before
+sending a pull request.
+
+Bugfixes and New Features
+-------------------------
+
+Before starting to write code, look for existing `tickets
+<https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one
+<https://github.com/MongoEngine/mongoengine/issues>`_ for your specific
+issue or feature request. That way you avoid working on something
+that might not be of interest or that has already been addressed. If in doubt
+post to the `user group <http://groups.google.com/group/mongoengine-users>`
+
+Supported Interpreters
+----------------------
+
+MongoEngine supports CPython 2.7 and newer. Language
+features not supported by all interpreters can not be used.
+Please also ensure that your code is properly converted by
+`2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support.
+
+Style Guide
+-----------
+
+MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
+including 4 space indents. When possible we try to stick to 79 character line
+limits. However, screens got bigger and an ORM has a strong focus on
+readability and if it can help, we accept 119 as maximum line length, in a
+similar way as `django does
+<https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#python-style>`_
+
+Testing
+-------
+
+All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
+and any pull requests are automatically tested. Any pull requests without
+tests will take longer to be integrated and might be refused.
+
+You may also submit a simple failing test as a pull request if you don't know
+how to fix it, it will be easier for other people to work on it and it may get
+fixed faster.
+
+General Guidelines
+------------------
+
+- Avoid backward breaking changes if at all possible.
+- If you *have* to introduce a breaking change, make it very clear in your
+  pull request's description. Also, describe how users of this package
+  should adapt to the breaking change in docs/upgrade.rst.
+- Write inline documentation for new classes and methods.
+- Write tests and make sure they pass (make sure you have a mongod
+  running on the default port, then execute ``python setup.py nosetests``
+  from the cmd line to run the test suite).
+- Ensure tests pass on all supported Python, PyMongo, and MongoDB versions.
+  You can test various Python and PyMongo versions locally by executing
+  ``tox``. For different MongoDB versions, you can rely on our automated
+  Travis tests.
+- Add enhancements or problematic bug fixes to docs/changelog.rst.
+- Add yourself to AUTHORS :)
+
+Documentation
+-------------
+
+To contribute to the `API documentation
+<http://docs.mongoengine.org/en/latest/apireference.html>`_
+just make your changes to the inline documentation of the appropriate
+`source code <https://github.com/MongoEngine/mongoengine>`_ or `rst file
+<https://github.com/MongoEngine/mongoengine/tree/master/docs>`_ in a
+branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_.
+You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_
+button.
+
+If you want to test your documentation changes locally, you need to install
+the ``sphinx`` package.
LICENSE (2 changed lines)

@@ -1,4 +1,4 @@
-Copyright (c) 2009-2010 Harry Marr
+Copyright (c) 2009 See AUTHORS
 
 Permission is hereby granted, free of charge, to any person
 obtaining a copy of this software and associated documentation
README.rst (106 changed lines)

@@ -2,37 +2,74 @@
 MongoEngine
 ===========
 :Info: MongoEngine is an ORM-like layer on top of PyMongo.
+:Repository: https://github.com/MongoEngine/mongoengine
 :Author: Harry Marr (http://github.com/hmarr)
-:Maintainer: Ross Lawley (http://github.com/rozza)
+:Maintainer: Stefan Wójcik (http://github.com/wojcikstefan)
+
+.. image:: https://travis-ci.org/MongoEngine/mongoengine.svg?branch=master
+  :target: https://travis-ci.org/MongoEngine/mongoengine
+
+.. image:: https://coveralls.io/repos/github/MongoEngine/mongoengine/badge.svg?branch=master
+  :target: https://coveralls.io/github/MongoEngine/mongoengine?branch=master
+
+.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.svg?style=flat
+  :target: https://landscape.io/github/MongoEngine/mongoengine/master
+  :alt: Code Health
+
 About
 =====
 MongoEngine is a Python Object-Document Mapper for working with MongoDB.
-Documentation available at http://mongoengine-odm.rtfd.org - there is currently
-a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide
-<http://readthedocs.org/docs/mongoengine-odm/en/latest/userguide.html>`_ and an `API reference
-<http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_.
+Documentation is available at https://mongoengine-odm.readthedocs.io - there
+is currently a `tutorial <https://mongoengine-odm.readthedocs.io/tutorial.html>`_,
+a `user guide <https://mongoengine-odm.readthedocs.io/guide/index.html>`_, and
+an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_.
+
+Supported MongoDB Versions
+==========================
+MongoEngine is currently tested against MongoDB v2.4, v2.6, and v3.0. Future
+versions should be supported as well, but aren't actively tested at the moment.
+Make sure to open an issue or submit a pull request if you experience any
+problems with MongoDB v3.2+.
+
 Installation
 ============
-If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
-you can use ``easy_install -U mongoengine``. Otherwise, you can download the
+We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
+`pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``.
+You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
+and thus you can use ``easy_install -U mongoengine``. Otherwise, you can download the
 source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
 setup.py install``.
 
 Dependencies
 ============
-- pymongo 1.1+
-- sphinx (optional - for documentation generation)
+All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_.
+At the very least, you'll need these two packages to use MongoEngine:
+
+- pymongo>=2.7.1
+- six>=1.10.0
+
+If you utilize a ``DateTimeField``, you might also use a more flexible date parser:
+
+- dateutil>=2.1.0
+
+If you need to use an ``ImageField`` or ``ImageGridFsProxy``:
+
+- Pillow>=2.0.0
+
 Examples
 ========
-Some simple examples of what MongoEngine code looks like::
+Some simple examples of what MongoEngine code looks like:
+
+.. code :: python
+
+    from mongoengine import *
+    connect('mydb')
 
     class BlogPost(Document):
         title = StringField(required=True, max_length=200)
-        posted = DateTimeField(default=datetime.datetime.now)
+        posted = DateTimeField(default=datetime.datetime.utcnow)
         tags = ListField(StringField(max_length=50))
+        meta = {'allow_inheritance': True}
 
     class TextPost(BlogPost):
         content = StringField(required=True)

@@ -59,29 +96,47 @@ Some simple examples of what MongoEngine code looks like::
     ... print 'Link:', post.url
     ... print
     ...
-    === Using MongoEngine ===
-    See the tutorial
 
-    === MongoEngine Docs ===
-    Link: hmarr.com/mongoengine
+    # Count all blog posts and its subtypes
+    >>> BlogPost.objects.count()
 
-    >>> len(BlogPost.objects)
     2
-    >>> len(HtmlPost.objects)
+    >>> TextPost.objects.count()
     1
-    >>> len(LinkPost.objects)
+    >>> LinkPost.objects.count()
     1
 
-    # Find tagged posts
-    >>> len(BlogPost.objects(tags='mongoengine'))
+    # Count tagged posts
+    >>> BlogPost.objects(tags='mongoengine').count()
     2
-    >>> len(BlogPost.objects(tags='mongodb'))
+    >>> BlogPost.objects(tags='mongodb').count()
     1
 
 Tests
 =====
 To run the test suite, ensure you are running a local instance of MongoDB on
-the standard port, and run ``python setup.py test``.
+the standard port and have ``nose`` installed. Then, run ``python setup.py nosetests``.
+
+To run the test suite on every supported Python and PyMongo version, you can
+use ``tox``. You'll need to make sure you have each supported Python version
+installed in your environment and then:
+
+.. code-block:: shell
+
+    # Install tox
+    $ pip install tox
+    # Run the test suites
+    $ tox
+
+If you wish to run a subset of tests, use the nosetests convention:
+
+.. code-block:: shell
+
+    # Run all the tests in a particular test file
+    $ python setup.py nosetests --tests tests/fields/fields.py
+    # Run only particular test class in that file
+    $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest
+    # Use the -s option if you want to print some debug statements or use pdb
+    $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest -s
 
 Community
 =========

@@ -89,10 +144,7 @@ Community
 <http://groups.google.com/group/mongoengine-users>`_
 - `MongoEngine Developers mailing list
 <http://groups.google.com/group/mongoengine-dev>`_
-- `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_
 
 Contributing
 ============
-The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to
-contribute to the project, fork it on GitHub and send a pull request, all
-contributions and suggestions are welcome!
+We welcome contributions! See the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_
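
The updated README above lists dateutil and Pillow as optional extras. As a
rough, hypothetical illustration of the fields that pull them in (the document
class, database name, and image path below are invented for this sketch and
are not part of the README):

.. code-block:: python

    from mongoengine import Document, DateTimeField, ImageField, connect

    connect('depsdemo')  # hypothetical database name


    class Photo(Document):
        # With dateutil installed, DateTimeField can parse looser date strings.
        taken_at = DateTimeField()
        # ImageField relies on Pillow for image validation and optional resizing.
        picture = ImageField(size=(800, 600, True))


    photo = Photo(taken_at='2016-01-02 10:00:00')

    # Attach an image from disk (the path is made up for this example).
    with open('example.jpg', 'rb') as image_file:
        photo.picture.put(image_file, content_type='image/jpeg')

    photo.save()
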
benchmark.py (227 changed lines)

@@ -1,120 +1,111 @@
 #!/usr/bin/env python
+
+"""
+Simple benchmark comparing PyMongo and MongoEngine.
+
+Sample run on a mid 2015 MacBook Pro (commit b282511):
+
+Benchmarking...
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - Pymongo
+2.58979988098
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - Pymongo write_concern={"w": 0}
+1.26657605171
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine
+8.4351580143
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries without continual assign - MongoEngine
+7.20191693306
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True
+6.31104588509
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True
+6.07083487511
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False
+5.97704291344
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False
+5.9111430645
+"""
+
 import timeit
 
-
-def cprofile_main():
-    from pymongo import Connection
-    connection = Connection()
-    connection.drop_database('timeit_test')
-    connection.disconnect()
-
-    from mongoengine import Document, DictField, connect
-    connect("timeit_test")
-
-    class Noddy(Document):
-        fields = DictField()
-
-    for i in xrange(1):
-        noddy = Noddy()
-        for j in range(20):
-            noddy.fields["key" + str(j)] = "value " + str(j)
-        noddy.save()
-
 
 def main():
-    """
-    0.4 Performance Figures ...
-
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - Pymongo
-    1.1141769886
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine
-    2.37724113464
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-    1.92479610443
-
-    0.5.X
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - Pymongo
-    1.10552310944
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine
-    16.5169169903
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-    14.9446101189
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
-    14.912801981
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, force=True
-    14.9617750645
-
-    Performance
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - Pymongo
-    1.10072994232
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine
-    5.27341103554
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-    4.49365401268
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
-    4.43459296227
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, force=True
-    4.40114378929
-    """
+    print("Benchmarking...")
 
     setup = """
-from pymongo import Connection
-connection = Connection()
+from pymongo import MongoClient
+connection = MongoClient()
 connection.drop_database('timeit_test')
 """
 
     stmt = """
-from pymongo import Connection
-connection = Connection()
+from pymongo import MongoClient
+connection = MongoClient()
 
 db = connection.timeit_test
 noddy = db.noddy
 
-for i in xrange(10000):
+for i in range(10000):
     example = {'fields': {}}
     for j in range(20):
-        example['fields']["key"+str(j)] = "value "+str(j)
+        example['fields']['key' + str(j)] = 'value ' + str(j)
 
-    noddy.insert(example)
+    noddy.save(example)
 
 myNoddys = noddy.find()
 [n for n in myNoddys] # iterate
 """
 
-    print "-" * 100
-    print """Creating 10000 dictionaries - Pymongo"""
+    print("-" * 100)
+    print("""Creating 10000 dictionaries - Pymongo""")
     t = timeit.Timer(stmt=stmt, setup=setup)
-    print t.timeit(1)
+    print(t.timeit(1))
+
+    stmt = """
+from pymongo import MongoClient
+from pymongo.write_concern import WriteConcern
+connection = MongoClient()
+
+db = connection.get_database('timeit_test', write_concern=WriteConcern(w=0))
+noddy = db.noddy
+
+for i in range(10000):
+    example = {'fields': {}}
+    for j in range(20):
+        example['fields']["key"+str(j)] = "value "+str(j)
+
+    noddy.save(example)
+
+myNoddys = noddy.find()
+[n for n in myNoddys] # iterate
+"""
+
+    print("-" * 100)
+    print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""")
+    t = timeit.Timer(stmt=stmt, setup=setup)
+    print(t.timeit(1))
 
     setup = """
-from pymongo import Connection
-connection = Connection()
+from pymongo import MongoClient
+connection = MongoClient()
 connection.drop_database('timeit_test')
-connection.disconnect()
+connection.close()
 
 from mongoengine import Document, DictField, connect
-connect("timeit_test")
+connect('timeit_test')
 
 class Noddy(Document):
     fields = DictField()
 """
 
     stmt = """
-for i in xrange(10000):
+for i in range(10000):
     noddy = Noddy()
     for j in range(20):
         noddy.fields["key"+str(j)] = "value "+str(j)

@@ -124,59 +115,93 @@ myNoddys = Noddy.objects()
 [n for n in myNoddys] # iterate
 """
 
-    print "-" * 100
-    print """Creating 10000 dictionaries - MongoEngine"""
+    print("-" * 100)
+    print("""Creating 10000 dictionaries - MongoEngine""")
     t = timeit.Timer(stmt=stmt, setup=setup)
-    print t.timeit(1)
+    print(t.timeit(1))
 
     stmt = """
-for i in xrange(10000):
+for i in range(10000):
     noddy = Noddy()
+    fields = {}
     for j in range(20):
-        noddy.fields["key"+str(j)] = "value "+str(j)
-    noddy.save(safe=False, validate=False)
+        fields["key"+str(j)] = "value "+str(j)
+    noddy.fields = fields
+    noddy.save()
 
 myNoddys = Noddy.objects()
 [n for n in myNoddys] # iterate
 """
 
-    print "-" * 100
-    print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False"""
+    print("-" * 100)
+    print("""Creating 10000 dictionaries without continual assign - MongoEngine""")
     t = timeit.Timer(stmt=stmt, setup=setup)
-    print t.timeit(1)
+    print(t.timeit(1))
 
     stmt = """
-for i in xrange(10000):
+for i in range(10000):
     noddy = Noddy()
     for j in range(20):
         noddy.fields["key"+str(j)] = "value "+str(j)
-    noddy.save(safe=False, validate=False, cascade=False)
+    noddy.save(write_concern={"w": 0}, cascade=True)
 
 myNoddys = Noddy.objects()
 [n for n in myNoddys] # iterate
 """
 
-    print "-" * 100
-    print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False"""
+    print("-" * 100)
+    print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""")
     t = timeit.Timer(stmt=stmt, setup=setup)
-    print t.timeit(1)
+    print(t.timeit(1))
 
     stmt = """
-for i in xrange(10000):
+for i in range(10000):
     noddy = Noddy()
     for j in range(20):
         noddy.fields["key"+str(j)] = "value "+str(j)
-    noddy.save(force_insert=True, safe=False, validate=False, cascade=False)
+    noddy.save(write_concern={"w": 0}, validate=False, cascade=True)
 
 myNoddys = Noddy.objects()
 [n for n in myNoddys] # iterate
 """
 
-    print "-" * 100
-    print """Creating 10000 dictionaries - MongoEngine, force=True"""
+    print("-" * 100)
+    print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""")
     t = timeit.Timer(stmt=stmt, setup=setup)
-    print t.timeit(1)
+    print(t.timeit(1))
+
+    stmt = """
+for i in range(10000):
+    noddy = Noddy()
+    for j in range(20):
+        noddy.fields["key"+str(j)] = "value "+str(j)
+    noddy.save(validate=False, write_concern={"w": 0})
+
+myNoddys = Noddy.objects()
+[n for n in myNoddys] # iterate
+"""
+
+    print("-" * 100)
+    print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""")
+    t = timeit.Timer(stmt=stmt, setup=setup)
+    print(t.timeit(1))
+
+    stmt = """
+for i in range(10000):
+    noddy = Noddy()
+    for j in range(20):
+        noddy.fields["key"+str(j)] = "value "+str(j)
+    noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)
+
+myNoddys = Noddy.objects()
+[n for n in myNoddys] # iterate
+"""
+
+    print("-" * 100)
+    print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""")
+    t = timeit.Timer(stmt=stmt, setup=setup)
+    print(t.timeit(1))
 
 
 if __name__ == "__main__":
     main()
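
A note on the PyMongo calls used above: the rewritten benchmark replaces the
removed ``safe=False`` flag with an explicit write concern obtained through
``MongoClient.get_database``. As a standalone sketch of that pattern (the
database and collection names here are placeholders, not part of the
benchmark):

.. code-block:: python

    from pymongo import MongoClient
    from pymongo.write_concern import WriteConcern

    client = MongoClient()

    # Request unacknowledged writes (w=0) on this database handle only;
    # other handles to the same database keep the default write concern.
    db = client.get_database('benchmark_db', write_concern=WriteConcern(w=0))
    db.example.insert_one({'key': 'value'})
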
docs/_themes/nature/static/nature.css_t (vendored) - 229 lines deleted
docs/_themes/nature/static/pygments.css (vendored) - 54 lines deleted

Both files belonged to the old Nature Sphinx theme: nature.css_t carried the
page layout, sidebar, and body styles, and pygments.css the syntax-highlighting
colour classes. They are deleted wholesale; their CSS rules are not reproduced
here.
docs/_themes/nature/theme.conf (vendored) - 4 lines deleted

@@ -1,4 +0,0 @@
-[theme]
-inherit = basic
-stylesheet = nature.css
-pygments_style = tango
docs/_themes/sphinx_rtd_theme/__init__.py (vendored, executable file) - 17 lines added

@@ -0,0 +1,17 @@
+"""Sphinx ReadTheDocs theme.
+
+From https://github.com/ryan-roemer/sphinx-bootstrap-theme.
+
+"""
+import os
+
+VERSION = (0, 1, 5)
+
+__version__ = ".".join(str(v) for v in VERSION)
+__version_full__ = __version__
+
+
+def get_html_theme_path():
+    """Return list of HTML theme paths."""
+    cur_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+    return cur_dir
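
For orientation, a theme vendored this way is typically enabled from the docs'
Sphinx ``conf.py`` roughly as follows. This is a sketch of conventional Sphinx
configuration, not a quote from this repository's conf.py:

.. code-block:: python

    # docs/conf.py (sketch): point Sphinx at the bundled theme.
    import os
    import sys

    # Make the vendored theme package importable.
    sys.path.insert(0, os.path.abspath('_themes'))

    import sphinx_rtd_theme

    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
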
The remaining additions vendor the Read the Docs Sphinx theme under
docs/_themes/sphinx_rtd_theme/. They consist of Jinja templates and static
assets; rather than reproducing them line by line, here is what each new file
contains:

- breadcrumbs.html (15 lines): breadcrumb bar with "Edit on GitHub", "Edit on
  Bitbucket", and "View page source" links.
- footer.html (30 lines): next/previous navigation buttons, copyright,
  last-updated notice, and the "Sphinx theme provided by Read the Docs" credit.
- layout.html (142 lines): the main page layout - head section, side navigation
  with search box, mobile navigation bar, content area, and the versions
  include.
- layout_old.html (205 lines): a copy of the stock Sphinx basic/layout.html
  template kept alongside the new layout.
- search.html (50 lines): the search page template, based on Sphinx's
  basic/search.html.
- searchbox.html (5 lines): the sidebar search form.
- static/css/badge_only.css (1 minified line): styles for the version badge
  widget.
- static/css/theme.css: file diff suppressed because one or more lines are too
  long.
- static/favicon.ico and static/font/fontawesome_webfont.eot: binary files,
  not shown.
docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.svg (vendored, executable file) - 399 lines added

@@ -0,0 +1,399 @@
<?xml version="1.0" standalone="no"?>
|
||||||
|
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" >
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg">
|
||||||
|
<metadata></metadata>
|
||||||
|
<defs>
|
||||||
|
<font id="fontawesomeregular" horiz-adv-x="1536" >
|
||||||
|
<font-face units-per-em="1792" ascent="1536" descent="-256" />
|
||||||
|
<missing-glyph horiz-adv-x="448" />
|
||||||
|
<glyph unicode=" " horiz-adv-x="448" />
|
||||||
|
<glyph unicode="	" horiz-adv-x="448" />
|
||||||
|
<glyph unicode=" " horiz-adv-x="448" />
|
||||||
|
<glyph unicode="¨" horiz-adv-x="1792" />
|
||||||
|
<glyph unicode="©" horiz-adv-x="1792" />
|
||||||
|
<glyph unicode="®" horiz-adv-x="1792" />
|
||||||
|
<glyph unicode="´" horiz-adv-x="1792" />
|
||||||
|
<glyph unicode="Æ" horiz-adv-x="1792" />
<glyph unicode=" " horiz-adv-x="768" />
<glyph unicode=" " />
<glyph unicode=" " horiz-adv-x="768" />
<glyph unicode=" " />
<glyph unicode=" " horiz-adv-x="512" />
<glyph unicode=" " horiz-adv-x="384" />
<glyph unicode=" " horiz-adv-x="256" />
<glyph unicode=" " horiz-adv-x="256" />
<glyph unicode=" " horiz-adv-x="192" />
<glyph unicode=" " horiz-adv-x="307" />
<glyph unicode=" " horiz-adv-x="85" />
<glyph unicode=" " horiz-adv-x="307" />
<glyph unicode=" " horiz-adv-x="384" />
<glyph unicode="™" horiz-adv-x="1792" />
<glyph unicode="∞" horiz-adv-x="1792" />
<glyph unicode="≠" horiz-adv-x="1792" />
<glyph unicode="" horiz-adv-x="500" d="M0 0z" />
<glyph unicode="" horiz-adv-x="1792" d="M1699 1350q0 -35 -43 -78l-632 -632v-768h320q26 0 45 -19t19 -45t-19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45t45 19h320v768l-632 632q-43 43 -43 78q0 23 18 36.5t38 17.5t43 4h1408q23 0 43 -4t38 -17.5t18 -36.5z" />
<glyph unicode="" d="M1536 1312v-1120q0 -50 -34 -89t-86 -60.5t-103.5 -32t-96.5 -10.5t-96.5 10.5t-103.5 32t-86 60.5t-34 89t34 89t86 60.5t103.5 32t96.5 10.5q105 0 192 -39v537l-768 -237v-709q0 -50 -34 -89t-86 -60.5t-103.5 -32t-96.5 -10.5t-96.5 10.5t-103.5 32t-86 60.5t-34 89 t34 89t86 60.5t103.5 32t96.5 10.5q105 0 192 -39v967q0 31 19 56.5t49 35.5l832 256q12 4 28 4q40 0 68 -28t28 -68z" />
<glyph unicode="" horiz-adv-x="1664" d="M1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5zM1664 -128q0 -52 -38 -90t-90 -38q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5 t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z" />
<glyph unicode="" horiz-adv-x="1792" d="M1664 32v768q-32 -36 -69 -66q-268 -206 -426 -338q-51 -43 -83 -67t-86.5 -48.5t-102.5 -24.5h-1h-1q-48 0 -102.5 24.5t-86.5 48.5t-83 67q-158 132 -426 338q-37 30 -69 66v-768q0 -13 9.5 -22.5t22.5 -9.5h1472q13 0 22.5 9.5t9.5 22.5zM1664 1083v11v13.5t-0.5 13 t-3 12.5t-5.5 9t-9 7.5t-14 2.5h-1472q-13 0 -22.5 -9.5t-9.5 -22.5q0 -168 147 -284q193 -152 401 -317q6 -5 35 -29.5t46 -37.5t44.5 -31.5t50.5 -27.5t43 -9h1h1q20 0 43 9t50.5 27.5t44.5 31.5t46 37.5t35 29.5q208 165 401 317q54 43 100.5 115.5t46.5 131.5z M1792 1120v-1088q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1472q66 0 113 -47t47 -113z" />
<glyph unicode="" horiz-adv-x="1792" d="M896 -128q-26 0 -44 18l-624 602q-10 8 -27.5 26t-55.5 65.5t-68 97.5t-53.5 121t-23.5 138q0 220 127 344t351 124q62 0 126.5 -21.5t120 -58t95.5 -68.5t76 -68q36 36 76 68t95.5 68.5t120 58t126.5 21.5q224 0 351 -124t127 -344q0 -221 -229 -450l-623 -600 q-18 -18 -44 -18z" />
<glyph unicode="" horiz-adv-x="1664" d="M1664 889q0 -22 -26 -48l-363 -354l86 -500q1 -7 1 -20q0 -21 -10.5 -35.5t-30.5 -14.5q-19 0 -40 12l-449 236l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41t49 -41l225 -455 l502 -73q56 -9 56 -46z" />
<glyph unicode="" horiz-adv-x="1664" d="M1137 532l306 297l-422 62l-189 382l-189 -382l-422 -62l306 -297l-73 -421l378 199l377 -199zM1664 889q0 -22 -26 -48l-363 -354l86 -500q1 -7 1 -20q0 -50 -41 -50q-19 0 -40 12l-449 236l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500 l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41t49 -41l225 -455l502 -73q56 -9 56 -46z" />
<glyph unicode="" horiz-adv-x="1408" d="M1408 131q0 -120 -73 -189.5t-194 -69.5h-874q-121 0 -194 69.5t-73 189.5q0 53 3.5 103.5t14 109t26.5 108.5t43 97.5t62 81t85.5 53.5t111.5 20q9 0 42 -21.5t74.5 -48t108 -48t133.5 -21.5t133.5 21.5t108 48t74.5 48t42 21.5q61 0 111.5 -20t85.5 -53.5t62 -81 t43 -97.5t26.5 -108.5t14 -109t3.5 -103.5zM1088 1024q0 -159 -112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5t112.5 271.5t271.5 112.5t271.5 -112.5t112.5 -271.5z" />
<glyph unicode="" horiz-adv-x="1920" d="M384 -64v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM384 320v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM384 704v128q0 26 -19 45t-45 19h-128 q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1408 -64v512q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-512q0 -26 19 -45t45 -19h768q26 0 45 19t19 45zM384 1088v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45 t45 -19h128q26 0 45 19t19 45zM1792 -64v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1408 704v512q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-512q0 -26 19 -45t45 -19h768q26 0 45 19t19 45zM1792 320v128 q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1792 704v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1792 1088v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19 t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1920 1248v-1344q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1344q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" />
<glyph unicode="" horiz-adv-x="1664" d="M768 512v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM768 1280v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM1664 512v-384q0 -52 -38 -90t-90 -38 h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM1664 1280v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90z" />
<glyph unicode="" horiz-adv-x="1792" d="M512 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 288v-192q0 -40 -28 -68t-68 -28h-320 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28 h320q40 0 68 -28t28 -68zM1792 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 800v-192 q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68z" />
<glyph unicode="" horiz-adv-x="1792" d="M512 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 288v-192q0 -40 -28 -68t-68 -28h-960 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h960q40 0 68 -28t28 -68zM512 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 800v-192q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v192q0 40 28 68t68 28 h960q40 0 68 -28t28 -68zM1792 1312v-192q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h960q40 0 68 -28t28 -68z" />
<glyph unicode="" horiz-adv-x="1792" d="M1671 970q0 -40 -28 -68l-724 -724l-136 -136q-28 -28 -68 -28t-68 28l-136 136l-362 362q-28 28 -28 68t28 68l136 136q28 28 68 28t68 -28l294 -295l656 657q28 28 68 28t68 -28l136 -136q28 -28 28 -68z" />
<glyph unicode="" horiz-adv-x="1408" d="M1298 214q0 -40 -28 -68l-136 -136q-28 -28 -68 -28t-68 28l-294 294l-294 -294q-28 -28 -68 -28t-68 28l-136 136q-28 28 -28 68t28 68l294 294l-294 294q-28 28 -28 68t28 68l136 136q28 28 68 28t68 -28l294 -294l294 294q28 28 68 28t68 -28l136 -136q28 -28 28 -68 t-28 -68l-294 -294l294 -294q28 -28 28 -68z" />
<glyph unicode="" horiz-adv-x="1664" d="M1024 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-224v-224q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v224h-224q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h224v224q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-224h224 q13 0 22.5 -9.5t9.5 -22.5zM1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5zM1664 -128q0 -53 -37.5 -90.5t-90.5 -37.5q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5 t-225 150t-150 225t-55.5 273.5t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z" />
<glyph unicode="" horiz-adv-x="1664" d="M1024 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-576q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h576q13 0 22.5 -9.5t9.5 -22.5zM1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5z M1664 -128q0 -53 -37.5 -90.5t-90.5 -37.5q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z " />
<glyph unicode="" d="M1536 640q0 -156 -61 -298t-164 -245t-245 -164t-298 -61t-298 61t-245 164t-164 245t-61 298q0 182 80.5 343t226.5 270q43 32 95.5 25t83.5 -50q32 -42 24.5 -94.5t-49.5 -84.5q-98 -74 -151.5 -181t-53.5 -228q0 -104 40.5 -198.5t109.5 -163.5t163.5 -109.5 t198.5 -40.5t198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5q0 121 -53.5 228t-151.5 181q-42 32 -49.5 84.5t24.5 94.5q31 43 84 50t95 -25q146 -109 226.5 -270t80.5 -343zM896 1408v-640q0 -52 -38 -90t-90 -38t-90 38t-38 90v640q0 52 38 90t90 38t90 -38t38 -90z" />
<glyph unicode="" horiz-adv-x="1792" d="M256 96v-192q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM640 224v-320q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v320q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1024 480v-576q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23 v576q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1408 864v-960q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v960q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1792 1376v-1472q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v1472q0 14 9 23t23 9h192q14 0 23 -9t9 -23z" />
<glyph unicode="" d="M1024 640q0 106 -75 181t-181 75t-181 -75t-75 -181t75 -181t181 -75t181 75t75 181zM1536 749v-222q0 -12 -8 -23t-20 -13l-185 -28q-19 -54 -39 -91q35 -50 107 -138q10 -12 10 -25t-9 -23q-27 -37 -99 -108t-94 -71q-12 0 -26 9l-138 108q-44 -23 -91 -38 q-16 -136 -29 -186q-7 -28 -36 -28h-222q-14 0 -24.5 8.5t-11.5 21.5l-28 184q-49 16 -90 37l-141 -107q-10 -9 -25 -9q-14 0 -25 11q-126 114 -165 168q-7 10 -7 23q0 12 8 23q15 21 51 66.5t54 70.5q-27 50 -41 99l-183 27q-13 2 -21 12.5t-8 23.5v222q0 12 8 23t19 13 l186 28q14 46 39 92q-40 57 -107 138q-10 12 -10 24q0 10 9 23q26 36 98.5 107.5t94.5 71.5q13 0 26 -10l138 -107q44 23 91 38q16 136 29 186q7 28 36 28h222q14 0 24.5 -8.5t11.5 -21.5l28 -184q49 -16 90 -37l142 107q9 9 24 9q13 0 25 -10q129 -119 165 -170q7 -8 7 -22 q0 -12 -8 -23q-15 -21 -51 -66.5t-54 -70.5q26 -50 41 -98l183 -28q13 -2 21 -12.5t8 -23.5z" />
<glyph unicode="" horiz-adv-x="1408" d="M512 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM768 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1024 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576 q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1152 76v948h-896v-948q0 -22 7 -40.5t14.5 -27t10.5 -8.5h832q3 0 10.5 8.5t14.5 27t7 40.5zM480 1152h448l-48 117q-7 9 -17 11h-317q-10 -2 -17 -11zM1408 1120v-64q0 -14 -9 -23t-23 -9h-96v-948q0 -83 -47 -143.5t-113 -60.5h-832 q-66 0 -113 58.5t-47 141.5v952h-96q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h309l70 167q15 37 54 63t79 26h320q40 0 79 -26t54 -63l70 -167h309q14 0 23 -9t9 -23z" />
<glyph unicode="" horiz-adv-x="1664" d="M1408 544v-480q0 -26 -19 -45t-45 -19h-384v384h-256v-384h-384q-26 0 -45 19t-19 45v480q0 1 0.5 3t0.5 3l575 474l575 -474q1 -2 1 -6zM1631 613l-62 -74q-8 -9 -21 -11h-3q-13 0 -21 7l-692 577l-692 -577q-12 -8 -24 -7q-13 2 -21 11l-62 74q-8 10 -7 23.5t11 21.5 l719 599q32 26 76 26t76 -26l244 -204v195q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-408l219 -182q10 -8 11 -21.5t-7 -23.5z" />
<glyph unicode="" horiz-adv-x="1280" d="M128 0h1024v768h-416q-40 0 -68 28t-28 68v416h-512v-1280zM768 896h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376zM1280 864v-896q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h640q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88z " />
<glyph unicode="" d="M896 992v-448q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h224v352q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" horiz-adv-x="1920" d="M1111 540v4l-24 320q-1 13 -11 22.5t-23 9.5h-186q-13 0 -23 -9.5t-11 -22.5l-24 -320v-4q-1 -12 8 -20t21 -8h244q12 0 21 8t8 20zM1870 73q0 -73 -46 -73h-704q13 0 22 9.5t8 22.5l-20 256q-1 13 -11 22.5t-23 9.5h-272q-13 0 -23 -9.5t-11 -22.5l-20 -256 q-1 -13 8 -22.5t22 -9.5h-704q-46 0 -46 73q0 54 26 116l417 1044q8 19 26 33t38 14h339q-13 0 -23 -9.5t-11 -22.5l-15 -192q-1 -14 8 -23t22 -9h166q13 0 22 9t8 23l-15 192q-1 13 -11 22.5t-23 9.5h339q20 0 38 -14t26 -33l417 -1044q26 -62 26 -116z" />
<glyph unicode="" horiz-adv-x="1664" d="M1280 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1536 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 416v-320q0 -40 -28 -68t-68 -28h-1472q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h465l135 -136 q58 -56 136 -56t136 56l136 136h464q40 0 68 -28t28 -68zM1339 985q17 -41 -14 -70l-448 -448q-18 -19 -45 -19t-45 19l-448 448q-31 29 -14 70q17 39 59 39h256v448q0 26 19 45t45 19h256q26 0 45 -19t19 -45v-448h256q42 0 59 -39z" />
<glyph unicode="" d="M1120 608q0 -12 -10 -24l-319 -319q-11 -9 -23 -9t-23 9l-320 320q-15 16 -7 35q8 20 30 20h192v352q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-352h192q14 0 23 -9t9 -23zM768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273 t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1118 660q-8 -20 -30 -20h-192v-352q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v352h-192q-14 0 -23 9t-9 23q0 12 10 24l319 319q11 9 23 9t23 -9l320 -320q15 -16 7 -35zM768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198 t73 273t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1023 576h316q-1 3 -2.5 8t-2.5 8l-212 496h-708l-212 -496q-1 -2 -2.5 -8t-2.5 -8h316l95 -192h320zM1536 546v-482q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v482q0 62 25 123l238 552q10 25 36.5 42t52.5 17h832q26 0 52.5 -17t36.5 -42l238 -552 q25 -61 25 -123z" />
<glyph unicode="" d="M1184 640q0 -37 -32 -55l-544 -320q-15 -9 -32 -9q-16 0 -32 8q-32 19 -32 56v640q0 37 32 56q33 18 64 -1l544 -320q32 -18 32 -55zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1536 1280v-448q0 -26 -19 -45t-45 -19h-448q-42 0 -59 40q-17 39 14 69l138 138q-148 137 -349 137q-104 0 -198.5 -40.5t-163.5 -109.5t-109.5 -163.5t-40.5 -198.5t40.5 -198.5t109.5 -163.5t163.5 -109.5t198.5 -40.5q119 0 225 52t179 147q7 10 23 12q14 0 25 -9 l137 -138q9 -8 9.5 -20.5t-7.5 -22.5q-109 -132 -264 -204.5t-327 -72.5q-156 0 -298 61t-245 164t-164 245t-61 298t61 298t164 245t245 164t298 61q147 0 284.5 -55.5t244.5 -156.5l130 129q29 31 70 14q39 -17 39 -59z" />
<glyph unicode="" d="M1511 480q0 -5 -1 -7q-64 -268 -268 -434.5t-478 -166.5q-146 0 -282.5 55t-243.5 157l-129 -129q-19 -19 -45 -19t-45 19t-19 45v448q0 26 19 45t45 19h448q26 0 45 -19t19 -45t-19 -45l-137 -137q71 -66 161 -102t187 -36q134 0 250 65t186 179q11 17 53 117 q8 23 30 23h192q13 0 22.5 -9.5t9.5 -22.5zM1536 1280v-448q0 -26 -19 -45t-45 -19h-448q-26 0 -45 19t-19 45t19 45l138 138q-148 137 -349 137q-134 0 -250 -65t-186 -179q-11 -17 -53 -117q-8 -23 -30 -23h-199q-13 0 -22.5 9.5t-9.5 22.5v7q65 268 270 434.5t480 166.5 q146 0 284 -55.5t245 -156.5l130 129q19 19 45 19t45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1792" d="M384 352v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 608v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M384 864v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1536 352v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h960q13 0 22.5 -9.5t9.5 -22.5z M1536 608v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h960q13 0 22.5 -9.5t9.5 -22.5zM1536 864v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h960q13 0 22.5 -9.5 t9.5 -22.5zM1664 160v832q0 13 -9.5 22.5t-22.5 9.5h-1472q-13 0 -22.5 -9.5t-9.5 -22.5v-832q0 -13 9.5 -22.5t22.5 -9.5h1472q13 0 22.5 9.5t9.5 22.5zM1792 1248v-1088q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1472q66 0 113 -47 t47 -113z" />
<glyph unicode="" horiz-adv-x="1152" d="M320 768h512v192q0 106 -75 181t-181 75t-181 -75t-75 -181v-192zM1152 672v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v576q0 40 28 68t68 28h32v192q0 184 132 316t316 132t316 -132t132 -316v-192h32q40 0 68 -28t28 -68z" />
<glyph unicode="" horiz-adv-x="1792" d="M320 1280q0 -72 -64 -110v-1266q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v1266q-64 38 -64 110q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -25 -12.5 -38.5t-39.5 -27.5q-215 -116 -369 -116q-61 0 -123.5 22t-108.5 48 t-115.5 48t-142.5 22q-192 0 -464 -146q-17 -9 -33 -9q-26 0 -45 19t-19 45v742q0 32 31 55q21 14 79 43q236 120 421 120q107 0 200 -29t219 -88q38 -19 88 -19q54 0 117.5 21t110 47t88 47t54.5 21q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1664" d="M1664 650q0 -166 -60 -314l-20 -49l-185 -33q-22 -83 -90.5 -136.5t-156.5 -53.5v-32q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-32q71 0 130 -35.5t93 -95.5l68 12q29 95 29 193q0 148 -88 279t-236.5 209t-315.5 78 t-315.5 -78t-236.5 -209t-88 -279q0 -98 29 -193l68 -12q34 60 93 95.5t130 35.5v32q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v32q-88 0 -156.5 53.5t-90.5 136.5l-185 33l-20 49q-60 148 -60 314q0 151 67 291t179 242.5 t266 163.5t320 61t320 -61t266 -163.5t179 -242.5t67 -291z" />
<glyph unicode="" horiz-adv-x="768" d="M768 1184v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1152" d="M768 1184v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45zM1152 640q0 -76 -42.5 -141.5t-112.5 -93.5q-10 -5 -25 -5q-26 0 -45 18.5t-19 45.5q0 21 12 35.5t29 25t34 23t29 35.5 t12 57t-12 57t-29 35.5t-34 23t-29 25t-12 35.5q0 27 19 45.5t45 18.5q15 0 25 -5q70 -27 112.5 -93t42.5 -142z" />
<glyph unicode="" horiz-adv-x="1664" d="M768 1184v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45zM1152 640q0 -76 -42.5 -141.5t-112.5 -93.5q-10 -5 -25 -5q-26 0 -45 18.5t-19 45.5q0 21 12 35.5t29 25t34 23t29 35.5 t12 57t-12 57t-29 35.5t-34 23t-29 25t-12 35.5q0 27 19 45.5t45 18.5q15 0 25 -5q70 -27 112.5 -93t42.5 -142zM1408 640q0 -153 -85 -282.5t-225 -188.5q-13 -5 -25 -5q-27 0 -46 19t-19 45q0 39 39 59q56 29 76 44q74 54 115.5 135.5t41.5 173.5t-41.5 173.5 t-115.5 135.5q-20 15 -76 44q-39 20 -39 59q0 26 19 45t45 19q13 0 26 -5q140 -59 225 -188.5t85 -282.5zM1664 640q0 -230 -127 -422.5t-338 -283.5q-13 -5 -26 -5q-26 0 -45 19t-19 45q0 36 39 59q7 4 22.5 10.5t22.5 10.5q46 25 82 51q123 91 192 227t69 289t-69 289 t-192 227q-36 26 -82 51q-7 4 -22.5 10.5t-22.5 10.5q-39 23 -39 59q0 26 19 45t45 19q13 0 26 -5q211 -91 338 -283.5t127 -422.5z" />
<glyph unicode="" horiz-adv-x="1408" d="M384 384v-128h-128v128h128zM384 1152v-128h-128v128h128zM1152 1152v-128h-128v128h128zM128 129h384v383h-384v-383zM128 896h384v384h-384v-384zM896 896h384v384h-384v-384zM640 640v-640h-640v640h640zM1152 128v-128h-128v128h128zM1408 128v-128h-128v128h128z M1408 640v-384h-384v128h-128v-384h-128v640h384v-128h128v128h128zM640 1408v-640h-640v640h640zM1408 1408v-640h-640v640h640z" />
<glyph unicode="" horiz-adv-x="1792" d="M63 0h-63v1408h63v-1408zM126 1h-32v1407h32v-1407zM220 1h-31v1407h31v-1407zM377 1h-31v1407h31v-1407zM534 1h-62v1407h62v-1407zM660 1h-31v1407h31v-1407zM723 1h-31v1407h31v-1407zM786 1h-31v1407h31v-1407zM943 1h-63v1407h63v-1407zM1100 1h-63v1407h63v-1407z M1226 1h-63v1407h63v-1407zM1352 1h-63v1407h63v-1407zM1446 1h-63v1407h63v-1407zM1635 1h-94v1407h94v-1407zM1698 1h-32v1407h32v-1407zM1792 0h-63v1408h63v-1408z" />
<glyph unicode="" d="M448 1088q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1515 512q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-53 0 -90 37l-715 716q-38 37 -64.5 101t-26.5 117v416q0 52 38 90t90 38h416q53 0 117 -26.5t102 -64.5 l715 -714q37 -39 37 -91z" />
<glyph unicode="" horiz-adv-x="1920" d="M448 1088q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1515 512q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-53 0 -90 37l-715 716q-38 37 -64.5 101t-26.5 117v416q0 52 38 90t90 38h416q53 0 117 -26.5t102 -64.5 l715 -714q37 -39 37 -91zM1899 512q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-36 0 -59 14t-53 45l470 470q37 37 37 90q0 52 -37 91l-715 714q-38 38 -102 64.5t-117 26.5h224q53 0 117 -26.5t102 -64.5l715 -714q37 -39 37 -91z" />
<glyph unicode="" horiz-adv-x="1664" d="M1639 1058q40 -57 18 -129l-275 -906q-19 -64 -76.5 -107.5t-122.5 -43.5h-923q-77 0 -148.5 53.5t-99.5 131.5q-24 67 -2 127q0 4 3 27t4 37q1 8 -3 21.5t-3 19.5q2 11 8 21t16.5 23.5t16.5 23.5q23 38 45 91.5t30 91.5q3 10 0.5 30t-0.5 28q3 11 17 28t17 23 q21 36 42 92t25 90q1 9 -2.5 32t0.5 28q4 13 22 30.5t22 22.5q19 26 42.5 84.5t27.5 96.5q1 8 -3 25.5t-2 26.5q2 8 9 18t18 23t17 21q8 12 16.5 30.5t15 35t16 36t19.5 32t26.5 23.5t36 11.5t47.5 -5.5l-1 -3q38 9 51 9h761q74 0 114 -56t18 -130l-274 -906 q-36 -119 -71.5 -153.5t-128.5 -34.5h-869q-27 0 -38 -15q-11 -16 -1 -43q24 -70 144 -70h923q29 0 56 15.5t35 41.5l300 987q7 22 5 57q38 -15 59 -43zM575 1056q-4 -13 2 -22.5t20 -9.5h608q13 0 25.5 9.5t16.5 22.5l21 64q4 13 -2 22.5t-20 9.5h-608q-13 0 -25.5 -9.5 t-16.5 -22.5zM492 800q-4 -13 2 -22.5t20 -9.5h608q13 0 25.5 9.5t16.5 22.5l21 64q4 13 -2 22.5t-20 9.5h-608q-13 0 -25.5 -9.5t-16.5 -22.5z" />
<glyph unicode="" horiz-adv-x="1280" d="M1164 1408q23 0 44 -9q33 -13 52.5 -41t19.5 -62v-1289q0 -34 -19.5 -62t-52.5 -41q-19 -8 -44 -8q-48 0 -83 32l-441 424l-441 -424q-36 -33 -83 -33q-23 0 -44 9q-33 13 -52.5 41t-19.5 62v1289q0 34 19.5 62t52.5 41q21 9 44 9h1048z" />
<glyph unicode="" horiz-adv-x="1664" d="M384 0h896v256h-896v-256zM384 640h896v384h-160q-40 0 -68 28t-28 68v160h-640v-640zM1536 576q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 576v-416q0 -13 -9.5 -22.5t-22.5 -9.5h-224v-160q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68 v160h-224q-13 0 -22.5 9.5t-9.5 22.5v416q0 79 56.5 135.5t135.5 56.5h64v544q0 40 28 68t68 28h672q40 0 88 -20t76 -48l152 -152q28 -28 48 -76t20 -88v-256h64q79 0 135.5 -56.5t56.5 -135.5z" />
<glyph unicode="" horiz-adv-x="1920" d="M960 864q119 0 203.5 -84.5t84.5 -203.5t-84.5 -203.5t-203.5 -84.5t-203.5 84.5t-84.5 203.5t84.5 203.5t203.5 84.5zM1664 1280q106 0 181 -75t75 -181v-896q0 -106 -75 -181t-181 -75h-1408q-106 0 -181 75t-75 181v896q0 106 75 181t181 75h224l51 136 q19 49 69.5 84.5t103.5 35.5h512q53 0 103.5 -35.5t69.5 -84.5l51 -136h224zM960 128q185 0 316.5 131.5t131.5 316.5t-131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5z" />
<glyph unicode="" horiz-adv-x="1664" d="M725 977l-170 -450q73 -1 153.5 -2t119 -1.5t52.5 -0.5l29 2q-32 95 -92 241q-53 132 -92 211zM21 -128h-21l2 79q22 7 80 18q89 16 110 31q20 16 48 68l237 616l280 724h75h53l11 -21l205 -480q103 -242 124 -297q39 -102 96 -235q26 -58 65 -164q24 -67 65 -149 q22 -49 35 -57q22 -19 69 -23q47 -6 103 -27q6 -39 6 -57q0 -14 -1 -26q-80 0 -192 8q-93 8 -189 8q-79 0 -135 -2l-200 -11l-58 -2q0 45 4 78l131 28q56 13 68 23q12 12 12 27t-6 32l-47 114l-92 228l-450 2q-29 -65 -104 -274q-23 -64 -23 -84q0 -31 17 -43 q26 -21 103 -32q3 0 13.5 -2t30 -5t40.5 -6q1 -28 1 -58q0 -17 -2 -27q-66 0 -349 20l-48 -8q-81 -14 -167 -14z" />
<glyph unicode="" horiz-adv-x="1408" d="M555 15q76 -32 140 -32q131 0 216 41t122 113q38 70 38 181q0 114 -41 180q-58 94 -141 126q-80 32 -247 32q-74 0 -101 -10v-144l-1 -173l3 -270q0 -15 12 -44zM541 761q43 -7 109 -7q175 0 264 65t89 224q0 112 -85 187q-84 75 -255 75q-52 0 -130 -13q0 -44 2 -77 q7 -122 6 -279l-1 -98q0 -43 1 -77zM0 -128l2 94q45 9 68 12q77 12 123 31q17 27 21 51q9 66 9 194l-2 497q-5 256 -9 404q-1 87 -11 109q-1 4 -12 12q-18 12 -69 15q-30 2 -114 13l-4 83l260 6l380 13l45 1q5 0 14 0.5t14 0.5q1 0 21.5 -0.5t40.5 -0.5h74q88 0 191 -27 q43 -13 96 -39q57 -29 102 -76q44 -47 65 -104t21 -122q0 -70 -32 -128t-95 -105q-26 -20 -150 -77q177 -41 267 -146q92 -106 92 -236q0 -76 -29 -161q-21 -62 -71 -117q-66 -72 -140 -108q-73 -36 -203 -60q-82 -15 -198 -11l-197 4q-84 2 -298 -11q-33 -3 -272 -11z" />
<glyph unicode="" horiz-adv-x="1024" d="M0 -126l17 85q4 1 77 20q76 19 116 39q29 37 41 101l27 139l56 268l12 64q8 44 17 84.5t16 67t12.5 46.5t9 30.5t3.5 11.5l29 157l16 63l22 135l8 50v38q-41 22 -144 28q-28 2 -38 4l19 103l317 -14q39 -2 73 -2q66 0 214 9q33 2 68 4.5t36 2.5q-2 -19 -6 -38 q-7 -29 -13 -51q-55 -19 -109 -31q-64 -16 -101 -31q-12 -31 -24 -88q-9 -44 -13 -82q-44 -199 -66 -306l-61 -311l-38 -158l-43 -235l-12 -45q-2 -7 1 -27q64 -15 119 -21q36 -5 66 -10q-1 -29 -7 -58q-7 -31 -9 -41q-18 0 -23 -1q-24 -2 -42 -2q-9 0 -28 3q-19 4 -145 17 l-198 2q-41 1 -174 -11q-74 -7 -98 -9z" />
<glyph unicode="" horiz-adv-x="1792" d="M81 1407l54 -27q20 -5 211 -5h130l19 3l115 1l215 -1h293l34 -2q14 -1 28 7t21 16l7 8l42 1q15 0 28 -1v-104.5t1 -131.5l1 -100l-1 -58q0 -32 -4 -51q-39 -15 -68 -18q-25 43 -54 128q-8 24 -15.5 62.5t-11.5 65.5t-6 29q-13 15 -27 19q-7 2 -42.5 2t-103.5 -1t-111 -1 q-34 0 -67 -5q-10 -97 -8 -136l1 -152v-332l3 -359l-1 -147q-1 -46 11 -85q49 -25 89 -32q2 0 18 -5t44 -13t43 -12q30 -8 50 -18q5 -45 5 -50q0 -10 -3 -29q-14 -1 -34 -1q-110 0 -187 10q-72 8 -238 8q-88 0 -233 -14q-48 -4 -70 -4q-2 22 -2 26l-1 26v9q21 33 79 49 q139 38 159 50q9 21 12 56q8 192 6 433l-5 428q-1 62 -0.5 118.5t0.5 102.5t-2 57t-6 15q-6 5 -14 6q-38 6 -148 6q-43 0 -100 -13.5t-73 -24.5q-13 -9 -22 -33t-22 -75t-24 -84q-6 -19 -19.5 -32t-20.5 -13q-44 27 -56 44v297v86zM1744 128q33 0 42 -18.5t-11 -44.5 l-126 -162q-20 -26 -49 -26t-49 26l-126 162q-20 26 -11 44.5t42 18.5h80v1024h-80q-33 0 -42 18.5t11 44.5l126 162q20 26 49 26t49 -26l126 -162q20 -26 11 -44.5t-42 -18.5h-80v-1024h80z" />
<glyph unicode="" d="M81 1407l54 -27q20 -5 211 -5h130l19 3l115 1l446 -1h318l34 -2q14 -1 28 7t21 16l7 8l42 1q15 0 28 -1v-104.5t1 -131.5l1 -100l-1 -58q0 -32 -4 -51q-39 -15 -68 -18q-25 43 -54 128q-8 24 -15.5 62.5t-11.5 65.5t-6 29q-13 15 -27 19q-7 2 -58.5 2t-138.5 -1t-128 -1 q-94 0 -127 -5q-10 -97 -8 -136l1 -152v52l3 -359l-1 -147q-1 -46 11 -85q49 -25 89 -32q2 0 18 -5t44 -13t43 -12q30 -8 50 -18q5 -45 5 -50q0 -10 -3 -29q-14 -1 -34 -1q-110 0 -187 10q-72 8 -238 8q-82 0 -233 -13q-45 -5 -70 -5q-2 22 -2 26l-1 26v9q21 33 79 49 q139 38 159 50q9 21 12 56q6 137 6 433l-5 44q0 265 -2 278q-2 11 -6 15q-6 5 -14 6q-38 6 -148 6q-50 0 -168.5 -14t-132.5 -24q-13 -9 -22 -33t-22 -75t-24 -84q-6 -19 -19.5 -32t-20.5 -13q-44 27 -56 44v297v86zM1505 113q26 -20 26 -49t-26 -49l-162 -126 q-26 -20 -44.5 -11t-18.5 42v80h-1024v-80q0 -33 -18.5 -42t-44.5 11l-162 126q-26 20 -26 49t26 49l162 126q26 20 44.5 11t18.5 -42v-80h1024v80q0 33 18.5 42t44.5 -11z" />
<glyph unicode="" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1408 576v-128q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1280q26 0 45 -19t19 -45zM1664 960v-128q0 -26 -19 -45 t-45 -19h-1536q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1536q26 0 45 -19t19 -45zM1280 1344v-128q0 -26 -19 -45t-45 -19h-1152q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1408 576v-128q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h896q26 0 45 -19t19 -45zM1664 960v-128q0 -26 -19 -45t-45 -19 h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1280 1344v-128q0 -26 -19 -45t-45 -19h-640q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h640q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 576v-128q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1280q26 0 45 -19t19 -45zM1792 960v-128q0 -26 -19 -45 t-45 -19h-1536q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1536q26 0 45 -19t19 -45zM1792 1344v-128q0 -26 -19 -45t-45 -19h-1152q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 576v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 960v-128q0 -26 -19 -45 t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 1344v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1792" d="M256 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5t9.5 -22.5zM256 608v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5 t9.5 -22.5zM256 992v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5t9.5 -22.5zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1344 q13 0 22.5 -9.5t9.5 -22.5zM256 1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5t9.5 -22.5zM1792 608v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5 t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5zM1792 992v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5zM1792 1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v192 q0 13 9.5 22.5t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5z" />
<glyph unicode="" horiz-adv-x="1792" d="M384 992v-576q0 -13 -9.5 -22.5t-22.5 -9.5q-14 0 -23 9l-288 288q-9 9 -9 23t9 23l288 288q9 9 23 9q13 0 22.5 -9.5t9.5 -22.5zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5 t9.5 -22.5zM1792 608v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1088q13 0 22.5 -9.5t9.5 -22.5zM1792 992v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1088 q13 0 22.5 -9.5t9.5 -22.5zM1792 1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5t9.5 -22.5z" />
<glyph unicode="" horiz-adv-x="1792" d="M352 704q0 -14 -9 -23l-288 -288q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5v576q0 13 9.5 22.5t22.5 9.5q14 0 23 -9l288 -288q9 -9 9 -23zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5 t9.5 -22.5zM1792 608v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1088q13 0 22.5 -9.5t9.5 -22.5zM1792 992v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1088 q13 0 22.5 -9.5t9.5 -22.5zM1792 1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5t9.5 -22.5z" />
<glyph unicode="" horiz-adv-x="1792" d="M1792 1184v-1088q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-403 403v-166q0 -119 -84.5 -203.5t-203.5 -84.5h-704q-119 0 -203.5 84.5t-84.5 203.5v704q0 119 84.5 203.5t203.5 84.5h704q119 0 203.5 -84.5t84.5 -203.5v-165l403 402q18 19 45 19q12 0 25 -5 q39 -17 39 -59z" />
<glyph unicode="" horiz-adv-x="1920" d="M640 960q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1664 576v-448h-1408v192l320 320l160 -160l512 512zM1760 1280h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-1216q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5v1216 q0 13 -9.5 22.5t-22.5 9.5zM1920 1248v-1216q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" />
<glyph unicode="" d="M363 0l91 91l-235 235l-91 -91v-107h128v-128h107zM886 928q0 22 -22 22q-10 0 -17 -7l-542 -542q-7 -7 -7 -17q0 -22 22 -22q10 0 17 7l542 542q7 7 7 17zM832 1120l416 -416l-832 -832h-416v416zM1515 1024q0 -53 -37 -90l-166 -166l-416 416l166 165q36 38 90 38 q53 0 91 -38l235 -234q37 -39 37 -91z" />
<glyph unicode="" horiz-adv-x="1024" d="M768 896q0 106 -75 181t-181 75t-181 -75t-75 -181t75 -181t181 -75t181 75t75 181zM1024 896q0 -109 -33 -179l-364 -774q-16 -33 -47.5 -52t-67.5 -19t-67.5 19t-46.5 52l-365 774q-33 70 -33 179q0 212 150 362t362 150t362 -150t150 -362z" />
<glyph unicode="" d="M768 96v1088q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" horiz-adv-x="1024" d="M512 384q0 36 -20 69q-1 1 -15.5 22.5t-25.5 38t-25 44t-21 50.5q-4 16 -21 16t-21 -16q-7 -23 -21 -50.5t-25 -44t-25.5 -38t-15.5 -22.5q-20 -33 -20 -69q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1024 512q0 -212 -150 -362t-362 -150t-362 150t-150 362 q0 145 81 275q6 9 62.5 90.5t101 151t99.5 178t83 201.5q9 30 34 47t51 17t51.5 -17t33.5 -47q28 -93 83 -201.5t99.5 -178t101 -151t62.5 -90.5q81 -127 81 -275z" />
<glyph unicode="" horiz-adv-x="1792" d="M888 352l116 116l-152 152l-116 -116v-56h96v-96h56zM1328 1072q-16 16 -33 -1l-350 -350q-17 -17 -1 -33t33 1l350 350q17 17 1 33zM1408 478v-190q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832 q63 0 117 -25q15 -7 18 -23q3 -17 -9 -29l-49 -49q-14 -14 -32 -8q-23 6 -45 6h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v126q0 13 9 22l64 64q15 15 35 7t20 -29zM1312 1216l288 -288l-672 -672h-288v288zM1756 1084l-92 -92 l-288 288l92 92q28 28 68 28t68 -28l152 -152q28 -28 28 -68t-28 -68z" />
<glyph unicode="" horiz-adv-x="1664" d="M1408 547v-259q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h255v0q13 0 22.5 -9.5t9.5 -22.5q0 -27 -26 -32q-77 -26 -133 -60q-10 -4 -16 -4h-112q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832 q66 0 113 47t47 113v214q0 19 18 29q28 13 54 37q16 16 35 8q21 -9 21 -29zM1645 1043l-384 -384q-18 -19 -45 -19q-12 0 -25 5q-39 17 -39 59v192h-160q-323 0 -438 -131q-119 -137 -74 -473q3 -23 -20 -34q-8 -2 -12 -2q-16 0 -26 13q-10 14 -21 31t-39.5 68.5t-49.5 99.5 t-38.5 114t-17.5 122q0 49 3.5 91t14 90t28 88t47 81.5t68.5 74t94.5 61.5t124.5 48.5t159.5 30.5t196.5 11h160v192q0 42 39 59q13 5 25 5q26 0 45 -19l384 -384q19 -19 19 -45t-19 -45z" />
<glyph unicode="" horiz-adv-x="1664" d="M1408 606v-318q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832q63 0 117 -25q15 -7 18 -23q3 -17 -9 -29l-49 -49q-10 -10 -23 -10q-3 0 -9 2q-23 6 -45 6h-832q-66 0 -113 -47t-47 -113v-832 q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v254q0 13 9 22l64 64q10 10 23 10q6 0 12 -3q20 -8 20 -29zM1639 1095l-814 -814q-24 -24 -57 -24t-57 24l-430 430q-24 24 -24 57t24 57l110 110q24 24 57 24t57 -24l263 -263l647 647q24 24 57 24t57 -24l110 -110 q24 -24 24 -57t-24 -57z" />
<glyph unicode="" horiz-adv-x="1792" d="M1792 640q0 -26 -19 -45l-256 -256q-19 -19 -45 -19t-45 19t-19 45v128h-384v-384h128q26 0 45 -19t19 -45t-19 -45l-256 -256q-19 -19 -45 -19t-45 19l-256 256q-19 19 -19 45t19 45t45 19h128v384h-384v-128q0 -26 -19 -45t-45 -19t-45 19l-256 256q-19 19 -19 45 t19 45l256 256q19 19 45 19t45 -19t19 -45v-128h384v384h-128q-26 0 -45 19t-19 45t19 45l256 256q19 19 45 19t45 -19l256 -256q19 -19 19 -45t-19 -45t-45 -19h-128v-384h384v128q0 26 19 45t45 19t45 -19l256 -256q19 -19 19 -45z" />
<glyph unicode="" horiz-adv-x="1024" d="M979 1395q19 19 32 13t13 -32v-1472q0 -26 -13 -32t-32 13l-710 710q-9 9 -13 19v-678q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-678q4 11 13 19z" />
<glyph unicode="" horiz-adv-x="1792" d="M1747 1395q19 19 32 13t13 -32v-1472q0 -26 -13 -32t-32 13l-710 710q-9 9 -13 19v-710q0 -26 -13 -32t-32 13l-710 710q-9 9 -13 19v-678q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-678q4 11 13 19l710 710 q19 19 32 13t13 -32v-710q4 11 13 19z" />
<glyph unicode="" horiz-adv-x="1664" d="M1619 1395q19 19 32 13t13 -32v-1472q0 -26 -13 -32t-32 13l-710 710q-8 9 -13 19v-710q0 -26 -13 -32t-32 13l-710 710q-19 19 -19 45t19 45l710 710q19 19 32 13t13 -32v-710q5 11 13 19z" />
<glyph unicode="" horiz-adv-x="1408" d="M1384 609l-1328 -738q-23 -13 -39.5 -3t-16.5 36v1472q0 26 16.5 36t39.5 -3l1328 -738q23 -13 23 -31t-23 -31z" />
<glyph unicode="" d="M1536 1344v-1408q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h512q26 0 45 -19t19 -45zM640 1344v-1408q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h512q26 0 45 -19t19 -45z" />
<glyph unicode="" d="M1536 1344v-1408q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h1408q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1664" d="M45 -115q-19 -19 -32 -13t-13 32v1472q0 26 13 32t32 -13l710 -710q8 -8 13 -19v710q0 26 13 32t32 -13l710 -710q19 -19 19 -45t-19 -45l-710 -710q-19 -19 -32 -13t-13 32v710q-5 -10 -13 -19z" />
<glyph unicode="" horiz-adv-x="1792" d="M45 -115q-19 -19 -32 -13t-13 32v1472q0 26 13 32t32 -13l710 -710q8 -8 13 -19v710q0 26 13 32t32 -13l710 -710q8 -8 13 -19v678q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-1408q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v678q-5 -10 -13 -19l-710 -710 q-19 -19 -32 -13t-13 32v710q-5 -10 -13 -19z" />
<glyph unicode="" horiz-adv-x="1024" d="M45 -115q-19 -19 -32 -13t-13 32v1472q0 26 13 32t32 -13l710 -710q8 -8 13 -19v678q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-1408q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v678q-5 -10 -13 -19z" />
<glyph unicode="" horiz-adv-x="1538" d="M14 557l710 710q19 19 45 19t45 -19l710 -710q19 -19 13 -32t-32 -13h-1472q-26 0 -32 13t13 32zM1473 0h-1408q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1408q26 0 45 -19t19 -45v-256q0 -26 -19 -45t-45 -19z" />
<glyph unicode="" horiz-adv-x="1152" d="M742 -37l-652 651q-37 37 -37 90.5t37 90.5l652 651q37 37 90.5 37t90.5 -37l75 -75q37 -37 37 -90.5t-37 -90.5l-486 -486l486 -485q37 -38 37 -91t-37 -90l-75 -75q-37 -37 -90.5 -37t-90.5 37z" />
<glyph unicode="" horiz-adv-x="1152" d="M1099 704q0 -52 -37 -91l-652 -651q-37 -37 -90 -37t-90 37l-76 75q-37 39 -37 91q0 53 37 90l486 486l-486 485q-37 39 -37 91q0 53 37 90l76 75q36 38 90 38t90 -38l652 -651q37 -37 37 -90z" />
<glyph unicode="" d="M1216 576v128q0 26 -19 45t-45 19h-256v256q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-256h-256q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h256v-256q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v256h256q26 0 45 19t19 45zM1536 640q0 -209 -103 -385.5 t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1216 576v128q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h768q26 0 45 19t19 45zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5 t103 -385.5z" />
<glyph unicode="" d="M1149 414q0 26 -19 45l-181 181l181 181q19 19 19 45q0 27 -19 46l-90 90q-19 19 -46 19q-26 0 -45 -19l-181 -181l-181 181q-19 19 -45 19q-27 0 -46 -19l-90 -90q-19 -19 -19 -46q0 -26 19 -45l181 -181l-181 -181q-19 -19 -19 -45q0 -27 19 -46l90 -90q19 -19 46 -19 q26 0 45 19l181 181l181 -181q19 -19 45 -19q27 0 46 19l90 90q19 19 19 46zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1284 802q0 28 -18 46l-91 90q-19 19 -45 19t-45 -19l-408 -407l-226 226q-19 19 -45 19t-45 -19l-91 -90q-18 -18 -18 -46q0 -27 18 -45l362 -362q19 -19 45 -19q27 0 46 19l543 543q18 18 18 45zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103 t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M896 160v192q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h192q14 0 23 9t9 23zM1152 832q0 88 -55.5 163t-138.5 116t-170 41q-243 0 -371 -213q-15 -24 8 -42l132 -100q7 -6 19 -6q16 0 25 12q53 68 86 92q34 24 86 24q48 0 85.5 -26t37.5 -59 q0 -38 -20 -61t-68 -45q-63 -28 -115.5 -86.5t-52.5 -125.5v-36q0 -14 9 -23t23 -9h192q14 0 23 9t9 23q0 19 21.5 49.5t54.5 49.5q32 18 49 28.5t46 35t44.5 48t28 60.5t12.5 81zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1024 160v160q0 14 -9 23t-23 9h-96v512q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-160q0 -14 9 -23t23 -9h96v-320h-96q-14 0 -23 -9t-9 -23v-160q0 -14 9 -23t23 -9h448q14 0 23 9t9 23zM896 1056v160q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-160q0 -14 9 -23 t23 -9h192q14 0 23 9t9 23zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1197 512h-109q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h109q-32 108 -112.5 188.5t-188.5 112.5v-109q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v109q-108 -32 -188.5 -112.5t-112.5 -188.5h109q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-109 q32 -108 112.5 -188.5t188.5 -112.5v109q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-109q108 32 188.5 112.5t112.5 188.5zM1536 704v-128q0 -26 -19 -45t-45 -19h-143q-37 -161 -154.5 -278.5t-278.5 -154.5v-143q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v143 q-161 37 -278.5 154.5t-154.5 278.5h-143q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h143q37 161 154.5 278.5t278.5 154.5v143q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-143q161 -37 278.5 -154.5t154.5 -278.5h143q26 0 45 -19t19 -45z" />
<glyph unicode="" d="M1097 457l-146 -146q-10 -10 -23 -10t-23 10l-137 137l-137 -137q-10 -10 -23 -10t-23 10l-146 146q-10 10 -10 23t10 23l137 137l-137 137q-10 10 -10 23t10 23l146 146q10 10 23 10t23 -10l137 -137l137 137q10 10 23 10t23 -10l146 -146q10 -10 10 -23t-10 -23 l-137 -137l137 -137q10 -10 10 -23t-10 -23zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5 t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1171 723l-422 -422q-19 -19 -45 -19t-45 19l-294 294q-19 19 -19 45t19 45l102 102q19 19 45 19t45 -19l147 -147l275 275q19 19 45 19t45 -19l102 -102q19 -19 19 -45t-19 -45zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198 t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1312 643q0 161 -87 295l-754 -753q137 -89 297 -89q111 0 211.5 43.5t173.5 116.5t116 174.5t43 212.5zM313 344l755 754q-135 91 -300 91q-148 0 -273 -73t-198 -199t-73 -274q0 -162 89 -299zM1536 643q0 -157 -61 -300t-163.5 -246t-245 -164t-298.5 -61t-298.5 61 t-245 164t-163.5 246t-61 300t61 299.5t163.5 245.5t245 164t298.5 61t298.5 -61t245 -164t163.5 -245.5t61 -299.5z" />
<glyph unicode="" d="M1536 640v-128q0 -53 -32.5 -90.5t-84.5 -37.5h-704l293 -294q38 -36 38 -90t-38 -90l-75 -76q-37 -37 -90 -37q-52 0 -91 37l-651 652q-37 37 -37 90q0 52 37 91l651 650q38 38 91 38q52 0 90 -38l75 -74q38 -38 38 -91t-38 -91l-293 -293h704q52 0 84.5 -37.5 t32.5 -90.5z" />
<glyph unicode="" d="M1472 576q0 -54 -37 -91l-651 -651q-39 -37 -91 -37q-51 0 -90 37l-75 75q-38 38 -38 91t38 91l293 293h-704q-52 0 -84.5 37.5t-32.5 90.5v128q0 53 32.5 90.5t84.5 37.5h704l-293 294q-38 36 -38 90t38 90l75 75q38 38 90 38q53 0 91 -38l651 -651q37 -35 37 -90z" />
<glyph unicode="" horiz-adv-x="1664" d="M1611 565q0 -51 -37 -90l-75 -75q-38 -38 -91 -38q-54 0 -90 38l-294 293v-704q0 -52 -37.5 -84.5t-90.5 -32.5h-128q-53 0 -90.5 32.5t-37.5 84.5v704l-294 -293q-36 -38 -90 -38t-90 38l-75 75q-38 38 -38 90q0 53 38 91l651 651q35 37 90 37q54 0 91 -37l651 -651 q37 -39 37 -91z" />
<glyph unicode="" horiz-adv-x="1664" d="M1611 704q0 -53 -37 -90l-651 -652q-39 -37 -91 -37q-53 0 -90 37l-651 652q-38 36 -38 90q0 53 38 91l74 75q39 37 91 37q53 0 90 -37l294 -294v704q0 52 38 90t90 38h128q52 0 90 -38t38 -90v-704l294 294q37 37 90 37q52 0 91 -37l75 -75q37 -39 37 -91z" />
<glyph unicode="" horiz-adv-x="1792" d="M1792 896q0 -26 -19 -45l-512 -512q-19 -19 -45 -19t-45 19t-19 45v256h-224q-98 0 -175.5 -6t-154 -21.5t-133 -42.5t-105.5 -69.5t-80 -101t-48.5 -138.5t-17.5 -181q0 -55 5 -123q0 -6 2.5 -23.5t2.5 -26.5q0 -15 -8.5 -25t-23.5 -10q-16 0 -28 17q-7 9 -13 22 t-13.5 30t-10.5 24q-127 285 -127 451q0 199 53 333q162 403 875 403h224v256q0 26 19 45t45 19t45 -19l512 -512q19 -19 19 -45z" />
<glyph unicode="" d="M755 480q0 -13 -10 -23l-332 -332l144 -144q19 -19 19 -45t-19 -45t-45 -19h-448q-26 0 -45 19t-19 45v448q0 26 19 45t45 19t45 -19l144 -144l332 332q10 10 23 10t23 -10l114 -114q10 -10 10 -23zM1536 1344v-448q0 -26 -19 -45t-45 -19t-45 19l-144 144l-332 -332 q-10 -10 -23 -10t-23 10l-114 114q-10 10 -10 23t10 23l332 332l-144 144q-19 19 -19 45t19 45t45 19h448q26 0 45 -19t19 -45z" />
<glyph unicode="" d="M768 576v-448q0 -26 -19 -45t-45 -19t-45 19l-144 144l-332 -332q-10 -10 -23 -10t-23 10l-114 114q-10 10 -10 23t10 23l332 332l-144 144q-19 19 -19 45t19 45t45 19h448q26 0 45 -19t19 -45zM1523 1248q0 -13 -10 -23l-332 -332l144 -144q19 -19 19 -45t-19 -45 t-45 -19h-448q-26 0 -45 19t-19 45v448q0 26 19 45t45 19t45 -19l144 -144l332 332q10 10 23 10t23 -10l114 -114q10 -10 10 -23z" />
<glyph unicode="" horiz-adv-x="1408" d="M1408 800v-192q0 -40 -28 -68t-68 -28h-416v-416q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v416h-416q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h416v416q0 40 28 68t68 28h192q40 0 68 -28t28 -68v-416h416q40 0 68 -28t28 -68z" />
<glyph unicode="" horiz-adv-x="1408" d="M1408 800v-192q0 -40 -28 -68t-68 -28h-1216q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h1216q40 0 68 -28t28 -68z" />
<glyph unicode="" horiz-adv-x="1664" d="M1482 486q46 -26 59.5 -77.5t-12.5 -97.5l-64 -110q-26 -46 -77.5 -59.5t-97.5 12.5l-266 153v-307q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v307l-266 -153q-46 -26 -97.5 -12.5t-77.5 59.5l-64 110q-26 46 -12.5 97.5t59.5 77.5l266 154l-266 154 q-46 26 -59.5 77.5t12.5 97.5l64 110q26 46 77.5 59.5t97.5 -12.5l266 -153v307q0 52 38 90t90 38h128q52 0 90 -38t38 -90v-307l266 153q46 26 97.5 12.5t77.5 -59.5l64 -110q26 -46 12.5 -97.5t-59.5 -77.5l-266 -154z" />
<glyph unicode="" d="M768 1408q209 0 385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103zM896 161v190q0 14 -9 23.5t-22 9.5h-192q-13 0 -23 -10t-10 -23v-190q0 -13 10 -23t23 -10h192 q13 0 22 9.5t9 23.5zM894 505l18 621q0 12 -10 18q-10 8 -24 8h-220q-14 0 -24 -8q-10 -6 -10 -18l17 -621q0 -10 10 -17.5t24 -7.5h185q14 0 23.5 7.5t10.5 17.5z" />
<glyph unicode="" d="M928 180v56v468v192h-320v-192v-468v-56q0 -25 18 -38.5t46 -13.5h192q28 0 46 13.5t18 38.5zM472 1024h195l-126 161q-26 31 -69 31q-40 0 -68 -28t-28 -68t28 -68t68 -28zM1160 1120q0 40 -28 68t-68 28q-43 0 -69 -31l-125 -161h194q40 0 68 28t28 68zM1536 864v-320 q0 -14 -9 -23t-23 -9h-96v-416q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v416h-96q-14 0 -23 9t-9 23v320q0 14 9 23t23 9h440q-93 0 -158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5q107 0 168 -77l128 -165l128 165q61 77 168 77q93 0 158.5 -65.5t65.5 -158.5 t-65.5 -158.5t-158.5 -65.5h440q14 0 23 -9t9 -23z" />
<glyph unicode="" horiz-adv-x="1792" d="M1280 832q0 26 -19 45t-45 19q-172 0 -318 -49.5t-259.5 -134t-235.5 -219.5q-19 -21 -19 -45q0 -26 19 -45t45 -19q24 0 45 19q27 24 74 71t67 66q137 124 268.5 176t313.5 52q26 0 45 19t19 45zM1792 1030q0 -95 -20 -193q-46 -224 -184.5 -383t-357.5 -268 q-214 -108 -438 -108q-148 0 -286 47q-15 5 -88 42t-96 37q-16 0 -39.5 -32t-45 -70t-52.5 -70t-60 -32q-30 0 -51 11t-31 24t-27 42q-2 4 -6 11t-5.5 10t-3 9.5t-1.5 13.5q0 35 31 73.5t68 65.5t68 56t31 48q0 4 -14 38t-16 44q-9 51 -9 104q0 115 43.5 220t119 184.5 t170.5 139t204 95.5q55 18 145 25.5t179.5 9t178.5 6t163.5 24t113.5 56.5l29.5 29.5t29.5 28t27 20t36.5 16t43.5 4.5q39 0 70.5 -46t47.5 -112t24 -124t8 -96z" />
<glyph unicode="" horiz-adv-x="1408" d="M1408 -160v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5zM1152 896q0 -78 -24.5 -144t-64 -112.5t-87.5 -88t-96 -77.5t-87.5 -72t-64 -81.5t-24.5 -96.5q0 -96 67 -224l-4 1l1 -1 q-90 41 -160 83t-138.5 100t-113.5 122.5t-72.5 150.5t-27.5 184q0 78 24.5 144t64 112.5t87.5 88t96 77.5t87.5 72t64 81.5t24.5 96.5q0 94 -66 224l3 -1l-1 1q90 -41 160 -83t138.5 -100t113.5 -122.5t72.5 -150.5t27.5 -184z" />
<glyph unicode="" horiz-adv-x="1792" d="M1664 576q-152 236 -381 353q61 -104 61 -225q0 -185 -131.5 -316.5t-316.5 -131.5t-316.5 131.5t-131.5 316.5q0 121 61 225q-229 -117 -381 -353q133 -205 333.5 -326.5t434.5 -121.5t434.5 121.5t333.5 326.5zM944 960q0 20 -14 34t-34 14q-125 0 -214.5 -89.5 t-89.5 -214.5q0 -20 14 -34t34 -14t34 14t14 34q0 86 61 147t147 61q20 0 34 14t14 34zM1792 576q0 -34 -20 -69q-140 -230 -376.5 -368.5t-499.5 -138.5t-499.5 139t-376.5 368q-20 35 -20 69t20 69q140 229 376.5 368t499.5 139t499.5 -139t376.5 -368q20 -35 20 -69z" />
<glyph unicode="" horiz-adv-x="1792" d="M555 201l78 141q-87 63 -136 159t-49 203q0 121 61 225q-229 -117 -381 -353q167 -258 427 -375zM944 960q0 20 -14 34t-34 14q-125 0 -214.5 -89.5t-89.5 -214.5q0 -20 14 -34t34 -14t34 14t14 34q0 86 61 147t147 61q20 0 34 14t14 34zM1307 1151q0 -7 -1 -9 q-105 -188 -315 -566t-316 -567l-49 -89q-10 -16 -28 -16q-12 0 -134 70q-16 10 -16 28q0 12 44 87q-143 65 -263.5 173t-208.5 245q-20 31 -20 69t20 69q153 235 380 371t496 136q89 0 180 -17l54 97q10 16 28 16q5 0 18 -6t31 -15.5t33 -18.5t31.5 -18.5t19.5 -11.5 q16 -10 16 -27zM1344 704q0 -139 -79 -253.5t-209 -164.5l280 502q8 -45 8 -84zM1792 576q0 -35 -20 -69q-39 -64 -109 -145q-150 -172 -347.5 -267t-419.5 -95l74 132q212 18 392.5 137t301.5 307q-115 179 -282 294l63 112q95 -64 182.5 -153t144.5 -184q20 -34 20 -69z " />
<glyph unicode="" horiz-adv-x="1792" d="M1024 161v190q0 14 -9.5 23.5t-22.5 9.5h-192q-13 0 -22.5 -9.5t-9.5 -23.5v-190q0 -14 9.5 -23.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 23.5zM1022 535l18 459q0 12 -10 19q-13 11 -24 11h-220q-11 0 -24 -11q-10 -7 -10 -21l17 -457q0 -10 10 -16.5t24 -6.5h185 q14 0 23.5 6.5t10.5 16.5zM1008 1469l768 -1408q35 -63 -2 -126q-17 -29 -46.5 -46t-63.5 -17h-1536q-34 0 -63.5 17t-46.5 46q-37 63 -2 126l768 1408q17 31 47 49t65 18t65 -18t47 -49z" />
<glyph unicode="" horiz-adv-x="1408" d="M1376 1376q44 -52 12 -148t-108 -172l-161 -161l160 -696q5 -19 -12 -33l-128 -96q-7 -6 -19 -6q-4 0 -7 1q-15 3 -21 16l-279 508l-259 -259l53 -194q5 -17 -8 -31l-96 -96q-9 -9 -23 -9h-2q-15 2 -24 13l-189 252l-252 189q-11 7 -13 23q-1 13 9 25l96 97q9 9 23 9 q6 0 8 -1l194 -53l259 259l-508 279q-14 8 -17 24q-2 16 9 27l128 128q14 13 30 8l665 -159l160 160q76 76 172 108t148 -12z" />
<glyph unicode="" horiz-adv-x="1664" d="M128 -128h288v288h-288v-288zM480 -128h320v288h-320v-288zM128 224h288v320h-288v-320zM480 224h320v320h-320v-320zM128 608h288v288h-288v-288zM864 -128h320v288h-320v-288zM480 608h320v288h-320v-288zM1248 -128h288v288h-288v-288zM864 224h320v320h-320v-320z M512 1088v288q0 13 -9.5 22.5t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-288q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5zM1248 224h288v320h-288v-320zM864 608h320v288h-320v-288zM1248 608h288v288h-288v-288zM1280 1088v288q0 13 -9.5 22.5t-22.5 9.5h-64 q-13 0 -22.5 -9.5t-9.5 -22.5v-288q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5zM1664 1152v-1280q0 -52 -38 -90t-90 -38h-1408q-52 0 -90 38t-38 90v1280q0 52 38 90t90 38h128v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h384v96q0 66 47 113t113 47 h64q66 0 113 -47t47 -113v-96h128q52 0 90 -38t38 -90z" />
<glyph unicode="" horiz-adv-x="1792" d="M666 1055q-60 -92 -137 -273q-22 45 -37 72.5t-40.5 63.5t-51 56.5t-63 35t-81.5 14.5h-224q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h224q250 0 410 -225zM1792 256q0 -14 -9 -23l-320 -320q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5v192q-32 0 -85 -0.5t-81 -1t-73 1 t-71 5t-64 10.5t-63 18.5t-58 28.5t-59 40t-55 53.5t-56 69.5q59 93 136 273q22 -45 37 -72.5t40.5 -63.5t51 -56.5t63 -35t81.5 -14.5h256v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23zM1792 1152q0 -14 -9 -23l-320 -320q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5 v192h-256q-48 0 -87 -15t-69 -45t-51 -61.5t-45 -77.5q-32 -62 -78 -171q-29 -66 -49.5 -111t-54 -105t-64 -100t-74 -83t-90 -68.5t-106.5 -42t-128 -16.5h-224q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h224q48 0 87 15t69 45t51 61.5t45 77.5q32 62 78 171q29 66 49.5 111 t54 105t64 100t74 83t90 68.5t106.5 42t128 16.5h256v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23z" />
<glyph unicode="" horiz-adv-x="1792" d="M1792 640q0 -174 -120 -321.5t-326 -233t-450 -85.5q-70 0 -145 8q-198 -175 -460 -242q-49 -14 -114 -22q-17 -2 -30.5 9t-17.5 29v1q-3 4 -0.5 12t2 10t4.5 9.5l6 9t7 8.5t8 9q7 8 31 34.5t34.5 38t31 39.5t32.5 51t27 59t26 76q-157 89 -247.5 220t-90.5 281 q0 130 71 248.5t191 204.5t286 136.5t348 50.5q244 0 450 -85.5t326 -233t120 -321.5z" />
<glyph unicode="" d="M1536 704v-128q0 -201 -98.5 -362t-274 -251.5t-395.5 -90.5t-395.5 90.5t-274 251.5t-98.5 362v128q0 26 19 45t45 19h384q26 0 45 -19t19 -45v-128q0 -52 23.5 -90t53.5 -57t71 -30t64 -13t44 -2t44 2t64 13t71 30t53.5 57t23.5 90v128q0 26 19 45t45 19h384 q26 0 45 -19t19 -45zM512 1344v-384q0 -26 -19 -45t-45 -19h-384q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h384q26 0 45 -19t19 -45zM1536 1344v-384q0 -26 -19 -45t-45 -19h-384q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h384q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1664" d="M1611 320q0 -53 -37 -90l-75 -75q-38 -38 -91 -38q-54 0 -90 38l-486 485l-486 -485q-36 -38 -90 -38t-90 38l-75 75q-38 36 -38 90q0 53 38 91l651 651q37 37 90 37q52 0 91 -37l650 -651q38 -38 38 -91z" />
<glyph unicode="" horiz-adv-x="1664" d="M1611 832q0 -53 -37 -90l-651 -651q-38 -38 -91 -38q-54 0 -90 38l-651 651q-38 36 -38 90q0 53 38 91l74 75q39 37 91 37q53 0 90 -37l486 -486l486 486q37 37 90 37q52 0 91 -37l75 -75q37 -39 37 -91z" />
<glyph unicode="" horiz-adv-x="1920" d="M1280 32q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-8 0 -13.5 2t-9 7t-5.5 8t-3 11.5t-1 11.5v13v11v160v416h-192q-26 0 -45 19t-19 45q0 24 15 41l320 384q19 22 49 22t49 -22l320 -384q15 -17 15 -41q0 -26 -19 -45t-45 -19h-192v-384h576q16 0 25 -11l160 -192q7 -11 7 -21 zM1920 448q0 -24 -15 -41l-320 -384q-20 -23 -49 -23t-49 23l-320 384q-15 17 -15 41q0 26 19 45t45 19h192v384h-576q-16 0 -25 12l-160 192q-7 9 -7 20q0 13 9.5 22.5t22.5 9.5h960q8 0 13.5 -2t9 -7t5.5 -8t3 -11.5t1 -11.5v-13v-11v-160v-416h192q26 0 45 -19t19 -45z " />
<glyph unicode="" horiz-adv-x="1664" d="M640 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1536 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1664 1088v-512q0 -24 -16 -42.5t-41 -21.5 l-1044 -122q1 -7 4.5 -21.5t6 -26.5t2.5 -22q0 -16 -24 -64h920q26 0 45 -19t19 -45t-19 -45t-45 -19h-1024q-26 0 -45 19t-19 45q0 14 11 39.5t29.5 59.5t20.5 38l-177 823h-204q-26 0 -45 19t-19 45t19 45t45 19h256q16 0 28.5 -6.5t20 -15.5t13 -24.5t7.5 -26.5 t5.5 -29.5t4.5 -25.5h1201q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1664" d="M1664 928v-704q0 -92 -66 -158t-158 -66h-1216q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h672q92 0 158 -66t66 -158z" />
<glyph unicode="" horiz-adv-x="1920" d="M1879 584q0 -31 -31 -66l-336 -396q-43 -51 -120.5 -86.5t-143.5 -35.5h-1088q-34 0 -60.5 13t-26.5 43q0 31 31 66l336 396q43 51 120.5 86.5t143.5 35.5h1088q34 0 60.5 -13t26.5 -43zM1536 928v-160h-832q-94 0 -197 -47.5t-164 -119.5l-337 -396l-5 -6q0 4 -0.5 12.5 t-0.5 12.5v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h544q92 0 158 -66t66 -158z" />
<glyph unicode="" horiz-adv-x="768" d="M704 1216q0 -26 -19 -45t-45 -19h-128v-1024h128q26 0 45 -19t19 -45t-19 -45l-256 -256q-19 -19 -45 -19t-45 19l-256 256q-19 19 -19 45t19 45t45 19h128v1024h-128q-26 0 -45 19t-19 45t19 45l256 256q19 19 45 19t45 -19l256 -256q19 -19 19 -45z" />
<glyph unicode="" horiz-adv-x="1792" d="M1792 640q0 -26 -19 -45l-256 -256q-19 -19 -45 -19t-45 19t-19 45v128h-1024v-128q0 -26 -19 -45t-45 -19t-45 19l-256 256q-19 19 -19 45t19 45l256 256q19 19 45 19t45 -19t19 -45v-128h1024v128q0 26 19 45t45 19t45 -19l256 -256q19 -19 19 -45z" />
<glyph unicode="" horiz-adv-x="1920" d="M512 512v-384h-256v384h256zM896 1024v-896h-256v896h256zM1280 768v-640h-256v640h256zM1664 1152v-1024h-256v1024h256zM1792 32v1216q0 13 -9.5 22.5t-22.5 9.5h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-1216q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5z M1920 1248v-1216q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" />
<glyph unicode="" d="M1280 926q-56 -25 -121 -34q68 40 93 117q-65 -38 -134 -51q-61 66 -153 66q-87 0 -148.5 -61.5t-61.5 -148.5q0 -29 5 -48q-129 7 -242 65t-192 155q-29 -50 -29 -106q0 -114 91 -175q-47 1 -100 26v-2q0 -75 50 -133.5t123 -72.5q-29 -8 -51 -8q-13 0 -39 4 q21 -63 74.5 -104t121.5 -42q-116 -90 -261 -90q-26 0 -50 3q148 -94 322 -94q112 0 210 35.5t168 95t120.5 137t75 162t24.5 168.5q0 18 -1 27q63 45 105 109zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5 t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" d="M1307 618l23 219h-198v109q0 49 15.5 68.5t71.5 19.5h110v219h-175q-152 0 -218 -72t-66 -213v-131h-131v-219h131v-635h262v635h175zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960 q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" horiz-adv-x="1792" d="M928 704q0 14 -9 23t-23 9q-66 0 -113 -47t-47 -113q0 -14 9 -23t23 -9t23 9t9 23q0 40 28 68t68 28q14 0 23 9t9 23zM1152 574q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181zM128 0h1536v128h-1536v-128zM1280 574q0 159 -112.5 271.5 t-271.5 112.5t-271.5 -112.5t-112.5 -271.5t112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5zM256 1216h384v128h-384v-128zM128 1024h1536v118v138h-828l-64 -128h-644v-128zM1792 1280v-1280q0 -53 -37.5 -90.5t-90.5 -37.5h-1536q-53 0 -90.5 37.5t-37.5 90.5v1280 q0 53 37.5 90.5t90.5 37.5h1536q53 0 90.5 -37.5t37.5 -90.5z" />
<glyph unicode="" horiz-adv-x="1792" d="M832 1024q0 80 -56 136t-136 56t-136 -56t-56 -136q0 -42 19 -83q-41 19 -83 19q-80 0 -136 -56t-56 -136t56 -136t136 -56t136 56t56 136q0 42 -19 83q41 -19 83 -19q80 0 136 56t56 136zM1683 320q0 -17 -49 -66t-66 -49q-9 0 -28.5 16t-36.5 33t-38.5 40t-24.5 26 l-96 -96l220 -220q28 -28 28 -68q0 -42 -39 -81t-81 -39q-40 0 -68 28l-671 671q-176 -131 -365 -131q-163 0 -265.5 102.5t-102.5 265.5q0 160 95 313t248 248t313 95q163 0 265.5 -102.5t102.5 -265.5q0 -189 -131 -365l355 -355l96 96q-3 3 -26 24.5t-40 38.5t-33 36.5 t-16 28.5q0 17 49 66t66 49q13 0 23 -10q6 -6 46 -44.5t82 -79.5t86.5 -86t73 -78t28.5 -41z" />
<glyph unicode="" horiz-adv-x="1920" d="M896 640q0 106 -75 181t-181 75t-181 -75t-75 -181t75 -181t181 -75t181 75t75 181zM1664 128q0 52 -38 90t-90 38t-90 -38t-38 -90q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1664 1152q0 52 -38 90t-90 38t-90 -38t-38 -90q0 -53 37.5 -90.5t90.5 -37.5 t90.5 37.5t37.5 90.5zM1280 731v-185q0 -10 -7 -19.5t-16 -10.5l-155 -24q-11 -35 -32 -76q34 -48 90 -115q7 -10 7 -20q0 -12 -7 -19q-23 -30 -82.5 -89.5t-78.5 -59.5q-11 0 -21 7l-115 90q-37 -19 -77 -31q-11 -108 -23 -155q-7 -24 -30 -24h-186q-11 0 -20 7.5t-10 17.5 l-23 153q-34 10 -75 31l-118 -89q-7 -7 -20 -7q-11 0 -21 8q-144 133 -144 160q0 9 7 19q10 14 41 53t47 61q-23 44 -35 82l-152 24q-10 1 -17 9.5t-7 19.5v185q0 10 7 19.5t16 10.5l155 24q11 35 32 76q-34 48 -90 115q-7 11 -7 20q0 12 7 20q22 30 82 89t79 59q11 0 21 -7 l115 -90q34 18 77 32q11 108 23 154q7 24 30 24h186q11 0 20 -7.5t10 -17.5l23 -153q34 -10 75 -31l118 89q8 7 20 7q11 0 21 -8q144 -133 144 -160q0 -9 -7 -19q-12 -16 -42 -54t-45 -60q23 -48 34 -82l152 -23q10 -2 17 -10.5t7 -19.5zM1920 198v-140q0 -16 -149 -31 q-12 -27 -30 -52q51 -113 51 -138q0 -4 -4 -7q-122 -71 -124 -71q-8 0 -46 47t-52 68q-20 -2 -30 -2t-30 2q-14 -21 -52 -68t-46 -47q-2 0 -124 71q-4 3 -4 7q0 25 51 138q-18 25 -30 52q-149 15 -149 31v140q0 16 149 31q13 29 30 52q-51 113 -51 138q0 4 4 7q4 2 35 20 t59 34t30 16q8 0 46 -46.5t52 -67.5q20 2 30 2t30 -2q51 71 92 112l6 2q4 0 124 -70q4 -3 4 -7q0 -25 -51 -138q17 -23 30 -52q149 -15 149 -31zM1920 1222v-140q0 -16 -149 -31q-12 -27 -30 -52q51 -113 51 -138q0 -4 -4 -7q-122 -71 -124 -71q-8 0 -46 47t-52 68 q-20 -2 -30 -2t-30 2q-14 -21 -52 -68t-46 -47q-2 0 -124 71q-4 3 -4 7q0 25 51 138q-18 25 -30 52q-149 15 -149 31v140q0 16 149 31q13 29 30 52q-51 113 -51 138q0 4 4 7q4 2 35 20t59 34t30 16q8 0 46 -46.5t52 -67.5q20 2 30 2t30 -2q51 71 92 112l6 2q4 0 124 -70 q4 -3 4 -7q0 -25 -51 -138q17 -23 30 -52q149 -15 149 -31z" />
<glyph unicode="" horiz-adv-x="1792" d="M1408 768q0 -139 -94 -257t-256.5 -186.5t-353.5 -68.5q-86 0 -176 16q-124 -88 -278 -128q-36 -9 -86 -16h-3q-11 0 -20.5 8t-11.5 21q-1 3 -1 6.5t0.5 6.5t2 6l2.5 5t3.5 5.5t4 5t4.5 5t4 4.5q5 6 23 25t26 29.5t22.5 29t25 38.5t20.5 44q-124 72 -195 177t-71 224 q0 139 94 257t256.5 186.5t353.5 68.5t353.5 -68.5t256.5 -186.5t94 -257zM1792 512q0 -120 -71 -224.5t-195 -176.5q10 -24 20.5 -44t25 -38.5t22.5 -29t26 -29.5t23 -25q1 -1 4 -4.5t4.5 -5t4 -5t3.5 -5.5l2.5 -5t2 -6t0.5 -6.5t-1 -6.5q-3 -14 -13 -22t-22 -7 q-50 7 -86 16q-154 40 -278 128q-90 -16 -176 -16q-271 0 -472 132q58 -4 88 -4q161 0 309 45t264 129q125 92 192 212t67 254q0 77 -23 152q129 -71 204 -178t75 -230z" />
<glyph unicode="" d="M256 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 768q0 51 -39 89.5t-89 38.5h-352q0 58 48 159.5t48 160.5q0 98 -32 145t-128 47q-26 -26 -38 -85t-30.5 -125.5t-59.5 -109.5q-22 -23 -77 -91q-4 -5 -23 -30t-31.5 -41t-34.5 -42.5 t-40 -44t-38.5 -35.5t-40 -27t-35.5 -9h-32v-640h32q13 0 31.5 -3t33 -6.5t38 -11t35 -11.5t35.5 -12.5t29 -10.5q211 -73 342 -73h121q192 0 192 167q0 26 -5 56q30 16 47.5 52.5t17.5 73.5t-18 69q53 50 53 119q0 25 -10 55.5t-25 47.5q32 1 53.5 47t21.5 81zM1536 769 q0 -89 -49 -163q9 -33 9 -69q0 -77 -38 -144q3 -21 3 -43q0 -101 -60 -178q1 -139 -85 -219.5t-227 -80.5h-36h-93q-96 0 -189.5 22.5t-216.5 65.5q-116 40 -138 40h-288q-53 0 -90.5 37.5t-37.5 90.5v640q0 53 37.5 90.5t90.5 37.5h274q36 24 137 155q58 75 107 128 q24 25 35.5 85.5t30.5 126.5t62 108q39 37 90 37q84 0 151 -32.5t102 -101.5t35 -186q0 -93 -48 -192h176q104 0 180 -76t76 -179z" />
<glyph unicode="" d="M256 1088q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 512q0 35 -21.5 81t-53.5 47q15 17 25 47.5t10 55.5q0 69 -53 119q18 32 18 69t-17.5 73.5t-47.5 52.5q5 30 5 56q0 85 -49 126t-136 41h-128q-131 0 -342 -73q-5 -2 -29 -10.5 t-35.5 -12.5t-35 -11.5t-38 -11t-33 -6.5t-31.5 -3h-32v-640h32q16 0 35.5 -9t40 -27t38.5 -35.5t40 -44t34.5 -42.5t31.5 -41t23 -30q55 -68 77 -91q41 -43 59.5 -109.5t30.5 -125.5t38 -85q96 0 128 47t32 145q0 59 -48 160.5t-48 159.5h352q50 0 89 38.5t39 89.5z M1536 511q0 -103 -76 -179t-180 -76h-176q48 -99 48 -192q0 -118 -35 -186q-35 -69 -102 -101.5t-151 -32.5q-51 0 -90 37q-34 33 -54 82t-25.5 90.5t-17.5 84.5t-31 64q-48 50 -107 127q-101 131 -137 155h-274q-53 0 -90.5 37.5t-37.5 90.5v640q0 53 37.5 90.5t90.5 37.5 h288q22 0 138 40q128 44 223 66t200 22h112q140 0 226.5 -79t85.5 -216v-5q60 -77 60 -178q0 -22 -3 -43q38 -67 38 -144q0 -36 -9 -69q49 -74 49 -163z" />
<glyph unicode="" horiz-adv-x="896" d="M832 1504v-1339l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41z" />
<glyph unicode="" horiz-adv-x="1792" d="M1664 940q0 81 -21.5 143t-55 98.5t-81.5 59.5t-94 31t-98 8t-112 -25.5t-110.5 -64t-86.5 -72t-60 -61.5q-18 -22 -49 -22t-49 22q-24 28 -60 61.5t-86.5 72t-110.5 64t-112 25.5t-98 -8t-94 -31t-81.5 -59.5t-55 -98.5t-21.5 -143q0 -168 187 -355l581 -560l580 559 q188 188 188 356zM1792 940q0 -221 -229 -450l-623 -600q-18 -18 -44 -18t-44 18l-624 602q-10 8 -27.5 26t-55.5 65.5t-68 97.5t-53.5 121t-23.5 138q0 220 127 344t351 124q62 0 126.5 -21.5t120 -58t95.5 -68.5t76 -68q36 36 76 68t95.5 68.5t120 58t126.5 21.5 q224 0 351 -124t127 -344z" />
<glyph unicode="" horiz-adv-x="1664" d="M640 96q0 -4 1 -20t0.5 -26.5t-3 -23.5t-10 -19.5t-20.5 -6.5h-320q-119 0 -203.5 84.5t-84.5 203.5v704q0 119 84.5 203.5t203.5 84.5h320q13 0 22.5 -9.5t9.5 -22.5q0 -4 1 -20t0.5 -26.5t-3 -23.5t-10 -19.5t-20.5 -6.5h-320q-66 0 -113 -47t-47 -113v-704 q0 -66 47 -113t113 -47h288h11h13t11.5 -1t11.5 -3t8 -5.5t7 -9t2 -13.5zM1568 640q0 -26 -19 -45l-544 -544q-19 -19 -45 -19t-45 19t-19 45v288h-448q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h448v288q0 26 19 45t45 19t45 -19l544 -544q19 -19 19 -45z" />
<glyph unicode="" d="M237 122h231v694h-231v-694zM483 1030q-1 52 -36 86t-93 34t-94.5 -34t-36.5 -86q0 -51 35.5 -85.5t92.5 -34.5h1q59 0 95 34.5t36 85.5zM1068 122h231v398q0 154 -73 233t-193 79q-136 0 -209 -117h2v101h-231q3 -66 0 -694h231v388q0 38 7 56q15 35 45 59.5t74 24.5 q116 0 116 -157v-371zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" horiz-adv-x="1152" d="M480 672v448q0 14 -9 23t-23 9t-23 -9t-9 -23v-448q0 -14 9 -23t23 -9t23 9t9 23zM1152 320q0 -26 -19 -45t-45 -19h-429l-51 -483q-2 -12 -10.5 -20.5t-20.5 -8.5h-1q-27 0 -32 27l-76 485h-404q-26 0 -45 19t-19 45q0 123 78.5 221.5t177.5 98.5v512q-52 0 -90 38 t-38 90t38 90t90 38h640q52 0 90 -38t38 -90t-38 -90t-90 -38v-512q99 0 177.5 -98.5t78.5 -221.5z" />
<glyph unicode="" horiz-adv-x="1792" d="M1408 608v-320q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h704q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-704q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v320 q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1792 1472v-512q0 -26 -19 -45t-45 -19t-45 19l-176 176l-652 -652q-10 -10 -23 -10t-23 10l-114 114q-10 10 -10 23t10 23l652 652l-176 176q-19 19 -19 45t19 45t45 19h512q26 0 45 -19t19 -45z" />
<glyph unicode="" d="M1184 640q0 -26 -19 -45l-544 -544q-19 -19 -45 -19t-45 19t-19 45v288h-448q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h448v288q0 26 19 45t45 19t45 -19l544 -544q19 -19 19 -45zM1536 992v-704q0 -119 -84.5 -203.5t-203.5 -84.5h-320q-13 0 -22.5 9.5t-9.5 22.5 q0 4 -1 20t-0.5 26.5t3 23.5t10 19.5t20.5 6.5h320q66 0 113 47t47 113v704q0 66 -47 113t-113 47h-288h-11h-13t-11.5 1t-11.5 3t-8 5.5t-7 9t-2 13.5q0 4 -1 20t-0.5 26.5t3 23.5t10 19.5t20.5 6.5h320q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" horiz-adv-x="1664" d="M458 653q-74 162 -74 371h-256v-96q0 -78 94.5 -162t235.5 -113zM1536 928v96h-256q0 -209 -74 -371q141 29 235.5 113t94.5 162zM1664 1056v-128q0 -71 -41.5 -143t-112 -130t-173 -97.5t-215.5 -44.5q-42 -54 -95 -95q-38 -34 -52.5 -72.5t-14.5 -89.5q0 -54 30.5 -91 t97.5 -37q75 0 133.5 -45.5t58.5 -114.5v-64q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v64q0 69 58.5 114.5t133.5 45.5q67 0 97.5 37t30.5 91q0 51 -14.5 89.5t-52.5 72.5q-53 41 -95 95q-113 5 -215.5 44.5t-173 97.5t-112 130t-41.5 143v128q0 40 28 68t68 28h288v96 q0 66 47 113t113 47h576q66 0 113 -47t47 -113v-96h288q40 0 68 -28t28 -68z" />
<glyph unicode="" d="M394 184q-8 -9 -20 3q-13 11 -4 19q8 9 20 -3q12 -11 4 -19zM352 245q9 -12 0 -19q-8 -6 -17 7t0 18q9 7 17 -6zM291 305q-5 -7 -13 -2q-10 5 -7 12q3 5 13 2q10 -5 7 -12zM322 271q-6 -7 -16 3q-9 11 -2 16q6 6 16 -3q9 -11 2 -16zM451 159q-4 -12 -19 -6q-17 4 -13 15 t19 7q16 -5 13 -16zM514 154q0 -11 -16 -11q-17 -2 -17 11q0 11 16 11q17 2 17 -11zM572 164q2 -10 -14 -14t-18 8t14 15q16 2 18 -9zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-224q-16 0 -24.5 1t-19.5 5t-16 14.5t-5 27.5v239q0 97 -52 142q57 6 102.5 18t94 39 t81 66.5t53 105t20.5 150.5q0 121 -79 206q37 91 -8 204q-28 9 -81 -11t-92 -44l-38 -24q-93 26 -192 26t-192 -26q-16 11 -42.5 27t-83.5 38.5t-86 13.5q-44 -113 -7 -204q-79 -85 -79 -206q0 -85 20.5 -150t52.5 -105t80.5 -67t94 -39t102.5 -18q-40 -36 -49 -103 q-21 -10 -45 -15t-57 -5t-65.5 21.5t-55.5 62.5q-19 32 -48.5 52t-49.5 24l-20 3q-21 0 -29 -4.5t-5 -11.5t9 -14t13 -12l7 -5q22 -10 43.5 -38t31.5 -51l10 -23q13 -38 44 -61.5t67 -30t69.5 -7t55.5 3.5l23 4q0 -38 0.5 -103t0.5 -68q0 -22 -11 -33.5t-22 -13t-33 -1.5 h-224q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" horiz-adv-x="1664" d="M1280 64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1536 64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 288v-320q0 -40 -28 -68t-68 -28h-1472q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h427q21 -56 70.5 -92 t110.5 -36h256q61 0 110.5 36t70.5 92h427q40 0 68 -28t28 -68zM1339 936q-17 -40 -59 -40h-256v-448q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v448h-256q-42 0 -59 40q-17 39 14 69l448 448q18 19 45 19t45 -19l448 -448q31 -30 14 -69z" />
<glyph unicode="" d="M1407 710q0 44 -7 113.5t-18 96.5q-12 30 -17 44t-9 36.5t-4 48.5q0 23 5 68.5t5 67.5q0 37 -10 55q-4 1 -13 1q-19 0 -58 -4.5t-59 -4.5q-60 0 -176 24t-175 24q-43 0 -94.5 -11.5t-85 -23.5t-89.5 -34q-137 -54 -202 -103q-96 -73 -159.5 -189.5t-88 -236t-24.5 -248.5 q0 -40 12.5 -120t12.5 -121q0 -23 -11 -66.5t-11 -65.5t12 -36.5t34 -14.5q24 0 72.5 11t73.5 11q57 0 169.5 -15.5t169.5 -15.5q181 0 284 36q129 45 235.5 152.5t166 245.5t59.5 275zM1535 712q0 -165 -70 -327.5t-196 -288t-281 -180.5q-124 -44 -326 -44 q-57 0 -170 14.5t-169 14.5q-24 0 -72.5 -14.5t-73.5 -14.5q-73 0 -123.5 55.5t-50.5 128.5q0 24 11 68t11 67q0 40 -12.5 120.5t-12.5 121.5q0 111 18 217.5t54.5 209.5t100.5 194t150 156q78 59 232 120q194 78 316 78q60 0 175.5 -24t173.5 -24q19 0 57 5t58 5 q81 0 118 -50.5t37 -134.5q0 -23 -5 -68t-5 -68q0 -10 1 -18.5t3 -17t4 -13.5t6.5 -16t6.5 -17q16 -40 25 -118.5t9 -136.5z" />
<glyph unicode="" horiz-adv-x="1408" d="M1408 296q0 -27 -10 -70.5t-21 -68.5q-21 -50 -122 -106q-94 -51 -186 -51q-27 0 -52.5 3.5t-57.5 12.5t-47.5 14.5t-55.5 20.5t-49 18q-98 35 -175 83q-128 79 -264.5 215.5t-215.5 264.5q-48 77 -83 175q-3 9 -18 49t-20.5 55.5t-14.5 47.5t-12.5 57.5t-3.5 52.5 q0 92 51 186q56 101 106 122q25 11 68.5 21t70.5 10q14 0 21 -3q18 -6 53 -76q11 -19 30 -54t35 -63.5t31 -53.5q3 -4 17.5 -25t21.5 -35.5t7 -28.5q0 -20 -28.5 -50t-62 -55t-62 -53t-28.5 -46q0 -9 5 -22.5t8.5 -20.5t14 -24t11.5 -19q76 -137 174 -235t235 -174 q2 -1 19 -11.5t24 -14t20.5 -8.5t22.5 -5q18 0 46 28.5t53 62t55 62t50 28.5q14 0 28.5 -7t35.5 -21.5t25 -17.5q25 -15 53.5 -31t63.5 -35t54 -30q70 -35 76 -53q3 -7 3 -21z" />
<glyph unicode="" horiz-adv-x="1408" d="M1120 1280h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v832q0 66 -47 113t-113 47zM1408 1120v-832q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832 q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" horiz-adv-x="1280" d="M1152 1280h-1024v-1242l423 406l89 85l89 -85l423 -406v1242zM1164 1408q23 0 44 -9q33 -13 52.5 -41t19.5 -62v-1289q0 -34 -19.5 -62t-52.5 -41q-19 -8 -44 -8q-48 0 -83 32l-441 424l-441 -424q-36 -33 -83 -33q-23 0 -44 9q-33 13 -52.5 41t-19.5 62v1289 q0 34 19.5 62t52.5 41q21 9 44 9h1048z" />
<glyph unicode="" d="M1280 343q0 11 -2 16q-3 8 -38.5 29.5t-88.5 49.5l-53 29q-5 3 -19 13t-25 15t-21 5q-18 0 -47 -32.5t-57 -65.5t-44 -33q-7 0 -16.5 3.5t-15.5 6.5t-17 9.5t-14 8.5q-99 55 -170.5 126.5t-126.5 170.5q-2 3 -8.5 14t-9.5 17t-6.5 15.5t-3.5 16.5q0 13 20.5 33.5t45 38.5 t45 39.5t20.5 36.5q0 10 -5 21t-15 25t-13 19q-3 6 -15 28.5t-25 45.5t-26.5 47.5t-25 40.5t-16.5 18t-16 2q-48 0 -101 -22q-46 -21 -80 -94.5t-34 -130.5q0 -16 2.5 -34t5 -30.5t9 -33t10 -29.5t12.5 -33t11 -30q60 -164 216.5 -320.5t320.5 -216.5q6 -2 30 -11t33 -12.5 t29.5 -10t33 -9t30.5 -5t34 -2.5q57 0 130.5 34t94.5 80q22 53 22 101zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" horiz-adv-x="1664" d="M1620 1128q-67 -98 -162 -167q1 -14 1 -42q0 -130 -38 -259.5t-115.5 -248.5t-184.5 -210.5t-258 -146t-323 -54.5q-271 0 -496 145q35 -4 78 -4q225 0 401 138q-105 2 -188 64.5t-114 159.5q33 -5 61 -5q43 0 85 11q-112 23 -185.5 111.5t-73.5 205.5v4q68 -38 146 -41 q-66 44 -105 115t-39 154q0 88 44 163q121 -149 294.5 -238.5t371.5 -99.5q-8 38 -8 74q0 134 94.5 228.5t228.5 94.5q140 0 236 -102q109 21 205 78q-37 -115 -142 -178q93 10 186 50z" />
<glyph unicode="" horiz-adv-x="768" d="M511 980h257l-30 -284h-227v-824h-341v824h-170v284h170v171q0 182 86 275.5t283 93.5h227v-284h-142q-39 0 -62.5 -6.5t-34 -23.5t-13.5 -34.5t-3 -49.5v-142z" />
<glyph unicode="" d="M1536 640q0 -251 -146.5 -451.5t-378.5 -277.5q-27 -5 -39.5 7t-12.5 30v211q0 97 -52 142q57 6 102.5 18t94 39t81 66.5t53 105t20.5 150.5q0 121 -79 206q37 91 -8 204q-28 9 -81 -11t-92 -44l-38 -24q-93 26 -192 26t-192 -26q-16 11 -42.5 27t-83.5 38.5t-86 13.5 q-44 -113 -7 -204q-79 -85 -79 -206q0 -85 20.5 -150t52.5 -105t80.5 -67t94 -39t102.5 -18q-40 -36 -49 -103q-21 -10 -45 -15t-57 -5t-65.5 21.5t-55.5 62.5q-19 32 -48.5 52t-49.5 24l-20 3q-21 0 -29 -4.5t-5 -11.5t9 -14t13 -12l7 -5q22 -10 43.5 -38t31.5 -51l10 -23 q13 -38 44 -61.5t67 -30t69.5 -7t55.5 3.5l23 4q0 -38 0.5 -89t0.5 -54q0 -18 -13 -30t-40 -7q-232 77 -378.5 277.5t-146.5 451.5q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" horiz-adv-x="1664" d="M1664 960v-256q0 -26 -19 -45t-45 -19h-64q-26 0 -45 19t-19 45v256q0 106 -75 181t-181 75t-181 -75t-75 -181v-192h96q40 0 68 -28t28 -68v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v576q0 40 28 68t68 28h672v192q0 185 131.5 316.5t316.5 131.5 t316.5 -131.5t131.5 -316.5z" />
<glyph unicode="" horiz-adv-x="1920" d="M1760 1408q66 0 113 -47t47 -113v-1216q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1600zM160 1280q-13 0 -22.5 -9.5t-9.5 -22.5v-224h1664v224q0 13 -9.5 22.5t-22.5 9.5h-1600zM1760 0q13 0 22.5 9.5t9.5 22.5v608h-1664v-608 q0 -13 9.5 -22.5t22.5 -9.5h1600zM256 128v128h256v-128h-256zM640 128v128h384v-128h-384z" />
<glyph unicode="" horiz-adv-x="1408" d="M384 192q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM896 69q2 -28 -17 -48q-18 -21 -47 -21h-135q-25 0 -43 16.5t-20 41.5q-22 229 -184.5 391.5t-391.5 184.5q-25 2 -41.5 20t-16.5 43v135q0 29 21 47q17 17 43 17h5q160 -13 306 -80.5 t259 -181.5q114 -113 181.5 -259t80.5 -306zM1408 67q2 -27 -18 -47q-18 -20 -46 -20h-143q-26 0 -44.5 17.5t-19.5 42.5q-12 215 -101 408.5t-231.5 336t-336 231.5t-408.5 102q-25 1 -42.5 19.5t-17.5 43.5v143q0 28 20 46q18 18 44 18h3q262 -13 501.5 -120t425.5 -294 q187 -186 294 -425.5t120 -501.5z" />
<glyph unicode="" d="M1040 320q0 -33 -23.5 -56.5t-56.5 -23.5t-56.5 23.5t-23.5 56.5t23.5 56.5t56.5 23.5t56.5 -23.5t23.5 -56.5zM1296 320q0 -33 -23.5 -56.5t-56.5 -23.5t-56.5 23.5t-23.5 56.5t23.5 56.5t56.5 23.5t56.5 -23.5t23.5 -56.5zM1408 160v320q0 13 -9.5 22.5t-22.5 9.5 h-1216q-13 0 -22.5 -9.5t-9.5 -22.5v-320q0 -13 9.5 -22.5t22.5 -9.5h1216q13 0 22.5 9.5t9.5 22.5zM178 640h1180l-157 482q-4 13 -16 21.5t-26 8.5h-782q-14 0 -26 -8.5t-16 -21.5zM1536 480v-320q0 -66 -47 -113t-113 -47h-1216q-66 0 -113 47t-47 113v320q0 25 16 75 l197 606q17 53 63 86t101 33h782q55 0 101 -33t63 -86l197 -606q16 -50 16 -75z" />
<glyph unicode="" horiz-adv-x="1792" d="M1664 896q53 0 90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5v-384q0 -52 -38 -90t-90 -38q-417 347 -812 380q-58 -19 -91 -66t-31 -100.5t40 -92.5q-20 -33 -23 -65.5t6 -58t33.5 -55t48 -50t61.5 -50.5q-29 -58 -111.5 -83t-168.5 -11.5t-132 55.5q-7 23 -29.5 87.5 t-32 94.5t-23 89t-15 101t3.5 98.5t22 110.5h-122q-66 0 -113 47t-47 113v192q0 66 47 113t113 47h480q435 0 896 384q52 0 90 -38t38 -90v-384zM1536 292v954q-394 -302 -768 -343v-270q377 -42 768 -341z" />
<glyph unicode="" horiz-adv-x="1664" d="M848 -160q0 16 -16 16q-59 0 -101.5 42.5t-42.5 101.5q0 16 -16 16t-16 -16q0 -73 51.5 -124.5t124.5 -51.5q16 0 16 16zM183 128h1298q-164 181 -246.5 411.5t-82.5 484.5q0 256 -320 256t-320 -256q0 -254 -82.5 -484.5t-246.5 -411.5zM1664 128q0 -52 -38 -90t-90 -38 h-448q0 -106 -75 -181t-181 -75t-181 75t-75 181h-448q-52 0 -90 38t-38 90q190 161 287 397.5t97 498.5q0 165 96 262t264 117q-8 18 -8 37q0 40 28 68t68 28t68 -28t28 -68q0 -19 -8 -37q168 -20 264 -117t96 -262q0 -262 97 -498.5t287 -397.5z" />
<glyph unicode="" d="M1376 640l138 -135q30 -28 20 -70q-12 -41 -52 -51l-188 -48l53 -186q12 -41 -19 -70q-29 -31 -70 -19l-186 53l-48 -188q-10 -40 -51 -52q-12 -2 -19 -2q-31 0 -51 22l-135 138l-135 -138q-28 -30 -70 -20q-41 11 -51 52l-48 188l-186 -53q-41 -12 -70 19q-31 29 -19 70 l53 186l-188 48q-40 10 -52 51q-10 42 20 70l138 135l-138 135q-30 28 -20 70q12 41 52 51l188 48l-53 186q-12 41 19 70q29 31 70 19l186 -53l48 188q10 41 51 51q41 12 70 -19l135 -139l135 139q29 30 70 19q41 -10 51 -51l48 -188l186 53q41 12 70 -19q31 -29 19 -70 l-53 -186l188 -48q40 -10 52 -51q10 -42 -20 -70z" />
<glyph unicode="" horiz-adv-x="1792" d="M256 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 768q0 51 -39 89.5t-89 38.5h-576q0 20 15 48.5t33 55t33 68t15 84.5q0 67 -44.5 97.5t-115.5 30.5q-24 0 -90 -139q-24 -44 -37 -65q-40 -64 -112 -145q-71 -81 -101 -106 q-69 -57 -140 -57h-32v-640h32q72 0 167 -32t193.5 -64t179.5 -32q189 0 189 167q0 26 -5 56q30 16 47.5 52.5t17.5 73.5t-18 69q53 50 53 119q0 25 -10 55.5t-25 47.5h331q52 0 90 38t38 90zM1792 769q0 -105 -75.5 -181t-180.5 -76h-169q-4 -62 -37 -119q3 -21 3 -43 q0 -101 -60 -178q1 -139 -85 -219.5t-227 -80.5q-133 0 -322 69q-164 59 -223 59h-288q-53 0 -90.5 37.5t-37.5 90.5v640q0 53 37.5 90.5t90.5 37.5h288q10 0 21.5 4.5t23.5 14t22.5 18t24 22.5t20.5 21.5t19 21.5t14 17q65 74 100 129q13 21 33 62t37 72t40.5 63t55 49.5 t69.5 17.5q125 0 206.5 -67t81.5 -189q0 -68 -22 -128h374q104 0 180 -76t76 -179z" />
<glyph unicode="" horiz-adv-x="1792" d="M1376 128h32v640h-32q-35 0 -67.5 12t-62.5 37t-50 46t-49 54q-2 3 -3.5 4.5t-4 4.5t-4.5 5q-72 81 -112 145q-14 22 -38 68q-1 3 -10.5 22.5t-18.5 36t-20 35.5t-21.5 30.5t-18.5 11.5q-71 0 -115.5 -30.5t-44.5 -97.5q0 -43 15 -84.5t33 -68t33 -55t15 -48.5h-576 q-50 0 -89 -38.5t-39 -89.5q0 -52 38 -90t90 -38h331q-15 -17 -25 -47.5t-10 -55.5q0 -69 53 -119q-18 -32 -18 -69t17.5 -73.5t47.5 -52.5q-4 -24 -4 -56q0 -85 48.5 -126t135.5 -41q84 0 183 32t194 64t167 32zM1664 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45 t45 -19t45 19t19 45zM1792 768v-640q0 -53 -37.5 -90.5t-90.5 -37.5h-288q-59 0 -223 -59q-190 -69 -317 -69q-142 0 -230 77.5t-87 217.5l1 5q-61 76 -61 178q0 22 3 43q-33 57 -37 119h-169q-105 0 -180.5 76t-75.5 181q0 103 76 179t180 76h374q-22 60 -22 128 q0 122 81.5 189t206.5 67q38 0 69.5 -17.5t55 -49.5t40.5 -63t37 -72t33 -62q35 -55 100 -129q2 -3 14 -17t19 -21.5t20.5 -21.5t24 -22.5t22.5 -18t23.5 -14t21.5 -4.5h288q53 0 90.5 -37.5t37.5 -90.5z" />
<glyph unicode="" d="M1280 -64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 700q0 189 -167 189q-26 0 -56 -5q-16 30 -52.5 47.5t-73.5 17.5t-69 -18q-50 53 -119 53q-25 0 -55.5 -10t-47.5 -25v331q0 52 -38 90t-90 38q-51 0 -89.5 -39t-38.5 -89v-576 q-20 0 -48.5 15t-55 33t-68 33t-84.5 15q-67 0 -97.5 -44.5t-30.5 -115.5q0 -24 139 -90q44 -24 65 -37q64 -40 145 -112q81 -71 106 -101q57 -69 57 -140v-32h640v32q0 72 32 167t64 193.5t32 179.5zM1536 705q0 -133 -69 -322q-59 -164 -59 -223v-288q0 -53 -37.5 -90.5 t-90.5 -37.5h-640q-53 0 -90.5 37.5t-37.5 90.5v288q0 10 -4.5 21.5t-14 23.5t-18 22.5t-22.5 24t-21.5 20.5t-21.5 19t-17 14q-74 65 -129 100q-21 13 -62 33t-72 37t-63 40.5t-49.5 55t-17.5 69.5q0 125 67 206.5t189 81.5q68 0 128 -22v374q0 104 76 180t179 76 q105 0 181 -75.5t76 -180.5v-169q62 -4 119 -37q21 3 43 3q101 0 178 -60q139 1 219.5 -85t80.5 -227z" />
<glyph unicode="" d="M1408 576q0 84 -32 183t-64 194t-32 167v32h-640v-32q0 -35 -12 -67.5t-37 -62.5t-46 -50t-54 -49q-9 -8 -14 -12q-81 -72 -145 -112q-22 -14 -68 -38q-3 -1 -22.5 -10.5t-36 -18.5t-35.5 -20t-30.5 -21.5t-11.5 -18.5q0 -71 30.5 -115.5t97.5 -44.5q43 0 84.5 15t68 33 t55 33t48.5 15v-576q0 -50 38.5 -89t89.5 -39q52 0 90 38t38 90v331q46 -35 103 -35q69 0 119 53q32 -18 69 -18t73.5 17.5t52.5 47.5q24 -4 56 -4q85 0 126 48.5t41 135.5zM1280 1344q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1536 580 q0 -142 -77.5 -230t-217.5 -87l-5 1q-76 -61 -178 -61q-22 0 -43 3q-54 -30 -119 -37v-169q0 -105 -76 -180.5t-181 -75.5q-103 0 -179 76t-76 180v374q-54 -22 -128 -22q-121 0 -188.5 81.5t-67.5 206.5q0 38 17.5 69.5t49.5 55t63 40.5t72 37t62 33q55 35 129 100 q3 2 17 14t21.5 19t21.5 20.5t22.5 24t18 22.5t14 23.5t4.5 21.5v288q0 53 37.5 90.5t90.5 37.5h640q53 0 90.5 -37.5t37.5 -90.5v-288q0 -59 59 -223q69 -190 69 -317z" />
<glyph unicode="" d="M1280 576v128q0 26 -19 45t-45 19h-502l189 189q19 19 19 45t-19 45l-91 91q-18 18 -45 18t-45 -18l-362 -362l-91 -91q-18 -18 -18 -45t18 -45l91 -91l362 -362q18 -18 45 -18t45 18l91 91q18 18 18 45t-18 45l-189 189h502q26 0 45 19t19 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1285 640q0 27 -18 45l-91 91l-362 362q-18 18 -45 18t-45 -18l-91 -91q-18 -18 -18 -45t18 -45l189 -189h-502q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h502l-189 -189q-19 -19 -19 -45t19 -45l91 -91q18 -18 45 -18t45 18l362 362l91 91q18 18 18 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1284 641q0 27 -18 45l-362 362l-91 91q-18 18 -45 18t-45 -18l-91 -91l-362 -362q-18 -18 -18 -45t18 -45l91 -91q18 -18 45 -18t45 18l189 189v-502q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v502l189 -189q19 -19 45 -19t45 19l91 91q18 18 18 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1284 639q0 27 -18 45l-91 91q-18 18 -45 18t-45 -18l-189 -189v502q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-502l-189 189q-19 19 -45 19t-45 -19l-91 -91q-18 -18 -18 -45t18 -45l362 -362l91 -91q18 -18 45 -18t45 18l91 91l362 362q18 18 18 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M768 1408q209 0 385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103zM1042 887q-2 -1 -9.5 -9.5t-13.5 -9.5q2 0 4.5 5t5 11t3.5 7q6 7 22 15q14 6 52 12q34 8 51 -11 q-2 2 9.5 13t14.5 12q3 2 15 4.5t15 7.5l2 22q-12 -1 -17.5 7t-6.5 21q0 -2 -6 -8q0 7 -4.5 8t-11.5 -1t-9 -1q-10 3 -15 7.5t-8 16.5t-4 15q-2 5 -9.5 10.5t-9.5 10.5q-1 2 -2.5 5.5t-3 6.5t-4 5.5t-5.5 2.5t-7 -5t-7.5 -10t-4.5 -5q-3 2 -6 1.5t-4.5 -1t-4.5 -3t-5 -3.5 q-3 -2 -8.5 -3t-8.5 -2q15 5 -1 11q-10 4 -16 3q9 4 7.5 12t-8.5 14h5q-1 4 -8.5 8.5t-17.5 8.5t-13 6q-8 5 -34 9.5t-33 0.5q-5 -6 -4.5 -10.5t4 -14t3.5 -12.5q1 -6 -5.5 -13t-6.5 -12q0 -7 14 -15.5t10 -21.5q-3 -8 -16 -16t-16 -12q-5 -8 -1.5 -18.5t10.5 -16.5 q2 -2 1.5 -4t-3.5 -4.5t-5.5 -4t-6.5 -3.5l-3 -2q-11 -5 -20.5 6t-13.5 26q-7 25 -16 30q-23 8 -29 -1q-5 13 -41 26q-25 9 -58 4q6 1 0 15q-7 15 -19 12q3 6 4 17.5t1 13.5q3 13 12 23q1 1 7 8.5t9.5 13.5t0.5 6q35 -4 50 11q5 5 11.5 17t10.5 17q9 6 14 5.5t14.5 -5.5 t14.5 -5q14 -1 15.5 11t-7.5 20q12 -1 3 17q-5 7 -8 9q-12 4 -27 -5q-8 -4 2 -8q-1 1 -9.5 -10.5t-16.5 -17.5t-16 5q-1 1 -5.5 13.5t-9.5 13.5q-8 0 -16 -15q3 8 -11 15t-24 8q19 12 -8 27q-7 4 -20.5 5t-19.5 -4q-5 -7 -5.5 -11.5t5 -8t10.5 -5.5t11.5 -4t8.5 -3 q14 -10 8 -14q-2 -1 -8.5 -3.5t-11.5 -4.5t-6 -4q-3 -4 0 -14t-2 -14q-5 5 -9 17.5t-7 16.5q7 -9 -25 -6l-10 1q-4 0 -16 -2t-20.5 -1t-13.5 8q-4 8 0 20q1 4 4 2q-4 3 -11 9.5t-10 8.5q-46 -15 -94 -41q6 -1 12 1q5 2 13 6.5t10 5.5q34 14 42 7l5 5q14 -16 20 -25 q-7 4 -30 1q-20 -6 -22 -12q7 -12 5 -18q-4 3 -11.5 10t-14.5 11t-15 5q-16 0 -22 -1q-146 -80 -235 -222q7 -7 12 -8q4 -1 5 -9t2.5 -11t11.5 3q9 -8 3 -19q1 1 44 -27q19 -17 21 -21q3 -11 -10 -18q-1 2 -9 9t-9 4q-3 -5 0.5 -18.5t10.5 -12.5q-7 0 -9.5 -16t-2.5 -35.5 t-1 -23.5l2 -1q-3 -12 5.5 -34.5t21.5 -19.5q-13 -3 20 -43q6 -8 8 -9q3 -2 12 -7.5t15 -10t10 -10.5q4 -5 10 -22.5t14 -23.5q-2 -6 9.5 -20t10.5 -23q-1 0 -2.5 -1t-2.5 -1q3 -7 15.5 -14t15.5 -13q1 -3 2 -10t3 -11t8 -2q2 20 -24 62q-15 25 -17 29q-3 5 -5.5 15.5 t-4.5 14.5q2 0 6 -1.5t8.5 -3.5t7.5 -4t2 -3q-3 -7 2 -17.5t12 -18.5t17 -19t12 -13q6 -6 14 -19.5t0 -13.5q9 0 20 -10t17 -20q5 -8 8 -26t5 -24q2 -7 8.5 -13.5t12.5 -9.5l16 -8t13 -7q5 -2 18.5 -10.5t21.5 -11.5q10 -4 16 -4t14.5 2.5t13.5 3.5q15 2 29 -15t21 -21 q36 -19 55 -11q-2 -1 0.5 -7.5t8 -15.5t9 -14.5t5.5 -8.5q5 -6 18 -15t18 -15q6 4 7 9q-3 -8 7 -20t18 -10q14 3 14 32q-31 -15 -49 18q0 1 -2.5 5.5t-4 8.5t-2.5 8.5t0 7.5t5 3q9 0 10 3.5t-2 12.5t-4 13q-1 8 -11 20t-12 15q-5 -9 -16 -8t-16 9q0 -1 -1.5 -5.5t-1.5 -6.5 q-13 0 -15 1q1 3 2.5 17.5t3.5 22.5q1 4 5.5 12t7.5 14.5t4 12.5t-4.5 9.5t-17.5 2.5q-19 -1 -26 -20q-1 -3 -3 -10.5t-5 -11.5t-9 -7q-7 -3 -24 -2t-24 5q-13 8 -22.5 29t-9.5 37q0 10 2.5 26.5t3 25t-5.5 24.5q3 2 9 9.5t10 10.5q2 1 4.5 1.5t4.5 0t4 1.5t3 6q-1 1 -4 3 q-3 3 -4 3q7 -3 28.5 1.5t27.5 -1.5q15 -11 22 2q0 1 -2.5 9.5t-0.5 13.5q5 -27 29 -9q3 -3 15.5 -5t17.5 -5q3 -2 7 -5.5t5.5 -4.5t5 0.5t8.5 6.5q10 -14 12 -24q11 -40 19 -44q7 -3 11 -2t4.5 9.5t0 14t-1.5 12.5l-1 8v18l-1 8q-15 3 -18.5 12t1.5 18.5t15 18.5q1 1 8 3.5 t15.5 6.5t12.5 8q21 19 15 35q7 0 11 9q-1 0 -5 3t-7.5 5t-4.5 2q9 5 2 16q5 3 7.5 11t7.5 10q9 -12 21 -2q7 8 1 16q5 7 20.5 10.5t18.5 9.5q7 -2 8 2t1 12t3 12q4 5 15 9t13 5l17 11q3 4 0 4q18 -2 31 11q10 11 -6 20q3 6 -3 9.5t-15 5.5q3 1 11.5 0.5t10.5 1.5 q15 10 -7 16q-17 5 -43 -12zM879 10q206 36 351 189q-3 3 -12.5 4.5t-12.5 3.5q-18 7 -24 8q1 7 -2.5 13t-8 9t-12.5 8t-11 7q-2 2 -7 6t-7 5.5t-7.5 4.5t-8.5 2t-10 -1l-3 -1q-3 -1 -5.5 -2.5t-5.5 -3t-4 -3t0 -2.5q-21 17 -36 22q-5 1 -11 5.5t-10.5 7t-10 1.5t-11.5 -7 q-5 -5 -6 -15t-2 -13q-7 5 0 
17.5t2 18.5q-3 6 -10.5 4.5t-12 -4.5t-11.5 -8.5t-9 -6.5t-8.5 -5.5t-8.5 -7.5q-3 -4 -6 -12t-5 -11q-2 4 -11.5 6.5t-9.5 5.5q2 -10 4 -35t5 -38q7 -31 -12 -48q-27 -25 -29 -40q-4 -22 12 -26q0 -7 -8 -20.5t-7 -21.5q0 -6 2 -16z" />
<glyph unicode="" horiz-adv-x="1664" d="M384 64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1028 484l-682 -682q-37 -37 -90 -37q-52 0 -91 37l-106 108q-38 36 -38 90q0 53 38 91l681 681q39 -98 114.5 -173.5t173.5 -114.5zM1662 919q0 -39 -23 -106q-47 -134 -164.5 -217.5 t-258.5 -83.5q-185 0 -316.5 131.5t-131.5 316.5t131.5 316.5t316.5 131.5q58 0 121.5 -16.5t107.5 -46.5q16 -11 16 -28t-16 -28l-293 -169v-224l193 -107q5 3 79 48.5t135.5 81t70.5 35.5q15 0 23.5 -10t8.5 -25z" />
<glyph unicode="" horiz-adv-x="1792" d="M1024 128h640v128h-640v-128zM640 640h1024v128h-1024v-128zM1280 1152h384v128h-384v-128zM1792 320v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 832v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19 t-19 45v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 1344v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1408" d="M1403 1241q17 -41 -14 -70l-493 -493v-742q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-256 256q-19 19 -19 45v486l-493 493q-31 29 -14 70q17 39 59 39h1280q42 0 59 -39z" />
<glyph unicode="" horiz-adv-x="1792" d="M640 1280h512v128h-512v-128zM1792 640v-480q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v480h672v-160q0 -26 19 -45t45 -19h320q26 0 45 19t19 45v160h672zM1024 640v-128h-256v128h256zM1792 1120v-384h-1792v384q0 66 47 113t113 47h352v160q0 40 28 68 t68 28h576q40 0 68 -28t28 -68v-160h352q66 0 113 -47t47 -113z" />
<glyph unicode="" d="M1283 995l-355 -355l355 -355l144 144q29 31 70 14q39 -17 39 -59v-448q0 -26 -19 -45t-45 -19h-448q-42 0 -59 40q-17 39 14 69l144 144l-355 355l-355 -355l144 -144q31 -30 14 -69q-17 -40 -59 -40h-448q-26 0 -45 19t-19 45v448q0 42 40 59q39 17 69 -14l144 -144 l355 355l-355 355l-144 -144q-19 -19 -45 -19q-12 0 -24 5q-40 17 -40 59v448q0 26 19 45t45 19h448q42 0 59 -40q17 -39 -14 -69l-144 -144l355 -355l355 355l-144 144q-31 30 -14 69q17 40 59 40h448q26 0 45 -19t19 -45v-448q0 -42 -39 -59q-13 -5 -25 -5q-26 0 -45 19z " />
<glyph unicode="" horiz-adv-x="1920" d="M593 640q-162 -5 -265 -128h-134q-82 0 -138 40.5t-56 118.5q0 353 124 353q6 0 43.5 -21t97.5 -42.5t119 -21.5q67 0 133 23q-5 -37 -5 -66q0 -139 81 -256zM1664 3q0 -120 -73 -189.5t-194 -69.5h-874q-121 0 -194 69.5t-73 189.5q0 53 3.5 103.5t14 109t26.5 108.5 t43 97.5t62 81t85.5 53.5t111.5 20q10 0 43 -21.5t73 -48t107 -48t135 -21.5t135 21.5t107 48t73 48t43 21.5q61 0 111.5 -20t85.5 -53.5t62 -81t43 -97.5t26.5 -108.5t14 -109t3.5 -103.5zM640 1280q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75 t75 -181zM1344 896q0 -159 -112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5t112.5 271.5t271.5 112.5t271.5 -112.5t112.5 -271.5zM1920 671q0 -78 -56 -118.5t-138 -40.5h-134q-103 123 -265 128q81 117 81 256q0 29 -5 66q66 -23 133 -23q59 0 119 21.5t97.5 42.5 t43.5 21q124 0 124 -353zM1792 1280q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181z" />
<glyph unicode="" horiz-adv-x="1664" d="M1456 320q0 40 -28 68l-208 208q-28 28 -68 28q-42 0 -72 -32q3 -3 19 -18.5t21.5 -21.5t15 -19t13 -25.5t3.5 -27.5q0 -40 -28 -68t-68 -28q-15 0 -27.5 3.5t-25.5 13t-19 15t-21.5 21.5t-18.5 19q-33 -31 -33 -73q0 -40 28 -68l206 -207q27 -27 68 -27q40 0 68 26 l147 146q28 28 28 67zM753 1025q0 40 -28 68l-206 207q-28 28 -68 28q-39 0 -68 -27l-147 -146q-28 -28 -28 -67q0 -40 28 -68l208 -208q27 -27 68 -27q42 0 72 31q-3 3 -19 18.5t-21.5 21.5t-15 19t-13 25.5t-3.5 27.5q0 40 28 68t68 28q15 0 27.5 -3.5t25.5 -13t19 -15 t21.5 -21.5t18.5 -19q33 31 33 73zM1648 320q0 -120 -85 -203l-147 -146q-83 -83 -203 -83q-121 0 -204 85l-206 207q-83 83 -83 203q0 123 88 209l-88 88q-86 -88 -208 -88q-120 0 -204 84l-208 208q-84 84 -84 204t85 203l147 146q83 83 203 83q121 0 204 -85l206 -207 q83 -83 83 -203q0 -123 -88 -209l88 -88q86 88 208 88q120 0 204 -84l208 -208q84 -84 84 -204z" />
<glyph unicode="" horiz-adv-x="1920" d="M1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088q-185 0 -316.5 131.5t-131.5 316.5q0 132 71 241.5t187 163.5q-2 28 -2 43q0 212 150 362t362 150q158 0 286.5 -88t187.5 -230q70 62 166 62q106 0 181 -75t75 -181q0 -75 -41 -138q129 -30 213 -134.5t84 -239.5z " />
<glyph unicode="" horiz-adv-x="1664" d="M1527 88q56 -89 21.5 -152.5t-140.5 -63.5h-1152q-106 0 -140.5 63.5t21.5 152.5l503 793v399h-64q-26 0 -45 19t-19 45t19 45t45 19h512q26 0 45 -19t19 -45t-19 -45t-45 -19h-64v-399zM748 813l-272 -429h712l-272 429l-20 31v37v399h-128v-399v-37z" />
<glyph unicode="" horiz-adv-x="1792" d="M960 640q26 0 45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45t19 45t45 19zM1260 576l507 -398q28 -20 25 -56q-5 -35 -35 -51l-128 -64q-13 -7 -29 -7q-17 0 -31 8l-690 387l-110 -66q-8 -4 -12 -5q14 -49 10 -97q-7 -77 -56 -147.5t-132 -123.5q-132 -84 -277 -84 q-136 0 -222 78q-90 84 -79 207q7 76 56 147t131 124q132 84 278 84q83 0 151 -31q9 13 22 22l122 73l-122 73q-13 9 -22 22q-68 -31 -151 -31q-146 0 -278 84q-82 53 -131 124t-56 147q-5 59 15.5 113t63.5 93q85 79 222 79q145 0 277 -84q83 -52 132 -123t56 -148 q4 -48 -10 -97q4 -1 12 -5l110 -66l690 387q14 8 31 8q16 0 29 -7l128 -64q30 -16 35 -51q3 -36 -25 -56zM579 836q46 42 21 108t-106 117q-92 59 -192 59q-74 0 -113 -36q-46 -42 -21 -108t106 -117q92 -59 192 -59q74 0 113 36zM494 91q81 51 106 117t-21 108 q-39 36 -113 36q-100 0 -192 -59q-81 -51 -106 -117t21 -108q39 -36 113 -36q100 0 192 59zM672 704l96 -58v11q0 36 33 56l14 8l-79 47l-26 -26q-3 -3 -10 -11t-12 -12q-2 -2 -4 -3.5t-3 -2.5zM896 480l96 -32l736 576l-128 64l-768 -431v-113l-160 -96l9 -8q2 -2 7 -6 q4 -4 11 -12t11 -12l26 -26zM1600 64l128 64l-520 408l-177 -138q-2 -3 -13 -7z" />
<glyph unicode="" horiz-adv-x="1792" d="M1696 1152q40 0 68 -28t28 -68v-1216q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v288h-544q-40 0 -68 28t-28 68v672q0 40 20 88t48 76l408 408q28 28 76 48t88 20h416q40 0 68 -28t28 -68v-328q68 40 128 40h416zM1152 939l-299 -299h299v299zM512 1323l-299 -299 h299v299zM708 676l316 316v416h-384v-416q0 -40 -28 -68t-68 -28h-416v-640h512v256q0 40 20 88t48 76zM1664 -128v1152h-384v-416q0 -40 -28 -68t-68 -28h-416v-640h896z" />
<glyph unicode="" horiz-adv-x="1408" d="M1404 151q0 -117 -79 -196t-196 -79q-135 0 -235 100l-777 776q-113 115 -113 271q0 159 110 270t269 111q158 0 273 -113l605 -606q10 -10 10 -22q0 -16 -30.5 -46.5t-46.5 -30.5q-13 0 -23 10l-606 607q-79 77 -181 77q-106 0 -179 -75t-73 -181q0 -105 76 -181 l776 -777q63 -63 145 -63q64 0 106 42t42 106q0 82 -63 145l-581 581q-26 24 -60 24q-29 0 -48 -19t-19 -48q0 -32 25 -59l410 -410q10 -10 10 -22q0 -16 -31 -47t-47 -31q-12 0 -22 10l-410 410q-63 61 -63 149q0 82 57 139t139 57q88 0 149 -63l581 -581q100 -98 100 -235 z" />
<glyph unicode="" d="M384 0h768v384h-768v-384zM1280 0h128v896q0 14 -10 38.5t-20 34.5l-281 281q-10 10 -34 20t-39 10v-416q0 -40 -28 -68t-68 -28h-576q-40 0 -68 28t-28 68v416h-128v-1280h128v416q0 40 28 68t68 28h832q40 0 68 -28t28 -68v-416zM896 928v320q0 13 -9.5 22.5t-22.5 9.5 h-192q-13 0 -22.5 -9.5t-9.5 -22.5v-320q0 -13 9.5 -22.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 22.5zM1536 896v-928q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h928q40 0 88 -20t76 -48l280 -280q28 -28 48 -76t20 -88z" />
<glyph unicode="" d="M1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" d="M1536 192v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1536 704v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1536 1216v-128q0 -26 -19 -45 t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1792" d="M384 128q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM384 640q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5 t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5zM384 1152q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1792 736v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5z M1792 1248v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5z" />
<glyph unicode="" horiz-adv-x="1792" d="M381 -84q0 -80 -54.5 -126t-135.5 -46q-106 0 -172 66l57 88q49 -45 106 -45q29 0 50.5 14.5t21.5 42.5q0 64 -105 56l-26 56q8 10 32.5 43.5t42.5 54t37 38.5v1q-16 0 -48.5 -1t-48.5 -1v-53h-106v152h333v-88l-95 -115q51 -12 81 -49t30 -88zM383 543v-159h-362 q-6 36 -6 54q0 51 23.5 93t56.5 68t66 47.5t56.5 43.5t23.5 45q0 25 -14.5 38.5t-39.5 13.5q-46 0 -81 -58l-85 59q24 51 71.5 79.5t105.5 28.5q73 0 123 -41.5t50 -112.5q0 -50 -34 -91.5t-75 -64.5t-75.5 -50.5t-35.5 -52.5h127v60h105zM1792 224v-192q0 -13 -9.5 -22.5 t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 14 9 23t23 9h1216q13 0 22.5 -9.5t9.5 -22.5zM384 1123v-99h-335v99h107q0 41 0.5 122t0.5 121v12h-2q-8 -17 -50 -54l-71 76l136 127h106v-404h108zM1792 736v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5 t-9.5 22.5v192q0 14 9 23t23 9h1216q13 0 22.5 -9.5t9.5 -22.5zM1792 1248v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5z" />
<glyph unicode="" horiz-adv-x="1792" d="M1760 640q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-1728q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h1728zM483 704q-28 35 -51 80q-48 97 -48 188q0 181 134 309q133 127 393 127q50 0 167 -19q66 -12 177 -48q10 -38 21 -118q14 -123 14 -183q0 -18 -5 -45l-12 -3l-84 6 l-14 2q-50 149 -103 205q-88 91 -210 91q-114 0 -182 -59q-67 -58 -67 -146q0 -73 66 -140t279 -129q69 -20 173 -66q58 -28 95 -52h-743zM990 448h411q7 -39 7 -92q0 -111 -41 -212q-23 -55 -71 -104q-37 -35 -109 -81q-80 -48 -153 -66q-80 -21 -203 -21q-114 0 -195 23 l-140 40q-57 16 -72 28q-8 8 -8 22v13q0 108 -2 156q-1 30 0 68l2 37v44l102 2q15 -34 30 -71t22.5 -56t12.5 -27q35 -57 80 -94q43 -36 105 -57q59 -22 132 -22q64 0 139 27q77 26 122 86q47 61 47 129q0 84 -81 157q-34 29 -137 71z" />
<glyph unicode="" d="M48 1313q-37 2 -45 4l-3 88q13 1 40 1q60 0 112 -4q132 -7 166 -7q86 0 168 3q116 4 146 5q56 0 86 2l-1 -14l2 -64v-9q-60 -9 -124 -9q-60 0 -79 -25q-13 -14 -13 -132q0 -13 0.5 -32.5t0.5 -25.5l1 -229l14 -280q6 -124 51 -202q35 -59 96 -92q88 -47 177 -47 q104 0 191 28q56 18 99 51q48 36 65 64q36 56 53 114q21 73 21 229q0 79 -3.5 128t-11 122.5t-13.5 159.5l-4 59q-5 67 -24 88q-34 35 -77 34l-100 -2l-14 3l2 86h84l205 -10q76 -3 196 10l18 -2q6 -38 6 -51q0 -7 -4 -31q-45 -12 -84 -13q-73 -11 -79 -17q-15 -15 -15 -41 q0 -7 1.5 -27t1.5 -31q8 -19 22 -396q6 -195 -15 -304q-15 -76 -41 -122q-38 -65 -112 -123q-75 -57 -182 -89q-109 -33 -255 -33q-167 0 -284 46q-119 47 -179 122q-61 76 -83 195q-16 80 -16 237v333q0 188 -17 213q-25 36 -147 39zM1536 -96v64q0 14 -9 23t-23 9h-1472 q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h1472q14 0 23 9t9 23z" />
<glyph unicode="" horiz-adv-x="1664" d="M512 160v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM512 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 160v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23 v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM512 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1536 160v192 q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1536 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192 q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1536 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1664 1248v-1088q0 -66 -47 -113t-113 -47h-1344q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1344q66 0 113 -47t47 -113 z" />
<glyph unicode="" horiz-adv-x="1664" d="M1190 955l293 293l-107 107l-293 -293zM1637 1248q0 -27 -18 -45l-1286 -1286q-18 -18 -45 -18t-45 18l-198 198q-18 18 -18 45t18 45l1286 1286q18 18 45 18t45 -18l198 -198q18 -18 18 -45zM286 1438l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98zM636 1276 l196 -60l-196 -60l-60 -196l-60 196l-196 60l196 60l60 196zM1566 798l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98zM926 1438l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98z" />
<glyph unicode="" horiz-adv-x="1792" d="M640 128q0 52 -38 90t-90 38t-90 -38t-38 -90t38 -90t90 -38t90 38t38 90zM256 640h384v256h-158q-13 0 -22 -9l-195 -195q-9 -9 -9 -22v-30zM1536 128q0 52 -38 90t-90 38t-90 -38t-38 -90t38 -90t90 -38t90 38t38 90zM1792 1216v-1024q0 -15 -4 -26.5t-13.5 -18.5 t-16.5 -11.5t-23.5 -6t-22.5 -2t-25.5 0t-22.5 0.5q0 -106 -75 -181t-181 -75t-181 75t-75 181h-384q0 -106 -75 -181t-181 -75t-181 75t-75 181h-64q-3 0 -22.5 -0.5t-25.5 0t-22.5 2t-23.5 6t-16.5 11.5t-13.5 18.5t-4 26.5q0 26 19 45t45 19v320q0 8 -0.5 35t0 38 t2.5 34.5t6.5 37t14 30.5t22.5 30l198 198q19 19 50.5 32t58.5 13h160v192q0 26 19 45t45 19h1024q26 0 45 -19t19 -45z" />
<glyph unicode="" d="M1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103q-111 0 -218 32q59 93 78 164q9 34 54 211q20 -39 73 -67.5t114 -28.5q121 0 216 68.5t147 188.5t52 270q0 114 -59.5 214t-172.5 163t-255 63q-105 0 -196 -29t-154.5 -77t-109 -110.5t-67 -129.5t-21.5 -134 q0 -104 40 -183t117 -111q30 -12 38 20q2 7 8 31t8 30q6 23 -11 43q-51 61 -51 151q0 151 104.5 259.5t273.5 108.5q151 0 235.5 -82t84.5 -213q0 -170 -68.5 -289t-175.5 -119q-61 0 -98 43.5t-23 104.5q8 35 26.5 93.5t30 103t11.5 75.5q0 50 -27 83t-77 33 q-62 0 -105 -57t-43 -142q0 -73 25 -122l-99 -418q-17 -70 -13 -177q-206 91 -333 281t-127 423q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1248 1408q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-725q85 122 108 210q9 34 53 209q21 -39 73.5 -67t112.5 -28q181 0 295.5 147.5t114.5 373.5q0 84 -35 162.5t-96.5 139t-152.5 97t-197 36.5q-104 0 -194.5 -28.5t-153 -76.5 t-107.5 -109.5t-66.5 -128t-21.5 -132.5q0 -102 39.5 -180t116.5 -110q13 -5 23.5 0t14.5 19q10 44 15 61q6 23 -11 42q-50 62 -50 150q0 150 103.5 256.5t270.5 106.5q149 0 232.5 -81t83.5 -210q0 -168 -67.5 -286t-173.5 -118q-60 0 -97 43.5t-23 103.5q8 34 26.5 92.5 t29.5 102t11 74.5q0 49 -26.5 81.5t-75.5 32.5q-61 0 -103.5 -56.5t-42.5 -139.5q0 -72 24 -121l-98 -414q-24 -100 -7 -254h-183q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960z" />
<glyph unicode="" d="M678 -57q0 -38 -10 -71h-380q-95 0 -171.5 56.5t-103.5 147.5q24 45 69 77.5t100 49.5t107 24t107 7q32 0 49 -2q6 -4 30.5 -21t33 -23t31 -23t32 -25.5t27.5 -25.5t26.5 -29.5t21 -30.5t17.5 -34.5t9.5 -36t4.5 -40.5zM385 294q-234 -7 -385 -85v433q103 -118 273 -118 q32 0 70 5q-21 -61 -21 -86q0 -67 63 -149zM558 805q0 -100 -43.5 -160.5t-140.5 -60.5q-51 0 -97 26t-78 67.5t-56 93.5t-35.5 104t-11.5 99q0 96 51.5 165t144.5 69q66 0 119 -41t84 -104t47 -130t16 -128zM1536 896v-736q0 -119 -84.5 -203.5t-203.5 -84.5h-468 q39 73 39 157q0 66 -22 122.5t-55.5 93t-72 71t-72 59.5t-55.5 54.5t-22 59.5q0 36 23 68t56 61.5t65.5 64.5t55.5 93t23 131t-26.5 145.5t-75.5 118.5q-6 6 -14 11t-12.5 7.5t-10 9.5t-10.5 17h135l135 64h-437q-138 0 -244.5 -38.5t-182.5 -133.5q0 126 81 213t207 87h960 q119 0 203.5 -84.5t84.5 -203.5v-96h-256v256h-128v-256h-256v-128h256v-256h128v256h256z" />
<glyph unicode="" horiz-adv-x="1664" d="M876 71q0 21 -4.5 40.5t-9.5 36t-17.5 34.5t-21 30.5t-26.5 29.5t-27.5 25.5t-32 25.5t-31 23t-33 23t-30.5 21q-17 2 -50 2q-54 0 -106 -7t-108 -25t-98 -46t-69 -75t-27 -107q0 -68 35.5 -121.5t93 -84t120.5 -45.5t127 -15q59 0 112.5 12.5t100.5 39t74.5 73.5 t27.5 110zM756 933q0 60 -16.5 127.5t-47 130.5t-84 104t-119.5 41q-93 0 -144 -69t-51 -165q0 -47 11.5 -99t35.5 -104t56 -93.5t78 -67.5t97 -26q97 0 140.5 60.5t43.5 160.5zM625 1408h437l-135 -79h-135q71 -45 110 -126t39 -169q0 -74 -23 -131.5t-56 -92.5t-66 -64.5 t-56 -61t-23 -67.5q0 -26 16.5 -51t43 -48t58.5 -48t64 -55.5t58.5 -66t43 -85t16.5 -106.5q0 -160 -140 -282q-152 -131 -420 -131q-59 0 -119.5 10t-122 33.5t-108.5 58t-77 89t-30 121.5q0 61 37 135q32 64 96 110.5t145 71t155 36t150 13.5q-64 83 -64 149q0 12 2 23.5 t5 19.5t8 21.5t7 21.5q-40 -5 -70 -5q-149 0 -255.5 98t-106.5 246q0 140 95 250.5t234 141.5q94 20 187 20zM1664 1152v-128h-256v-256h-128v256h-256v128h256v256h128v-256h256z" />
<glyph unicode="" horiz-adv-x="1920" d="M768 384h384v96h-128v448h-114l-148 -137l77 -80q42 37 55 57h2v-288h-128v-96zM1280 640q0 -70 -21 -142t-59.5 -134t-101.5 -101t-138 -39t-138 39t-101.5 101t-59.5 134t-21 142t21 142t59.5 134t101.5 101t138 39t138 -39t101.5 -101t59.5 -134t21 -142zM1792 384 v512q-106 0 -181 75t-75 181h-1152q0 -106 -75 -181t-181 -75v-512q106 0 181 -75t75 -181h1152q0 106 75 181t181 75zM1920 1216v-1152q0 -26 -19 -45t-45 -19h-1792q-26 0 -45 19t-19 45v1152q0 26 19 45t45 19h1792q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1024" d="M1024 832q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1024" d="M1024 320q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" />
<glyph unicode="" horiz-adv-x="640" d="M640 1088v-896q0 -26 -19 -45t-45 -19t-45 19l-448 448q-19 19 -19 45t19 45l448 448q19 19 45 19t45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="640" d="M576 640q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19t-19 45v896q0 26 19 45t45 19t45 -19l448 -448q19 -19 19 -45z" />
<glyph unicode="" horiz-adv-x="1664" d="M160 0h608v1152h-640v-1120q0 -13 9.5 -22.5t22.5 -9.5zM1536 32v1120h-640v-1152h608q13 0 22.5 9.5t9.5 22.5zM1664 1248v-1216q0 -66 -47 -113t-113 -47h-1344q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1344q66 0 113 -47t47 -113z" />
<glyph unicode="" horiz-adv-x="1024" d="M1024 448q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45zM1024 832q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" />
<glyph unicode="" horiz-adv-x="1024" d="M1024 448q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1024" d="M1024 832q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" />
<glyph unicode="" horiz-adv-x="1792" d="M1792 826v-794q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v794q44 -49 101 -87q362 -246 497 -345q57 -42 92.5 -65.5t94.5 -48t110 -24.5h1h1q51 0 110 24.5t94.5 48t92.5 65.5q170 123 498 345q57 39 100 87zM1792 1120q0 -79 -49 -151t-122 -123 q-376 -261 -468 -325q-10 -7 -42.5 -30.5t-54 -38t-52 -32.5t-57.5 -27t-50 -9h-1h-1q-23 0 -50 9t-57.5 27t-52 32.5t-54 38t-42.5 30.5q-91 64 -262 182.5t-205 142.5q-62 42 -117 115.5t-55 136.5q0 78 41.5 130t118.5 52h1472q65 0 112.5 -47t47.5 -113z" />
<glyph unicode="" d="M349 911v-991h-330v991h330zM370 1217q1 -73 -50.5 -122t-135.5 -49h-2q-82 0 -132 49t-50 122q0 74 51.5 122.5t134.5 48.5t133 -48.5t51 -122.5zM1536 488v-568h-329v530q0 105 -40.5 164.5t-126.5 59.5q-63 0 -105.5 -34.5t-63.5 -85.5q-11 -30 -11 -81v-553h-329 q2 399 2 647t-1 296l-1 48h329v-144h-2q20 32 41 56t56.5 52t87 43.5t114.5 15.5q171 0 275 -113.5t104 -332.5z" />
<glyph unicode="" d="M1536 640q0 -156 -61 -298t-164 -245t-245 -164t-298 -61q-172 0 -327 72.5t-264 204.5q-7 10 -6.5 22.5t8.5 20.5l137 138q10 9 25 9q16 -2 23 -12q73 -95 179 -147t225 -52q104 0 198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5t-40.5 198.5t-109.5 163.5 t-163.5 109.5t-198.5 40.5q-98 0 -188 -35.5t-160 -101.5l137 -138q31 -30 14 -69q-17 -40 -59 -40h-448q-26 0 -45 19t-19 45v448q0 42 40 59q39 17 69 -14l130 -129q107 101 244.5 156.5t284.5 55.5q156 0 298 -61t245 -164t164 -245t61 -298z" />
<glyph unicode="" horiz-adv-x="1792" d="M1771 0q0 -53 -37 -90l-107 -108q-39 -37 -91 -37q-53 0 -90 37l-363 364q-38 36 -38 90q0 53 43 96l-256 256l-126 -126q-14 -14 -34 -14t-34 14q2 -2 12.5 -12t12.5 -13t10 -11.5t10 -13.5t6 -13.5t5.5 -16.5t1.5 -18q0 -38 -28 -68q-3 -3 -16.5 -18t-19 -20.5 t-18.5 -16.5t-22 -15.5t-22 -9t-26 -4.5q-40 0 -68 28l-408 408q-28 28 -28 68q0 13 4.5 26t9 22t15.5 22t16.5 18.5t20.5 19t18 16.5q30 28 68 28q10 0 18 -1.5t16.5 -5.5t13.5 -6t13.5 -10t11.5 -10t13 -12.5t12 -12.5q-14 14 -14 34t14 34l348 348q14 14 34 14t34 -14 q-2 2 -12.5 12t-12.5 13t-10 11.5t-10 13.5t-6 13.5t-5.5 16.5t-1.5 18q0 38 28 68q3 3 16.5 18t19 20.5t18.5 16.5t22 15.5t22 9t26 4.5q40 0 68 -28l408 -408q28 -28 28 -68q0 -13 -4.5 -26t-9 -22t-15.5 -22t-16.5 -18.5t-20.5 -19t-18 -16.5q-30 -28 -68 -28 q-10 0 -18 1.5t-16.5 5.5t-13.5 6t-13.5 10t-11.5 10t-13 12.5t-12 12.5q14 -14 14 -34t-14 -34l-126 -126l256 -256q43 43 96 43q52 0 91 -37l363 -363q37 -39 37 -91z" />
<glyph unicode="" horiz-adv-x="1792" d="M384 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM576 832q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1004 351l101 382q6 26 -7.5 48.5t-38.5 29.5 t-48 -6.5t-30 -39.5l-101 -382q-60 -5 -107 -43.5t-63 -98.5q-20 -77 20 -146t117 -89t146 20t89 117q16 60 -6 117t-72 91zM1664 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1024 1024q0 53 -37.5 90.5 t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1472 832q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1792 384q0 -261 -141 -483q-19 -29 -54 -29h-1402q-35 0 -54 29 q-141 221 -141 483q0 182 71 348t191 286t286 191t348 71t348 -71t286 -191t191 -286t71 -348z" />
<glyph unicode="" horiz-adv-x="1792" d="M896 1152q-204 0 -381.5 -69.5t-282 -187.5t-104.5 -255q0 -112 71.5 -213.5t201.5 -175.5l87 -50l-27 -96q-24 -91 -70 -172q152 63 275 171l43 38l57 -6q69 -8 130 -8q204 0 381.5 69.5t282 187.5t104.5 255t-104.5 255t-282 187.5t-381.5 69.5zM1792 640 q0 -174 -120 -321.5t-326 -233t-450 -85.5q-70 0 -145 8q-198 -175 -460 -242q-49 -14 -114 -22h-5q-15 0 -27 10.5t-16 27.5v1q-3 4 -0.5 12t2 10t4.5 9.5l6 9t7 8.5t8 9q7 8 31 34.5t34.5 38t31 39.5t32.5 51t27 59t26 76q-157 89 -247.5 220t-90.5 281q0 174 120 321.5 t326 233t450 85.5t450 -85.5t326 -233t120 -321.5z" />
<glyph unicode="" horiz-adv-x="1792" d="M704 1152q-153 0 -286 -52t-211.5 -141t-78.5 -191q0 -82 53 -158t149 -132l97 -56l-35 -84q34 20 62 39l44 31l53 -10q78 -14 153 -14q153 0 286 52t211.5 141t78.5 191t-78.5 191t-211.5 141t-286 52zM704 1280q191 0 353.5 -68.5t256.5 -186.5t94 -257t-94 -257 t-256.5 -186.5t-353.5 -68.5q-86 0 -176 16q-124 -88 -278 -128q-36 -9 -86 -16h-3q-11 0 -20.5 8t-11.5 21q-1 3 -1 6.5t0.5 6.5t2 6l2.5 5t3.5 5.5t4 5t4.5 5t4 4.5q5 6 23 25t26 29.5t22.5 29t25 38.5t20.5 44q-124 72 -195 177t-71 224q0 139 94 257t256.5 186.5 t353.5 68.5zM1526 111q10 -24 20.5 -44t25 -38.5t22.5 -29t26 -29.5t23 -25q1 -1 4 -4.5t4.5 -5t4 -5t3.5 -5.5l2.5 -5t2 -6t0.5 -6.5t-1 -6.5q-3 -14 -13 -22t-22 -7q-50 7 -86 16q-154 40 -278 128q-90 -16 -176 -16q-271 0 -472 132q58 -4 88 -4q161 0 309 45t264 129 q125 92 192 212t67 254q0 77 -23 152q129 -71 204 -178t75 -230q0 -120 -71 -224.5t-195 -176.5z" />
<glyph unicode="" horiz-adv-x="896" d="M885 970q18 -20 7 -44l-540 -1157q-13 -25 -42 -25q-4 0 -14 2q-17 5 -25.5 19t-4.5 30l197 808l-406 -101q-4 -1 -12 -1q-18 0 -31 11q-18 15 -13 39l201 825q4 14 16 23t28 9h328q19 0 32 -12.5t13 -29.5q0 -8 -5 -18l-171 -463l396 98q8 2 12 2q19 0 34 -15z" />
<glyph unicode="" horiz-adv-x="1792" d="M1792 288v-320q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192h-512v-192h96q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192h-512v-192h96q40 0 68 -28t28 -68v-320 q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192q0 52 38 90t90 38h512v192h-96q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-96v-192h512q52 0 90 -38t38 -90v-192h96q40 0 68 -28t28 -68 z" />
<glyph unicode="" horiz-adv-x="1664" d="M896 708v-580q0 -104 -76 -180t-180 -76t-180 76t-76 180q0 26 19 45t45 19t45 -19t19 -45q0 -50 39 -89t89 -39t89 39t39 89v580q33 11 64 11t64 -11zM1664 681q0 -13 -9.5 -22.5t-22.5 -9.5q-11 0 -23 10q-49 46 -93 69t-102 23q-68 0 -128 -37t-103 -97 q-7 -10 -17.5 -28t-14.5 -24q-11 -17 -28 -17q-18 0 -29 17q-4 6 -14.5 24t-17.5 28q-43 60 -102.5 97t-127.5 37t-127.5 -37t-102.5 -97q-7 -10 -17.5 -28t-14.5 -24q-11 -17 -29 -17q-17 0 -28 17q-4 6 -14.5 24t-17.5 28q-43 60 -103 97t-128 37q-58 0 -102 -23t-93 -69 q-12 -10 -23 -10q-13 0 -22.5 9.5t-9.5 22.5q0 5 1 7q45 183 172.5 319.5t298 204.5t360.5 68q140 0 274.5 -40t246.5 -113.5t194.5 -187t115.5 -251.5q1 -2 1 -7zM896 1408v-98q-42 2 -64 2t-64 -2v98q0 26 19 45t45 19t45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1792" d="M768 -128h896v640h-416q-40 0 -68 28t-28 68v416h-384v-1152zM1024 1312v64q0 13 -9.5 22.5t-22.5 9.5h-704q-13 0 -22.5 -9.5t-9.5 -22.5v-64q0 -13 9.5 -22.5t22.5 -9.5h704q13 0 22.5 9.5t9.5 22.5zM1280 640h299l-299 299v-299zM1792 512v-672q0 -40 -28 -68t-68 -28 h-960q-40 0 -68 28t-28 68v160h-544q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h1088q40 0 68 -28t28 -68v-328q21 -13 36 -28l408 -408q28 -28 48 -76t20 -88z" />
<glyph unicode="" horiz-adv-x="1024" d="M736 960q0 -13 -9.5 -22.5t-22.5 -9.5t-22.5 9.5t-9.5 22.5q0 46 -54 71t-106 25q-13 0 -22.5 9.5t-9.5 22.5t9.5 22.5t22.5 9.5q50 0 99.5 -16t87 -54t37.5 -90zM896 960q0 72 -34.5 134t-90 101.5t-123 62t-136.5 22.5t-136.5 -22.5t-123 -62t-90 -101.5t-34.5 -134 q0 -101 68 -180q10 -11 30.5 -33t30.5 -33q128 -153 141 -298h228q13 145 141 298q10 11 30.5 33t30.5 33q68 79 68 180zM1024 960q0 -155 -103 -268q-45 -49 -74.5 -87t-59.5 -95.5t-34 -107.5q47 -28 47 -82q0 -37 -25 -64q25 -27 25 -64q0 -52 -45 -81q13 -23 13 -47 q0 -46 -31.5 -71t-77.5 -25q-20 -44 -60 -70t-87 -26t-87 26t-60 70q-46 0 -77.5 25t-31.5 71q0 24 13 47q-45 29 -45 81q0 37 25 64q-25 27 -25 64q0 54 47 82q-4 50 -34 107.5t-59.5 95.5t-74.5 87q-103 113 -103 268q0 99 44.5 184.5t117 142t164 89t186.5 32.5 t186.5 -32.5t164 -89t117 -142t44.5 -184.5z" />
<glyph unicode="" horiz-adv-x="1792" d="M1792 352v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5q-12 0 -24 10l-319 320q-9 9 -9 22q0 14 9 23l320 320q9 9 23 9q13 0 22.5 -9.5t9.5 -22.5v-192h1376q13 0 22.5 -9.5t9.5 -22.5zM1792 896q0 -14 -9 -23l-320 -320q-9 -9 -23 -9 q-13 0 -22.5 9.5t-9.5 22.5v192h-1376q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1376v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23z" />
<glyph unicode="" horiz-adv-x="1920" d="M1280 608q0 14 -9 23t-23 9h-224v352q0 13 -9.5 22.5t-22.5 9.5h-192q-13 0 -22.5 -9.5t-9.5 -22.5v-352h-224q-13 0 -22.5 -9.5t-9.5 -22.5q0 -14 9 -23l352 -352q9 -9 23 -9t23 9l351 351q10 12 10 24zM1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088 q-185 0 -316.5 131.5t-131.5 316.5q0 130 70 240t188 165q-2 30 -2 43q0 212 150 362t362 150q156 0 285.5 -87t188.5 -231q71 62 166 62q106 0 181 -75t75 -181q0 -76 -41 -138q130 -31 213.5 -135.5t83.5 -238.5z" />
<glyph unicode="" horiz-adv-x="1920" d="M1280 672q0 14 -9 23l-352 352q-9 9 -23 9t-23 -9l-351 -351q-10 -12 -10 -24q0 -14 9 -23t23 -9h224v-352q0 -13 9.5 -22.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 22.5v352h224q13 0 22.5 9.5t9.5 22.5zM1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088 q-185 0 -316.5 131.5t-131.5 316.5q0 130 70 240t188 165q-2 30 -2 43q0 212 150 362t362 150q156 0 285.5 -87t188.5 -231q71 62 166 62q106 0 181 -75t75 -181q0 -76 -41 -138q130 -31 213.5 -135.5t83.5 -238.5z" />
<glyph unicode="" horiz-adv-x="1408" d="M384 192q0 -26 -19 -45t-45 -19t-45 19t-19 45t19 45t45 19t45 -19t19 -45zM1408 131q0 -121 -73 -190t-194 -69h-874q-121 0 -194 69t-73 190q0 68 5.5 131t24 138t47.5 132.5t81 103t120 60.5q-22 -52 -22 -120v-203q-58 -20 -93 -70t-35 -111q0 -80 56 -136t136 -56 t136 56t56 136q0 61 -35.5 111t-92.5 70v203q0 62 25 93q132 -104 295 -104t295 104q25 -31 25 -93v-64q-106 0 -181 -75t-75 -181v-89q-32 -29 -32 -71q0 -40 28 -68t68 -28t68 28t28 68q0 42 -32 71v89q0 52 38 90t90 38t90 -38t38 -90v-89q-32 -29 -32 -71q0 -40 28 -68 t68 -28t68 28t28 68q0 42 -32 71v89q0 68 -34.5 127.5t-93.5 93.5q0 10 0.5 42.5t0 48t-2.5 41.5t-7 47t-13 40q68 -15 120 -60.5t81 -103t47.5 -132.5t24 -138t5.5 -131zM1088 1024q0 -159 -112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5t112.5 271.5t271.5 112.5 t271.5 -112.5t112.5 -271.5z" />
<glyph unicode="" horiz-adv-x="1408" d="M1280 832q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 832q0 -62 -35.5 -111t-92.5 -70v-395q0 -159 -131.5 -271.5t-316.5 -112.5t-316.5 112.5t-131.5 271.5v132q-164 20 -274 128t-110 252v512q0 26 19 45t45 19q6 0 16 -2q17 30 47 48 t65 18q53 0 90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5q-33 0 -64 18v-402q0 -106 94 -181t226 -75t226 75t94 181v402q-31 -18 -64 -18q-53 0 -90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5q35 0 65 -18t47 -48q10 2 16 2q26 0 45 -19t19 -45v-512q0 -144 -110 -252 t-274 -128v-132q0 -106 94 -181t226 -75t226 75t94 181v395q-57 21 -92.5 70t-35.5 111q0 80 56 136t136 56t136 -56t56 -136z" />
<glyph unicode="" horiz-adv-x="1792" d="M640 1152h512v128h-512v-128zM288 1152v-1280h-64q-92 0 -158 66t-66 158v832q0 92 66 158t158 66h64zM1408 1152v-1280h-1024v1280h128v160q0 40 28 68t68 28h576q40 0 68 -28t28 -68v-160h128zM1792 928v-832q0 -92 -66 -158t-158 -66h-64v1280h64q92 0 158 -66 t66 -158z" />
<glyph unicode="" horiz-adv-x="1664" d="M848 -160q0 16 -16 16q-59 0 -101.5 42.5t-42.5 101.5q0 16 -16 16t-16 -16q0 -73 51.5 -124.5t124.5 -51.5q16 0 16 16zM1664 128q0 -52 -38 -90t-90 -38h-448q0 -106 -75 -181t-181 -75t-181 75t-75 181h-448q-52 0 -90 38t-38 90q190 161 287 397.5t97 498.5 q0 165 96 262t264 117q-8 18 -8 37q0 40 28 68t68 28t68 -28t28 -68q0 -19 -8 -37q168 -20 264 -117t96 -262q0 -262 97 -498.5t287 -397.5z" />
<glyph unicode="" horiz-adv-x="1920" d="M1664 896q0 80 -56 136t-136 56h-64v-384h64q80 0 136 56t56 136zM0 128h1792q0 -106 -75 -181t-181 -75h-1280q-106 0 -181 75t-75 181zM1856 896q0 -159 -112.5 -271.5t-271.5 -112.5h-64v-32q0 -92 -66 -158t-158 -66h-704q-92 0 -158 66t-66 158v736q0 26 19 45 t45 19h1152q159 0 271.5 -112.5t112.5 -271.5z" />
<glyph unicode="" horiz-adv-x="1408" d="M640 1472v-640q0 -61 -35.5 -111t-92.5 -70v-779q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v779q-57 20 -92.5 70t-35.5 111v640q0 26 19 45t45 19t45 -19t19 -45v-416q0 -26 19 -45t45 -19t45 19t19 45v416q0 26 19 45t45 19t45 -19t19 -45v-416q0 -26 19 -45 t45 -19t45 19t19 45v416q0 26 19 45t45 19t45 -19t19 -45zM1408 1472v-1600q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v512h-224q-13 0 -22.5 9.5t-9.5 22.5v800q0 132 94 226t226 94h256q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1280" d="M1024 352v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h704q14 0 23 -9t9 -23zM1024 608v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h704q14 0 23 -9t9 -23zM128 0h1024v768h-416q-40 0 -68 28t-28 68v416h-512v-1280z M768 896h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376zM1280 864v-896q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h640q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88z" />
<glyph unicode="" horiz-adv-x="1408" d="M384 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 -128h384v1536h-1152v-1536h384v224q0 13 9.5 22.5t22.5 9.5h320q13 0 22.5 -9.5t9.5 -22.5v-224zM1408 1472v-1664q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v1664q0 26 19 45t45 19h1280q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1408" d="M384 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 -128h384v1152h-256v-32q0 -40 -28 -68t-68 -28h-448q-40 0 -68 28t-28 68v32h-256v-1152h384v224q0 13 9.5 22.5t22.5 9.5h320q13 0 22.5 -9.5t9.5 -22.5v-224zM896 1056v320q0 13 -9.5 22.5t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-96h-128v96q0 13 -9.5 22.5 t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-320q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5v96h128v-96q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5zM1408 1088v-1280q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v1280q0 26 19 45t45 19h320 v288q0 40 28 68t68 28h448q40 0 68 -28t28 -68v-288h320q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1920" d="M640 128q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM256 640h384v256h-158q-14 -2 -22 -9l-195 -195q-7 -12 -9 -22v-30zM1536 128q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5 t90.5 37.5t37.5 90.5zM1664 800v192q0 14 -9 23t-23 9h-224v224q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-224h-224q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h224v-224q0 -14 9 -23t23 -9h192q14 0 23 9t9 23v224h224q14 0 23 9t9 23zM1920 1344v-1152 q0 -26 -19 -45t-45 -19h-192q0 -106 -75 -181t-181 -75t-181 75t-75 181h-384q0 -106 -75 -181t-181 -75t-181 75t-75 181h-128q-26 0 -45 19t-19 45t19 45t45 19v416q0 26 13 58t32 51l198 198q19 19 51 32t58 13h160v320q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1792" d="M1280 416v192q0 14 -9 23t-23 9h-224v224q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-224h-224q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h224v-224q0 -14 9 -23t23 -9h192q14 0 23 9t9 23v224h224q14 0 23 9t9 23zM640 1152h512v128h-512v-128zM256 1152v-1280h-32 q-92 0 -158 66t-66 158v832q0 92 66 158t158 66h32zM1440 1152v-1280h-1088v1280h160v160q0 40 28 68t68 28h576q40 0 68 -28t28 -68v-160h160zM1792 928v-832q0 -92 -66 -158t-158 -66h-32v1280h32q92 0 158 -66t66 -158z" />
<glyph unicode="" horiz-adv-x="1920" d="M1920 576q-1 -32 -288 -96l-352 -32l-224 -64h-64l-293 -352h69q26 0 45 -4.5t19 -11.5t-19 -11.5t-45 -4.5h-96h-160h-64v32h64v416h-160l-192 -224h-96l-32 32v192h32v32h128v8l-192 24v128l192 24v8h-128v32h-32v192l32 32h96l192 -224h160v416h-64v32h64h160h96 q26 0 45 -4.5t19 -11.5t-19 -11.5t-45 -4.5h-69l293 -352h64l224 -64l352 -32q261 -58 287 -93z" />
<glyph unicode="" horiz-adv-x="1664" d="M640 640v384h-256v-256q0 -53 37.5 -90.5t90.5 -37.5h128zM1664 192v-192h-1152v192l128 192h-128q-159 0 -271.5 112.5t-112.5 271.5v320l-64 64l32 128h480l32 128h960l32 -192l-64 -32v-800z" />
<glyph unicode="" d="M1280 192v896q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-320h-512v320q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-896q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v320h512v-320q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1536 1120v-960 q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" d="M1280 576v128q0 26 -19 45t-45 19h-320v320q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-320h-320q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h320v-320q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v320h320q26 0 45 19t19 45zM1536 1120v-960 q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" horiz-adv-x="1024" d="M627 160q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23zM1011 160q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23 t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23z" />
<glyph unicode="" horiz-adv-x="1024" d="M595 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23zM979 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23 l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" />
<glyph unicode="" horiz-adv-x="1152" d="M1075 224q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23zM1075 608q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393 q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" />
<glyph unicode="" horiz-adv-x="1152" d="M1075 672q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23zM1075 1056q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23 t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" />
<glyph unicode="" horiz-adv-x="640" d="M627 992q0 -13 -10 -23l-393 -393l393 -393q10 -10 10 -23t-10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" />
<glyph unicode="" horiz-adv-x="640" d="M595 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" />
<glyph unicode="" horiz-adv-x="1152" d="M1075 352q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" />
<glyph unicode="" horiz-adv-x="1152" d="M1075 800q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" />
<glyph unicode="" horiz-adv-x="1920" d="M1792 544v832q0 13 -9.5 22.5t-22.5 9.5h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-832q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5zM1920 1376v-1088q0 -66 -47 -113t-113 -47h-544q0 -37 16 -77.5t32 -71t16 -43.5q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19 t-19 45q0 14 16 44t32 70t16 78h-544q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" />
<glyph unicode="" horiz-adv-x="1920" d="M416 256q-66 0 -113 47t-47 113v704q0 66 47 113t113 47h1088q66 0 113 -47t47 -113v-704q0 -66 -47 -113t-113 -47h-1088zM384 1120v-704q0 -13 9.5 -22.5t22.5 -9.5h1088q13 0 22.5 9.5t9.5 22.5v704q0 13 -9.5 22.5t-22.5 9.5h-1088q-13 0 -22.5 -9.5t-9.5 -22.5z M1760 192h160v-96q0 -40 -47 -68t-113 -28h-1600q-66 0 -113 28t-47 68v96h160h1600zM1040 96q16 0 16 16t-16 16h-160q-16 0 -16 -16t16 -16h160z" />
<glyph unicode="" horiz-adv-x="1152" d="M640 128q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1024 288v960q0 13 -9.5 22.5t-22.5 9.5h-832q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h832q13 0 22.5 9.5t9.5 22.5zM1152 1248v-1088q0 -66 -47 -113t-113 -47h-832 q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h832q66 0 113 -47t47 -113z" />
<glyph unicode="" horiz-adv-x="768" d="M464 128q0 33 -23.5 56.5t-56.5 23.5t-56.5 -23.5t-23.5 -56.5t23.5 -56.5t56.5 -23.5t56.5 23.5t23.5 56.5zM672 288v704q0 13 -9.5 22.5t-22.5 9.5h-512q-13 0 -22.5 -9.5t-9.5 -22.5v-704q0 -13 9.5 -22.5t22.5 -9.5h512q13 0 22.5 9.5t9.5 22.5zM480 1136 q0 16 -16 16h-160q-16 0 -16 -16t16 -16h160q16 0 16 16zM768 1152v-1024q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v1024q0 52 38 90t90 38h512q52 0 90 -38t38 -90z" />
<glyph unicode="" d="M768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103 t279.5 -279.5t103 -385.5z" />
<glyph unicode="" horiz-adv-x="1664" d="M768 576v-384q0 -80 -56 -136t-136 -56h-384q-80 0 -136 56t-56 136v704q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5h64q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-64q-106 0 -181 -75t-75 -181v-32q0 -40 28 -68t68 -28h224q80 0 136 -56t56 -136z M1664 576v-384q0 -80 -56 -136t-136 -56h-384q-80 0 -136 56t-56 136v704q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5h64q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-64q-106 0 -181 -75t-75 -181v-32q0 -40 28 -68t68 -28h224q80 0 136 -56t56 -136z" />
<glyph unicode="" horiz-adv-x="1664" d="M768 1216v-704q0 -104 -40.5 -198.5t-109.5 -163.5t-163.5 -109.5t-198.5 -40.5h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64q106 0 181 75t75 181v32q0 40 -28 68t-68 28h-224q-80 0 -136 56t-56 136v384q0 80 56 136t136 56h384q80 0 136 -56t56 -136zM1664 1216 v-704q0 -104 -40.5 -198.5t-109.5 -163.5t-163.5 -109.5t-198.5 -40.5h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64q106 0 181 75t75 181v32q0 40 -28 68t-68 28h-224q-80 0 -136 56t-56 136v384q0 80 56 136t136 56h384q80 0 136 -56t56 -136z" />
<glyph unicode="" horiz-adv-x="1568" d="M496 192q0 -60 -42.5 -102t-101.5 -42q-60 0 -102 42t-42 102t42 102t102 42q59 0 101.5 -42t42.5 -102zM928 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM320 640q0 -66 -47 -113t-113 -47t-113 47t-47 113 t47 113t113 47t113 -47t47 -113zM1360 192q0 -46 -33 -79t-79 -33t-79 33t-33 79t33 79t79 33t79 -33t33 -79zM528 1088q0 -73 -51.5 -124.5t-124.5 -51.5t-124.5 51.5t-51.5 124.5t51.5 124.5t124.5 51.5t124.5 -51.5t51.5 -124.5zM992 1280q0 -80 -56 -136t-136 -56 t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1536 640q0 -40 -28 -68t-68 -28t-68 28t-28 68t28 68t68 28t68 -28t28 -68zM1328 1088q0 -33 -23.5 -56.5t-56.5 -23.5t-56.5 23.5t-23.5 56.5t23.5 56.5t56.5 23.5t56.5 -23.5t23.5 -56.5z" />
<glyph unicode="" d="M1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" horiz-adv-x="1792" d="M1792 416q0 -166 -127 -451q-3 -7 -10.5 -24t-13.5 -30t-13 -22q-12 -17 -28 -17q-15 0 -23.5 10t-8.5 25q0 9 2.5 26.5t2.5 23.5q5 68 5 123q0 101 -17.5 181t-48.5 138.5t-80 101t-105.5 69.5t-133 42.5t-154 21.5t-175.5 6h-224v-256q0 -26 -19 -45t-45 -19t-45 19 l-512 512q-19 19 -19 45t19 45l512 512q19 19 45 19t45 -19t19 -45v-256h224q713 0 875 -403q53 -134 53 -333z" />
<glyph unicode="" horiz-adv-x="1664" d="M640 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1280 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1440 320 q0 120 -69 204t-187 84q-41 0 -195 -21q-71 -11 -157 -11t-157 11q-152 21 -195 21q-118 0 -187 -84t-69 -204q0 -88 32 -153.5t81 -103t122 -60t140 -29.5t149 -7h168q82 0 149 7t140 29.5t122 60t81 103t32 153.5zM1664 496q0 -207 -61 -331q-38 -77 -105.5 -133t-141 -86 t-170 -47.5t-171.5 -22t-167 -4.5q-78 0 -142 3t-147.5 12.5t-152.5 30t-137 51.5t-121 81t-86 115q-62 123 -62 331q0 237 136 396q-27 82 -27 170q0 116 51 218q108 0 190 -39.5t189 -123.5q147 35 309 35q148 0 280 -32q105 82 187 121t189 39q51 -102 51 -218 q0 -87 -27 -168q136 -160 136 -398z" />
<glyph unicode="" horiz-adv-x="1664" d="M1536 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68v-960q0 -40 28 -68t68 -28h1216q40 0 68 28t28 68zM1664 928v-704q0 -92 -66 -158t-158 -66h-1216q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320 q92 0 158 -66t66 -158v-32h672q92 0 158 -66t66 -158z" />
<glyph unicode="" horiz-adv-x="1920" d="M1781 605q0 35 -53 35h-1088q-40 0 -85.5 -21.5t-71.5 -52.5l-294 -363q-18 -24 -18 -40q0 -35 53 -35h1088q40 0 86 22t71 53l294 363q18 22 18 39zM640 768h768v160q0 40 -28 68t-68 28h-576q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68 v-853l256 315q44 53 116 87.5t140 34.5zM1909 605q0 -62 -46 -120l-295 -363q-43 -53 -116 -87.5t-140 -34.5h-1088q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h544q92 0 158 -66t66 -158v-160h192q54 0 99 -24.5t67 -70.5q15 -32 15 -68z " />
<glyph unicode="" horiz-adv-x="1152" d="M896 608v-64q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v224h-224q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h224v224q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-224h224q14 0 23 -9t9 -23zM1024 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 -28 t-28 -68v-704q0 -40 28 -68t68 -28h704q40 0 68 28t28 68zM1152 928v-704q0 -92 -65.5 -158t-158.5 -66h-704q-93 0 -158.5 66t-65.5 158v704q0 93 65.5 158.5t158.5 65.5h704q93 0 158.5 -65.5t65.5 -158.5z" />
<glyph unicode="" horiz-adv-x="1152" d="M928 1152q93 0 158.5 -65.5t65.5 -158.5v-704q0 -92 -65.5 -158t-158.5 -66h-704q-93 0 -158.5 66t-65.5 158v704q0 93 65.5 158.5t158.5 65.5h704zM1024 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 -28t-28 -68v-704q0 -40 28 -68t68 -28h704q40 0 68 28t28 68z M864 640q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-576q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h576z" />
<glyph unicode="" d="M1134 461q-37 -121 -138 -195t-228 -74t-228 74t-138 195q-8 25 4 48.5t38 31.5q25 8 48.5 -4t31.5 -38q25 -80 92.5 -129.5t151.5 -49.5t151.5 49.5t92.5 129.5q8 26 32 38t49 4t37 -31.5t4 -48.5zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5 t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5 t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1134 307q8 -25 -4 -48.5t-37 -31.5t-49 4t-32 38q-25 80 -92.5 129.5t-151.5 49.5t-151.5 -49.5t-92.5 -129.5q-8 -26 -31.5 -38t-48.5 -4q-26 8 -38 31.5t-4 48.5q37 121 138 195t228 74t228 -74t138 -195zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204 t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1152 448q0 -26 -19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h640q26 0 45 -19t19 -45zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" horiz-adv-x="1920" d="M832 448v128q0 14 -9 23t-23 9h-192v192q0 14 -9 23t-23 9h-128q-14 0 -23 -9t-9 -23v-192h-192q-14 0 -23 -9t-9 -23v-128q0 -14 9 -23t23 -9h192v-192q0 -14 9 -23t23 -9h128q14 0 23 9t9 23v192h192q14 0 23 9t9 23zM1408 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5 t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1664 640q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1920 512q0 -212 -150 -362t-362 -150q-192 0 -338 128h-220q-146 -128 -338 -128q-212 0 -362 150 t-150 362t150 362t362 150h896q212 0 362 -150t150 -362z" />
<glyph unicode="" horiz-adv-x="1920" d="M384 368v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM512 624v-96q0 -16 -16 -16h-224q-16 0 -16 16v96q0 16 16 16h224q16 0 16 -16zM384 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1408 368v-96q0 -16 -16 -16 h-864q-16 0 -16 16v96q0 16 16 16h864q16 0 16 -16zM768 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM640 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1024 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16 h96q16 0 16 -16zM896 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1280 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1664 368v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1152 880v-96 q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1408 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1664 880v-352q0 -16 -16 -16h-224q-16 0 -16 16v96q0 16 16 16h112v240q0 16 16 16h96q16 0 16 -16zM1792 128v896h-1664v-896 h1664zM1920 1024v-896q0 -53 -37.5 -90.5t-90.5 -37.5h-1664q-53 0 -90.5 37.5t-37.5 90.5v896q0 53 37.5 90.5t90.5 37.5h1664q53 0 90.5 -37.5t37.5 -90.5z" />
<glyph unicode="" horiz-adv-x="1792" d="M1664 491v616q-169 -91 -306 -91q-82 0 -145 32q-100 49 -184 76.5t-178 27.5q-173 0 -403 -127v-599q245 113 433 113q55 0 103.5 -7.5t98 -26t77 -31t82.5 -39.5l28 -14q44 -22 101 -22q120 0 293 92zM320 1280q0 -35 -17.5 -64t-46.5 -46v-1266q0 -14 -9 -23t-23 -9 h-64q-14 0 -23 9t-9 23v1266q-29 17 -46.5 46t-17.5 64q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -39 -35 -57q-10 -5 -17 -9q-218 -116 -369 -116q-88 0 -158 35l-28 14q-64 33 -99 48t-91 29t-114 14q-102 0 -235.5 -44t-228.5 -102 q-15 -9 -33 -9q-16 0 -32 8q-32 19 -32 56v742q0 35 31 55q35 21 78.5 42.5t114 52t152.5 49.5t155 19q112 0 209 -31t209 -86q38 -19 89 -19q122 0 310 112q22 12 31 17q31 16 62 -2q31 -20 31 -55z" />
<glyph unicode="" horiz-adv-x="1792" d="M832 536v192q-181 -16 -384 -117v-185q205 96 384 110zM832 954v197q-172 -8 -384 -126v-189q215 111 384 118zM1664 491v184q-235 -116 -384 -71v224q-20 6 -39 15q-5 3 -33 17t-34.5 17t-31.5 15t-34.5 15.5t-32.5 13t-36 12.5t-35 8.5t-39.5 7.5t-39.5 4t-44 2 q-23 0 -49 -3v-222h19q102 0 192.5 -29t197.5 -82q19 -9 39 -15v-188q42 -17 91 -17q120 0 293 92zM1664 918v189q-169 -91 -306 -91q-45 0 -78 8v-196q148 -42 384 90zM320 1280q0 -35 -17.5 -64t-46.5 -46v-1266q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v1266 q-29 17 -46.5 46t-17.5 64q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -39 -35 -57q-10 -5 -17 -9q-218 -116 -369 -116q-88 0 -158 35l-28 14q-64 33 -99 48t-91 29t-114 14q-102 0 -235.5 -44t-228.5 -102q-15 -9 -33 -9q-16 0 -32 8 q-32 19 -32 56v742q0 35 31 55q35 21 78.5 42.5t114 52t152.5 49.5t155 19q112 0 209 -31t209 -86q38 -19 89 -19q122 0 310 112q22 12 31 17q31 16 62 -2q31 -20 31 -55z" />
<glyph unicode="" horiz-adv-x="1664" d="M585 553l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23zM1664 96v-64q0 -14 -9 -23t-23 -9h-960q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h960q14 0 23 -9 t9 -23z" />
<glyph unicode="" horiz-adv-x="1920" d="M617 137l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23t-10 -23zM1208 1204l-373 -1291q-4 -13 -15.5 -19.5t-23.5 -2.5l-62 17q-13 4 -19.5 15.5t-2.5 24.5 l373 1291q4 13 15.5 19.5t23.5 2.5l62 -17q13 -4 19.5 -15.5t2.5 -24.5zM1865 553l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23z" />
<glyph unicode="" horiz-adv-x="1792" d="M640 454v-70q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-512 512q-19 19 -19 45t19 45l512 512q29 31 70 14q39 -17 39 -59v-69l-397 -398q-19 -19 -19 -45t19 -45zM1792 416q0 -58 -17 -133.5t-38.5 -138t-48 -125t-40.5 -90.5l-20 -40q-8 -17 -28 -17q-6 0 -9 1 q-25 8 -23 34q43 400 -106 565q-64 71 -170.5 110.5t-267.5 52.5v-251q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-512 512q-19 19 -19 45t19 45l512 512q29 31 70 14q39 -17 39 -59v-262q411 -28 599 -221q169 -173 169 -509z" />
<glyph unicode="" horiz-adv-x="1664" d="M1186 579l257 250l-356 52l-66 10l-30 60l-159 322v-963l59 -31l318 -168l-60 355l-12 66zM1638 841l-363 -354l86 -500q5 -33 -6 -51.5t-34 -18.5q-17 0 -40 12l-449 236l-449 -236q-23 -12 -40 -12q-23 0 -34 18.5t-6 51.5l86 500l-364 354q-32 32 -23 59.5t54 34.5 l502 73l225 455q20 41 49 41q28 0 49 -41l225 -455l502 -73q45 -7 54 -34.5t-24 -59.5z" />
<glyph unicode="" horiz-adv-x="1408" d="M1401 1187l-640 -1280q-17 -35 -57 -35q-5 0 -15 2q-22 5 -35.5 22.5t-13.5 39.5v576h-576q-22 0 -39.5 13.5t-22.5 35.5t4 42t29 30l1280 640q13 7 29 7q27 0 45 -19q15 -14 18.5 -34.5t-6.5 -39.5z" />
<glyph unicode="" horiz-adv-x="1664" d="M557 256h595v595zM512 301l595 595h-595v-595zM1664 224v-192q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v224h-864q-14 0 -23 9t-9 23v864h-224q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h224v224q0 14 9 23t23 9h192q14 0 23 -9t9 -23 v-224h851l246 247q10 9 23 9t23 -9q9 -10 9 -23t-9 -23l-247 -246v-851h224q14 0 23 -9t9 -23z" />
<glyph unicode="" horiz-adv-x="1024" d="M288 64q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM288 1216q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM928 1088q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM1024 1088q0 -52 -26 -96.5t-70 -69.5 q-2 -287 -226 -414q-68 -38 -203 -81q-128 -40 -169.5 -71t-41.5 -100v-26q44 -25 70 -69.5t26 -96.5q0 -80 -56 -136t-136 -56t-136 56t-56 136q0 52 26 96.5t70 69.5v820q-44 25 -70 69.5t-26 96.5q0 80 56 136t136 56t136 -56t56 -136q0 -52 -26 -96.5t-70 -69.5v-497 q54 26 154 57q55 17 87.5 29.5t70.5 31t59 39.5t40.5 51t28 69.5t8.5 91.5q-44 25 -70 69.5t-26 96.5q0 80 56 136t136 56t136 -56t56 -136z" />
<glyph unicode="" horiz-adv-x="1664" d="M439 265l-256 -256q-10 -9 -23 -9q-12 0 -23 9q-9 10 -9 23t9 23l256 256q10 9 23 9t23 -9q9 -10 9 -23t-9 -23zM608 224v-320q0 -14 -9 -23t-23 -9t-23 9t-9 23v320q0 14 9 23t23 9t23 -9t9 -23zM384 448q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9t-9 23t9 23t23 9h320 q14 0 23 -9t9 -23zM1648 320q0 -120 -85 -203l-147 -146q-83 -83 -203 -83q-121 0 -204 85l-334 335q-21 21 -42 56l239 18l273 -274q27 -27 68 -27.5t68 26.5l147 146q28 28 28 67q0 40 -28 68l-274 275l18 239q35 -21 56 -42l336 -336q84 -86 84 -204zM1031 1044l-239 -18 l-273 274q-28 28 -68 28q-39 0 -68 -27l-147 -146q-28 -28 -28 -67q0 -40 28 -68l274 -274l-18 -240q-35 21 -56 42l-336 336q-84 86 -84 204q0 120 85 203l147 146q83 83 203 83q121 0 204 -85l334 -335q21 -21 42 -56zM1664 960q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9 t-9 23t9 23t23 9h320q14 0 23 -9t9 -23zM1120 1504v-320q0 -14 -9 -23t-23 -9t-23 9t-9 23v320q0 14 9 23t23 9t23 -9t9 -23zM1527 1353l-256 -256q-11 -9 -23 -9t-23 9q-9 10 -9 23t9 23l256 256q10 9 23 9t23 -9q9 -10 9 -23t-9 -23z" />
<glyph unicode="" horiz-adv-x="1024" d="M704 280v-240q0 -16 -12 -28t-28 -12h-240q-16 0 -28 12t-12 28v240q0 16 12 28t28 12h240q16 0 28 -12t12 -28zM1020 880q0 -54 -15.5 -101t-35 -76.5t-55 -59.5t-57.5 -43.5t-61 -35.5q-41 -23 -68.5 -65t-27.5 -67q0 -17 -12 -32.5t-28 -15.5h-240q-15 0 -25.5 18.5 t-10.5 37.5v45q0 83 65 156.5t143 108.5q59 27 84 56t25 76q0 42 -46.5 74t-107.5 32q-65 0 -108 -29q-35 -25 -107 -115q-13 -16 -31 -16q-12 0 -25 8l-164 125q-13 10 -15.5 25t5.5 28q160 266 464 266q80 0 161 -31t146 -83t106 -127.5t41 -158.5z" />
<glyph unicode="" horiz-adv-x="640" d="M640 192v-128q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64v384h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h384q26 0 45 -19t19 -45v-576h64q26 0 45 -19t19 -45zM512 1344v-192q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v192 q0 26 19 45t45 19h256q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="640" d="M512 288v-224q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v224q0 26 19 45t45 19h256q26 0 45 -19t19 -45zM542 1344l-28 -768q-1 -26 -20.5 -45t-45.5 -19h-256q-26 0 -45.5 19t-20.5 45l-28 768q-1 26 17.5 45t44.5 19h320q26 0 44.5 -19t17.5 -45z" />
<glyph unicode="" d="M897 167v-167h-248l-159 252l-24 42q-8 9 -11 21h-3l-9 -21q-10 -20 -25 -44l-155 -250h-258v167h128l197 291l-185 272h-137v168h276l139 -228q2 -4 23 -42q8 -9 11 -21h3q3 9 11 21l25 42l140 228h257v-168h-125l-184 -267l204 -296h109zM1534 846v-206h-514l-3 27 q-4 28 -4 46q0 64 26 117t65 86.5t84 65t84 54.5t65 54t26 64q0 38 -29.5 62.5t-70.5 24.5q-51 0 -97 -39q-14 -11 -36 -38l-105 92q26 37 63 66q83 65 188 65q110 0 178 -59.5t68 -158.5q0 -56 -24.5 -103t-62 -76.5t-81.5 -58.5t-82 -50.5t-65.5 -51.5t-30.5 -63h232v80 h126z" />
<glyph unicode="" d="M897 167v-167h-248l-159 252l-24 42q-8 9 -11 21h-3l-9 -21q-10 -20 -25 -44l-155 -250h-258v167h128l197 291l-185 272h-137v168h276l139 -228q2 -4 23 -42q8 -9 11 -21h3q3 9 11 21l25 42l140 228h257v-168h-125l-184 -267l204 -296h109zM1536 -50v-206h-514l-4 27 q-3 45 -3 46q0 64 26 117t65 86.5t84 65t84 54.5t65 54t26 64q0 38 -29.5 62.5t-70.5 24.5q-51 0 -97 -39q-14 -11 -36 -38l-105 92q26 37 63 66q80 65 188 65q110 0 178 -59.5t68 -158.5q0 -66 -34.5 -118.5t-84 -86t-99.5 -62.5t-87 -63t-41 -73h232v80h126z" />
<glyph unicode="" horiz-adv-x="1920" d="M896 128l336 384h-768l-336 -384h768zM1909 1205q15 -34 9.5 -71.5t-30.5 -65.5l-896 -1024q-38 -44 -96 -44h-768q-38 0 -69.5 20.5t-47.5 54.5q-15 34 -9.5 71.5t30.5 65.5l896 1024q38 44 96 44h768q38 0 69.5 -20.5t47.5 -54.5z" />
<glyph unicode="" horiz-adv-x="1664" d="M1664 438q0 -81 -44.5 -135t-123.5 -54q-41 0 -77.5 17.5t-59 38t-56.5 38t-71 17.5q-110 0 -110 -124q0 -39 16 -115t15 -115v-5q-22 0 -33 -1q-34 -3 -97.5 -11.5t-115.5 -13.5t-98 -5q-61 0 -103 26.5t-42 83.5q0 37 17.5 71t38 56.5t38 59t17.5 77.5q0 79 -54 123.5 t-135 44.5q-84 0 -143 -45.5t-59 -127.5q0 -43 15 -83t33.5 -64.5t33.5 -53t15 -50.5q0 -45 -46 -89q-37 -35 -117 -35q-95 0 -245 24q-9 2 -27.5 4t-27.5 4l-13 2q-1 0 -3 1q-2 0 -2 1v1024q2 -1 17.5 -3.5t34 -5t21.5 -3.5q150 -24 245 -24q80 0 117 35q46 44 46 89 q0 22 -15 50.5t-33.5 53t-33.5 64.5t-15 83q0 82 59 127.5t144 45.5q80 0 134 -44.5t54 -123.5q0 -41 -17.5 -77.5t-38 -59t-38 -56.5t-17.5 -71q0 -57 42 -83.5t103 -26.5q64 0 180 15t163 17v-2q-1 -2 -3.5 -17.5t-5 -34t-3.5 -21.5q-24 -150 -24 -245q0 -80 35 -117 q44 -46 89 -46q22 0 50.5 15t53 33.5t64.5 33.5t83 15q82 0 127.5 -59t45.5 -143z" />
<glyph unicode="" horiz-adv-x="1152" d="M1152 832v-128q0 -221 -147.5 -384.5t-364.5 -187.5v-132h256q26 0 45 -19t19 -45t-19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h256v132q-217 24 -364.5 187.5t-147.5 384.5v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -185 131.5 -316.5t316.5 -131.5 t316.5 131.5t131.5 316.5v128q0 26 19 45t45 19t45 -19t19 -45zM896 1216v-512q0 -132 -94 -226t-226 -94t-226 94t-94 226v512q0 132 94 226t226 94t226 -94t94 -226z" />
<glyph unicode="" horiz-adv-x="1408" d="M271 591l-101 -101q-42 103 -42 214v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -53 15 -113zM1385 1193l-361 -361v-128q0 -132 -94 -226t-226 -94q-55 0 -109 19l-96 -96q97 -51 205 -51q185 0 316.5 131.5t131.5 316.5v128q0 26 19 45t45 19t45 -19t19 -45v-128 q0 -221 -147.5 -384.5t-364.5 -187.5v-132h256q26 0 45 -19t19 -45t-19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h256v132q-125 13 -235 81l-254 -254q-10 -10 -23 -10t-23 10l-82 82q-10 10 -10 23t10 23l1234 1234q10 10 23 10t23 -10l82 -82q10 -10 10 -23 t-10 -23zM1005 1325l-621 -621v512q0 132 94 226t226 94q102 0 184.5 -59t116.5 -152z" />
<glyph unicode="" horiz-adv-x="1280" d="M1088 576v640h-448v-1137q119 63 213 137q235 184 235 360zM1280 1344v-768q0 -86 -33.5 -170.5t-83 -150t-118 -127.5t-126.5 -103t-121 -77.5t-89.5 -49.5t-42.5 -20q-12 -6 -26 -6t-26 6q-16 7 -42.5 20t-89.5 49.5t-121 77.5t-126.5 103t-118 127.5t-83 150 t-33.5 170.5v768q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" />
<glyph unicode="" horiz-adv-x="1664" d="M128 -128h1408v1024h-1408v-1024zM512 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1280 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1664 1152v-1280 q0 -52 -38 -90t-90 -38h-1408q-52 0 -90 38t-38 90v1280q0 52 38 90t90 38h128v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h384v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h128q52 0 90 -38t38 -90z" />
<glyph unicode="" horiz-adv-x="1408" d="M512 1344q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 1376v-320q0 -16 -12 -25q-8 -7 -20 -7q-4 0 -7 1l-448 96q-11 2 -18 11t-7 20h-256v-102q111 -23 183.5 -111t72.5 -203v-800q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v800 q0 106 62.5 190.5t161.5 114.5v111h-32q-59 0 -115 -23.5t-91.5 -53t-66 -66.5t-40.5 -53.5t-14 -24.5q-17 -35 -57 -35q-16 0 -29 7q-23 12 -31.5 37t3.5 49q5 10 14.5 26t37.5 53.5t60.5 70t85 67t108.5 52.5q-25 42 -25 86q0 66 47 113t113 47t113 -47t47 -113 q0 -33 -14 -64h302q0 11 7 20t18 11l448 96q3 1 7 1q12 0 20 -7q12 -9 12 -25z" />
<glyph unicode="" horiz-adv-x="1664" d="M1440 1088q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM1664 1376q0 -249 -75.5 -430.5t-253.5 -360.5q-81 -80 -195 -176l-20 -379q-2 -16 -16 -26l-384 -224q-7 -4 -16 -4q-12 0 -23 9l-64 64q-13 14 -8 32l85 276l-281 281l-276 -85q-3 -1 -9 -1 q-14 0 -23 9l-64 64q-17 19 -5 39l224 384q10 14 26 16l379 20q96 114 176 195q188 187 358 258t431 71q14 0 24 -9.5t10 -22.5z" />
<glyph unicode="" horiz-adv-x="1792" d="M1708 881l-188 -881h-304l181 849q4 21 1 43q-4 20 -16 35q-10 14 -28 24q-18 9 -40 9h-197l-205 -960h-303l204 960h-304l-205 -960h-304l272 1280h1139q157 0 245 -118q86 -116 52 -281z" />
<glyph unicode="" d="M909 141l102 102q19 19 19 45t-19 45l-307 307l307 307q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l454 -454q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M717 141l454 454q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l307 -307l-307 -307q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1165 397l102 102q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19l307 307l307 -307q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M813 237l454 454q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-307 -307l-307 307q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l454 -454q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" horiz-adv-x="1408" d="M1130 939l16 175h-884l47 -534h612l-22 -228l-197 -53l-196 53l-13 140h-175l22 -278l362 -100h4v1l359 99l50 544h-644l-15 181h674zM0 1408h1408l-128 -1438l-578 -162l-574 162z" />
<glyph unicode="" horiz-adv-x="1792" d="M275 1408h1505l-266 -1333l-804 -267l-698 267l71 356h297l-29 -147l422 -161l486 161l68 339h-1208l58 297h1209l38 191h-1208z" />
<glyph unicode="" horiz-adv-x="1792" d="M960 1280q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1792 352v-352q0 -22 -20 -30q-8 -2 -12 -2q-13 0 -23 9l-93 93q-119 -143 -318.5 -226.5t-429.5 -83.5t-429.5 83.5t-318.5 226.5l-93 -93q-9 -9 -23 -9q-4 0 -12 2q-20 8 -20 30v352 q0 14 9 23t23 9h352q22 0 30 -20q8 -19 -7 -35l-100 -100q67 -91 189.5 -153.5t271.5 -82.5v647h-192q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h192v163q-58 34 -93 92.5t-35 128.5q0 106 75 181t181 75t181 -75t75 -181q0 -70 -35 -128.5t-93 -92.5v-163h192q26 0 45 -19 t19 -45v-128q0 -26 -19 -45t-45 -19h-192v-647q149 20 271.5 82.5t189.5 153.5l-100 100q-15 16 -7 35q8 20 30 20h352q14 0 23 -9t9 -23z" />
<glyph unicode="" horiz-adv-x="1152" d="M1056 768q40 0 68 -28t28 -68v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v576q0 40 28 68t68 28h32v320q0 185 131.5 316.5t316.5 131.5t316.5 -131.5t131.5 -316.5q0 -26 -19 -45t-45 -19h-64q-26 0 -45 19t-19 45q0 106 -75 181t-181 75t-181 -75t-75 -181 v-320h736z" />
<glyph unicode="" d="M1024 640q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181zM1152 640q0 159 -112.5 271.5t-271.5 112.5t-271.5 -112.5t-112.5 -271.5t112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5zM1280 640q0 -212 -150 -362t-362 -150t-362 150 t-150 362t150 362t362 150t362 -150t150 -362zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" horiz-adv-x="1408" d="M384 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM896 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM1408 800v-192q0 -40 -28 -68t-68 -28h-192 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68z" />
<glyph unicode="" horiz-adv-x="384" d="M384 288v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM384 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM384 1312v-192q0 -40 -28 -68t-68 -28h-192 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68z" />
<glyph unicode="" d="M512 256q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM863 162q-13 232 -177 396t-396 177q-14 1 -24 -9t-10 -23v-128q0 -13 8.5 -22t21.5 -10q154 -11 264 -121t121 -264q1 -13 10 -21.5t22 -8.5h128q13 0 23 10 t9 24zM1247 161q-5 154 -56 297.5t-139.5 260t-205 205t-260 139.5t-297.5 56q-14 1 -23 -9q-10 -10 -10 -23v-128q0 -13 9 -22t22 -10q204 -7 378 -111.5t278.5 -278.5t111.5 -378q1 -13 10 -22t22 -9h128q13 0 23 10q11 9 9 23zM1536 1120v-960q0 -119 -84.5 -203.5 t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" d="M768 1408q209 0 385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103zM1152 585q32 18 32 55t-32 55l-544 320q-31 19 -64 1q-32 -19 -32 -56v-640q0 -37 32 -56 q16 -8 32 -8q17 0 32 9z" />
<glyph unicode="" horiz-adv-x="1792" d="M1024 1084l316 -316l-572 -572l-316 316zM813 105l618 618q19 19 19 45t-19 45l-362 362q-18 18 -45 18t-45 -18l-618 -618q-19 -19 -19 -45t19 -45l362 -362q18 -18 45 -18t45 18zM1702 742l-907 -908q-37 -37 -90.5 -37t-90.5 37l-126 126q56 56 56 136t-56 136 t-136 56t-136 -56l-125 126q-37 37 -37 90.5t37 90.5l907 906q37 37 90.5 37t90.5 -37l125 -125q-56 -56 -56 -136t56 -136t136 -56t136 56l126 -125q37 -37 37 -90.5t-37 -90.5z" />
<glyph unicode="" d="M1280 576v128q0 26 -19 45t-45 19h-896q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h896q26 0 45 19t19 45zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5 t84.5 -203.5z" />
<glyph unicode="" horiz-adv-x="1408" d="M1152 736v-64q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h832q14 0 23 -9t9 -23zM1280 288v832q0 66 -47 113t-113 47h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113zM1408 1120v-832q0 -119 -84.5 -203.5 t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" horiz-adv-x="1024" d="M1018 933q-18 -37 -58 -37h-192v-864q0 -14 -9 -23t-23 -9h-704q-21 0 -29 18q-8 20 4 35l160 192q9 11 25 11h320v640h-192q-40 0 -58 37q-17 37 9 68l320 384q18 22 49 22t49 -22l320 -384q27 -32 9 -68z" />
<glyph unicode="" horiz-adv-x="1024" d="M32 1280h704q13 0 22.5 -9.5t9.5 -23.5v-863h192q40 0 58 -37t-9 -69l-320 -384q-18 -22 -49 -22t-49 22l-320 384q-26 31 -9 69q18 37 58 37h192v640h-320q-14 0 -25 11l-160 192q-13 14 -4 34q9 19 29 19z" />
<glyph unicode="" d="M685 237l614 614q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-467 -467l-211 211q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l358 -358q19 -19 45 -19t45 19zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5 t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" d="M404 428l152 -152l-52 -52h-56v96h-96v56zM818 818q14 -13 -3 -30l-291 -291q-17 -17 -30 -3q-14 13 3 30l291 291q17 17 30 3zM544 128l544 544l-288 288l-544 -544v-288h288zM1152 736l92 92q28 28 28 68t-28 68l-152 152q-28 28 -68 28t-68 -28l-92 -92zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" d="M1280 608v480q0 26 -19 45t-45 19h-480q-42 0 -59 -39q-17 -41 14 -70l144 -144l-534 -534q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19l534 534l144 -144q18 -19 45 -19q12 0 25 5q39 17 39 59zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960 q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" d="M1005 435l352 352q19 19 19 45t-19 45l-352 352q-30 31 -69 14q-40 -17 -40 -59v-160q-119 0 -216 -19.5t-162.5 -51t-114 -79t-76.5 -95.5t-44.5 -109t-21.5 -111.5t-5 -110.5q0 -181 167 -404q10 -12 25 -12q7 0 13 3q22 9 19 33q-44 354 62 473q46 52 130 75.5 t224 23.5v-160q0 -42 40 -59q12 -5 24 -5q26 0 45 19zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" d="M640 448l256 128l-256 128v-256zM1024 1039v-542l-512 -256v542zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103 t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="" d="M1145 861q18 -35 -5 -66l-320 -448q-19 -27 -52 -27t-52 27l-320 448q-23 31 -5 66q17 35 57 35h640q40 0 57 -35zM1280 160v960q0 13 -9.5 22.5t-22.5 9.5h-960q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" d="M1145 419q-17 -35 -57 -35h-640q-40 0 -57 35q-18 35 5 66l320 448q19 27 52 27t52 -27l320 -448q23 -31 5 -66zM1280 160v960q0 13 -9.5 22.5t-22.5 9.5h-960q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5zM1536 1120v-960 q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" d="M1088 640q0 -33 -27 -52l-448 -320q-31 -23 -66 -5q-35 17 -35 57v640q0 40 35 57q35 18 66 -5l448 -320q27 -19 27 -52zM1280 160v960q0 14 -9 23t-23 9h-960q-14 0 -23 -9t-9 -23v-960q0 -14 9 -23t23 -9h960q14 0 23 9t9 23zM1536 1120v-960q0 -119 -84.5 -203.5 t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="" horiz-adv-x="1024" d="M976 229l35 -159q3 -12 -3 -22.5t-17 -14.5l-5 -1q-4 -2 -10.5 -3.5t-16 -4.5t-21.5 -5.5t-25.5 -5t-30 -5t-33.5 -4.5t-36.5 -3t-38.5 -1q-234 0 -409 130.5t-238 351.5h-95q-13 0 -22.5 9.5t-9.5 22.5v113q0 13 9.5 22.5t22.5 9.5h66q-2 57 1 105h-67q-14 0 -23 9 t-9 23v114q0 14 9 23t23 9h98q67 210 243.5 338t400.5 128q102 0 194 -23q11 -3 20 -15q6 -11 3 -24l-43 -159q-3 -13 -14 -19.5t-24 -2.5l-4 1q-4 1 -11.5 2.5l-17.5 3.5t-22.5 3.5t-26 3t-29 2.5t-29.5 1q-126 0 -226 -64t-150 -176h468q16 0 25 -12q10 -12 7 -26 l-24 -114q-5 -26 -32 -26h-488q-3 -37 0 -105h459q15 0 25 -12q9 -12 6 -27l-24 -112q-2 -11 -11 -18.5t-20 -7.5h-387q48 -117 149.5 -185.5t228.5 -68.5q18 0 36 1.5t33.5 3.5t29.5 4.5t24.5 5t18.5 4.5l12 3l5 2q13 5 26 -2q12 -7 15 -21z" />
<glyph unicode="" horiz-adv-x="1024" d="M1020 399v-367q0 -14 -9 -23t-23 -9h-956q-14 0 -23 9t-9 23v150q0 13 9.5 22.5t22.5 9.5h97v383h-95q-14 0 -23 9.5t-9 22.5v131q0 14 9 23t23 9h95v223q0 171 123.5 282t314.5 111q185 0 335 -125q9 -8 10 -20.5t-7 -22.5l-103 -127q-9 -11 -22 -12q-13 -2 -23 7 q-5 5 -26 19t-69 32t-93 18q-85 0 -137 -47t-52 -123v-215h305q13 0 22.5 -9t9.5 -23v-131q0 -13 -9.5 -22.5t-22.5 -9.5h-305v-379h414v181q0 13 9 22.5t23 9.5h162q14 0 23 -9.5t9 -22.5z" />
<glyph unicode="" horiz-adv-x="1024" d="M978 351q0 -153 -99.5 -263.5t-258.5 -136.5v-175q0 -14 -9 -23t-23 -9h-135q-13 0 -22.5 9.5t-9.5 22.5v175q-66 9 -127.5 31t-101.5 44.5t-74 48t-46.5 37.5t-17.5 18q-17 21 -2 41l103 135q7 10 23 12q15 2 24 -9l2 -2q113 -99 243 -125q37 -8 74 -8q81 0 142.5 43 t61.5 122q0 28 -15 53t-33.5 42t-58.5 37.5t-66 32t-80 32.5q-39 16 -61.5 25t-61.5 26.5t-62.5 31t-56.5 35.5t-53.5 42.5t-43.5 49t-35.5 58t-21 66.5t-8.5 78q0 138 98 242t255 134v180q0 13 9.5 22.5t22.5 9.5h135q14 0 23 -9t9 -23v-176q57 -6 110.5 -23t87 -33.5 t63.5 -37.5t39 -29t15 -14q17 -18 5 -38l-81 -146q-8 -15 -23 -16q-14 -3 -27 7q-3 3 -14.5 12t-39 26.5t-58.5 32t-74.5 26t-85.5 11.5q-95 0 -155 -43t-60 -111q0 -26 8.5 -48t29.5 -41.5t39.5 -33t56 -31t60.5 -27t70 -27.5q53 -20 81 -31.5t76 -35t75.5 -42.5t62 -50 t53 -63.5t31.5 -76.5t13 -94z" />
<glyph unicode="" horiz-adv-x="898" d="M898 1066v-102q0 -14 -9 -23t-23 -9h-168q-23 -144 -129 -234t-276 -110q167 -178 459 -536q14 -16 4 -34q-8 -18 -29 -18h-195q-16 0 -25 12q-306 367 -498 571q-9 9 -9 22v127q0 13 9.5 22.5t22.5 9.5h112q132 0 212.5 43t102.5 125h-427q-14 0 -23 9t-9 23v102 q0 14 9 23t23 9h413q-57 113 -268 113h-145q-13 0 -22.5 9.5t-9.5 22.5v133q0 14 9 23t23 9h832q14 0 23 -9t9 -23v-102q0 -14 -9 -23t-23 -9h-233q47 -61 64 -144h171q14 0 23 -9t9 -23z" />
<glyph unicode="" horiz-adv-x="1027" d="M603 0h-172q-13 0 -22.5 9t-9.5 23v330h-288q-13 0 -22.5 9t-9.5 23v103q0 13 9.5 22.5t22.5 9.5h288v85h-288q-13 0 -22.5 9t-9.5 23v104q0 13 9.5 22.5t22.5 9.5h214l-321 578q-8 16 0 32q10 16 28 16h194q19 0 29 -18l215 -425q19 -38 56 -125q10 24 30.5 68t27.5 61 l191 420q8 19 29 19h191q17 0 27 -16q9 -14 1 -31l-313 -579h215q13 0 22.5 -9.5t9.5 -22.5v-104q0 -14 -9.5 -23t-22.5 -9h-290v-85h290q13 0 22.5 -9.5t9.5 -22.5v-103q0 -14 -9.5 -23t-22.5 -9h-290v-330q0 -13 -9.5 -22.5t-22.5 -9.5z" />
<glyph unicode="" horiz-adv-x="1664" d="M1664 352v-32q0 -132 -94 -226t-226 -94h-128q-132 0 -226 94t-94 226v480h-224q-2 -102 -14.5 -190.5t-30.5 -156t-48.5 -126.5t-57 -99.5t-67.5 -77.5t-69.5 -58.5t-74 -44t-69 -32t-65.5 -25.5q-4 -2 -32 -13q-8 -2 -12 -2q-22 0 -30 20l-71 178q-5 13 0 25t17 17 q7 3 20 7.5t18 6.5q31 12 46.5 18.5t44.5 20t45.5 26t42 32.5t40.5 42.5t34.5 53.5t30.5 68.5t22.5 83.5t17 103t6.5 123h-256q-14 0 -23 9t-9 23v160q0 14 9 23t23 9h1216q14 0 23 -9t9 -23v-160q0 -14 -9 -23t-23 -9h-224v-512q0 -26 19 -45t45 -19h128q26 0 45 19t19 45 v64q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1280 1376v-160q0 -14 -9 -23t-23 -9h-960q-14 0 -23 9t-9 23v160q0 14 9 23t23 9h960q14 0 23 -9t9 -23z" />
<glyph unicode="" horiz-adv-x="1792" d="M514 341l81 299h-159l75 -300q1 -1 1 -3t1 -3q0 1 0.5 3.5t0.5 3.5zM630 768l35 128h-292l32 -128h225zM822 768h139l-35 128h-70zM1271 340l78 300h-162l81 -299q0 -1 0.5 -3.5t1.5 -3.5q0 1 0.5 3t0.5 3zM1382 768l33 128h-297l34 -128h230zM1792 736v-64q0 -14 -9 -23 t-23 -9h-213l-164 -616q-7 -24 -31 -24h-159q-24 0 -31 24l-166 616h-209l-167 -616q-7 -24 -31 -24h-159q-11 0 -19.5 7t-10.5 17l-160 616h-208q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h175l-33 128h-142q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h109l-89 344q-5 15 5 28 q10 12 26 12h137q26 0 31 -24l90 -360h359l97 360q7 24 31 24h126q24 0 31 -24l98 -360h365l93 360q5 24 31 24h137q16 0 26 -12q10 -13 5 -28l-91 -344h111q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-145l-34 -128h179q14 0 23 -9t9 -23z" />
<glyph unicode="" horiz-adv-x="1280" d="M1167 896q18 -182 -131 -258q117 -28 175 -103t45 -214q-7 -71 -32.5 -125t-64.5 -89t-97 -58.5t-121.5 -34.5t-145.5 -15v-255h-154v251q-80 0 -122 1v-252h-154v255q-18 0 -54 0.5t-55 0.5h-200l31 183h111q50 0 58 51v402h16q-6 1 -16 1v287q-13 68 -89 68h-111v164 l212 -1q64 0 97 1v252h154v-247q82 2 122 2v245h154v-252q79 -7 140 -22.5t113 -45t82.5 -78t36.5 -114.5zM952 351q0 36 -15 64t-37 46t-57.5 30.5t-65.5 18.5t-74 9t-69 3t-64.5 -1t-47.5 -1v-338q8 0 37 -0.5t48 -0.5t53 1.5t58.5 4t57 8.5t55.5 14t47.5 21t39.5 30 t24.5 40t9.5 51zM881 827q0 33 -12.5 58.5t-30.5 42t-48 28t-55 16.5t-61.5 8t-58 2.5t-54 -1t-39.5 -0.5v-307q5 0 34.5 -0.5t46.5 0t50 2t55 5.5t51.5 11t48.5 18.5t37 27t27 38.5t9 51z" />
<glyph unicode="" horiz-adv-x="1280" d="M1280 768v-800q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h544v-544q0 -40 28 -68t68 -28h544zM1277 896h-509v509q82 -15 132 -65l312 -312q50 -50 65 -132z" />
<glyph unicode="" horiz-adv-x="1280" d="M1024 160v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h704q14 0 23 9t9 23zM1024 416v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h704q14 0 23 9t9 23zM1280 768v-800q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28 t-28 68v1344q0 40 28 68t68 28h544v-544q0 -40 28 -68t68 -28h544zM1277 896h-509v509q82 -15 132 -65l312 -312q50 -50 65 -132z" />
<glyph unicode="" horiz-adv-x="1664" d="M1191 1128h177l-72 218l-12 47q-2 16 -2 20h-4l-3 -20q0 -1 -3.5 -18t-7.5 -29zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1572 -23 v-233h-584v90l369 529q12 18 21 27l11 9v3q-2 0 -6.5 -0.5t-7.5 -0.5q-12 -3 -30 -3h-232v-115h-120v229h567v-89l-369 -530q-6 -8 -21 -26l-11 -11v-2l14 2q9 2 30 2h248v119h121zM1661 874v-106h-288v106h75l-47 144h-243l-47 -144h75v-106h-287v106h70l230 662h162 l230 -662h70z" />
<glyph unicode="" horiz-adv-x="1664" d="M1191 104h177l-72 218l-12 47q-2 16 -2 20h-4l-3 -20q0 -1 -3.5 -18t-7.5 -29zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1661 -150 v-106h-288v106h75l-47 144h-243l-47 -144h75v-106h-287v106h70l230 662h162l230 -662h70zM1572 1001v-233h-584v90l369 529q12 18 21 27l11 9v3q-2 0 -6.5 -0.5t-7.5 -0.5q-12 -3 -30 -3h-232v-115h-120v229h567v-89l-369 -530q-6 -8 -21 -26l-11 -10v-3l14 3q9 1 30 1h248 v119h121z" />
<glyph unicode="" horiz-adv-x="1792" d="M736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1792 -32v-192q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h832 q14 0 23 -9t9 -23zM1600 480v-192q0 -14 -9 -23t-23 -9h-640q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h640q14 0 23 -9t9 -23zM1408 992v-192q0 -14 -9 -23t-23 -9h-448q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h448q14 0 23 -9t9 -23zM1216 1504v-192q0 -14 -9 -23t-23 -9h-256 q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h256q14 0 23 -9t9 -23z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1792" d="M1216 -32v-192q0 -14 -9 -23t-23 -9h-256q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h256q14 0 23 -9t9 -23zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192 q14 0 23 -9t9 -23zM1408 480v-192q0 -14 -9 -23t-23 -9h-448q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h448q14 0 23 -9t9 -23zM1600 992v-192q0 -14 -9 -23t-23 -9h-640q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h640q14 0 23 -9t9 -23zM1792 1504v-192q0 -14 -9 -23t-23 -9h-832 q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h832q14 0 23 -9t9 -23z" />
|
||||||
|
<glyph unicode="" d="M1346 223q0 63 -44 116t-103 53q-52 0 -83 -37t-31 -94t36.5 -95t104.5 -38q50 0 85 27t35 68zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23 zM1486 165q0 -62 -13 -121.5t-41 -114t-68 -95.5t-98.5 -65.5t-127.5 -24.5q-62 0 -108 16q-24 8 -42 15l39 113q15 -7 31 -11q37 -13 75 -13q84 0 134.5 58.5t66.5 145.5h-2q-21 -23 -61.5 -37t-84.5 -14q-106 0 -173 71.5t-67 172.5q0 105 72 178t181 73q123 0 205 -94.5 t82 -252.5zM1456 882v-114h-469v114h167v432q0 7 0.5 19t0.5 17v16h-2l-7 -12q-8 -13 -26 -31l-62 -58l-82 86l192 185h123v-654h165z" />
|
||||||
|
<glyph unicode="" d="M1346 1247q0 63 -44 116t-103 53q-52 0 -83 -37t-31 -94t36.5 -95t104.5 -38q50 0 85 27t35 68zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9 t9 -23zM1456 -142v-114h-469v114h167v432q0 7 0.5 19t0.5 17v16h-2l-7 -12q-8 -13 -26 -31l-62 -58l-82 86l192 185h123v-654h165zM1486 1189q0 -62 -13 -121.5t-41 -114t-68 -95.5t-98.5 -65.5t-127.5 -24.5q-62 0 -108 16q-24 8 -42 15l39 113q15 -7 31 -11q37 -13 75 -13 q84 0 134.5 58.5t66.5 145.5h-2q-21 -23 -61.5 -37t-84.5 -14q-106 0 -173 71.5t-67 172.5q0 105 72 178t181 73q123 0 205 -94.5t82 -252.5z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1664" d="M256 192q0 26 -19 45t-45 19q-27 0 -45.5 -19t-18.5 -45q0 -27 18.5 -45.5t45.5 -18.5q26 0 45 18.5t19 45.5zM416 704v-640q0 -26 -19 -45t-45 -19h-288q-26 0 -45 19t-19 45v640q0 26 19 45t45 19h288q26 0 45 -19t19 -45zM1600 704q0 -86 -55 -149q15 -44 15 -76 q3 -76 -43 -137q17 -56 0 -117q-15 -57 -54 -94q9 -112 -49 -181q-64 -76 -197 -78h-36h-76h-17q-66 0 -144 15.5t-121.5 29t-120.5 39.5q-123 43 -158 44q-26 1 -45 19.5t-19 44.5v641q0 25 18 43.5t43 20.5q24 2 76 59t101 121q68 87 101 120q18 18 31 48t17.5 48.5 t13.5 60.5q7 39 12.5 61t19.5 52t34 50q19 19 45 19q46 0 82.5 -10.5t60 -26t40 -40.5t24 -45t12 -50t5 -45t0.5 -39q0 -38 -9.5 -76t-19 -60t-27.5 -56q-3 -6 -10 -18t-11 -22t-8 -24h277q78 0 135 -57t57 -135z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1664" d="M256 960q0 -26 -19 -45t-45 -19q-27 0 -45.5 19t-18.5 45q0 27 18.5 45.5t45.5 18.5q26 0 45 -18.5t19 -45.5zM416 448v640q0 26 -19 45t-45 19h-288q-26 0 -45 -19t-19 -45v-640q0 -26 19 -45t45 -19h288q26 0 45 19t19 45zM1545 597q55 -61 55 -149q-1 -78 -57.5 -135 t-134.5 -57h-277q4 -14 8 -24t11 -22t10 -18q18 -37 27 -57t19 -58.5t10 -76.5q0 -24 -0.5 -39t-5 -45t-12 -50t-24 -45t-40 -40.5t-60 -26t-82.5 -10.5q-26 0 -45 19q-20 20 -34 50t-19.5 52t-12.5 61q-9 42 -13.5 60.5t-17.5 48.5t-31 48q-33 33 -101 120q-49 64 -101 121 t-76 59q-25 2 -43 20.5t-18 43.5v641q0 26 19 44.5t45 19.5q35 1 158 44q77 26 120.5 39.5t121.5 29t144 15.5h17h76h36q133 -2 197 -78q58 -69 49 -181q39 -37 54 -94q17 -61 0 -117q46 -61 43 -137q0 -32 -15 -76z" />
|
||||||
|
<glyph unicode="" d="M919 233v157q0 50 -29 50q-17 0 -33 -16v-224q16 -16 33 -16q29 0 29 49zM1103 355h66v34q0 51 -33 51t-33 -51v-34zM532 621v-70h-80v-423h-74v423h-78v70h232zM733 495v-367h-67v40q-39 -45 -76 -45q-33 0 -42 28q-6 16 -6 54v290h66v-270q0 -24 1 -26q1 -15 15 -15 q20 0 42 31v280h67zM985 384v-146q0 -52 -7 -73q-12 -42 -53 -42q-35 0 -68 41v-36h-67v493h67v-161q32 40 68 40q41 0 53 -42q7 -21 7 -74zM1236 255v-9q0 -29 -2 -43q-3 -22 -15 -40q-27 -40 -80 -40q-52 0 -81 38q-21 27 -21 86v129q0 59 20 86q29 38 80 38t78 -38 q21 -28 21 -86v-76h-133v-65q0 -51 34 -51q24 0 30 26q0 1 0.5 7t0.5 16.5v21.5h68zM785 1079v-156q0 -51 -32 -51t-32 51v156q0 52 32 52t32 -52zM1318 366q0 177 -19 260q-10 44 -43 73.5t-76 34.5q-136 15 -412 15q-275 0 -411 -15q-44 -5 -76.5 -34.5t-42.5 -73.5 q-20 -87 -20 -260q0 -176 20 -260q10 -43 42.5 -73t75.5 -35q137 -15 412 -15t412 15q43 5 75.5 35t42.5 73q20 84 20 260zM563 1017l90 296h-75l-51 -195l-53 195h-78l24 -69t23 -69q35 -103 46 -158v-201h74v201zM852 936v130q0 58 -21 87q-29 38 -78 38q-51 0 -78 -38 q-21 -29 -21 -87v-130q0 -58 21 -87q27 -38 78 -38q49 0 78 38q21 27 21 87zM1033 816h67v370h-67v-283q-22 -31 -42 -31q-15 0 -16 16q-1 2 -1 26v272h-67v-293q0 -37 6 -55q11 -27 43 -27q36 0 77 45v-40zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960 q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
|
||||||
|
<glyph unicode="" d="M971 292v-211q0 -67 -39 -67q-23 0 -45 22v301q22 22 45 22q39 0 39 -67zM1309 291v-46h-90v46q0 68 45 68t45 -68zM343 509h107v94h-312v-94h105v-569h100v569zM631 -60h89v494h-89v-378q-30 -42 -57 -42q-18 0 -21 21q-1 3 -1 35v364h-89v-391q0 -49 8 -73 q12 -37 58 -37q48 0 102 61v-54zM1060 88v197q0 73 -9 99q-17 56 -71 56q-50 0 -93 -54v217h-89v-663h89v48q45 -55 93 -55q54 0 71 55q9 27 9 100zM1398 98v13h-91q0 -51 -2 -61q-7 -36 -40 -36q-46 0 -46 69v87h179v103q0 79 -27 116q-39 51 -106 51q-68 0 -107 -51 q-28 -37 -28 -116v-173q0 -79 29 -116q39 -51 108 -51q72 0 108 53q18 27 21 54q2 9 2 58zM790 1011v210q0 69 -43 69t-43 -69v-210q0 -70 43 -70t43 70zM1509 260q0 -234 -26 -350q-14 -59 -58 -99t-102 -46q-184 -21 -555 -21t-555 21q-58 6 -102.5 46t-57.5 99 q-26 112 -26 350q0 234 26 350q14 59 58 99t103 47q183 20 554 20t555 -20q58 -7 102.5 -47t57.5 -99q26 -112 26 -350zM511 1536h102l-121 -399v-271h-100v271q-14 74 -61 212q-37 103 -65 187h106l71 -263zM881 1203v-175q0 -81 -28 -118q-37 -51 -106 -51q-67 0 -105 51 q-28 38 -28 118v175q0 80 28 117q38 51 105 51q69 0 106 -51q28 -37 28 -117zM1216 1365v-499h-91v55q-53 -62 -103 -62q-46 0 -59 37q-8 24 -8 75v394h91v-367q0 -33 1 -35q3 -22 21 -22q27 0 57 43v381h91z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1408" d="M597 869q-10 -18 -257 -456q-27 -46 -65 -46h-239q-21 0 -31 17t0 36l253 448q1 0 0 1l-161 279q-12 22 -1 37q9 15 32 15h239q40 0 66 -45zM1403 1511q11 -16 0 -37l-528 -934v-1l336 -615q11 -20 1 -37q-10 -15 -32 -15h-239q-42 0 -66 45l-339 622q18 32 531 942 q25 45 64 45h241q22 0 31 -15z" />
|
||||||
|
<glyph unicode="" d="M685 771q0 1 -126 222q-21 34 -52 34h-184q-18 0 -26 -11q-7 -12 1 -29l125 -216v-1l-196 -346q-9 -14 0 -28q8 -13 24 -13h185q31 0 50 36zM1309 1268q-7 12 -24 12h-187q-30 0 -49 -35l-411 -729q1 -2 262 -481q20 -35 52 -35h184q18 0 25 12q8 13 -1 28l-260 476v1 l409 723q8 16 0 28zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1792" d="M1280 640q0 37 -30 54l-512 320q-31 20 -65 2q-33 -18 -33 -56v-640q0 -38 33 -56q16 -8 31 -8q20 0 34 10l512 320q30 17 30 54zM1792 640q0 -96 -1 -150t-8.5 -136.5t-22.5 -147.5q-16 -73 -69 -123t-124 -58q-222 -25 -671 -25t-671 25q-71 8 -124.5 58t-69.5 123 q-14 65 -21.5 147.5t-8.5 136.5t-1 150t1 150t8.5 136.5t22.5 147.5q16 73 69 123t124 58q222 25 671 25t671 -25q71 -8 124.5 -58t69.5 -123q14 -65 21.5 -147.5t8.5 -136.5t1 -150z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1792" d="M402 829l494 -305l-342 -285l-490 319zM1388 274v-108l-490 -293v-1l-1 1l-1 -1v1l-489 293v108l147 -96l342 284v2l1 -1l1 1v-2l343 -284zM554 1418l342 -285l-494 -304l-338 270zM1390 829l338 -271l-489 -319l-343 285zM1239 1418l489 -319l-338 -270l-494 304z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1408" d="M928 135v-151l-707 -1v151zM1169 481v-701l-1 -35v-1h-1132l-35 1h-1v736h121v-618h928v618h120zM241 393l704 -65l-13 -150l-705 65zM309 709l683 -183l-39 -146l-683 183zM472 1058l609 -360l-77 -130l-609 360zM832 1389l398 -585l-124 -85l-399 584zM1285 1536 l121 -697l-149 -26l-121 697z" />
|
||||||
|
<glyph unicode="" d="M1362 110v648h-135q20 -63 20 -131q0 -126 -64 -232.5t-174 -168.5t-240 -62q-197 0 -337 135.5t-140 327.5q0 68 20 131h-141v-648q0 -26 17.5 -43.5t43.5 -17.5h1069q25 0 43 17.5t18 43.5zM1078 643q0 124 -90.5 211.5t-218.5 87.5q-127 0 -217.5 -87.5t-90.5 -211.5 t90.5 -211.5t217.5 -87.5q128 0 218.5 87.5t90.5 211.5zM1362 1003v165q0 28 -20 48.5t-49 20.5h-174q-29 0 -49 -20.5t-20 -48.5v-165q0 -29 20 -49t49 -20h174q29 0 49 20t20 49zM1536 1211v-1142q0 -81 -58 -139t-139 -58h-1142q-81 0 -139 58t-58 139v1142q0 81 58 139 t139 58h1142q81 0 139 -58t58 -139z" />
|
||||||
|
<glyph unicode="" d="M1248 1408q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960zM698 640q0 88 -62 150t-150 62t-150 -62t-62 -150t62 -150t150 -62t150 62t62 150zM1262 640q0 88 -62 150 t-150 62t-150 -62t-62 -150t62 -150t150 -62t150 62t62 150z" />
|
||||||
|
<glyph unicode="" d="M768 914l201 -306h-402zM1133 384h94l-459 691l-459 -691h94l104 160h522zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1408" d="M815 677q8 -63 -50.5 -101t-111.5 -6q-39 17 -53.5 58t-0.5 82t52 58q36 18 72.5 12t64 -35.5t27.5 -67.5zM926 698q-14 107 -113 164t-197 13q-63 -28 -100.5 -88.5t-34.5 -129.5q4 -91 77.5 -155t165.5 -56q91 8 152 84t50 168zM1165 1240q-20 27 -56 44.5t-58 22 t-71 12.5q-291 47 -566 -2q-43 -7 -66 -12t-55 -22t-50 -43q30 -28 76 -45.5t73.5 -22t87.5 -11.5q228 -29 448 -1q63 8 89.5 12t72.5 21.5t75 46.5zM1222 205q-8 -26 -15.5 -76.5t-14 -84t-28.5 -70t-58 -56.5q-86 -48 -189.5 -71.5t-202 -22t-201.5 18.5q-46 8 -81.5 18 t-76.5 27t-73 43.5t-52 61.5q-25 96 -57 292l6 16l18 9q223 -148 506.5 -148t507.5 148q21 -6 24 -23t-5 -45t-8 -37zM1403 1166q-26 -167 -111 -655q-5 -30 -27 -56t-43.5 -40t-54.5 -31q-252 -126 -610 -88q-248 27 -394 139q-15 12 -25.5 26.5t-17 35t-9 34t-6 39.5 t-5.5 35q-9 50 -26.5 150t-28 161.5t-23.5 147.5t-22 158q3 26 17.5 48.5t31.5 37.5t45 30t46 22.5t48 18.5q125 46 313 64q379 37 676 -50q155 -46 215 -122q16 -20 16.5 -51t-5.5 -54z" />
|
||||||
|
<glyph unicode="" d="M848 666q0 43 -41 66t-77 1q-43 -20 -42.5 -72.5t43.5 -70.5q39 -23 81 4t36 72zM928 682q8 -66 -36 -121t-110 -61t-119 40t-56 113q-2 49 25.5 93t72.5 64q70 31 141.5 -10t81.5 -118zM1100 1073q-20 -21 -53.5 -34t-53 -16t-63.5 -8q-155 -20 -324 0q-44 6 -63 9.5 t-52.5 16t-54.5 32.5q13 19 36 31t40 15.5t47 8.5q198 35 408 1q33 -5 51 -8.5t43 -16t39 -31.5zM1142 327q0 7 5.5 26.5t3 32t-17.5 16.5q-161 -106 -365 -106t-366 106l-12 -6l-5 -12q26 -154 41 -210q47 -81 204 -108q249 -46 428 53q34 19 49 51.5t22.5 85.5t12.5 71z M1272 1020q9 53 -8 75q-43 55 -155 88q-216 63 -487 36q-132 -12 -226 -46q-38 -15 -59.5 -25t-47 -34t-29.5 -54q8 -68 19 -138t29 -171t24 -137q1 -5 5 -31t7 -36t12 -27t22 -28q105 -80 284 -100q259 -28 440 63q24 13 39.5 23t31 29t19.5 40q48 267 80 473zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1024" d="M390 1408h219v-388h364v-241h-364v-394q0 -136 14 -172q13 -37 52 -60q50 -31 117 -31q117 0 232 76v-242q-102 -48 -178 -65q-77 -19 -173 -19q-105 0 -186 27q-78 25 -138 75q-58 51 -79 105q-22 54 -22 161v539h-170v217q91 30 155 84q64 55 103 132q39 78 54 196z " />
|
||||||
|
<glyph unicode="" d="M1123 127v181q-88 -56 -174 -56q-51 0 -88 23q-29 17 -39 45q-11 30 -11 129v295h274v181h-274v291h-164q-11 -90 -40 -147t-78 -99q-48 -40 -116 -63v-163h127v-404q0 -78 17 -121q17 -42 59 -78q43 -37 104 -57q62 -20 140 -20q67 0 129 14q57 13 134 49zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="768" d="M765 237q8 -19 -5 -35l-350 -384q-10 -10 -23 -10q-14 0 -24 10l-355 384q-13 16 -5 35q9 19 29 19h224v1248q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1248h224q21 0 29 -19z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="768" d="M765 1043q-9 -19 -29 -19h-224v-1248q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v1248h-224q-21 0 -29 19t5 35l350 384q10 10 23 10q14 0 24 -10l355 -384q13 -16 5 -35z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1792" d="M1792 736v-192q0 -14 -9 -23t-23 -9h-1248v-224q0 -21 -19 -29t-35 5l-384 350q-10 10 -10 23q0 14 10 24l384 354q16 14 35 6q19 -9 19 -29v-224h1248q14 0 23 -9t9 -23z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1792" d="M1728 643q0 -14 -10 -24l-384 -354q-16 -14 -35 -6q-19 9 -19 29v224h-1248q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h1248v224q0 21 19 29t35 -5l384 -350q10 -10 10 -23z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1408" d="M1393 321q-39 -125 -123 -250q-129 -196 -257 -196q-49 0 -140 32q-86 32 -151 32q-61 0 -142 -33q-81 -34 -132 -34q-152 0 -301 259q-147 261 -147 503q0 228 113 374q112 144 284 144q72 0 177 -30q104 -30 138 -30q45 0 143 34q102 34 173 34q119 0 213 -65 q52 -36 104 -100q-79 -67 -114 -118q-65 -94 -65 -207q0 -124 69 -223t158 -126zM1017 1494q0 -61 -29 -136q-30 -75 -93 -138q-54 -54 -108 -72q-37 -11 -104 -17q3 149 78 257q74 107 250 148q1 -3 2.5 -11t2.5 -11q0 -4 0.5 -10t0.5 -10z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1664" d="M682 530v-651l-682 94v557h682zM682 1273v-659h-682v565zM1664 530v-786l-907 125v661h907zM1664 1408v-794h-907v669z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1408" d="M493 1053q16 0 27.5 11.5t11.5 27.5t-11.5 27.5t-27.5 11.5t-27 -11.5t-11 -27.5t11 -27.5t27 -11.5zM915 1053q16 0 27 11.5t11 27.5t-11 27.5t-27 11.5t-27.5 -11.5t-11.5 -27.5t11.5 -27.5t27.5 -11.5zM103 869q42 0 72 -30t30 -72v-430q0 -43 -29.5 -73t-72.5 -30 t-73 30t-30 73v430q0 42 30 72t73 30zM1163 850v-666q0 -46 -32 -78t-77 -32h-75v-227q0 -43 -30 -73t-73 -30t-73 30t-30 73v227h-138v-227q0 -43 -30 -73t-73 -30q-42 0 -72 30t-30 73l-1 227h-74q-46 0 -78 32t-32 78v666h918zM931 1255q107 -55 171 -153.5t64 -215.5 h-925q0 117 64 215.5t172 153.5l-71 131q-7 13 5 20q13 6 20 -6l72 -132q95 42 201 42t201 -42l72 132q7 12 20 6q12 -7 5 -20zM1408 767v-430q0 -43 -30 -73t-73 -30q-42 0 -72 30t-30 73v430q0 43 30 72.5t72 29.5q43 0 73 -29.5t30 -72.5z" />
|
||||||
|
<glyph unicode="" d="M663 1125q-11 -1 -15.5 -10.5t-8.5 -9.5q-5 -1 -5 5q0 12 19 15h10zM750 1111q-4 -1 -11.5 6.5t-17.5 4.5q24 11 32 -2q3 -6 -3 -9zM399 684q-4 1 -6 -3t-4.5 -12.5t-5.5 -13.5t-10 -13q-7 -10 -1 -12q4 -1 12.5 7t12.5 18q1 3 2 7t2 6t1.5 4.5t0.5 4v3t-1 2.5t-3 2z M1254 325q0 18 -55 42q4 15 7.5 27.5t5 26t3 21.5t0.5 22.5t-1 19.5t-3.5 22t-4 20.5t-5 25t-5.5 26.5q-10 48 -47 103t-72 75q24 -20 57 -83q87 -162 54 -278q-11 -40 -50 -42q-31 -4 -38.5 18.5t-8 83.5t-11.5 107q-9 39 -19.5 69t-19.5 45.5t-15.5 24.5t-13 15t-7.5 7 q-14 62 -31 103t-29.5 56t-23.5 33t-15 40q-4 21 6 53.5t4.5 49.5t-44.5 25q-15 3 -44.5 18t-35.5 16q-8 1 -11 26t8 51t36 27q37 3 51 -30t4 -58q-11 -19 -2 -26.5t30 -0.5q13 4 13 36v37q-5 30 -13.5 50t-21 30.5t-23.5 15t-27 7.5q-107 -8 -89 -134q0 -15 -1 -15 q-9 9 -29.5 10.5t-33 -0.5t-15.5 5q1 57 -16 90t-45 34q-27 1 -41.5 -27.5t-16.5 -59.5q-1 -15 3.5 -37t13 -37.5t15.5 -13.5q10 3 16 14q4 9 -7 8q-7 0 -15.5 14.5t-9.5 33.5q-1 22 9 37t34 14q17 0 27 -21t9.5 -39t-1.5 -22q-22 -15 -31 -29q-8 -12 -27.5 -23.5 t-20.5 -12.5q-13 -14 -15.5 -27t7.5 -18q14 -8 25 -19.5t16 -19t18.5 -13t35.5 -6.5q47 -2 102 15q2 1 23 7t34.5 10.5t29.5 13t21 17.5q9 14 20 8q5 -3 6.5 -8.5t-3 -12t-16.5 -9.5q-20 -6 -56.5 -21.5t-45.5 -19.5q-44 -19 -70 -23q-25 -5 -79 2q-10 2 -9 -2t17 -19 q25 -23 67 -22q17 1 36 7t36 14t33.5 17.5t30 17t24.5 12t17.5 2.5t8.5 -11q0 -2 -1 -4.5t-4 -5t-6 -4.5t-8.5 -5t-9 -4.5t-10 -5t-9.5 -4.5q-28 -14 -67.5 -44t-66.5 -43t-49 -1q-21 11 -63 73q-22 31 -25 22q-1 -3 -1 -10q0 -25 -15 -56.5t-29.5 -55.5t-21 -58t11.5 -63 q-23 -6 -62.5 -90t-47.5 -141q-2 -18 -1.5 -69t-5.5 -59q-8 -24 -29 -3q-32 31 -36 94q-2 28 4 56q4 19 -1 18l-4 -5q-36 -65 10 -166q5 -12 25 -28t24 -20q20 -23 104 -90.5t93 -76.5q16 -15 17.5 -38t-14 -43t-45.5 -23q8 -15 29 -44.5t28 -54t7 -70.5q46 24 7 92 q-4 8 -10.5 16t-9.5 12t-2 6q3 5 13 9.5t20 -2.5q46 -52 166 -36q133 15 177 87q23 38 34 30q12 -6 10 -52q-1 -25 -23 -92q-9 -23 -6 -37.5t24 -15.5q3 19 14.5 77t13.5 90q2 21 -6.5 73.5t-7.5 97t23 70.5q15 18 51 18q1 37 34.5 53t72.5 10.5t60 -22.5zM626 1152 q3 17 -2.5 30t-11.5 15q-9 2 -9 -7q2 -5 5 -6q10 0 7 -15q-3 -20 8 -20q3 0 3 3zM1045 955q-2 8 -6.5 11.5t-13 5t-14.5 5.5q-5 3 -9.5 8t-7 8t-5.5 6.5t-4 4t-4 -1.5q-14 -16 7 -43.5t39 -31.5q9 -1 14.5 8t3.5 20zM867 1168q0 11 -5 19.5t-11 12.5t-9 3q-14 -1 -7 -7l4 -2 q14 -4 18 -31q0 -3 8 2zM921 1401q0 2 -2.5 5t-9 7t-9.5 6q-15 15 -24 15q-9 -1 -11.5 -7.5t-1 -13t-0.5 -12.5q-1 -4 -6 -10.5t-6 -9t3 -8.5q4 -3 8 0t11 9t15 9q1 1 9 1t15 2t9 7zM1486 60q20 -12 31 -24.5t12 -24t-2.5 -22.5t-15.5 -22t-23.5 -19.5t-30 -18.5 t-31.5 -16.5t-32 -15.5t-27 -13q-38 -19 -85.5 -56t-75.5 -64q-17 -16 -68 -19.5t-89 14.5q-18 9 -29.5 23.5t-16.5 25.5t-22 19.5t-47 9.5q-44 1 -130 1q-19 0 -57 -1.5t-58 -2.5q-44 -1 -79.5 -15t-53.5 -30t-43.5 -28.5t-53.5 -11.5q-29 1 -111 31t-146 43q-19 4 -51 9.5 t-50 9t-39.5 9.5t-33.5 14.5t-17 19.5q-10 23 7 66.5t18 54.5q1 16 -4 40t-10 42.5t-4.5 36.5t10.5 27q14 12 57 14t60 12q30 18 42 35t12 51q21 -73 -32 -106q-32 -20 -83 -15q-34 3 -43 -10q-13 -15 5 -57q2 -6 8 -18t8.5 -18t4.5 -17t1 -22q0 -15 -17 -49t-14 -48 q3 -17 37 -26q20 -6 84.5 -18.5t99.5 -20.5q24 -6 74 -22t82.5 -23t55.5 -4q43 6 64.5 28t23 48t-7.5 58.5t-19 52t-20 36.5q-121 190 -169 242q-68 74 -113 40q-11 -9 -15 15q-3 16 -2 38q1 29 10 52t24 47t22 42q8 21 26.5 72t29.5 78t30 61t39 54q110 143 124 195 q-12 112 -16 310q-2 90 24 151.5t106 104.5q39 21 104 21q53 1 106 -13.5t89 -41.5q57 -42 91.5 -121.5t29.5 -147.5q-5 -95 30 -214q34 -113 133 -218q55 -59 99.5 -163t59.5 -191q8 -49 5 -84.5t-12 -55.5t-20 -22q-10 -2 -23.5 -19t-27 -35.5t-40.5 -33.5t-61 -14 q-18 1 -31.5 5t-22.5 13.5t-13.5 15.5t-11.5 
20.5t-9 19.5q-22 37 -41 30t-28 -49t7 -97q20 -70 1 -195q-10 -65 18 -100.5t73 -33t85 35.5q59 49 89.5 66.5t103.5 42.5q53 18 77 36.5t18.5 34.5t-25 28.5t-51.5 23.5q-33 11 -49.5 48t-15 72.5t15.5 47.5q1 -31 8 -56.5 t14.5 -40.5t20.5 -28.5t21 -19t21.5 -13t16.5 -9.5z" />
|
||||||
|
<glyph unicode="" d="M1024 36q-42 241 -140 498h-2l-2 -1q-16 -6 -43 -16.5t-101 -49t-137 -82t-131 -114.5t-103 -148l-15 11q184 -150 418 -150q132 0 256 52zM839 643q-21 49 -53 111q-311 -93 -673 -93q-1 -7 -1 -21q0 -124 44 -236.5t124 -201.5q50 89 123.5 166.5t142.5 124.5t130.5 81 t99.5 48l37 13q4 1 13 3.5t13 4.5zM732 855q-120 213 -244 378q-138 -65 -234 -186t-128 -272q302 0 606 80zM1416 536q-210 60 -409 29q87 -239 128 -469q111 75 185 189.5t96 250.5zM611 1277q-1 0 -2 -1q1 1 2 1zM1201 1132q-185 164 -433 164q-76 0 -155 -19 q131 -170 246 -382q69 26 130 60.5t96.5 61.5t65.5 57t37.5 40.5zM1424 647q-3 232 -149 410l-1 -1q-9 -12 -19 -24.5t-43.5 -44.5t-71 -60.5t-100 -65t-131.5 -64.5q25 -53 44 -95q2 -6 6.5 -17.5t7.5 -16.5q36 5 74.5 7t73.5 2t69 -1.5t64 -4t56.5 -5.5t48 -6.5t36.5 -6 t25 -4.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
|
||||||
|
<glyph unicode="" d="M1173 473q0 50 -19.5 91.5t-48.5 68.5t-73 49t-82.5 34t-87.5 23l-104 24q-30 7 -44 10.5t-35 11.5t-30 16t-16.5 21t-7.5 30q0 77 144 77q43 0 77 -12t54 -28.5t38 -33.5t40 -29t48 -12q47 0 75.5 32t28.5 77q0 55 -56 99.5t-142 67.5t-182 23q-68 0 -132 -15.5 t-119.5 -47t-89 -87t-33.5 -128.5q0 -61 19 -106.5t56 -75.5t80 -48.5t103 -32.5l146 -36q90 -22 112 -36q32 -20 32 -60q0 -39 -40 -64.5t-105 -25.5q-51 0 -91.5 16t-65 38.5t-45.5 45t-46 38.5t-54 16q-50 0 -75.5 -30t-25.5 -75q0 -92 122 -157.5t291 -65.5 q73 0 140 18.5t122.5 53.5t88.5 93.5t33 131.5zM1536 256q0 -159 -112.5 -271.5t-271.5 -112.5q-130 0 -234 80q-77 -16 -150 -16q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5q0 73 16 150q-80 104 -80 234q0 159 112.5 271.5t271.5 112.5q130 0 234 -80 q77 16 150 16q143 0 273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -73 -16 -150q80 -104 80 -234z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1664" d="M1483 512l-587 -587q-52 -53 -127.5 -53t-128.5 53l-587 587q-53 53 -53 128t53 128l587 587q53 53 128 53t128 -53l265 -265l-398 -399l-188 188q-42 42 -99 42q-59 0 -100 -41l-120 -121q-42 -40 -42 -99q0 -58 42 -100l406 -408q30 -28 67 -37l6 -4h28q60 0 99 41 l619 619l2 -3q53 -53 53 -128t-53 -128zM1406 1138l120 -120q14 -15 14 -36t-14 -36l-730 -730q-17 -15 -37 -15v0q-4 0 -6 1q-18 2 -30 14l-407 408q-14 15 -14 36t14 35l121 120q13 15 35 15t36 -15l252 -252l574 575q15 15 36 15t36 -15z" />
|
||||||
|
<glyph unicode="" d="M704 192v1024q0 14 -9 23t-23 9h-480q-14 0 -23 -9t-9 -23v-1024q0 -14 9 -23t23 -9h480q14 0 23 9t9 23zM1376 576v640q0 14 -9 23t-23 9h-480q-14 0 -23 -9t-9 -23v-640q0 -14 9 -23t23 -9h480q14 0 23 9t9 23zM1536 1344v-1408q0 -26 -19 -45t-45 -19h-1408 q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h1408q26 0 45 -19t19 -45z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1280" d="M1280 480q0 -40 -28 -68t-68 -28q-51 0 -80 43l-227 341h-45v-132l247 -411q9 -15 9 -33q0 -26 -19 -45t-45 -19h-192v-272q0 -46 -33 -79t-79 -33h-160q-46 0 -79 33t-33 79v272h-192q-26 0 -45 19t-19 45q0 18 9 33l247 411v132h-45l-227 -341q-29 -43 -80 -43 q-40 0 -68 28t-28 68q0 29 16 53l256 384q73 107 176 107h384q103 0 176 -107l256 -384q16 -24 16 -53zM864 1280q0 -93 -65.5 -158.5t-158.5 -65.5t-158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5t158.5 -65.5t65.5 -158.5z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1024" d="M1024 832v-416q0 -40 -28 -68t-68 -28t-68 28t-28 68v352h-64v-912q0 -46 -33 -79t-79 -33t-79 33t-33 79v464h-64v-464q0 -46 -33 -79t-79 -33t-79 33t-33 79v912h-64v-352q0 -40 -28 -68t-68 -28t-68 28t-28 68v416q0 80 56 136t136 56h640q80 0 136 -56t56 -136z M736 1280q0 -93 -65.5 -158.5t-158.5 -65.5t-158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5t158.5 -65.5t65.5 -158.5z" />
|
||||||
|
<glyph unicode="" d="M773 234l350 473q16 22 24.5 59t-6 85t-61.5 79q-40 26 -83 25.5t-73.5 -17.5t-54.5 -45q-36 -40 -96 -40q-59 0 -95 40q-24 28 -54.5 45t-73.5 17.5t-84 -25.5q-46 -31 -60.5 -79t-6 -85t24.5 -59zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103 t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1792" d="M1472 640q0 117 -45.5 223.5t-123 184t-184 123t-223.5 45.5t-223.5 -45.5t-184 -123t-123 -184t-45.5 -223.5t45.5 -223.5t123 -184t184 -123t223.5 -45.5t223.5 45.5t184 123t123 184t45.5 223.5zM1748 363q-4 -15 -20 -20l-292 -96v-306q0 -16 -13 -26q-15 -10 -29 -4 l-292 94l-180 -248q-10 -13 -26 -13t-26 13l-180 248l-292 -94q-14 -6 -29 4q-13 10 -13 26v306l-292 96q-16 5 -20 20q-5 17 4 29l180 248l-180 248q-9 13 -4 29q4 15 20 20l292 96v306q0 16 13 26q15 10 29 4l292 -94l180 248q9 12 26 12t26 -12l180 -248l292 94 q14 6 29 -4q13 -10 13 -26v-306l292 -96q16 -5 20 -20q5 -16 -4 -29l-180 -248l180 -248q9 -12 4 -29z" />
|
||||||
|
<glyph unicode="" d="M1262 233q-54 -9 -110 -9q-182 0 -337 90t-245 245t-90 337q0 192 104 357q-201 -60 -328.5 -229t-127.5 -384q0 -130 51 -248.5t136.5 -204t204 -136.5t248.5 -51q144 0 273.5 61.5t220.5 171.5zM1465 318q-94 -203 -283.5 -324.5t-413.5 -121.5q-156 0 -298 61 t-245 164t-164 245t-61 298q0 153 57.5 292.5t156 241.5t235.5 164.5t290 68.5q44 2 61 -39q18 -41 -15 -72q-86 -78 -131.5 -181.5t-45.5 -218.5q0 -148 73 -273t198 -198t273 -73q118 0 228 51q41 18 72 -13q14 -14 17.5 -34t-4.5 -38z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1792" d="M1088 704q0 26 -19 45t-45 19h-256q-26 0 -45 -19t-19 -45t19 -45t45 -19h256q26 0 45 19t19 45zM1664 896v-960q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v960q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1728 1344v-256q0 -26 -19 -45t-45 -19h-1536 q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1536q26 0 45 -19t19 -45z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1664" d="M1632 576q0 -26 -19 -45t-45 -19h-224q0 -171 -67 -290l208 -209q19 -19 19 -45t-19 -45q-18 -19 -45 -19t-45 19l-198 197q-5 -5 -15 -13t-42 -28.5t-65 -36.5t-82 -29t-97 -13v896h-128v-896q-51 0 -101.5 13.5t-87 33t-66 39t-43.5 32.5l-15 14l-183 -207 q-20 -21 -48 -21q-24 0 -43 16q-19 18 -20.5 44.5t15.5 46.5l202 227q-58 114 -58 274h-224q-26 0 -45 19t-19 45t19 45t45 19h224v294l-173 173q-19 19 -19 45t19 45t45 19t45 -19l173 -173h844l173 173q19 19 45 19t45 -19t19 -45t-19 -45l-173 -173v-294h224q26 0 45 -19 t19 -45zM1152 1152h-640q0 133 93.5 226.5t226.5 93.5t226.5 -93.5t93.5 -226.5z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1920" d="M1917 1016q23 -64 -150 -294q-24 -32 -65 -85q-78 -100 -90 -131q-17 -41 14 -81q17 -21 81 -82h1l1 -1l1 -1l2 -2q141 -131 191 -221q3 -5 6.5 -12.5t7 -26.5t-0.5 -34t-25 -27.5t-59 -12.5l-256 -4q-24 -5 -56 5t-52 22l-20 12q-30 21 -70 64t-68.5 77.5t-61 58 t-56.5 15.5q-3 -1 -8 -3.5t-17 -14.5t-21.5 -29.5t-17 -52t-6.5 -77.5q0 -15 -3.5 -27.5t-7.5 -18.5l-4 -5q-18 -19 -53 -22h-115q-71 -4 -146 16.5t-131.5 53t-103 66t-70.5 57.5l-25 24q-10 10 -27.5 30t-71.5 91t-106 151t-122.5 211t-130.5 272q-6 16 -6 27t3 16l4 6 q15 19 57 19l274 2q12 -2 23 -6.5t16 -8.5l5 -3q16 -11 24 -32q20 -50 46 -103.5t41 -81.5l16 -29q29 -60 56 -104t48.5 -68.5t41.5 -38.5t34 -14t27 5q2 1 5 5t12 22t13.5 47t9.5 81t0 125q-2 40 -9 73t-14 46l-6 12q-25 34 -85 43q-13 2 5 24q17 19 38 30q53 26 239 24 q82 -1 135 -13q20 -5 33.5 -13.5t20.5 -24t10.5 -32t3.5 -45.5t-1 -55t-2.5 -70.5t-1.5 -82.5q0 -11 -1 -42t-0.5 -48t3.5 -40.5t11.5 -39t22.5 -24.5q8 -2 17 -4t26 11t38 34.5t52 67t68 107.5q60 104 107 225q4 10 10 17.5t11 10.5l4 3l5 2.5t13 3t20 0.5l288 2 q39 5 64 -2.5t31 -16.5z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1792" d="M675 252q21 34 11 69t-45 50q-34 14 -73 1t-60 -46q-22 -34 -13 -68.5t43 -50.5t74.5 -2.5t62.5 47.5zM769 373q8 13 3.5 26.5t-17.5 18.5q-14 5 -28.5 -0.5t-21.5 -18.5q-17 -31 13 -45q14 -5 29 0.5t22 18.5zM943 266q-45 -102 -158 -150t-224 -12 q-107 34 -147.5 126.5t6.5 187.5q47 93 151.5 139t210.5 19q111 -29 158.5 -119.5t2.5 -190.5zM1255 426q-9 96 -89 170t-208.5 109t-274.5 21q-223 -23 -369.5 -141.5t-132.5 -264.5q9 -96 89 -170t208.5 -109t274.5 -21q223 23 369.5 141.5t132.5 264.5zM1563 422 q0 -68 -37 -139.5t-109 -137t-168.5 -117.5t-226 -83t-270.5 -31t-275 33.5t-240.5 93t-171.5 151t-65 199.5q0 115 69.5 245t197.5 258q169 169 341.5 236t246.5 -7q65 -64 20 -209q-4 -14 -1 -20t10 -7t14.5 0.5t13.5 3.5l6 2q139 59 246 59t153 -61q45 -63 0 -178 q-2 -13 -4.5 -20t4.5 -12.5t12 -7.5t17 -6q57 -18 103 -47t80 -81.5t34 -116.5zM1489 1046q42 -47 54.5 -108.5t-6.5 -117.5q-8 -23 -29.5 -34t-44.5 -4q-23 8 -34 29.5t-4 44.5q20 63 -24 111t-107 35q-24 -5 -45 8t-25 37q-5 24 8 44.5t37 25.5q60 13 119 -5.5t101 -65.5z M1670 1209q87 -96 112.5 -222.5t-13.5 -241.5q-9 -27 -34 -40t-52 -4t-40 34t-5 52q28 82 10 172t-80 158q-62 69 -148 95.5t-173 8.5q-28 -6 -52 9.5t-30 43.5t9.5 51.5t43.5 29.5q123 26 244 -11.5t208 -134.5z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1920" d="M805 163q-122 -67 -261 -67q-141 0 -261 67q98 61 167 149t94 191q25 -103 94 -191t167 -149zM453 1176v-344q0 -179 -89.5 -326t-234.5 -217q-129 152 -129 351q0 200 129.5 352t323.5 184zM958 991q-128 -152 -128 -351q0 -201 128 -351q-145 70 -234.5 218t-89.5 328 v341q196 -33 324 -185zM1638 163q-122 -67 -261 -67q-141 0 -261 67q98 61 167 149t94 191q25 -103 94 -191t167 -149zM1286 1176v-344q0 -179 -91 -326t-237 -217v0q133 154 133 351q0 195 -133 351q129 151 328 185zM1920 640q0 -201 -129 -351q-145 70 -234.5 218 t-89.5 328v341q194 -32 323.5 -184t129.5 -352z" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1792" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1792" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1792" />
|
||||||
|
<glyph unicode="" horiz-adv-x="1792" />
|
||||||
|
</font>
|
||||||
|
</defs></svg>
|
||||||
|
After Width: | Height: | Size: 193 KiB |
BIN
docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.ttf
vendored
Executable file
BIN
docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.ttf
vendored
Executable file
Binary file not shown.
BIN
docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.woff
vendored
Executable file
BIN
docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.woff
vendored
Executable file
Binary file not shown.
16
docs/_themes/sphinx_rtd_theme/static/js/theme.js
vendored
Executable file
16
docs/_themes/sphinx_rtd_theme/static/js/theme.js
vendored
Executable file
@@ -0,0 +1,16 @@
|
|||||||
|
$( document ).ready(function() {
|
||||||
|
// Shift nav in mobile when clicking the menu.
|
||||||
|
$("[data-toggle='wy-nav-top']").click(function() {
|
||||||
|
$("[data-toggle='wy-nav-shift']").toggleClass("shift");
|
||||||
|
$("[data-toggle='rst-versions']").toggleClass("shift");
|
||||||
|
});
|
||||||
|
// Close menu when you click a link.
|
||||||
|
$(".wy-menu-vertical .current ul li a").click(function() {
|
||||||
|
$("[data-toggle='wy-nav-shift']").removeClass("shift");
|
||||||
|
$("[data-toggle='rst-versions']").toggleClass("shift");
|
||||||
|
});
|
||||||
|
$("[data-toggle='rst-current-version']").click(function() {
|
||||||
|
$("[data-toggle='rst-versions']").toggleClass("shift-up");
|
||||||
|
});
|
||||||
|
$("table.docutils:not(.field-list").wrap("<div class='wy-table-responsive'></div>");
|
||||||
|
});
|
||||||
8
docs/_themes/sphinx_rtd_theme/theme.conf
vendored
Executable file
8
docs/_themes/sphinx_rtd_theme/theme.conf
vendored
Executable file
@@ -0,0 +1,8 @@
|
|||||||
|
[theme]
|
||||||
|
inherit = basic
|
||||||
|
stylesheet = css/theme.css
|
||||||
|
|
||||||
|
[options]
|
||||||
|
typekit_id = hiw1hhg
|
||||||
|
analytics_id =
|
||||||
|
canonical_url =
|
||||||
37
docs/_themes/sphinx_rtd_theme/versions.html
vendored
Executable file
37
docs/_themes/sphinx_rtd_theme/versions.html
vendored
Executable file
@@ -0,0 +1,37 @@
|
|||||||
|
{% if READTHEDOCS %}
|
||||||
|
{# Add rst-badge after rst-versions for small badge style. #}
|
||||||
|
<div class="rst-versions" data-toggle="rst-versions">
|
||||||
|
<span class="rst-current-version" data-toggle="rst-current-version">
|
||||||
|
<span class="icon icon-book"> Read the Docs</span>
|
||||||
|
v: {{ current_version }}
|
||||||
|
<span class="icon icon-caret-down"></span>
|
||||||
|
</span>
|
||||||
|
<div class="rst-other-versions">
|
||||||
|
<dl>
|
||||||
|
<dt>Versions</dt>
|
||||||
|
{% for slug, url in versions %}
|
||||||
|
<dd><a href="{{ url }}">{{ slug }}</a></dd>
|
||||||
|
{% endfor %}
|
||||||
|
</dl>
|
||||||
|
<dl>
|
||||||
|
<dt>Downloads</dt>
|
||||||
|
{% for type, url in downloads %}
|
||||||
|
<dd><a href="{{ url }}">{{ type }}</a></dd>
|
||||||
|
{% endfor %}
|
||||||
|
</dl>
|
||||||
|
<dl>
|
||||||
|
<dt>On Read the Docs</dt>
|
||||||
|
<dd>
|
||||||
|
<a href="//{{ PRODUCTION_DOMAIN }}/projects/{{ slug }}/?fromdocs={{ slug }}">Project Home</a>
|
||||||
|
</dd>
|
||||||
|
<dd>
|
||||||
|
<a href="//{{ PRODUCTION_DOMAIN }}/builds/{{ slug }}/?fromdocs={{ slug }}">Builds</a>
|
||||||
|
</dd>
|
||||||
|
</dl>
|
||||||
|
<hr/>
|
||||||
|
Free document hosting provided by <a href="http://www.readthedocs.org">Read the Docs</a>.
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
@@ -34,38 +34,95 @@ Documents
|
|||||||
.. autoclass:: mongoengine.ValidationError
|
.. autoclass:: mongoengine.ValidationError
|
||||||
:members:
|
:members:
|
||||||
|
|
||||||
|
.. autoclass:: mongoengine.FieldDoesNotExist
|
||||||
|
|
||||||
|
|
||||||
|
Context Managers
|
||||||
|
================
|
||||||
|
|
||||||
|
.. autoclass:: mongoengine.context_managers.switch_db
|
||||||
|
.. autoclass:: mongoengine.context_managers.switch_collection
|
||||||
|
.. autoclass:: mongoengine.context_managers.no_dereference
|
||||||
|
.. autoclass:: mongoengine.context_managers.query_counter
|
||||||
|
|
||||||
Querying
|
Querying
|
||||||
========
|
========
|
||||||
|
|
||||||
.. autoclass:: mongoengine.queryset.QuerySet
|
.. automodule:: mongoengine.queryset
|
||||||
|
:synopsis: Queryset level operations
|
||||||
|
|
||||||
|
.. autoclass:: mongoengine.queryset.QuerySet
|
||||||
|
:members:
|
||||||
|
:inherited-members:
|
||||||
|
|
||||||
|
.. automethod:: QuerySet.__call__
|
||||||
|
|
||||||
|
.. autoclass:: mongoengine.queryset.QuerySetNoCache
|
||||||
:members:
|
:members:
|
||||||
|
|
||||||
.. automethod:: mongoengine.queryset.QuerySet.__call__
|
.. automethod:: mongoengine.queryset.QuerySetNoCache.__call__
|
||||||
|
|
||||||
.. autofunction:: mongoengine.queryset.queryset_manager
|
.. autofunction:: mongoengine.queryset.queryset_manager
|
||||||
|
|
||||||
Fields
|
Fields
|
||||||
======
|
======
|
||||||
|
|
||||||
.. autoclass:: mongoengine.StringField
|
.. autoclass:: mongoengine.base.fields.BaseField
|
||||||
.. autoclass:: mongoengine.URLField
|
.. autoclass:: mongoengine.fields.StringField
|
||||||
.. autoclass:: mongoengine.EmailField
|
.. autoclass:: mongoengine.fields.URLField
|
||||||
.. autoclass:: mongoengine.IntField
|
.. autoclass:: mongoengine.fields.EmailField
|
||||||
.. autoclass:: mongoengine.FloatField
|
.. autoclass:: mongoengine.fields.IntField
|
||||||
.. autoclass:: mongoengine.DecimalField
|
.. autoclass:: mongoengine.fields.LongField
|
||||||
.. autoclass:: mongoengine.DateTimeField
|
.. autoclass:: mongoengine.fields.FloatField
|
||||||
.. autoclass:: mongoengine.ComplexDateTimeField
|
.. autoclass:: mongoengine.fields.DecimalField
|
||||||
.. autoclass:: mongoengine.ListField
|
.. autoclass:: mongoengine.fields.BooleanField
|
||||||
.. autoclass:: mongoengine.SortedListField
|
.. autoclass:: mongoengine.fields.DateTimeField
|
||||||
.. autoclass:: mongoengine.DictField
|
.. autoclass:: mongoengine.fields.ComplexDateTimeField
|
||||||
.. autoclass:: mongoengine.MapField
|
.. autoclass:: mongoengine.fields.EmbeddedDocumentField
|
||||||
.. autoclass:: mongoengine.ObjectIdField
|
.. autoclass:: mongoengine.fields.GenericEmbeddedDocumentField
|
||||||
.. autoclass:: mongoengine.ReferenceField
|
.. autoclass:: mongoengine.fields.DynamicField
|
||||||
.. autoclass:: mongoengine.GenericReferenceField
|
.. autoclass:: mongoengine.fields.ListField
|
||||||
.. autoclass:: mongoengine.EmbeddedDocumentField
|
.. autoclass:: mongoengine.fields.EmbeddedDocumentListField
|
||||||
.. autoclass:: mongoengine.GenericEmbeddedDocumentField
|
.. autoclass:: mongoengine.fields.SortedListField
|
||||||
.. autoclass:: mongoengine.BooleanField
|
.. autoclass:: mongoengine.fields.DictField
|
||||||
.. autoclass:: mongoengine.FileField
|
.. autoclass:: mongoengine.fields.MapField
|
||||||
.. autoclass:: mongoengine.BinaryField
|
.. autoclass:: mongoengine.fields.ReferenceField
|
||||||
.. autoclass:: mongoengine.GeoPointField
|
.. autoclass:: mongoengine.fields.GenericReferenceField
|
||||||
.. autoclass:: mongoengine.SequenceField
|
.. autoclass:: mongoengine.fields.CachedReferenceField
|
||||||
|
.. autoclass:: mongoengine.fields.BinaryField
|
||||||
|
.. autoclass:: mongoengine.fields.FileField
|
||||||
|
.. autoclass:: mongoengine.fields.ImageField
|
||||||
|
.. autoclass:: mongoengine.fields.SequenceField
|
||||||
|
.. autoclass:: mongoengine.fields.ObjectIdField
|
||||||
|
.. autoclass:: mongoengine.fields.UUIDField
|
||||||
|
.. autoclass:: mongoengine.fields.GeoPointField
|
||||||
|
.. autoclass:: mongoengine.fields.PointField
|
||||||
|
.. autoclass:: mongoengine.fields.LineStringField
|
||||||
|
.. autoclass:: mongoengine.fields.PolygonField
|
||||||
|
.. autoclass:: mongoengine.fields.MultiPointField
|
||||||
|
.. autoclass:: mongoengine.fields.MultiLineStringField
|
||||||
|
.. autoclass:: mongoengine.fields.MultiPolygonField
|
||||||
|
.. autoclass:: mongoengine.fields.GridFSError
|
||||||
|
.. autoclass:: mongoengine.fields.GridFSProxy
|
||||||
|
.. autoclass:: mongoengine.fields.ImageGridFsProxy
|
||||||
|
.. autoclass:: mongoengine.fields.ImproperlyConfigured
|
||||||
|
|
||||||
|
Embedded Document Querying
|
||||||
|
==========================
|
||||||
|
|
||||||
|
.. versionadded:: 0.9
|
||||||
|
|
||||||
|
Additional queries for Embedded Documents are available when using the
|
||||||
|
:class:`~mongoengine.EmbeddedDocumentListField` to store a list of embedded
|
||||||
|
documents.
|
||||||
|
|
||||||
|
A list of embedded documents is returned as a special list with the
|
||||||
|
following methods:
|
||||||
|
|
||||||
|
.. autoclass:: mongoengine.base.datastructures.EmbeddedDocumentList
|
||||||
|
:members:
|
||||||
|
|
||||||
|
Misc
|
||||||
|
====
|
||||||
|
|
||||||
|
.. autofunction:: mongoengine.common._import_class
|
||||||
|
|||||||
@@ -2,6 +2,583 @@
|
|||||||
Changelog
|
Changelog
|
||||||
=========
|
=========
|
||||||
|
|
||||||
|
Development
|
||||||
|
===========
|
||||||
|
- (Fill this out as you fix issues and develop your features).
|
||||||
|
|
||||||
|
Changes in 0.13.0
|
||||||
|
=================
|
||||||
|
- POTENTIAL BREAKING CHANGE: Added Unicode support to the `EmailField`, see
|
||||||
|
docs/upgrade.rst for details.
|
||||||
|
|
||||||
|
Changes in 0.12.0
|
||||||
|
=================
|
||||||
|
- POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476
|
||||||
|
- POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476
|
||||||
|
- Fixed the way `Document.objects.create` works with duplicate IDs #1485
|
||||||
|
- Fixed connecting to a replica set with PyMongo 2.x #1436
|
||||||
|
- Fixed using sets in field choices #1481
|
||||||
|
- Fixed deleting items from a `ListField` #1318
|
||||||
|
- Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237
|
||||||
|
- Fixed behavior of a `dec` update operator #1450
|
||||||
|
- Added a `rename` update operator #1454
|
||||||
|
- Added validation for the `db_field` parameter #1448
|
||||||
|
- Fixed the error message displayed when querying an `EmbeddedDocumentField` by an invalid value #1440
|
||||||
|
- Fixed the error message displayed when validating unicode URLs #1486
|
||||||
|
- Raise an error when trying to save an abstract document #1449
|
||||||
|
|
||||||
|
Changes in 0.11.0
|
||||||
|
=================
|
||||||
|
- BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428
|
||||||
|
- BREAKING CHANGE: Dropped Python 2.6 support. #1428
|
||||||
|
- BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428
|
||||||
|
- BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334
|
||||||
|
- Fixed absent rounding for DecimalField when `force_string` is set. #1103
|
||||||
|
|
||||||
|
Changes in 0.10.8
|
||||||
|
=================
|
||||||
|
- Added support for QuerySet.batch_size (#1426)
|
||||||
|
- Fixed query set iteration within iteration #1427
|
||||||
|
- Fixed an issue where specifying a MongoDB URI host would override more information than it should #1421
|
||||||
|
- Added ability to filter the generic reference field by ObjectId and DBRef #1425
|
||||||
|
- Fixed delete cascade for models with a custom primary key field #1247
|
||||||
|
- Added ability to specify an authentication mechanism (e.g. X.509) #1333
|
||||||
|
- Added support for falsey primary keys (e.g. doc.pk = 0) #1354
|
||||||
|
- Fixed QuerySet#sum/average for fields w/ explicit db_field #1417
|
||||||
|
- Fixed filtering by embedded_doc=None #1422
|
||||||
|
- Added support for cursor.comment #1420
|
||||||
|
- Fixed doc.get_<field>_display #1419
|
||||||
|
- Fixed __repr__ method of the StrictDict #1424
|
||||||
|
- Added a deprecation warning for Python 2.6
|
||||||
|
|
||||||
|
Changes in 0.10.7
|
||||||
|
=================
|
||||||
|
- Dropped Python 3.2 support #1390
|
||||||
|
- Fixed the bug where dynamic doc has index inside a dict field #1278
|
||||||
|
- Fixed: ListField minus index assignment does not work #1128
|
||||||
|
- Fixed cascade delete mixing among collections #1224
|
||||||
|
- Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls #1206
|
||||||
|
- Raise `OperationError` when trying to do a `drop_collection` on document with no collection set.
|
||||||
|
- count on ListField of EmbeddedDocumentField fails. #1187
|
||||||
|
- Fixed long fields stored as int32 in Python 3. #1253
|
||||||
|
- MapField now handles unicodes keys correctly. #1267
|
||||||
|
- ListField now handles negative indicies correctly. #1270
|
||||||
|
- Fixed AttributeError when initializing EmbeddedDocument with positional args. #681
|
||||||
|
- Fixed no_cursor_timeout error with pymongo 3.0+ #1304
|
||||||
|
- Replaced map-reduce based QuerySet.sum/average with aggregation-based implementations #1336
|
||||||
|
- Fixed support for `__` to escape field names that match operators names in `update` #1351
|
||||||
|
- Fixed BaseDocument#_mark_as_changed #1369
|
||||||
|
- Added support for pickling QuerySet instances. #1397
|
||||||
|
- Fixed connecting to a list of hosts #1389
|
||||||
|
- Fixed a bug where accessing broken references wouldn't raise a DoesNotExist error #1334
|
||||||
|
- Fixed not being able to specify use_db_field=False on ListField(EmbeddedDocumentField) instances #1218
|
||||||
|
- Improvements to the dictionary fields docs #1383
|
||||||
|
|
||||||
|
Changes in 0.10.6
|
||||||
|
=================
|
||||||
|
- Add support for mocking MongoEngine based on mongomock. #1151
|
||||||
|
- Fixed not being able to run tests on Windows. #1153
|
||||||
|
- Allow creation of sparse compound indexes. #1114
|
||||||
|
- count on ListField of EmbeddedDocumentField fails. #1187
|
||||||
|
|
||||||
|
Changes in 0.10.5
|
||||||
|
=================
|
||||||
|
- Fix for reloading of strict with special fields. #1156
|
||||||
|
|
||||||
|
Changes in 0.10.4
|
||||||
|
=================
|
||||||
|
- SaveConditionError is now importable from the top level package. #1165
|
||||||
|
- upsert_one method added. #1157
|
||||||
|
|
||||||
|
Changes in 0.10.3
|
||||||
|
=================
|
||||||
|
- Fix `read_preference` (it had chaining issues with PyMongo 2.x and it didn't work at all with PyMongo 3.x) #1042
|
||||||
|
|
||||||
|
Changes in 0.10.2
|
||||||
|
=================
|
||||||
|
- Allow shard key to point to a field in an embedded document. #551
|
||||||
|
- Allow arbirary metadata in fields. #1129
|
||||||
|
- ReferenceFields now support abstract document types. #837
|
||||||
|
|
||||||
|
Changes in 0.10.1
|
||||||
|
=================
|
||||||
|
- Fix infinite recursion with CASCADE delete rules under specific conditions. #1046
|
||||||
|
- Fix CachedReferenceField bug when loading cached docs as DBRef but failing to save them. #1047
|
||||||
|
- Fix ignored chained options #842
|
||||||
|
- Document save's save_condition error raises `SaveConditionError` exception #1070
|
||||||
|
- Fix Document.reload for DynamicDocument. #1050
|
||||||
|
- StrictDict & SemiStrictDict are shadowed at init time. #1105
|
||||||
|
- Fix ListField minus index assignment does not work. #1119
|
||||||
|
- Remove code that marks field as changed when the field has default but not existed in database #1126
|
||||||
|
- Remove test dependencies (nose and rednose) from install dependencies list. #1079
|
||||||
|
- Recursively build query when using elemMatch operator. #1130
|
||||||
|
- Fix instance back references for lists of embedded documents. #1131
|
||||||
|
|
||||||
|
Changes in 0.10.0
|
||||||
|
=================
|
||||||
|
- Django support was removed and will be available as a separate extension. #958
|
||||||
|
- Allow to load undeclared field with meta attribute 'strict': False #957
|
||||||
|
- Support for PyMongo 3+ #946
|
||||||
|
- Removed get_or_create() deprecated since 0.8.0. #300
|
||||||
|
- Improve Document._created status when switch collection and db #1020
|
||||||
|
- Queryset update doesn't go through field validation #453
|
||||||
|
- Added support for specifying authentication source as option `authSource` in URI. #967
|
||||||
|
- Fixed mark_as_changed to handle higher/lower level fields changed. #927
|
||||||
|
- ListField of embedded docs doesn't set the _instance attribute when iterating over it #914
|
||||||
|
- Support += and *= for ListField #595
|
||||||
|
- Use sets for populating dbrefs to dereference
|
||||||
|
- Fixed unpickled documents replacing the global field's list. #888
|
||||||
|
- Fixed storage of microseconds in ComplexDateTimeField and unused separator option. #910
|
||||||
|
- Don't send a "cls" option to ensureIndex (related to https://jira.mongodb.org/browse/SERVER-769)
|
||||||
|
- Fix for updating sorting in SortedListField. #978
|
||||||
|
- Added __ support to escape field name in fields lookup keywords that match operators names #949
|
||||||
|
- Fix for issue where FileField deletion did not free space in GridFS.
|
||||||
|
- No_dereference() not respected on embedded docs containing reference. #517
|
||||||
|
- Document save raise an exception if save_condition fails #1005
|
||||||
|
- Fixes some internal _id handling issue. #961
|
||||||
|
- Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652
|
||||||
|
- Capped collection multiple of 256. #1011
|
||||||
|
- Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods.
|
||||||
|
- Fix for delete with write_concern {'w': 0}. #1008
|
||||||
|
- Allow dynamic lookup for more than two parts. #882
|
||||||
|
- Added support for min_distance on geo queries. #831
|
||||||
|
- Allow to add custom metadata to fields #705
|
||||||
|
|
||||||
|
Changes in 0.9.0
|
||||||
|
================
|
||||||
|
- Update FileField when creating a new file #714
|
||||||
|
- Added `EmbeddedDocumentListField` for Lists of Embedded Documents. #826
|
||||||
|
- ComplexDateTimeField should fall back to None when null=True #864
|
||||||
|
- Request Support for $min, $max Field update operators #863
|
||||||
|
- `BaseDict` does not follow `setdefault` #866
|
||||||
|
- Add support for $type operator # 766
|
||||||
|
- Fix tests for pymongo 2.8+ #877
|
||||||
|
- No module named 'django.utils.importlib' (Django dev) #872
|
||||||
|
- Field Choices Now Accept Subclasses of Documents
|
||||||
|
- Ensure Indexes before Each Save #812
|
||||||
|
- Generate Unique Indices for Lists of EmbeddedDocuments #358
|
||||||
|
- Sparse fields #515
|
||||||
|
- write_concern not in params of Collection#remove #801
|
||||||
|
- Better BaseDocument equality check when not saved #798
|
||||||
|
- OperationError: Shard Keys are immutable. Tried to update id even though the document is not yet saved #771
|
||||||
|
- with_limit_and_skip for count should default like in pymongo #759
|
||||||
|
- Fix storing value of precision attribute in DecimalField #787
|
||||||
|
- Set attribute to None does not work (at least for fields with default values) #734
|
||||||
|
- Querying by a field defined in a subclass raises InvalidQueryError #744
|
||||||
|
- Add Support For MongoDB 2.6.X's maxTimeMS #778
|
||||||
|
- abstract shouldn't be inherited in EmbeddedDocument # 789
|
||||||
|
- Allow specifying the '_cls' as a field for indexes #397
|
||||||
|
- Stop ensure_indexes running on a secondaries unless connection is through mongos #746
|
||||||
|
- Not overriding default values when loading a subset of fields #399
|
||||||
|
- Saving document doesn't create new fields in existing collection #620
|
||||||
|
- Added `Queryset.aggregate` wrapper to aggregation framework #703
|
||||||
|
- Added support to show original model fields on to_json calls instead of db_field #697
|
||||||
|
- Added Queryset.search_text to Text indexes searchs #700
|
||||||
|
- Fixed tests for Django 1.7 #696
|
||||||
|
- Follow ReferenceFields in EmbeddedDocuments with select_related #690
|
||||||
|
- Added preliminary support for text indexes #680
|
||||||
|
- Added `elemMatch` operator as well - `match` is too obscure #653
|
||||||
|
- Added support for progressive JPEG #486 #548
|
||||||
|
- Allow strings to be used in index creation #675
|
||||||
|
- Fixed EmbeddedDoc weakref proxy issue #592
|
||||||
|
- Fixed nested reference field distinct error #583
|
||||||
|
- Fixed change tracking on nested MapFields #539
|
||||||
|
- Dynamic fields in embedded documents now visible to queryset.only() / qs.exclude() #425 #507
|
||||||
|
- Add authentication_source option to register_connection #178 #464 #573 #580 #590
|
||||||
|
- Implemented equality between Documents and DBRefs #597
|
||||||
|
- Fixed ReferenceField inside nested ListFields dereferencing problem #368
|
||||||
|
- Added the ability to reload specific document fields #100
|
||||||
|
- Added db_alias support and fixes for custom map/reduce output #586
|
||||||
|
- post_save signal now has access to delta information about field changes #594 #589
|
||||||
|
- Don't query with $orderby for qs.get() #600
|
||||||
|
- Fix id shard key save issue #636
|
||||||
|
- Fixes issue with recursive embedded document errors #557
|
||||||
|
- Fix clear_changed_fields() clearing unsaved documents bug #602
|
||||||
|
- Removing support for Django 1.4.x, pymongo 2.5.x, pymongo 2.6.x.
|
||||||
|
- Removing support for Python < 2.6.6
|
||||||
|
- Fixed $maxDistance location for geoJSON $near queries with MongoDB 2.6+ #664
|
||||||
|
- QuerySet.modify() and Document.modify() methods to provide find_and_modify() like behaviour #677 #773
|
||||||
|
- Added support for the using() method on a queryset #676
|
||||||
|
- PYPY support #673
|
||||||
|
- Connection pooling #674
|
||||||
|
- Avoid to open all documents from cursors in an if stmt #655
|
||||||
|
- Ability to clear the ordering #657
|
||||||
|
- Raise NotUniqueError in Document.update() on pymongo.errors.DuplicateKeyError #626
|
||||||
|
- Slots - memory improvements #625
|
||||||
|
- Fixed incorrectly split a query key when it ends with "_" #619
|
||||||
|
- Geo docs updates #613
|
||||||
|
- Workaround a dateutil bug #608
|
||||||
|
- Conditional save for atomic-style operations #511
|
||||||
|
- Allow dynamic dictionary-style field access #559
|
||||||
|
- Increase email field length to accommodate new TLDs #726
|
||||||
|
- index_cls is ignored when deciding to set _cls as index prefix #733
|
||||||
|
- Make 'db' argument to connection optional #737
|
||||||
|
- Allow atomic update for the entire `DictField` #742
|
||||||
|
- Added MultiPointField, MultiLineField, MultiPolygonField
|
||||||
|
- Fix multiple connections aliases being rewritten #748
|
||||||
|
- Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791
|
||||||
|
- Make `in_bulk()` respect `no_dereference()` #775
|
||||||
|
- Handle None from model __str__; Fixes #753 #754
|
||||||
|
- _get_changed_fields fix for embedded documents with id field. #925
|
||||||
|
|
||||||
|
Changes in 0.8.7
|
||||||
|
================
|
||||||
|
- Calling reload on deleted / nonexistent documents raises DoesNotExist (#538)
|
||||||
|
- Stop ensure_indexes running on a secondaries (#555)
|
||||||
|
- Fix circular import issue with django auth (#531) (#545)
|
||||||
|
|
||||||
|
Changes in 0.8.6
|
||||||
|
================
|
||||||
|
- Fix django auth import (#531)
|
||||||
|
|
||||||
|
Changes in 0.8.5
|
||||||
|
================
|
||||||
|
- Fix multi level nested fields getting marked as changed (#523)
|
||||||
|
- Django 1.6 login fix (#522) (#527)
|
||||||
|
- Django 1.6 session fix (#509)
|
||||||
|
- EmbeddedDocument._instance is now set when setting the attribute (#506)
|
||||||
|
- Fixed EmbeddedDocument with ReferenceField equality issue (#502)
|
||||||
|
- Fixed GenericReferenceField serialization order (#499)
|
||||||
|
- Fixed count and none bug (#498)
|
||||||
|
- Fixed bug with .only() and DictField with digit keys (#496)
|
||||||
|
- Added user_permissions to Django User object (#491, #492)
|
||||||
|
- Fix updating Geo Location fields (#488)
|
||||||
|
- Fix handling invalid dict field value (#485)
|
||||||
|
- Added app_label to MongoUser (#484)
|
||||||
|
- Use defaults when host and port are passed as None (#483)
|
||||||
|
- Fixed distinct casting issue with ListField of EmbeddedDocuments (#470)
|
||||||
|
- Fixed Django 1.6 sessions (#454, #480)
|
||||||
|
|
||||||
|
Changes in 0.8.4
|
||||||
|
================
|
||||||
|
- Remove database name necessity in uri connection schema (#452)
|
||||||
|
- Fixed "$pull" semantics for nested ListFields (#447)
|
||||||
|
- Allow fields to be named the same as query operators (#445)
|
||||||
|
- Updated field filter logic - can now exclude subclass fields (#443)
|
||||||
|
- Fixed dereference issue with embedded listfield referencefields (#439)
|
||||||
|
- Fixed slice when using inheritance causing fields to be excluded (#437)
|
||||||
|
- Fixed ._get_db() attribute after a Document.switch_db() (#441)
|
||||||
|
- Dynamic Fields store and recompose Embedded Documents / Documents correctly (#449)
|
||||||
|
- Handle dynamic fieldnames that look like digits (#434)
|
||||||
|
- Added get_user_document and improve mongo_auth module (#423)
|
||||||
|
- Added str representation of GridFSProxy (#424)
|
||||||
|
- Update transform to handle docs erroneously passed to unset (#416)
|
||||||
|
- Fixed indexing - turn off _cls (#414)
|
||||||
|
- Fixed dereference threading issue in ComplexField.__get__ (#412)
|
||||||
|
- Fixed QuerySetNoCache.count() caching (#410)
|
||||||
|
- Don't follow references in _get_changed_fields (#422, #417)
|
||||||
|
- Allow args and kwargs to be passed through to_json (#420)
|
||||||
|
|
||||||
|
Changes in 0.8.3
|
||||||
|
================
|
||||||
|
- Fixed EmbeddedDocuments with `id` also storing `_id` (#402)
|
||||||
|
- Added get_proxy_object helper to filefields (#391)
|
||||||
|
- Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365)
|
||||||
|
- Fixed sum and average mapreduce dot notation support (#375, #376, #393)
|
||||||
|
- Fixed as_pymongo to return the id (#386)
|
||||||
|
- Document.select_related() now respects `db_alias` (#377)
|
||||||
|
- Reload uses shard_key if applicable (#384)
|
||||||
|
- Dynamic fields are ordered based on creation and stored in _fields_ordered (#396)
|
||||||
|
|
||||||
|
**Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3
|
||||||
|
|
||||||
|
- Fixed pickling dynamic documents `_dynamic_fields` (#387)
|
||||||
|
- Fixed ListField setslice and delslice dirty tracking (#390)
|
||||||
|
- Added Django 1.5 PY3 support (#392)
|
||||||
|
- Added match ($elemMatch) support for EmbeddedDocuments (#379)
|
||||||
|
- Fixed weakref being valid after reload (#374)
|
||||||
|
- Fixed queryset.get() respecting no_dereference (#373)
|
||||||
|
- Added full_result kwarg to update (#380)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Changes in 0.8.2
|
||||||
|
================
|
||||||
|
- Added compare_indexes helper (#361)
|
||||||
|
- Fixed cascading saves which weren't turned off as planned (#291)
|
||||||
|
- Fixed Datastructures so instances are a Document or EmbeddedDocument (#363)
|
||||||
|
- Improved cascading saves write performance (#361)
|
||||||
|
- Fixed ambiguity and differing behaviour regarding field defaults (#349)
|
||||||
|
- ImageFields now include PIL error messages if invalid error (#353)
|
||||||
|
- Added lock when calling doc.Delete() for when signals have no sender (#350)
|
||||||
|
- Reload forces read preference to be PRIMARY (#355)
|
||||||
|
- Querysets are now lest restrictive when querying duplicate fields (#332, #333)
|
||||||
|
- FileField now honouring db_alias (#341)
|
||||||
|
- Removed customised __set__ change tracking in ComplexBaseField (#344)
|
||||||
|
- Removed unused var in _get_changed_fields (#347)
|
||||||
|
- Added pre_save_post_validation signal (#345)
|
||||||
|
- DateTimeField now auto converts valid datetime isostrings into dates (#343)
|
||||||
|
- DateTimeField now uses dateutil for parsing if available (#343)
|
||||||
|
- Fixed Doc.objects(read_preference=X) not setting read preference (#352)
|
||||||
|
- Django session ttl index expiry fixed (#329)
|
||||||
|
- Fixed pickle.loads (#342)
|
||||||
|
- Documentation fixes
|
||||||
|
|
||||||
|
Changes in 0.8.1
|
||||||
|
================
|
||||||
|
- Fixed Python 2.6 django auth importlib issue (#326)
|
||||||
|
- Fixed pickle unsaved document regression (#327)
|
||||||
|
|
||||||
|
Changes in 0.8.0
================
- Fixed querying ReferenceField custom_id (#317)
- Fixed pickle issues with collections (#316)
- Added `get_next_value` preview for SequenceFields (#319)
- Added no_sub_classes context manager and queryset helper (#312)
- Querysets now utilise a local cache
- Changed __len__ behaviour in the queryset (#247, #311)
- Fixed querying string versions of ObjectIds issue with ReferenceField (#307)
- Added $setOnInsert support for upserts (#308)
- Upserts now possible with just query parameters (#309)
- Upserting is the only way to ensure docs are saved correctly (#306)
- Fixed register_delete_rule inheritance issue
- Fixed cloning of sliced querysets (#303)
- Fixed update_one write concern (#302)
- Updated minimum requirement for pymongo to 2.5
- Added support for new GeoJSON fields, indexes and queries (#299)
- If values can't be compared, mark as changed (#287)
- Ensure as_pymongo() and to_json honour only() and exclude() (#293)
- Document serialization uses field order to ensure a strict order is set (#296)
- DecimalField now stores as float not string (#289)
- UUIDField now stores as a binary by default (#292)
- Added Custom User Model for Django 1.5 (#285)
- Cascading saves now default to off (#291)
- ReferenceField now stores ObjectIds by default rather than DBRefs (#290)
- Added ImageField support for inline replacements (#86)
- Added SequenceField.set_next_value(value) helper (#159)
- Updated .only() behaviour - now, like exclude, it is chainable (#202)
- Added with_limit_and_skip support to count() (#235)
- Objects queryset manager now inherited (#256)
- Updated connection to use MongoClient (#262, #274)
- Fixed db_alias and inherited Documents (#143)
- Documentation update for document errors (#124)
- Deprecated `get_or_create` (#35)
- Updated inheritable objects created by upsert now contain _cls (#118)
- Added support for creating documents with embedded documents in a single operation (#6)
- Added to_json and from_json to Document (#1)
- Added to_json and from_json to QuerySet (#131)
- Updated index creation now tied to Document class (#102)
- Added none() to queryset (#127)
- Updated SequenceFields to allow post processing of the calculated counter value (#141)
- Added clean method to documents for pre validation data cleaning (#60)
- Added support for setting read preference at a query level (#157)
- Added _instance to EmbeddedDocuments pointing to the parent (#139)
- Inheritance is off by default (#122)
- Removed _types and just use _cls for inheritance (#148)
- Only allow QNode instances to be passed as query objects (#199)
- Dynamic fields are now validated on save (#153) (#154)
- Added support for multiple slices and made slicing chainable (#170) (#190) (#191)
- Fixed GridFSProxy __getattr__ behaviour (#196)
- Fixed Django timezone support (#151)
- Simplified Q objects, removed QueryTreeTransformerVisitor (#98) (#171)
- FileFields now copyable (#198)
- Querysets now return clones and are no longer edited in place (#56)
- Added support for $maxDistance (#179)
- Uses getlasterror to test created on updated saves (#163)
- Fixed inheritance and unique index creation (#140)
- Fixed reverse delete rule with inheritance (#197)
- Fixed validation for GenericReferences which haven't been dereferenced
- Added switch_db context manager (#106)
- Added switch_db method to document instances (#106)
- Added no_dereference context manager (#82) (#61)
- Added switch_collection context manager (#220)
- Added switch_collection method to document instances (#220)
- Added support for compound primary keys (#149) (#121)
- Fixed overriding objects with custom manager (#58)
- Added no_dereference method for querysets (#82) (#61)
- Undefined data should not override instance methods (#49)
- Added Django Group and Permission (#142)
- Added Doc class and pk to Validation messages (#69)
- Fixed Documents deleted via a queryset not calling any signals (#105)
- Added the "get_decoded" method to the MongoSession class (#216)
- Fixed invalid choices error bubbling (#214)
- Updated Save so it calls $set and $unset in a single operation (#211)
- Fixed inner queryset looping (#204)

Changes in 0.7.10
=================
- Fixed UnicodeEncodeError for dbref (#278)
- Allow construction using positional parameters (#268)
- Updated EmailField length to support long domains (#243)
- Added 64-bit integer support (#251)
- Added Django sessions TTL support (#224)
- Fixed issue with numerical keys in MapField(EmbeddedDocumentField()) (#240)
- Fixed clearing _changed_fields for complex nested embedded documents (#237, #239, #242)
- Added "id" back to _data dictionary (#255)
- Only mark a field as changed if the value has changed (#258)
- Explicitly check for Document instances when dereferencing (#261)
- Fixed order_by chaining issue (#265)
- Added dereference support for tuples (#250)
- Resolve field name to db field name when using distinct (#260, #264, #269)
- Added kwargs to doc.save to help interop with Django (#223, #270)
- Fixed cloning querysets in PY3
- Int fields no longer unset in save when changed to 0 (#272)
- Fixed ReferenceField query chaining bug (#254)

Changes in 0.7.9
================
- Better fix handling for old style _types
- Embedded SequenceFields follow collection naming convention

Changes in 0.7.8
================
- Fixed sequence fields in embedded documents (#166)
- Fixed query chaining with .order_by() (#176)
- Added optional encoding and collection config for Django sessions (#180, #181, #183)
- Fixed EmailField so extra validation can be added (#173, #174, #187)
- Fixed bulk inserts - can now handle custom pks (#192)
- Added as_pymongo method to return raw or cast results from pymongo (#193)

Changes in 0.7.7
================
- Fixed handling for old style _types

Changes in 0.7.6
================
- Unicode fix for repr (#133)
- Allow updates with match operators (#144)
- Updated URLField - the regex can now be overridden (#136)
- Allow Django AuthenticationBackends to work with Django user (hmarr/mongoengine#573)
- Fixed reload issue with ReferenceField where dbref=False (#138)

Changes in 0.7.5
================
- ReferenceFields with dbref=False use ObjectId instead of strings (#134)
  See ticket for upgrade notes (#134)

Changes in 0.7.4
================
- Fixed index inheritance issues - firmed up testcases (#123) (#125)

Changes in 0.7.3
================
- Reverted EmbeddedDocuments meta handling - inheritance can now be turned off (#119)

Changes in 0.7.2
================
- Update index spec generation so it's not destructive (#113)

Changes in 0.7.1
================
- Fixed index spec inheritance (#111)

Changes in 0.7.0
================
- Updated queryset.delete so you can use it with skip / limit (#107)
- Updated index creation allows kwargs to be passed through refs (#104)
- Fixed Q object merge edge case (#109)
- Fixed reloading on sharded documents (hmarr/mongoengine#569)
- Added NotUniqueError for duplicate keys (#62)
- Added custom collection / sequence naming for SequenceFields (#92)
- Fixed UnboundLocalError in composite index with pk field (#88)
- Updated ReferenceFields to optionally store ObjectId strings;
  this will become the default in 0.8 (#89)
- Added FutureWarning - save will default to `cascade=False` in 0.8
- Added example of indexing embedded document fields (#75)
- Fixed ImageField resizing when forcing size (#80)
- Added flexibility for fields handling bad data (#78)
- Embedded Documents no longer handle meta definitions
- Use weakref proxies in base lists / dicts (#74)
- Improved queryset filtering (hmarr/mongoengine#554)
- Fixed Dynamic Documents and Embedded Documents (hmarr/mongoengine#561)
- Fixed abstract classes and shard keys (#64)
- Fixed Python 2.5 support
- Added Python 3 support (thanks to Laine Heron)

Changes in 0.6.20
=================
- Added support for distinct and db_alias (#59)
- Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554)
- Fixed BinaryField lookup re (#48)

Changes in 0.6.19
=================
- Added Binary support to UUID (#47)
- Fixed MapField lookup for fields without declared lookups (#46)
- Fixed BinaryField python value issue (#48)
- Fixed SequenceField non numeric value lookup (#41)
- Fixed queryset manager issue (#52)
- Fixed FileField comparison (hmarr/mongoengine#547)

Changes in 0.6.18
=================
- Fixed recursion loading bug in _get_changed_fields

Changes in 0.6.17
=================
- Fixed issue with custom queryset manager expecting explicit variable names

Changes in 0.6.16
=================
- Fixed issue where db_alias wasn't inherited

Changes in 0.6.15
=================
- Updated validation error messages
- Added support for null / zero / false values in item_frequencies
- Fixed cascade save edge case
- Fixed geo index creation through reference fields
- Added support for args / kwargs when using @queryset_manager
- Fixed dereferencing lists with custom ids

Changes in 0.6.14
=================
- Fixed error dict with nested validation
- Fixed Int/Float fields and not equals None
- Exclude tests from installation
- Allow tuples for index meta
- Fixed use of str in instance checks
- Fixed unicode support in transform update
- Added support for add_to_set and each

Changes in 0.6.13
=================
- Fixed EmbeddedDocument db_field validation issue
- Fixed StringField unicode issue
- Fixed __repr__ modifying the cursor

Changes in 0.6.12
=================
- Fixes scalar lookups for primary_key
- Fixes error with _delta handling DBRefs

Changes in 0.6.11
=================
- Fixed inconsistency handling None values field attrs
- Fixed map_field embedded db_field issue
- Fixed .save() _delta issue with DbRefs
- Fixed Django TestCase
- Added cmp to Embedded Document
- Added PULL reverse_delete_rule
- Fixed CASCADE delete bug
- Fixed db_field data load error
- Fixed recursive save with FileField

Changes in 0.6.10
=================
- Fixed basedict / baselist to return super(..)
- Promoted BaseDynamicField to DynamicField

Changes in 0.6.9
================
- Fixed sparse indexes on inherited docs
- Removed FileField auto deletion, needs more work maybe 0.7

Changes in 0.6.8
================
- Fixed FileField losing reference when no default set
- Removed possible race condition from FileField (grid_file)
- Added assignment to save, can now do: `b = MyDoc(**kwargs).save()`
- Added support for pull operations on nested EmbeddedDocuments
- Added support for choices with GenericReferenceFields
- Added support for choices with GenericEmbeddedDocumentFields
- Fixed Django 1.4 sessions first save data loss
- FileField now automatically deletes files on .delete()
- Fix for GenericReference to_mongo method
- Fixed connection regression
- Updated Django User document, now allows inheritance

Changes in 0.6.7
================
- Fixed indexing on '_id' or 'pk' or 'id'

@@ -47,7 +624,7 @@ Changes in 0.6.1
- Fix for replicaSet connections

Changes in 0.6
==============
- Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
- Added support for covered indexes when inheritance is off

@@ -135,8 +712,8 @@ Changes in v0.5
- Updated default collection naming convention
- Added Document Mixin support
- Fixed queryset __repr__ mid iteration
- Added hint() support, so can tell Mongo the proper index to use for the query
- Fixed issue with inconsistent setting of _cls breaking inherited referencing
- Added help_text and verbose_name to fields to help with some form libs
- Updated item_frequencies to handle embedded document lookups
- Added delta tracking now only sets / unsets explicitly changed fields

@@ -17,6 +17,10 @@ class Post(Document):
    tags = ListField(StringField(max_length=30))
    comments = ListField(EmbeddedDocumentField(Comment))

    # bugfix
    meta = {'allow_inheritance': True}


class TextPost(Post):
    content = StringField()

@@ -45,7 +49,8 @@ print 'ALL POSTS'
print
for post in Post.objects:
    print post.title
    #print '=' * post.title.count()
    print "=" * 20

    if isinstance(post, TextPost):
        print post.content


docs/conf.py
@@ -16,7 +16,7 @@ import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))

# -- General configuration -----------------------------------------------------

@@ -38,7 +38,7 @@ master_doc = 'index'

# General information about the project.
project = u'MongoEngine'
copyright = u'2009, MongoEngine Authors'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the

@@ -92,7 +92,7 @@ pygments_style = 'sphinx'

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'sphinx_rtd_theme'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the

@@ -116,7 +116,7 @@ html_theme_path = ['_themes']
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "favicon.ico"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,

@@ -132,7 +132,11 @@ html_theme_path = ['_themes']
html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
html_sidebars = {
    'index': ['globaltoc.html', 'searchbox.html'],
    '**': ['localtoc.html', 'relations.html', 'searchbox.html']
}

# Additional templates that should be rendered to pages, maps page names to
# template names.

@@ -173,8 +177,8 @@ latex_paper_size = 'a4'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'MongoEngine.tex', 'MongoEngine Documentation',
     'Ross Lawley', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of

@@ -193,3 +197,9 @@ latex_documents = [

# If false, no module index is generated.
#latex_use_modindex = True

autoclass_content = 'both'

html_theme_options = dict(
    canonical_url='http://docs.mongoengine.org/en/latest/'
)

@@ -1,90 +1,19 @@
==============
Django Support
==============

.. note:: Django support has been split from the main MongoEngine
   repository. The *legacy* Django extension may be found bundled with the
   0.9 release of MongoEngine.


Help Wanted!
------------

The MongoEngine team is looking for help contributing and maintaining a new
Django extension for MongoEngine! If you have Django experience and would like
to help contribute to the project, please get in touch on the
`mailing list <http://groups.google.com/group/mongoengine-users>`_ or by
simply contributing on
`GitHub <https://github.com/MongoEngine/django-mongoengine>`_.

@@ -6,60 +6,134 @@ Connecting to MongoDB

To connect to a running instance of :program:`mongod`, use the
:func:`~mongoengine.connect` function. The first argument is the name of the
database to connect to::

    from mongoengine import connect
    connect('project1')

By default, MongoEngine assumes that the :program:`mongod` instance is running
on **localhost** on port **27017**. If MongoDB is running elsewhere, you should
provide the :attr:`host` and :attr:`port` arguments to
:func:`~mongoengine.connect`::

    connect('project1', host='192.168.1.35', port=12345)

If the database requires authentication, :attr:`username` and :attr:`password`
arguments should be provided::

    connect('project1', username='webapp', password='pwd123')

URI style connections are also supported -- just supply the URI as
the :attr:`host` to
:func:`~mongoengine.connect`::

    connect('project1', host='mongodb://localhost/database_name')

.. note:: The database, username and password from the URI string override the
   corresponding parameters in :func:`~mongoengine.connect`: ::

       connect(
           db='test',
           username='user',
           password='12345',
           host='mongodb://admin:qwerty@localhost/production'
       )

   will establish a connection to the ``production`` database using the
   ``admin`` username and ``qwerty`` password.

Replica Sets
============

MongoEngine supports connecting to replica sets::

    from mongoengine import connect

    # Regular connect
    connect('dbname', replicaset='rs-name')

    # MongoDB URI-style connect
    connect(host='mongodb://localhost/dbname?replicaSet=rs-name')

Read preferences are supported through the connection or via individual
queries by passing the read_preference ::

    Bar.objects().read_preference(ReadPreference.PRIMARY)
    Bar.objects(read_preference=ReadPreference.PRIMARY)

Multiple Databases
==================

To use multiple databases you can use :func:`~mongoengine.connect` and provide
an `alias` name for the connection - if no `alias` is provided then "default"
is used.

In the background this uses :func:`~mongoengine.register_connection` to
store the data and you can register all aliases up front if required.
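
For example, a small sketch of registering aliases up front - assuming the
two-argument ``register_connection(alias, db_name)`` form; the alias and
database names here are illustrative, not taken from the guide::

    from mongoengine import register_connection

    # Register aliases up front; documents can refer to them via db_alias.
    register_connection('user-db', 'project1_users')
    register_connection('book-db', 'project1_books')
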
Individual documents can also support multiple databases by providing a
`db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef`
objects to point across databases and collections. Below is an example schema,
using 3 different databases to store data::

    class User(Document):
        name = StringField()

        meta = {'db_alias': 'user-db'}

    class Book(Document):
        name = StringField()

        meta = {'db_alias': 'book-db'}

    class AuthorBooks(Document):
        author = ReferenceField(User)
        book = ReferenceField(Book)

        meta = {'db_alias': 'users-books-db'}


Context Managers
================
Sometimes you may want to switch the database or collection to query against.
For example, archiving older data into a separate database for performance
reasons or writing functions that dynamically choose collections to write
a document to.

Switch Database
---------------
The :class:`~mongoengine.context_managers.switch_db` context manager allows
you to change the database alias for a given class allowing quick and easy
access to the same User document across databases::

    from mongoengine.context_managers import switch_db

    class User(Document):
        name = StringField()

        meta = {'db_alias': 'user-db'}

    with switch_db(User, 'archive-user-db') as User:
        User(name='Ross').save()  # Saves to the 'archive-user-db'

Switch Collection
-----------------
The :class:`~mongoengine.context_managers.switch_collection` context manager
allows you to change the collection for a given class allowing quick and easy
access to the same Group document across collections::

    from mongoengine.context_managers import switch_collection

    class Group(Document):
        name = StringField()

    Group(name='test').save()  # Saves in the default db

    with switch_collection(Group, 'group2000') as Group:
        Group(name='hello Group 2000 collection!').save()  # Saves in the group2000 collection

.. note:: Make sure any aliases have been registered with
   :func:`~mongoengine.register_connection` or :func:`~mongoengine.connect`
   before using the context manager.

@@ -4,7 +4,7 @@ Defining documents
In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When
working with relational databases, rows are stored in **tables**, which have a
strict **schema** that the rows follow. MongoDB stores documents in
**collections** rather than tables --- the principal difference is that no schema
is enforced at a database level.

Defining a document's schema
@@ -24,9 +24,12 @@ objects** as class attributes to the document class::

    title = StringField(max_length=200, required=True)
    date_modified = DateTimeField(default=datetime.datetime.now)

As BSON (the binary format for storing data in MongoDB) is order dependent,
documents are serialized based on their field order.

Dynamic document schemas
========================
One of the benefits of MongoDB is dynamic schemas for a collection; whilst data
should be planned and organised (after all, explicit is better than implicit!),
there are scenarios where having dynamic / expando style documents is desirable.

@@ -47,10 +50,11 @@ be saved ::

    >>> Page.objects(tags='mongoengine').count()
    >>> 1

.. note::

   There is one caveat on Dynamic Documents: fields cannot start with `_`.

   Dynamic fields are stored in creation order *after* any declared fields.
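
The guide's own ``Page`` example sits in the part of the hunk that is elided
above; as a rough sketch of the idea (the class and field names here are
illustrative), a dynamic document accepts attributes that were never declared::

    from mongoengine import DynamicDocument, StringField, connect

    connect('example-db')  # illustrative database name

    class Page(DynamicDocument):
        title = StringField(max_length=200, required=True)

    # Attributes that were never declared on the class are accepted and saved.
    page = Page(title='Using MongoEngine')
    page.tags = ['mongodb', 'mongoengine']  # ad-hoc, undeclared field
    page.save()
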
Fields
======
@@ -62,28 +66,38 @@ not provided. Default values may optionally be a callable, which will be called
to retrieve the value (such as in the above example). The field types available
are as follows:

* :class:`~mongoengine.fields.BinaryField`
* :class:`~mongoengine.fields.BooleanField`
* :class:`~mongoengine.fields.ComplexDateTimeField`
* :class:`~mongoengine.fields.DateTimeField`
* :class:`~mongoengine.fields.DecimalField`
* :class:`~mongoengine.fields.DictField`
* :class:`~mongoengine.fields.DynamicField`
* :class:`~mongoengine.fields.EmailField`
* :class:`~mongoengine.fields.EmbeddedDocumentField`
* :class:`~mongoengine.fields.EmbeddedDocumentListField`
* :class:`~mongoengine.fields.FileField`
* :class:`~mongoengine.fields.FloatField`
* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
* :class:`~mongoengine.fields.GenericReferenceField`
* :class:`~mongoengine.fields.GeoPointField`
* :class:`~mongoengine.fields.ImageField`
* :class:`~mongoengine.fields.IntField`
* :class:`~mongoengine.fields.ListField`
* :class:`~mongoengine.fields.MapField`
* :class:`~mongoengine.fields.ObjectIdField`
* :class:`~mongoengine.fields.ReferenceField`
* :class:`~mongoengine.fields.SequenceField`
* :class:`~mongoengine.fields.SortedListField`
* :class:`~mongoengine.fields.StringField`
* :class:`~mongoengine.fields.URLField`
* :class:`~mongoengine.fields.UUIDField`
* :class:`~mongoengine.fields.PointField`
* :class:`~mongoengine.fields.LineStringField`
* :class:`~mongoengine.fields.PolygonField`
* :class:`~mongoengine.fields.MultiPointField`
* :class:`~mongoengine.fields.MultiLineStringField`
* :class:`~mongoengine.fields.MultiPolygonField`

Field arguments
---------------
@@ -93,9 +107,6 @@ arguments can be set on all fields:
:attr:`db_field` (Default: None)
    The MongoDB field name.

:attr:`required` (Default: False)
    If set to True and the field is not set on the document instance, a
    :class:`~mongoengine.ValidationError` will be raised when the document is
@@ -104,10 +115,10 @@ arguments can be set on all fields:
:attr:`default` (Default: None)
    A value to use when no value is set for this field.

    The definition of default parameters follows `the general rules on Python
    <http://docs.python.org/reference/compound_stmts.html#function-definitions>`__,
    which means that some care should be taken when dealing with default mutable objects
    (like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`)::

        class ExampleFirst(Document):
            # Default an empty list
@@ -122,6 +133,7 @@ arguments can be set on all fields:
            # instead to just an object
            values = ListField(IntField(), default=[1,2,3])

    .. note:: Unsetting a field with a default value will revert back to the default.

:attr:`unique` (Default: False)
    When True, no documents in the collection will have the same value for this
@@ -132,10 +144,13 @@ arguments can be set on all fields:
    field, will not have two documents in the collection with the same value.

:attr:`primary_key` (Default: False)
    When True, use this field as a primary key for the collection. `DictField`
    and `EmbeddedDocuments` both support being the primary key for a document.

    .. note:: If set, this field is also accessible through the `pk` field.

:attr:`choices` (Default: None)
    An iterable (e.g. list, tuple or set) of choices to which the value of this
    field should be limited.

    Can either be nested tuples of value (stored in mongo) and a
@@ -158,18 +173,18 @@ arguments can be set on all fields:
        class Shirt(Document):
            size = StringField(max_length=3, choices=SIZE)

:attr:`**kwargs` (Optional)
    You can supply additional metadata as arbitrary additional keyword
    arguments. You cannot override existing attributes, however. Common
    choices include `help_text` and `verbose_name`, commonly used by form and
    widget libraries.
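
The ``SIZE`` constant used in the ``Shirt`` snippet above is defined in a part
of the guide this hunk elides. One plausible shape for it (illustrative, not
verbatim from the guide) pairs each stored value with a human readable label;
a ``primary_key`` field is shown alongside for completeness::

    from mongoengine import Document, StringField

    # Illustrative choices: (stored value, human readable label) pairs.
    SIZE = (('S', 'Small'),
            ('M', 'Medium'),
            ('L', 'Large'))

    class Shirt(Document):
        sku = StringField(primary_key=True)   # also available as shirt.pk
        size = StringField(max_length=3, choices=SIZE)

    shirt = Shirt(sku='SHIRT-001', size='M')
    shirt.validate()  # raises ValidationError if size is not one of the choices
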
List fields
-----------
MongoDB allows storing lists of items. To add a list of items to a
:class:`~mongoengine.Document`, use the :class:`~mongoengine.fields.ListField` field
type. :class:`~mongoengine.fields.ListField` takes another field object as its first
argument, which specifies which type elements may be stored within the list::

    class Page(Document):
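
The rest of the ``Page`` definition falls in the elided part of this hunk; a
minimal sketch of the idea (the ``tags`` field name and its length limit are
illustrative) would be::

    from mongoengine import Document, StringField, ListField

    class Page(Document):
        # A list whose elements must be strings of at most 50 characters.
        tags = ListField(StringField(max_length=50))

    Page(tags=['mongodb', 'mongoengine']).save()
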
@@ -187,7 +202,7 @@ inherit from :class:`~mongoengine.EmbeddedDocument` rather than

        content = StringField()

To embed the document within another document, use the
:class:`~mongoengine.fields.EmbeddedDocumentField` field type, providing the embedded
document class as the first argument::

    class Page(Document):
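
Again the hunk cuts the example short; putting the two halves together (the
``Comment`` class here is a guess based on the tutorial code earlier in this
diff), embedding looks roughly like::

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentField, ListField, StringField)

    class Comment(EmbeddedDocument):
        content = StringField()

    class Page(Document):
        comments = ListField(EmbeddedDocumentField(Comment))

    comment = Comment(content='Nice page!')
    Page(comments=[comment]).save()  # the comment is stored inside the page document
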
@@ -199,10 +214,10 @@ document class as the first argument::

Dictionary Fields
-----------------
Often, an embedded document may be used instead of a dictionary – generally
embedded documents are recommended as dictionaries don’t support validation
or custom field types. However, sometimes you will not know the structure of what you want to
store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate::

    class SurveyResponse(Document):
        date = DateTimeField()
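
The remaining ``SurveyResponse`` fields are elided by the hunk; a plausible
completion (the ``user`` and ``answers`` names are illustrative) is::

    import datetime

    from mongoengine import Document, DateTimeField, StringField, DictField

    class SurveyResponse(Document):
        date = DateTimeField(default=datetime.datetime.now)
        user = StringField()
        answers = DictField()  # arbitrary, schema-less key/value data

    survey_response = SurveyResponse(user='Bob',
                                     answers={'q1': 'yes', 'q2': 42})
    survey_response.save()
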
@@ -220,7 +235,7 @@ other objects, so are the most flexible field type available.
Reference fields
----------------
References may be stored to other documents in the database using the
:class:`~mongoengine.fields.ReferenceField`. Pass in another document class as the
first argument to the constructor, then simply assign document objects to the
field::
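
The example itself is elided at this hunk boundary; a minimal sketch of the
pattern just described (class and field names are illustrative, not taken from
the guide)::

    from mongoengine import Document, StringField, ReferenceField

    class User(Document):
        name = StringField()

    class Page(Document):
        content = StringField()
        author = ReferenceField(User)

    bob = User(name='Bob Jones').save()
    Page(content='Test Page', author=bob).save()
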
@@ -241,9 +256,9 @@ field::
The :class:`User` object is automatically turned into a reference behind the
scenes, and dereferenced when the :class:`Page` object is retrieved.

To add a :class:`~mongoengine.fields.ReferenceField` that references the document
being defined, use the string ``'self'`` in place of the document class as the
argument to :class:`~mongoengine.fields.ReferenceField`'s constructor. To reference a
document that has not yet been defined, use the name of the undefined document
as the constructor's argument::
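
A sketch of both cases (the names are illustrative; the stray
``content = StringField()`` line after the next hunk marker is the surviving
tail of the guide's own version of this example)::

    from mongoengine import Document, StringField, ReferenceField

    class Employee(Document):
        name = StringField()
        boss = ReferenceField('self')                 # reference to the same class
        profile_page = ReferenceField('ProfilePage')  # class defined further down

    class ProfilePage(Document):
        content = StringField()
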
@@ -256,6 +271,41 @@ as the constructor's argument::

        content = StringField()


.. _one-to-many-with-listfields:

One to Many with ListFields
'''''''''''''''''''''''''''

If you are implementing a one to many relationship via a list of references,
then the references are stored as DBRefs and to query you need to pass an
instance of the object to the query::

    class User(Document):
        name = StringField()

    class Page(Document):
        content = StringField()
        authors = ListField(ReferenceField(User))

    bob = User(name="Bob Jones").save()
    john = User(name="John Smith").save()

    Page(content="Test Page", authors=[bob, john]).save()
    Page(content="Another Page", authors=[john]).save()

    # Find all pages Bob authored
    Page.objects(authors__in=[bob])

    # Find all pages that both Bob and John have authored
    Page.objects(authors__all=[bob, john])

    # Remove Bob from the authors for a page.
    Page.objects(id='...').update_one(pull__authors=bob)

    # Add John to the authors for a page.
    Page.objects(id='...').update_one(push__authors=john)


Dealing with deletion of referred documents
'''''''''''''''''''''''''''''''''''''''''''
By default, MongoDB doesn't check the integrity of your data, so deleting
@@ -266,12 +316,12 @@ reference with a delete rule specification. A delete rule is specified by
supplying the :attr:`reverse_delete_rule` attributes on the
:class:`ReferenceField` definition, like this::

    class ProfilePage(Document):
        ...
        employee = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE)

The declaration in this example means that when an :class:`Employee` object is
removed, the :class:`ProfilePage` that references that employee is removed as
well. If a whole batch of employees is removed, all profile pages that are
linked are removed as well.

@@ -287,8 +337,12 @@ Its value can take any of the following constants:
    Any object's fields still referring to the object being deleted are removed
    (using MongoDB's "unset" operation), effectively nullifying the relationship.
:const:`mongoengine.CASCADE`
    Any object containing fields that are referring to the object being deleted
    are deleted first.
:const:`mongoengine.PULL`
    Removes the reference to the object (using MongoDB's "pull" operation)
    from any object's fields of
    :class:`~mongoengine.fields.ListField` (:class:`~mongoengine.fields.ReferenceField`).


.. warning::
@@ -307,11 +361,10 @@ Its value can take any of the following constants:
   In Django, be sure to put all apps that have such delete rule declarations in
   their :file:`models.py` in the :const:`INSTALLED_APPS` tuple.


Generic reference fields
''''''''''''''''''''''''
A second kind of reference field also exists,
:class:`~mongoengine.fields.GenericReferenceField`. This allows you to reference any
kind of :class:`~mongoengine.Document`, and hence doesn't take a
:class:`~mongoengine.Document` subclass as a constructor argument::
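
The guide's example is elided at this point; a rough sketch of the idea (the
model names are illustrative)::

    from mongoengine import Document, StringField, GenericReferenceField

    class Link(Document):
        url = StringField()

    class Post(Document):
        title = StringField()

    class Bookmark(Document):
        # Can point at a Link, a Post, or any other Document subclass.
        bookmark_object = GenericReferenceField()

    link = Link(url='http://www.mongoengine.org/').save()
    Bookmark(bookmark_object=link).save()

    post = Post(title='Using MongoEngine').save()
    Bookmark(bookmark_object=post).save()
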
@@ -335,18 +388,18 @@ kind of :class:`~mongoengine.Document`, and hence doesn't take a

.. note::

   Using :class:`~mongoengine.fields.GenericReferenceField`\ s is slightly less
   efficient than the standard :class:`~mongoengine.fields.ReferenceField`\ s, so if
   you will only be referencing one document type, prefer the standard
   :class:`~mongoengine.fields.ReferenceField`.

Uniqueness constraints
----------------------
MongoEngine allows you to specify that a field should be unique across a
collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's
constructor. If you try to save a document that has the same value for a unique
field as a document that is already in the database, a
:class:`~mongoengine.NotUniqueError` will be raised. You may also specify
multi-field uniqueness constraints by using :attr:`unique_with`, which may be
either a single field name, or a list or tuple of field names::
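
The example itself is elided by the next hunk; a minimal sketch (the field
names are illustrative)::

    from mongoengine import Document, StringField

    class User(Document):
        username = StringField(unique=True)
        first_name = StringField()
        # Unique together with first_name: no two users may share both values.
        last_name = StringField(unique_with='first_name')
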
@@ -358,7 +411,7 @@ either a single field name, or a list or tuple of field names::
Skipping Document validation on save
------------------------------------
You can also skip the whole document validation process by setting
``validate=False`` when calling the :meth:`~mongoengine.document.Document.save`
method::

    class Recipient(Document):
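
The remainder of the snippet is elided; a sketch of the pattern (the
``Recipient`` field shown here is illustrative)::

    from mongoengine import Document, EmailField

    class Recipient(Document):
        email = EmailField(required=True)

    recipient = Recipient(email='not-a-valid-address')
    # recipient.save()              # this would raise a ValidationError
    recipient.save(validate=False)  # skips validation and saves anyway
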
@@ -373,7 +426,7 @@ Document collections
====================
Document classes that inherit **directly** from :class:`~mongoengine.Document`
will have their own **collection** in the database. The name of the collection
is by default the name of the class, converted to lowercase (so in the example
above, the collection would be called `page`). If you need to change the name
of the collection (e.g. to use MongoEngine with an existing database), then
create a class dictionary attribute called :attr:`meta` on your document, and
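
The rest of that sentence falls in the elided part of the hunk; the pattern it
describes is a ``collection`` key inside ``meta``, sketched here with an
illustrative collection name::

    from mongoengine import Document, StringField

    class Page(Document):
        title = StringField()

        # Store these documents in an existing 'cmsPage' collection
        # instead of the default 'page' collection.
        meta = {'collection': 'cmsPage'}
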
@@ -390,8 +443,10 @@ A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying
|
|||||||
:attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary.
|
:attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary.
|
||||||
:attr:`max_documents` is the maximum number of documents that is allowed to be
|
:attr:`max_documents` is the maximum number of documents that is allowed to be
|
||||||
stored in the collection, and :attr:`max_size` is the maximum size of the
|
stored in the collection, and :attr:`max_size` is the maximum size of the
|
||||||
collection in bytes. If :attr:`max_size` is not specified and
|
collection in bytes. :attr:`max_size` is rounded up to the next multiple of 256
|
||||||
:attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB).
|
by MongoDB internally (and by MongoEngine beforehand). Use a multiple of 256 yourself to
|
||||||
|
avoid confusion. If :attr:`max_size` is not specified and
|
||||||
|
:attr:`max_documents` is, :attr:`max_size` defaults to 10485760 bytes (10MB).
|
||||||
The following example shows a :class:`Log` document that will be limited to
|
The following example shows a :class:`Log` document that will be limited to
|
||||||
1000 entries and 2MB of disk space::
|
1000 entries and 2MB of disk space::
|
||||||
|
|
||||||
@@ -399,21 +454,40 @@ The following example shows a :class:`Log` document that will be limited to
|
|||||||
ip_address = StringField()
|
ip_address = StringField()
|
||||||
meta = {'max_documents': 1000, 'max_size': 2000000}
|
meta = {'max_documents': 1000, 'max_size': 2000000}
|
||||||
|
|
||||||
|
.. defining-indexes_
|
||||||
|
|
||||||
Indexes
|
Indexes
|
||||||
=======
|
=======
|
||||||
|
|
||||||
You can specify indexes on collections to make querying faster. This is done
|
You can specify indexes on collections to make querying faster. This is done
|
||||||
by creating a list of index specifications called :attr:`indexes` in the
|
by creating a list of index specifications called :attr:`indexes` in the
|
||||||
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
|
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
|
||||||
either be a single field name, a tuple containing multiple field names, or a
|
either be a single field name, a tuple containing multiple field names, or a
|
||||||
dictionary containing a full index definition. A direction may be specified on
|
dictionary containing a full index definition.
|
||||||
fields by prefixing the field name with a **+** or a **-** sign. Note that
|
|
||||||
direction only matters on multi-field indexes. ::
|
A direction may be specified on fields by prefixing the field name with a
|
||||||
|
**+** (for ascending) or a **-** sign (for descending). Note that direction
|
||||||
|
only matters on multi-field indexes. Text indexes may be specified by prefixing
|
||||||
|
the field name with a **$**. Hashed indexes may be specified by prefixing
|
||||||
|
the field name with a **#**::
|
||||||
|
|
||||||
class Page(Document):
|
class Page(Document):
|
||||||
|
category = IntField()
|
||||||
title = StringField()
|
title = StringField()
|
||||||
rating = StringField()
|
rating = StringField()
|
||||||
|
created = DateTimeField()
|
||||||
meta = {
|
meta = {
|
||||||
'indexes': ['title', ('title', '-rating')]
|
'indexes': [
|
||||||
|
'title',
|
||||||
|
'$title', # text index
|
||||||
|
'#title', # hashed index
|
||||||
|
('title', '-rating'),
|
||||||
|
('category', '_cls'),
|
||||||
|
{
|
||||||
|
'fields': ['created'],
|
||||||
|
'expireAfterSeconds': 3600
|
||||||
|
}
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
If a dictionary is passed then the following options are available:
|
If a dictionary is passed then the following options are available:
|
||||||
@@ -421,31 +495,109 @@ If a dictionary is passed then the following options are available:
|
|||||||
:attr:`fields` (Default: None)
|
:attr:`fields` (Default: None)
|
||||||
The fields to index. Specified in the same format as described above.
|
The fields to index. Specified in the same format as described above.
|
||||||
|
|
||||||
:attr:`types` (Default: True)
|
:attr:`cls` (Default: True)
|
||||||
Whether the index should have the :attr:`_types` field added automatically
|
If you have polymorphic models that inherit and have
|
||||||
to the start of the index.
|
:attr:`allow_inheritance` turned on, you can configure whether the index
|
||||||
|
should have the :attr:`_cls` field added automatically to the start of the
|
||||||
|
index.
|
||||||
|
|
||||||
:attr:`sparse` (Default: False)
|
:attr:`sparse` (Default: False)
|
||||||
Whether the index should be sparse.
|
Whether the index should be sparse.
|
||||||
|
|
||||||
:attr:`unique` (Default: False)
|
:attr:`unique` (Default: False)
|
||||||
Whether the index should be sparse.
|
Whether the index should be unique.
|
||||||
|
|
||||||
.. warning::
|
:attr:`expireAfterSeconds` (Optional)
|
||||||
|
Allows you to automatically expire data from a collection by setting the
|
||||||
|
time in seconds to expire a field.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
Inheritance adds extra fields and indices; see :ref:`document-inheritance`.
|
||||||
|
|
||||||
|
Global index default options
|
||||||
|
----------------------------
|
||||||
|
|
||||||
|
There are a few top level defaults for all indexes that can be set::
|
||||||
|
|
||||||
|
class Page(Document):
|
||||||
|
title = StringField()
|
||||||
|
rating = StringField()
|
||||||
|
meta = {
|
||||||
|
'index_options': {},
|
||||||
|
'index_background': True,
|
||||||
|
'index_drop_dups': True,
|
||||||
|
'index_cls': False
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
Inheritance adds extra indices.
|
:attr:`index_options` (Optional)
|
||||||
If don't need inheritance for a document turn inheritance off - see :ref:`document-inheritance`.
|
Set any default index options - see the `full options list <http://docs.mongodb.org/manual/reference/method/db.collection.ensureIndex/#db.collection.ensureIndex>`_
|
||||||
|
|
||||||
|
:attr:`index_background` (Optional)
|
||||||
|
Set the default value for whether an index should be built in the background
|
||||||
|
|
||||||
|
:attr:`index_cls` (Optional)
|
||||||
|
A way to turn off a specific index for _cls.
|
||||||
|
|
||||||
|
:attr:`index_drop_dups` (Optional)
|
||||||
|
Set the default value for whether an index should drop duplicates
|
||||||
|
|
||||||
|
.. note:: Since MongoDB 3.0 drop_dups is no longer supported; it raises a warning
|
||||||
|
and has no effect.
|
||||||
|
|
||||||
|
|
||||||
|
Compound Indexes and Indexing sub documents
|
||||||
|
-------------------------------------------
|
||||||
|
|
||||||
|
Compound indexes can be created by adding the Embedded field or dictionary
|
||||||
|
field name to the index definition.
|
||||||
|
|
||||||
|
Sometimes it is more efficient to index only parts of embedded / dictionary fields;
|
||||||
|
in this case, use 'dot' notation to identify the value to index, e.g. `rank.title`.
|
||||||
|
|
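A short sketch of both approaches, assuming a hypothetical ``Person`` document with an embedded ``Rank``::

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentField, IntField, StringField)

    class Rank(EmbeddedDocument):
        title = StringField()
        score = IntField()

    class Person(Document):
        name = StringField()
        rank = EmbeddedDocumentField(Rank)

        meta = {
            'indexes': [
                ('name', 'rank'),   # compound index including the embedded field
                'rank.title',       # index only the embedded 'title' value
            ]
        }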
||||||
|
.. _geospatial-indexes:
|
||||||
|
|
||||||
Geospatial indexes
|
Geospatial indexes
|
||||||
---------------------------
|
------------------
|
||||||
|
|
||||||
|
The best geo index for mongodb is the new "2dsphere", which has an improved
|
||||||
|
spherical model and provides better performance and more options when querying.
|
||||||
|
The following fields will explicitly add a "2dsphere" index:
|
||||||
|
|
||||||
|
- :class:`~mongoengine.fields.PointField`
|
||||||
|
- :class:`~mongoengine.fields.LineStringField`
|
||||||
|
- :class:`~mongoengine.fields.PolygonField`
|
||||||
|
- :class:`~mongoengine.fields.MultiPointField`
|
||||||
|
- :class:`~mongoengine.fields.MultiLineStringField`
|
||||||
|
- :class:`~mongoengine.fields.MultiPolygonField`
|
||||||
|
|
||||||
|
As "2dsphere" indexes can be part of a compound index, you may not want the
|
||||||
|
automatic index but would prefer a compound index. In this example we turn off
|
||||||
|
auto indexing and explicitly declare a compound index on ``location`` and ``datetime``::
|
||||||
|
|
||||||
|
class Log(Document):
|
||||||
|
location = PointField(auto_index=False)
|
||||||
|
datetime = DateTimeField()
|
||||||
|
|
||||||
|
meta = {
|
||||||
|
'indexes': [[("location", "2dsphere"), ("datetime", 1)]]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
Pre MongoDB 2.4 Geo
|
||||||
|
'''''''''''''''''''
|
||||||
|
|
||||||
|
.. note:: For MongoDB < 2.4 this is still current; however, the new 2dsphere
|
||||||
|
index is a big improvement over the previous 2D model - so upgrading is
|
||||||
|
advised.
|
||||||
|
|
||||||
Geospatial indexes will be automatically created for all
|
Geospatial indexes will be automatically created for all
|
||||||
:class:`~mongoengine.GeoPointField`\ s
|
:class:`~mongoengine.fields.GeoPointField`\ s
|
||||||
|
|
||||||
It is also possible to explicitly define geospatial indexes. This is
|
It is also possible to explicitly define geospatial indexes. This is
|
||||||
useful if you need to define a geospatial index on a subfield of a
|
useful if you need to define a geospatial index on a subfield of a
|
||||||
:class:`~mongoengine.DictField` or a custom field that contains a
|
:class:`~mongoengine.fields.DictField` or a custom field that contains a
|
||||||
point. To create a geospatial index you must prefix the field with the
|
point. To create a geospatial index you must prefix the field with the
|
||||||
***** sign. ::
|
***** sign. ::
|
||||||
|
|
||||||
@@ -457,6 +609,35 @@ point. To create a geospatial index you must prefix the field with the
|
|||||||
],
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Time To Live indexes
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
A special index type that allows you to automatically expire data from a
|
||||||
|
collection after a given period. See the official
|
||||||
|
`ttl <http://docs.mongodb.org/manual/tutorial/expire-data/#expire-data-from-collections-by-setting-ttl>`_
|
||||||
|
documentation for more information. A common use case might be session data::
|
||||||
|
|
||||||
|
class Session(Document):
|
||||||
|
created = DateTimeField(default=datetime.now)
|
||||||
|
meta = {
|
||||||
|
'indexes': [
|
||||||
|
{'fields': ['created'], 'expireAfterSeconds': 3600}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
.. warning:: TTL indexes happen on the MongoDB server and not in the application
|
||||||
|
code; therefore, no signals will be fired on document deletion.
|
||||||
|
If you need signals to be fired on deletion, then you must handle the
|
||||||
|
deletion of Documents in your application code.
|
||||||
|
|
||||||
|
Comparing Indexes
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
Use :func:`mongoengine.Document.compare_indexes` to compare actual indexes in
|
||||||
|
the database to those that your document definitions define. This is useful
|
||||||
|
for maintenance purposes and ensuring you have the correct indexes for your
|
||||||
|
schema.
|
||||||
|
|
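For illustration, a minimal sketch of how this could be used from a maintenance script (the ``missing``/``extra`` keys shown are an assumption about the return format)::

    diff = Page.compare_indexes()
    if diff['missing'] or diff['extra']:
        print('Page indexes are out of sync with the database:', diff)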
||||||
Ordering
|
Ordering
|
||||||
========
|
========
|
||||||
A default ordering can be specified for your
|
A default ordering can be specified for your
|
||||||
@@ -501,11 +682,11 @@ Shard keys
|
|||||||
==========
|
==========
|
||||||
|
|
||||||
If your collection is sharded, then you need to specify the shard key as a tuple,
|
If your collection is sharded, then you need to specify the shard key as a tuple,
|
||||||
using the :attr:`shard_key` attribute of :attr:`-mongoengine.Document.meta`.
|
using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`.
|
||||||
This ensures that the shard key is sent with the query when calling the
|
This ensures that the shard key is sent with the query when calling the
|
||||||
:meth:`~mongoengine.document.Document.save` or
|
:meth:`~mongoengine.document.Document.save` or
|
||||||
:meth:`~mongoengine.document.Document.update` method on an existing
|
:meth:`~mongoengine.document.Document.update` method on an existing
|
||||||
:class:`-mongoengine.Document` instance::
|
:class:`~mongoengine.Document` instance::
|
||||||
|
|
||||||
class LogEntry(Document):
|
class LogEntry(Document):
|
||||||
machine = StringField()
|
machine = StringField()
|
||||||
@@ -527,7 +708,9 @@ defined, you may subclass it and add any extra fields or methods you may need.
|
|||||||
As this new class is not a direct subclass of
|
As this new class is not a direct subclass of
|
||||||
:class:`~mongoengine.Document`, it will not be stored in its own collection; it
|
:class:`~mongoengine.Document`, it will not be stored in its own collection; it
|
||||||
will use the same collection as its superclass uses. This allows for more
|
will use the same collection as its superclass uses. This allows for more
|
||||||
convenient and efficient retrieval of related documents::
|
convenient and efficient retrieval of related documents -- all you need do is
|
||||||
|
set :attr:`allow_inheritance` to True in the :attr:`meta` data for a
|
||||||
|
document. ::
|
||||||
|
|
||||||
# Stored in a collection named 'page'
|
# Stored in a collection named 'page'
|
||||||
class Page(Document):
|
class Page(Document):
|
||||||
@@ -539,25 +722,47 @@ convenient and efficient retrieval of related documents::
|
|||||||
class DatedPage(Page):
|
class DatedPage(Page):
|
||||||
date = DateTimeField()
|
date = DateTimeField()
|
||||||
|
|
||||||
.. note:: From 0.7 onwards you must declare `allow_inheritance` in the document meta.
|
.. note:: From 0.8 onwards :attr:`allow_inheritance` defaults
|
||||||
|
to False, meaning you must set it to True to use inheritance.
|
||||||
|
|
||||||
Working with existing data
|
Working with existing data
|
||||||
--------------------------
|
--------------------------
|
||||||
To enable correct retrieval of documents involved in this kind of heirarchy,
|
As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and
|
||||||
two extra attributes are stored on each document in the database: :attr:`_cls`
|
easily get working with existing data. Just define the document to match
|
||||||
and :attr:`_types`. These are hidden from the user through the MongoEngine
|
the expected schema in your database ::
|
||||||
interface, but may not be present if you are trying to use MongoEngine with
|
|
||||||
an existing database. For this reason, you may disable this inheritance
|
|
||||||
mechansim, removing the dependency of :attr:`_cls` and :attr:`_types`, enabling
|
|
||||||
you to work with existing databases. To disable inheritance on a document
|
|
||||||
class, set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
|
|
||||||
dictionary::
|
|
||||||
|
|
||||||
# Will work with data in an existing collection named 'cmsPage'
|
# Will work with data in an existing collection named 'cmsPage'
|
||||||
class Page(Document):
|
class Page(Document):
|
||||||
title = StringField(max_length=200, required=True)
|
title = StringField(max_length=200, required=True)
|
||||||
meta = {
|
meta = {
|
||||||
'collection': 'cmsPage',
|
'collection': 'cmsPage'
|
||||||
'allow_inheritance': False,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
If you have wildly varying schemas then using a
|
||||||
|
:class:`~mongoengine.DynamicDocument` might be more appropriate, instead of
|
||||||
|
defining all possible field types.
|
||||||
|
|
||||||
|
If you use :class:`~mongoengine.Document` and the database contains data that
|
||||||
|
isn't defined then that data will be stored in the `document._data` dictionary.
|
||||||
|
|
||||||
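A minimal sketch of the :class:`~mongoengine.DynamicDocument` approach (the ``Page`` fields here are illustrative)::

    from mongoengine import DynamicDocument, StringField

    class Page(DynamicDocument):
        title = StringField(max_length=200, required=True)

    # Attributes that are not declared on the class are stored as well
    page = Page(title='Using MongoEngine')
    page.tags = ['mongoengine', 'documentation']
    page.save()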
|
Abstract classes
|
||||||
|
================
|
||||||
|
|
||||||
|
If you want to add some extra functionality to a group of Document classes but
|
||||||
|
you don't need or want the overhead of inheritance you can use the
|
||||||
|
:attr:`abstract` attribute of :attr:`~mongoengine.Document.meta`.
|
||||||
|
This won't turn on :ref:`document-inheritance` but will allow you to keep your
|
||||||
|
code DRY::
|
||||||
|
|
||||||
|
class BaseDocument(Document):
|
||||||
|
meta = {
|
||||||
|
'abstract': True,
|
||||||
|
}
|
||||||
|
def check_permissions(self):
|
||||||
|
...
|
||||||
|
|
||||||
|
class User(BaseDocument):
|
||||||
|
...
|
||||||
|
|
||||||
|
Now the User class will have access to the inherited `check_permissions` method
|
||||||
|
and won't store any of the extra `_cls` information.
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
Documents instances
|
Documents instances
|
||||||
===================
|
===================
|
||||||
To create a new document object, create an instance of the relevant document
|
To create a new document object, create an instance of the relevant document
|
||||||
class, providing values for its fields as its constructor keyword arguments.
|
class, providing values for its fields as constructor keyword arguments.
|
||||||
You may provide values for any of the fields on the document::
|
You may provide values for any of the fields on the document::
|
||||||
|
|
||||||
>>> page = Page(title="Test Page")
|
>>> page = Page(title="Test Page")
|
||||||
@@ -30,21 +30,53 @@ already exist, then any changes will be updated atomically. For example::
|
|||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
Changes to documents are tracked and on the whole perform `set` operations.
|
Changes to documents are tracked and on the whole perform ``set`` operations.
|
||||||
|
|
||||||
* ``list_field.pop(0)`` - *sets* the resulting list
|
* ``list_field.push(0)`` --- *sets* the resulting list
|
||||||
* ``del(list_field)`` - *unsets* whole list
|
* ``del(list_field)`` --- *unsets* whole list
|
||||||
|
|
||||||
|
With lists it is preferable to use ``Doc.update(push__list_field=0)`` as
|
||||||
|
this stops the whole list being updated --- avoiding any race conditions.
|
||||||
|
|
||||||
.. seealso::
|
.. seealso::
|
||||||
:ref:`guide-atomic-updates`
|
:ref:`guide-atomic-updates`
|
||||||
|
|
||||||
|
Pre save data validation and cleaning
|
||||||
|
-------------------------------------
|
||||||
|
MongoEngine allows you to create custom cleaning rules for your documents when
|
||||||
|
calling :meth:`~mongoengine.Document.save`. By providing a custom
|
||||||
|
:meth:`~mongoengine.Document.clean` method you can do any pre validation / data
|
||||||
|
cleaning.
|
||||||
|
|
||||||
|
This might be useful if you want to ensure a default value based on other
|
||||||
|
document values for example::
|
||||||
|
|
||||||
|
class Essay(Document):
|
||||||
|
status = StringField(choices=('Published', 'Draft'), required=True)
|
||||||
|
pub_date = DateTimeField()
|
||||||
|
|
||||||
|
def clean(self):
|
||||||
|
"""Ensures that only published essays have a `pub_date` and
|
||||||
|
automatically sets the pub_date if published and not set"""
|
||||||
|
if self.status == 'Draft' and self.pub_date is not None:
|
||||||
|
msg = 'Draft entries should not have a publication date.'
|
||||||
|
raise ValidationError(msg)
|
||||||
|
# Set the pub_date for published items if not set.
|
||||||
|
if self.status == 'Published' and self.pub_date is None:
|
||||||
|
self.pub_date = datetime.now()
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
Cleaning is only called if validation is turned on and when calling
|
||||||
|
:meth:`~mongoengine.Document.save`.
|
||||||
|
|
||||||
Cascading Saves
|
Cascading Saves
|
||||||
---------------
|
---------------
|
||||||
If your document contains :class:`~mongoengine.ReferenceField` or
|
If your document contains :class:`~mongoengine.fields.ReferenceField` or
|
||||||
:class:`~mongoengine.GenericReferenceField` objects, then by default the
|
:class:`~mongoengine.fields.GenericReferenceField` objects, then by default the
|
||||||
:meth:`~mongoengine.Document.save` method will automatically save any changes to
|
:meth:`~mongoengine.Document.save` method will not save any changes to
|
||||||
those objects as well. If this is not desired passing :attr:`cascade` as False
|
those objects. If you want all references to be saved as well (note that each
|
||||||
to the save method turns this feature off.
|
save is a separate query), then passing :attr:`cascade` as True
|
||||||
|
to the save method will cascade any saves.
|
||||||
|
|
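For illustration, a sketch assuming a hypothetical ``Post`` document whose ``author`` field is a :class:`~mongoengine.fields.ReferenceField`::

    post = Post.objects.first()
    post.author.name = 'A new name'   # modify the referenced document
    post.save(cascade=True)           # also saves the changed author document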
||||||
Deleting documents
|
Deleting documents
|
||||||
------------------
|
------------------
|
||||||
@@ -81,12 +113,13 @@ you may still use :attr:`id` to access the primary key if you want::
|
|||||||
>>> bob.id == bob.email == 'bob@example.com'
|
>>> bob.id == bob.email == 'bob@example.com'
|
||||||
True
|
True
|
||||||
|
|
||||||
You can also access the document's "primary key" using the :attr:`pk` field; in
|
You can also access the document's "primary key" using the :attr:`pk` field;
|
||||||
is an alias to :attr:`id`::
|
it's an alias to :attr:`id`::
|
||||||
|
|
||||||
>>> page = Page(title="Another Test Page")
|
>>> page = Page(title="Another Test Page")
|
||||||
>>> page.save()
|
>>> page.save()
|
||||||
>>> page.id == page.pk
|
>>> page.id == page.pk
|
||||||
|
True
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ GridFS
|
|||||||
Writing
|
Writing
|
||||||
-------
|
-------
|
||||||
|
|
||||||
GridFS support comes in the form of the :class:`~mongoengine.FileField` field
|
GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field
|
||||||
object. This field acts as a file-like object and provides a couple of
|
object. This field acts as a file-like object and provides a couple of
|
||||||
different ways of inserting and retrieving data. Arbitrary metadata such as
|
different ways of inserting and retrieving data. Arbitrary metadata such as
|
||||||
content type can also be stored alongside the files. In the following example,
|
content type can also be stored alongside the files. In the following example,
|
||||||
@@ -18,26 +18,16 @@ a document is created to store details about animals, including a photo::
|
|||||||
family = StringField()
|
family = StringField()
|
||||||
photo = FileField()
|
photo = FileField()
|
||||||
|
|
||||||
marmot = Animal('Marmota', 'Sciuridae')
|
marmot = Animal(genus='Marmota', family='Sciuridae')
|
||||||
|
|
||||||
marmot_photo = open('marmot.jpg', 'r') # Retrieve a photo from disk
|
|
||||||
marmot.photo = marmot_photo # Store photo in the document
|
|
||||||
marmot.photo.content_type = 'image/jpeg' # Store metadata
|
|
||||||
|
|
||||||
marmot.save()
|
|
||||||
|
|
||||||
Another way of writing to a :class:`~mongoengine.FileField` is to use the
|
|
||||||
:func:`put` method. This allows for metadata to be stored in the same call as
|
|
||||||
the file::
|
|
||||||
|
|
||||||
marmot.photo.put(marmot_photo, content_type='image/jpeg')
|
|
||||||
|
|
||||||
|
marmot_photo = open('marmot.jpg', 'rb')
|
||||||
|
marmot.photo.put(marmot_photo, content_type='image/jpeg')
|
||||||
marmot.save()
|
marmot.save()
|
||||||
|
|
||||||
Retrieval
|
Retrieval
|
||||||
---------
|
---------
|
||||||
|
|
||||||
So using the :class:`~mongoengine.FileField` is just like using any other
|
So using the :class:`~mongoengine.fields.FileField` is just like using any other
|
||||||
field. The file can also be retrieved just as easily::
|
field. The file can also be retrieved just as easily::
|
||||||
|
|
||||||
marmot = Animal.objects(genus='Marmota').first()
|
marmot = Animal.objects(genus='Marmota').first()
|
||||||
@@ -47,7 +37,7 @@ field. The file can also be retrieved just as easily::
|
|||||||
Streaming
|
Streaming
|
||||||
---------
|
---------
|
||||||
|
|
||||||
Streaming data into a :class:`~mongoengine.FileField` is achieved in a
|
Streaming data into a :class:`~mongoengine.fields.FileField` is achieved in a
|
||||||
slightly different manner. First, a new file must be created by calling the
|
slightly different manner. First, a new file must be created by calling the
|
||||||
:func:`new_file` method. Data can then be written using :func:`write`::
|
:func:`new_file` method. Data can then be written using :func:`write`::
|
||||||
|
|
||||||
@@ -56,7 +46,7 @@ slightly different manner. First, a new file must be created by calling the
|
|||||||
marmot.photo.write('some_more_image_data')
|
marmot.photo.write('some_more_image_data')
|
||||||
marmot.photo.close()
|
marmot.photo.close()
|
||||||
|
|
||||||
marmot.photo.save()
|
marmot.save()
|
||||||
|
|
||||||
Deletion
|
Deletion
|
||||||
--------
|
--------
|
||||||
@@ -65,7 +55,7 @@ Deleting stored files is achieved with the :func:`delete` method::
|
|||||||
|
|
||||||
marmot.photo.delete()
|
marmot.photo.delete()
|
||||||
|
|
||||||
.. note::
|
.. warning::
|
||||||
|
|
||||||
The FileField in a Document actually only stores the ID of a file in a
|
The FileField in a Document actually only stores the ID of a file in a
|
||||||
separate GridFS collection. This means that deleting a document
|
separate GridFS collection. This means that deleting a document
|
||||||
@@ -80,5 +70,5 @@ Replacing files
|
|||||||
Files can be replaced with the :func:`replace` method. This works just like
|
Files can be replaced with the :func:`replace` method. This works just like
|
||||||
the :func:`put` method so even metadata can (and should) be replaced::
|
the :func:`put` method so even metadata can (and should) be replaced::
|
||||||
|
|
||||||
another_marmot = open('another_marmot.png', 'r')
|
another_marmot = open('another_marmot.png', 'rb')
|
||||||
marmot.photo.replace(another_marmot, content_type='image/png')
|
marmot.photo.replace(another_marmot, content_type='image/png')
|
||||||
|
|||||||
@@ -12,3 +12,5 @@ User Guide
|
|||||||
querying
|
querying
|
||||||
gridfs
|
gridfs
|
||||||
signals
|
signals
|
||||||
|
text-indexes
|
||||||
|
mongomock
|
||||||
|
|||||||
@@ -2,13 +2,13 @@
|
|||||||
Installing MongoEngine
|
Installing MongoEngine
|
||||||
======================
|
======================
|
||||||
|
|
||||||
To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_
|
To use MongoEngine, you will need to download `MongoDB <http://mongodb.com/>`_
|
||||||
and ensure it is running in an accessible location. You will also need
|
and ensure it is running in an accessible location. You will also need
|
||||||
`PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you
|
`PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you
|
||||||
install MongoEngine using setuptools, then the dependencies will be handled for
|
install MongoEngine using setuptools, then the dependencies will be handled for
|
||||||
you.
|
you.
|
||||||
|
|
||||||
MongoEngine is available on PyPI, so to use it you can use :program:`pip`:
|
MongoEngine is available on PyPI, so you can use :program:`pip`:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
@@ -22,10 +22,10 @@ Alternatively, if you don't have setuptools installed, `download it from PyPi
|
|||||||
$ python setup.py install
|
$ python setup.py install
|
||||||
|
|
||||||
To use the bleeding-edge version of MongoEngine, you can get the source from
|
To use the bleeding-edge version of MongoEngine, you can get the source from
|
||||||
`GitHub <http://github.com/hmarr/mongoengine/>`_ and install it as above:
|
`GitHub <http://github.com/mongoengine/mongoengine/>`_ and install it as above:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ git clone git://github.com/hmarr/mongoengine
|
$ git clone git://github.com/mongoengine/mongoengine
|
||||||
$ cd mongoengine
|
$ cd mongoengine
|
||||||
$ python setup.py install
|
$ python setup.py install
|
||||||
|
|||||||
21
docs/guide/mongomock.rst
Normal file
21
docs/guide/mongomock.rst
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
==============================
|
||||||
|
Use mongomock for testing
|
||||||
|
==============================
|
||||||
|
|
||||||
|
`mongomock <https://github.com/vmalloc/mongomock/>`_ is a package to do just
|
||||||
|
what the name implies, mocking a mongo database.
|
||||||
|
|
||||||
|
To use with mongoengine, simply specify mongomock when connecting with
|
||||||
|
mongoengine:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
connect('mongoenginetest', host='mongomock://localhost')
|
||||||
|
conn = get_connection()
|
||||||
|
|
||||||
|
or with an alias:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
connect('mongoenginetest', host='mongomock://localhost', alias='testdb')
|
||||||
|
conn = get_connection('testdb')
|
||||||
@@ -15,11 +15,10 @@ fetch documents from the database::
|
|||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
Once the iteration finishes (when :class:`StopIteration` is raised),
|
As of MongoEngine 0.8 the querysets utilise a local cache. So iterating
|
||||||
:meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the
|
it multiple times will only cause a single query. If this is not the
|
||||||
:class:`~mongoengine.queryset.QuerySet` may be iterated over again. The
|
desired behaviour you can call :class:`~mongoengine.QuerySet.no_cache`
|
||||||
results of the first iteration are *not* cached, so the database will be hit
|
(version **0.8.3+**) to return a non-caching queryset.
|
||||||
each time the :class:`~mongoengine.queryset.QuerySet` is iterated over.
|
|
||||||
|
|
||||||
Filtering queries
|
Filtering queries
|
||||||
=================
|
=================
|
||||||
@@ -40,10 +39,18 @@ syntax::
|
|||||||
# been written by a user whose 'country' field is set to 'uk'
|
# been written by a user whose 'country' field is set to 'uk'
|
||||||
uk_pages = Page.objects(author__country='uk')
|
uk_pages = Page.objects(author__country='uk')
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
(version **0.9.1+**) If your field name is the same as a MongoDB operator name (for example
|
||||||
|
``type``, ``lte``, ``lt``...) and it is placed at the end of a lookup keyword,
|
||||||
|
MongoEngine automatically prepends ``$`` to it. To avoid this, use ``__`` at the end of
|
||||||
|
your lookup keyword. For example, if your field name is ``type`` and you want to
|
||||||
|
query by this field, you must use ``.objects(user__type__="admin")`` instead of
|
||||||
|
``.objects(user__type="admin")``.
|
||||||
|
|
||||||
Query operators
|
Query operators
|
||||||
===============
|
===============
|
||||||
Operators other than equality may also be used in queries; just attach the
|
Operators other than equality may also be used in queries --- just attach the
|
||||||
operator name to a key with a double-underscore::
|
operator name to a key with a double-underscore::
|
||||||
|
|
||||||
# Only find users whose age is 18 or less
|
# Only find users whose age is 18 or less
|
||||||
@@ -65,6 +72,9 @@ Available operators are as follows:
|
|||||||
* ``size`` -- the size of the array is
|
* ``size`` -- the size of the array is
|
||||||
* ``exists`` -- value for field exists
|
* ``exists`` -- value for field exists
|
||||||
|
|
||||||
|
String queries
|
||||||
|
--------------
|
||||||
|
|
||||||
The following operators are available as shortcuts to querying with regular
|
The following operators are available as shortcuts to querying with regular
|
||||||
expressions:
|
expressions:
|
||||||
|
|
||||||
@@ -78,12 +88,75 @@ expressions:
|
|||||||
* ``iendswith`` -- string field ends with value (case insensitive)
|
* ``iendswith`` -- string field ends with value (case insensitive)
|
||||||
* ``match`` -- performs an $elemMatch so you can match an entire document within an array
|
* ``match`` -- performs an $elemMatch so you can match an entire document within an array
|
||||||
|
|
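A short sketch of a couple of the shortcuts above, assuming a ``User`` document with a ``name`` field::

    # Case-insensitive substring match
    users = User.objects(name__icontains='smith')

    # Case-sensitive prefix match
    users = User.objects(name__startswith='Dr ')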
||||||
There are a few special operators for performing geographical queries, that
|
|
||||||
may used with :class:`~mongoengine.GeoPointField`\ s:
|
Geo queries
|
||||||
|
-----------
|
||||||
|
|
||||||
|
There are a few special operators for performing geographical queries.
|
||||||
|
The following were added in MongoEngine 0.8 for
|
||||||
|
:class:`~mongoengine.fields.PointField`,
|
||||||
|
:class:`~mongoengine.fields.LineStringField` and
|
||||||
|
:class:`~mongoengine.fields.PolygonField`:
|
||||||
|
|
||||||
|
* ``geo_within`` -- check if a geometry is within a polygon. For ease of use
|
||||||
|
it accepts either a geojson geometry or just the polygon coordinates eg::
|
||||||
|
|
||||||
|
loc.objects(point__geo_within=[[[40, 5], [40, 6], [41, 6], [40, 5]]])
|
||||||
|
loc.objects(point__geo_within={"type": "Polygon",
|
||||||
|
"coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})
|
||||||
|
|
||||||
|
* ``geo_within_box`` -- simplified geo_within searching with a box eg::
|
||||||
|
|
||||||
|
loc.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)])
|
||||||
|
loc.objects(point__geo_within_box=[<bottom left coordinates>, <upper right coordinates>])
|
||||||
|
|
||||||
|
* ``geo_within_polygon`` -- simplified geo_within searching within a simple polygon eg::
|
||||||
|
|
||||||
|
loc.objects(point__geo_within_polygon=[[40, 5], [40, 6], [41, 6], [40, 5]])
|
||||||
|
loc.objects(point__geo_within_polygon=[ [ <x1> , <y1> ] ,
|
||||||
|
[ <x2> , <y2> ] ,
|
||||||
|
[ <x3> , <y3> ] ])
|
||||||
|
|
||||||
|
* ``geo_within_center`` -- simplified geo_within the flat circle radius of a point eg::
|
||||||
|
|
||||||
|
loc.objects(point__geo_within_center=[(-125.0, 35.0), 1])
|
||||||
|
loc.objects(point__geo_within_center=[ [ <x>, <y> ] , <radius> ])
|
||||||
|
|
||||||
|
* ``geo_within_sphere`` -- simplified geo_within the spherical circle radius of a point eg::
|
||||||
|
|
||||||
|
loc.objects(point__geo_within_sphere=[(-125.0, 35.0), 1])
|
||||||
|
loc.objects(point__geo_within_sphere=[ [ <x>, <y> ] , <radius> ])
|
||||||
|
|
||||||
|
* ``geo_intersects`` -- selects all locations that intersect with a geometry eg::
|
||||||
|
|
||||||
|
# Inferred from provided points lists:
|
||||||
|
loc.objects(poly__geo_intersects=[40, 6])
|
||||||
|
loc.objects(poly__geo_intersects=[[40, 5], [40, 6]])
|
||||||
|
loc.objects(poly__geo_intersects=[[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]])
|
||||||
|
|
||||||
|
# With geoJson style objects
|
||||||
|
loc.objects(poly__geo_intersects={"type": "Point", "coordinates": [40, 6]})
|
||||||
|
loc.objects(poly__geo_intersects={"type": "LineString",
|
||||||
|
"coordinates": [[40, 5], [40, 6]]})
|
||||||
|
loc.objects(poly__geo_intersects={"type": "Polygon",
|
||||||
|
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]})
|
||||||
|
|
||||||
|
* ``near`` -- find all the locations near a given point::
|
||||||
|
|
||||||
|
loc.objects(point__near=[40, 5])
|
||||||
|
loc.objects(point__near={"type": "Point", "coordinates": [40, 5]})
|
||||||
|
|
||||||
|
You can also set the maximum and/or the minimum distance in meters as well::
|
||||||
|
|
||||||
|
loc.objects(point__near=[40, 5], point__max_distance=1000)
|
||||||
|
loc.objects(point__near=[40, 5], point__min_distance=100)
|
||||||
|
|
||||||
|
The older 2D indexes are still supported with the
|
||||||
|
:class:`~mongoengine.fields.GeoPointField`:
|
||||||
|
|
||||||
* ``within_distance`` -- provide a list containing a point and a maximum
|
* ``within_distance`` -- provide a list containing a point and a maximum
|
||||||
distance (e.g. [(41.342, -87.653), 5])
|
distance (e.g. [(41.342, -87.653), 5])
|
||||||
* ``within_spherical_distance`` -- Same as above but using the spherical geo model
|
* ``within_spherical_distance`` -- same as above but using the spherical geo model
|
||||||
(e.g. [(41.342, -87.653), 5/earth_radius])
|
(e.g. [(41.342, -87.653), 5/earth_radius])
|
||||||
* ``near`` -- order the documents by how close they are to a given point
|
* ``near`` -- order the documents by how close they are to a given point
|
||||||
* ``near_sphere`` -- same as above but using the spherical geo model
|
* ``near_sphere`` -- same as above but using the spherical geo model
|
||||||
@@ -91,14 +164,19 @@ may used with :class:`~mongoengine.GeoPointField`\ s:
|
|||||||
[(35.0, -125.0), (40.0, -100.0)])
|
[(35.0, -125.0), (40.0, -100.0)])
|
||||||
* ``within_polygon`` -- filter documents to those within a given polygon (e.g.
|
* ``within_polygon`` -- filter documents to those within a given polygon (e.g.
|
||||||
[(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]).
|
[(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]).
|
||||||
|
|
||||||
.. note:: Requires Mongo Server 2.0
|
.. note:: Requires Mongo Server 2.0
|
||||||
|
|
||||||
|
* ``max_distance`` -- can be added to your location queries to set a maximum
|
||||||
|
distance.
|
||||||
|
* ``min_distance`` -- can be added to your location queries to set a minimum
|
||||||
|
distance.
|
||||||
|
|
||||||
Querying lists
|
Querying lists
|
||||||
--------------
|
--------------
|
||||||
On most fields, this syntax will look up documents where the field specified
|
On most fields, this syntax will look up documents where the field specified
|
||||||
matches the given value exactly, but when the field refers to a
|
matches the given value exactly, but when the field refers to a
|
||||||
:class:`~mongoengine.ListField`, a single item may be provided, in which case
|
:class:`~mongoengine.fields.ListField`, a single item may be provided, in which case
|
||||||
lists that contain that item will be matched::
|
lists that contain that item will be matched::
|
||||||
|
|
||||||
class Page(Document):
|
class Page(Document):
|
||||||
@@ -129,12 +207,14 @@ However, this doesn't map well to the syntax so you can also use a capital S ins
|
|||||||
|
|
||||||
Post.objects(comments__by="joe").update(inc__comments__S__votes=1)
|
Post.objects(comments__by="joe").update(inc__comments__S__votes=1)
|
||||||
|
|
||||||
.. note:: Due to Mongo currently the $ operator only applies to the first matched item in the query.
|
.. note::
|
||||||
|
Due to :program:`Mongo`, currently the $ operator only applies to the
|
||||||
|
first matched item in the query.
|
||||||
|
|
||||||
|
|
||||||
Raw queries
|
Raw queries
|
||||||
-----------
|
-----------
|
||||||
It is possible to provide a raw PyMongo query as a query parameter, which will
|
It is possible to provide a raw :mod:`PyMongo` query as a query parameter, which will
|
||||||
be integrated directly into the query. This is done using the ``__raw__``
|
be integrated directly into the query. This is done using the ``__raw__``
|
||||||
keyword argument::
|
keyword argument::
|
||||||
|
|
||||||
@@ -144,12 +224,12 @@ keyword argument::
|
|||||||
|
|
||||||
Limiting and skipping results
|
Limiting and skipping results
|
||||||
=============================
|
=============================
|
||||||
Just as with traditional ORMs, you may limit the number of results returned, or
|
Just as with traditional ORMs, you may limit the number of results returned or
|
||||||
skip a number of results in your query.
|
skip a number of results in your query.
|
||||||
:meth:`~mongoengine.queryset.QuerySet.limit` and
|
:meth:`~mongoengine.queryset.QuerySet.limit` and
|
||||||
:meth:`~mongoengine.queryset.QuerySet.skip` methods are available on
|
:meth:`~mongoengine.queryset.QuerySet.skip` methods are available on
|
||||||
:class:`~mongoengine.queryset.QuerySet` objects, but the prefered syntax for
|
:class:`~mongoengine.queryset.QuerySet` objects, but the `array-slicing` syntax
|
||||||
achieving this is using array-slicing syntax::
|
is preferred for achieving this::
|
||||||
|
|
||||||
# Only the first 5 people
|
# Only the first 5 people
|
||||||
users = User.objects[:5]
|
users = User.objects[:5]
|
||||||
@@ -157,7 +237,7 @@ achieving this is using array-slicing syntax::
|
|||||||
# All except for the first 5 people
|
# All except for the first 5 people
|
||||||
users = User.objects[5:]
|
users = User.objects[5:]
|
||||||
|
|
||||||
# 5 users, starting from the 10th user found
|
# 5 users, starting from the 11th user found
|
||||||
users = User.objects[10:15]
|
users = User.objects[10:15]
|
||||||
|
|
||||||
You may also index the query to retrieve a single result. If an item at that
|
You may also index the query to retrieve a single result. If an item at that
|
||||||
@@ -179,25 +259,21 @@ Retrieving unique results
|
|||||||
-------------------------
|
-------------------------
|
||||||
To retrieve a result that should be unique in the collection, use
|
To retrieve a result that should be unique in the collection, use
|
||||||
:meth:`~mongoengine.queryset.QuerySet.get`. This will raise
|
:meth:`~mongoengine.queryset.QuerySet.get`. This will raise
|
||||||
:class:`~mongoengine.queryset.DoesNotExist` if no document matches the query,
|
:class:`~mongoengine.queryset.DoesNotExist` if
|
||||||
and :class:`~mongoengine.queryset.MultipleObjectsReturned` if more than one
|
no document matches the query, and
|
||||||
document matched the query.
|
:class:`~mongoengine.queryset.MultipleObjectsReturned`
|
||||||
|
if more than one document matched the query. These exceptions are merged into
|
||||||
|
your document definitions, e.g. `MyDoc.DoesNotExist`.
|
||||||
|
|
||||||
A variation of this method exists,
|
A variation of this method, get_or_create(), used to exist but was unsafe. It
|
||||||
:meth:`~mongoengine.queryset.Queryset.get_or_create`, that will create a new
|
could not be made safe because there are no transactions in MongoDB. Other
|
||||||
document with the query arguments if no documents match the query. An
|
approaches should be investigated to ensure you don't accidentally duplicate
|
||||||
additional keyword argument, :attr:`defaults` may be provided, which will be
|
data when using something similar to this method. It was therefore deprecated
|
||||||
used as default values for the new document, in the case that it should need
|
in 0.8 and removed in 0.10.
|
||||||
to be created::
|
|
||||||
|
|
||||||
>>> a, created = User.objects.get_or_create(name='User A', defaults={'age': 30})
|
|
||||||
>>> b, created = User.objects.get_or_create(name='User A', defaults={'age': 40})
|
|
||||||
>>> a.name == b.name and a.age == b.age
|
|
||||||
True
|
|
||||||
|
|
||||||
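A sketch of plain :meth:`~mongoengine.queryset.QuerySet.get` usage, plus one possible replacement pattern for the removed get_or_create() based on an atomic upsert (treat the upsert pattern as a suggestion, not an officially endorsed equivalent)::

    try:
        user = User.objects.get(name='User A')
    except User.DoesNotExist:
        user = None

    # One alternative to get_or_create(): an atomic upsert
    User.objects(name='User A').update_one(set__age=30, upsert=True)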
Default Document queries
|
Default Document queries
|
||||||
========================
|
========================
|
||||||
By default, the objects :attr:`~mongoengine.Document.objects` attribute on a
|
By default, the objects :attr:`~Document.objects` attribute on a
|
||||||
document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter
|
document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter
|
||||||
the collection -- it returns all objects. This may be changed by defining a
|
the collection -- it returns all objects. This may be changed by defining a
|
||||||
method on a document that modifies a queryset. The method should accept two
|
method on a document that modifies a queryset. The method should accept two
|
||||||
@@ -232,7 +308,7 @@ custom manager methods as you like::
|
|||||||
BlogPost(title='test1', published=False).save()
|
BlogPost(title='test1', published=False).save()
|
||||||
BlogPost(title='test2', published=True).save()
|
BlogPost(title='test2', published=True).save()
|
||||||
assert len(BlogPost.objects) == 2
|
assert len(BlogPost.objects) == 2
|
||||||
assert len(BlogPost.live_posts) == 1
|
assert len(BlogPost.live_posts()) == 1
|
||||||
|
|
||||||
Custom QuerySets
|
Custom QuerySets
|
||||||
================
|
================
|
||||||
@@ -240,14 +316,19 @@ Should you want to add custom methods for interacting with or filtering
|
|||||||
documents, extending the :class:`~mongoengine.queryset.QuerySet` class may be
|
documents, extending the :class:`~mongoengine.queryset.QuerySet` class may be
|
||||||
the way to go. To use a custom :class:`~mongoengine.queryset.QuerySet` class on
|
the way to go. To use a custom :class:`~mongoengine.queryset.QuerySet` class on
|
||||||
a document, set ``queryset_class`` to the custom class in a
|
a document, set ``queryset_class`` to the custom class in a
|
||||||
:class:`~mongoengine.Document`\ s ``meta`` dictionary::
|
:class:`~mongoengine.Document`'s ``meta`` dictionary::
|
||||||
|
|
||||||
class AwesomerQuerySet(QuerySet):
|
class AwesomerQuerySet(QuerySet):
|
||||||
pass
|
|
||||||
|
def get_awesome(self):
|
||||||
|
return self.filter(awesome=True)
|
||||||
|
|
||||||
class Page(Document):
|
class Page(Document):
|
||||||
meta = {'queryset_class': AwesomerQuerySet}
|
meta = {'queryset_class': AwesomerQuerySet}
|
||||||
|
|
||||||
|
# To call:
|
||||||
|
Page.objects.get_awesome()
|
||||||
|
|
||||||
.. versionadded:: 0.4
|
.. versionadded:: 0.4
|
||||||
|
|
||||||
Aggregation
|
Aggregation
|
||||||
@@ -259,12 +340,19 @@ Javascript code that is executed on the database server.
|
|||||||
|
|
||||||
Counting results
|
Counting results
|
||||||
----------------
|
----------------
|
||||||
Just as with limiting and skipping results, there is a method on
|
Just as with limiting and skipping results, there is a method on a
|
||||||
:class:`~mongoengine.queryset.QuerySet` objects --
|
:class:`~mongoengine.queryset.QuerySet` object --
|
||||||
:meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic
|
:meth:`~mongoengine.queryset.QuerySet.count`::
|
||||||
way of achieving this::
|
|
||||||
|
|
||||||
num_users = len(User.objects)
|
num_users = User.objects.count()
|
||||||
|
|
||||||
|
You could technically use ``len(User.objects)`` to get the same result, but it
|
||||||
|
would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`.
|
||||||
|
When you execute a server-side count query, you let MongoDB do the heavy
|
||||||
|
lifting and you receive a single integer over the wire. Meanwhile, len()
|
||||||
|
retrieves all the results, places them in a local cache, and finally counts
|
||||||
|
them. If we compare the performance of the two operations, len() is much slower
|
||||||
|
than :meth:`~mongoengine.queryset.QuerySet.count`.
|
||||||
|
|
||||||
Further aggregation
|
Further aggregation
|
||||||
-------------------
|
-------------------
|
||||||
@@ -310,7 +398,7 @@ Retrieving a subset of fields
|
|||||||
Sometimes a subset of fields on a :class:`~mongoengine.Document` is required,
|
Sometimes a subset of fields on a :class:`~mongoengine.Document` is required,
|
||||||
and for efficiency only these should be retrieved from the database. This issue
|
and for efficiency only these should be retrieved from the database. This issue
|
||||||
is especially important for MongoDB, as fields may often be extremely large
|
is especially important for MongoDB, as fields may often be extremely large
|
||||||
(e.g. a :class:`~mongoengine.ListField` of
|
(e.g. a :class:`~mongoengine.fields.ListField` of
|
||||||
:class:`~mongoengine.EmbeddedDocument`\ s, which represent the comments on a
|
:class:`~mongoengine.EmbeddedDocument`\ s, which represent the comments on a
|
||||||
blog post). To select only a subset of fields, use
|
blog post). To select only a subset of fields, use
|
||||||
:meth:`~mongoengine.queryset.QuerySet.only`, specifying the fields you want to
|
:meth:`~mongoengine.queryset.QuerySet.only`, specifying the fields you want to
|
||||||
@@ -342,14 +430,14 @@ If you later need the missing fields, just call
|
|||||||
Getting related data
|
Getting related data
|
||||||
--------------------
|
--------------------
|
||||||
|
|
||||||
When iterating the results of :class:`~mongoengine.ListField` or
|
When iterating the results of :class:`~mongoengine.fields.ListField` or
|
||||||
:class:`~mongoengine.DictField` we automatically dereference any
|
:class:`~mongoengine.fields.DictField` we automatically dereference any
|
||||||
:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
|
:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
|
||||||
number of queries to MongoDB.
|
number of queries to MongoDB.
|
||||||
|
|
||||||
There are times when that efficiency is not enough; documents that have
|
There are times when that efficiency is not enough; documents that have
|
||||||
:class:`~mongoengine.ReferenceField` objects or
|
:class:`~mongoengine.fields.ReferenceField` objects or
|
||||||
:class:`~mongoengine.GenericReferenceField` objects at the top level are
|
:class:`~mongoengine.fields.GenericReferenceField` objects at the top level are
|
||||||
expensive as the number of queries to MongoDB can quickly rise.
|
expensive as the number of queries to MongoDB can quickly rise.
|
||||||
|
|
||||||
To limit the number of queries use
|
To limit the number of queries use
|
||||||
@@ -360,8 +448,30 @@ references to the depth of 1 level. If you have more complicated documents and
|
|||||||
want to dereference more of the object at once then increasing the :attr:`max_depth`
|
want to dereference more of the object at once then increasing the :attr:`max_depth`
|
||||||
will dereference more levels of the document.
|
will dereference more levels of the document.
|
||||||
|
|
||||||
|
Turning off dereferencing
|
||||||
|
-------------------------
|
||||||
|
|
||||||
|
Sometimes for performance reasons you don't want to automatically dereference
|
||||||
|
data. To turn off dereferencing of the results of a query use
|
||||||
|
:func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so::
|
||||||
|
|
||||||
|
post = Post.objects.no_dereference().first()
|
||||||
|
assert(isinstance(post.author, ObjectId))
|
||||||
|
|
||||||
|
You can also turn off all dereferencing for a fixed period by using the
|
||||||
|
:class:`~mongoengine.context_managers.no_dereference` context manager::
|
||||||
|
|
||||||
|
with no_dereference(Post) as Post:
|
||||||
|
post = Post.objects.first()
|
||||||
|
assert(isinstance(post.author, ObjectId))
|
||||||
|
|
||||||
|
# Outside the context manager dereferencing occurs.
|
||||||
|
assert(isinstance(post.author, User))
|
||||||
|
|
||||||
|
|
||||||
Advanced queries
|
Advanced queries
|
||||||
================
|
================
|
||||||
|
|
||||||
Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword
|
Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword
|
||||||
arguments can't fully express the query you want to use -- for example if you
|
arguments can't fully express the query you want to use -- for example if you
|
||||||
need to combine a number of constraints using *and* and *or*. This is made
|
need to combine a number of constraints using *and* and *or*. This is made
|
||||||
@@ -374,34 +484,46 @@ operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the
|
|||||||
first positional argument to :attr:`Document.objects` when you filter it by
|
first positional argument to :attr:`Document.objects` when you filter it by
|
||||||
calling it with keyword arguments::
|
calling it with keyword arguments::
|
||||||
|
|
||||||
|
from mongoengine.queryset.visitor import Q
|
||||||
|
|
||||||
# Get published posts
|
# Get published posts
|
||||||
Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now()))
|
Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now()))
|
||||||
|
|
||||||
# Get top posts
|
# Get top posts
|
||||||
Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000))
|
Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000))
|
||||||
|
|
||||||
|
.. warning:: You have to use bitwise operators. You cannot use ``or``, ``and``
|
||||||
|
to combine queries as ``Q(a=a) or Q(b=b)`` is not the same as
|
||||||
|
``Q(a=a) | Q(b=b)``. As ``Q(a=a)`` equates to true ``Q(a=a) or Q(b=b)`` is
|
||||||
|
the same as ``Q(a=a)``.
|
||||||
|
|
||||||
.. _guide-atomic-updates:
|
.. _guide-atomic-updates:
|
||||||
|
|
||||||
Atomic updates
|
Atomic updates
|
||||||
==============
|
==============
|
||||||
Documents may be updated atomically by using the
|
Documents may be updated atomically by using the
|
||||||
:meth:`~mongoengine.queryset.QuerySet.update_one` and
|
:meth:`~mongoengine.queryset.QuerySet.update_one`,
|
||||||
:meth:`~mongoengine.queryset.QuerySet.update` methods on a
|
:meth:`~mongoengine.queryset.QuerySet.update` and
|
||||||
:meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
|
:meth:`~mongoengine.queryset.QuerySet.modify` methods on a
|
||||||
that you may use with these methods:
|
:class:`~mongoengine.queryset.QuerySet` or
|
||||||
|
:meth:`~mongoengine.Document.modify` and
|
||||||
|
:meth:`~mongoengine.Document.save` (with :attr:`save_condition` argument) on a
|
||||||
|
:class:`~mongoengine.Document`.
|
||||||
|
There are several different "modifiers" that you may use with these methods:
|
||||||
|
|
||||||
* ``set`` -- set a particular value
|
* ``set`` -- set a particular value
|
||||||
* ``unset`` -- delete a particular value (since MongoDB v1.3+)
|
* ``unset`` -- delete a particular value (since MongoDB v1.3)
|
||||||
* ``inc`` -- increment a value by a given amount
|
* ``inc`` -- increment a value by a given amount
|
||||||
* ``dec`` -- decrement a value by a given amount
|
* ``dec`` -- decrement a value by a given amount
|
||||||
* ``pop`` -- remove the last item from a list
|
|
||||||
* ``push`` -- append a value to a list
|
* ``push`` -- append a value to a list
|
||||||
* ``push_all`` -- append several values to a list
|
* ``push_all`` -- append several values to a list
|
||||||
* ``pop`` -- remove the first or last element of a list
|
* ``pop`` -- remove the first or last element of a list `depending on the value`_
|
||||||
* ``pull`` -- remove a value from a list
|
* ``pull`` -- remove a value from a list
|
||||||
* ``pull_all`` -- remove several values from a list
|
* ``pull_all`` -- remove several values from a list
|
||||||
* ``add_to_set`` -- add a value to a list only if it's not in the list already
|
* ``add_to_set`` -- add a value to a list only if it's not in the list already
|
||||||
|
|
||||||
|
.. _depending on the value: http://docs.mongodb.org/manual/reference/operator/update/pop/
|
||||||
|
|
||||||
The syntax for atomic updates is similar to the querying syntax, but the
|
The syntax for atomic updates is similar to the querying syntax, but the
|
||||||
modifier comes before the field, not after it::
|
modifier comes before the field, not after it::
|
||||||
|
|
||||||
@@ -420,7 +542,14 @@ modifier comes before the field, not after it::
|
|||||||
>>> post.tags
|
>>> post.tags
|
||||||
['database', 'nosql']
|
['database', 'nosql']
|
||||||
|
|
||||||
.. note ::
|
.. note::
|
||||||
|
|
||||||
|
If no modifier operator is specified the default will be ``$set``. So the following sentences are identical::
|
||||||
|
|
||||||
|
>>> BlogPost.objects(id=post.id).update(title='Example Post')
|
||||||
|
>>> BlogPost.objects(id=post.id).update(set__title='Example Post')
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates
|
In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates
|
||||||
on changed documents by tracking changes to that document.
|
on changed documents by tracking changes to that document.
|
||||||
@@ -436,7 +565,7 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`::
|
|||||||
>>> post.tags
|
>>> post.tags
|
||||||
['database', 'mongodb']
|
['database', 'mongodb']
|
||||||
|
|
||||||
.. note ::
|
.. note::
|
||||||
Currently only top level lists are handled, future versions of mongodb /
|
Currently only top level lists are handled, future versions of mongodb /
|
||||||
pymongo plan to support nested positional operators. See `The $ positional
|
pymongo plan to support nested positional operators. See `The $ positional
|
||||||
operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_.
|
operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_.
|
||||||
@@ -478,7 +607,7 @@ Some variables are made available in the scope of the Javascript function:
|
|||||||
|
|
||||||
The following example demonstrates the intended usage of
|
The following example demonstrates the intended usage of
|
||||||
:meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums
|
:meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums
|
||||||
over a field on a document (this functionality is already available throught
|
over a field on a document (this functionality is already available through
|
||||||
:meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of
|
:meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of
|
||||||
example)::
|
example)::
|
||||||
|
|
||||||
@@ -505,7 +634,7 @@ Javascript code. When accessing a field on a collection object, use
square-bracket notation, and prefix the MongoEngine field name with a tilde.
The field name that follows the tilde will be translated to the name used in
the database. Note that when referring to fields on embedded documents,
the name of the :class:`~mongoengine.fields.EmbeddedDocumentField`, followed by a dot,
should be used before the name of the field on the embedded document. The
following example shows how the substitutions are made::
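    # An illustrative sketch (document and field names are assumptions):
    # with documents declared like this...
    class Comment(EmbeddedDocument):
        content = StringField(db_field='body')

    class BlogPost(Document):
        title = StringField(db_field='doctitle')
        comments = ListField(EmbeddedDocumentField(Comment), db_field='cs')

    # ...Javascript passed to exec_js may use the MongoEngine names with the
    # tilde prefix; "this['~title']" becomes "this['doctitle']" and
    # "'~comments.content'" becomes "'cs.body'" before the code is executed.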
@@ -1,5 +1,6 @@
.. _signals:

=======
Signals
=======

@@ -7,32 +8,95 @@ Signals

.. note::

    Signal support is provided by the excellent `blinker`_ library. If you wish
    to enable signal support this library must be installed, though it is not
    required for MongoEngine to function.

Overview
--------

Signals are found within the `mongoengine.signals` module. Unless
specified signals receive no additional arguments beyond the `sender` class and
`document` instance. Post-signals are only called if there were no exceptions
raised during the processing of their related function.

Available signals include:

`pre_init`
    Called during the creation of a new :class:`~mongoengine.Document` or
    :class:`~mongoengine.EmbeddedDocument` instance, after the constructor
    arguments have been collected but before any additional processing has been
    done to them. (I.e. assignment of default values.) Handlers for this signal
    are passed the dictionary of arguments using the `values` keyword argument
    and may modify this dictionary prior to returning.

`post_init`
    Called after all processing of a new :class:`~mongoengine.Document` or
    :class:`~mongoengine.EmbeddedDocument` instance has been completed.

`pre_save`
    Called within :meth:`~mongoengine.Document.save` prior to performing
    any actions.

`pre_save_post_validation`
    Called within :meth:`~mongoengine.Document.save` after validation
    has taken place but before saving.

`post_save`
    Called within :meth:`~mongoengine.Document.save` after all actions
    (validation, insert/update, cascades, clearing dirty flags) have completed
    successfully. Passed the additional boolean keyword argument `created` to
    indicate if the save was an insert or an update.

`pre_delete`
    Called within :meth:`~mongoengine.Document.delete` prior to
    attempting the delete operation.

`post_delete`
    Called within :meth:`~mongoengine.Document.delete` upon successful
    deletion of the record.

`pre_bulk_insert`
    Called after validation of the documents to insert, but prior to any data
    being written. In this case, the `document` argument is replaced by a
    `documents` argument representing the list of documents being inserted.

`post_bulk_insert`
    Called after a successful bulk insert operation. As per `pre_bulk_insert`,
    the `document` argument is omitted and replaced with a `documents` argument.
    An additional boolean argument, `loaded`, identifies the contents of
    `documents` as either :class:`~mongoengine.Document` instances when `True` or
    simply a list of primary key values for the inserted records if `False`.
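For example, a `post_save` handler that makes use of the `created` flag
described above might look like this (a minimal sketch; it assumes the
``signals`` import shown in the next section)::

    def notify_save(sender, document, **kwargs):
        if kwargs.get('created', False):
            print('Inserted a new %s' % document)
        else:
            print('Updated an existing %s' % document)

    signals.post_save.connect(notify_save)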
Attaching Events
----------------

After writing a handler function like the following::

    import logging
    from datetime import datetime

    from mongoengine import *
    from mongoengine import signals

    def update_modified(sender, document):
        document.modified = datetime.utcnow()

You attach the event handler to your :class:`~mongoengine.Document` or
:class:`~mongoengine.EmbeddedDocument` subclass::

    class Record(Document):
        modified = DateTimeField()

    signals.pre_save.connect(update_modified)

While this is not the most elaborate document model, it does demonstrate the
concepts involved. As a more complete demonstration you can also define your
handlers within your subclass::

    class Author(Document):
        name = StringField()

        @classmethod
        def pre_save(cls, sender, document, **kwargs):
            logging.debug("Pre Save: %s" % document.name)

@@ -49,5 +113,33 @@ Example usage::
    signals.pre_save.connect(Author.pre_save, sender=Author)
    signals.post_save.connect(Author.post_save, sender=Author)

Finally, you can also use this small decorator to quickly create a number of
signals and attach them to your :class:`~mongoengine.Document` or
:class:`~mongoengine.EmbeddedDocument` subclasses as class decorators::

    def handler(event):
        """Signal decorator to allow use of callback functions as class decorators."""

        def decorator(fn):
            def apply(cls):
                event.connect(fn, sender=cls)
                return cls

            fn.apply = apply
            return fn

        return decorator

Using the first example of updating a modification time the code is now much
cleaner looking while still allowing manual execution of the callback::

    @handler(signals.pre_save)
    def update_modified(sender, document):
        document.modified = datetime.utcnow()

    @update_modified.apply
    class Record(Document):
        modified = DateTimeField()


.. _blinker: http://pypi.python.org/pypi/blinker
51  docs/guide/text-indexes.rst  Normal file
@@ -0,0 +1,51 @@
===========
Text Search
===========

MongoDB 2.4 and above support searching documents with text indexes.


Defining a Document with text index
===================================
Use the *$* prefix to set a text index. Look at the declaration::

    class News(Document):
        title = StringField()
        content = StringField()
        is_active = BooleanField()

        meta = {'indexes': [
            {'fields': ['$title', "$content"],
             'default_language': 'english',
             'weights': {'title': 10, 'content': 2}
            }
        ]}


Querying
========

Saving a document::

    News(title="Using mongodb text search",
         content="Testing text search").save()

    News(title="MongoEngine 0.9 released",
         content="Various improvements").save()

Next, start a text search using the :attr:`QuerySet.search_text` method::

    document = News.objects.search_text('testing').first()
    document.title  # may be: "Using mongodb text search"

    document = News.objects.search_text('released').first()
    document.title  # may be: "MongoEngine 0.9 released"


Ordering by text score
======================

::

    objects = News.objects.search_text('mongo').order_by('$text_score')
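Each result of a text query also carries its computed relevance. If you need it
for an individual document, it can be read back with ``get_text_score()`` (a
brief, illustrative continuation of the example above; check that your
MongoEngine version provides this helper)::

    news = News.objects.search_text('mongo').first()
    news.get_text_score()  # e.g. 0.75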
@@ -7,16 +7,18 @@ MongoDB. To install it, simply run

.. code-block:: console

    $ pip install -U mongoengine

:doc:`tutorial`
    A quick tutorial building a tumblelog to get you up and running with
    MongoEngine.

:doc:`guide/index`
    The full guide to MongoEngine --- from modeling documents to storing files,
    from querying for data to firing signals and *everything* between.

:doc:`apireference`
    The complete API documentation --- the innards of documents, querysets and fields.

:doc:`upgrade`
    How to upgrade MongoEngine.

@@ -28,35 +30,50 @@ Community
---------

To get help with using MongoEngine, use the `MongoEngine Users mailing list
<http://groups.google.com/group/mongoengine-users>`_ or the ever popular
`stackoverflow <http://www.stackoverflow.com>`_.

Contributing
------------

**Yes please!** We are always looking for contributions, additions and improvements.

The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_
and contributions are always encouraged. Contributions can be as simple as
minor tweaks to this documentation, the website or the core.

To contribute, fork the project on
`GitHub <http://github.com/MongoEngine/mongoengine>`_ and send a
pull request.

Changes
-------

See the :doc:`changelog` for a full list of changes to MongoEngine and
:doc:`upgrade` for upgrade information.

.. note:: Always read and test the `upgrade <upgrade>`_ documentation before
    putting updates live in production **;)**

Offline Reading
---------------

Download the docs in `pdf <https://media.readthedocs.org/pdf/mongoengine-odm/latest/mongoengine-odm.pdf>`_
or `epub <https://media.readthedocs.org/epub/mongoengine-odm/latest/mongoengine-odm.epub>`_
formats for offline reading.


.. toctree::
    :maxdepth: 1
    :numbered:
    :hidden:

    tutorial
    guide/index
    apireference
    changelog
    upgrade
    django

Indices and tables
------------------
@@ -1,68 +1,78 @@
========
Tutorial
========

This tutorial introduces **MongoEngine** by means of example --- we will walk
through how to create a simple **Tumblelog** application. A tumblelog is a
blog that supports mixed media content, including text, images, links, video,
audio, etc. For simplicity's sake, we'll stick to text, image, and link
entries. As the purpose of this tutorial is to introduce MongoEngine, we'll
focus on the data-modelling side of the application, leaving out a user
interface.

Getting started
===============

Before we start, make sure that a copy of MongoDB is running in an accessible
location --- running it locally will be easier, but if that is not an option
then it may be run on a remote server. If you haven't installed MongoEngine,
simply use pip to install it like so::

    $ pip install mongoengine

Before we can start using MongoEngine, we need to tell it how to connect to our
instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect`
function. If running locally, the only argument we need to provide is the name
of the MongoDB database to use::

    from mongoengine import *

    connect('tumblelog')

There are lots of options for connecting to MongoDB, for more information about
them see the :ref:`guide-connecting` guide.
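For instance, if your database lives on another machine,
:func:`~mongoengine.connect` also accepts connection details such as a host
URI (a brief illustration; the URI below is a placeholder)::

    connect('tumblelog', host='mongodb://db.example.com:27017/tumblelog')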
Defining our documents
======================

MongoDB is *schemaless*, which means that no schema is enforced by the database
--- we may add and remove fields however we want and MongoDB won't complain.
This makes life a lot easier in many regards, especially when there is a change
to the data model. However, defining schemas for our documents can help to iron
out bugs involving incorrect types or missing fields, and also allow us to
define utility methods on our documents in the same way that traditional
:abbr:`ORMs (Object-Relational Mappers)` do.

In our Tumblelog application we need to store several different types of
information. We will need to have a collection of **users**, so that we may
link posts to an individual. We also need to store our different types of
**posts** (e.g. text, image and link) in the database. To aid navigation of our
Tumblelog, posts may have **tags** associated with them, so that the list of
posts shown to the user may be limited to posts that have been assigned a
specific tag. Finally, it would be nice if **comments** could be added to
posts. We'll start with **users**, as the other document models are slightly
more involved.

Users
-----

Just as if we were using a relational database with an ORM, we need to define
which fields a :class:`User` may have, and what types of data they might store::

    class User(Document):
        email = StringField(required=True)
        first_name = StringField(max_length=50)
        last_name = StringField(max_length=50)

This looks similar to how the structure of a table would be defined in a
regular ORM. The key difference is that this schema will never be passed on to
MongoDB --- this will only be enforced at the application level, making future
changes easy to manage. Also, the User documents will be stored in a
MongoDB *collection* rather than a table.

Posts, Comments and Tags
------------------------

Now we'll think about how to store the rest of the information. If we were
using a relational database, we would most likely have a table of **posts**, a
table of **comments** and a table of **tags**. To associate the comments with
@@ -75,21 +85,25 @@ of them stand out as particularly intuitive solutions.

Posts
^^^^^

Happily MongoDB *isn't* a relational database, so we're not going to do it that
way. As it turns out, we can use MongoDB's schemaless nature to provide us with
a much nicer solution. We will store all of the posts in *one collection* and
each post type will only store the fields it needs. If we later want to add
video posts, we don't have to modify the collection at all, we just *start
using* the new fields we need to support video posts. This fits with the
Object-Oriented principle of *inheritance* nicely. We can think of
:class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and
:class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports
this kind of modeling out of the box --- all you need do is turn on inheritance
by setting :attr:`allow_inheritance` to True in the :attr:`meta`::

    class Post(Document):
        title = StringField(max_length=120, required=True)
        author = ReferenceField(User)

        meta = {'allow_inheritance': True}

    class TextPost(Post):
        content = StringField()
@@ -100,20 +114,21 @@ this kind of modelling out of the box::
        link_url = StringField()

We are storing a reference to the author of the posts using a
:class:`~mongoengine.fields.ReferenceField` object. These are similar to foreign key
fields in traditional ORMs, and are automatically translated into references
when they are saved, and dereferenced when they are loaded.

Tags
^^^^

Now that we have our Post models figured out, how will we attach tags to them?
MongoDB allows us to store lists of items natively, so rather than having a
link table, we can just store a list of tags in each post. So, for both
efficiency and simplicity's sake, we'll store the tags as strings directly
within the post, rather than storing references to tags in a separate
collection. Especially as tags are generally very short (often even shorter
than a document's id), this denormalization won't impact the size of the
database very strongly. Let's take a look at the code of our modified
:class:`Post` class::

    class Post(Document):
@@ -121,16 +136,19 @@ size of our database. So let's take a look that the code our modified
        author = ReferenceField(User)
        tags = ListField(StringField(max_length=30))

The :class:`~mongoengine.fields.ListField` object that is used to define a Post's tags
takes a field object as its first argument --- this means that you can have
lists of any type of field (including lists).

.. note:: We don't need to modify the specialized post types as they all
    inherit from :class:`Post`.

Comments
^^^^^^^^

A comment is typically associated with *one* post. In a relational database, to
display a post with its comments, we would have to retrieve the post from the
database and then query the database again for the comments associated with the
post. This works, but there is no real reason to be storing the comments
separately from their associated posts, other than to work around the
relational model. Using MongoDB we can store the comments as a list of
@@ -155,7 +173,7 @@ We can then store a list of comment documents in our post document::
Handling deletions of references
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The :class:`~mongoengine.fields.ReferenceField` object takes a keyword
`reverse_delete_rule` for handling deletion rules if the reference is deleted.
To delete all the posts if a user is deleted set the rule::

@@ -165,9 +183,9 @@ To delete all the posts if a user is deleted set the rule::
        tags = ListField(StringField(max_length=30))
        comments = ListField(EmbeddedDocumentField(Comment))

See :class:`~mongoengine.fields.ReferenceField` for more information.

.. note::
    MapFields and DictFields currently don't support automatic handling of
    deleted references
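For reference, the rule mentioned above is declared directly on the field; the
relevant declaration looks roughly like this (a short sketch of the
:class:`Post` class)::

    class Post(Document):
        title = StringField(max_length=120, required=True)
        author = ReferenceField(User, reverse_delete_rule=CASCADE)
        tags = ListField(StringField(max_length=30))
        comments = ListField(EmbeddedDocumentField(Comment))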
@@ -178,15 +196,15 @@ Now that we've defined how our documents will be structured, let's start adding
some documents to the database. Firstly, we'll need to create a :class:`User`
object::

    ross = User(email='ross@example.com', first_name='Ross', last_name='Lawley').save()

.. note::
    We could have also defined our user using attribute syntax::

        ross = User(email='ross@example.com')
        ross.first_name = 'Ross'
        ross.last_name = 'Lawley'
        ross.save()

Now that we've got our user in the database, let's add a couple of posts::

@@ -195,16 +213,17 @@ Now that we've got our user in the database, let's add a couple of posts::
    post1.tags = ['mongodb', 'mongoengine']
    post1.save()

    post2 = LinkPost(title='MongoEngine Documentation', author=ross)
    post2.link_url = 'http://docs.mongoengine.com/'
    post2.tags = ['mongoengine']
    post2.save()

.. note:: If you change a field on an object that has already been saved and
    then call :meth:`save` again, the document will be updated.

Accessing our data
==================

So now we've got a couple of posts in our database, how do we display them?
Each document class (i.e. any class that inherits either directly or indirectly
from :class:`~mongoengine.Document`) has an :attr:`objects` attribute, which is
@@ -212,16 +231,17 @@ used to access the documents in the database collection associated with that
class. So let's see how we can get our posts' titles::

    for post in Post.objects:
        print(post.title)

Retrieving type-specific information
------------------------------------

This will print the titles of our posts, one on each line. But what if we want
to access the type-specific data (link_url, content, etc.)? One way is simply
to use the :attr:`objects` attribute of a subclass of :class:`Post`::

    for post in TextPost.objects:
        print(post.content)

Using TextPost's :attr:`objects` attribute only returns documents that were
created using :class:`TextPost`. Actually, there is a more general rule here:
@@ -238,22 +258,21 @@ instances of :class:`Post` --- they were instances of the subclass of
practice::

    for post in Post.objects:
        print(post.title)
        print('=' * len(post.title))

        if isinstance(post, TextPost):
            print(post.content)

        if isinstance(post, LinkPost):
            print('Link: {}'.format(post.link_url))

This would print the title of each post, followed by the content if it was a
text post, and "Link: <url>" if it was a link post.

Searching our posts by tag
--------------------------

The :attr:`objects` attribute of a :class:`~mongoengine.Document` is actually a
:class:`~mongoengine.queryset.QuerySet` object. This lazily queries the
database only when you need the data. It may also be filtered to narrow down
@@ -261,7 +280,7 @@ your query. Let's adjust our query so that only posts with the tag "mongodb"
are returned::

    for post in Post.objects(tags='mongodb'):
        print(post.title)

There are also methods available on :class:`~mongoengine.queryset.QuerySet`
objects that allow different results to be returned, for example, calling
@@ -270,5 +289,11 @@ the first matched by the query you provide. Aggregation functions may also be
used on :class:`~mongoengine.queryset.QuerySet` objects::

    num_posts = Post.objects(tags='mongodb').count()
    print('Found {} posts with tag "mongodb"'.format(num_posts))

Learning more about MongoEngine
-------------------------------

If you got this far you've made a great start, so well done! The next step on
your MongoEngine journey is the `full user guide <guide/index.html>`_, where
you can learn in-depth about how to use MongoEngine and MongoDB.
536  docs/upgrade.rst
@@ -1,33 +1,521 @@
#########
Upgrading
#########

Development
***********
(Fill this out whenever you introduce breaking changes to MongoEngine)

0.13.0
******
This release adds Unicode support to the `EmailField` and changes its
structure significantly. Previously, email addresses containing Unicode
characters didn't work at all. Starting with v0.13.0, domains with Unicode
characters are supported out of the box, meaning some emails that previously
didn't pass validation now do. Make sure the rest of your application can
accept such email addresses. Additionally, if you subclassed the `EmailField`
in your application and overrode `EmailField.EMAIL_REGEX`, you will have to
adjust your code to override `EmailField.USER_REGEX`, `EmailField.DOMAIN_REGEX`,
and potentially `EmailField.UTF8_USER_REGEX`.
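If you did subclass it, the change is of roughly this shape (the class name and
pattern below are purely illustrative)::

    import re
    from mongoengine import EmailField

    class CompanyEmailField(EmailField):
        # Before 0.13.0 a subclass might have overridden EMAIL_REGEX here;
        # override the user-part regex instead (illustrative pattern only).
        USER_REGEX = re.compile(r"^[A-Z0-9._%+-]+\Z", re.IGNORECASE)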
0.12.0
******
This release includes various fixes for the `BaseQuerySet` methods and how they
are chained together. Since version 0.10.1 applying limit/skip/hint/batch_size
to an already-existing queryset wouldn't modify the underlying PyMongo cursor.
This has been fixed now, so you'll need to make sure that your code didn't rely
on the broken implementation.

Additionally, a public `BaseQuerySet.clone_into` has been renamed to a private
`_clone_into`. If you directly used that method in your code, you'll need to
rename its occurrences.

0.11.0
******
This release includes a major overhaul of MongoEngine's code quality and
introduces a few breaking changes. It also touches many different parts of
the package and although all the changes have been tested and scrutinized,
you're encouraged to thoroughly test the upgrade.

The first breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`.
If you import or catch this exception, you'll need to rename it in your code.

The second breaking change drops Python v2.6 support. If you run MongoEngine on
that Python version, you'll need to upgrade it first.

The third breaking change drops an old backward compatibility measure where
`from mongoengine.base import ErrorClass` would work on top of
`from mongoengine.errors import ErrorClass` (where `ErrorClass` is e.g.
`ValidationError`). If you import any exceptions from `mongoengine.base`,
change it to `mongoengine.errors`.
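The rename itself is a one-line change wherever the exception is imported or
caught, e.g.::

    # Old
    from mongoengine import ConnectionError

    # New
    from mongoengine import MongoEngineConnectionError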
0.10.8
******
This version fixed an issue where specifying a MongoDB URI host would override
more information than it should. These changes are minor, but they still
subtly modify the connection logic and thus you're encouraged to test your
MongoDB connection before shipping v0.10.8 in production.

0.10.7
******

`QuerySet.aggregate_sum` and `QuerySet.aggregate_average` are dropped. Use
`QuerySet.sum` and `QuerySet.average` instead, which use the aggregation
framework by default from now on.
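For example (the ``Animal`` document and its ``age`` field are illustrative)::

    # Old
    total_age = Animal.objects.aggregate_sum('age')

    # New
    total_age = Animal.objects.sum('age')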
0.9.0
*****

The 0.8.7 package on pypi was corrupted. If upgrading from 0.8.7 to 0.9.0 please follow: ::

    pip uninstall pymongo
    pip uninstall mongoengine
    pip install pymongo==2.8
    pip install mongoengine

0.8.7
*****

Calling reload on deleted / nonexistent documents now raises a DoesNotExist
exception.


0.8.2 to 0.8.3
**************

Minor change that may impact users:

DynamicDocument fields are now stored in creation order after any declared
fields. Previously they were stored alphabetically.


0.7 to 0.8
**********

There have been numerous backwards breaking changes in 0.8. The reasons for
these are to ensure that MongoEngine has sane defaults going forward and that it
performs the best it can out of the box. Where possible there have been
FutureWarnings to help get you ready for the change, but that hasn't been
possible for the whole of the release.

.. warning:: Breaking changes - test upgrading on a test system before putting
    live. There may be multiple manual steps in migrating and these are best
    honed on a staging / test system.

Python and PyMongo
==================

MongoEngine requires python 2.6 (or above) and pymongo 2.5 (or above)

Data Model
==========
Inheritance
-----------

The inheritance model has changed: we no longer need to store an array of
:attr:`types` with the model --- we can just use the classname in :attr:`_cls`.
This means that you will have to update your indexes for each of your
inherited classes like so: ::

    # 1. Declaration of the class
    class Animal(Document):
        name = StringField()
        meta = {
            'allow_inheritance': True,
            'indexes': ['name']
        }

    # 2. Remove _types
    collection = Animal._get_collection()
    collection.update({}, {"$unset": {"_types": 1}}, multi=True)

    # 3. Confirm extra data is removed
    count = collection.find({'_types': {"$exists": True}}).count()
    assert count == 0

    # 4. Remove indexes
    info = collection.index_information()
    indexes_to_drop = [key for key, value in info.iteritems()
                       if '_types' in dict(value['key'])]
    for index in indexes_to_drop:
        collection.drop_index(index)

    # 5. Recreate indexes
    Animal.ensure_indexes()


Document Definition
-------------------

The default for inheritance has changed - it is now off by default and
:attr:`_cls` will not be stored automatically with the class. So if you extend
your :class:`~mongoengine.Document` or :class:`~mongoengine.EmbeddedDocuments`
you will need to declare :attr:`allow_inheritance` in the meta data like so: ::

    class Animal(Document):
        name = StringField()

        meta = {'allow_inheritance': True}

Previously, if you had data in the database that wasn't defined in the Document
definition, it would set it as an attribute on the document. This is no longer
the case and the data is set only in the ``document._data`` dictionary: ::

    >>> from mongoengine import *
    >>> class Animal(Document):
    ...     name = StringField()
    ...
    >>> cat = Animal(name="kit", size="small")

    # 0.7
    >>> cat.size
    u'small'

    # 0.8
    >>> cat.size
    Traceback (most recent call last):
      File "<stdin>", line 1, in <module>
    AttributeError: 'Animal' object has no attribute 'size'

The Document class has introduced a reserved function `clean()`, which will be
called before saving the document. If your document class happens to have a
method with the same name, please try to rename it::

    def clean(self):
        pass
ReferenceField
--------------

ReferenceFields now store ObjectIds by default - this is more efficient than
DBRefs as we already know what Document types they reference::

    # Old code
    class Animal(Document):
        name = ReferenceField('self')

    # New code to keep dbrefs
    class Animal(Document):
        name = ReferenceField('self', dbref=True)

To migrate all the references you need to touch each object and mark it as
dirty, e.g.::

    # Doc definition
    class Person(Document):
        name = StringField()
        parent = ReferenceField('self')
        friends = ListField(ReferenceField('self'))

    # Mark all ReferenceFields as dirty and save
    for p in Person.objects:
        p._mark_as_changed('parent')
        p._mark_as_changed('friends')
        p.save()

`An example test migration for ReferenceFields is available on github
<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/refrencefield_dbref_to_object_id.py>`_.

.. note:: Internally mongoengine handles ReferenceFields the same, so they are
    converted to DBRef on loading and ObjectIds or DBRefs depending on settings
    on storage.

UUIDField
---------

UUIDFields now default to storing binary values::

    # Old code
    class Animal(Document):
        uuid = UUIDField()

    # New code to keep the old (non-binary) behaviour
    class Animal(Document):
        uuid = UUIDField(binary=False)

To migrate all the uuids you need to touch each object and mark it as dirty,
e.g.::

    # Doc definition
    class Animal(Document):
        uuid = UUIDField()

    # Mark all UUIDFields as dirty and save
    for a in Animal.objects:
        a._mark_as_changed('uuid')
        a.save()

`An example test migration for UUIDFields is available on github
<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/uuidfield_to_binary.py>`_.

DecimalField
------------

DecimalFields now store floats - previously they were stored as strings, which
made it impossible to do comparisons correctly when querying::

    # Old code
    class Person(Document):
        balance = DecimalField()

    # New code to keep the old string behaviour
    class Person(Document):
        balance = DecimalField(force_string=True)

To migrate all the DecimalFields you need to touch each object and mark it as
dirty, e.g.::

    # Doc definition
    class Person(Document):
        balance = DecimalField()

    # Mark all DecimalFields as dirty and save
    for p in Person.objects:
        p._mark_as_changed('balance')
        p.save()

.. note:: DecimalFields have also been improved with the addition of precision
    and rounding. See :class:`~mongoengine.fields.DecimalField` for more information.

`An example test migration for DecimalFields is available on github
<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/decimalfield_as_float.py>`_.
Cascading Saves
---------------

To improve performance document saves will no longer automatically cascade.
Any changes to a Document's references will either have to be saved manually or
you will have to explicitly tell it to cascade on save::

    # At the class level:
    class Person(Document):
        meta = {'cascade': True}

    # Or on save:
    my_document.save(cascade=True)

Storage
-------

Document and Embedded Documents are now serialized based on declared field order.
Previously, the data was passed to mongodb as a dictionary, which meant that
order wasn't guaranteed - so things like ``$addToSet`` operations on
:class:`~mongoengine.EmbeddedDocument` could potentially fail in unexpected
ways.

If this impacts you, you may want to rewrite the objects using the
``doc._mark_as_changed('field')`` pattern described above. If you are using a
compound primary key then you will need to ensure the order is fixed and match
your EmbeddedDocument to that order.

Querysets
=========

Attack of the clones
--------------------

Querysets now return clones and should no longer be considered editable in
place. This brings us in line with how Django's querysets work and removes a
long running gotcha. If you edit your querysets in place you will have to
update your code like so: ::

    # Old code:
    mammals = Animal.objects(type="mammal")
    mammals.filter(order="Carnivora")  # Returns a cloned queryset that isn't assigned to anything - so this will break in 0.8
    [m for m in mammals]  # This will return all mammals in 0.8 as the 2nd filter returned a new queryset

    # Update example a) assign queryset after a change:
    mammals = Animal.objects(type="mammal")
    carnivores = mammals.filter(order="Carnivora")  # Reassign the new queryset so filter can be applied
    [m for m in carnivores]  # This will return all carnivores

    # Update example b) chain the queryset:
    mammals = Animal.objects(type="mammal").filter(order="Carnivora")  # The final queryset is assigned to mammals
    [m for m in mammals]  # This will return all carnivores

Len iterates the queryset
-------------------------

If you ever did `len(queryset)` it previously did a `count()` under the covers,
which caused some unusual issues. As `len(queryset)` is most often used by
`list(queryset)` we now cache the queryset results and use that for the length.

This isn't as performant as a `count()` and if you aren't iterating the
queryset you should upgrade to use count::

    # Old code
    len(Animal.objects(type="mammal"))

    # New code
    Animal.objects(type="mammal").count()


.only() now inline with .exclude()
----------------------------------

The behaviour of `.only()` was highly ambiguous; now it works in mirror fashion
to `.exclude()`. Chaining `.only()` calls will increase the fields required::

    # Old code
    Animal.objects().only(['type', 'name']).only('name', 'order')  # Would have returned just `name`

    # New code
    Animal.objects().only('name')

    # Note:
    Animal.objects().only(['name']).only('order')  # Now returns `name` *and* `order`


Client
======
PyMongo 2.4 came with a new connection client, MongoClient_, and started the
deprecation of the old :class:`~pymongo.connection.Connection`. MongoEngine
now uses the latest `MongoClient` for connections. By default operations were
`safe` but if you turned them off or used the connection directly this will
impact your queries.

Querysets
---------

Safe
^^^^

`safe` has been deprecated in the new MongoClient connection. Please use
`write_concern` instead. As `safe` always defaulted to `True` normally no code
change is required. To disable confirmation of the write just pass `{"w": 0}`,
e.g.: ::

    # Old code
    Animal(name="Dinosaur").save(safe=False)

    # New code
    Animal(name="Dinosaur").save(write_concern={"w": 0})

Write Concern
^^^^^^^^^^^^^

`write_options` has been replaced with `write_concern` to bring it inline with
pymongo. To upgrade simply rename any instances where you used the `write_option`
keyword to `write_concern` like so::

    # Old code
    Animal(name="Dinosaur").save(write_options={"w": 2})

    # New code
    Animal(name="Dinosaur").save(write_concern={"w": 2})


Indexes
=======

Index methods are no longer tied to querysets but rather to the document class.
Although `QuerySet._ensure_indexes` and `QuerySet.ensure_index` still exist,
they should be replaced with :func:`~mongoengine.Document.ensure_indexes` /
:func:`~mongoengine.Document.ensure_index`.

SequenceFields
==============

:class:`~mongoengine.fields.SequenceField` now inherits from `BaseField` to
allow flexible storage of the calculated value. As such MIN and MAX settings
are no longer handled.

.. _MongoClient: http://blog.mongodb.org/post/36666163412/introducing-mongoclient
0.6 to 0.7
|
||||||
|
**********
|
||||||
|
|
||||||
|
Cascade saves
|
||||||
|
=============
|
||||||
|
|
||||||
|
Saves will raise a `FutureWarning` if they cascade and cascade hasn't been set
|
||||||
|
to True. This is because in 0.8 it will default to False. If you require
|
||||||
|
cascading saves then either set it in the `meta` or pass
|
||||||
|
via `save` eg ::
|
||||||
|
|
||||||
|
# At the class level:
|
||||||
|
class Person(Document):
|
||||||
|
meta = {'cascade': True}
|
||||||
|
|
||||||
|
# Or in code:
|
||||||
|
my_document.save(cascade=True)
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
Remember: cascading saves **do not** cascade through lists.
|
||||||
|
|
||||||
|
ReferenceFields
|
||||||
|
===============
|
||||||
|
|
||||||
|
ReferenceFields now can store references as ObjectId strings instead of DBRefs.
|
||||||
|
This will become the default in 0.8 and if `dbref` is not set a `FutureWarning`
|
||||||
|
will be raised.
|
||||||
|
|
||||||
|
|
||||||
|
To explicitly continue to use DBRefs change the `dbref` flag
|
||||||
|
to True ::
|
||||||
|
|
||||||
|
class Person(Document):
|
||||||
|
groups = ListField(ReferenceField(Group, dbref=True))
|
||||||
|
|
||||||
|
To migrate to using strings instead of DBRefs you will have to manually
|
||||||
|
migrate ::
|
||||||
|
|
||||||
|
# Step 1 - Migrate the model definition
|
||||||
|
class Group(Document):
|
||||||
|
author = ReferenceField(User, dbref=False)
|
||||||
|
members = ListField(ReferenceField(User, dbref=False))
|
||||||
|
|
||||||
|
# Step 2 - Migrate the data
|
||||||
|
for g in Group.objects():
|
||||||
|
g.author = g.author
|
||||||
|
g.members = g.members
|
||||||
|
g.save()
|
||||||
|
|
||||||
|
|
||||||
|
item_frequencies
|
||||||
|
================
|
||||||
|
|
||||||
|
In the 0.6 series we added support for null / zero / false values in
|
||||||
|
item_frequencies. A side effect was to return keys in the value they are
|
||||||
|
stored in rather than as string representations. Your code may need to be
|
||||||
|
updated to handle native types rather than strings keys for the results of
|
||||||
|
item frequency queries.
|
||||||
|
|
||||||
|
BinaryFields
|
||||||
|
============
|
||||||
|
|
||||||
|
Binary fields have been updated so that they are native binary types. If you
|
||||||
|
previously were doing `str` comparisons with binary field values you will have
|
||||||
|
to update and wrap the value in a `str`.
|
||||||
|
|
||||||
|
0.5 to 0.6
**********

Embedded Documents - if you had a `pk` field you will have to rename it from
`_id` to `pk` as pk is no longer a property of Embedded Documents.

Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw
an InvalidDocument error as they aren't currently supported.

Document._get_subclasses - is no longer used and the class method has been
removed.

Document.objects.with_id - now raises an InvalidQueryError if used with a
filter.

FutureWarning - A future warning has been added to all inherited classes that
don't define :attr:`allow_inheritance` in their meta.

You may need to update PyMongo to 2.0 for use with Sharding.


0.4 to 0.5
**********

There have been the following backwards incompatibilities from 0.4 to 0.5. The
main areas of change are: choices in fields, map_reduce and collection names.

Choice options:
===============

Are now expected to be an iterable of tuples, with the first element in each
tuple being the actual value to be stored. The second element is the
human-readable name for the option.
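For example (an illustrative model, not one from this changelog)::

    class Shirt(Document):
        SIZES = (
            ('S', 'Small'),    # first element: the value stored in MongoDB
            ('M', 'Medium'),   # second element: the human-readable name
            ('L', 'Large'),
        )
        size = StringField(max_length=1, choices=SIZES)
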

@@ -35,13 +523,13 @@ human-readable name for the option.

PyMongo / MongoDB
=================

map reduce now requires pymongo 1.11+ - the pymongo `merge_output` and
`reduce_output` parameters have been deprecated.

More methods now use map_reduce as db.eval is not supported for sharding, as
such the following have been changed:

* :meth:`~mongoengine.queryset.QuerySet.sum`
* :meth:`~mongoengine.queryset.QuerySet.average`
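Only the server-side implementation moved to map_reduce; the calling
convention of these helpers is unchanged (``Person`` is an illustrative
model)::

    total_age = Person.objects.sum('age')
    average_age = Person.objects.average('age')
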

@@ -49,10 +537,10 @@ the following have been changed:

Default collection naming
=========================

Previously it was just lowercase, it's now much more pythonic and readable as
it's lowercase and underscores, previously ::

    class MyAceDocument(Document):
        pass
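For the ``MyAceDocument`` example above that means the default collection name
changes from the plain lowercased form to the underscored one
(``_get_collection_name()`` is an internal helper, shown here purely for
illustration)::

    MyAceDocument._get_collection_name()
    # 0.4:  "myacedocument"
    # 0.5:  "my_ace_document"
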

@@ -88,7 +576,8 @@ Alternatively, you can rename your collections eg ::

        failure = False

        collection_names = [d._get_collection_name()
                            for d in _document_registry.values()]

        for new_style_name in collection_names:
            if not new_style_name:  # embedded documents don't have collections
@@ -106,10 +595,17 @@ Alternatively, you can rename your collections eg ::
                        old_style_name, new_style_name)
                else:
                    db[old_style_name].rename(new_style_name)
                    print "Renamed: %s to %s" % (old_style_name,
                                                 new_style_name)

        if failure:
            print "Upgrading collection names failed"
        else:
            print "Upgraded collection names"


mongodb 1.8 > 2.0 +
===================

It's been reported that indexes may need to be recreated for the newer index
format. To do this, drop the existing indexes and call ``ensure_indexes`` on
each model.
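A minimal sketch of that re-indexing step (the model classes are placeholders
for your own ``Document`` subclasses)::

    for doc_cls in (Person, Group):
        doc_cls._get_collection().drop_indexes()   # drop the old-format indexes
        doc_cls.ensure_indexes()                   # recreate them
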
@@ -1,24 +1,36 @@
-import document
-from document import *
-import fields
-from fields import *
-import connection
-from connection import *
-import queryset
-from queryset import *
-import signals
-from signals import *
-
-__all__ = (document.__all__ + fields.__all__ + connection.__all__ +
-           queryset.__all__ + signals.__all__)
-
-VERSION = (0, 6, 7)
+# Import submodules so that we can expose their __all__
+from mongoengine import connection
+from mongoengine import document
+from mongoengine import errors
+from mongoengine import fields
+from mongoengine import queryset
+from mongoengine import signals
+
+# Import everything from each submodule so that it can be accessed via
+# mongoengine, e.g. instead of `from mongoengine.connection import connect`,
+# users can simply use `from mongoengine import connect`, or even
+# `from mongoengine import *` and then `connect('testdb')`.
+from mongoengine.connection import *
+from mongoengine.document import *
+from mongoengine.errors import *
+from mongoengine.fields import *
+from mongoengine.queryset import *
+from mongoengine.signals import *
+
+__all__ = (list(document.__all__) + list(fields.__all__) +
+           list(connection.__all__) + list(queryset.__all__) +
+           list(signals.__all__) + list(errors.__all__))
+
+VERSION = (0, 13, 0)
 
 
 def get_version():
-    version = '%s.%s' % (VERSION[0], VERSION[1])
-    if VERSION[2]:
-        version = '%s.%s' % (version, VERSION[2])
-    return version
+    """Return the VERSION as a string, e.g. for VERSION == (0, 10, 7),
+    return '0.10.7'.
+    """
+    return '.'.join(map(str, VERSION))
 
 
 __version__ = get_version()
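As the new module comments above describe, everything stays importable from
the package root. A small usage sketch (the database name is arbitrary)::

    from mongoengine import connect, Document, StringField

    connect('testdb')

    import mongoengine
    print mongoengine.get_version()   # e.g. '0.13.0'
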
1375  mongoengine/base.py  (file diff suppressed because it is too large)
28    mongoengine/base/__init__.py  (new file)
@@ -0,0 +1,28 @@
# Base module is split into several files for convenience. Files inside of
# this module should import from a specific submodule (e.g.
# `from mongoengine.base.document import BaseDocument`), but all of the
# other modules should import directly from the top-level module (e.g.
# `from mongoengine.base import BaseDocument`). This approach is cleaner and
# also helps with cyclical import errors.
from mongoengine.base.common import *
from mongoengine.base.datastructures import *
from mongoengine.base.document import *
from mongoengine.base.fields import *
from mongoengine.base.metaclasses import *


__all__ = (
    # common
    'UPDATE_OPERATORS', '_document_registry', 'get_document',

    # datastructures
    'BaseDict', 'BaseList', 'EmbeddedDocumentList',

    # document
    'BaseDocument',

    # fields
    'BaseField', 'ComplexBaseField', 'ObjectIdField', 'GeoJsonBaseField',

    # metaclasses
    'DocumentMetaclass', 'TopLevelDocumentMetaclass'
)
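A short sketch of the import convention described in the comments above::

    # Outside of `mongoengine.base`: import from the package root.
    from mongoengine.base import BaseDocument, get_document

    # Inside `mongoengine/base/` itself: import from the specific submodule
    # to avoid circular imports.
    from mongoengine.base.document import BaseDocument
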
31    mongoengine/base/common.py  (new file)

@@ -0,0 +1,31 @@
from mongoengine.errors import NotRegistered

__all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry')


UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
                        'push_all', 'pull', 'pull_all', 'add_to_set',
                        'set_on_insert', 'min', 'max', 'rename'])


_document_registry = {}


def get_document(name):
    """Get a document class by name."""
    doc = _document_registry.get(name, None)
    if not doc:
        # Possible old style name
        single_end = name.split('.')[-1]
        compound_end = '.%s' % single_end
        possible_match = [k for k in _document_registry.keys()
                          if k.endswith(compound_end) or k == single_end]
        if len(possible_match) == 1:
            doc = _document_registry.get(possible_match.pop(), None)
    if not doc:
        raise NotRegistered("""
            `%s` has not been registered in the document registry.
            Importing the document class automatically registers it, has it
            been imported?
        """.strip() % name)
    return doc
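A small usage sketch for the registry helper above (the ``Person`` model is
illustrative): defining or importing a ``Document`` subclass registers it,
after which it can be looked up by name::

    from mongoengine import Document, StringField
    from mongoengine.base import get_document

    class Person(Document):
        name = StringField()

    assert get_document('Person') is Person
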
486   mongoengine/base/datastructures.py  (new file)

@@ -0,0 +1,486 @@
import itertools
|
||||||
|
import weakref
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
from mongoengine.common import _import_class
|
||||||
|
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
|
||||||
|
|
||||||
|
__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList')
|
||||||
|
|
||||||
|
|
||||||
|
class BaseDict(dict):
|
||||||
|
"""A special dict so we can watch any changes."""
|
||||||
|
|
||||||
|
_dereferenced = False
|
||||||
|
_instance = None
|
||||||
|
_name = None
|
||||||
|
|
||||||
|
def __init__(self, dict_items, instance, name):
|
||||||
|
Document = _import_class('Document')
|
||||||
|
EmbeddedDocument = _import_class('EmbeddedDocument')
|
||||||
|
|
||||||
|
if isinstance(instance, (Document, EmbeddedDocument)):
|
||||||
|
self._instance = weakref.proxy(instance)
|
||||||
|
self._name = name
|
||||||
|
super(BaseDict, self).__init__(dict_items)
|
||||||
|
|
||||||
|
def __getitem__(self, key, *args, **kwargs):
|
||||||
|
value = super(BaseDict, self).__getitem__(key)
|
||||||
|
|
||||||
|
EmbeddedDocument = _import_class('EmbeddedDocument')
|
||||||
|
if isinstance(value, EmbeddedDocument) and value._instance is None:
|
||||||
|
value._instance = self._instance
|
||||||
|
elif not isinstance(value, BaseDict) and isinstance(value, dict):
|
||||||
|
value = BaseDict(value, None, '%s.%s' % (self._name, key))
|
||||||
|
super(BaseDict, self).__setitem__(key, value)
|
||||||
|
value._instance = self._instance
|
||||||
|
elif not isinstance(value, BaseList) and isinstance(value, list):
|
||||||
|
value = BaseList(value, None, '%s.%s' % (self._name, key))
|
||||||
|
super(BaseDict, self).__setitem__(key, value)
|
||||||
|
value._instance = self._instance
|
||||||
|
return value
|
||||||
|
|
||||||
|
def __setitem__(self, key, value, *args, **kwargs):
|
||||||
|
self._mark_as_changed(key)
|
||||||
|
return super(BaseDict, self).__setitem__(key, value)
|
||||||
|
|
||||||
|
def __delete__(self, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseDict, self).__delete__(*args, **kwargs)
|
||||||
|
|
||||||
|
def __delitem__(self, key, *args, **kwargs):
|
||||||
|
self._mark_as_changed(key)
|
||||||
|
return super(BaseDict, self).__delitem__(key)
|
||||||
|
|
||||||
|
def __delattr__(self, key, *args, **kwargs):
|
||||||
|
self._mark_as_changed(key)
|
||||||
|
return super(BaseDict, self).__delattr__(key)
|
||||||
|
|
||||||
|
def __getstate__(self):
|
||||||
|
self.instance = None
|
||||||
|
self._dereferenced = False
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __setstate__(self, state):
|
||||||
|
self = state
|
||||||
|
return self
|
||||||
|
|
||||||
|
def clear(self, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseDict, self).clear()
|
||||||
|
|
||||||
|
def pop(self, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseDict, self).pop(*args, **kwargs)
|
||||||
|
|
||||||
|
def popitem(self, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseDict, self).popitem()
|
||||||
|
|
||||||
|
def setdefault(self, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseDict, self).setdefault(*args, **kwargs)
|
||||||
|
|
||||||
|
def update(self, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseDict, self).update(*args, **kwargs)
|
||||||
|
|
||||||
|
def _mark_as_changed(self, key=None):
|
||||||
|
if hasattr(self._instance, '_mark_as_changed'):
|
||||||
|
if key:
|
||||||
|
self._instance._mark_as_changed('%s.%s' % (self._name, key))
|
||||||
|
else:
|
||||||
|
self._instance._mark_as_changed(self._name)
|
||||||
|
|
||||||
|
|
||||||
|
class BaseList(list):
|
||||||
|
"""A special list so we can watch any changes."""
|
||||||
|
|
||||||
|
_dereferenced = False
|
||||||
|
_instance = None
|
||||||
|
_name = None
|
||||||
|
|
||||||
|
def __init__(self, list_items, instance, name):
|
||||||
|
Document = _import_class('Document')
|
||||||
|
EmbeddedDocument = _import_class('EmbeddedDocument')
|
||||||
|
|
||||||
|
if isinstance(instance, (Document, EmbeddedDocument)):
|
||||||
|
self._instance = weakref.proxy(instance)
|
||||||
|
self._name = name
|
||||||
|
super(BaseList, self).__init__(list_items)
|
||||||
|
|
||||||
|
def __getitem__(self, key, *args, **kwargs):
|
||||||
|
value = super(BaseList, self).__getitem__(key)
|
||||||
|
|
||||||
|
EmbeddedDocument = _import_class('EmbeddedDocument')
|
||||||
|
if isinstance(value, EmbeddedDocument) and value._instance is None:
|
||||||
|
value._instance = self._instance
|
||||||
|
elif not isinstance(value, BaseDict) and isinstance(value, dict):
|
||||||
|
value = BaseDict(value, None, '%s.%s' % (self._name, key))
|
||||||
|
super(BaseList, self).__setitem__(key, value)
|
||||||
|
value._instance = self._instance
|
||||||
|
elif not isinstance(value, BaseList) and isinstance(value, list):
|
||||||
|
value = BaseList(value, None, '%s.%s' % (self._name, key))
|
||||||
|
super(BaseList, self).__setitem__(key, value)
|
||||||
|
value._instance = self._instance
|
||||||
|
return value
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
for i in xrange(self.__len__()):
|
||||||
|
yield self[i]
|
||||||
|
|
||||||
|
def __setitem__(self, key, value, *args, **kwargs):
|
||||||
|
if isinstance(key, slice):
|
||||||
|
self._mark_as_changed()
|
||||||
|
else:
|
||||||
|
self._mark_as_changed(key)
|
||||||
|
return super(BaseList, self).__setitem__(key, value)
|
||||||
|
|
||||||
|
def __delitem__(self, key, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseList, self).__delitem__(key)
|
||||||
|
|
||||||
|
def __setslice__(self, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseList, self).__setslice__(*args, **kwargs)
|
||||||
|
|
||||||
|
def __delslice__(self, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseList, self).__delslice__(*args, **kwargs)
|
||||||
|
|
||||||
|
def __getstate__(self):
|
||||||
|
self.instance = None
|
||||||
|
self._dereferenced = False
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __setstate__(self, state):
|
||||||
|
self = state
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __iadd__(self, other):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseList, self).__iadd__(other)
|
||||||
|
|
||||||
|
def __imul__(self, other):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseList, self).__imul__(other)
|
||||||
|
|
||||||
|
def append(self, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseList, self).append(*args, **kwargs)
|
||||||
|
|
||||||
|
def extend(self, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseList, self).extend(*args, **kwargs)
|
||||||
|
|
||||||
|
def insert(self, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseList, self).insert(*args, **kwargs)
|
||||||
|
|
||||||
|
def pop(self, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseList, self).pop(*args, **kwargs)
|
||||||
|
|
||||||
|
def remove(self, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseList, self).remove(*args, **kwargs)
|
||||||
|
|
||||||
|
def reverse(self, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseList, self).reverse()
|
||||||
|
|
||||||
|
def sort(self, *args, **kwargs):
|
||||||
|
self._mark_as_changed()
|
||||||
|
return super(BaseList, self).sort(*args, **kwargs)
|
||||||
|
|
||||||
|
def _mark_as_changed(self, key=None):
|
||||||
|
if hasattr(self._instance, '_mark_as_changed'):
|
||||||
|
if key:
|
||||||
|
self._instance._mark_as_changed(
|
||||||
|
'%s.%s' % (self._name, key % len(self))
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self._instance._mark_as_changed(self._name)
|
||||||
|
|
||||||
|
|
||||||
|
class EmbeddedDocumentList(BaseList):
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def __match_all(cls, embedded_doc, kwargs):
|
||||||
|
"""Return True if a given embedded doc matches all the filter
|
||||||
|
kwargs. If it doesn't return False.
|
||||||
|
"""
|
||||||
|
for key, expected_value in kwargs.items():
|
||||||
|
doc_val = getattr(embedded_doc, key)
|
||||||
|
if doc_val != expected_value and six.text_type(doc_val) != expected_value:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def __only_matches(cls, embedded_docs, kwargs):
|
||||||
|
"""Return embedded docs that match the filter kwargs."""
|
||||||
|
if not kwargs:
|
||||||
|
return embedded_docs
|
||||||
|
return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)]
|
||||||
|
|
||||||
|
def __init__(self, list_items, instance, name):
|
||||||
|
super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
|
||||||
|
self._instance = instance
|
||||||
|
|
||||||
|
def filter(self, **kwargs):
|
||||||
|
"""
|
||||||
|
Filters the list by only including embedded documents with the
|
||||||
|
given keyword arguments.
|
||||||
|
|
||||||
|
:param kwargs: The keyword arguments corresponding to the fields to
|
||||||
|
filter on. *Multiple arguments are treated as if they are ANDed
|
||||||
|
together.*
|
||||||
|
:return: A new ``EmbeddedDocumentList`` containing the matching
|
||||||
|
embedded documents.
|
||||||
|
|
||||||
|
Raises ``AttributeError`` if a given keyword is not a valid field for
|
||||||
|
the embedded document class.
|
||||||
|
"""
|
||||||
|
values = self.__only_matches(self, kwargs)
|
||||||
|
return EmbeddedDocumentList(values, self._instance, self._name)
|
||||||
|
|
||||||
|
def exclude(self, **kwargs):
|
||||||
|
"""
|
||||||
|
Filters the list by excluding embedded documents with the given
|
||||||
|
keyword arguments.
|
||||||
|
|
||||||
|
:param kwargs: The keyword arguments corresponding to the fields to
|
||||||
|
exclude on. *Multiple arguments are treated as if they are ANDed
|
||||||
|
together.*
|
||||||
|
:return: A new ``EmbeddedDocumentList`` containing the non-matching
|
||||||
|
embedded documents.
|
||||||
|
|
||||||
|
Raises ``AttributeError`` if a given keyword is not a valid field for
|
||||||
|
the embedded document class.
|
||||||
|
"""
|
||||||
|
exclude = self.__only_matches(self, kwargs)
|
||||||
|
values = [item for item in self if item not in exclude]
|
||||||
|
return EmbeddedDocumentList(values, self._instance, self._name)
|
||||||
|
|
||||||
|
def count(self):
|
||||||
|
"""
|
||||||
|
The number of embedded documents in the list.
|
||||||
|
|
||||||
|
:return: The length of the list, equivalent to the result of ``len()``.
|
||||||
|
"""
|
||||||
|
return len(self)
|
||||||
|
|
||||||
|
def get(self, **kwargs):
|
||||||
|
"""
|
||||||
|
Retrieves an embedded document determined by the given keyword
|
||||||
|
arguments.
|
||||||
|
|
||||||
|
:param kwargs: The keyword arguments corresponding to the fields to
|
||||||
|
search on. *Multiple arguments are treated as if they are ANDed
|
||||||
|
together.*
|
||||||
|
:return: The embedded document matched by the given keyword arguments.
|
||||||
|
|
||||||
|
Raises ``DoesNotExist`` if the arguments used to query an embedded
|
||||||
|
document returns no results. ``MultipleObjectsReturned`` if more
|
||||||
|
than one result is returned.
|
||||||
|
"""
|
||||||
|
values = self.__only_matches(self, kwargs)
|
||||||
|
if len(values) == 0:
|
||||||
|
raise DoesNotExist(
|
||||||
|
'%s matching query does not exist.' % self._name
|
||||||
|
)
|
||||||
|
elif len(values) > 1:
|
||||||
|
raise MultipleObjectsReturned(
|
||||||
|
'%d items returned, instead of 1' % len(values)
|
||||||
|
)
|
||||||
|
|
||||||
|
return values[0]
|
||||||
|
|
||||||
|
def first(self):
|
||||||
|
"""Return the first embedded document in the list, or ``None``
|
||||||
|
if empty.
|
||||||
|
"""
|
||||||
|
if len(self) > 0:
|
||||||
|
return self[0]
|
||||||
|
|
||||||
|
def create(self, **values):
|
||||||
|
"""
|
||||||
|
Creates a new embedded document and saves it to the database.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
The embedded document changes are not automatically saved
|
||||||
|
to the database after calling this method.
|
||||||
|
|
||||||
|
:param values: A dictionary of values for the embedded document.
|
||||||
|
:return: The new embedded document instance.
|
||||||
|
"""
|
||||||
|
name = self._name
|
||||||
|
EmbeddedClass = self._instance._fields[name].field.document_type_obj
|
||||||
|
self._instance[self._name].append(EmbeddedClass(**values))
|
||||||
|
|
||||||
|
return self._instance[self._name][-1]
|
||||||
|
|
||||||
|
def save(self, *args, **kwargs):
|
||||||
|
"""
|
||||||
|
Saves the ancestor document.
|
||||||
|
|
||||||
|
:param args: Arguments passed up to the ancestor Document's save
|
||||||
|
method.
|
||||||
|
:param kwargs: Keyword arguments passed up to the ancestor Document's
|
||||||
|
save method.
|
||||||
|
"""
|
||||||
|
self._instance.save(*args, **kwargs)
|
||||||
|
|
||||||
|
def delete(self):
|
||||||
|
"""
|
||||||
|
Deletes the embedded documents from the database.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
The embedded document changes are not automatically saved
|
||||||
|
to the database after calling this method.
|
||||||
|
|
||||||
|
:return: The number of entries deleted.
|
||||||
|
"""
|
||||||
|
values = list(self)
|
||||||
|
for item in values:
|
||||||
|
self._instance[self._name].remove(item)
|
||||||
|
|
||||||
|
return len(values)
|
||||||
|
|
||||||
|
def update(self, **update):
|
||||||
|
"""
|
||||||
|
Updates the embedded documents with the given update values.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
The embedded document changes are not automatically saved
|
||||||
|
to the database after calling this method.
|
||||||
|
|
||||||
|
:param update: A dictionary of update values to apply to each
|
||||||
|
embedded document.
|
||||||
|
:return: The number of entries updated.
|
||||||
|
"""
|
||||||
|
if len(update) == 0:
|
||||||
|
return 0
|
||||||
|
values = list(self)
|
||||||
|
for item in values:
|
||||||
|
for k, v in update.items():
|
||||||
|
setattr(item, k, v)
|
||||||
|
|
||||||
|
return len(values)
|
||||||
|
|
||||||
|
|
||||||
|
class StrictDict(object):
|
||||||
|
__slots__ = ()
|
||||||
|
_special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create'])
|
||||||
|
_classes = {}
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
for k, v in kwargs.iteritems():
|
||||||
|
setattr(self, k, v)
|
||||||
|
|
||||||
|
def __getitem__(self, key):
|
||||||
|
key = '_reserved_' + key if key in self._special_fields else key
|
||||||
|
try:
|
||||||
|
return getattr(self, key)
|
||||||
|
except AttributeError:
|
||||||
|
raise KeyError(key)
|
||||||
|
|
||||||
|
def __setitem__(self, key, value):
|
||||||
|
key = '_reserved_' + key if key in self._special_fields else key
|
||||||
|
return setattr(self, key, value)
|
||||||
|
|
||||||
|
def __contains__(self, key):
|
||||||
|
return hasattr(self, key)
|
||||||
|
|
||||||
|
def get(self, key, default=None):
|
||||||
|
try:
|
||||||
|
return self[key]
|
||||||
|
except KeyError:
|
||||||
|
return default
|
||||||
|
|
||||||
|
def pop(self, key, default=None):
|
||||||
|
v = self.get(key, default)
|
||||||
|
try:
|
||||||
|
delattr(self, key)
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
return v
|
||||||
|
|
||||||
|
def iteritems(self):
|
||||||
|
for key in self:
|
||||||
|
yield key, self[key]
|
||||||
|
|
||||||
|
def items(self):
|
||||||
|
return [(k, self[k]) for k in iter(self)]
|
||||||
|
|
||||||
|
def iterkeys(self):
|
||||||
|
return iter(self)
|
||||||
|
|
||||||
|
def keys(self):
|
||||||
|
return list(iter(self))
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
return (key for key in self.__slots__ if hasattr(self, key))
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
return len(list(self.iteritems()))
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return self.items() == other.items()
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return self.items() != other.items()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def create(cls, allowed_keys):
|
||||||
|
allowed_keys_tuple = tuple(('_reserved_' + k if k in cls._special_fields else k) for k in allowed_keys)
|
||||||
|
allowed_keys = frozenset(allowed_keys_tuple)
|
||||||
|
if allowed_keys not in cls._classes:
|
||||||
|
class SpecificStrictDict(cls):
|
||||||
|
__slots__ = allowed_keys_tuple
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return '{%s}' % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items())
|
||||||
|
|
||||||
|
cls._classes[allowed_keys] = SpecificStrictDict
|
||||||
|
return cls._classes[allowed_keys]
|
||||||
|
|
||||||
|
|
||||||
|
class SemiStrictDict(StrictDict):
|
||||||
|
__slots__ = ('_extras', )
|
||||||
|
_classes = {}
|
||||||
|
|
||||||
|
def __getattr__(self, attr):
|
||||||
|
try:
|
||||||
|
super(SemiStrictDict, self).__getattr__(attr)
|
||||||
|
except AttributeError:
|
||||||
|
try:
|
||||||
|
return self.__getattribute__('_extras')[attr]
|
||||||
|
except KeyError as e:
|
||||||
|
raise AttributeError(e)
|
||||||
|
|
||||||
|
def __setattr__(self, attr, value):
|
||||||
|
try:
|
||||||
|
super(SemiStrictDict, self).__setattr__(attr, value)
|
||||||
|
except AttributeError:
|
||||||
|
try:
|
||||||
|
self._extras[attr] = value
|
||||||
|
except AttributeError:
|
||||||
|
self._extras = {attr: value}
|
||||||
|
|
||||||
|
def __delattr__(self, attr):
|
||||||
|
try:
|
||||||
|
super(SemiStrictDict, self).__delattr__(attr)
|
||||||
|
except AttributeError:
|
||||||
|
try:
|
||||||
|
del self._extras[attr]
|
||||||
|
except KeyError as e:
|
||||||
|
raise AttributeError(e)
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
try:
|
||||||
|
extras_iter = iter(self.__getattribute__('_extras'))
|
||||||
|
except AttributeError:
|
||||||
|
extras_iter = ()
|
||||||
|
return itertools.chain(super(SemiStrictDict, self).__iter__(), extras_iter)
|
||||||
1091
mongoengine/base/document.py
Normal file
1091
mongoengine/base/document.py
Normal file
File diff suppressed because it is too large
Load Diff
632
mongoengine/base/fields.py
Normal file
632
mongoengine/base/fields.py
Normal file
@@ -0,0 +1,632 @@
|
|||||||
|
import operator
|
||||||
|
import warnings
|
||||||
|
import weakref
|
||||||
|
|
||||||
|
from bson import DBRef, ObjectId, SON
|
||||||
|
import pymongo
|
||||||
|
import six
|
||||||
|
|
||||||
|
from mongoengine.base.common import UPDATE_OPERATORS
|
||||||
|
from mongoengine.base.datastructures import (BaseDict, BaseList,
|
||||||
|
EmbeddedDocumentList)
|
||||||
|
from mongoengine.common import _import_class
|
||||||
|
from mongoengine.errors import ValidationError
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField',
|
||||||
|
'GeoJsonBaseField')
|
||||||
|
|
||||||
|
|
||||||
|
class BaseField(object):
|
||||||
|
"""A base class for fields in a MongoDB document. Instances of this class
|
||||||
|
may be added to subclasses of `Document` to define a document's schema.
|
||||||
|
|
||||||
|
.. versionchanged:: 0.5 - added verbose and help text
|
||||||
|
"""
|
||||||
|
name = None
|
||||||
|
_geo_index = False
|
||||||
|
_auto_gen = False # Call `generate` to generate a value
|
||||||
|
_auto_dereference = True
|
||||||
|
|
||||||
|
# These track each time a Field instance is created. Used to retain order.
|
||||||
|
# The auto_creation_counter is used for fields that MongoEngine implicitly
|
||||||
|
# creates, creation_counter is used for all user-specified fields.
|
||||||
|
creation_counter = 0
|
||||||
|
auto_creation_counter = -1
|
||||||
|
|
||||||
|
def __init__(self, db_field=None, name=None, required=False, default=None,
|
||||||
|
unique=False, unique_with=None, primary_key=False,
|
||||||
|
validation=None, choices=None, null=False, sparse=False,
|
||||||
|
**kwargs):
|
||||||
|
"""
|
||||||
|
:param db_field: The database field to store this field in
|
||||||
|
(defaults to the name of the field)
|
||||||
|
:param name: Deprecated - use db_field
|
||||||
|
:param required: If the field is required. Whether it has to have a
|
||||||
|
value or not. Defaults to False.
|
||||||
|
:param default: (optional) The default value for this field if no value
|
||||||
|
has been set (or if the value has been unset). It can be a
|
||||||
|
callable.
|
||||||
|
:param unique: Is the field value unique or not. Defaults to False.
|
||||||
|
:param unique_with: (optional) The other field this field should be
|
||||||
|
unique with.
|
||||||
|
:param primary_key: Mark this field as the primary key. Defaults to False.
|
||||||
|
:param validation: (optional) A callable to validate the value of the
|
||||||
|
field. Generally this is deprecated in favour of the
|
||||||
|
`FIELD.validate` method
|
||||||
|
:param choices: (optional) The valid choices
|
||||||
|
:param null: (optional) Is the field value can be null. If no and there is a default value
|
||||||
|
then the default value is set
|
||||||
|
:param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False`
|
||||||
|
means that uniqueness won't be enforced for `None` values
|
||||||
|
:param **kwargs: (optional) Arbitrary indirection-free metadata for
|
||||||
|
this field can be supplied as additional keyword arguments and
|
||||||
|
accessed as attributes of the field. Must not conflict with any
|
||||||
|
existing attributes. Common metadata includes `verbose_name` and
|
||||||
|
`help_text`.
|
||||||
|
"""
|
||||||
|
self.db_field = (db_field or name) if not primary_key else '_id'
|
||||||
|
|
||||||
|
if name:
|
||||||
|
msg = 'Field\'s "name" attribute deprecated in favour of "db_field"'
|
||||||
|
warnings.warn(msg, DeprecationWarning)
|
||||||
|
self.required = required or primary_key
|
||||||
|
self.default = default
|
||||||
|
self.unique = bool(unique or unique_with)
|
||||||
|
self.unique_with = unique_with
|
||||||
|
self.primary_key = primary_key
|
||||||
|
self.validation = validation
|
||||||
|
self.choices = choices
|
||||||
|
self.null = null
|
||||||
|
self.sparse = sparse
|
||||||
|
self._owner_document = None
|
||||||
|
|
||||||
|
# Validate the db_field
|
||||||
|
if isinstance(self.db_field, six.string_types) and (
|
||||||
|
'.' in self.db_field or
|
||||||
|
'\0' in self.db_field or
|
||||||
|
self.db_field.startswith('$')
|
||||||
|
):
|
||||||
|
raise ValueError(
|
||||||
|
'field names cannot contain dots (".") or null characters '
|
||||||
|
'("\\0"), and they must not start with a dollar sign ("$").'
|
||||||
|
)
|
||||||
|
|
||||||
|
# Detect and report conflicts between metadata and base properties.
|
||||||
|
conflicts = set(dir(self)) & set(kwargs)
|
||||||
|
if conflicts:
|
||||||
|
raise TypeError('%s already has attribute(s): %s' % (
|
||||||
|
self.__class__.__name__, ', '.join(conflicts)))
|
||||||
|
|
||||||
|
# Assign metadata to the instance
|
||||||
|
# This efficient method is available because no __slots__ are defined.
|
||||||
|
self.__dict__.update(kwargs)
|
||||||
|
|
||||||
|
# Adjust the appropriate creation counter, and save our local copy.
|
||||||
|
if self.db_field == '_id':
|
||||||
|
self.creation_counter = BaseField.auto_creation_counter
|
||||||
|
BaseField.auto_creation_counter -= 1
|
||||||
|
else:
|
||||||
|
self.creation_counter = BaseField.creation_counter
|
||||||
|
BaseField.creation_counter += 1
|
||||||
|
|
||||||
|
def __get__(self, instance, owner):
|
||||||
|
"""Descriptor for retrieving a value from a field in a document.
|
||||||
|
"""
|
||||||
|
if instance is None:
|
||||||
|
# Document class being used rather than a document object
|
||||||
|
return self
|
||||||
|
|
||||||
|
# Get value from document instance if available
|
||||||
|
return instance._data.get(self.name)
|
||||||
|
|
||||||
|
def __set__(self, instance, value):
|
||||||
|
"""Descriptor for assigning a value to a field in a document.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# If setting to None and there is a default
|
||||||
|
# Then set the value to the default value
|
||||||
|
if value is None:
|
||||||
|
if self.null:
|
||||||
|
value = None
|
||||||
|
elif self.default is not None:
|
||||||
|
value = self.default
|
||||||
|
if callable(value):
|
||||||
|
value = value()
|
||||||
|
|
||||||
|
if instance._initialised:
|
||||||
|
try:
|
||||||
|
if (self.name not in instance._data or
|
||||||
|
instance._data[self.name] != value):
|
||||||
|
instance._mark_as_changed(self.name)
|
||||||
|
except Exception:
|
||||||
|
# Values cant be compared eg: naive and tz datetimes
|
||||||
|
# So mark it as changed
|
||||||
|
instance._mark_as_changed(self.name)
|
||||||
|
|
||||||
|
EmbeddedDocument = _import_class('EmbeddedDocument')
|
||||||
|
if isinstance(value, EmbeddedDocument):
|
||||||
|
value._instance = weakref.proxy(instance)
|
||||||
|
elif isinstance(value, (list, tuple)):
|
||||||
|
for v in value:
|
||||||
|
if isinstance(v, EmbeddedDocument):
|
||||||
|
v._instance = weakref.proxy(instance)
|
||||||
|
instance._data[self.name] = value
|
||||||
|
|
||||||
|
def error(self, message='', errors=None, field_name=None):
|
||||||
|
"""Raise a ValidationError."""
|
||||||
|
field_name = field_name if field_name else self.name
|
||||||
|
raise ValidationError(message, errors=errors, field_name=field_name)
|
||||||
|
|
||||||
|
def to_python(self, value):
|
||||||
|
"""Convert a MongoDB-compatible type to a Python type."""
|
||||||
|
return value
|
||||||
|
|
||||||
|
def to_mongo(self, value):
|
||||||
|
"""Convert a Python type to a MongoDB-compatible type."""
|
||||||
|
return self.to_python(value)
|
||||||
|
|
||||||
|
def _to_mongo_safe_call(self, value, use_db_field=True, fields=None):
|
||||||
|
"""Helper method to call to_mongo with proper inputs."""
|
||||||
|
f_inputs = self.to_mongo.__code__.co_varnames
|
||||||
|
ex_vars = {}
|
||||||
|
if 'fields' in f_inputs:
|
||||||
|
ex_vars['fields'] = fields
|
||||||
|
|
||||||
|
if 'use_db_field' in f_inputs:
|
||||||
|
ex_vars['use_db_field'] = use_db_field
|
||||||
|
|
||||||
|
return self.to_mongo(value, **ex_vars)
|
||||||
|
|
||||||
|
def prepare_query_value(self, op, value):
|
||||||
|
"""Prepare a value that is being used in a query for PyMongo."""
|
||||||
|
if op in UPDATE_OPERATORS:
|
||||||
|
self.validate(value)
|
||||||
|
return value
|
||||||
|
|
||||||
|
def validate(self, value, clean=True):
|
||||||
|
"""Perform validation on a value."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _validate_choices(self, value):
|
||||||
|
Document = _import_class('Document')
|
||||||
|
EmbeddedDocument = _import_class('EmbeddedDocument')
|
||||||
|
|
||||||
|
choice_list = self.choices
|
||||||
|
if isinstance(next(iter(choice_list)), (list, tuple)):
|
||||||
|
# next(iter) is useful for sets
|
||||||
|
choice_list = [k for k, _ in choice_list]
|
||||||
|
|
||||||
|
# Choices which are other types of Documents
|
||||||
|
if isinstance(value, (Document, EmbeddedDocument)):
|
||||||
|
if not any(isinstance(value, c) for c in choice_list):
|
||||||
|
self.error(
|
||||||
|
'Value must be an instance of %s' % (
|
||||||
|
six.text_type(choice_list)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
# Choices which are types other than Documents
|
||||||
|
elif value not in choice_list:
|
||||||
|
self.error('Value must be one of %s' % six.text_type(choice_list))
|
||||||
|
|
||||||
|
def _validate(self, value, **kwargs):
|
||||||
|
# Check the Choices Constraint
|
||||||
|
if self.choices:
|
||||||
|
self._validate_choices(value)
|
||||||
|
|
||||||
|
# check validation argument
|
||||||
|
if self.validation is not None:
|
||||||
|
if callable(self.validation):
|
||||||
|
if not self.validation(value):
|
||||||
|
self.error('Value does not match custom validation method')
|
||||||
|
else:
|
||||||
|
raise ValueError('validation argument for "%s" must be a '
|
||||||
|
'callable.' % self.name)
|
||||||
|
|
||||||
|
self.validate(value, **kwargs)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def owner_document(self):
|
||||||
|
return self._owner_document
|
||||||
|
|
||||||
|
def _set_owner_document(self, owner_document):
|
||||||
|
self._owner_document = owner_document
|
||||||
|
|
||||||
|
@owner_document.setter
|
||||||
|
def owner_document(self, owner_document):
|
||||||
|
self._set_owner_document(owner_document)
|
||||||
|
|
||||||
|
|
||||||
|
class ComplexBaseField(BaseField):
|
||||||
|
"""Handles complex fields, such as lists / dictionaries.
|
||||||
|
|
||||||
|
Allows for nesting of embedded documents inside complex types.
|
||||||
|
Handles the lazy dereferencing of a queryset by lazily dereferencing all
|
||||||
|
items in a list / dict rather than one at a time.
|
||||||
|
|
||||||
|
.. versionadded:: 0.5
|
||||||
|
"""
|
||||||
|
|
||||||
|
field = None
|
||||||
|
|
||||||
|
def __get__(self, instance, owner):
|
||||||
|
"""Descriptor to automatically dereference references."""
|
||||||
|
if instance is None:
|
||||||
|
# Document class being used rather than a document object
|
||||||
|
return self
|
||||||
|
|
||||||
|
ReferenceField = _import_class('ReferenceField')
|
||||||
|
GenericReferenceField = _import_class('GenericReferenceField')
|
||||||
|
EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
|
||||||
|
dereference = (self._auto_dereference and
|
||||||
|
(self.field is None or isinstance(self.field,
|
||||||
|
(GenericReferenceField, ReferenceField))))
|
||||||
|
|
||||||
|
_dereference = _import_class('DeReference')()
|
||||||
|
|
||||||
|
self._auto_dereference = instance._fields[self.name]._auto_dereference
|
||||||
|
if instance._initialised and dereference and instance._data.get(self.name):
|
||||||
|
instance._data[self.name] = _dereference(
|
||||||
|
instance._data.get(self.name), max_depth=1, instance=instance,
|
||||||
|
name=self.name
|
||||||
|
)
|
||||||
|
|
||||||
|
value = super(ComplexBaseField, self).__get__(instance, owner)
|
||||||
|
|
||||||
|
# Convert lists / values so we can watch for any changes on them
|
||||||
|
if isinstance(value, (list, tuple)):
|
||||||
|
if (issubclass(type(self), EmbeddedDocumentListField) and
|
||||||
|
not isinstance(value, EmbeddedDocumentList)):
|
||||||
|
value = EmbeddedDocumentList(value, instance, self.name)
|
||||||
|
elif not isinstance(value, BaseList):
|
||||||
|
value = BaseList(value, instance, self.name)
|
||||||
|
instance._data[self.name] = value
|
||||||
|
elif isinstance(value, dict) and not isinstance(value, BaseDict):
|
||||||
|
value = BaseDict(value, instance, self.name)
|
||||||
|
instance._data[self.name] = value
|
||||||
|
|
||||||
|
if (self._auto_dereference and instance._initialised and
|
||||||
|
isinstance(value, (BaseList, BaseDict)) and
|
||||||
|
not value._dereferenced):
|
||||||
|
value = _dereference(
|
||||||
|
value, max_depth=1, instance=instance, name=self.name
|
||||||
|
)
|
||||||
|
value._dereferenced = True
|
||||||
|
instance._data[self.name] = value
|
||||||
|
|
||||||
|
return value
|
||||||
|
|
||||||
|
def to_python(self, value):
|
||||||
|
"""Convert a MongoDB-compatible type to a Python type."""
|
||||||
|
if isinstance(value, six.string_types):
|
||||||
|
return value
|
||||||
|
|
||||||
|
if hasattr(value, 'to_python'):
|
||||||
|
return value.to_python()
|
||||||
|
|
||||||
|
is_list = False
|
||||||
|
if not hasattr(value, 'items'):
|
||||||
|
try:
|
||||||
|
is_list = True
|
||||||
|
value = {k: v for k, v in enumerate(value)}
|
||||||
|
except TypeError: # Not iterable return the value
|
||||||
|
return value
|
||||||
|
|
||||||
|
if self.field:
|
||||||
|
self.field._auto_dereference = self._auto_dereference
|
||||||
|
value_dict = {key: self.field.to_python(item)
|
||||||
|
for key, item in value.items()}
|
||||||
|
else:
|
||||||
|
Document = _import_class('Document')
|
||||||
|
value_dict = {}
|
||||||
|
for k, v in value.items():
|
||||||
|
if isinstance(v, Document):
|
||||||
|
# We need the id from the saved object to create the DBRef
|
||||||
|
if v.pk is None:
|
||||||
|
self.error('You can only reference documents once they'
|
||||||
|
' have been saved to the database')
|
||||||
|
collection = v._get_collection_name()
|
||||||
|
value_dict[k] = DBRef(collection, v.pk)
|
||||||
|
elif hasattr(v, 'to_python'):
|
||||||
|
value_dict[k] = v.to_python()
|
||||||
|
else:
|
||||||
|
value_dict[k] = self.to_python(v)
|
||||||
|
|
||||||
|
if is_list: # Convert back to a list
|
||||||
|
return [v for _, v in sorted(value_dict.items(),
|
||||||
|
key=operator.itemgetter(0))]
|
||||||
|
return value_dict
|
||||||
|
|
||||||
|
def to_mongo(self, value, use_db_field=True, fields=None):
|
||||||
|
"""Convert a Python type to a MongoDB-compatible type."""
|
||||||
|
Document = _import_class('Document')
|
||||||
|
EmbeddedDocument = _import_class('EmbeddedDocument')
|
||||||
|
GenericReferenceField = _import_class('GenericReferenceField')
|
||||||
|
|
||||||
|
if isinstance(value, six.string_types):
|
||||||
|
return value
|
||||||
|
|
||||||
|
if hasattr(value, 'to_mongo'):
|
||||||
|
if isinstance(value, Document):
|
||||||
|
return GenericReferenceField().to_mongo(value)
|
||||||
|
cls = value.__class__
|
||||||
|
val = value.to_mongo(use_db_field, fields)
|
||||||
|
# If it's a document that is not inherited add _cls
|
||||||
|
if isinstance(value, EmbeddedDocument):
|
||||||
|
val['_cls'] = cls.__name__
|
||||||
|
return val
|
||||||
|
|
||||||
|
is_list = False
|
||||||
|
if not hasattr(value, 'items'):
|
||||||
|
try:
|
||||||
|
is_list = True
|
||||||
|
value = {k: v for k, v in enumerate(value)}
|
||||||
|
except TypeError: # Not iterable return the value
|
||||||
|
return value
|
||||||
|
|
||||||
|
if self.field:
|
||||||
|
value_dict = {
|
||||||
|
key: self.field._to_mongo_safe_call(item, use_db_field, fields)
|
||||||
|
for key, item in value.iteritems()
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
value_dict = {}
|
||||||
|
for k, v in value.iteritems():
|
||||||
|
if isinstance(v, Document):
|
||||||
|
# We need the id from the saved object to create the DBRef
|
||||||
|
if v.pk is None:
|
||||||
|
self.error('You can only reference documents once they'
|
||||||
|
' have been saved to the database')
|
||||||
|
|
||||||
|
# If its a document that is not inheritable it won't have
|
||||||
|
# any _cls data so make it a generic reference allows
|
||||||
|
# us to dereference
|
||||||
|
meta = getattr(v, '_meta', {})
|
||||||
|
allow_inheritance = meta.get('allow_inheritance')
|
||||||
|
if not allow_inheritance and not self.field:
|
||||||
|
value_dict[k] = GenericReferenceField().to_mongo(v)
|
||||||
|
else:
|
||||||
|
collection = v._get_collection_name()
|
||||||
|
value_dict[k] = DBRef(collection, v.pk)
|
||||||
|
elif hasattr(v, 'to_mongo'):
|
||||||
|
cls = v.__class__
|
||||||
|
val = v.to_mongo(use_db_field, fields)
|
||||||
|
# If it's a document that is not inherited add _cls
|
||||||
|
if isinstance(v, (Document, EmbeddedDocument)):
|
||||||
|
val['_cls'] = cls.__name__
|
||||||
|
value_dict[k] = val
|
||||||
|
else:
|
||||||
|
value_dict[k] = self.to_mongo(v, use_db_field, fields)
|
||||||
|
|
||||||
|
if is_list: # Convert back to a list
|
||||||
|
return [v for _, v in sorted(value_dict.items(),
|
||||||
|
key=operator.itemgetter(0))]
|
||||||
|
return value_dict
|
||||||
|
|
||||||
|
def validate(self, value):
|
||||||
|
"""If field is provided ensure the value is valid."""
|
||||||
|
errors = {}
|
||||||
|
if self.field:
|
||||||
|
if hasattr(value, 'iteritems') or hasattr(value, 'items'):
|
||||||
|
sequence = value.iteritems()
|
||||||
|
else:
|
||||||
|
sequence = enumerate(value)
|
||||||
|
for k, v in sequence:
|
||||||
|
try:
|
||||||
|
self.field._validate(v)
|
||||||
|
except ValidationError as error:
|
||||||
|
errors[k] = error.errors or error
|
||||||
|
except (ValueError, AssertionError) as error:
|
||||||
|
errors[k] = error
|
||||||
|
|
||||||
|
if errors:
|
||||||
|
field_class = self.field.__class__.__name__
|
||||||
|
self.error('Invalid %s item (%s)' % (field_class, value),
|
||||||
|
errors=errors)
|
||||||
|
# Don't allow empty values if required
|
||||||
|
if self.required and not value:
|
||||||
|
self.error('Field is required and cannot be empty')
|
||||||
|
|
||||||
|
def prepare_query_value(self, op, value):
|
||||||
|
return self.to_mongo(value)
|
||||||
|
|
||||||
|
def lookup_member(self, member_name):
|
||||||
|
if self.field:
|
||||||
|
return self.field.lookup_member(member_name)
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _set_owner_document(self, owner_document):
|
||||||
|
if self.field:
|
||||||
|
self.field.owner_document = owner_document
|
||||||
|
self._owner_document = owner_document
|
||||||
|
|
||||||
|
|
||||||
|
class ObjectIdField(BaseField):
|
||||||
|
"""A field wrapper around MongoDB's ObjectIds."""
|
||||||
|
|
||||||
|
def to_python(self, value):
|
||||||
|
try:
|
||||||
|
if not isinstance(value, ObjectId):
|
||||||
|
value = ObjectId(value)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
return value
|
||||||
|
|
||||||
|
def to_mongo(self, value):
|
||||||
|
if not isinstance(value, ObjectId):
|
||||||
|
try:
|
||||||
|
return ObjectId(six.text_type(value))
|
||||||
|
except Exception as e:
|
||||||
|
# e.message attribute has been deprecated since Python 2.6
|
||||||
|
self.error(six.text_type(e))
|
||||||
|
return value
|
||||||
|
|
||||||
|
def prepare_query_value(self, op, value):
|
||||||
|
return self.to_mongo(value)
|
||||||
|
|
||||||
|
def validate(self, value):
|
||||||
|
try:
|
||||||
|
ObjectId(six.text_type(value))
|
||||||
|
except Exception:
|
||||||
|
self.error('Invalid Object ID')
|
||||||
|
|
||||||
|
|
||||||
|
class GeoJsonBaseField(BaseField):
|
||||||
|
"""A geo json field storing a geojson style object.
|
||||||
|
|
||||||
|
.. versionadded:: 0.8
|
||||||
|
"""
|
||||||
|
|
||||||
|
_geo_index = pymongo.GEOSPHERE
|
||||||
|
_type = 'GeoBase'
|
||||||
|
|
||||||
|
def __init__(self, auto_index=True, *args, **kwargs):
|
||||||
|
"""
|
||||||
|
:param bool auto_index: Automatically create a '2dsphere' index.\
|
||||||
|
Defaults to `True`.
|
||||||
|
"""
|
||||||
|
self._name = '%sField' % self._type
|
||||||
|
if not auto_index:
|
||||||
|
self._geo_index = False
|
||||||
|
super(GeoJsonBaseField, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def validate(self, value):
|
||||||
|
"""Validate the GeoJson object based on its type."""
|
||||||
|
if isinstance(value, dict):
|
||||||
|
if set(value.keys()) == set(['type', 'coordinates']):
|
||||||
|
if value['type'] != self._type:
|
||||||
|
self.error('%s type must be "%s"' %
|
||||||
|
(self._name, self._type))
|
||||||
|
return self.validate(value['coordinates'])
|
||||||
|
else:
|
||||||
|
self.error('%s can only accept a valid GeoJson dictionary'
|
||||||
|
' or lists of (x, y)' % self._name)
|
||||||
|
return
|
||||||
|
elif not isinstance(value, (list, tuple)):
|
||||||
|
self.error('%s can only accept lists of [x, y]' % self._name)
|
||||||
|
return
|
||||||
|
|
||||||
|
validate = getattr(self, '_validate_%s' % self._type.lower())
|
||||||
|
error = validate(value)
|
||||||
|
if error:
|
||||||
|
self.error(error)
|
||||||
|
|
||||||
|
def _validate_polygon(self, value, top_level=True):
|
||||||
|
if not isinstance(value, (list, tuple)):
|
||||||
|
return 'Polygons must contain list of linestrings'
|
||||||
|
|
||||||
|
# Quick and dirty validator
|
||||||
|
try:
|
||||||
|
value[0][0][0]
|
||||||
|
except (TypeError, IndexError):
|
||||||
|
return 'Invalid Polygon must contain at least one valid linestring'
|
||||||
|
|
||||||
|
errors = []
|
||||||
|
for val in value:
|
||||||
|
error = self._validate_linestring(val, False)
|
||||||
|
if not error and val[0] != val[-1]:
|
||||||
|
error = 'LineStrings must start and end at the same point'
|
||||||
|
if error and error not in errors:
|
||||||
|
errors.append(error)
|
||||||
|
if errors:
|
||||||
|
if top_level:
|
||||||
|
return 'Invalid Polygon:\n%s' % ', '.join(errors)
|
||||||
|
else:
|
||||||
|
return '%s' % ', '.join(errors)
|
||||||
|
|
||||||
|
def _validate_linestring(self, value, top_level=True):
|
||||||
|
"""Validate a linestring."""
|
||||||
|
if not isinstance(value, (list, tuple)):
|
||||||
|
return 'LineStrings must contain list of coordinate pairs'
|
||||||
|
|
||||||
|
# Quick and dirty validator
|
||||||
|
try:
|
||||||
|
value[0][0]
|
||||||
|
except (TypeError, IndexError):
|
||||||
|
return 'Invalid LineString must contain at least one valid point'
|
||||||
|
|
||||||
|
errors = []
|
||||||
|
for val in value:
|
||||||
|
error = self._validate_point(val)
|
||||||
|
if error and error not in errors:
|
||||||
|
errors.append(error)
|
||||||
|
if errors:
|
||||||
|
if top_level:
|
||||||
|
return 'Invalid LineString:\n%s' % ', '.join(errors)
|
||||||
|
else:
|
||||||
|
return '%s' % ', '.join(errors)
|
||||||
|
|
||||||
|
def _validate_point(self, value):
|
||||||
|
"""Validate each set of coords"""
|
||||||
|
if not isinstance(value, (list, tuple)):
|
||||||
|
return 'Points must be a list of coordinate pairs'
|
||||||
|
elif not len(value) == 2:
|
||||||
|
return 'Value (%s) must be a two-dimensional point' % repr(value)
|
||||||
|
elif (not isinstance(value[0], (float, int)) or
|
||||||
|
not isinstance(value[1], (float, int))):
|
||||||
|
return 'Both values (%s) in point must be float or int' % repr(value)
|
||||||
|
|
||||||
|
def _validate_multipoint(self, value):
|
||||||
|
if not isinstance(value, (list, tuple)):
|
||||||
|
return 'MultiPoint must be a list of Point'
|
||||||
|
|
||||||
|
# Quick and dirty validator
|
||||||
|
try:
|
||||||
|
value[0][0]
|
||||||
|
except (TypeError, IndexError):
|
||||||
|
return 'Invalid MultiPoint must contain at least one valid point'
|
||||||
|
|
||||||
|
errors = []
|
||||||
|
for point in value:
|
||||||
|
error = self._validate_point(point)
|
||||||
|
if error and error not in errors:
|
||||||
|
errors.append(error)
|
||||||
|
|
||||||
|
if errors:
|
||||||
|
return '%s' % ', '.join(errors)
|
||||||
|
|
||||||
|
def _validate_multilinestring(self, value, top_level=True):
|
||||||
|
if not isinstance(value, (list, tuple)):
|
||||||
|
return 'MultiLineString must be a list of LineString'
|
||||||
|
|
||||||
|
# Quick and dirty validator
|
||||||
|
try:
|
||||||
|
value[0][0][0]
|
||||||
|
except (TypeError, IndexError):
|
||||||
|
return 'Invalid MultiLineString must contain at least one valid linestring'
|
||||||
|
|
||||||
|
errors = []
|
||||||
|
for linestring in value:
|
||||||
|
error = self._validate_linestring(linestring, False)
|
||||||
|
if error and error not in errors:
|
||||||
|
errors.append(error)
|
||||||
|
|
||||||
|
if errors:
|
||||||
|
if top_level:
|
||||||
|
return 'Invalid MultiLineString:\n%s' % ', '.join(errors)
|
||||||
|
else:
|
||||||
|
return '%s' % ', '.join(errors)
|
||||||
|
|
||||||
|
def _validate_multipolygon(self, value):
|
||||||
|
if not isinstance(value, (list, tuple)):
|
||||||
|
return 'MultiPolygon must be a list of Polygon'
|
||||||
|
|
||||||
|
# Quick and dirty validator
|
||||||
|
try:
|
||||||
|
value[0][0][0][0]
|
||||||
|
except (TypeError, IndexError):
|
||||||
|
return 'Invalid MultiPolygon must contain at least one valid Polygon'
|
||||||
|
|
||||||
|
errors = []
|
||||||
|
for polygon in value:
|
||||||
|
error = self._validate_polygon(polygon, False)
|
||||||
|
if error and error not in errors:
|
||||||
|
errors.append(error)
|
||||||
|
|
||||||
|
if errors:
|
||||||
|
return 'Invalid MultiPolygon:\n%s' % ', '.join(errors)
|
||||||
|
|
||||||
|
def to_mongo(self, value):
|
||||||
|
if isinstance(value, dict):
|
||||||
|
return value
|
||||||
|
return SON([('type', self._type), ('coordinates', value)])
|
||||||
452
mongoengine/base/metaclasses.py
Normal file
452
mongoengine/base/metaclasses.py
Normal file
@@ -0,0 +1,452 @@
|
|||||||
|
import warnings
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
from mongoengine.base.common import _document_registry
|
||||||
|
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
|
||||||
|
from mongoengine.common import _import_class
|
||||||
|
from mongoengine.errors import InvalidDocumentError
from mongoengine.queryset import (DO_NOTHING, DoesNotExist,
                                  MultipleObjectsReturned,
                                  QuerySetManager)


__all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')


class DocumentMetaclass(type):
    """Metaclass for all documents."""

    # TODO lower complexity of this method
    def __new__(cls, name, bases, attrs):
        flattened_bases = cls._get_bases(bases)
        super_new = super(DocumentMetaclass, cls).__new__

        # If it's a base class, just call super
        metaclass = attrs.get('my_metaclass')
        if metaclass and issubclass(metaclass, DocumentMetaclass):
            return super_new(cls, name, bases, attrs)

        attrs['_is_document'] = attrs.get('_is_document', False)
        attrs['_cached_reference_fields'] = []

        # EmbeddedDocuments could have meta data for inheritance
        if 'meta' in attrs:
            attrs['_meta'] = attrs.pop('meta')

        # EmbeddedDocuments should inherit meta data
        if '_meta' not in attrs:
            meta = MetaDict()
            for base in flattened_bases[::-1]:
                # Add any mixin metadata from plain objects
                if hasattr(base, 'meta'):
                    meta.merge(base.meta)
                elif hasattr(base, '_meta'):
                    meta.merge(base._meta)
            attrs['_meta'] = meta
            attrs['_meta']['abstract'] = False  # 789: EmbeddedDocument shouldn't inherit abstract

        # If allow_inheritance is True, add a "_cls" string field to the attrs
        if attrs['_meta'].get('allow_inheritance'):
            StringField = _import_class('StringField')
            attrs['_cls'] = StringField()

        # Handle document Fields

        # Merge all fields from subclasses
        doc_fields = {}
        for base in flattened_bases[::-1]:
            if hasattr(base, '_fields'):
                doc_fields.update(base._fields)

            # Standard object mixin - merge in any Fields
            if not hasattr(base, '_meta'):
                base_fields = {}
                for attr_name, attr_value in base.__dict__.iteritems():
                    if not isinstance(attr_value, BaseField):
                        continue
                    attr_value.name = attr_name
                    if not attr_value.db_field:
                        attr_value.db_field = attr_name
                    base_fields[attr_name] = attr_value

                doc_fields.update(base_fields)

        # Discover any document fields
        field_names = {}
        for attr_name, attr_value in attrs.iteritems():
            if not isinstance(attr_value, BaseField):
                continue
            attr_value.name = attr_name
            if not attr_value.db_field:
                attr_value.db_field = attr_name
            doc_fields[attr_name] = attr_value

            # Count names to ensure no db_field redefinitions
            field_names[attr_value.db_field] = field_names.get(
                attr_value.db_field, 0) + 1

        # Ensure no duplicate db_fields
        duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
        if duplicate_db_fields:
            msg = ('Multiple db_fields defined for: %s ' %
                   ', '.join(duplicate_db_fields))
            raise InvalidDocumentError(msg)

        # Set _fields and db_field maps
        attrs['_fields'] = doc_fields
        attrs['_db_field_map'] = {k: getattr(v, 'db_field', k)
                                  for k, v in doc_fields.items()}
        attrs['_reverse_db_field_map'] = {
            v: k for k, v in attrs['_db_field_map'].items()
        }

        attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
            (v.creation_counter, v.name)
            for v in doc_fields.itervalues()))

        #
        # Set document hierarchy
        #
        superclasses = ()
        class_name = [name]
        for base in flattened_bases:
            if (not getattr(base, '_is_base_cls', True) and
                    not getattr(base, '_meta', {}).get('abstract', True)):
                # Collate hierarchy for _cls and _subclasses
                class_name.append(base.__name__)

            if hasattr(base, '_meta'):
                # Warn if allow_inheritance isn't set and prevent
                # inheritance of classes where inheritance is set to False
                allow_inheritance = base._meta.get('allow_inheritance')
                if not allow_inheritance and not base._meta.get('abstract'):
                    raise ValueError('Document %s may not be subclassed' %
                                     base.__name__)

        # Get superclasses from last base superclass
        document_bases = [b for b in flattened_bases
                          if hasattr(b, '_class_name')]
        if document_bases:
            superclasses = document_bases[0]._superclasses
            superclasses += (document_bases[0]._class_name, )

        _cls = '.'.join(reversed(class_name))
        attrs['_class_name'] = _cls
        attrs['_superclasses'] = superclasses
        attrs['_subclasses'] = (_cls, )
        attrs['_types'] = attrs['_subclasses']  # TODO deprecate _types

        # Create the new_class
        new_class = super_new(cls, name, bases, attrs)

        # Set _subclasses
        for base in document_bases:
            if _cls not in base._subclasses:
                base._subclasses += (_cls,)
            base._types = base._subclasses  # TODO deprecate _types

        (Document, EmbeddedDocument, DictField,
         CachedReferenceField) = cls._import_classes()

        if issubclass(new_class, Document):
            new_class._collection = None

        # Add class to the _document_registry
        _document_registry[new_class._class_name] = new_class

        # In Python 2, user-defined method objects have special read-only
        # attributes 'im_func' and 'im_self' which contain the function obj
        # and class instance object respectively. With Python 3 these special
        # attributes have been replaced by __func__ and __self__. The Blinker
        # module continues to use im_func and im_self, so the code below
        # copies __func__ into im_func and __self__ into im_self for
        # classmethod objects in Document derived classes.
        if six.PY3:
            for val in new_class.__dict__.values():
                if isinstance(val, classmethod):
                    f = val.__get__(new_class)
                    if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
                        f.__dict__.update({'im_func': getattr(f, '__func__')})
                    if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
                        f.__dict__.update({'im_self': getattr(f, '__self__')})

        # Handle delete rules
        for field in new_class._fields.itervalues():
            f = field
            if f.owner_document is None:
                f.owner_document = new_class
            delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
            if isinstance(f, CachedReferenceField):

                if issubclass(new_class, EmbeddedDocument):
                    raise InvalidDocumentError('CachedReferenceFields is not '
                                               'allowed in EmbeddedDocuments')
                if not f.document_type:
                    raise InvalidDocumentError(
                        'Document is not available to sync')

                if f.auto_sync:
                    f.start_listener()

                f.document_type._cached_reference_fields.append(f)

            if isinstance(f, ComplexBaseField) and hasattr(f, 'field'):
                delete_rule = getattr(f.field,
                                      'reverse_delete_rule',
                                      DO_NOTHING)
                if isinstance(f, DictField) and delete_rule != DO_NOTHING:
                    msg = ('Reverse delete rules are not supported '
                           'for %s (field: %s)' %
                           (field.__class__.__name__, field.name))
                    raise InvalidDocumentError(msg)

                f = field.field

            if delete_rule != DO_NOTHING:
                if issubclass(new_class, EmbeddedDocument):
                    msg = ('Reverse delete rules are not supported for '
                           'EmbeddedDocuments (field: %s)' % field.name)
                    raise InvalidDocumentError(msg)
                f.document_type.register_delete_rule(new_class,
                                                     field.name, delete_rule)

            if (field.name and hasattr(Document, field.name) and
                    EmbeddedDocument not in new_class.mro()):
                msg = ('%s is a document method and not a valid '
                       'field name' % field.name)
                raise InvalidDocumentError(msg)

        return new_class

    def add_to_class(self, name, value):
        setattr(self, name, value)

    @classmethod
    def _get_bases(cls, bases):
        if isinstance(bases, BasesTuple):
            return bases
        seen = []
        bases = cls.__get_bases(bases)
        unique_bases = (b for b in bases if not (b in seen or seen.append(b)))
        return BasesTuple(unique_bases)

    @classmethod
    def __get_bases(cls, bases):
        for base in bases:
            if base is object:
                continue
            yield base
            for child_base in cls.__get_bases(base.__bases__):
                yield child_base

    @classmethod
    def _import_classes(cls):
        Document = _import_class('Document')
        EmbeddedDocument = _import_class('EmbeddedDocument')
        DictField = _import_class('DictField')
        CachedReferenceField = _import_class('CachedReferenceField')
        return Document, EmbeddedDocument, DictField, CachedReferenceField
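
The field discovery and duplicate-db_field check above are easiest to see from the calling side. A minimal sketch, assuming a standard MongoEngine install (the Person model is made up purely for illustration):

    from mongoengine import Document, StringField
    from mongoengine.errors import InvalidDocumentError

    # Hypothetical model used only to trigger the metaclass check above.
    try:
        class Person(Document):
            # Both attributes map to the same 'name' key in MongoDB, so the
            # metaclass raises InvalidDocumentError at class-creation time.
            full_name = StringField(db_field='name')
            nick_name = StringField(db_field='name')
    except InvalidDocumentError as e:
        print(e)  # Multiple db_fields defined for: name

Because the check runs in __new__, the error surfaces when the class statement executes, long before any document is saved.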


class TopLevelDocumentMetaclass(DocumentMetaclass):
    """Metaclass for top-level documents (i.e. documents that have their own
    collection in the database).
    """

    def __new__(cls, name, bases, attrs):
        flattened_bases = cls._get_bases(bases)
        super_new = super(TopLevelDocumentMetaclass, cls).__new__

        # Set default _meta data if base class, otherwise get user defined meta
        if attrs.get('my_metaclass') == TopLevelDocumentMetaclass:
            # defaults
            attrs['_meta'] = {
                'abstract': True,
                'max_documents': None,
                'max_size': None,
                'ordering': [],  # default ordering applied at runtime
                'indexes': [],  # indexes to be ensured at runtime
                'id_field': None,
                'index_background': False,
                'index_drop_dups': False,
                'index_opts': None,
                'delete_rules': None,

                # allow_inheritance can be True, False, and None. True means
                # "allow inheritance", False means "don't allow inheritance",
                # None means "do whatever your parent does, or don't allow
                # inheritance if you're a top-level class".
                'allow_inheritance': None,
            }
            attrs['_is_base_cls'] = True
            attrs['_meta'].update(attrs.get('meta', {}))
        else:
            attrs['_meta'] = attrs.get('meta', {})
            # Explicitly set abstract to False unless set
            attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False)
            attrs['_is_base_cls'] = False

        # Set flag marking as document class - as opposed to an object mixin
        attrs['_is_document'] = True

        # Ensure queryset_class is inherited
        if 'objects' in attrs:
            manager = attrs['objects']
            if hasattr(manager, 'queryset_class'):
                attrs['_meta']['queryset_class'] = manager.queryset_class

        # Clean up top level meta
        if 'meta' in attrs:
            del attrs['meta']

        # Find the parent document class
        parent_doc_cls = [b for b in flattened_bases
                          if b.__class__ == TopLevelDocumentMetaclass]
        parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0]

        # Prevent classes setting collection different to their parents
        # If parent wasn't an abstract class
        if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and
                not parent_doc_cls._meta.get('abstract', True)):
            msg = 'Trying to set a collection on a subclass (%s)' % name
            warnings.warn(msg, SyntaxWarning)
            del attrs['_meta']['collection']

        # Ensure abstract documents have abstract bases
        if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
            if (parent_doc_cls and
                    not parent_doc_cls._meta.get('abstract', False)):
                msg = 'Abstract document cannot have non-abstract base'
                raise ValueError(msg)
            return super_new(cls, name, bases, attrs)

        # Merge base class metas.
        # Uses a special MetaDict that handles various merging rules
        meta = MetaDict()
        for base in flattened_bases[::-1]:
            # Add any mixin metadata from plain objects
            if hasattr(base, 'meta'):
                meta.merge(base.meta)
            elif hasattr(base, '_meta'):
                meta.merge(base._meta)

            # Set collection in the meta if it's callable
            if (getattr(base, '_is_document', False) and
                    not base._meta.get('abstract')):
                collection = meta.get('collection', None)
                if callable(collection):
                    meta['collection'] = collection(base)

        meta.merge(attrs.get('_meta', {}))  # Top level meta

        # Only simple classes (i.e. direct subclasses of Document) may set
        # allow_inheritance to False. If the base Document allows inheritance,
        # none of its subclasses can override allow_inheritance to False.
        simple_class = all([b._meta.get('abstract')
                            for b in flattened_bases if hasattr(b, '_meta')])
        if (
            not simple_class and
            meta['allow_inheritance'] is False and
            not meta['abstract']
        ):
            raise ValueError('Only direct subclasses of Document may set '
                             '"allow_inheritance" to False')

        # Set default collection name
        if 'collection' not in meta:
            meta['collection'] = ''.join('_%s' % c if c.isupper() else c
                                         for c in name).strip('_').lower()
        attrs['_meta'] = meta

        # Call super and get the new class
        new_class = super_new(cls, name, bases, attrs)

        meta = new_class._meta

        # Set index specifications
        meta['index_specs'] = new_class._build_index_specs(meta['indexes'])

        # If collection is a callable - call it and set the value
        collection = meta.get('collection')
        if callable(collection):
            new_class._meta['collection'] = collection(new_class)

        # Provide a default queryset unless one exists or has been set
        if 'objects' not in dir(new_class):
            new_class.objects = QuerySetManager()

        # Validate the fields and set primary key if needed
        for field_name, field in new_class._fields.iteritems():
            if field.primary_key:
                # Ensure only one primary key is set
                current_pk = new_class._meta.get('id_field')
                if current_pk and current_pk != field_name:
                    raise ValueError('Cannot override primary key field')

                # Set primary key
                if not current_pk:
                    new_class._meta['id_field'] = field_name
                    new_class.id = field

        # Set primary key if not defined by the document
        new_class._auto_id_field = getattr(parent_doc_cls,
                                           '_auto_id_field', False)
        if not new_class._meta.get('id_field'):
            # After 0.10, find not existing names, instead of overwriting
            id_name, id_db_name = cls.get_auto_id_names(new_class)
            new_class._auto_id_field = True
            new_class._meta['id_field'] = id_name
            new_class._fields[id_name] = ObjectIdField(db_field=id_db_name)
            new_class._fields[id_name].name = id_name
            new_class.id = new_class._fields[id_name]
            new_class._db_field_map[id_name] = id_db_name
            new_class._reverse_db_field_map[id_db_name] = id_name
            # Prepend id field to _fields_ordered
            new_class._fields_ordered = (id_name, ) + new_class._fields_ordered

        # Merge in exceptions with parent hierarchy
        exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
        module = attrs.get('__module__')
        for exc in exceptions_to_merge:
            name = exc.__name__
            parents = tuple(getattr(base, name) for base in flattened_bases
                            if hasattr(base, name)) or (exc,)
            # Create a new exception and set it on new_class
            exception = type(name, parents, {'__module__': module})
            setattr(new_class, name, exception)

        return new_class

    @classmethod
    def get_auto_id_names(cls, new_class):
        id_name, id_db_name = ('id', '_id')
        if id_name not in new_class._fields and \
                id_db_name not in (v.db_field for v in new_class._fields.values()):
            return id_name, id_db_name
        id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0
        while id_name in new_class._fields or \
                id_db_name in (v.db_field for v in new_class._fields.values()):
            id_name = '{0}_{1}'.format(id_basename, i)
            id_db_name = '{0}_{1}'.format(id_db_basename, i)
            i += 1
        return id_name, id_db_name


class MetaDict(dict):
    """Custom dictionary for meta classes.
    Handles the merging of set indexes
    """
    _merge_options = ('indexes',)

    def merge(self, new_options):
        for k, v in new_options.iteritems():
            if k in self._merge_options:
                self[k] = self.get(k, []) + v
            else:
                self[k] = v


class BasesTuple(tuple):
    """Special class to handle introspection of bases tuple in __new__"""
    pass
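
The net effect of TopLevelDocumentMetaclass is easiest to see on a concrete class. A minimal sketch of the behaviour described by the code above (the BlogPost model is hypothetical; no database connection is needed just to define the class):

    from mongoengine import Document, StringField

    class BlogPost(Document):          # hypothetical model for illustration
        title = StringField()

    # Default collection name is the snake-cased class name built in __new__
    print(BlogPost._meta['collection'])      # blog_post
    # An ObjectIdField named 'id' (db_field '_id') was added automatically
    print(BlogPost._meta['id_field'])        # id
    print(BlogPost._fields_ordered[0])       # id
    # Per-class exception subclasses were attached as well
    print(BlogPost.DoesNotExist.__name__)    # DoesNotExist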

mongoengine/common.py  (new file, 58 lines)
@@ -0,0 +1,58 @@
_class_registry_cache = {}
_field_list_cache = []


def _import_class(cls_name):
    """Cache mechanism for imports.

    Due to complications of circular imports mongoengine needs to do lots of
    inline imports in functions. This is inefficient as classes are
    imported repeatedly throughout the mongoengine code. This is
    compounded by some recursive functions requiring inline imports.

    :mod:`mongoengine.common` provides a single point to import all these
    classes. Circular imports aren't an issue as it dynamically imports the
    class when first needed. Subsequent calls to
    :func:`~mongoengine.common._import_class` can then directly retrieve the
    class from the :data:`mongoengine.common._class_registry_cache`.
    """
    if cls_name in _class_registry_cache:
        return _class_registry_cache.get(cls_name)

    doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument',
                   'MapReduceDocument')

    # Field Classes
    if not _field_list_cache:
        from mongoengine.fields import __all__ as fields
        _field_list_cache.extend(fields)
        from mongoengine.base.fields import __all__ as fields
        _field_list_cache.extend(fields)

    field_classes = _field_list_cache

    queryset_classes = ('OperationError',)
    deref_classes = ('DeReference',)

    if cls_name == 'BaseDocument':
        from mongoengine.base import document as module
        import_classes = ['BaseDocument']
    elif cls_name in doc_classes:
        from mongoengine import document as module
        import_classes = doc_classes
    elif cls_name in field_classes:
        from mongoengine import fields as module
        import_classes = field_classes
    elif cls_name in queryset_classes:
        from mongoengine import queryset as module
        import_classes = queryset_classes
    elif cls_name in deref_classes:
        from mongoengine import dereference as module
        import_classes = deref_classes
    else:
        raise ValueError('No import set for: %s' % cls_name)

    for cls in import_classes:
        _class_registry_cache[cls] = getattr(module, cls)

    return _class_registry_cache.get(cls_name)
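
A short sketch of how this import cache behaves from the caller's side (the unknown class name is made up to show the error path):

    from mongoengine.common import _import_class, _class_registry_cache

    # First call triggers the inline import and fills the registry cache.
    StringField = _import_class('StringField')
    print('StringField' in _class_registry_cache)    # True

    # Later calls are plain dict lookups, so they are cheap.
    assert _import_class('StringField') is StringField

    # Unknown names fail fast with a descriptive error.
    try:
        _import_class('NotARealClass')                # hypothetical name
    except ValueError as e:
        print(e)    # No import set for: NotARealClass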
@@ -1,15 +1,25 @@
-import pymongo
-from pymongo import Connection, ReplicaSetConnection, uri_parser
+from pymongo import MongoClient, ReadPreference, uri_parser
+import six
 
+from mongoengine.python_support import IS_PYMONGO_3
+
 
-__all__ = ['ConnectionError', 'connect', 'register_connection',
+__all__ = ['MongoEngineConnectionError', 'connect', 'register_connection',
            'DEFAULT_CONNECTION_NAME']
 
 
 DEFAULT_CONNECTION_NAME = 'default'
 
+if IS_PYMONGO_3:
+    READ_PREFERENCE = ReadPreference.PRIMARY
+else:
+    from pymongo import MongoReplicaSetClient
+    READ_PREFERENCE = False
+
 
-class ConnectionError(Exception):
+class MongoEngineConnectionError(Exception):
+    """Error raised when the database connection can't be established or
+    when a connection with a requested alias can't be retrieved.
+    """
     pass
 
 
@@ -18,9 +28,12 @@ _connections = {}
 _dbs = {}
 
 
-def register_connection(alias, name, host='localhost', port=27017,
-                        is_slave=False, read_preference=False, slaves=None,
-                        username=None, password=None, **kwargs):
+def register_connection(alias, name=None, host=None, port=None,
+                        read_preference=READ_PREFERENCE,
+                        username=None, password=None,
+                        authentication_source=None,
+                        authentication_mechanism=None,
+                        **kwargs):
     """Add a connection.
 
     :param alias: the name that will be used to refer to this connection
@@ -28,120 +41,202 @@ def register_connection(alias, name, host='localhost', port=27017,
     :param name: the name of the specific database to use
     :param host: the host name of the :program:`mongod` instance to connect to
     :param port: the port that the :program:`mongod` instance is running on
-    :param is_slave: whether the connection can act as a slave ** Depreciated pymongo 2.0.1+
-    :param read_preference: The read preference for the collection ** Added pymongo 2.1
-    :param slaves: a list of aliases of slave connections; each of these must
-        be a registered connection that has :attr:`is_slave` set to ``True``
+    :param read_preference: The read preference for the collection
+        ** Added pymongo 2.1
     :param username: username to authenticate with
     :param password: password to authenticate with
-    :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver
+    :param authentication_source: database to authenticate against
+    :param authentication_mechanism: database authentication mechanisms.
+        By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
+        MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
+    :param is_mock: explicitly use mongomock for this connection
+        (can also be done by using `mongomock://` as db host prefix)
+    :param kwargs: ad-hoc parameters to be passed into the pymongo driver,
+        for example maxpoolsize, tz_aware, etc. See the documentation
+        for pymongo's `MongoClient` for a full list.
+
+    .. versionchanged:: 0.10.6 - added mongomock support
     """
-    global _connection_settings
-
     conn_settings = {
-        'name': name,
-        'host': host,
-        'port': port,
-        'is_slave': is_slave,
-        'slaves': slaves or [],
+        'name': name or 'test',
+        'host': host or 'localhost',
+        'port': port or 27017,
+        'read_preference': read_preference,
         'username': username,
         'password': password,
-        'read_preference': read_preference
+        'authentication_source': authentication_source,
+        'authentication_mechanism': authentication_mechanism
     }
 
-    # Handle uri style connections
-    if "://" in host:
-        uri_dict = uri_parser.parse_uri(host)
-        if uri_dict.get('database') is None:
-            raise ConnectionError("If using URI style connection include "\
-                                  "database name in string")
-        conn_settings.update({
-            'host': host,
-            'name': uri_dict.get('database'),
-            'username': uri_dict.get('username'),
-            'password': uri_dict.get('password'),
-            'read_preference': read_preference,
-        })
-        if "replicaSet" in host:
-            conn_settings['replicaSet'] = True
+    conn_host = conn_settings['host']
+
+    # Host can be a list or a string, so if string, force to a list.
+    if isinstance(conn_host, six.string_types):
+        conn_host = [conn_host]
+
+    resolved_hosts = []
+    for entity in conn_host:
+
+        # Handle Mongomock
+        if entity.startswith('mongomock://'):
+            conn_settings['is_mock'] = True
+            # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://`
+            resolved_hosts.append(entity.replace('mongomock://', 'mongodb://', 1))
+
+        # Handle URI style connections, only updating connection params which
+        # were explicitly specified in the URI.
+        elif '://' in entity:
+            uri_dict = uri_parser.parse_uri(entity)
+            resolved_hosts.append(entity)
+
+            if uri_dict.get('database'):
+                conn_settings['name'] = uri_dict.get('database')
+
+            for param in ('read_preference', 'username', 'password'):
+                if uri_dict.get(param):
+                    conn_settings[param] = uri_dict[param]
+
+            uri_options = uri_dict['options']
+            if 'replicaset' in uri_options:
+                conn_settings['replicaSet'] = uri_options['replicaset']
+            if 'authsource' in uri_options:
+                conn_settings['authentication_source'] = uri_options['authsource']
+            if 'authmechanism' in uri_options:
+                conn_settings['authentication_mechanism'] = uri_options['authmechanism']
+        else:
+            resolved_hosts.append(entity)
+    conn_settings['host'] = resolved_hosts
+
+    # Deprecated parameters that should not be passed on
+    kwargs.pop('slaves', None)
+    kwargs.pop('is_slave', None)
+
+    conn_settings.update(kwargs)
     _connection_settings[alias] = conn_settings
 
 
 def disconnect(alias=DEFAULT_CONNECTION_NAME):
-    global _connections
-    global _dbs
+    """Close the connection with a given alias."""
     if alias in _connections:
-        get_connection(alias=alias).disconnect()
+        get_connection(alias=alias).close()
         del _connections[alias]
     if alias in _dbs:
         del _dbs[alias]
 
 
 def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
-    global _connections
+    """Return a connection with a given alias."""
 
     # Connect to the database if not already connected
     if reconnect:
         disconnect(alias)
 
-    if alias not in _connections:
-        if alias not in _connection_settings:
-            msg = 'Connection with alias "%s" has not been defined' % alias
-            if alias == DEFAULT_CONNECTION_NAME:
-                msg = 'You have not defined a default connection'
-            raise ConnectionError(msg)
-        conn_settings = _connection_settings[alias].copy()
-
-        if hasattr(pymongo, 'version_tuple'):  # Support for 2.1+
-            conn_settings.pop('name', None)
-            conn_settings.pop('slaves', None)
-            conn_settings.pop('is_slave', None)
-            conn_settings.pop('username', None)
-            conn_settings.pop('password', None)
-        else:
-            # Get all the slave connections
-            if 'slaves' in conn_settings:
-                slaves = []
-                for slave_alias in conn_settings['slaves']:
-                    slaves.append(get_connection(slave_alias))
-                conn_settings['slaves'] = slaves
-                conn_settings.pop('read_preference', None)
-
-        connection_class = Connection
-        if 'replicaSet' in conn_settings:
-            conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
-            # Discard port since it can't be used on ReplicaSetConnection
-            conn_settings.pop('port', None)
-            # Discard replicaSet if not base string
-            if not isinstance(conn_settings['replicaSet'], basestring):
-                conn_settings.pop('replicaSet', None)
-            connection_class = ReplicaSetConnection
+    # If the requested alias already exists in the _connections list, return
+    # it immediately.
+    if alias in _connections:
+        return _connections[alias]
+
+    # Validate that the requested alias exists in the _connection_settings.
+    # Raise MongoEngineConnectionError if it doesn't.
+    if alias not in _connection_settings:
+        if alias == DEFAULT_CONNECTION_NAME:
+            msg = 'You have not defined a default connection'
+        else:
+            msg = 'Connection with alias "%s" has not been defined' % alias
+        raise MongoEngineConnectionError(msg)
+
+    def _clean_settings(settings_dict):
+        irrelevant_fields = set([
+            'name', 'username', 'password', 'authentication_source',
+            'authentication_mechanism'
+        ])
+        return {
+            k: v for k, v in settings_dict.items()
+            if k not in irrelevant_fields
+        }
+
+    # Retrieve a copy of the connection settings associated with the requested
+    # alias and remove the database name and authentication info (we don't
+    # care about them at this point).
+    conn_settings = _clean_settings(_connection_settings[alias].copy())
+
+    # Determine if we should use PyMongo's or mongomock's MongoClient.
+    is_mock = conn_settings.pop('is_mock', False)
+    if is_mock:
+        try:
+            import mongomock
+        except ImportError:
+            raise RuntimeError('You need mongomock installed to mock '
+                               'MongoEngine.')
+        connection_class = mongomock.MongoClient
+    else:
+        connection_class = MongoClient
+
+    # For replica set connections with PyMongo 2.x, use
+    # MongoReplicaSetClient.
+    # TODO remove this once we stop supporting PyMongo 2.x.
+    if 'replicaSet' in conn_settings and not IS_PYMONGO_3:
+        connection_class = MongoReplicaSetClient
+        conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
+
+        # hosts_or_uri has to be a string, so if 'host' was provided
+        # as a list, join its parts and separate them by ','
+        if isinstance(conn_settings['hosts_or_uri'], list):
+            conn_settings['hosts_or_uri'] = ','.join(
+                conn_settings['hosts_or_uri'])
+
+        # Discard port since it can't be used on MongoReplicaSetClient
+        conn_settings.pop('port', None)
+
+    # Iterate over all of the connection settings and if a connection with
+    # the same parameters is already established, use it instead of creating
+    # a new one.
+    existing_connection = None
+    connection_settings_iterator = (
+        (db_alias, settings.copy())
+        for db_alias, settings in _connection_settings.items()
+    )
+    for db_alias, connection_settings in connection_settings_iterator:
+        connection_settings = _clean_settings(connection_settings)
+        if conn_settings == connection_settings and _connections.get(db_alias):
+            existing_connection = _connections[db_alias]
+            break
+
+    # If an existing connection was found, assign it to the new alias
+    if existing_connection:
+        _connections[alias] = existing_connection
+    else:
+        # Otherwise, create the new connection for this alias. Raise
+        # MongoEngineConnectionError if it can't be established.
         try:
             _connections[alias] = connection_class(**conn_settings)
-        except Exception, e:
-            raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
+        except Exception as e:
+            raise MongoEngineConnectionError(
+                'Cannot connect to database %s :\n%s' % (alias, e))
+
     return _connections[alias]
 
 
 def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
-    global _dbs
     if reconnect:
         disconnect(alias)
 
     if alias not in _dbs:
         conn = get_connection(alias)
         conn_settings = _connection_settings[alias]
-        _dbs[alias] = conn[conn_settings['name']]
+        db = conn[conn_settings['name']]
+        auth_kwargs = {'source': conn_settings['authentication_source']}
+        if conn_settings['authentication_mechanism'] is not None:
+            auth_kwargs['mechanism'] = conn_settings['authentication_mechanism']
         # Authenticate if necessary
-        if conn_settings['username'] and conn_settings['password']:
-            _dbs[alias].authenticate(conn_settings['username'],
-                                     conn_settings['password'])
+        if conn_settings['username'] and (conn_settings['password'] or
+                                          conn_settings['authentication_mechanism'] == 'MONGODB-X509'):
+            db.authenticate(conn_settings['username'], conn_settings['password'], **auth_kwargs)
+        _dbs[alias] = db
     return _dbs[alias]
 
 
-def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
+def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
     """Connect to the database specified by the 'db' argument.
 
     Connection settings may be provided here as well if the database is not
@@ -151,14 +246,17 @@ def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
     Multiple databases are supported by using aliases. Provide a separate
     `alias` to connect to a different instance of :program:`mongod`.
 
+    See the docstring for `register_connection` for more details about all
+    supported kwargs.
+
     .. versionchanged:: 0.6 - added multiple database support.
     """
-    global _connections
     if alias not in _connections:
        register_connection(alias, db, **kwargs)
 
     return get_connection(alias)
 
 
 # Support old naming convention
 _get_connection = get_connection
 _get_db = get_db
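
A brief usage sketch of the connection API after this change. Host names, database names and credentials below are placeholders, and the mock alias additionally requires the optional mongomock package:

    from mongoengine import connect
    from mongoengine.connection import register_connection, get_db

    # connect() registers the 'default' alias and returns the client.
    connect('example_db', host='mongodb://localhost:27017/example_db')
    print(get_db().name)    # example_db

    # Additional aliases can be registered lazily; no connection is made
    # until get_connection()/get_db() is called for that alias.
    register_connection('reporting', name='example_reports',
                        host='mongodb://localhost:27017',
                        username='report_user', password='secret',
                        authentication_source='admin')

    # A mongomock:// host prefix switches the alias to an in-memory client.
    connect('unit_tests', alias='testing', host='mongomock://localhost')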

mongoengine/context_managers.py  (new file, 217 lines)
@@ -0,0 +1,217 @@
from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db


__all__ = ('switch_db', 'switch_collection', 'no_dereference',
           'no_sub_classes', 'query_counter')


class switch_db(object):
    """switch_db alias context manager.

    Example ::

        # Register connections
        register_connection('default', 'mongoenginetest')
        register_connection('testdb-1', 'mongoenginetest2')

        class Group(Document):
            name = StringField()

        Group(name='test').save()  # Saves in the default db

        with switch_db(Group, 'testdb-1') as Group:
            Group(name='hello testdb!').save()  # Saves in testdb-1
    """

    def __init__(self, cls, db_alias):
        """Construct the switch_db context manager

        :param cls: the class to change the registered db
        :param db_alias: the name of the specific database to use
        """
        self.cls = cls
        self.collection = cls._get_collection()
        self.db_alias = db_alias
        self.ori_db_alias = cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME)

    def __enter__(self):
        """Change the db_alias and clear the cached collection."""
        self.cls._meta['db_alias'] = self.db_alias
        self.cls._collection = None
        return self.cls

    def __exit__(self, t, value, traceback):
        """Reset the db_alias and collection."""
        self.cls._meta['db_alias'] = self.ori_db_alias
        self.cls._collection = self.collection


class switch_collection(object):
    """switch_collection alias context manager.

    Example ::

        class Group(Document):
            name = StringField()

        Group(name='test').save()  # Saves in the default db

        with switch_collection(Group, 'group1') as Group:
            Group(name='hello testdb!').save()  # Saves in group1 collection
    """

    def __init__(self, cls, collection_name):
        """Construct the switch_collection context manager.

        :param cls: the class to change the registered db
        :param collection_name: the name of the collection to use
        """
        self.cls = cls
        self.ori_collection = cls._get_collection()
        self.ori_get_collection_name = cls._get_collection_name
        self.collection_name = collection_name

    def __enter__(self):
        """Change the _get_collection_name and clear the cached collection."""

        @classmethod
        def _get_collection_name(cls):
            return self.collection_name

        self.cls._get_collection_name = _get_collection_name
        self.cls._collection = None
        return self.cls

    def __exit__(self, t, value, traceback):
        """Reset the collection."""
        self.cls._collection = self.ori_collection
        self.cls._get_collection_name = self.ori_get_collection_name


class no_dereference(object):
    """no_dereference context manager.

    Turns off all dereferencing in Documents for the duration of the context
    manager::

        with no_dereference(Group) as Group:
            Group.objects.find()
    """

    def __init__(self, cls):
        """Construct the no_dereference context manager.

        :param cls: the class to turn dereferencing off on
        """
        self.cls = cls

        ReferenceField = _import_class('ReferenceField')
        GenericReferenceField = _import_class('GenericReferenceField')
        ComplexBaseField = _import_class('ComplexBaseField')

        self.deref_fields = [k for k, v in self.cls._fields.iteritems()
                             if isinstance(v, (ReferenceField,
                                               GenericReferenceField,
                                               ComplexBaseField))]

    def __enter__(self):
        """Change the objects default and _auto_dereference values."""
        for field in self.deref_fields:
            self.cls._fields[field]._auto_dereference = False
        return self.cls

    def __exit__(self, t, value, traceback):
        """Reset the default and _auto_dereference values."""
        for field in self.deref_fields:
            self.cls._fields[field]._auto_dereference = True
        return self.cls


class no_sub_classes(object):
    """no_sub_classes context manager.

    Only returns instances of this class and no sub (inherited) classes::

        with no_sub_classes(Group) as Group:
            Group.objects.find()
    """

    def __init__(self, cls):
        """Construct the no_sub_classes context manager.

        :param cls: the class to turn querying sub classes on
        """
        self.cls = cls

    def __enter__(self):
        """Change the objects default and _auto_dereference values."""
        self.cls._all_subclasses = self.cls._subclasses
        self.cls._subclasses = (self.cls,)
        return self.cls

    def __exit__(self, t, value, traceback):
        """Reset the default and _auto_dereference values."""
        self.cls._subclasses = self.cls._all_subclasses
        delattr(self.cls, '_all_subclasses')
        return self.cls


class query_counter(object):
    """Query_counter context manager to get the number of queries."""

    def __init__(self):
        """Construct the query_counter."""
        self.counter = 0
        self.db = get_db()

    def __enter__(self):
        """On every with block we need to drop the profile collection."""
        self.db.set_profiling_level(0)
        self.db.system.profile.drop()
        self.db.set_profiling_level(2)
        return self

    def __exit__(self, t, value, traceback):
        """Reset the profiling level."""
        self.db.set_profiling_level(0)

    def __eq__(self, value):
        """== Compare querycounter."""
        counter = self._get_count()
        return value == counter

    def __ne__(self, value):
        """!= Compare querycounter."""
        return not self.__eq__(value)

    def __lt__(self, value):
        """< Compare querycounter."""
        return self._get_count() < value

    def __le__(self, value):
        """<= Compare querycounter."""
        return self._get_count() <= value

    def __gt__(self, value):
        """> Compare querycounter."""
        return self._get_count() > value

    def __ge__(self, value):
        """>= Compare querycounter."""
        return self._get_count() >= value

    def __int__(self):
        """int representation."""
        return self._get_count()

    def __repr__(self):
        """repr query_counter as the number of queries."""
        return u"%s" % self._get_count()

    def _get_count(self):
        """Get the number of queries."""
        ignore_query = {'ns': {'$ne': '%s.system.indexes' % self.db.name}}
        count = self.db.system.profile.find(ignore_query).count() - self.counter
        self.counter += 1
        return count
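
A brief sketch of two of these context managers in use, assuming a default connection to a running mongod with profiling available. The Group model mirrors the docstring examples, and the exact query counts assume no other traffic on the database:

    from mongoengine import Document, StringField, connect
    from mongoengine.context_managers import switch_collection, query_counter

    connect('mongoenginetest')

    class Group(Document):
        name = StringField()

    Group(name='test').save()               # saved to the 'group' collection

    with switch_collection(Group, 'group_archive') as Group:
        Group(name='archived').save()       # saved to 'group_archive' instead

    with query_counter() as q:
        assert q == 0
        Group.objects.first()               # one more query recorded by the profiler
        assert q == 1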
@@ -1,18 +1,20 @@
+from collections import OrderedDict
+
 from bson import DBRef, SON
+import six
 
-from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document)
-from fields import (ReferenceField, ListField, DictField, MapField)
-from connection import get_db
-from queryset import QuerySet
-from document import Document
+from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList,
+                              TopLevelDocumentMetaclass, get_document)
+from mongoengine.connection import get_db
+from mongoengine.document import Document, EmbeddedDocument
+from mongoengine.fields import DictField, ListField, MapField, ReferenceField
+from mongoengine.queryset import QuerySet
 
 
 class DeReference(object):
 
     def __call__(self, items, max_depth=1, instance=None, name=None):
         """
         Cheaply dereferences the items to a set depth.
-        Also handles the convertion of complex data types.
+        Also handles the conversion of complex data types.
 
         :param items: The iterable (dict, list, queryset) to be dereferenced.
         :param max_depth: The maximum depth to recurse to
@@ -22,7 +24,7 @@ class DeReference(object):
             :class:`~mongoengine.base.ComplexBaseField`
         :param get: A boolean determining if being called by __get__
         """
-        if items is None or isinstance(items, basestring):
+        if items is None or isinstance(items, six.string_types):
             return items
 
         # cheapest way to convert a queryset to a list
@@ -31,15 +33,45 @@ class DeReference(object):
             items = [i for i in items]
 
         self.max_depth = max_depth
 
         doc_type = None
-        if instance and instance._fields:
-            doc_type = instance._fields[name].field
+
+        if instance and isinstance(instance, (Document, EmbeddedDocument,
+                                              TopLevelDocumentMetaclass)):
+            doc_type = instance._fields.get(name)
+            while hasattr(doc_type, 'field'):
+                doc_type = doc_type.field
 
             if isinstance(doc_type, ReferenceField):
+                field = doc_type
                 doc_type = doc_type.document_type
-                if all([i.__class__ == doc_type for i in items]):
+                is_list = not hasattr(items, 'items')
+
+                if is_list and all([i.__class__ == doc_type for i in items]):
                     return items
+                elif not is_list and all(
+                        [i.__class__ == doc_type for i in items.values()]):
+                    return items
+                elif not field.dbref:
+                    if not hasattr(items, 'items'):
+
+                        def _get_items(items):
+                            new_items = []
+                            for v in items:
+                                if isinstance(v, list):
+                                    new_items.append(_get_items(v))
+                                elif not isinstance(v, (DBRef, Document)):
+                                    new_items.append(field.to_python(v))
+                                else:
+                                    new_items.append(v)
+                            return new_items
+
+                        items = _get_items(items)
+                    else:
+                        items = {
+                            k: (v if isinstance(v, (DBRef, Document))
+                                else field.to_python(v))
+                            for k, v in items.iteritems()
+                        }
 
         self.reference_map = self._find_references(items)
         self.object_map = self._fetch_objects(doc_type=doc_type)
@@ -57,36 +89,36 @@ class DeReference(object):
             return reference_map
 
         # Determine the iterator to use
-        if not hasattr(items, 'items'):
-            iterator = enumerate(items)
+        if isinstance(items, dict):
+            iterator = items.values()
         else:
-            iterator = items.iteritems()
+            iterator = items
 
         # Recursively find dbreferences
         depth += 1
-        for k, item in iterator:
-            if hasattr(item, '_fields'):
+        for item in iterator:
+            if isinstance(item, (Document, EmbeddedDocument)):
                 for field_name, field in item._fields.iteritems():
                     v = item._data.get(field_name, None)
-                    if isinstance(v, (DBRef)):
-                        reference_map.setdefault(field.document_type, []).append(v.id)
+                    if isinstance(v, DBRef):
+                        reference_map.setdefault(field.document_type, set()).add(v.id)
                     elif isinstance(v, (dict, SON)) and '_ref' in v:
-                        reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
+                        reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id)
                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
                         references = self._find_references(v, depth)
                         for key, refs in references.iteritems():
                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                                 key = field_cls
-                            reference_map.setdefault(key, []).extend(refs)
-            elif isinstance(item, (DBRef)):
-                reference_map.setdefault(item.collection, []).append(item.id)
+                            reference_map.setdefault(key, set()).update(refs)
+            elif isinstance(item, DBRef):
+                reference_map.setdefault(item.collection, set()).add(item.id)
             elif isinstance(item, (dict, SON)) and '_ref' in item:
-                reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
+                reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id)
             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
                 references = self._find_references(item, depth - 1)
                 for key, refs in references.iteritems():
-                    reference_map.setdefault(key, []).extend(refs)
+                    reference_map.setdefault(key, set()).update(refs)
 
         return reference_map
 
@@ -94,31 +126,38 @@ class DeReference(object):
         """Fetch all references and convert to their document objects
         """
         object_map = {}
-        for col, dbrefs in self.reference_map.iteritems():
-            keys = object_map.keys()
-            refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys]))
-            if hasattr(col, 'objects'):  # We have a document class for the refs
-                references = col.objects.in_bulk(refs)
+        for collection, dbrefs in self.reference_map.iteritems():
+            if hasattr(collection, 'objects'):  # We have a document class for the refs
+                col_name = collection._get_collection_name()
+                refs = [dbref for dbref in dbrefs
+                        if (col_name, dbref) not in object_map]
+                references = collection.objects.in_bulk(refs)
                 for key, doc in references.iteritems():
-                    object_map[key] = doc
+                    object_map[(col_name, key)] = doc
            else:  # Generic reference: use the refs data to convert to document
-                if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,) ):
-                    references = doc_type._get_db()[col].find({'_id': {'$in': refs}})
+                if isinstance(doc_type, (ListField, DictField, MapField,)):
+                    continue
+
+                refs = [dbref for dbref in dbrefs
+                        if (collection, dbref) not in object_map]
+
+                if doc_type:
+                    references = doc_type._get_db()[collection].find({'_id': {'$in': refs}})
                     for ref in references:
                         doc = doc_type._from_son(ref)
-                        object_map[doc.id] = doc
+                        object_map[(collection, doc.id)] = doc
                 else:
-                    references = get_db()[col].find({'_id': {'$in': refs}})
+                    references = get_db()[collection].find({'_id': {'$in': refs}})
                    for ref in references:
                        if '_cls' in ref:
-                            doc = get_document(ref["_cls"])._from_son(ref)
+                            doc = get_document(ref['_cls'])._from_son(ref)
                        elif doc_type is None:
                            doc = get_document(
                                ''.join(x.capitalize()
-                                        for x in col.split('_')))._from_son(ref)
+                                        for x in collection.split('_')))._from_son(ref)
                        else:
                            doc = doc_type._from_son(ref)
-                        object_map[doc.id] = doc
+                        object_map[(collection, doc.id)] = doc
         return object_map
 
     def _attach_objects(self, items, depth=0, instance=None, name=None):
@@ -144,16 +183,29 @@ class DeReference(object):
 
         if isinstance(items, (dict, SON)):
             if '_ref' in items:
-                return self.object_map.get(items['_ref'].id, items)
-            elif '_types' in items and '_cls' in items:
+                return self.object_map.get(
+                    (items['_ref'].collection, items['_ref'].id), items)
+            elif '_cls' in items:
                 doc = get_document(items['_cls'])._from_son(items)
-                doc._data = self._attach_objects(doc._data, depth, doc, name)
+                _cls = doc._data.pop('_cls', None)
+                del items['_cls']
+                doc._data = self._attach_objects(doc._data, depth, doc, None)
+                if _cls is not None:
+                    doc._data['_cls'] = _cls
                 return doc
 
         if not hasattr(items, 'items'):
             is_list = True
+            list_type = BaseList
+            if isinstance(items, EmbeddedDocumentList):
+                list_type = EmbeddedDocumentList
+            as_tuple = isinstance(items, tuple)
             iterator = enumerate(items)
             data = []
+        elif isinstance(items, OrderedDict):
+            is_list = False
+            iterator = items.iteritems()
+            data = OrderedDict()
         else:
             is_list = False
             iterator = items.iteritems()
@@ -166,27 +218,29 @@ class DeReference(object):
             else:
                 data[k] = v
 
-            if k in self.object_map:
+            if k in self.object_map and not is_list:
                 data[k] = self.object_map[k]
-            elif hasattr(v, '_fields'):
-                for field_name, field in v._fields.iteritems():
+            elif isinstance(v, (Document, EmbeddedDocument)):
+                for field_name in v._fields:
                     v = data[k]._data.get(field_name, None)
-                    if isinstance(v, (DBRef)):
-                        data[k]._data[field_name] = self.object_map.get(v.id, v)
+                    if isinstance(v, DBRef):
+                        data[k]._data[field_name] = self.object_map.get(
+                            (v.collection, v.id), v)
                    elif isinstance(v, (dict, SON)) and '_ref' in v:
-                        data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
-                    elif isinstance(v, dict) and depth <= self.max_depth:
-                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
-                    elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
-                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
+                        data[k]._data[field_name] = self.object_map.get(
+                            (v['_ref'].collection, v['_ref'].id), v)
+                    elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
+                        item_name = six.text_type('{0}.{1}.{2}').format(name, k, field_name)
+                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name)
             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
-                data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name)
+                item_name = '%s.%s' % (name, k) if name else name
+                data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name)
             elif hasattr(v, 'id'):
-                data[k] = self.object_map.get(v.id, v)
+                data[k] = self.object_map.get((v.collection, v.id), v)
 
         if instance and name:
             if is_list:
-                return BaseList(data, instance, name)
+                return tuple(data) if as_tuple else list_type(data, instance, name)
             return BaseDict(data, instance, name)
         depth += 1
         return data
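
The (collection, id) keys now used for object_map are what back bulk dereferencing, for example via QuerySet.select_related(). A rough sketch of the observable behaviour, with illustrative Author/Book models and a registered default connection assumed:

    from mongoengine import Document, ReferenceField, StringField, connect

    connect('mongoenginetest')

    class Author(Document):
        name = StringField()

    class Book(Document):
        title = StringField()
        author = ReferenceField(Author)

    author = Author(name='A. Writer').save()
    Book(title='On Diffs', author=author).save()

    # select_related() hands the fetched documents to DeReference, which
    # collects the DBRefs per collection and resolves them in bulk.
    books = Book.objects.select_related()
    print(books[0].author.name)    # A. Writer, resolved without a per-reference query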
@@ -1,158 +0,0 @@
import datetime

from mongoengine import *

from django.utils.encoding import smart_str
from django.contrib.auth.models import AnonymousUser
from django.utils.translation import ugettext_lazy as _

try:
    from django.contrib.auth.hashers import check_password, make_password
except ImportError:
    """Handle older versions of Django"""
    from django.utils.hashcompat import md5_constructor, sha_constructor

    def get_hexdigest(algorithm, salt, raw_password):
        raw_password, salt = smart_str(raw_password), smart_str(salt)
        if algorithm == 'md5':
            return md5_constructor(salt + raw_password).hexdigest()
        elif algorithm == 'sha1':
            return sha_constructor(salt + raw_password).hexdigest()
        raise ValueError('Got unknown password algorithm type in password')

    def check_password(raw_password, password):
        algo, salt, hash = password.split('$')
        return hash == get_hexdigest(algo, salt, raw_password)

    def make_password(raw_password):
        from random import random
        algo = 'sha1'
        salt = get_hexdigest(algo, str(random()), str(random()))[:5]
        hash = get_hexdigest(algo, salt, raw_password)
        return '%s$%s$%s' % (algo, salt, hash)


REDIRECT_FIELD_NAME = 'next'


class User(Document):
    """A User document that aims to mirror most of the API specified by Django
    at http://docs.djangoproject.com/en/dev/topics/auth/#users
    """
    username = StringField(max_length=30, required=True,
                           verbose_name=_('username'),
                           help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters"))

    first_name = StringField(max_length=30,
                             verbose_name=_('first name'))

    last_name = StringField(max_length=30,
                            verbose_name=_('last name'))
    email = EmailField(verbose_name=_('e-mail address'))
    password = StringField(max_length=128,
                           verbose_name=_('password'),
                           help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
    is_staff = BooleanField(default=False,
                            verbose_name=_('staff status'),
                            help_text=_("Designates whether the user can log into this admin site."))
    is_active = BooleanField(default=True,
                             verbose_name=_('active'),
                             help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts."))
    is_superuser = BooleanField(default=False,
                                verbose_name=_('superuser status'),
                                help_text=_("Designates that this user has all permissions without explicitly assigning them."))
    last_login = DateTimeField(default=datetime.datetime.now,
                               verbose_name=_('last login'))
    date_joined = DateTimeField(default=datetime.datetime.now,
                                verbose_name=_('date joined'))

    meta = {
        'indexes': [
            {'fields': ['username'], 'unique': True}
        ]
    }

    def __unicode__(self):
        return self.username

    def get_full_name(self):
        """Returns the users first and last names, separated by a space.
        """
        full_name = u'%s %s' % (self.first_name or '', self.last_name or '')
        return full_name.strip()

    def is_anonymous(self):
return False
|
|
||||||
|
|
||||||
def is_authenticated(self):
|
|
||||||
return True
|
|
||||||
|
|
||||||
def set_password(self, raw_password):
|
|
||||||
"""Sets the user's password - always use this rather than directly
|
|
||||||
assigning to :attr:`~mongoengine.django.auth.User.password` as the
|
|
||||||
password is hashed before storage.
|
|
||||||
"""
|
|
||||||
self.password = make_password(raw_password)
|
|
||||||
self.save()
|
|
||||||
return self
|
|
||||||
|
|
||||||
def check_password(self, raw_password):
|
|
||||||
"""Checks the user's password against a provided password - always use
|
|
||||||
this rather than directly comparing to
|
|
||||||
:attr:`~mongoengine.django.auth.User.password` as the password is
|
|
||||||
hashed before storage.
|
|
||||||
"""
|
|
||||||
return check_password(raw_password, self.password)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def create_user(cls, username, password, email=None):
|
|
||||||
"""Create (and save) a new user with the given username, password and
|
|
||||||
email address.
|
|
||||||
"""
|
|
||||||
now = datetime.datetime.now()
|
|
||||||
|
|
||||||
# Normalize the address by lowercasing the domain part of the email
|
|
||||||
# address.
|
|
||||||
if email is not None:
|
|
||||||
try:
|
|
||||||
email_name, domain_part = email.strip().split('@', 1)
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
email = '@'.join([email_name, domain_part.lower()])
|
|
||||||
|
|
||||||
user = cls(username=username, email=email, date_joined=now)
|
|
||||||
user.set_password(password)
|
|
||||||
user.save()
|
|
||||||
return user
|
|
||||||
|
|
||||||
def get_and_delete_messages(self):
|
|
||||||
return []
|
|
||||||
|
|
||||||
|
|
||||||
class MongoEngineBackend(object):
|
|
||||||
"""Authenticate using MongoEngine and mongoengine.django.auth.User.
|
|
||||||
"""
|
|
||||||
|
|
||||||
supports_object_permissions = False
|
|
||||||
supports_anonymous_user = False
|
|
||||||
supports_inactive_user = False
|
|
||||||
|
|
||||||
def authenticate(self, username=None, password=None):
|
|
||||||
user = User.objects(username=username).first()
|
|
||||||
if user:
|
|
||||||
if password and user.check_password(password):
|
|
||||||
return user
|
|
||||||
return None
|
|
||||||
|
|
||||||
def get_user(self, user_id):
|
|
||||||
return User.objects.with_id(user_id)
|
|
||||||
|
|
||||||
|
|
||||||
def get_user(userid):
|
|
||||||
"""Returns a User object from an id (User.id). Django's equivalent takes
|
|
||||||
request, but taking an id instead leaves it up to the developer to store
|
|
||||||
the id in any way they want (session, signed cookie, etc.)
|
|
||||||
"""
|
|
||||||
if not userid:
|
|
||||||
return AnonymousUser()
|
|
||||||
return MongoEngineBackend().get_user(userid) or AnonymousUser()
|
|
||||||
mongoengine/django/sessions.py  (deleted)
@@ -1,74 +0,0 @@
|
|||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from django.conf import settings
|
|
||||||
from django.contrib.sessions.backends.base import SessionBase, CreateError
|
|
||||||
from django.core.exceptions import SuspiciousOperation
|
|
||||||
from django.utils.encoding import force_unicode
|
|
||||||
|
|
||||||
from mongoengine.document import Document
|
|
||||||
from mongoengine import fields
|
|
||||||
from mongoengine.queryset import OperationError
|
|
||||||
from mongoengine.connection import DEFAULT_CONNECTION_NAME
|
|
||||||
|
|
||||||
|
|
||||||
MONGOENGINE_SESSION_DB_ALIAS = getattr(
|
|
||||||
settings, 'MONGOENGINE_SESSION_DB_ALIAS',
|
|
||||||
DEFAULT_CONNECTION_NAME)
|
|
||||||
|
|
||||||
|
|
||||||
class MongoSession(Document):
|
|
||||||
session_key = fields.StringField(primary_key=True, max_length=40)
|
|
||||||
session_data = fields.StringField()
|
|
||||||
expire_date = fields.DateTimeField()
|
|
||||||
|
|
||||||
meta = {'collection': 'django_session',
|
|
||||||
'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
|
|
||||||
'allow_inheritance': False}
|
|
||||||
|
|
||||||
|
|
||||||
class SessionStore(SessionBase):
|
|
||||||
"""A MongoEngine-based session store for Django.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def load(self):
|
|
||||||
try:
|
|
||||||
s = MongoSession.objects(session_key=self.session_key,
|
|
||||||
expire_date__gt=datetime.now())[0]
|
|
||||||
return self.decode(force_unicode(s.session_data))
|
|
||||||
except (IndexError, SuspiciousOperation):
|
|
||||||
self.create()
|
|
||||||
return {}
|
|
||||||
|
|
||||||
def exists(self, session_key):
|
|
||||||
return bool(MongoSession.objects(session_key=session_key).first())
|
|
||||||
|
|
||||||
def create(self):
|
|
||||||
while True:
|
|
||||||
self._session_key = self._get_new_session_key()
|
|
||||||
try:
|
|
||||||
self.save(must_create=True)
|
|
||||||
except CreateError:
|
|
||||||
continue
|
|
||||||
self.modified = True
|
|
||||||
self._session_cache = {}
|
|
||||||
return
|
|
||||||
|
|
||||||
def save(self, must_create=False):
|
|
||||||
if self.session_key is None:
|
|
||||||
self.create()
|
|
||||||
s = MongoSession(session_key=self.session_key)
|
|
||||||
s.session_data = self.encode(self._get_session(no_load=must_create))
|
|
||||||
s.expire_date = self.get_expiry_date()
|
|
||||||
try:
|
|
||||||
s.save(force_insert=must_create, safe=True)
|
|
||||||
except OperationError:
|
|
||||||
if must_create:
|
|
||||||
raise CreateError
|
|
||||||
raise
|
|
||||||
|
|
||||||
def delete(self, session_key=None):
|
|
||||||
if session_key is None:
|
|
||||||
if self.session_key is None:
|
|
||||||
return
|
|
||||||
session_key = self.session_key
|
|
||||||
MongoSession.objects(session_key=session_key).delete()
|
|
||||||
mongoengine/django/shortcuts.py  (deleted)
@@ -1,46 +0,0 @@
-from django.http import Http404
-from mongoengine.queryset import QuerySet
-from mongoengine.base import BaseDocument
-from mongoengine.base import ValidationError
-
-def _get_queryset(cls):
-    """Inspired by django.shortcuts.*"""
-    if isinstance(cls, QuerySet):
-        return cls
-    else:
-        return cls.objects
-
-def get_document_or_404(cls, *args, **kwargs):
-    """
-    Uses get() to return an document, or raises a Http404 exception if the document
-    does not exist.
-
-    cls may be a Document or QuerySet object. All other passed
-    arguments and keyword arguments are used in the get() query.
-
-    Note: Like with get(), an MultipleObjectsReturned will be raised if more than one
-    object is found.
-
-    Inspired by django.shortcuts.*
-    """
-    queryset = _get_queryset(cls)
-    try:
-        return queryset.get(*args, **kwargs)
-    except (queryset._document.DoesNotExist, ValidationError):
-        raise Http404('No %s matches the given query.' % queryset._document._class_name)
-
-def get_list_or_404(cls, *args, **kwargs):
-    """
-    Uses filter() to return a list of documents, or raise a Http404 exception if
-    the list is empty.
-
-    cls may be a Document or QuerySet object. All other passed
-    arguments and keyword arguments are used in the filter() query.
-
-    Inspired by django.shortcuts.*
-    """
-    queryset = _get_queryset(cls)
-    obj_list = list(queryset.filter(*args, **kwargs))
-    if not obj_list:
-        raise Http404('No %s matches the given query.' % queryset._document._class_name)
-    return obj_list
mongoengine/django/storage.py  (deleted)
@@ -1,112 +0,0 @@
|
|||||||
import os
|
|
||||||
import itertools
|
|
||||||
import urlparse
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
from django.conf import settings
|
|
||||||
from django.core.files.storage import Storage
|
|
||||||
from django.core.exceptions import ImproperlyConfigured
|
|
||||||
|
|
||||||
|
|
||||||
class FileDocument(Document):
|
|
||||||
"""A document used to store a single file in GridFS.
|
|
||||||
"""
|
|
||||||
file = FileField()
|
|
||||||
|
|
||||||
|
|
||||||
class GridFSStorage(Storage):
|
|
||||||
"""A custom storage backend to store files in GridFS
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, base_url=None):
|
|
||||||
|
|
||||||
if base_url is None:
|
|
||||||
base_url = settings.MEDIA_URL
|
|
||||||
self.base_url = base_url
|
|
||||||
self.document = FileDocument
|
|
||||||
self.field = 'file'
|
|
||||||
|
|
||||||
def delete(self, name):
|
|
||||||
"""Deletes the specified file from the storage system.
|
|
||||||
"""
|
|
||||||
if self.exists(name):
|
|
||||||
doc = self.document.objects.first()
|
|
||||||
field = getattr(doc, self.field)
|
|
||||||
self._get_doc_with_name(name).delete() # Delete the FileField
|
|
||||||
field.delete() # Delete the FileDocument
|
|
||||||
|
|
||||||
def exists(self, name):
|
|
||||||
"""Returns True if a file referened by the given name already exists in the
|
|
||||||
storage system, or False if the name is available for a new file.
|
|
||||||
"""
|
|
||||||
doc = self._get_doc_with_name(name)
|
|
||||||
if doc:
|
|
||||||
field = getattr(doc, self.field)
|
|
||||||
return bool(field.name)
|
|
||||||
else:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def listdir(self, path=None):
|
|
||||||
"""Lists the contents of the specified path, returning a 2-tuple of lists;
|
|
||||||
the first item being directories, the second item being files.
|
|
||||||
"""
|
|
||||||
def name(doc):
|
|
||||||
return getattr(doc, self.field).name
|
|
||||||
docs = self.document.objects
|
|
||||||
return [], [name(d) for d in docs if name(d)]
|
|
||||||
|
|
||||||
def size(self, name):
|
|
||||||
"""Returns the total size, in bytes, of the file specified by name.
|
|
||||||
"""
|
|
||||||
doc = self._get_doc_with_name(name)
|
|
||||||
if doc:
|
|
||||||
return getattr(doc, self.field).length
|
|
||||||
else:
|
|
||||||
raise ValueError("No such file or directory: '%s'" % name)
|
|
||||||
|
|
||||||
def url(self, name):
|
|
||||||
"""Returns an absolute URL where the file's contents can be accessed
|
|
||||||
directly by a web browser.
|
|
||||||
"""
|
|
||||||
if self.base_url is None:
|
|
||||||
raise ValueError("This file is not accessible via a URL.")
|
|
||||||
return urlparse.urljoin(self.base_url, name).replace('\\', '/')
|
|
||||||
|
|
||||||
def _get_doc_with_name(self, name):
|
|
||||||
"""Find the documents in the store with the given name
|
|
||||||
"""
|
|
||||||
docs = self.document.objects
|
|
||||||
doc = [d for d in docs if getattr(d, self.field).name == name]
|
|
||||||
if doc:
|
|
||||||
return doc[0]
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
def _open(self, name, mode='rb'):
|
|
||||||
doc = self._get_doc_with_name(name)
|
|
||||||
if doc:
|
|
||||||
return getattr(doc, self.field)
|
|
||||||
else:
|
|
||||||
raise ValueError("No file found with the name '%s'." % name)
|
|
||||||
|
|
||||||
def get_available_name(self, name):
|
|
||||||
"""Returns a filename that's free on the target storage system, and
|
|
||||||
available for new content to be written to.
|
|
||||||
"""
|
|
||||||
file_root, file_ext = os.path.splitext(name)
|
|
||||||
# If the filename already exists, add an underscore and a number (before
|
|
||||||
# the file extension, if one exists) to the filename until the generated
|
|
||||||
# filename doesn't exist.
|
|
||||||
count = itertools.count(1)
|
|
||||||
while self.exists(name):
|
|
||||||
# file_ext includes the dot.
|
|
||||||
name = os.path.join("%s_%s%s" % (file_root, count.next(), file_ext))
|
|
||||||
|
|
||||||
return name
|
|
||||||
|
|
||||||
def _save(self, name, content):
|
|
||||||
doc = self.document()
|
|
||||||
getattr(doc, self.field).put(content, filename=name)
|
|
||||||
doc.save()
|
|
||||||
|
|
||||||
return name
|
|
||||||
mongoengine/django/tests.py  (deleted)
@@ -1,21 +0,0 @@
-#coding: utf-8
-from django.test import TestCase
-from django.conf import settings
-
-from mongoengine import connect
-
-class MongoTestCase(TestCase):
-    """
-    TestCase class that clear the collection between the tests
-    """
-    db_name = 'test_%s' % settings.MONGO_DATABASE_NAME
-    def __init__(self, methodName='runtest'):
-        self.db = connect(self.db_name)
-        super(MongoTestCase, self).__init__(methodName)
-
-    def _post_teardown(self):
-        super(MongoTestCase, self)._post_teardown()
-        for collection in self.db.collection_names():
-            if collection == 'system.indexes':
-                continue
-            self.db.drop_collection(collection)
File diff suppressed because it is too large

145  mongoengine/errors.py  (new file)
@@ -0,0 +1,145 @@
|
|||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
|
||||||
|
'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',
|
||||||
|
'OperationError', 'NotUniqueError', 'FieldDoesNotExist',
|
||||||
|
'ValidationError', 'SaveConditionError')
|
||||||
|
|
||||||
|
|
||||||
|
class NotRegistered(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidDocumentError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class LookUpError(AttributeError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class DoesNotExist(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class MultipleObjectsReturned(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidQueryError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class OperationError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class NotUniqueError(OperationError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class SaveConditionError(OperationError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class FieldDoesNotExist(Exception):
|
||||||
|
"""Raised when trying to set a field
|
||||||
|
not declared in a :class:`~mongoengine.Document`
|
||||||
|
or an :class:`~mongoengine.EmbeddedDocument`.
|
||||||
|
|
||||||
|
To avoid this behavior on data loading,
|
||||||
|
you should set the :attr:`strict` to ``False``
|
||||||
|
in the :attr:`meta` dictionary.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class ValidationError(AssertionError):
|
||||||
|
"""Validation exception.
|
||||||
|
|
||||||
|
May represent an error validating a field or a
|
||||||
|
document containing fields with validation errors.
|
||||||
|
|
||||||
|
:ivar errors: A dictionary of errors for fields within this
|
||||||
|
document or list, or None if the error is for an
|
||||||
|
individual field.
|
||||||
|
"""
|
||||||
|
|
||||||
|
errors = {}
|
||||||
|
field_name = None
|
||||||
|
_message = None
|
||||||
|
|
||||||
|
def __init__(self, message='', **kwargs):
|
||||||
|
self.errors = kwargs.get('errors', {})
|
||||||
|
self.field_name = kwargs.get('field_name')
|
||||||
|
self.message = message
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return six.text_type(self.message)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return '%s(%s,)' % (self.__class__.__name__, self.message)
|
||||||
|
|
||||||
|
def __getattribute__(self, name):
|
||||||
|
message = super(ValidationError, self).__getattribute__(name)
|
||||||
|
if name == 'message':
|
||||||
|
if self.field_name:
|
||||||
|
message = '%s' % message
|
||||||
|
if self.errors:
|
||||||
|
message = '%s(%s)' % (message, self._format_errors())
|
||||||
|
return message
|
||||||
|
|
||||||
|
def _get_message(self):
|
||||||
|
return self._message
|
||||||
|
|
||||||
|
def _set_message(self, message):
|
||||||
|
self._message = message
|
||||||
|
|
||||||
|
message = property(_get_message, _set_message)
|
||||||
|
|
||||||
|
def to_dict(self):
|
||||||
|
"""Returns a dictionary of all errors within a document
|
||||||
|
|
||||||
|
Keys are field names or list indices and values are the
|
||||||
|
validation error messages, or a nested dictionary of
|
||||||
|
errors for an embedded document or list.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def build_dict(source):
|
||||||
|
errors_dict = {}
|
||||||
|
if not source:
|
||||||
|
return errors_dict
|
||||||
|
|
||||||
|
if isinstance(source, dict):
|
||||||
|
for field_name, error in source.iteritems():
|
||||||
|
errors_dict[field_name] = build_dict(error)
|
||||||
|
elif isinstance(source, ValidationError) and source.errors:
|
||||||
|
return build_dict(source.errors)
|
||||||
|
else:
|
||||||
|
return six.text_type(source)
|
||||||
|
|
||||||
|
return errors_dict
|
||||||
|
|
||||||
|
if not self.errors:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
return build_dict(self.errors)
|
||||||
|
|
||||||
|
def _format_errors(self):
|
||||||
|
"""Returns a string listing all errors within a document"""
|
||||||
|
|
||||||
|
def generate_key(value, prefix=''):
|
||||||
|
if isinstance(value, list):
|
||||||
|
value = ' '.join([generate_key(k) for k in value])
|
||||||
|
elif isinstance(value, dict):
|
||||||
|
value = ' '.join(
|
||||||
|
[generate_key(v, k) for k, v in value.iteritems()])
|
||||||
|
|
||||||
|
results = '%s.%s' % (prefix, value) if prefix else value
|
||||||
|
return results
|
||||||
|
|
||||||
|
error_dict = defaultdict(list)
|
||||||
|
for k, v in self.to_dict().iteritems():
|
||||||
|
error_dict[generate_key(v)].append(k)
|
||||||
|
return ' '.join(['%s: %s' % (k, v) for k, v in error_dict.iteritems()])
|
||||||
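For context on the ValidationError added above, here is a minimal usage sketch (illustrative only; the field name is made up, and the snippet assumes the Python 2-era code shown here with six installed, since to_dict() relies on iteritems()):

from mongoengine.errors import ValidationError

# Build a nested error by hand; normally Document.validate() constructs this.
inner = ValidationError('Field is required', field_name='name')
outer = ValidationError('ValidationError', errors={'name': inner})

print(outer.to_dict())   # -> {'name': 'Field is required'}
print(str(outer))        # the message plus the formatted per-field errors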
File diff suppressed because it is too large

25  mongoengine/python_support.py  (new file)
@@ -0,0 +1,25 @@
+"""
+Helper functions, constants, and types to aid with Python v2.7 - v3.x and
+PyMongo v2.7 - v3.x support.
+"""
+import pymongo
+import six
+
+
+if pymongo.version_tuple[0] < 3:
+    IS_PYMONGO_3 = False
+else:
+    IS_PYMONGO_3 = True
+
+
+# six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3.
+StringIO = six.BytesIO
+
+# Additionally for Py2, try to use the faster cStringIO, if available
+if not six.PY3:
+    try:
+        import cStringIO
+    except ImportError:
+        pass
+    else:
+        StringIO = cStringIO.StringIO
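A quick illustration of the compatibility shims above (not part of the diff):

from mongoengine.python_support import IS_PYMONGO_3, StringIO

buf = StringIO(b'raw bytes')   # a bytes-oriented buffer on both Python 2 and 3
print(IS_PYMONGO_3)            # True when the installed PyMongo is version 3 or newer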
File diff suppressed because it is too large

17  mongoengine/queryset/__init__.py  (new file)
@@ -0,0 +1,17 @@
+from mongoengine.errors import *
+from mongoengine.queryset.field_list import *
+from mongoengine.queryset.manager import *
+from mongoengine.queryset.queryset import *
+from mongoengine.queryset.transform import *
+from mongoengine.queryset.visitor import *
+
+# Expose just the public subset of all imported objects and constants.
+__all__ = (
+    'QuerySet', 'QuerySetNoCache', 'Q', 'queryset_manager', 'QuerySetManager',
+    'QueryFieldList', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL',
+
+    # Errors that might be related to a queryset, mostly here for backward
+    # compatibility
+    'DoesNotExist', 'InvalidQueryError', 'MultipleObjectsReturned',
+    'NotUniqueError', 'OperationError',
+)
1914  mongoengine/queryset/base.py  (new file)
File diff suppressed because it is too large

85  mongoengine/queryset/field_list.py  (new file)
@@ -0,0 +1,85 @@
|
|||||||
|
__all__ = ('QueryFieldList',)
|
||||||
|
|
||||||
|
|
||||||
|
class QueryFieldList(object):
|
||||||
|
"""Object that handles combinations of .only() and .exclude() calls"""
|
||||||
|
ONLY = 1
|
||||||
|
EXCLUDE = 0
|
||||||
|
|
||||||
|
def __init__(self, fields=None, value=ONLY, always_include=None, _only_called=False):
|
||||||
|
"""The QueryFieldList builder
|
||||||
|
|
||||||
|
:param fields: A list of fields used in `.only()` or `.exclude()`
|
||||||
|
:param value: How to handle the fields; either `ONLY` or `EXCLUDE`
|
||||||
|
:param always_include: Any fields to always_include eg `_cls`
|
||||||
|
:param _only_called: Has `.only()` been called? If so its a set of fields
|
||||||
|
otherwise it performs a union.
|
||||||
|
"""
|
||||||
|
self.value = value
|
||||||
|
self.fields = set(fields or [])
|
||||||
|
self.always_include = set(always_include or [])
|
||||||
|
self._id = None
|
||||||
|
self._only_called = _only_called
|
||||||
|
self.slice = {}
|
||||||
|
|
||||||
|
def __add__(self, f):
|
||||||
|
if isinstance(f.value, dict):
|
||||||
|
for field in f.fields:
|
||||||
|
self.slice[field] = f.value
|
||||||
|
if not self.fields:
|
||||||
|
self.fields = f.fields
|
||||||
|
elif not self.fields:
|
||||||
|
self.fields = f.fields
|
||||||
|
self.value = f.value
|
||||||
|
self.slice = {}
|
||||||
|
elif self.value is self.ONLY and f.value is self.ONLY:
|
||||||
|
self._clean_slice()
|
||||||
|
if self._only_called:
|
||||||
|
self.fields = self.fields.union(f.fields)
|
||||||
|
else:
|
||||||
|
self.fields = f.fields
|
||||||
|
elif self.value is self.EXCLUDE and f.value is self.EXCLUDE:
|
||||||
|
self.fields = self.fields.union(f.fields)
|
||||||
|
self._clean_slice()
|
||||||
|
elif self.value is self.ONLY and f.value is self.EXCLUDE:
|
||||||
|
self.fields -= f.fields
|
||||||
|
self._clean_slice()
|
||||||
|
elif self.value is self.EXCLUDE and f.value is self.ONLY:
|
||||||
|
self.value = self.ONLY
|
||||||
|
self.fields = f.fields - self.fields
|
||||||
|
self._clean_slice()
|
||||||
|
|
||||||
|
if '_id' in f.fields:
|
||||||
|
self._id = f.value
|
||||||
|
|
||||||
|
if self.always_include:
|
||||||
|
if self.value is self.ONLY and self.fields:
|
||||||
|
if sorted(self.slice.keys()) != sorted(self.fields):
|
||||||
|
self.fields = self.fields.union(self.always_include)
|
||||||
|
else:
|
||||||
|
self.fields -= self.always_include
|
||||||
|
|
||||||
|
if getattr(f, '_only_called', False):
|
||||||
|
self._only_called = True
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __nonzero__(self):
|
||||||
|
return bool(self.fields)
|
||||||
|
|
||||||
|
def as_dict(self):
|
||||||
|
field_list = {field: self.value for field in self.fields}
|
||||||
|
if self.slice:
|
||||||
|
field_list.update(self.slice)
|
||||||
|
if self._id is not None:
|
||||||
|
field_list['_id'] = self._id
|
||||||
|
return field_list
|
||||||
|
|
||||||
|
def reset(self):
|
||||||
|
self.fields = set([])
|
||||||
|
self.slice = {}
|
||||||
|
self.value = self.ONLY
|
||||||
|
|
||||||
|
def _clean_slice(self):
|
||||||
|
if self.slice:
|
||||||
|
for field in set(self.slice.keys()) - self.fields:
|
||||||
|
del self.slice[field]
|
||||||
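A small sketch of how these .only()/.exclude() combinations resolve (field names are illustrative; this mirrors what a queryset builds internally when .only('name') is followed by .exclude('email')):

from mongoengine.queryset.field_list import QueryFieldList

only_name = QueryFieldList(fields=['name'], value=QueryFieldList.ONLY,
                           _only_called=True)
exclude_email = QueryFieldList(fields=['email'], value=QueryFieldList.EXCLUDE)

combined = only_name + exclude_email
print(combined.as_dict())   # -> {'name': 1}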
57  mongoengine/queryset/manager.py  (new file)
@@ -0,0 +1,57 @@
+from functools import partial
+from mongoengine.queryset.queryset import QuerySet
+
+__all__ = ('queryset_manager', 'QuerySetManager')
+
+
+class QuerySetManager(object):
+    """
+    The default QuerySet Manager.
+
+    Custom QuerySet Manager functions can extend this class and users can
+    add extra queryset functionality.  Any custom manager methods must accept a
+    :class:`~mongoengine.Document` class as its first argument, and a
+    :class:`~mongoengine.queryset.QuerySet` as its second argument.
+
+    The method function should return a :class:`~mongoengine.queryset.QuerySet`
+    , probably the same one that was passed in, but modified in some way.
+    """
+
+    get_queryset = None
+    default = QuerySet
+
+    def __init__(self, queryset_func=None):
+        if queryset_func:
+            self.get_queryset = queryset_func
+
+    def __get__(self, instance, owner):
+        """Descriptor for instantiating a new QuerySet object when
+        Document.objects is accessed.
+        """
+        if instance is not None:
+            # Document object being used rather than a document class
+            return self
+
+        # owner is the document that contains the QuerySetManager
+        queryset_class = owner._meta.get('queryset_class', self.default)
+        queryset = queryset_class(owner, owner._get_collection())
+        if self.get_queryset:
+            arg_count = self.get_queryset.func_code.co_argcount
+            if arg_count == 1:
+                queryset = self.get_queryset(queryset)
+            elif arg_count == 2:
+                queryset = self.get_queryset(owner, queryset)
+            else:
+                queryset = partial(self.get_queryset, owner, queryset)
+        return queryset
+
+
+def queryset_manager(func):
+    """Decorator that allows you to define custom QuerySet managers on
+    :class:`~mongoengine.Document` classes. The manager must be a function that
+    accepts a :class:`~mongoengine.Document` class as its first argument, and a
+    :class:`~mongoengine.queryset.QuerySet` as its second argument. The method
+    function should return a :class:`~mongoengine.queryset.QuerySet`, probably
+    the same one that was passed in, but modified in some way.
+    """
+    return QuerySetManager(func)
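A brief usage sketch for the decorator above (the Post document and its fields are made up for illustration):

from mongoengine import Document, StringField, queryset_manager

class Post(Document):
    title = StringField()
    status = StringField(default='draft')

    @queryset_manager
    def live_posts(doc_cls, queryset):
        # Receives the Document class and the default queryset and returns
        # a (possibly filtered) queryset.
        return queryset.filter(status='published')

# Post.live_posts then behaves like Post.objects, e.g. Post.live_posts.count(),
# once a connection has been registered with mongoengine.connect().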
193  mongoengine/queryset/queryset.py  (new file)
@@ -0,0 +1,193 @@
|
|||||||
|
from mongoengine.errors import OperationError
|
||||||
|
from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING,
|
||||||
|
NULLIFY, PULL)
|
||||||
|
|
||||||
|
__all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE',
|
||||||
|
'DENY', 'PULL')
|
||||||
|
|
||||||
|
# The maximum number of items to display in a QuerySet.__repr__
|
||||||
|
REPR_OUTPUT_SIZE = 20
|
||||||
|
ITER_CHUNK_SIZE = 100
|
||||||
|
|
||||||
|
|
||||||
|
class QuerySet(BaseQuerySet):
|
||||||
|
"""The default queryset, that builds queries and handles a set of results
|
||||||
|
returned from a query.
|
||||||
|
|
||||||
|
Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as
|
||||||
|
the results.
|
||||||
|
"""
|
||||||
|
|
||||||
|
_has_more = True
|
||||||
|
_len = None
|
||||||
|
_result_cache = None
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
"""Iteration utilises a results cache which iterates the cursor
|
||||||
|
in batches of ``ITER_CHUNK_SIZE``.
|
||||||
|
|
||||||
|
If ``self._has_more`` the cursor hasn't been exhausted so cache then
|
||||||
|
batch. Otherwise iterate the result_cache.
|
||||||
|
"""
|
||||||
|
self._iter = True
|
||||||
|
|
||||||
|
if self._has_more:
|
||||||
|
return self._iter_results()
|
||||||
|
|
||||||
|
# iterating over the cache.
|
||||||
|
return iter(self._result_cache)
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
"""Since __len__ is called quite frequently (for example, as part of
|
||||||
|
list(qs)), we populate the result cache and cache the length.
|
||||||
|
"""
|
||||||
|
if self._len is not None:
|
||||||
|
return self._len
|
||||||
|
|
||||||
|
# Populate the result cache with *all* of the docs in the cursor
|
||||||
|
if self._has_more:
|
||||||
|
list(self._iter_results())
|
||||||
|
|
||||||
|
# Cache the length of the complete result cache and return it
|
||||||
|
self._len = len(self._result_cache)
|
||||||
|
return self._len
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
"""Provide a string representation of the QuerySet"""
|
||||||
|
if self._iter:
|
||||||
|
return '.. queryset mid-iteration ..'
|
||||||
|
|
||||||
|
self._populate_cache()
|
||||||
|
data = self._result_cache[:REPR_OUTPUT_SIZE + 1]
|
||||||
|
if len(data) > REPR_OUTPUT_SIZE:
|
||||||
|
data[-1] = '...(remaining elements truncated)...'
|
||||||
|
return repr(data)
|
||||||
|
|
||||||
|
def _iter_results(self):
|
||||||
|
"""A generator for iterating over the result cache.
|
||||||
|
|
||||||
|
Also populates the cache if there are more possible results to
|
||||||
|
yield. Raises StopIteration when there are no more results.
|
||||||
|
"""
|
||||||
|
if self._result_cache is None:
|
||||||
|
self._result_cache = []
|
||||||
|
|
||||||
|
pos = 0
|
||||||
|
while True:
|
||||||
|
|
||||||
|
# For all positions lower than the length of the current result
|
||||||
|
# cache, serve the docs straight from the cache w/o hitting the
|
||||||
|
# database.
|
||||||
|
# XXX it's VERY important to compute the len within the `while`
|
||||||
|
# condition because the result cache might expand mid-iteration
|
||||||
|
# (e.g. if we call len(qs) inside a loop that iterates over the
|
||||||
|
# queryset). Fortunately len(list) is O(1) in Python, so this
|
||||||
|
# doesn't cause performance issues.
|
||||||
|
while pos < len(self._result_cache):
|
||||||
|
yield self._result_cache[pos]
|
||||||
|
pos += 1
|
||||||
|
|
||||||
|
# Raise StopIteration if we already established there were no more
|
||||||
|
# docs in the db cursor.
|
||||||
|
if not self._has_more:
|
||||||
|
raise StopIteration
|
||||||
|
|
||||||
|
# Otherwise, populate more of the cache and repeat.
|
||||||
|
if len(self._result_cache) <= pos:
|
||||||
|
self._populate_cache()
|
||||||
|
|
||||||
|
def _populate_cache(self):
|
||||||
|
"""
|
||||||
|
Populates the result cache with ``ITER_CHUNK_SIZE`` more entries
|
||||||
|
(until the cursor is exhausted).
|
||||||
|
"""
|
||||||
|
if self._result_cache is None:
|
||||||
|
self._result_cache = []
|
||||||
|
|
||||||
|
# Skip populating the cache if we already established there are no
|
||||||
|
# more docs to pull from the database.
|
||||||
|
if not self._has_more:
|
||||||
|
return
|
||||||
|
|
||||||
|
# Pull in ITER_CHUNK_SIZE docs from the database and store them in
|
||||||
|
# the result cache.
|
||||||
|
try:
|
||||||
|
for _ in xrange(ITER_CHUNK_SIZE):
|
||||||
|
self._result_cache.append(self.next())
|
||||||
|
except StopIteration:
|
||||||
|
# Getting this exception means there are no more docs in the
|
||||||
|
# db cursor. Set _has_more to False so that we can use that
|
||||||
|
# information in other places.
|
||||||
|
self._has_more = False
|
||||||
|
|
||||||
|
def count(self, with_limit_and_skip=False):
|
||||||
|
"""Count the selected elements in the query.
|
||||||
|
|
||||||
|
:param with_limit_and_skip (optional): take any :meth:`limit` or
|
||||||
|
:meth:`skip` that has been applied to this cursor into account when
|
||||||
|
getting the count
|
||||||
|
"""
|
||||||
|
if with_limit_and_skip is False:
|
||||||
|
return super(QuerySet, self).count(with_limit_and_skip)
|
||||||
|
|
||||||
|
if self._len is None:
|
||||||
|
self._len = super(QuerySet, self).count(with_limit_and_skip)
|
||||||
|
|
||||||
|
return self._len
|
||||||
|
|
||||||
|
def no_cache(self):
|
||||||
|
"""Convert to a non-caching queryset
|
||||||
|
|
||||||
|
.. versionadded:: 0.8.3 Convert to non caching queryset
|
||||||
|
"""
|
||||||
|
if self._result_cache is not None:
|
||||||
|
raise OperationError('QuerySet already cached')
|
||||||
|
|
||||||
|
return self._clone_into(QuerySetNoCache(self._document,
|
||||||
|
self._collection))
|
||||||
|
|
||||||
|
|
||||||
|
class QuerySetNoCache(BaseQuerySet):
|
||||||
|
"""A non caching QuerySet"""
|
||||||
|
|
||||||
|
def cache(self):
|
||||||
|
"""Convert to a caching queryset
|
||||||
|
|
||||||
|
.. versionadded:: 0.8.3 Convert to caching queryset
|
||||||
|
"""
|
||||||
|
return self._clone_into(QuerySet(self._document, self._collection))
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
"""Provides the string representation of the QuerySet
|
||||||
|
|
||||||
|
.. versionchanged:: 0.6.13 Now doesnt modify the cursor
|
||||||
|
"""
|
||||||
|
if self._iter:
|
||||||
|
return '.. queryset mid-iteration ..'
|
||||||
|
|
||||||
|
data = []
|
||||||
|
for _ in xrange(REPR_OUTPUT_SIZE + 1):
|
||||||
|
try:
|
||||||
|
data.append(self.next())
|
||||||
|
except StopIteration:
|
||||||
|
break
|
||||||
|
|
||||||
|
if len(data) > REPR_OUTPUT_SIZE:
|
||||||
|
data[-1] = '...(remaining elements truncated)...'
|
||||||
|
|
||||||
|
self.rewind()
|
||||||
|
return repr(data)
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
queryset = self
|
||||||
|
if queryset._iter:
|
||||||
|
queryset = self.clone()
|
||||||
|
queryset.rewind()
|
||||||
|
return queryset
|
||||||
|
|
||||||
|
|
||||||
|
class QuerySetNoDeRef(QuerySet):
|
||||||
|
"""Special no_dereference QuerySet"""
|
||||||
|
|
||||||
|
def __dereference(items, max_depth=1, instance=None, name=None):
|
||||||
|
return items
|
||||||
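To illustrate the caching behaviour implemented above (the Article document and database name are hypothetical, and a local mongod is assumed): the default QuerySet keeps every yielded document in _result_cache, while QuerySetNoCache does not.

from mongoengine import Document, StringField, connect

connect('example_db')                  # illustrative connection

class Article(Document):
    title = StringField()

articles = Article.objects             # caching QuerySet
print(len(articles))                   # exhausts the cursor into _result_cache

# Calling articles.no_cache() at this point would raise
# OperationError('QuerySet already cached'), so ask for a fresh queryset instead:
fresh = Article.objects.no_cache()     # QuerySetNoCache
for article in fresh:                  # documents are not retained between iterations
    print(article.title)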
427  mongoengine/queryset/transform.py  (new file)
@@ -0,0 +1,427 @@
|
|||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
from bson import ObjectId, SON
|
||||||
|
from bson.dbref import DBRef
|
||||||
|
import pymongo
|
||||||
|
import six
|
||||||
|
|
||||||
|
from mongoengine.base import UPDATE_OPERATORS
|
||||||
|
from mongoengine.common import _import_class
|
||||||
|
from mongoengine.connection import get_connection
|
||||||
|
from mongoengine.errors import InvalidQueryError
|
||||||
|
from mongoengine.python_support import IS_PYMONGO_3
|
||||||
|
|
||||||
|
__all__ = ('query', 'update')
|
||||||
|
|
||||||
|
COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
|
||||||
|
'all', 'size', 'exists', 'not', 'elemMatch', 'type')
|
||||||
|
GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
|
||||||
|
'within_box', 'within_polygon', 'near', 'near_sphere',
|
||||||
|
'max_distance', 'min_distance', 'geo_within', 'geo_within_box',
|
||||||
|
'geo_within_polygon', 'geo_within_center',
|
||||||
|
'geo_within_sphere', 'geo_intersects')
|
||||||
|
STRING_OPERATORS = ('contains', 'icontains', 'startswith',
|
||||||
|
'istartswith', 'endswith', 'iendswith',
|
||||||
|
'exact', 'iexact')
|
||||||
|
CUSTOM_OPERATORS = ('match',)
|
||||||
|
MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
|
||||||
|
STRING_OPERATORS + CUSTOM_OPERATORS)
|
||||||
|
|
||||||
|
|
||||||
|
# TODO make this less complex
|
||||||
|
def query(_doc_cls=None, **kwargs):
|
||||||
|
"""Transform a query from Django-style format to Mongo format."""
|
||||||
|
mongo_query = {}
|
||||||
|
merge_query = defaultdict(list)
|
||||||
|
for key, value in sorted(kwargs.items()):
|
||||||
|
if key == '__raw__':
|
||||||
|
mongo_query.update(value)
|
||||||
|
continue
|
||||||
|
|
||||||
|
parts = key.rsplit('__')
|
||||||
|
indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
|
||||||
|
parts = [part for part in parts if not part.isdigit()]
|
||||||
|
# Check for an operator and transform to mongo-style if there is
|
||||||
|
op = None
|
||||||
|
if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
|
||||||
|
op = parts.pop()
|
||||||
|
|
||||||
|
# Allow to escape operator-like field name by __
|
||||||
|
if len(parts) > 1 and parts[-1] == '':
|
||||||
|
parts.pop()
|
||||||
|
|
||||||
|
negate = False
|
||||||
|
if len(parts) > 1 and parts[-1] == 'not':
|
||||||
|
parts.pop()
|
||||||
|
negate = True
|
||||||
|
|
||||||
|
if _doc_cls:
|
||||||
|
# Switch field names to proper names [set in Field(name='foo')]
|
||||||
|
try:
|
||||||
|
fields = _doc_cls._lookup_field(parts)
|
||||||
|
except Exception as e:
|
||||||
|
raise InvalidQueryError(e)
|
||||||
|
parts = []
|
||||||
|
|
||||||
|
CachedReferenceField = _import_class('CachedReferenceField')
|
||||||
|
GenericReferenceField = _import_class('GenericReferenceField')
|
||||||
|
|
||||||
|
cleaned_fields = []
|
||||||
|
for field in fields:
|
||||||
|
append_field = True
|
||||||
|
if isinstance(field, six.string_types):
|
||||||
|
parts.append(field)
|
||||||
|
append_field = False
|
||||||
|
# is last and CachedReferenceField
|
||||||
|
elif isinstance(field, CachedReferenceField) and fields[-1] == field:
|
||||||
|
parts.append('%s._id' % field.db_field)
|
||||||
|
else:
|
||||||
|
parts.append(field.db_field)
|
||||||
|
|
||||||
|
if append_field:
|
||||||
|
cleaned_fields.append(field)
|
||||||
|
|
||||||
|
# Convert value to proper value
|
||||||
|
field = cleaned_fields[-1]
|
||||||
|
|
||||||
|
singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not']
|
||||||
|
singular_ops += STRING_OPERATORS
|
||||||
|
if op in singular_ops:
|
||||||
|
if isinstance(field, six.string_types):
|
||||||
|
if (op in STRING_OPERATORS and
|
||||||
|
isinstance(value, six.string_types)):
|
||||||
|
StringField = _import_class('StringField')
|
||||||
|
value = StringField.prepare_query_value(op, value)
|
||||||
|
else:
|
||||||
|
value = field
|
||||||
|
else:
|
||||||
|
value = field.prepare_query_value(op, value)
|
||||||
|
|
||||||
|
if isinstance(field, CachedReferenceField) and value:
|
||||||
|
value = value['_id']
|
||||||
|
|
||||||
|
elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
|
||||||
|
# Raise an error if the in/nin/all/near param is not iterable. We need a
|
||||||
|
# special check for BaseDocument, because - although it's iterable - using
|
||||||
|
# it as such in the context of this method is most definitely a mistake.
|
||||||
|
BaseDocument = _import_class('BaseDocument')
|
||||||
|
if isinstance(value, BaseDocument):
|
||||||
|
raise TypeError("When using the `in`, `nin`, `all`, or "
|
||||||
|
"`near`-operators you can\'t use a "
|
||||||
|
"`Document`, you must wrap your object "
|
||||||
|
"in a list (object -> [object]).")
|
||||||
|
elif not hasattr(value, '__iter__'):
|
||||||
|
raise TypeError("The `in`, `nin`, `all`, or "
|
||||||
|
"`near`-operators must be applied to an "
|
||||||
|
"iterable (e.g. a list).")
|
||||||
|
else:
|
||||||
|
value = [field.prepare_query_value(op, v) for v in value]
|
||||||
|
|
||||||
|
# If we're querying a GenericReferenceField, we need to alter the
|
||||||
|
# key depending on the value:
|
||||||
|
# * If the value is a DBRef, the key should be "field_name._ref".
|
||||||
|
# * If the value is an ObjectId, the key should be "field_name._ref.$id".
|
||||||
|
if isinstance(field, GenericReferenceField):
|
||||||
|
if isinstance(value, DBRef):
|
||||||
|
parts[-1] += '._ref'
|
||||||
|
elif isinstance(value, ObjectId):
|
||||||
|
parts[-1] += '._ref.$id'
|
||||||
|
|
||||||
|
# if op and op not in COMPARISON_OPERATORS:
|
||||||
|
if op:
|
||||||
|
if op in GEO_OPERATORS:
|
||||||
|
value = _geo_operator(field, op, value)
|
||||||
|
elif op in ('match', 'elemMatch'):
|
||||||
|
ListField = _import_class('ListField')
|
||||||
|
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
|
||||||
|
if (
|
||||||
|
isinstance(value, dict) and
|
||||||
|
isinstance(field, ListField) and
|
||||||
|
isinstance(field.field, EmbeddedDocumentField)
|
||||||
|
):
|
||||||
|
value = query(field.field.document_type, **value)
|
||||||
|
else:
|
||||||
|
value = field.prepare_query_value(op, value)
|
||||||
|
value = {'$elemMatch': value}
|
||||||
|
elif op in CUSTOM_OPERATORS:
|
||||||
|
NotImplementedError('Custom method "%s" has not '
|
||||||
|
'been implemented' % op)
|
||||||
|
elif op not in STRING_OPERATORS:
|
||||||
|
value = {'$' + op: value}
|
||||||
|
|
||||||
|
if negate:
|
||||||
|
value = {'$not': value}
|
||||||
|
|
||||||
|
for i, part in indices:
|
||||||
|
parts.insert(i, part)
|
||||||
|
|
||||||
|
key = '.'.join(parts)
|
||||||
|
|
||||||
|
if op is None or key not in mongo_query:
|
||||||
|
mongo_query[key] = value
|
||||||
|
elif key in mongo_query:
|
||||||
|
if isinstance(mongo_query[key], dict):
|
||||||
|
mongo_query[key].update(value)
|
||||||
|
# $max/minDistance needs to come last - convert to SON
|
||||||
|
value_dict = mongo_query[key]
|
||||||
|
if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \
|
||||||
|
('$near' in value_dict or '$nearSphere' in value_dict):
|
||||||
|
value_son = SON()
|
||||||
|
for k, v in value_dict.iteritems():
|
||||||
|
if k == '$maxDistance' or k == '$minDistance':
|
||||||
|
continue
|
||||||
|
value_son[k] = v
|
||||||
|
# Required for MongoDB >= 2.6, may fail when combining
|
||||||
|
# PyMongo 3+ and MongoDB < 2.6
|
||||||
|
near_embedded = False
|
||||||
|
for near_op in ('$near', '$nearSphere'):
|
||||||
|
if isinstance(value_dict.get(near_op), dict) and (
|
||||||
|
IS_PYMONGO_3 or get_connection().max_wire_version > 1):
|
||||||
|
value_son[near_op] = SON(value_son[near_op])
|
||||||
|
if '$maxDistance' in value_dict:
|
||||||
|
value_son[near_op][
|
||||||
|
'$maxDistance'] = value_dict['$maxDistance']
|
||||||
|
if '$minDistance' in value_dict:
|
||||||
|
value_son[near_op][
|
||||||
|
'$minDistance'] = value_dict['$minDistance']
|
||||||
|
near_embedded = True
|
||||||
|
if not near_embedded:
|
||||||
|
if '$maxDistance' in value_dict:
|
||||||
|
value_son['$maxDistance'] = value_dict['$maxDistance']
|
||||||
|
if '$minDistance' in value_dict:
|
||||||
|
value_son['$minDistance'] = value_dict['$minDistance']
|
||||||
|
mongo_query[key] = value_son
|
||||||
|
else:
|
||||||
|
# Store for manually merging later
|
||||||
|
merge_query[key].append(value)
|
||||||
|
|
||||||
|
# The queryset has been filter in such a way we must manually merge
|
||||||
|
for k, v in merge_query.items():
|
||||||
|
merge_query[k].append(mongo_query[k])
|
||||||
|
del mongo_query[k]
|
||||||
|
if isinstance(v, list):
|
||||||
|
value = [{k: val} for val in v]
|
||||||
|
if '$and' in mongo_query.keys():
|
||||||
|
mongo_query['$and'].extend(value)
|
||||||
|
else:
|
||||||
|
mongo_query['$and'] = value
|
||||||
|
|
||||||
|
return mongo_query
|
||||||
|
|
||||||
|
|
||||||
|
def update(_doc_cls=None, **update):
|
||||||
|
"""Transform an update spec from Django-style format to Mongo
|
||||||
|
format.
|
||||||
|
"""
|
||||||
|
mongo_update = {}
|
||||||
|
for key, value in update.items():
|
||||||
|
if key == '__raw__':
|
||||||
|
mongo_update.update(value)
|
||||||
|
continue
|
||||||
|
parts = key.split('__')
|
||||||
|
# if there is no operator, default to 'set'
|
||||||
|
if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
|
||||||
|
parts.insert(0, 'set')
|
||||||
|
# Check for an operator and transform to mongo-style if there is
|
||||||
|
op = None
|
||||||
|
if parts[0] in UPDATE_OPERATORS:
|
||||||
|
op = parts.pop(0)
|
||||||
|
# Convert Pythonic names to Mongo equivalents
|
||||||
|
if op in ('push_all', 'pull_all'):
|
||||||
|
op = op.replace('_all', 'All')
|
||||||
|
elif op == 'dec':
|
||||||
|
# Support decrement by flipping a positive value's sign
|
||||||
|
# and using 'inc'
|
||||||
|
op = 'inc'
|
||||||
|
value = -value
|
||||||
|
elif op == 'add_to_set':
|
||||||
|
op = 'addToSet'
|
||||||
|
elif op == 'set_on_insert':
|
||||||
|
op = 'setOnInsert'
|
||||||
|
|
||||||
|
match = None
|
||||||
|
if parts[-1] in COMPARISON_OPERATORS:
|
||||||
|
match = parts.pop()
|
||||||
|
|
||||||
|
# Allow to escape operator-like field name by __
|
||||||
|
if len(parts) > 1 and parts[-1] == '':
|
||||||
|
parts.pop()
|
||||||
|
|
||||||
|
if _doc_cls:
|
||||||
|
# Switch field names to proper names [set in Field(name='foo')]
|
||||||
|
try:
|
||||||
|
fields = _doc_cls._lookup_field(parts)
|
||||||
|
except Exception as e:
|
||||||
|
raise InvalidQueryError(e)
|
||||||
|
parts = []
|
||||||
|
|
||||||
|
cleaned_fields = []
|
||||||
|
appended_sub_field = False
|
||||||
|
for field in fields:
|
||||||
|
append_field = True
|
||||||
|
if isinstance(field, six.string_types):
|
||||||
|
# Convert the S operator to $
|
||||||
|
if field == 'S':
|
||||||
|
field = '$'
|
||||||
|
parts.append(field)
|
||||||
|
append_field = False
|
||||||
|
else:
|
||||||
|
parts.append(field.db_field)
|
||||||
|
if append_field:
|
||||||
|
appended_sub_field = False
|
||||||
|
cleaned_fields.append(field)
|
||||||
|
if hasattr(field, 'field'):
|
||||||
|
cleaned_fields.append(field.field)
|
||||||
|
appended_sub_field = True
|
||||||
|
|
||||||
|
# Convert value to proper value
|
||||||
|
if appended_sub_field:
|
||||||
|
field = cleaned_fields[-2]
|
||||||
|
else:
|
||||||
|
field = cleaned_fields[-1]
|
||||||
|
|
||||||
|
GeoJsonBaseField = _import_class('GeoJsonBaseField')
|
||||||
|
if isinstance(field, GeoJsonBaseField):
|
||||||
|
value = field.to_mongo(value)
|
||||||
|
|
||||||
|
if op in (None, 'set', 'push', 'pull'):
|
||||||
|
if field.required or value is not None:
|
||||||
|
value = field.prepare_query_value(op, value)
|
||||||
|
elif op in ('pushAll', 'pullAll'):
|
||||||
|
value = [field.prepare_query_value(op, v) for v in value]
|
||||||
|
elif op in ('addToSet', 'setOnInsert'):
|
||||||
|
if isinstance(value, (list, tuple, set)):
|
||||||
|
value = [field.prepare_query_value(op, v) for v in value]
|
||||||
|
elif field.required or value is not None:
|
||||||
|
value = field.prepare_query_value(op, value)
|
||||||
|
elif op == 'unset':
|
||||||
|
value = 1
|
||||||
|
|
||||||
|
if match:
|
||||||
|
match = '$' + match
|
||||||
|
value = {match: value}
|
||||||
|
|
||||||
|
key = '.'.join(parts)
|
||||||
|
|
||||||
|
if not op:
|
||||||
|
raise InvalidQueryError('Updates must supply an operation '
|
||||||
|
'eg: set__FIELD=value')
|
||||||
|
|
||||||
|
if 'pull' in op and '.' in key:
|
||||||
|
# Dot operators don't work on pull operations
|
||||||
|
# unless they point to a list field
|
||||||
|
# Otherwise it uses nested dict syntax
|
||||||
|
if op == 'pullAll':
|
||||||
|
raise InvalidQueryError('pullAll operations only support '
|
||||||
|
'a single field depth')
|
||||||
|
|
||||||
|
# Look for the last list field and use dot notation until there
|
||||||
|
field_classes = [c.__class__ for c in cleaned_fields]
|
||||||
|
field_classes.reverse()
|
||||||
|
ListField = _import_class('ListField')
|
||||||
|
if ListField in field_classes:
|
||||||
|
# Join all fields via dot notation to the last ListField
|
||||||
|
# Then process as normal
|
||||||
|
last_listField = len(
|
||||||
|
cleaned_fields) - field_classes.index(ListField)
|
||||||
|
key = '.'.join(parts[:last_listField])
|
||||||
|
parts = parts[last_listField:]
|
||||||
|
parts.insert(0, key)
|
||||||
|
|
||||||
|
parts.reverse()
|
||||||
|
for key in parts:
|
||||||
|
value = {key: value}
|
||||||
|
elif op == 'addToSet' and isinstance(value, list):
|
||||||
|
value = {key: {'$each': value}}
|
||||||
|
else:
|
||||||
|
value = {key: value}
|
||||||
|
key = '$' + op
|
||||||
|
|
||||||
|
if key not in mongo_update:
|
||||||
|
mongo_update[key] = value
|
||||||
|
elif key in mongo_update and isinstance(mongo_update[key], dict):
|
||||||
|
mongo_update[key].update(value)
|
||||||
|
|
||||||
|
return mongo_update
|
||||||
|
|
||||||
|
|
||||||
|
def _geo_operator(field, op, value):
|
||||||
|
"""Helper to return the query for a given geo query."""
|
||||||
|
if op == 'max_distance':
|
||||||
|
value = {'$maxDistance': value}
|
||||||
|
elif op == 'min_distance':
|
||||||
|
value = {'$minDistance': value}
|
||||||
|
elif field._geo_index == pymongo.GEO2D:
|
||||||
|
if op == 'within_distance':
|
||||||
|
value = {'$within': {'$center': value}}
|
||||||
|
elif op == 'within_spherical_distance':
|
||||||
|
value = {'$within': {'$centerSphere': value}}
|
||||||
|
elif op == 'within_polygon':
|
||||||
|
value = {'$within': {'$polygon': value}}
|
||||||
|
elif op == 'near':
|
||||||
|
value = {'$near': value}
|
||||||
|
elif op == 'near_sphere':
|
||||||
|
value = {'$nearSphere': value}
|
||||||
|
elif op == 'within_box':
|
||||||
|
value = {'$within': {'$box': value}}
|
||||||
|
else:
|
||||||
|
raise NotImplementedError('Geo method "%s" has not been '
|
||||||
|
'implemented for a GeoPointField' % op)
|
||||||
|
else:
|
||||||
|
if op == 'geo_within':
|
||||||
|
value = {'$geoWithin': _infer_geometry(value)}
|
||||||
|
elif op == 'geo_within_box':
|
||||||
|
value = {'$geoWithin': {'$box': value}}
|
||||||
|
elif op == 'geo_within_polygon':
|
||||||
|
value = {'$geoWithin': {'$polygon': value}}
|
||||||
|
elif op == 'geo_within_center':
|
||||||
|
value = {'$geoWithin': {'$center': value}}
|
||||||
|
elif op == 'geo_within_sphere':
|
||||||
|
value = {'$geoWithin': {'$centerSphere': value}}
|
||||||
|
elif op == 'geo_intersects':
|
||||||
|
value = {'$geoIntersects': _infer_geometry(value)}
|
||||||
|
elif op == 'near':
|
||||||
|
value = {'$near': _infer_geometry(value)}
|
||||||
|
else:
|
||||||
|
raise NotImplementedError(
|
||||||
|
'Geo method "%s" has not been implemented for a %s '
|
||||||
|
% (op, field._name)
|
||||||
|
)
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
def _infer_geometry(value):
|
||||||
|
"""Helper method that tries to infer the $geometry shape for a
|
||||||
|
given value.
|
||||||
|
"""
|
||||||
|
if isinstance(value, dict):
|
||||||
|
if '$geometry' in value:
|
||||||
|
return value
|
||||||
|
elif 'coordinates' in value and 'type' in value:
|
||||||
|
return {'$geometry': value}
|
||||||
|
raise InvalidQueryError('Invalid $geometry dictionary should have '
|
||||||
|
'type and coordinates keys')
|
||||||
|
elif isinstance(value, (list, set)):
|
||||||
|
# TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?
|
||||||
|
# TODO: should both TypeError and IndexError be alike interpreted?
|
||||||
|
|
||||||
|
try:
|
||||||
|
value[0][0][0]
|
||||||
|
return {'$geometry': {'type': 'Polygon', 'coordinates': value}}
|
||||||
|
except (TypeError, IndexError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
value[0][0]
|
||||||
|
return {'$geometry': {'type': 'LineString', 'coordinates': value}}
|
||||||
|
except (TypeError, IndexError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
value[0]
|
||||||
|
return {'$geometry': {'type': 'Point', 'coordinates': value}}
|
||||||
|
except (TypeError, IndexError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
raise InvalidQueryError('Invalid $geometry data. Can be either a '
|
||||||
|
'dictionary or (nested) lists of coordinate(s)')
|
||||||
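A minimal sketch of what the query() transform above produces; with no document class the field names pass through untouched and recognised operators are simply prefixed with '$':

from mongoengine.queryset.transform import query

print(query(age__gte=18, name='Ann'))
# -> {'age': {'$gte': 18}, 'name': 'Ann'}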
159  mongoengine/queryset/visitor.py  (new file)
@@ -0,0 +1,159 @@
|
|||||||
|
import copy
|
||||||
|
|
||||||
|
from mongoengine.errors import InvalidQueryError
|
||||||
|
from mongoengine.queryset import transform
|
||||||
|
|
||||||
|
__all__ = ('Q',)
|
||||||
|
|
||||||
|
|
||||||
|
class QNodeVisitor(object):
|
||||||
|
"""Base visitor class for visiting Q-object nodes in a query tree.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def visit_combination(self, combination):
|
||||||
|
"""Called by QCombination objects.
|
||||||
|
"""
|
||||||
|
return combination
|
||||||
|
|
||||||
|
def visit_query(self, query):
|
||||||
|
"""Called by (New)Q objects.
|
||||||
|
"""
|
||||||
|
return query
|
||||||
|
|
||||||
|
|
||||||
|
class DuplicateQueryConditionsError(InvalidQueryError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class SimplificationVisitor(QNodeVisitor):
|
||||||
|
"""Simplifies query trees by combining unnecessary 'and' connection nodes
|
||||||
|
into a single Q-object.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def visit_combination(self, combination):
|
||||||
|
if combination.operation == combination.AND:
|
||||||
|
# The simplification only applies to 'simple' queries
|
||||||
|
if all(isinstance(node, Q) for node in combination.children):
|
||||||
|
queries = [n.query for n in combination.children]
|
||||||
|
try:
|
||||||
|
return Q(**self._query_conjunction(queries))
|
||||||
|
except DuplicateQueryConditionsError:
|
||||||
|
# Cannot be simplified
|
||||||
|
pass
|
||||||
|
return combination
|
||||||
|
|
||||||
|
def _query_conjunction(self, queries):
|
||||||
|
"""Merges query dicts - effectively &ing them together.
|
||||||
|
"""
|
||||||
|
query_ops = set()
|
||||||
|
combined_query = {}
|
||||||
|
for query in queries:
|
||||||
|
ops = set(query.keys())
|
||||||
|
# Make sure that the same operation isn't applied more than once
|
||||||
|
# to a single field
|
||||||
|
intersection = ops.intersection(query_ops)
|
||||||
|
if intersection:
|
||||||
|
raise DuplicateQueryConditionsError()
|
||||||
|
|
||||||
|
query_ops.update(ops)
|
||||||
|
combined_query.update(copy.deepcopy(query))
|
||||||
|
return combined_query
|
||||||
|
|
||||||
|
|
||||||
|
class QueryCompilerVisitor(QNodeVisitor):
|
||||||
|
"""Compiles the nodes in a query tree to a PyMongo-compatible query
|
||||||
|
dictionary.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, document):
|
||||||
|
self.document = document
|
||||||
|
|
||||||
|
def visit_combination(self, combination):
|
||||||
|
operator = '$and'
|
||||||
|
if combination.operation == combination.OR:
|
||||||
|
operator = '$or'
|
||||||
|
return {operator: combination.children}
|
||||||
|
|
||||||
|
def visit_query(self, query):
|
||||||
|
return transform.query(self.document, **query.query)
|
||||||
|
|
||||||
|
|
||||||
|
class QNode(object):
|
||||||
|
"""Base class for nodes in query trees."""
|
||||||
|
|
||||||
|
AND = 0
|
||||||
|
OR = 1
|
||||||
|
|
||||||
|
def to_query(self, document):
|
||||||
|
query = self.accept(SimplificationVisitor())
|
||||||
|
query = query.accept(QueryCompilerVisitor(document))
|
||||||
|
return query
|
||||||
|
|
||||||
|
def accept(self, visitor):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def _combine(self, other, operation):
|
||||||
|
"""Combine this node with another node into a QCombination
|
||||||
|
object.
|
||||||
|
"""
|
||||||
|
if getattr(other, 'empty', True):
|
||||||
|
return self
|
||||||
|
|
||||||
|
if self.empty:
|
||||||
|
return other
|
||||||
|
|
||||||
|
return QCombination(operation, [self, other])
|
||||||
|
|
||||||
|
@property
|
||||||
|
def empty(self):
|
||||||
|
return False
|
||||||
|
|
||||||
|
def __or__(self, other):
|
||||||
|
return self._combine(other, self.OR)
|
||||||
|
|
||||||
|
def __and__(self, other):
|
||||||
|
return self._combine(other, self.AND)
|
||||||
|
|
||||||
|
|
||||||
|
class QCombination(QNode):
|
||||||
|
"""Represents the combination of several conditions by a given
|
||||||
|
logical operator.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, operation, children):
|
||||||
|
self.operation = operation
|
||||||
|
self.children = []
|
||||||
|
for node in children:
|
||||||
|
# If the child is a combination of the same type, we can merge its
|
||||||
|
# children directly into this combinations children
|
||||||
|
if isinstance(node, QCombination) and node.operation == operation:
|
||||||
|
self.children += node.children
|
||||||
|
else:
|
||||||
|
self.children.append(node)
|
||||||
|
|
||||||
|
def accept(self, visitor):
|
||||||
|
for i in range(len(self.children)):
|
||||||
|
if isinstance(self.children[i], QNode):
|
||||||
|
self.children[i] = self.children[i].accept(visitor)
|
||||||
|
|
||||||
|
return visitor.visit_combination(self)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def empty(self):
|
||||||
|
return not bool(self.children)
|
||||||
|
|
||||||
|
|
||||||
|
class Q(QNode):
|
||||||
|
"""A simple query object, used in a query tree to build up more complex
|
||||||
|
query structures.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, **query):
|
||||||
|
self.query = query
|
||||||
|
|
||||||
|
def accept(self, visitor):
|
||||||
|
return visitor.visit_query(self)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def empty(self):
|
||||||
|
return not bool(self.query)
|
||||||
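Not part of the diff: a minimal usage sketch of the Q / QCombination API added
above. The Page document and its fields are invented for illustration, and the
compiled dictionary is shown only approximately since key order may differ.

from mongoengine import Document, IntField, StringField
from mongoengine.queryset.visitor import Q


class Page(Document):
    title = StringField()
    views = IntField()


# Q nodes combine with | and &; to_query() runs the two visitors defined
# above: SimplificationVisitor merges the AND branch into one Q, then
# QueryCompilerVisitor turns the tree into a PyMongo filter document.
node = Q(title='Home') | (Q(views__gte=100) & Q(title__ne='Draft'))
print(node.to_query(Page))
# Roughly: {'$or': [{'title': 'Home'},
#                   {'views': {'$gte': 100}, 'title': {'$ne': 'Draft'}}]}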
@@ -1,11 +1,10 @@
 # -*- coding: utf-8 -*-
-__all__ = ['pre_init', 'post_init', 'pre_save', 'post_save',
-           'pre_delete', 'post_delete']
+__all__ = ('pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
+           'post_save', 'pre_delete', 'post_delete')

 signals_available = False
 try:
     from blinker import Namespace
     signals_available = True
 except ImportError:
     class Namespace(object):
@@ -27,11 +26,13 @@ except ImportError:
             raise RuntimeError('signalling support is unavailable '
                                'because the blinker library is '
                                'not installed.')
-        send = lambda *a, **kw: None
+        send = lambda *a, **kw: None  # noqa
         connect = disconnect = has_receivers_for = receivers_for = \
             temporarily_connected_to = _fail
         del _fail


 # the namespace for code signals. If you are not mongoengine code, do
 # not put signals in here. Create your own namespace instead.
 _signals = Namespace()
@@ -39,6 +40,7 @@ _signals = Namespace()
 pre_init = _signals.signal('pre_init')
 post_init = _signals.signal('post_init')
 pre_save = _signals.signal('pre_save')
+pre_save_post_validation = _signals.signal('pre_save_post_validation')
 post_save = _signals.signal('post_save')
 pre_delete = _signals.signal('pre_delete')
 post_delete = _signals.signal('post_delete')
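Not part of the diff: a short sketch of wiring a receiver to the signal added
above. The Article class is invented for illustration, and blinker must be
installed for signals to fire.

from mongoengine import Document, StringField, signals


class Article(Document):
    title = StringField()


def audit(sender, document, **kwargs):
    # Runs after validation has passed but before the write reaches MongoDB.
    print('about to save:', document.title)


signals.pre_save_post_validation.connect(audit, sender=Article)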
@@ -1,59 +0,0 @@
from mongoengine.connection import get_db


class query_counter(object):
    """ Query_counter contextmanager to get the number of queries. """

    def __init__(self):
        """ Construct the query_counter. """
        self.counter = 0
        self.db = get_db()

    def __enter__(self):
        """ On every with block we need to drop the profile collection. """
        self.db.set_profiling_level(0)
        self.db.system.profile.drop()
        self.db.set_profiling_level(2)
        return self

    def __exit__(self, t, value, traceback):
        """ Reset the profiling level. """
        self.db.set_profiling_level(0)

    def __eq__(self, value):
        """ == Compare querycounter. """
        return value == self._get_count()

    def __ne__(self, value):
        """ != Compare querycounter. """
        return not self.__eq__(value)

    def __lt__(self, value):
        """ < Compare querycounter. """
        return self._get_count() < value

    def __le__(self, value):
        """ <= Compare querycounter. """
        return self._get_count() <= value

    def __gt__(self, value):
        """ > Compare querycounter. """
        return self._get_count() > value

    def __ge__(self, value):
        """ >= Compare querycounter. """
        return self._get_count() >= value

    def __int__(self):
        """ int representation. """
        return self._get_count()

    def __repr__(self):
        """ repr query_counter as the number of queries. """
        return u"%s" % self._get_count()

    def _get_count(self):
        """ Get the number of queries. """
        count = self.db.system.profile.find().count() - self.counter
        self.counter += 1
        return count
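Not part of the diff: roughly how the removed query_counter helper was used.
In the newer tree an equivalent context manager lives in
mongoengine.context_managers, which is an assumption worth verifying for your
version; the Person class and database name are invented for illustration.

from mongoengine import Document, StringField, connect
from mongoengine.context_managers import query_counter

connect('mongoenginetest')


class Person(Document):
    name = StringField()


with query_counter() as q:
    Person.objects.first()   # issues one find against the profiled database
    print(int(q))            # number of operations recorded by the profiler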
@@ -5,7 +5,7 @@
 %define srcname mongoengine

 Name: python-%{srcname}
-Version: 0.6.7
+Version: 0.8.7
 Release: 1%{?dist}
 Summary: A Python Document-Object Mapper for working with MongoDB

@@ -51,4 +51,4 @@ rm -rf $RPM_BUILD_ROOT
 # %{python_sitearch}/*

 %changelog
-* See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html
+* See: http://docs.mongoengine.org/en/latest/changelog.html
@@ -1 +1,5 @@
-pymongo
+nose
+pymongo>=2.7.1
+six==1.10.0
+flake8
+flake8-import-order
setup.cfg (new file, 11 lines)
@@ -0,0 +1,11 @@
[nosetests]
verbosity=2
detailed-errors=1
tests=tests
cover-package=mongoengine

[flake8]
ignore=E501,F401,F403,F405,I201
exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
max-complexity=47
application-import-names=mongoengine,tests
setup.py (68 lines changed)
@@ -1,27 +1,39 @@
-from setuptools import setup, find_packages
 import os
+import sys
+
+from setuptools import find_packages, setup

-DESCRIPTION = "A Python Document-Object Mapper for working with MongoDB"
-
-LONG_DESCRIPTION = None
+# Hack to silence atexit traceback in newer python versions
 try:
-    LONG_DESCRIPTION = open('README.rst').read()
-except:
+    import multiprocessing
+except ImportError:
     pass

+DESCRIPTION = (
+    'MongoEngine is a Python Object-Document '
+    'Mapper for working with MongoDB.'
+)
+
+try:
+    with open('README.rst') as fin:
+        LONG_DESCRIPTION = fin.read()
+except Exception:
+    LONG_DESCRIPTION = None
+

 def get_version(version_tuple):
-    version = '%s.%s' % (version_tuple[0], version_tuple[1])
-    if version_tuple[2]:
-        version = '%s.%s' % (version, version_tuple[2])
-    return version
+    """Return the version tuple as a string, e.g. for (0, 10, 7),
+    return '0.10.7'.
+    """
+    return '.'.join(map(str, version_tuple))

 # Dirty hack to get version number from monogengine/__init__.py - we can't
 # import it as it depends on PyMongo and PyMongo isn't installed until this
 # file is read
 init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
-version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
+version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]

 VERSION = get_version(eval(version_line.split('=')[-1]))
-print VERSION

 CLASSIFIERS = [
     'Development Status :: 4 - Beta',
@@ -29,25 +41,47 @@ CLASSIFIERS = [
     'License :: OSI Approved :: MIT License',
     'Operating System :: OS Independent',
     'Programming Language :: Python',
+    "Programming Language :: Python :: 2",
+    "Programming Language :: Python :: 2.7",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.3",
+    "Programming Language :: Python :: 3.4",
+    "Programming Language :: Python :: 3.5",
+    "Programming Language :: Python :: Implementation :: CPython",
+    "Programming Language :: Python :: Implementation :: PyPy",
     'Topic :: Database',
     'Topic :: Software Development :: Libraries :: Python Modules',
 ]

-setup(name='mongoengine',
-      version=VERSION,
-      packages=find_packages(),
-      author='Harry Marr',
-      author_email='harry.marr@{nospam}gmail.com',
-      maintainer="Ross Lawley",
-      maintainer_email="ross.lawley@{nospam}gmail.com",
-      url='http://mongoengine.org/',
-      license='MIT',
-      include_package_data=True,
-      description=DESCRIPTION,
-      long_description=LONG_DESCRIPTION,
-      platforms=['any'],
-      classifiers=CLASSIFIERS,
-      install_requires=['pymongo'],
-      test_suite='tests',
-      tests_require=['blinker', 'django>=1.3', 'PIL']
-)
+extra_opts = {
+    'packages': find_packages(exclude=['tests', 'tests.*']),
+    'tests_require': ['nose', 'coverage==4.2', 'blinker', 'Pillow>=2.0.0']
+}
+if sys.version_info[0] == 3:
+    extra_opts['use_2to3'] = True
+    if 'test' in sys.argv or 'nosetests' in sys.argv:
+        extra_opts['packages'] = find_packages()
+        extra_opts['package_data'] = {
+            'tests': ['fields/mongoengine.png', 'fields/mongodb_leaf.png']}
+else:
+    extra_opts['tests_require'] += ['python-dateutil']
+
+setup(
+    name='mongoengine',
+    version=VERSION,
+    author='Harry Marr',
+    author_email='harry.marr@{nospam}gmail.com',
+    maintainer="Ross Lawley",
+    maintainer_email="ross.lawley@{nospam}gmail.com",
+    url='http://mongoengine.org/',
+    download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
+    license='MIT',
+    include_package_data=True,
+    description=DESCRIPTION,
+    long_description=LONG_DESCRIPTION,
+    platforms=['any'],
+    classifiers=CLASSIFIERS,
+    install_requires=['pymongo>=2.7.1', 'six'],
+    test_suite='nose.collector',
+    **extra_opts
+)
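A quick illustration (not in the diff) of what the rewritten version handling
above does; the VERSION line shown here is a made-up stand-in for the real one
in mongoengine/__init__.py.

version_line = "VERSION = (0, 10, 0)"
version_tuple = eval(version_line.split('=')[-1])
assert '.'.join(map(str, version_tuple)) == '0.10.0'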
@@ -0,0 +1,4 @@
from all_warnings import AllWarnings
from document import *
from queryset import *
from fields import *
tests/all_warnings/__init__.py (new file, 42 lines)
@@ -0,0 +1,42 @@
"""
This test has been put into a module. This is because it tests warnings that
only get triggered on first hit. This way we can ensure its imported into the
top level and called first by the test suite.
"""
import unittest
import warnings

from mongoengine import *


__all__ = ('AllWarnings', )


class AllWarnings(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.warning_list = []
        self.showwarning_default = warnings.showwarning
        warnings.showwarning = self.append_to_warning_list

    def append_to_warning_list(self, message, category, *args):
        self.warning_list.append({"message": message,
                                  "category": category})

    def tearDown(self):
        # restore default handling of warnings
        warnings.showwarning = self.showwarning_default

    def test_document_collection_syntax_warning(self):

        class NonAbstractBase(Document):
            meta = {'allow_inheritance': True}

        class InheritedDocumentFailTest(NonAbstractBase):
            meta = {'collection': 'fail'}

        warning = self.warning_list[0]
        self.assertEqual(SyntaxWarning, warning["category"])
        self.assertEqual('non_abstract_base',
                         InheritedDocumentFailTest._get_collection_name())
@@ -1,70 +0,0 @@
import unittest
import pymongo

import mongoengine.connection

from mongoengine import *
from mongoengine.connection import get_db, get_connection, ConnectionError


class ConnectionTest(unittest.TestCase):

    def tearDown(self):
        mongoengine.connection._connection_settings = {}
        mongoengine.connection._connections = {}
        mongoengine.connection._dbs = {}

    def test_connect(self):
        """Ensure that the connect() method works properly.
        """
        connect('mongoenginetest')

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')

        connect('mongoenginetest2', alias='testdb')
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

    def test_connect_uri(self):
        """Ensure that the connect() method works properly with uri's
        """
        c = connect(db='mongoenginetest', alias='admin')
        c.admin.system.users.remove({})
        c.mongoenginetest.system.users.remove({})

        c.admin.add_user("admin", "password")
        c.admin.authenticate("admin", "password")
        c.mongoenginetest.add_user("username", "password")

        self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')

        connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')

    def test_register_connection(self):
        """Ensure that connections with different aliases may be registered.
        """
        register_connection('testdb', 'mongoenginetest2')

        self.assertRaises(ConnectionError, get_connection)
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

        db = get_db('testdb')
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest2')


if __name__ == '__main__':
    unittest.main()
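Not part of the diff: a short sketch of the connection-alias API the deleted
tests exercised, written against a modern PyMongo-backed setup, since
pymongo.connection.Connection no longer exists in recent PyMongo releases.
The aliases and database names are invented for illustration.

from mongoengine import connect, register_connection
from mongoengine.connection import get_connection, get_db

connect('mongoenginetest')                          # default alias
register_connection('testdb', 'mongoenginetest2')   # extra alias, same host

assert get_db().name == 'mongoenginetest'
assert get_db('testdb').name == 'mongoenginetest2'
conn = get_connection('testdb')                     # a pymongo client object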
@@ -1,105 +0,0 @@
# -*- coding: utf-8 -*-

import unittest

from mongoengine import *
from mongoengine.django.shortcuts import get_document_or_404

from django.http import Http404
from django.template import Context, Template
from django.conf import settings
from django.core.paginator import Paginator

settings.configure()

from django.contrib.sessions.tests import SessionTestsMixin
from mongoengine.django.sessions import SessionStore, MongoSession


class QuerySetTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()
        self.Person = Person

    def test_order_by_in_django_template(self):
        """Ensure that QuerySets are properly ordered in Django template.
        """
        self.Person.drop_collection()

        self.Person(name="A", age=20).save()
        self.Person(name="D", age=10).save()
        self.Person(name="B", age=40).save()
        self.Person(name="C", age=30).save()

        t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")

        d = {"ol": self.Person.objects.order_by('-name')}
        self.assertEqual(t.render(Context(d)), u'D-10:C-30:B-40:A-20:')
        d = {"ol": self.Person.objects.order_by('+name')}
        self.assertEqual(t.render(Context(d)), u'A-20:B-40:C-30:D-10:')
        d = {"ol": self.Person.objects.order_by('-age')}
        self.assertEqual(t.render(Context(d)), u'B-40:C-30:A-20:D-10:')
        d = {"ol": self.Person.objects.order_by('+age')}
        self.assertEqual(t.render(Context(d)), u'D-10:A-20:C-30:B-40:')

        self.Person.drop_collection()

    def test_q_object_filter_in_template(self):

        self.Person.drop_collection()

        self.Person(name="A", age=20).save()
        self.Person(name="D", age=10).save()
        self.Person(name="B", age=40).save()
        self.Person(name="C", age=30).save()

        t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")

        d = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))}
        self.assertEqual(t.render(Context(d)), 'D-10:C-30:')

        # Check double rendering doesn't throw an error
        self.assertEqual(t.render(Context(d)), 'D-10:C-30:')

    def test_get_document_or_404(self):
        p = self.Person(name="G404")
        p.save()

        self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234')
        self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk))

    def test_pagination(self):
        """Ensure that Pagination works as expected
        """
        class Page(Document):
            name = StringField()

        Page.drop_collection()

        for i in xrange(1, 11):
            Page(name=str(i)).save()

        paginator = Paginator(Page.objects.all(), 2)

        t = Template("{% for i in page.object_list %}{{ i.name }}:{% endfor %}")
        for p in paginator.page_range:
            d = {"page": paginator.page(p)}
            end = p * 2
            start = end - 1
            self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end))


class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
    backend = SessionStore

    def setUp(self):
        connect(db='mongoenginetest')
        MongoSession.drop_collection()
        super(MongoDBSessionTest, self).setUp()
tests/document.py (2958 lines): file diff suppressed because it is too large.
tests/document/__init__.py (new file, 13 lines)
@@ -0,0 +1,13 @@
import unittest

from class_methods import *
from delta import *
from dynamic import *
from indexes import *
from inheritance import *
from instance import *
from json_serialisation import *
from validation import *

if __name__ == '__main__':
    unittest.main()
tests/document/class_methods.py (new file, 350 lines)
@@ -0,0 +1,350 @@
# -*- coding: utf-8 -*-
import unittest

from mongoengine import *

from mongoengine.queryset import NULLIFY, PULL
from mongoengine.connection import get_db

__all__ = ("ClassMethodsTest", )


class ClassMethodsTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_definition(self):
        """Ensure that document may be defined using fields.
        """
        self.assertEqual(['_cls', 'age', 'id', 'name'],
                         sorted(self.Person._fields.keys()))
        self.assertEqual(["IntField", "ObjectIdField", "StringField", "StringField"],
                         sorted([x.__class__.__name__ for x in
                                 self.Person._fields.values()]))

    def test_get_db(self):
        """Ensure that get_db returns the expected db.
        """
        db = self.Person._get_db()
        self.assertEqual(self.db, db)

    def test_get_collection_name(self):
        """Ensure that get_collection_name returns the expected collection
        name.
        """
        collection_name = 'person'
        self.assertEqual(collection_name, self.Person._get_collection_name())

    def test_get_collection(self):
        """Ensure that get_collection returns the expected collection.
        """
        collection_name = 'person'
        collection = self.Person._get_collection()
        self.assertEqual(self.db[collection_name], collection)

    def test_drop_collection(self):
        """Ensure that the collection may be dropped from the database.
        """
        collection_name = 'person'
        self.Person(name='Test').save()
        self.assertTrue(collection_name in self.db.collection_names())

        self.Person.drop_collection()
        self.assertFalse(collection_name in self.db.collection_names())

    def test_register_delete_rule(self):
        """Ensure that register delete rule adds a delete rule to the document
        meta.
        """
        class Job(Document):
            employee = ReferenceField(self.Person)

        self.assertEqual(self.Person._meta.get('delete_rules'), None)

        self.Person.register_delete_rule(Job, 'employee', NULLIFY)
        self.assertEqual(self.Person._meta['delete_rules'],
                         {(Job, 'employee'): NULLIFY})

    def test_compare_indexes(self):
        """ Ensure that the indexes are properly created and that
        compare_indexes identifies the missing/extra indexes
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()
            tags = StringField()

            meta = {
                'indexes': [('author', 'title')]
            }

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })

        BlogPost.ensure_index(['author', 'description'])
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('author', 1), ('description', 1)]] })

        BlogPost._get_collection().drop_index('author_1_description_1')
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })

        BlogPost._get_collection().drop_index('author_1_title_1')
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('author', 1), ('title', 1)]], 'extra': [] })

    def test_compare_indexes_inheritance(self):
        """ Ensure that the indexes are properly created and that
        compare_indexes identifies the missing/extra indexes for subclassed
        documents (_cls included)
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {
                'allow_inheritance': True
            }

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {
                'indexes': [('author', 'tags')]
            }

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })

        BlogPostWithTags.ensure_index(['author', 'tag_list'])
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]] })

        BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1')
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })

        BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1')
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': [] })

    def test_compare_indexes_multiple_subclasses(self):
        """ Ensure that compare_indexes behaves correctly if called from a
        class, which base class has multiple subclasses
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {
                'allow_inheritance': True
            }

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {
                'indexes': [('author', 'tags')]
            }

        class BlogPostWithCustomField(BlogPost):
            custom = DictField()

            meta = {
                'indexes': [('author', 'custom')]
            }

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        BlogPostWithCustomField.ensure_indexes()

        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
        self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] })
        self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] })

    def test_list_indexes_inheritance(self):
        """ ensure that all of the indexes are listed regardless of the super-
        or sub-class that we call it from
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {
                'allow_inheritance': True
            }

        class BlogPostWithTags(BlogPost):
            tags = StringField()

            meta = {
                'indexes': [('author', 'tags')]
            }

        class BlogPostWithTagsAndExtraText(BlogPostWithTags):
            extra_text = StringField()

            meta = {
                'indexes': [('author', 'tags', 'extra_text')]
            }

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        BlogPostWithTagsAndExtraText.ensure_indexes()

        self.assertEqual(BlogPost.list_indexes(),
                         BlogPostWithTags.list_indexes())
        self.assertEqual(BlogPost.list_indexes(),
                         BlogPostWithTagsAndExtraText.list_indexes())
        self.assertEqual(BlogPost.list_indexes(),
                         [[('_cls', 1), ('author', 1), ('tags', 1)],
                          [('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)],
                          [(u'_id', 1)], [('_cls', 1)]])

    def test_register_delete_rule_inherited(self):

        class Vaccine(Document):
            name = StringField(required=True)

            meta = {"indexes": ["name"]}

        class Animal(Document):
            family = StringField(required=True)
            vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL))

            meta = {"allow_inheritance": True, "indexes": ["family"]}

        class Cat(Animal):
            name = StringField(required=True)

        self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL)
        self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL)

    def test_collection_naming(self):
        """Ensure that a collection with a specified name may be used.
        """

        class DefaultNamingTest(Document):
            pass
        self.assertEqual('default_naming_test',
                         DefaultNamingTest._get_collection_name())

        class CustomNamingTest(Document):
            meta = {'collection': 'pimp_my_collection'}

        self.assertEqual('pimp_my_collection',
                         CustomNamingTest._get_collection_name())

        class DynamicNamingTest(Document):
            meta = {'collection': lambda c: "DYNAMO"}
        self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name())

        # Use Abstract class to handle backwards compatibility
        class BaseDocument(Document):
            meta = {
                'abstract': True,
                'collection': lambda c: c.__name__.lower()
            }

        class OldNamingConvention(BaseDocument):
            pass
        self.assertEqual('oldnamingconvention',
                         OldNamingConvention._get_collection_name())

        class InheritedAbstractNamingTest(BaseDocument):
            meta = {'collection': 'wibble'}
        self.assertEqual('wibble',
                         InheritedAbstractNamingTest._get_collection_name())

        # Mixin tests
        class BaseMixin(object):
            meta = {
                'collection': lambda c: c.__name__.lower()
            }

        class OldMixinNamingConvention(Document, BaseMixin):
            pass
        self.assertEqual('oldmixinnamingconvention',
                         OldMixinNamingConvention._get_collection_name())

        class BaseMixin(object):
            meta = {
                'collection': lambda c: c.__name__.lower()
            }

        class BaseDocument(Document, BaseMixin):
            meta = {'allow_inheritance': True}

        class MyDocument(BaseDocument):
            pass

        self.assertEqual('basedocument', MyDocument._get_collection_name())

    def test_custom_collection_name_operations(self):
        """Ensure that a collection with a specified name is used as expected.
        """
        collection_name = 'personCollTest'

        class Person(Document):
            name = StringField()
            meta = {'collection': collection_name}

        Person(name="Test User").save()
        self.assertTrue(collection_name in self.db.collection_names())

        user_obj = self.db[collection_name].find_one()
        self.assertEqual(user_obj['name'], "Test User")

        user_obj = Person.objects[0]
        self.assertEqual(user_obj.name, "Test User")

        Person.drop_collection()
        self.assertFalse(collection_name in self.db.collection_names())

    def test_collection_name_and_primary(self):
        """Ensure that a collection with a specified name may be used.
        """

        class Person(Document):
            name = StringField(primary_key=True)
            meta = {'collection': 'app'}

        Person(name="Test User").save()

        user_obj = Person.objects.first()
        self.assertEqual(user_obj.name, "Test User")

        Person.drop_collection()


if __name__ == '__main__':
    unittest.main()
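Not part of the diff: a compact sketch of the compare_indexes() contract the
tests above exercise. The Article class and database name are invented for
illustration.

from mongoengine import Document, StringField, connect

connect('mongoenginetest')


class Article(Document):
    author = StringField()
    title = StringField()
    meta = {'indexes': [('author', 'title')]}


Article.ensure_indexes()
print(Article.compare_indexes())
# Expected: {'missing': [], 'extra': []}; dropping author_1_title_1 by hand
# would move that compound index into the 'missing' list instead.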
tests/document/delta.py (new file, 867 lines; listing truncated below)
@@ -0,0 +1,867 @@
# -*- coding: utf-8 -*-
import unittest

from bson import SON
from mongoengine import *
from mongoengine.connection import get_db

__all__ = ("DeltaTest",)


class DeltaTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_delta(self):
        self.delta(Document)
        self.delta(DynamicDocument)

    def delta(self, DocClass):

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['string_field'])
        self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['int_field'])
        self.assertEqual(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({}, {'list_field': 1}))

    def test_delta_recursive(self):
        self.delta_recursive(Document, EmbeddedDocument)
        self.delta_recursive(DynamicDocument, EmbeddedDocument)
        self.delta_recursive(Document, DynamicEmbeddedDocument)
        self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument)

    def delta_recursive(self, DocClass, EmbeddedClass):

        class Embedded(EmbeddedClass):
            id = StringField()
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()
            embedded_field = EmbeddedDocumentField(Embedded)

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.id = "010101"
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc._get_changed_fields(), ['embedded_field'])

        embedded_delta = {
            'id': "010101",
            'string_field': 'hello',
            'int_field': 1,
            'dict_field': {'hello': 'world'},
            'list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
        self.assertEqual(doc._delta(),
                         ({'embedded_field': embedded_delta}, {}))

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.dict_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
        self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.dict_field, {})

        doc.embedded_field.list_field = []
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1}))
        self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field, [])

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field'])

        self.assertEqual(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEqual(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEqual(doc.embedded_field.list_field[2][k],
                             embedded_2[k])

        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field.2.string_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({'list_field.2.string_field': 'world'}, {}))
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.string_field': 'world'}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field.2'])
        self.assertEqual(doc.embedded_field._delta(), ({'list_field.2': {
            '_cls': 'Embedded',
            'string_field': 'hello world',
            'int_field': 1,
            'list_field': ['1', 2, {'hello': 'world'}],
            'dict_field': {'hello': 'world'}}
        }, {}))
        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2': {
            '_cls': 'Embedded',
            'string_field': 'hello world',
            'int_field': 1,
            'list_field': ['1', 2, {'hello': 'world'}],
            'dict_field': {'hello': 'world'}}
        }, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.list_field':
                          [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.list_field':
                          [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [1, 2, {'hello': 'world'}])

        del doc.embedded_field.list_field[2].list_field[2]['hello']
        self.assertEqual(doc._delta(),
                         ({}, {'embedded_field.list_field.2.list_field.2.hello': 1}))
        doc.save()
        doc = doc.reload(10)

        del doc.embedded_field.list_field[2].list_field
        self.assertEqual(doc._delta(),
                         ({}, {'embedded_field.list_field.2.list_field': 1}))

        doc.save()
        doc = doc.reload(10)

        doc.dict_field['Embedded'] = embedded_1
        doc.save()
        doc = doc.reload(10)

        doc.dict_field['Embedded'].string_field = 'Hello World'
        self.assertEqual(doc._get_changed_fields(),
                         ['dict_field.Embedded.string_field'])
        self.assertEqual(doc._delta(),
                         ({'dict_field.Embedded.string_field': 'Hello World'}, {}))

    def test_circular_reference_deltas(self):
        self.circular_reference_deltas(Document, Document)
        self.circular_reference_deltas(Document, DynamicDocument)
        self.circular_reference_deltas(DynamicDocument, Document)
        self.circular_reference_deltas(DynamicDocument, DynamicDocument)

    def circular_reference_deltas(self, DocClass1, DocClass2):

        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField('Organization'))

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField('Person')

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        organization = Organization(name="company").save()

        person.owns.append(organization)
        organization.owner = person

        person.save()
        organization.save()

        p = Person.objects[0].select_related()
        o = Organization.objects.first()
        self.assertEqual(p.owns[0], o)
        self.assertEqual(o.owner, p)

    def test_circular_reference_deltas_2(self):
        self.circular_reference_deltas_2(Document, Document)
        self.circular_reference_deltas_2(Document, DynamicDocument)
        self.circular_reference_deltas_2(DynamicDocument, Document)
        self.circular_reference_deltas_2(DynamicDocument, DynamicDocument)

    def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True):

        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField('Organization', dbref=dbref))
            employer = ReferenceField('Organization', dbref=dbref)

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField('Person', dbref=dbref)
            employees = ListField(ReferenceField('Person', dbref=dbref))

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        employee = Person(name="employee").save()
        organization = Organization(name="company").save()

        person.owns.append(organization)
        organization.owner = person

        organization.employees.append(employee)
        employee.employer = organization

        person.save()
        organization.save()
        employee.save()

        p = Person.objects.get(name="owner")
        e = Person.objects.get(name="employee")
        o = Organization.objects.first()

        self.assertEqual(p.owns[0], o)
        self.assertEqual(o.owner, p)
        self.assertEqual(e.employer, o)

        return person, organization, employee

    def test_delta_db_field(self):
        self.delta_db_field(Document)
        self.delta_db_field(DynamicDocument)

    def delta_db_field(self, DocClass):

        class Doc(DocClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['db_string_field'])
        self.assertEqual(doc._delta(), ({'db_string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['db_int_field'])
        self.assertEqual(doc._delta(), ({'db_int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEqual(doc._delta(), ({'db_dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
        self.assertEqual(doc._delta(), ({'db_list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEqual(doc._delta(), ({}, {'db_dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
        self.assertEqual(doc._delta(), ({}, {'db_list_field': 1}))

        # Test it saves that data
        doc = Doc()
        doc.save()

        doc.string_field = 'hello'
        doc.int_field = 1
        doc.dict_field = {'hello': 'world'}
        doc.list_field = ['1', 2, {'hello': 'world'}]
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.string_field, 'hello')
        self.assertEqual(doc.int_field, 1)
        self.assertEqual(doc.dict_field, {'hello': 'world'})
        self.assertEqual(doc.list_field, ['1', 2, {'hello': 'world'}])

    def test_delta_recursive_db_field(self):
        self.delta_recursive_db_field(Document, EmbeddedDocument)
        self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)
        self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)
        self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)

    def delta_recursive_db_field(self, DocClass, EmbeddedClass):

        class Embedded(EmbeddedClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')

        class Doc(DocClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')
            embedded_field = EmbeddedDocumentField(Embedded,
                                                   db_field='db_embedded_field')

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc._get_changed_fields(), ['db_embedded_field'])

        embedded_delta = {
            'db_string_field': 'hello',
            'db_int_field': 1,
            'db_dict_field': {'hello': 'world'},
            'db_list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field': embedded_delta}, {}))

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_dict_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({}, {'db_dict_field': 1}))
        self.assertEqual(doc._delta(),
                         ({}, {'db_embedded_field.db_dict_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.dict_field, {})

        doc.embedded_field.list_field = []
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({}, {'db_list_field': 1}))
        self.assertEqual(doc._delta(),
                         ({}, {'db_embedded_field.db_list_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field, [])

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello',
                'db_dict_field': {'hello': 'world'},
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEqual(doc._delta(), ({
            'db_embedded_field.db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello',
                'db_dict_field': {'hello': 'world'},
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEqual(doc.embedded_field.list_field[2][k],
                             embedded_2[k])

        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field.2.db_string_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({'db_list_field.2.db_string_field': 'world'}, {}))
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_string_field': 'world'},
                          {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field.2'])
        self.assertEqual(doc.embedded_field._delta(), ({'db_list_field.2': {
            '_cls': 'Embedded',
            'db_string_field': 'hello world',
            'db_int_field': 1,
            'db_list_field': ['1', 2, {'hello': 'world'}],
            'db_dict_field': {'hello': 'world'}}}, {}))
        self.assertEqual(doc._delta(), ({
            'db_embedded_field.db_list_field.2': {
                '_cls': 'Embedded',
                'db_string_field': 'hello world',
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
                'db_dict_field': {'hello': 'world'}}
        }, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_list_field':
                          [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_list_field':
                          [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [1, 2, {'hello': 'world'}])

        del doc.embedded_field.list_field[2].list_field[2]['hello']
        self.assertEqual(doc._delta(),
                         ({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1}))
        doc.save()
        doc = doc.reload(10)

        del doc.embedded_field.list_field[2].list_field
        self.assertEqual(doc._delta(), ({},
                         {'db_embedded_field.db_list_field.2.db_list_field': 1}))

    def test_delta_for_dynamic_documents(self):
        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        p = Person(name="James", age=34)
        self.assertEqual(p._delta(), (
            SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

        p.doc = 123
        del p.doc
        self.assertEqual(p._delta(), (
            SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

        p = Person()
        p.name = "Dean"
        p.age = 22
|
p.save()
|
||||||
|
|
||||||
|
p.age = 24
|
||||||
|
self.assertEqual(p.age, 24)
|
||||||
|
self.assertEqual(p._get_changed_fields(), ['age'])
|
||||||
|
self.assertEqual(p._delta(), ({'age': 24}, {}))
|
||||||
|
|
||||||
|
p = Person.objects(age=22).get()
|
||||||
|
p.age = 24
|
||||||
|
self.assertEqual(p.age, 24)
|
||||||
|
self.assertEqual(p._get_changed_fields(), ['age'])
|
||||||
|
self.assertEqual(p._delta(), ({'age': 24}, {}))
|
||||||
|
|
||||||
|
p.save()
|
||||||
|
self.assertEqual(1, Person.objects(age=24).count())
|
||||||
|
|
||||||
|
def test_dynamic_delta(self):
|
||||||
|
|
||||||
|
class Doc(DynamicDocument):
|
||||||
|
pass
|
||||||
|
|
||||||
|
Doc.drop_collection()
|
||||||
|
doc = Doc()
|
||||||
|
doc.save()
|
||||||
|
|
||||||
|
doc = Doc.objects.first()
|
||||||
|
self.assertEqual(doc._get_changed_fields(), [])
|
||||||
|
self.assertEqual(doc._delta(), ({}, {}))
|
||||||
|
|
||||||
|
doc.string_field = 'hello'
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['string_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))
|
||||||
|
|
||||||
|
doc._changed_fields = []
|
||||||
|
doc.int_field = 1
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['int_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({'int_field': 1}, {}))
|
||||||
|
|
||||||
|
doc._changed_fields = []
|
||||||
|
dict_value = {'hello': 'world', 'ping': 'pong'}
|
||||||
|
doc.dict_field = dict_value
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))
|
||||||
|
|
||||||
|
doc._changed_fields = []
|
||||||
|
list_value = ['1', 2, {'hello': 'world'}]
|
||||||
|
doc.list_field = list_value
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['list_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))
|
||||||
|
|
||||||
|
# Test unsetting
|
||||||
|
doc._changed_fields = []
|
||||||
|
doc.dict_field = {}
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))
|
||||||
|
|
||||||
|
doc._changed_fields = []
|
||||||
|
doc.list_field = []
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['list_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
|
||||||
|
|
||||||
|
def test_delta_with_dbref_true(self):
|
||||||
|
person, organization, employee = self.circular_reference_deltas_2(Document, Document, True)
|
||||||
|
employee.name = 'test'
|
||||||
|
|
||||||
|
self.assertEqual(organization._get_changed_fields(), [])
|
||||||
|
|
||||||
|
updates, removals = organization._delta()
|
||||||
|
self.assertEqual({}, removals)
|
||||||
|
self.assertEqual({}, updates)
|
||||||
|
|
||||||
|
organization.employees.append(person)
|
||||||
|
updates, removals = organization._delta()
|
||||||
|
self.assertEqual({}, removals)
|
||||||
|
self.assertTrue('employees' in updates)
|
||||||
|
|
||||||
|
def test_delta_with_dbref_false(self):
|
||||||
|
person, organization, employee = self.circular_reference_deltas_2(Document, Document, False)
|
||||||
|
employee.name = 'test'
|
||||||
|
|
||||||
|
self.assertEqual(organization._get_changed_fields(), [])
|
||||||
|
|
||||||
|
updates, removals = organization._delta()
|
||||||
|
self.assertEqual({}, removals)
|
||||||
|
self.assertEqual({}, updates)
|
||||||
|
|
||||||
|
organization.employees.append(person)
|
||||||
|
updates, removals = organization._delta()
|
||||||
|
self.assertEqual({}, removals)
|
||||||
|
self.assertTrue('employees' in updates)
|
||||||
|
|
||||||
|
def test_nested_nested_fields_mark_as_changed(self):
|
||||||
|
class EmbeddedDoc(EmbeddedDocument):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
class MyDoc(Document):
|
||||||
|
subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc)))
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
MyDoc.drop_collection()
|
||||||
|
|
||||||
|
mydoc = MyDoc(name='testcase1', subs={'a': {'b': EmbeddedDoc(name='foo')}}).save()
|
||||||
|
|
||||||
|
mydoc = MyDoc.objects.first()
|
||||||
|
subdoc = mydoc.subs['a']['b']
|
||||||
|
subdoc.name = 'bar'
|
||||||
|
|
||||||
|
self.assertEqual(["name"], subdoc._get_changed_fields())
|
||||||
|
self.assertEqual(["subs.a.b.name"], mydoc._get_changed_fields())
|
||||||
|
|
||||||
|
mydoc._clear_changed_fields()
|
||||||
|
self.assertEqual([], mydoc._get_changed_fields())
|
||||||
|
|
||||||
|
def test_lower_level_mark_as_changed(self):
|
||||||
|
class EmbeddedDoc(EmbeddedDocument):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
class MyDoc(Document):
|
||||||
|
subs = MapField(EmbeddedDocumentField(EmbeddedDoc))
|
||||||
|
|
||||||
|
MyDoc.drop_collection()
|
||||||
|
|
||||||
|
MyDoc().save()
|
||||||
|
|
||||||
|
mydoc = MyDoc.objects.first()
|
||||||
|
mydoc.subs['a'] = EmbeddedDoc()
|
||||||
|
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
|
||||||
|
|
||||||
|
subdoc = mydoc.subs['a']
|
||||||
|
subdoc.name = 'bar'
|
||||||
|
|
||||||
|
self.assertEqual(["name"], subdoc._get_changed_fields())
|
||||||
|
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
|
||||||
|
mydoc.save()
|
||||||
|
|
||||||
|
mydoc._clear_changed_fields()
|
||||||
|
self.assertEqual([], mydoc._get_changed_fields())
|
||||||
|
|
||||||
|
def test_upper_level_mark_as_changed(self):
|
||||||
|
class EmbeddedDoc(EmbeddedDocument):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
class MyDoc(Document):
|
||||||
|
subs = MapField(EmbeddedDocumentField(EmbeddedDoc))
|
||||||
|
|
||||||
|
MyDoc.drop_collection()
|
||||||
|
|
||||||
|
MyDoc(subs={'a': EmbeddedDoc(name='foo')}).save()
|
||||||
|
|
||||||
|
mydoc = MyDoc.objects.first()
|
||||||
|
subdoc = mydoc.subs['a']
|
||||||
|
subdoc.name = 'bar'
|
||||||
|
|
||||||
|
self.assertEqual(["name"], subdoc._get_changed_fields())
|
||||||
|
self.assertEqual(["subs.a.name"], mydoc._get_changed_fields())
|
||||||
|
|
||||||
|
mydoc.subs['a'] = EmbeddedDoc()
|
||||||
|
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
|
||||||
|
mydoc.save()
|
||||||
|
|
||||||
|
mydoc._clear_changed_fields()
|
||||||
|
self.assertEqual([], mydoc._get_changed_fields())
|
||||||
|
|
||||||
|
def test_referenced_object_changed_attributes(self):
|
||||||
|
"""Ensures that when you save a new reference to a field, the referenced object isn't altered"""
|
||||||
|
|
||||||
|
class Organization(Document):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
class User(Document):
|
||||||
|
name = StringField()
|
||||||
|
org = ReferenceField('Organization', required=True)
|
||||||
|
|
||||||
|
Organization.drop_collection()
|
||||||
|
User.drop_collection()
|
||||||
|
|
||||||
|
org1 = Organization(name='Org 1')
|
||||||
|
org1.save()
|
||||||
|
|
||||||
|
org2 = Organization(name='Org 2')
|
||||||
|
org2.save()
|
||||||
|
|
||||||
|
user = User(name='Fred', org=org1)
|
||||||
|
user.save()
|
||||||
|
|
||||||
|
org1.reload()
|
||||||
|
org2.reload()
|
||||||
|
user.reload()
|
||||||
|
self.assertEqual(org1.name, 'Org 1')
|
||||||
|
self.assertEqual(org2.name, 'Org 2')
|
||||||
|
self.assertEqual(user.name, 'Fred')
|
||||||
|
|
||||||
|
user.name = 'Harold'
|
||||||
|
user.org = org2
|
||||||
|
|
||||||
|
org2.name = 'New Org 2'
|
||||||
|
self.assertEqual(org2.name, 'New Org 2')
|
||||||
|
|
||||||
|
user.save()
|
||||||
|
org2.save()
|
||||||
|
|
||||||
|
self.assertEqual(org2.name, 'New Org 2')
|
||||||
|
org2.reload()
|
||||||
|
self.assertEqual(org2.name, 'New Org 2')
|
||||||
|
|
||||||
|
def test_delta_for_nested_map_fields(self):
|
||||||
|
class UInfoDocument(Document):
|
||||||
|
phone = StringField()
|
||||||
|
|
||||||
|
class EmbeddedRole(EmbeddedDocument):
|
||||||
|
type = StringField()
|
||||||
|
|
||||||
|
class EmbeddedUser(EmbeddedDocument):
|
||||||
|
name = StringField()
|
||||||
|
roles = MapField(field=EmbeddedDocumentField(EmbeddedRole))
|
||||||
|
rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole))
|
||||||
|
info = ReferenceField(UInfoDocument)
|
||||||
|
|
||||||
|
class Doc(Document):
|
||||||
|
users = MapField(field=EmbeddedDocumentField(EmbeddedUser))
|
||||||
|
num = IntField(default=-1)
|
||||||
|
|
||||||
|
Doc.drop_collection()
|
||||||
|
|
||||||
|
doc = Doc(num=1)
|
||||||
|
doc.users["007"] = EmbeddedUser(name="Agent007")
|
||||||
|
doc.save()
|
||||||
|
|
||||||
|
uinfo = UInfoDocument(phone="79089269066")
|
||||||
|
uinfo.save()
|
||||||
|
|
||||||
|
d = Doc.objects(num=1).first()
|
||||||
|
d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin")
|
||||||
|
d.users["007"]["rolist"].append(EmbeddedRole(type="oops"))
|
||||||
|
d.users["007"]["info"] = uinfo
|
||||||
|
delta = d._delta()
|
||||||
|
self.assertEqual(True, "users.007.roles.666" in delta[0])
|
||||||
|
self.assertEqual(True, "users.007.rolist" in delta[0])
|
||||||
|
self.assertEqual(True, "users.007.info" in delta[0])
|
||||||
|
self.assertEqual('superadmin', delta[0]["users.007.roles.666"]["type"])
|
||||||
|
self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"])
|
||||||
|
self.assertEqual(uinfo.id, delta[0]["users.007.info"])
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
||||||
373 tests/document/dynamic.py Normal file
@@ -0,0 +1,373 @@
import unittest

from mongoengine import *
from mongoengine.connection import get_db

__all__ = ("DynamicTest", )


class DynamicTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        self.Person = Person

    def test_simple_dynamic_document(self):
        """Ensures simple dynamic documents are saved correctly"""

        p = self.Person()
        p.name = "James"
        p.age = 34

        self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James",
                                        "age": 34})
        self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"])
        p.save()
        self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"])

        self.assertEqual(self.Person.objects.first().age, 34)

        # Confirm no changes to self.Person
        self.assertFalse(hasattr(self.Person, 'age'))

    def test_change_scope_of_variable(self):
        """Test changing the scope of a dynamic field has no adverse effects"""
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEqual(p.misc, {'hello': 'world'})

    def test_delete_dynamic_field(self):
        """Test deleting a dynamic field works"""
        self.Person.drop_collection()
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEqual(p.misc, {'hello': 'world'})
        collection = self.db[self.Person._get_collection_name()]
        obj = collection.find_one()
        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])

        del p.misc
        p.save()

        p = self.Person.objects.get()
        self.assertFalse(hasattr(p, 'misc'))

        obj = collection.find_one()
        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name'])

    def test_reload_after_unsetting(self):
        p = self.Person()
        p.misc = 22
        p.save()
        p.update(unset__misc=1)
        p.reload()

    def test_reload_dynamic_field(self):
        self.Person.objects.delete()
        p = self.Person.objects.create()
        p.update(age=1)

        self.assertEqual(len(p._data), 3)
        self.assertEqual(sorted(p._data.keys()), ['_cls', 'id', 'name'])

        p.reload()
        self.assertEqual(len(p._data), 4)
        self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name'])

    def test_dynamic_document_queries(self):
        """Ensure we can query dynamic fields"""
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        self.assertEqual(1, self.Person.objects(age=22).count())
        p = self.Person.objects(age=22)
        p = p.get()
        self.assertEqual(22, p.age)

    def test_complex_dynamic_document_queries(self):
        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        p = Person(name="test")
        p.age = "ten"
        p.save()

        p1 = Person(name="test1")
        p1.age = "less then ten and a half"
        p1.save()

        p2 = Person(name="test2")
        p2.age = 10
        p2.save()

        self.assertEqual(Person.objects(age__icontains='ten').count(), 2)
        self.assertEqual(Person.objects(age__gte=10).count(), 1)

    def test_complex_data_lookups(self):
        """Ensure you can query dynamic document dynamic fields"""
        p = self.Person()
        p.misc = {'hello': 'world'}
        p.save()

        self.assertEqual(1, self.Person.objects(misc__hello='world').count())

    def test_three_level_complex_data_lookups(self):
        """Ensure you can query three level document dynamic fields"""
        p = self.Person.objects.create(
            misc={'hello': {'hello2': 'world'}}
        )
        self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count())

    def test_complex_embedded_document_validation(self):
        """Ensure embedded dynamic documents may be validated"""
        class Embedded(DynamicEmbeddedDocument):
            content = URLField()

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_doc_1 = Embedded(content='http://mongoengine.org')
        embedded_doc_1.validate()

        embedded_doc_2 = Embedded(content='this is not a url')
        self.assertRaises(ValidationError, embedded_doc_2.validate)

        doc.embedded_field_1 = embedded_doc_1
        doc.embedded_field_2 = embedded_doc_2
        self.assertRaises(ValidationError, doc.validate)

    def test_inheritance(self):
        """Ensure that dynamic document plays nice with inheritance"""
        class Employee(self.Person):
            salary = IntField()

        Employee.drop_collection()

        self.assertTrue('name' in Employee._fields)
        self.assertTrue('salary' in Employee._fields)
        self.assertEqual(Employee._get_collection_name(),
                         self.Person._get_collection_name())

        joe_bloggs = Employee()
        joe_bloggs.name = "Joe Bloggs"
        joe_bloggs.salary = 10
        joe_bloggs.age = 20
        joe_bloggs.save()

        self.assertEqual(1, self.Person.objects(age=20).count())
        self.assertEqual(1, Employee.objects(age=20).count())

        joe_bloggs = self.Person.objects.first()
        self.assertTrue(isinstance(joe_bloggs, Employee))

    def test_embedded_dynamic_document(self):
        """Test dynamic embedded documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc.to_mongo(), {
            "embedded_field": {
                "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2, {'hello': 'world'}]
            }
        })
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc.embedded_field.__class__, Embedded)
        self.assertEqual(doc.embedded_field.string_field, "hello")
        self.assertEqual(doc.embedded_field.int_field, 1)
        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(doc.embedded_field.list_field,
                         ['1', 2, {'hello': 'world'}])

    def test_complex_embedded_documents(self):
        """Test complex dynamic embedded documents setups"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        embedded_1.list_field = ['1', 2, embedded_2]
        doc.embedded_field = embedded_1

        self.assertEqual(doc.to_mongo(), {
            "embedded_field": {
                "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2,
                               {"_cls": "Embedded",
                                "string_field": "hello",
                                "int_field": 1,
                                "dict_field": {"hello": "world"},
                                "list_field": ['1', 2, {'hello': 'world'}]}
                               ]
            }
        })
        doc.save()
        doc = Doc.objects.first()
        self.assertEqual(doc.embedded_field.__class__, Embedded)
        self.assertEqual(doc.embedded_field.string_field, "hello")
        self.assertEqual(doc.embedded_field.int_field, 1)
        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)

        embedded_field = doc.embedded_field.list_field[2]

        self.assertEqual(embedded_field.__class__, Embedded)
        self.assertEqual(embedded_field.string_field, "hello")
        self.assertEqual(embedded_field.int_field, 1)
        self.assertEqual(embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(embedded_field.list_field, ['1', 2,
                                                     {'hello': 'world'}])

    def test_dynamic_and_embedded(self):
        """Ensure embedded documents play nicely"""

        class Address(EmbeddedDocument):
            city = StringField()

        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        Person(name="Ross", address=Address(city="London")).save()

        person = Person.objects.first()
        person.address.city = "Lundenne"
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Lundenne")

        person = Person.objects.first()
        person.address = Address(city="Londinium")
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Londinium")

        person = Person.objects.first()
        person.age = 35
        person.save()
        self.assertEqual(Person.objects.first().age, 35)

    def test_dynamic_embedded_works_with_only(self):
        """Ensure custom fieldnames on a dynamic embedded document are found by qs.only()"""

        class Address(DynamicEmbeddedDocument):
            city = StringField()

        class Person(DynamicDocument):
            address = EmbeddedDocumentField(Address)

        Person.drop_collection()

        Person(name="Eric", address=Address(city="San Francisco", street_number="1337")).save()

        self.assertEqual(Person.objects.first().address.street_number, '1337')
        self.assertEqual(Person.objects.only('address__street_number').first().address.street_number, '1337')

    def test_dynamic_and_embedded_dict_access(self):
        """Ensure embedded dynamic documents work with dict[] style access"""

        class Address(EmbeddedDocument):
            city = StringField()

        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        Person(name="Ross", address=Address(city="London")).save()

        person = Person.objects.first()
        person.attrval = "This works"

        person["phone"] = "555-1212"  # but this should too

        # Same thing two levels deep
        person["address"]["city"] = "Lundenne"
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Lundenne")

        self.assertEqual(Person.objects.first().phone, "555-1212")

        person = Person.objects.first()
        person.address = Address(city="Londinium")
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Londinium")

        person = Person.objects.first()
        person["age"] = 35
        person.save()
        self.assertEqual(Person.objects.first().age, 35)


if __name__ == '__main__':
    unittest.main()
1020 tests/document/indexes.py Normal file
File diff suppressed because it is too large
512 tests/document/inheritance.py Normal file
@@ -0,0 +1,512 @@
# -*- coding: utf-8 -*-
import unittest
import warnings

from datetime import datetime

from tests.fixtures import Base

from mongoengine import Document, EmbeddedDocument, connect
from mongoengine.connection import get_db
from mongoengine.fields import (BooleanField, GenericReferenceField,
                                IntField, StringField)

__all__ = ('InheritanceTest', )


class InheritanceTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_superclasses(self):
        """Ensure that the correct list of superclasses is assembled.
        """
        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Fish._superclasses, ('Animal',))
        self.assertEqual(Guppy._superclasses, ('Animal', 'Animal.Fish'))
        self.assertEqual(Mammal._superclasses, ('Animal',))
        self.assertEqual(Dog._superclasses, ('Animal', 'Animal.Mammal'))
        self.assertEqual(Human._superclasses, ('Animal', 'Animal.Mammal'))

    def test_external_superclasses(self):
        """Ensure that the correct list of super classes is assembled when
        importing part of the model.
        """
        class Animal(Base): pass
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._superclasses, ('Base', ))
        self.assertEqual(Fish._superclasses, ('Base', 'Base.Animal',))
        self.assertEqual(Guppy._superclasses, ('Base', 'Base.Animal',
                                               'Base.Animal.Fish'))
        self.assertEqual(Mammal._superclasses, ('Base', 'Base.Animal',))
        self.assertEqual(Dog._superclasses, ('Base', 'Base.Animal',
                                             'Base.Animal.Mammal'))
        self.assertEqual(Human._superclasses, ('Base', 'Base.Animal',
                                               'Base.Animal.Mammal'))

    def test_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled.
        """
        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._subclasses, ('Animal',
                                              'Animal.Fish',
                                              'Animal.Fish.Guppy',
                                              'Animal.Mammal',
                                              'Animal.Mammal.Dog',
                                              'Animal.Mammal.Human'))
        self.assertEqual(Fish._subclasses, ('Animal.Fish',
                                            'Animal.Fish.Guppy',))
        self.assertEqual(Guppy._subclasses, ('Animal.Fish.Guppy',))
        self.assertEqual(Mammal._subclasses, ('Animal.Mammal',
                                              'Animal.Mammal.Dog',
                                              'Animal.Mammal.Human'))
        self.assertEqual(Human._subclasses, ('Animal.Mammal.Human',))

    def test_external_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled when importing part of the model.
        """
        class Animal(Base): pass
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._subclasses, ('Base.Animal',
                                              'Base.Animal.Fish',
                                              'Base.Animal.Fish.Guppy',
                                              'Base.Animal.Mammal',
                                              'Base.Animal.Mammal.Dog',
                                              'Base.Animal.Mammal.Human'))
        self.assertEqual(Fish._subclasses, ('Base.Animal.Fish',
                                            'Base.Animal.Fish.Guppy',))
        self.assertEqual(Guppy._subclasses, ('Base.Animal.Fish.Guppy',))
        self.assertEqual(Mammal._subclasses, ('Base.Animal.Mammal',
                                              'Base.Animal.Mammal.Dog',
                                              'Base.Animal.Mammal.Human'))
        self.assertEqual(Human._subclasses, ('Base.Animal.Mammal.Human',))

    def test_dynamic_declarations(self):
        """Test that declaring an extra class updates meta data"""

        class Animal(Document):
            meta = {'allow_inheritance': True}

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Animal._subclasses, ('Animal',))

        # Test dynamically adding a class changes the meta data
        class Fish(Animal):
            pass

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish'))

        self.assertEqual(Fish._superclasses, ('Animal', ))
        self.assertEqual(Fish._subclasses, ('Animal.Fish',))

        # Test dynamically adding an inherited class changes the meta data
        class Pike(Fish):
            pass

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish',
                                              'Animal.Fish.Pike'))

        self.assertEqual(Fish._superclasses, ('Animal', ))
        self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike'))

        self.assertEqual(Pike._superclasses, ('Animal', 'Animal.Fish'))
        self.assertEqual(Pike._subclasses, ('Animal.Fish.Pike',))

    def test_inheritance_meta_data(self):
        """Ensure that document may inherit fields from a superclass document.
        """
        class Person(Document):
            name = StringField()
            age = IntField()

            meta = {'allow_inheritance': True}

        class Employee(Person):
            salary = IntField()

        self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
                         sorted(Employee._fields.keys()))
        self.assertEqual(Employee._get_collection_name(),
                         Person._get_collection_name())

    def test_inheritance_to_mongo_keys(self):
        """Ensure that document may inherit fields from a superclass document.
        """
        class Person(Document):
            name = StringField()
            age = IntField()

            meta = {'allow_inheritance': True}

        class Employee(Person):
            salary = IntField()

        self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
                         sorted(Employee._fields.keys()))
        self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
                         ['_cls', 'name', 'age'])
        self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
                         ['_cls', 'name', 'age', 'salary'])
        self.assertEqual(Employee._get_collection_name(),
                         Person._get_collection_name())

    def test_indexes_and_multiple_inheritance(self):
        """ Ensure that all of the indexes are created for a document with
        multiple inheritance.
        """

        class A(Document):
            a = StringField()

            meta = {
                'allow_inheritance': True,
                'indexes': ['a']
            }

        class B(Document):
            b = StringField()

            meta = {
                'allow_inheritance': True,
                'indexes': ['b']
            }

        class C(A, B):
            pass

        A.drop_collection()
        B.drop_collection()
        C.drop_collection()

        C.ensure_indexes()

        self.assertEqual(
            sorted([idx['key'] for idx in C._get_collection().index_information().values()]),
            sorted([[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]])
        )

    def test_polymorphic_queries(self):
        """Ensure that the correct subclasses are returned from a query
        """

        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        Animal.drop_collection()

        Animal().save()
        Fish().save()
        Mammal().save()
        Dog().save()
        Human().save()

        classes = [obj.__class__ for obj in Animal.objects]
        self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human])

        classes = [obj.__class__ for obj in Mammal.objects]
        self.assertEqual(classes, [Mammal, Dog, Human])

        classes = [obj.__class__ for obj in Human.objects]
        self.assertEqual(classes, [Human])

    def test_allow_inheritance(self):
        """Ensure that inheritance is disabled by default on simple
        classes and that _cls will not be used.
        """
        class Animal(Document):
            name = StringField()

        # can't inherit because Animal didn't explicitly allow inheritance
        with self.assertRaises(ValueError):
            class Dog(Animal):
                pass

        # Check that _cls etc aren't present on simple documents
        dog = Animal(name='dog').save()
        self.assertEqual(dog.to_mongo().keys(), ['_id', 'name'])

        collection = self.db[Animal._get_collection_name()]
        obj = collection.find_one()
        self.assertFalse('_cls' in obj)

    def test_cant_turn_off_inheritance_on_subclass(self):
        """Ensure that if inheritance is on in a subclass you can't turn it off.
        """
        class Animal(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        with self.assertRaises(ValueError):
            class Mammal(Animal):
                meta = {'allow_inheritance': False}

    def test_allow_inheritance_abstract_document(self):
        """Ensure that abstract documents can set inheritance rules and that
        _cls will not be used.
        """
        class FinalDocument(Document):
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class Animal(FinalDocument):
            name = StringField()

        with self.assertRaises(ValueError):
            class Mammal(Animal):
                pass

        # Check that _cls isn't present in simple documents
        doc = Animal(name='dog')
        self.assertFalse('_cls' in doc.to_mongo())

    def test_abstract_handle_ids_in_metaclass_properly(self):

        class City(Document):
            continent = StringField()
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name='Berlin', continent='Europe')
        self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._fields_ordered), 3)
        self.assertEqual(berlin._fields_ordered[0], 'id')

    def test_auto_id_not_set_if_specific_in_parent_class(self):

        class City(Document):
            continent = StringField()
            city_id = IntField(primary_key=True)
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name='Berlin', continent='Europe')
        self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._fields_ordered), 3)
        self.assertEqual(berlin._fields_ordered[0], 'city_id')

    def test_auto_id_vs_non_pk_id_field(self):

        class City(Document):
            continent = StringField()
            id = IntField()
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name='Berlin', continent='Europe')
        self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._fields_ordered), 4)
        self.assertEqual(berlin._fields_ordered[0], 'auto_id_0')
        berlin.save()
        self.assertEqual(berlin.pk, berlin.auto_id_0)

    def test_abstract_document_creation_does_not_fail(self):
        class City(Document):
            continent = StringField()
            meta = {'abstract': True,
                    'allow_inheritance': False}

        bkk = City(continent='asia')
        self.assertEqual(None, bkk.pk)
        # TODO: expected error? Shouldn't we create a new error type?
        with self.assertRaises(KeyError):
            setattr(bkk, 'pk', 1)

    def test_allow_inheritance_embedded_document(self):
        """Ensure embedded documents respect inheritance."""
        class Comment(EmbeddedDocument):
            content = StringField()

        with self.assertRaises(ValueError):
            class SpecialComment(Comment):
                pass

        doc = Comment(content='test')
        self.assertFalse('_cls' in doc.to_mongo())

        class Comment(EmbeddedDocument):
            content = StringField()
            meta = {'allow_inheritance': True}

        doc = Comment(content='test')
        self.assertTrue('_cls' in doc.to_mongo())

    def test_document_inheritance(self):
        """Ensure multiple inheritance of abstract documents
        """
        class DateCreatedDocument(Document):
            meta = {
                'allow_inheritance': True,
                'abstract': True,
            }

        class DateUpdatedDocument(Document):
            meta = {
                'allow_inheritance': True,
                'abstract': True,
            }

        try:
            class MyDocument(DateCreatedDocument, DateUpdatedDocument):
                pass
        except Exception:
            self.assertTrue(False, "Couldn't create MyDocument class")

    def test_abstract_documents(self):
        """Ensure that a document superclass can be marked as abstract
        thereby not using it as the name for the collection."""

        defaults = {'index_background': True,
                    'index_drop_dups': True,
                    'index_opts': {'hello': 'world'},
                    'allow_inheritance': True,
                    'queryset_class': 'QuerySet',
                    'db_alias': 'myDB',
                    'shard_key': ('hello', 'world')}

        meta_settings = {'abstract': True}
        meta_settings.update(defaults)

        class Animal(Document):
            name = StringField()
            meta = meta_settings

        class Fish(Animal): pass
        class Guppy(Fish): pass

        class Mammal(Animal):
            meta = {'abstract': True}
        class Human(Mammal): pass

        for k, v in defaults.iteritems():
            for cls in [Animal, Fish, Guppy]:
                self.assertEqual(cls._meta[k], v)

        self.assertFalse('collection' in Animal._meta)
        self.assertFalse('collection' in Mammal._meta)

        self.assertEqual(Animal._get_collection_name(), None)
        self.assertEqual(Mammal._get_collection_name(), None)

        self.assertEqual(Fish._get_collection_name(), 'fish')
        self.assertEqual(Guppy._get_collection_name(), 'fish')
        self.assertEqual(Human._get_collection_name(), 'human')

        # ensure that a subclass of a non-abstract class can't be abstract
        with self.assertRaises(ValueError):
            class EvilHuman(Human):
                evil = BooleanField(default=True)
                meta = {'abstract': True}

    def test_abstract_embedded_documents(self):
        # 789: EmbeddedDocument shouldn't inherit abstract
        class A(EmbeddedDocument):
            meta = {"abstract": True}

        class B(A):
            pass

        self.assertFalse(B._meta["abstract"])

    def test_inherited_collections(self):
        """Ensure that subclassed documents don't override parents'
        collections
        """

        class Drink(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        class Drinker(Document):
            drink = GenericReferenceField()

        try:
            warnings.simplefilter("error")

            class AcloholicDrink(Drink):
                meta = {'collection': 'booze'}

        except SyntaxWarning:
            warnings.simplefilter("ignore")

            class AlcoholicDrink(Drink):
                meta = {'collection': 'booze'}

        else:
            raise AssertionError("SyntaxWarning should be triggered")

        warnings.resetwarnings()

        Drink.drop_collection()
        AlcoholicDrink.drop_collection()
        Drinker.drop_collection()

        red_bull = Drink(name='Red Bull')
        red_bull.save()

        programmer = Drinker(drink=red_bull)
        programmer.save()

        beer = AlcoholicDrink(name='Beer')
        beer.save()
        real_person = Drinker(drink=beer)
        real_person.save()

        self.assertEqual(Drinker.objects[0].drink.name, red_bull.name)
        self.assertEqual(Drinker.objects[1].drink.name, beer.name)


if __name__ == '__main__':
    unittest.main()
3142 tests/document/instance.py Normal file
File diff suppressed because it is too large
112 tests/document/json_serialisation.py Normal file
@@ -0,0 +1,112 @@
import unittest
import uuid

from nose.plugins.skip import SkipTest
from datetime import datetime
from bson import ObjectId

import pymongo

from mongoengine import *

__all__ = ("TestJson",)


class TestJson(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

    def test_json_names(self):
        """
        Going to test reported issue:
            https://github.com/MongoEngine/mongoengine/issues/654
        where the reporter asks for the availability to perform
        a to_json with the original class names and not the abbreviated
        mongodb document keys
        """
        class Embedded(EmbeddedDocument):
            string = StringField(db_field='s')

        class Doc(Document):
            string = StringField(db_field='s')
            embedded = EmbeddedDocumentField(Embedded, db_field='e')

        doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello"))
        doc_json = doc.to_json(sort_keys=True, use_db_field=False, separators=(',', ':'))

        expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}"""

        self.assertEqual(doc_json, expected_json)

    def test_json_simple(self):

        class Embedded(EmbeddedDocument):
            string = StringField()

        class Doc(Document):
            string = StringField()
            embedded_field = EmbeddedDocumentField(Embedded)

            def __eq__(self, other):
                return (self.string == other.string and
                        self.embedded_field == other.embedded_field)

        doc = Doc(string="Hi", embedded_field=Embedded(string="Hi"))

        doc_json = doc.to_json(sort_keys=True, separators=(',', ':'))
        expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}"""
        self.assertEqual(doc_json, expected_json)

        self.assertEqual(doc, Doc.from_json(doc.to_json()))

    def test_json_complex(self):

        if pymongo.version_tuple[0] <= 2 and pymongo.version_tuple[1] <= 3:
            raise SkipTest("Need pymongo 2.4 as it has a fix for DBRefs")

        class EmbeddedDoc(EmbeddedDocument):
            pass

        class Simple(Document):
            pass

        class Doc(Document):
            string_field = StringField(default='1')
            int_field = IntField(default=1)
            float_field = FloatField(default=1.1)
            boolean_field = BooleanField(default=True)
            datetime_field = DateTimeField(default=datetime.now)
            embedded_document_field = EmbeddedDocumentField(EmbeddedDoc,
                                                            default=lambda: EmbeddedDoc())
            list_field = ListField(default=lambda: [1, 2, 3])
            dict_field = DictField(default=lambda: {"hello": "world"})
            objectid_field = ObjectIdField(default=ObjectId)
            reference_field = ReferenceField(Simple, default=lambda:
                                             Simple().save())
            map_field = MapField(IntField(), default=lambda: {"simple": 1})
            decimal_field = DecimalField(default=1.0)
            complex_datetime_field = ComplexDateTimeField(default=datetime.now)
            url_field = URLField(default="http://mongoengine.org")
            dynamic_field = DynamicField(default=1)
            generic_reference_field = GenericReferenceField(
                default=lambda: Simple().save())
            sorted_list_field = SortedListField(IntField(),
                                                default=lambda: [1, 2, 3])
            email_field = EmailField(default="ross@example.com")
            geo_point_field = GeoPointField(default=lambda: [1, 2])
            sequence_field = SequenceField()
            uuid_field = UUIDField(default=uuid.uuid4)
            generic_embedded_document_field = GenericEmbeddedDocumentField(
                default=lambda: EmbeddedDoc())

            def __eq__(self, other):
                import json
                return json.loads(self.to_json()) == json.loads(other.to_json())

        doc = Doc()
        self.assertEqual(doc, Doc.from_json(doc.to_json()))


if __name__ == '__main__':
    unittest.main()
214 tests/document/validation.py Normal file
@@ -0,0 +1,214 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import unittest
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from mongoengine import *
|
||||||
|
|
||||||
|
__all__ = ("ValidatorErrorTest",)
|
||||||
|
|
||||||
|
|
||||||
|
class ValidatorErrorTest(unittest.TestCase):
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
connect(db='mongoenginetest')
|
||||||
|
|
||||||
|
def test_to_dict(self):
|
||||||
|
"""Ensure a ValidationError handles error to_dict correctly.
|
||||||
|
"""
|
||||||
|
error = ValidationError('root')
|
||||||
|
self.assertEqual(error.to_dict(), {})
|
||||||
|
|
||||||
|
# 1st level error schema
|
||||||
|
error.errors = {'1st': ValidationError('bad 1st'), }
|
||||||
|
self.assertTrue('1st' in error.to_dict())
|
||||||
|
self.assertEqual(error.to_dict()['1st'], 'bad 1st')
|
||||||
|
|
||||||
|
# 2nd level error schema
|
||||||
|
error.errors = {'1st': ValidationError('bad 1st', errors={
|
||||||
|
'2nd': ValidationError('bad 2nd'),
|
||||||
|
})}
|
||||||
|
self.assertTrue('1st' in error.to_dict())
|
||||||
|
self.assertTrue(isinstance(error.to_dict()['1st'], dict))
|
||||||
|
self.assertTrue('2nd' in error.to_dict()['1st'])
|
||||||
|
self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd')
|
||||||
|
|
||||||
|
# moar levels
|
||||||
|
error.errors = {'1st': ValidationError('bad 1st', errors={
|
||||||
|
'2nd': ValidationError('bad 2nd', errors={
|
||||||
|
'3rd': ValidationError('bad 3rd', errors={
|
||||||
|
'4th': ValidationError('Inception'),
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
})}
|
||||||
|
self.assertTrue('1st' in error.to_dict())
|
||||||
|
self.assertTrue('2nd' in error.to_dict()['1st'])
|
||||||
|
self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
|
||||||
|
self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
|
||||||
|
self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'],
|
||||||
|
'Inception')
|
||||||
|
|
||||||
|
self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])")
|
||||||
|
|
||||||
|
def test_model_validation(self):
|
||||||
|
|
||||||
|
class User(Document):
|
||||||
|
username = StringField(primary_key=True)
|
||||||
|
name = StringField(required=True)
|
||||||
|
|
||||||
|
try:
|
||||||
|
User().validate()
|
||||||
|
except ValidationError as e:
|
||||||
|
self.assertTrue("User:None" in e.message)
|
||||||
|
self.assertEqual(e.to_dict(), {
|
||||||
|
'username': 'Field is required',
|
||||||
|
'name': 'Field is required'})
|
||||||
|
|
||||||
|
user = User(username="RossC0", name="Ross").save()
|
||||||
|
user.name = None
|
||||||
|
try:
|
||||||
|
user.save()
|
||||||
|
except ValidationError as e:
|
||||||
|
self.assertTrue("User:RossC0" in e.message)
|
||||||
|
self.assertEqual(e.to_dict(), {
|
||||||
|
'name': 'Field is required'})
|
||||||
|
|
||||||
|
def test_fields_rewrite(self):
|
||||||
|
class BasePerson(Document):
|
||||||
|
name = StringField()
|
||||||
|
age = IntField()
|
||||||
|
meta = {'abstract': True}
|
||||||
|
|
||||||
|
class Person(BasePerson):
|
||||||
|
name = StringField(required=True)
|
||||||
|
|
||||||
|
p = Person(age=15)
|
||||||
|
self.assertRaises(ValidationError, p.validate)
|
||||||
|
|
||||||
|
def test_embedded_document_validation(self):
|
||||||
|
"""Ensure that embedded documents may be validated.
|
||||||
|
"""
|
||||||
|
class Comment(EmbeddedDocument):
|
||||||
|
date = DateTimeField()
|
||||||
|
content = StringField(required=True)
|
||||||
|
|
||||||
|
comment = Comment()
|
||||||
|
self.assertRaises(ValidationError, comment.validate)
|
||||||
|
|
||||||
|
comment.content = 'test'
|
||||||
|
comment.validate()
|
||||||
|
|
||||||
|
comment.date = 4
|
||||||
|
self.assertRaises(ValidationError, comment.validate)
|
||||||
|
|
||||||
|
comment.date = datetime.now()
|
||||||
|
comment.validate()
|
||||||
|
self.assertEqual(comment._instance, None)
|
||||||
|
|
||||||
|
def test_embedded_db_field_validate(self):
|
||||||
|
|
||||||
|
class SubDoc(EmbeddedDocument):
|
||||||
|
val = IntField(required=True)
|
||||||
|
|
||||||
|
class Doc(Document):
|
||||||
|
id = StringField(primary_key=True)
|
||||||
|
e = EmbeddedDocumentField(SubDoc, db_field='eb')
|
||||||
|
|
||||||
|
try:
|
||||||
|
Doc(id="bad").validate()
|
||||||
|
except ValidationError as e:
|
||||||
|
self.assertTrue("SubDoc:None" in e.message)
|
||||||
|
self.assertEqual(e.to_dict(), {
|
||||||
|
"e": {'val': 'OK could not be converted to int'}})
|
||||||
|
|
||||||
|
Doc.drop_collection()
|
||||||
|
|
||||||
|
Doc(id="test", e=SubDoc(val=15)).save()
|
||||||
|
|
||||||
|
doc = Doc.objects.first()
|
||||||
|
keys = doc._data.keys()
|
||||||
|
self.assertEqual(2, len(keys))
|
||||||
|
self.assertTrue('e' in keys)
|
||||||
|
self.assertTrue('id' in keys)
|
||||||
|
|
||||||
|
doc.e.val = "OK"
|
||||||
|
try:
|
||||||
|
doc.save()
|
||||||
|
except ValidationError as e:
|
||||||
|
self.assertTrue("Doc:test" in e.message)
|
||||||
|
self.assertEqual(e.to_dict(), {
|
||||||
|
"e": {'val': 'OK could not be converted to int'}})
|
||||||
|
|
||||||
|
def test_embedded_weakref(self):
|
||||||
|
|
||||||
|
class SubDoc(EmbeddedDocument):
|
||||||
|
val = IntField(required=True)
|
||||||
|
|
||||||
|
class Doc(Document):
|
||||||
|
e = EmbeddedDocumentField(SubDoc, db_field='eb')
|
||||||
|
|
||||||
|
Doc.drop_collection()
|
||||||
|
|
||||||
|
d1 = Doc()
|
||||||
|
d2 = Doc()
|
||||||
|
|
||||||
|
s = SubDoc()
|
||||||
|
|
||||||
|
self.assertRaises(ValidationError, s.validate)
|
||||||
|
|
||||||
|
d1.e = s
|
||||||
|
d2.e = s
|
||||||
|
|
||||||
|
del d1
|
||||||
|
|
||||||
|
self.assertRaises(ValidationError, d2.validate)
|
||||||
|
|
||||||
|
def test_parent_reference_in_child_document(self):
|
||||||
|
"""
|
||||||
|
Test to ensure a ReferenceField can store a reference to a parent
|
||||||
|
class when inherited. Issue #954.
|
||||||
|
"""
|
||||||
|
class Parent(Document):
|
||||||
|
meta = {'allow_inheritance': True}
|
||||||
|
reference = ReferenceField('self')
|
||||||
|
|
||||||
|
class Child(Parent):
|
||||||
|
pass
|
||||||
|
|
||||||
|
parent = Parent()
|
||||||
|
parent.save()
|
||||||
|
|
||||||
|
child = Child(reference=parent)
|
||||||
|
|
||||||
|
# Saving child should not raise a ValidationError
|
||||||
|
try:
|
||||||
|
child.save()
|
||||||
|
except ValidationError as e:
|
||||||
|
self.fail("ValidationError raised: %s" % e.message)
|
||||||
|
|
||||||
|
def test_parent_reference_set_as_attribute_in_child_document(self):
|
||||||
|
"""
|
||||||
|
Test to ensure a ReferenceField can store a reference to a parent
|
||||||
|
class when inherited and when set via attribute. Issue #954.
|
||||||
|
"""
|
||||||
|
class Parent(Document):
|
||||||
|
meta = {'allow_inheritance': True}
|
||||||
|
reference = ReferenceField('self')
|
||||||
|
|
||||||
|
class Child(Parent):
|
||||||
|
pass
|
||||||
|
|
||||||
|
parent = Parent()
|
||||||
|
parent.save()
|
||||||
|
|
||||||
|
child = Child()
|
||||||
|
child.reference = parent
|
||||||
|
|
||||||
|
# Saving the child should not raise a ValidationError
|
||||||
|
try:
|
||||||
|
child.save()
|
||||||
|
except ValidationError as e:
|
||||||
|
self.fail("ValidationError raised: %s" % e.message)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
||||||
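For readers skimming this diff: the validation assertions above rely on ValidationError.to_dict() mapping each failing field to its individual message, while e.message carries the "ClassName:pk" summary (e.g. "User:RossC0", "Doc:test"). A minimal sketch of that behaviour follows; the Login class and the database name are hypothetical, not part of this changeset, and the exact message text can vary between MongoEngine versions.

from mongoengine import Document, StringField, ValidationError, connect

connect('validation_sketch')  # hypothetical throwaway database


class Login(Document):  # hypothetical class, used only for illustration
    name = StringField(required=True)


try:
    Login().validate()  # required field left unset
except ValidationError as e:
    # to_dict() maps each failing field name to its message, which is
    # what the assertions in the tests above inspect.
    print(e.to_dict())  # e.g. {'name': 'Field is required'}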
@@ -1,502 +0,0 @@
import unittest

from mongoengine import *
from mongoengine.connection import get_db


class DynamicDocTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        self.Person = Person

    def test_simple_dynamic_document(self):
        """Ensures simple dynamic documents are saved correctly"""

        p = self.Person()
        p.name = "James"
        p.age = 34

        self.assertEquals(p.to_mongo(),
            {"_types": ["Person"], "_cls": "Person",
             "name": "James", "age": 34}
        )

        p.save()

        self.assertEquals(self.Person.objects.first().age, 34)

        # Confirm no changes to self.Person
        self.assertFalse(hasattr(self.Person, 'age'))

    def test_dynamic_document_delta(self):
        """Ensures simple dynamic documents can delta correctly"""
        p = self.Person(name="James", age=34)
        self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {}))

        p.doc = 123
        del(p.doc)
        self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1}))

    def test_change_scope_of_variable(self):
        """Test changing the scope of a dynamic field has no adverse effects"""
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEquals(p.misc, {'hello': 'world'})

    def test_delete_dynamic_field(self):
        """Test deleting a dynamic field works"""
        self.Person.drop_collection()
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEquals(p.misc, {'hello': 'world'})
        collection = self.db[self.Person._get_collection_name()]
        obj = collection.find_one()
        self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name'])

        del(p.misc)
        p.save()

        p = self.Person.objects.get()
        self.assertFalse(hasattr(p, 'misc'))

        obj = collection.find_one()
        self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'name'])

    def test_dynamic_document_queries(self):
        """Ensure we can query dynamic fields"""
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        self.assertEquals(1, self.Person.objects(age=22).count())
        p = self.Person.objects(age=22)
        p = p.get()
        self.assertEquals(22, p.age)

    def test_complex_dynamic_document_queries(self):
        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        p = Person(name="test")
        p.age = "ten"
        p.save()

        p1 = Person(name="test1")
        p1.age = "less then ten and a half"
        p1.save()

        p2 = Person(name="test2")
        p2.age = 10
        p2.save()

        self.assertEquals(Person.objects(age__icontains='ten').count(), 2)
        self.assertEquals(Person.objects(age__gte=10).count(), 1)

    def test_complex_data_lookups(self):
        """Ensure you can query dynamic document dynamic fields"""
        p = self.Person()
        p.misc = {'hello': 'world'}
        p.save()

        self.assertEquals(1, self.Person.objects(misc__hello='world').count())

    def test_inheritance(self):
        """Ensure that dynamic document plays nice with inheritance"""
        class Employee(self.Person):
            salary = IntField()

        Employee.drop_collection()

        self.assertTrue('name' in Employee._fields)
        self.assertTrue('salary' in Employee._fields)
        self.assertEqual(Employee._get_collection_name(),
                         self.Person._get_collection_name())

        joe_bloggs = Employee()
        joe_bloggs.name = "Joe Bloggs"
        joe_bloggs.salary = 10
        joe_bloggs.age = 20
        joe_bloggs.save()

        self.assertEquals(1, self.Person.objects(age=20).count())
        self.assertEquals(1, Employee.objects(age=20).count())

        joe_bloggs = self.Person.objects.first()
        self.assertTrue(isinstance(joe_bloggs, Employee))

    def test_embedded_dynamic_document(self):
        """Test dynamic embedded documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
            "embedded_field": {
                "_types": ['Embedded'], "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2, {'hello': 'world'}]
            }
        })
        doc.save()

        doc = Doc.objects.first()
        self.assertEquals(doc.embedded_field.__class__, Embedded)
        self.assertEquals(doc.embedded_field.string_field, "hello")
        self.assertEquals(doc.embedded_field.int_field, 1)
        self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEquals(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}])

    def test_complex_embedded_documents(self):
        """Test complex dynamic embedded documents setups"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        embedded_1.list_field = ['1', 2, embedded_2]
        doc.embedded_field = embedded_1

        self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
            "embedded_field": {
                "_types": ['Embedded'], "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2,
                    {"_types": ['Embedded'], "_cls": "Embedded",
                     "string_field": "hello",
                     "int_field": 1,
                     "dict_field": {"hello": "world"},
                     "list_field": ['1', 2, {'hello': 'world'}]}
                ]
            }
        })
        doc.save()
        doc = Doc.objects.first()
        self.assertEquals(doc.embedded_field.__class__, Embedded)
        self.assertEquals(doc.embedded_field.string_field, "hello")
        self.assertEquals(doc.embedded_field.int_field, 1)
        self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEquals(doc.embedded_field.list_field[0], '1')
        self.assertEquals(doc.embedded_field.list_field[1], 2)

        embedded_field = doc.embedded_field.list_field[2]

        self.assertEquals(embedded_field.__class__, Embedded)
        self.assertEquals(embedded_field.string_field, "hello")
        self.assertEquals(embedded_field.int_field, 1)
        self.assertEquals(embedded_field.dict_field, {'hello': 'world'})
        self.assertEquals(embedded_field.list_field, ['1', 2, {'hello': 'world'}])

    def test_delta_for_dynamic_documents(self):
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        self.assertEquals(p.age, 24)
        self.assertEquals(p._get_changed_fields(), ['age'])
        self.assertEquals(p._delta(), ({'age': 24}, {}))

        p = self.Person.objects(age=22).get()
        p.age = 24
        self.assertEquals(p.age, 24)
        self.assertEquals(p._get_changed_fields(), ['age'])
        self.assertEquals(p._delta(), ({'age': 24}, {}))

        p.save()
        self.assertEquals(1, self.Person.objects(age=24).count())

    def test_delta(self):

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEquals(doc._get_changed_fields(), [])
        self.assertEquals(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEquals(doc._get_changed_fields(), ['string_field'])
        self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEquals(doc._get_changed_fields(), ['int_field'])
        self.assertEquals(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEquals(doc._get_changed_fields(), ['dict_field'])
        self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEquals(doc._get_changed_fields(), ['list_field'])
        self.assertEquals(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEquals(doc._get_changed_fields(), ['dict_field'])
        self.assertEquals(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEquals(doc._get_changed_fields(), ['list_field'])
        self.assertEquals(doc._delta(), ({}, {'list_field': 1}))

    def test_delta_recursive(self):
        """Testing deltaing works with dynamic documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEquals(doc._get_changed_fields(), [])
        self.assertEquals(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEquals(doc._get_changed_fields(), ['embedded_field'])

        embedded_delta = {
            'string_field': 'hello',
            'int_field': 1,
            'dict_field': {'hello': 'world'},
            'list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {}))
        embedded_delta.update({
            '_types': ['Embedded'],
            '_cls': 'Embedded',
        })
        self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {}))

        doc.save()
        doc.reload()

        doc.embedded_field.dict_field = {}
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1}))

        self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
        doc.save()
        doc.reload()

        doc.embedded_field.list_field = []
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1}))
        self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1}))
        doc.save()
        doc.reload()

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEquals(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc.reload()

        self.assertEquals(doc.embedded_field.list_field[2]._changed_fields, [])
        self.assertEquals(doc.embedded_field.list_field[0], '1')
        self.assertEquals(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k])

        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field'])
        self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}]}, {}))
        self.assertEquals(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}
            ]}, {}))
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc.reload()

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort()
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])

        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
        doc.save()
        doc.reload()

        del(doc.embedded_field.list_field[2].list_field)
        self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))

        doc.save()
        doc.reload()

        doc.dict_field = {'embedded': embedded_1}
        doc.save()
        doc.reload()

        doc.dict_field['embedded'].string_field = 'Hello World'
        self.assertEquals(doc._get_changed_fields(), ['dict_field.embedded.string_field'])
        self.assertEquals(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {}))

    def test_indexes(self):
        """Ensure that indexes are used when meta[indexes] is specified.
        """
        class BlogPost(DynamicDocument):
            meta = {
                'indexes': [
                    '-date',
                    ('category', '-date')
                ],
            }

        BlogPost.drop_collection()

        info = BlogPost.objects._collection.index_information()
        # _id, '-date', ('cat', 'date')
        # NB: there is no index on _types by itself, since
        # the indices on -date and tags will both contain
        # _types as first element in the key
        self.assertEqual(len(info), 3)

        # Indexes are lazy so use list() to perform query
        list(BlogPost.objects)
        info = BlogPost.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('_types', 1), ('category', 1), ('date', -1)]
                        in info)
        self.assertTrue([('_types', 1), ('date', -1)] in info)
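The file removed above exercised DynamicDocument behaviour: attributes assigned to an instance become persisted, queryable fields without being declared on the class, and _delta()/_get_changed_fields() track what changed. A minimal sketch of the user-facing part of that behaviour, mirroring the deleted tests; the database name is hypothetical, and internals such as the _types markers differ between MongoEngine versions.

from mongoengine import DynamicDocument, StringField, connect

connect('dynamic_sketch')  # hypothetical throwaway database


class Person(DynamicDocument):  # mirrors the deleted tests' Person class
    name = StringField()


Person.drop_collection()

p = Person(name="James")
p.age = 34   # undeclared attribute becomes a dynamic field
p.save()

assert Person.objects(age=34).count() == 1   # dynamic fields are queryable
assert not hasattr(Person, 'age')            # the class itself gains no field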
 1906  tests/fields.py (file diff suppressed because it is too large)
    3  tests/fields/__init__.py (new file)
@@ -0,0 +1,3 @@
from fields import *
from file_tests import *
from geo import *
Some files were not shown because too many files have changed in this diff.