Compare commits
626 Commits
external-r...misleading
23 .install_mongodb_on_travis.sh (deleted)
@@ -1,23 +0,0 @@
#!/bin/bash

sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10

if [ "$MONGODB" = "2.4" ]; then
    echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list
    sudo apt-get update
    sudo apt-get install mongodb-10gen=2.4.14
    sudo service mongodb start
elif [ "$MONGODB" = "2.6" ]; then
    echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list
    sudo apt-get update
    sudo apt-get install mongodb-org-server=2.6.12
    # service should be started automatically
elif [ "$MONGODB" = "3.0" ]; then
    echo "deb http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list
    sudo apt-get update
    sudo apt-get install mongodb-org-server=3.0.14
    # service should be started automatically
else
    echo "Invalid MongoDB version, expected 2.4, 2.6, or 3.0."
    exit 1
fi;
96 .travis.yml
@@ -2,68 +2,78 @@
# PyMongo combinations. However, that would result in an overly long build
# with a very large number of jobs, hence we only test a subset of all the
# combinations:
# * MongoDB v2.4 & v3.0 are only tested against Python v2.7 & v3.5.
# * MongoDB v2.4 is tested against PyMongo v2.7 & v3.x.
# * MongoDB v3.0 is tested against PyMongo v3.x.
# * MongoDB v2.6 is currently the "main" version tested against Python v2.7,
#   v3.5, PyPy & PyPy3, and PyMongo v2.7, v2.8 & v3.x.
# * MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup,
#   tested against Python v2.7, v3.5, v3.6, and PyPy.
# * Besides that, we test the lowest actively supported Python/MongoDB/PyMongo
#   combination: MongoDB v3.4, PyMongo v3.4, Python v2.7.
# * MongoDB v3.6 is tested against Python v3.6, and PyMongo v3.6, v3.7, v3.8.
#
# We should periodically check MongoDB Server versions supported by MongoDB
# Inc., add newly released versions to the test matrix, and remove versions
# which have reached their End of Life. See:
# 1. https://www.mongodb.com/support-policy.
# 2. https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#python-driver-compatibility
#
# Reminder: Update README.rst if you change MongoDB versions we test.

language: python
python:
- 2.7
- 3.5
- 3.6
- 3.7
- pypy
- pypy3

dist: xenial

env:
- MONGODB=2.6 PYMONGO=2.7
- MONGODB=2.6 PYMONGO=2.8
- MONGODB=2.6 PYMONGO=3.0
  global:
    - MONGODB_3_4=3.4.17
    - MONGODB_3_6=3.6.12
    - PYMONGO_3_6=3.6
    - PYMONGO_3_4=3.4
  matrix:
    - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_6}

matrix:
  # Finish the build as soon as one job fails
  fast_finish: true

  include:
  - python: 2.7
    env: MONGODB=2.4 PYMONGO=2.7
  - python: 2.7
    env: MONGODB=2.4 PYMONGO=3.0
  - python: 2.7
    env: MONGODB=3.0 PYMONGO=3.0
  - python: 3.5
    env: MONGODB=2.4 PYMONGO=2.7
  - python: 3.5
    env: MONGODB=2.4 PYMONGO=3.0
  - python: 3.5
    env: MONGODB=3.0 PYMONGO=3.0
    env: MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_4}
  - python: 3.7
    env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6}

before_install:
- bash .install_mongodb_on_travis.sh

install:
- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev python-tk
- travis_retry pip install --upgrade pip
- travis_retry pip install coveralls
- travis_retry pip install flake8 flake8-import-order
- travis_retry pip install tox>=1.9
- travis_retry pip install "virtualenv<14.0.0" # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
- travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test
# Install Mongo
- wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz
- tar xzf mongodb-linux-x86_64-${MONGODB}.tgz
- ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version
# Install Python dependencies.
- pip install --upgrade pip
- pip install coveralls
- pip install flake8 flake8-import-order
- pip install tox # tox 3.11.0 has requirement virtualenv>=14.0.0
- pip install virtualenv # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
# Install the tox venv.
- tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test
# Install black for Python v3.7 only.
- if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then pip install black; fi

# Cache dependencies installed via pip
cache: pip

# Run flake8 for py27
before_script:
- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi
- mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data
- ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork
- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi # Run flake8 for Python 2.7 only
- if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then black --check .; else echo "black only runs on py37"; fi # Run black for Python 3.7 only
- mongo --eval 'db.version();' # Make sure mongo is awake

script:
- tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage

# For now only submit coveralls for Python v2.7. Python v3.x currently shows
# 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible
@@ -87,15 +97,15 @@ deploy:
  password:
    secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek=

  # create a source distribution and a pure python wheel for faster installs
  # Create a source distribution and a pure python wheel for faster installs.
  distributions: "sdist bdist_wheel"

  # only deploy on tagged commits (aka GitHub releases) and only for the
  # parent repo's builds running Python 2.7 along with dev PyMongo (we run
  # Travis against many different Python and PyMongo versions and we don't
  # want the deploy to occur multiple times).
  # Only deploy on tagged commits (aka GitHub releases) and only for the parent
  # repo's builds running Python v2.7 along with PyMongo v3.x and MongoDB v3.4.
  # We run Travis against many different Python, PyMongo, and MongoDB versions
  # and we don't want the deploy to occur multiple times).
  on:
    tags: true
    repo: MongoEngine/mongoengine
    condition: "$PYMONGO = 3.0"
    condition: ($PYMONGO = ${PYMONGO_3_6}) && ($MONGODB = ${MONGODB_3_4})
    python: 2.7
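The ``tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/')`` commands above derive the tox environment name from the Python and PyMongo versions. A minimal Python sketch of the same transformation (the helper name is illustrative, not part of the repo):

# Sketch of how the CI computes the tox env name (mirrors the shell pipeline above).
def tox_env(python_version, pymongo_version):
    name = "py{}-mg{}".format(python_version, pymongo_version)
    name = name.replace(".", "")           # tr -d .
    return name.replace("pypypy", "pypy")  # sed -e 's/pypypy/pypy/'

assert tox_env("2.7", "3.0") == "py27-mg30"
assert tox_env("3.6", "3.6") == "py36-mg36"
assert tox_env("pypy", "3.6") == "pypy-mg36"  # "py" + "pypy" collapses back to "pypy"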
9 AUTHORS
@@ -243,3 +243,12 @@ that much better:
* Victor Varvaryuk
* Stanislav Kaledin (https://github.com/sallyruthstruik)
* Dmitry Yantsen (https://github.com/mrTable)
* Renjianxin (https://github.com/Davidrjx)
* Erdenezul Batmunkh (https://github.com/erdenezul)
* Andy Yankovsky (https://github.com/werat)
* Bastien Gérard (https://github.com/bagerard)
* Trevor Hall (https://github.com/tjhall13)
* Gleb Voropaev (https://github.com/buggyspace)
* Paulo Amaral (https://github.com/pauloAmaral)
* Gaurav Dadhania (https://github.com/GVRV)
* Yurii Andrieiev (https://github.com/yandrieiev)
CONTRIBUTING.rst
@@ -22,18 +22,17 @@ Supported Interpreters

MongoEngine supports CPython 2.7 and newer. Language
features not supported by all interpreters can not be used.
Please also ensure that your code is properly converted by
`2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support.
The codebase is written in python 2 so you must be using python 2
when developing new features. Compatibility of the library with Python 3
relies on the 2to3 package that gets executed as part of the installation
build. You should ensure that your code is properly converted by
`2to3 <http://docs.python.org/library/2to3.html>`_.

Style Guide
-----------

MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
including 4 space indents. When possible we try to stick to 79 character line
limits. However, screens got bigger and an ORM has a strong focus on
readability and if it can help, we accept 119 as maximum line length, in a
similar way as `django does
<https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#python-style>`_
MongoEngine uses `black <https://github.com/python/black>`_ for code
formatting.

Testing
-------
18 README.rst
@@ -26,26 +26,28 @@ an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_.

Supported MongoDB Versions
==========================
MongoEngine is currently tested against MongoDB v2.4, v2.6, and v3.0. Future
versions should be supported as well, but aren't actively tested at the moment.
Make sure to open an issue or submit a pull request if you experience any
problems with MongoDB v3.2+.
MongoEngine is currently tested against MongoDB v3.4 and v3.6. Future versions
should be supported as well, but aren't actively tested at the moment. Make
sure to open an issue or submit a pull request if you experience any problems
with MongoDB version > 3.6.

Installation
============
We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
`pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``.
You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
and thus you can use ``easy_install -U mongoengine``. Otherwise, you can download the
source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
setup.py install``.
and thus you can use ``easy_install -U mongoengine``. Another option is
`pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine``
to both create the virtual environment and install the package. Otherwise, you can
download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and
run ``python setup.py install``.

Dependencies
============
All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_.
At the very least, you'll need these two packages to use MongoEngine:

- pymongo>=2.7.1
- pymongo>=3.4
- six>=1.10.0

If you utilize a ``DateTimeField``, you might also use a more flexible date parser:
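Taken together, the installation and dependency notes above amount to a short quickstart. A minimal sketch (the database name and model are illustrative; assumes a local ``mongod`` and ``pip install mongoengine``, which pulls in ``pymongo`` and ``six``):

from mongoengine import Document, StringField, connect

connect("quickstart_db")  # illustrative database name; defaults to localhost:27017

class Greeting(Document):
    message = StringField(required=True)

Greeting(message="hello").save()         # insert one document
print(Greeting.objects.first().message)  # query it back -> "hello"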
207 benchmark.py (deleted)
@@ -1,207 +0,0 @@
#!/usr/bin/env python

"""
Simple benchmark comparing PyMongo and MongoEngine.

Sample run on a mid 2015 MacBook Pro (commit b282511):

Benchmarking...
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
2.58979988098
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo write_concern={"w": 0}
1.26657605171
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
8.4351580143
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries without continual assign - MongoEngine
7.20191693306
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True
6.31104588509
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True
6.07083487511
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False
5.97704291344
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False
5.9111430645
"""

import timeit


def main():
    print("Benchmarking...")

    setup = """
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('timeit_test')
"""

    stmt = """
from pymongo import MongoClient
connection = MongoClient()

db = connection.timeit_test
noddy = db.noddy

for i in range(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']['key' + str(j)] = 'value ' + str(j)

    noddy.save(example)

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - Pymongo""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
from pymongo import MongoClient
from pymongo.write_concern import WriteConcern
connection = MongoClient()

db = connection.get_database('timeit_test', write_concern=WriteConcern(w=0))
noddy = db.noddy

for i in range(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.save(example)

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    setup = """
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('timeit_test')
connection.close()

from mongoengine import Document, DictField, connect
connect('timeit_test')

class Noddy(Document):
    fields = DictField()
"""

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - MongoEngine""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    fields = {}
    for j in range(20):
        fields["key"+str(j)] = "value "+str(j)
    noddy.fields = fields
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries without continual assign - MongoEngine""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(write_concern={"w": 0}, cascade=True)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(write_concern={"w": 0}, validate=False, cascade=True)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(validate=False, write_concern={"w": 0})

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))


if __name__ == "__main__":
    main()
142 benchmarks/test_basic_doc_ops.py (new file)
@@ -0,0 +1,142 @@
from timeit import repeat

import mongoengine
from mongoengine import (
    BooleanField,
    Document,
    EmailField,
    EmbeddedDocument,
    EmbeddedDocumentField,
    IntField,
    ListField,
    StringField,
)

mongoengine.connect(db="mongoengine_benchmark_test")


def timeit(f, n=10000):
    return min(repeat(f, repeat=3, number=n)) / float(n)


def test_basic():
    class Book(Document):
        name = StringField()
        pages = IntField()
        tags = ListField(StringField())
        is_published = BooleanField()
        author_email = EmailField()

    Book.drop_collection()

    def init_book():
        return Book(
            name="Always be closing",
            pages=100,
            tags=["self-help", "sales"],
            is_published=True,
            author_email="alec@example.com",
        )

    print("Doc initialization: %.3fus" % (timeit(init_book, 1000) * 10 ** 6))

    b = init_book()
    print("Doc getattr: %.3fus" % (timeit(lambda: b.name, 10000) * 10 ** 6))

    print(
        "Doc setattr: %.3fus"
        % (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6)
    )

    print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10 ** 6))

    print("Doc validation: %.3fus" % (timeit(b.validate, 1000) * 10 ** 6))

    def save_book():
        b._mark_as_changed("name")
        b._mark_as_changed("tags")
        b.save()

    print("Save to database: %.3fus" % (timeit(save_book, 100) * 10 ** 6))

    son = b.to_mongo()
    print(
        "Load from SON: %.3fus" % (timeit(lambda: Book._from_son(son), 1000) * 10 ** 6)
    )

    print(
        "Load from database: %.3fus" % (timeit(lambda: Book.objects[0], 100) * 10 ** 6)
    )

    def create_and_delete_book():
        b = init_book()
        b.save()
        b.delete()

    print(
        "Init + save to database + delete: %.3fms"
        % (timeit(create_and_delete_book, 10) * 10 ** 3)
    )


def test_big_doc():
    class Contact(EmbeddedDocument):
        name = StringField()
        title = StringField()
        address = StringField()

    class Company(Document):
        name = StringField()
        contacts = ListField(EmbeddedDocumentField(Contact))

    Company.drop_collection()

    def init_company():
        return Company(
            name="MongoDB, Inc.",
            contacts=[
                Contact(name="Contact %d" % x, title="CEO", address="Address %d" % x)
                for x in range(1000)
            ],
        )

    company = init_company()
    print("Big doc to mongo: %.3fms" % (timeit(company.to_mongo, 100) * 10 ** 3))

    print("Big doc validation: %.3fms" % (timeit(company.validate, 1000) * 10 ** 3))

    company.save()

    def save_company():
        company._mark_as_changed("name")
        company._mark_as_changed("contacts")
        company.save()

    print("Save to database: %.3fms" % (timeit(save_company, 100) * 10 ** 3))

    son = company.to_mongo()
    print(
        "Load from SON: %.3fms"
        % (timeit(lambda: Company._from_son(son), 100) * 10 ** 3)
    )

    print(
        "Load from database: %.3fms"
        % (timeit(lambda: Company.objects[0], 100) * 10 ** 3)
    )

    def create_and_delete_company():
        c = init_company()
        c.save()
        c.delete()

    print(
        "Init + save to database + delete: %.3fms"
        % (timeit(create_and_delete_company, 10) * 10 ** 3)
    )


if __name__ == "__main__":
    test_basic()
    print("-" * 100)
    test_big_doc()
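The ``timeit`` helper above takes the minimum over three repeats and divides by the iteration count; the fastest of several runs is the least noisy estimate of per-call cost. The same idiom in isolation (the helper name here is illustrative):

from timeit import repeat

def per_call_seconds(f, n=10000):
    # Best total time across 3 repeats, normalized to seconds per call.
    return min(repeat(f, repeat=3, number=n)) / float(n)

print("%.3fus" % (per_call_seconds(lambda: sum(range(100))) * 10 ** 6))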
158 benchmarks/test_inserts.py (new file)
@@ -0,0 +1,158 @@
import timeit


def main():
    setup = """
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('mongoengine_benchmark_test')
"""

    stmt = """
from pymongo import MongoClient
connection = MongoClient()

db = connection.mongoengine_benchmark_test
noddy = db.noddy

for i in range(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.insert_one(example)

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("PyMongo: Creating 10000 dictionaries.")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print("{}s".format(t.timeit(1)))

    stmt = """
from pymongo import MongoClient, WriteConcern
connection = MongoClient()

db = connection.mongoengine_benchmark_test
noddy = db.noddy.with_options(write_concern=WriteConcern(w=0))

for i in range(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.insert_one(example)

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).')
    t = timeit.Timer(stmt=stmt, setup=setup)
    print("{}s".format(t.timeit(1)))

    setup = """
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('mongoengine_benchmark_test')
connection.close()

from mongoengine import Document, DictField, connect
connect("mongoengine_benchmark_test")

class Noddy(Document):
    fields = DictField()
"""

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("MongoEngine: Creating 10000 dictionaries.")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print("{}s".format(t.timeit(1)))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    fields = {}
    for j in range(20):
        fields["key"+str(j)] = "value "+str(j)
    noddy.fields = fields
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("MongoEngine: Creating 10000 dictionaries (using a single field assignment).")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print("{}s".format(t.timeit(1)))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(write_concern={"w": 0})

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).')
    t = timeit.Timer(stmt=stmt, setup=setup)
    print("{}s".format(t.timeit(1)))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(write_concern={"w": 0}, validate=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print(
        'MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).'
    )
    t = timeit.Timer(stmt=stmt, setup=setup)
    print("{}s".format(t.timeit(1)))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print(
        'MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).'
    )
    t = timeit.Timer(stmt=stmt, setup=setup)
    print("{}s".format(t.timeit(1)))


if __name__ == "__main__":
    main()
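Several benchmarks above pass ``write_concern={"w": 0}``: ``w=0`` requests unacknowledged writes, so the client does not wait for the server's reply; this is faster but offers no durability guarantee. A standalone PyMongo illustration (database and collection names are illustrative; assumes a local ``mongod``):

from pymongo import MongoClient, WriteConcern

client = MongoClient()
# with_options() returns a clone of the collection configured with the given
# write concern; w=0 sends the insert without waiting for acknowledgement.
unacked = client.demo_db.demo.with_options(write_concern=WriteConcern(w=0))
unacked.insert_one({"fire_and_forget": True})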
docs/apireference.rst
@@ -13,6 +13,7 @@ Documents

.. autoclass:: mongoengine.Document
    :members:
    :inherited-members:

.. attribute:: objects

@@ -21,15 +22,18 @@ Documents

.. autoclass:: mongoengine.EmbeddedDocument
    :members:
    :inherited-members:

.. autoclass:: mongoengine.DynamicDocument
    :members:
    :inherited-members:

.. autoclass:: mongoengine.DynamicEmbeddedDocument
    :members:
    :inherited-members:

.. autoclass:: mongoengine.document.MapReduceDocument
    :members:

.. autoclass:: mongoengine.ValidationError
    :members:

@@ -87,7 +91,9 @@ Fields
.. autoclass:: mongoengine.fields.DictField
.. autoclass:: mongoengine.fields.MapField
.. autoclass:: mongoengine.fields.ReferenceField
.. autoclass:: mongoengine.fields.LazyReferenceField
.. autoclass:: mongoengine.fields.GenericReferenceField
.. autoclass:: mongoengine.fields.GenericLazyReferenceField
.. autoclass:: mongoengine.fields.CachedReferenceField
.. autoclass:: mongoengine.fields.BinaryField
.. autoclass:: mongoengine.fields.FileField
docs/changelog.rst
@@ -1,3 +1,4 @@
=========
Changelog
=========
@@ -5,81 +6,215 @@ Changelog
Development
===========
- (Fill this out as you fix issues and develop your features).
- BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. #2112 (see the migration sketch below)
    - Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``.
    - Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``.
    - This change also renames the private ``QuerySet._initial_query`` attribute to ``_cls_query``.
- BREAKING CHANGE: Removed the deprecated ``format`` param from ``QuerySet.explain``. #2113
- BREAKING CHANGE: Renamed ``MongoEngineConnectionError`` to ``ConnectionFailure``. #2111
    - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it.
- BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103
    - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required.
- Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125
- ``ListField`` now accepts an optional ``max_length`` parameter. #2110
- The codebase is now formatted using ``black``. #2109
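The ``read_preference``/``class_check`` entries above translate into a migration like the following (a minimal sketch: the model, filter value, and database name are placeholders, and a running local ``mongod`` is assumed):

from mongoengine import Document, StringField, connect
from pymongo import ReadPreference

connect("migration_demo")  # placeholder database

class Doc(Document):
    foo = StringField()

# Before (no longer supported as filter kwargs):
#   Doc.objects(foo="bar", read_preference=ReadPreference.SECONDARY_PREFERRED)
#   Doc.objects(foo="bar", class_check=False)

# After: chain the dedicated QuerySet methods instead.
qs = Doc.objects(foo="bar").read_preference(ReadPreference.SECONDARY_PREFERRED)
qs = Doc.objects(foo="bar").clear_cls_query()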
Changes in 0.18.2
=================
- Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the ``SequenceField``. #2097
- Various code clarity and documentation improvements.

Changes in 0.18.1
=================
- Fix a bug introduced in 0.18.0 which was causing ``Document.save`` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082
- Add Python 3.7 to Travis CI. #2058

Changes in 0.18.0
=================
- Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2.
- MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6. #2017 #2066
- Improve performance by avoiding a call to ``to_mongo`` in ``Document.save``. #2049
- Connection/disconnection improvements (see the sketch below):
    - Expose ``mongoengine.connection.disconnect`` and ``mongoengine.connection.disconnect_all``.
    - Fix disconnecting. #566 #1599 #605 #607 #1213 #565
    - Improve documentation of ``connect``/``disconnect``.
    - Fix issue when using multiple connections to the same mongo with different credentials. #2047
- ``connect`` fails immediately when db name contains invalid characters. #2031 #1718
- Fix the default write concern of ``Document.save`` that was overwriting the connection write concern. #568
- Fix querying on ``List(EmbeddedDocument)`` subclasses fields. #1961 #1492
- Fix querying on ``(Generic)EmbeddedDocument`` subclasses fields. #475
- Fix ``QuerySet.aggregate`` so that it takes limit and skip value into account. #2029
- Generate unique indices for ``SortedListField`` and ``EmbeddedDocumentListFields``. #2020
- BREAKING CHANGE: Changed the behavior of a custom field validator (i.e. ``validation`` parameter of a ``Field``). It is now expected to raise a ``ValidationError`` instead of returning ``True``/``False``. #2050
- BREAKING CHANGES (associated with connection/disconnection fixes):
    - Calling ``connect`` 2 times with the same alias and different parameter will raise an error (should call ``disconnect`` first).
    - ``disconnect`` now clears ``mongoengine.connection._connection_settings``.
    - ``disconnect`` now clears the cached attribute ``Document._collection``.
- BREAKING CHANGE: ``EmbeddedDocument.save`` & ``.reload`` no longer exist. #1552
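The connection/disconnection items above come down to an explicit lifecycle. A minimal sketch (alias and database names are illustrative; assumes a running local ``mongod``):

from mongoengine import connect
from mongoengine.connection import disconnect

connect("demo_db", alias="default")
# ... use documents bound to the "default" alias ...
disconnect(alias="default")  # clears cached connection settings and collections

# As of 0.18.0, reconnecting with different parameters requires the
# disconnect() above; a second connect() on the same alias raises instead
# of silently reusing the old connection.
connect("demo_db_2", alias="default")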
Changes in 0.17.0
|
||||
=================
|
||||
- POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (``_cls``, ``_id``) when using ``QuerySet.as_pymongo``. #1976
|
||||
- Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time. #1995
|
||||
- DEPRECATION: ``EmbeddedDocument.save`` & ``.reload`` are marked as deprecated and will be removed in a next version of MongoEngine. #1552
|
||||
- Fix ``QuerySet.only`` working improperly after using ``QuerySet.count`` of the same instance of a ``QuerySet``.
|
||||
- Fix ``batch_size`` that was not copied when cloning a ``QuerySet`` object. #2011
|
||||
- Fix ``InvalidStringData`` error when using ``modify`` on a ``BinaryField``. #1127
|
||||
- Fix test suite and CI to support MongoDB v3.4. #1445
|
||||
- Fix reference fields querying the database on each access if value contains orphan DBRefs.
|
||||
|
||||
Changes in 0.16.3
|
||||
=================
|
||||
- Fix ``$push`` with the ``$position`` operator not working with lists in embedded documents. #1965
|
||||
|
||||
Changes in 0.16.2
|
||||
=================
|
||||
- Fix ``Document.save`` that fails when called with ``write_concern=None`` (regression of 0.16.1). #1958
|
||||
|
||||
Changes in 0.16.1
|
||||
=================
|
||||
- Fix ``_cls`` that is not set properly in the ``Document`` constructor (regression). #1950
|
||||
- Fix a bug in the ``_delta`` method - update of a ``ListField`` depends on an unrelated dynamic field update. #1733
|
||||
- Remove PyMongo's deprecated ``Collection.save`` method and use ``Collection.insert_one`` instead. #1899
|
||||
|
||||
Changes in 0.16.0
|
||||
=================
|
||||
- POTENTIAL BREAKING CHANGES:
|
||||
- ``EmbeddedDocumentField`` will no longer accept references to Document classes in its constructor. #1661
|
||||
- Get rid of the ``basecls`` parameter from the ``DictField`` constructor (dead code). #1876
|
||||
- Default value of the ``ComplexDateTime`` field is now ``None`` (and no longer the current datetime). #1368
|
||||
- Fix an unhashable ``TypeError`` when referencing a ``Document`` with a compound key in an ``EmbeddedDocument``. #1685
|
||||
- Fix a bug where an ``EmbeddedDocument`` with the same id as its parent would not be tracked for changes. #1768
|
||||
- Fix the fact that a bulk ``QuerySet.insert`` was not setting primary keys of inserted document instances. #1919
|
||||
- Fix a bug when referencing an abstract class in a ``ReferenceField``. #1920
|
||||
- Allow modifications to the document made in ``pre_save_post_validation`` to be taken into account. #1202
|
||||
- Replace MongoDB v2.4 tests in Travis CI with MongoDB v3.2. #1903
|
||||
- Fix side effects of using ``QuerySet.no_dereference`` on other documents. #1677
|
||||
- Fix ``TypeError`` when using lazy Django translation objects as translated choices. #1879
|
||||
- Improve Python 2-3 codebase compatibility. #1889
|
||||
- Fix support for changing the default value of the ``ComplexDateTime`` field. #1368
|
||||
- Improve error message in case an ``EmbeddedDocumentListField`` receives an ``EmbeddedDocument`` instance instead of a list. #1877
|
||||
- Fix the ``inc`` and ``dec`` operators for the ``DecimalField``. #1517 #1320
|
||||
- Ignore ``killcursors`` queries in ``query_counter`` context manager. #1869
|
||||
- Fix the fact that ``query_counter`` was modifying the initial profiling level in case it was != 0. #1870
|
||||
- Repair the ``no_sub_classes`` context manager + fix the fact that it was swallowing exceptions. #1865
|
||||
- Fix index creation error that was swallowed by ``hasattr`` under Python 2. #1688
|
||||
- ``QuerySet.limit`` function behaviour: Passing 0 as parameter will return all the documents in the cursor. #1611
|
||||
- Bulk insert updates the IDs of the input documents instances. #1919
|
||||
- Fix a harmless bug related to ``GenericReferenceField`` where modifications in the generic-referenced document were tracked in the parent. #1934
|
||||
- Improve validation of the ``BinaryField``. #273
|
||||
- Implement lazy regex compiling in Field classes to improve ``import mongoengine`` performance. #1806
|
||||
- Update ``GridFSProxy.__str__`` so that it would always print both the filename and grid_id. #710
|
||||
- Add ``__repr__`` to ``Q`` and ``QCombination`` classes. #1843
|
||||
- Fix bug in the ``BaseList.__iter__`` operator (was occuring when modifying a BaseList while iterating over it). #1676
|
||||
- Add a ``DateField``. #513
|
||||
- Various improvements to the documentation.
|
||||
- Various code quality improvements.
|
||||
|
||||
Changes in 0.15.3
|
||||
=================
|
||||
- ``Queryset.update/update_one`` methods now return an ``UpdateResult`` when ``full_result=True`` is provided and no longer a dict. #1491
|
||||
- Improve ``LazyReferenceField`` and ``GenericLazyReferenceField`` with nested fields. #1704
|
||||
- Fix the subfield resolve error in ``generic_emdedded_document`` query. #1651 #1652
|
||||
- Use each modifier only with ``$position``. #1673 #1675
|
||||
- Fix validation errors in the ``GenericEmbeddedDocumentField``. #1067
|
||||
- Update cached fields when a ``fields`` argument is given. #1712
|
||||
- Add a ``db`` parameter to ``register_connection`` for compatibility with ``connect``.
|
||||
- Use PyMongo v3.x's ``insert_one`` and ``insert_many`` in ``Document.insert``. #1491
|
||||
- Use PyMongo v3.x's ``update_one`` and ``update_many`` in ``Document.update`` and ``QuerySet.update``. #1491
|
||||
- Fix how ``reload(fields)`` affects changed fields. #1371
|
||||
- Fix a bug where the read-only access to the database fails when trying to create indexes. #1338
|
||||
|
||||
Changes in 0.15.0
|
||||
=================
|
||||
- Add ``LazyReferenceField`` and ``GenericLazyReferenceField``. #1230
|
||||
|
||||
Changes in 0.14.1
|
||||
=================
|
||||
- Remove ``SemiStrictDict`` and start using a regular dict for ``BaseDocument._data``. #1630
|
||||
- Add support for the ``$position`` param in the ``$push`` operator. #1566
|
||||
- Fix ``DateTimeField`` interpreting an empty string as today. #1533
|
||||
- Add a missing ``__ne__`` method to the ``GridFSProxy`` class. #1632
|
||||
- Fix ``BaseQuerySet._fields_to_db_fields``. #1553
|
||||
|
||||
Changes in 0.14.0
|
||||
=================
|
||||
- BREAKING CHANGE: Remove the ``coerce_types`` param from ``QuerySet.as_pymongo``. #1549
|
||||
- POTENTIAL BREAKING CHANGE: Make ``EmbeddedDocument`` not hashable by default. #1528
|
||||
- Improve code quality. #1531, #1540, #1541, #1547
|
||||
|
||||
Changes in 0.13.0
|
||||
=================
|
||||
- POTENTIAL BREAKING CHANGE: Added Unicode support to the `EmailField`, see
|
||||
docs/upgrade.rst for details.
|
||||
- POTENTIAL BREAKING CHANGE: Added Unicode support to the ``EmailField``, see docs/upgrade.rst for details.
|
||||
|
||||
Changes in 0.12.0
|
||||
=================
|
||||
- POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476
|
||||
- POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476
|
||||
- Fixed the way `Document.objects.create` works with duplicate IDs #1485
|
||||
- Fixed connecting to a replica set with PyMongo 2.x #1436
|
||||
- Fixed using sets in field choices #1481
|
||||
- Fixed deleting items from a `ListField` #1318
|
||||
- Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237
|
||||
- Fixed behavior of a `dec` update operator #1450
|
||||
- Added a `rename` update operator #1454
|
||||
- Added validation for the `db_field` parameter #1448
|
||||
- Fixed the error message displayed when querying an `EmbeddedDocumentField` by an invalid value #1440
|
||||
- Fixed the error message displayed when validating unicode URLs #1486
|
||||
- Raise an error when trying to save an abstract document #1449
|
||||
- POTENTIAL BREAKING CHANGE: Fix ``limit``/``skip``/``hint``/``batch_size`` chaining. #1476
|
||||
- POTENTIAL BREAKING CHANGE: Change a public ``QuerySet.clone_into`` method to a private ``QuerySet._clone_into``. #1476
|
||||
- Fix the way ``Document.objects.create`` works with duplicate IDs. #1485
|
||||
- Fix connecting to a replica set with PyMongo 2.x. #1436
|
||||
- Fix using sets in field choices. #1481
|
||||
- Fix deleting items from a ``ListField``. #1318
|
||||
- Fix an obscure error message when filtering by ``field__in=non_iterable``. #1237
|
||||
- Fix behavior of a ``dec`` update operator. #1450
|
||||
- Add a ``rename`` update operator. #1454
|
||||
- Add validation for the ``db_field`` parameter. #1448
|
||||
- Fix the error message displayed when querying an ``EmbeddedDocumentField`` by an invalid value. #1440
|
||||
- Fix the error message displayed when validating Unicode URLs. #1486
|
||||
- Raise an error when trying to save an abstract document. #1449
|
||||
|
||||
Changes in 0.11.0
|
||||
=================
|
||||
- BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428
|
||||
- BREAKING CHANGE: Dropped Python 2.6 support. #1428
|
||||
- BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428
|
||||
- BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334
|
||||
- Fixed absent rounding for DecimalField when `force_string` is set. #1103
|
||||
- BREAKING CHANGE: Rename ``ConnectionError`` to ``MongoEngineConnectionError`` since the former is a built-in exception name in Python v3.x. #1428
|
||||
- BREAKING CHANGE: Drop Python v2.6 support. #1428
|
||||
- BREAKING CHANGE: ``from mongoengine.base import ErrorClass`` won't work anymore for any error from ``mongoengine.errors`` (e.g. ``ValidationError``). Use ``from mongoengine.errors import ErrorClass instead``. #1428
|
||||
- BREAKING CHANGE: Accessing a broken reference will raise a ``DoesNotExist`` error. In the past it used to return ``None``. #1334
|
||||
- Fix absent rounding for the ``DecimalField`` when ``force_string`` is set. #1103
|
||||
|
||||
Changes in 0.10.8
|
||||
=================
|
||||
- Added support for QuerySet.batch_size (#1426)
|
||||
- Fixed query set iteration within iteration #1427
|
||||
- Fixed an issue where specifying a MongoDB URI host would override more information than it should #1421
|
||||
- Added ability to filter the generic reference field by ObjectId and DBRef #1425
|
||||
- Fixed delete cascade for models with a custom primary key field #1247
|
||||
- Added ability to specify an authentication mechanism (e.g. X.509) #1333
|
||||
- Added support for falsey primary keys (e.g. doc.pk = 0) #1354
|
||||
- Fixed QuerySet#sum/average for fields w/ explicit db_field #1417
|
||||
- Fixed filtering by embedded_doc=None #1422
|
||||
- Added support for cursor.comment #1420
|
||||
- Fixed doc.get_<field>_display #1419
|
||||
- Fixed __repr__ method of the StrictDict #1424
|
||||
- Added a deprecation warning for Python 2.6
|
||||
- Add support for ``QuerySet.batch_size``. (#1426)
|
||||
- Fix a query set iteration within an iteration. #1427
|
||||
- Fix an issue where specifying a MongoDB URI host would override more information than it should. #1421
|
||||
- Add an ability to filter the ``GenericReferenceField`` by an ``ObjectId`` and a ``DBRef``. #1425
|
||||
- Fix cascading deletes for models with a custom primary key field. #1247
|
||||
- Add ability to specify an authentication mechanism (e.g. X.509). #1333
|
||||
- Add support for falsy primary keys (e.g. ``doc.pk = 0``). #1354
|
||||
- Fix ``QuerySet.sum/average`` for fields w/ an explicit ``db_field``. #1417
|
||||
- Fix filtering by ``embedded_doc=None``. #1422
|
||||
- Add support for ``Cursor.comment``. #1420
|
||||
- Fix ``doc.get_<field>_display`` methods. #1419
|
||||
- Fix the ``__repr__`` method of the ``StrictDict``. #1424
|
||||
- Add a deprecation warning for Python v2.6.
|
||||
|
||||
Changes in 0.10.7
|
||||
=================
|
||||
- Dropped Python 3.2 support #1390
|
||||
- Fixed the bug where dynamic doc has index inside a dict field #1278
|
||||
- Fixed: ListField minus index assignment does not work #1128
|
||||
- Fixed cascade delete mixing among collections #1224
|
||||
- Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls #1206
|
||||
- Raise `OperationError` when trying to do a `drop_collection` on a document with no collection set.
|
||||
- count on ListField of EmbeddedDocumentField fails. #1187
|
||||
- Fixed long fields stored as int32 in Python 3. #1253
|
||||
- MapField now handles unicodes keys correctly. #1267
|
||||
- ListField now handles negative indices correctly. #1270
|
||||
- Fixed AttributeError when initializing EmbeddedDocument with positional args. #681
|
||||
- Fixed no_cursor_timeout error with pymongo 3.0+ #1304
|
||||
- Replaced map-reduce based QuerySet.sum/average with aggregation-based implementations #1336
|
||||
- Fixed support for `__` to escape field names that match operators names in `update` #1351
|
||||
- Fixed BaseDocument#_mark_as_changed #1369
|
||||
- Added support for pickling QuerySet instances. #1397
|
||||
- Fixed connecting to a list of hosts #1389
|
||||
- Fixed a bug where accessing broken references wouldn't raise a DoesNotExist error #1334
|
||||
- Fixed not being able to specify use_db_field=False on ListField(EmbeddedDocumentField) instances #1218
|
||||
- Improvements to the dictionary fields docs #1383
|
||||
- Drop Python 3.2 support #1390
|
||||
- Fix a bug where a dynamic doc has an index inside a dict field. #1278
|
||||
- Fix: ``ListField`` minus index assignment does not work. #1128
|
||||
- Fix cascade delete mixing among collections. #1224
|
||||
- Add ``signal_kwargs`` argument to ``Document.save``, ``Document.delete`` and ``BaseQuerySet.insert`` to be passed to signals calls. #1206
|
||||
- Raise ``OperationError`` when trying to do a ``drop_collection`` on a document with no collection set.
|
||||
- Fix a bug where a count on ``ListField`` of ``EmbeddedDocumentField`` fails. #1187
|
||||
- Fix ``LongField`` values stored as int32 in Python 3. #1253
|
||||
- ``MapField`` now handles unicode keys correctly. #1267
|
||||
- ``ListField`` now handles negative indices correctly. #1270
|
||||
- Fix an ``AttributeError`` when initializing an ``EmbeddedDocument`` with positional args. #681
|
||||
- Fix a ``no_cursor_timeout`` error with PyMongo v3.x. #1304
|
||||
- Replace map-reduce based ``QuerySet.sum/average`` with aggregation-based implementations. #1336
|
||||
- Fix support for ``__`` to escape field names that match operators' names in ``update``. #1351
|
||||
- Fix ``BaseDocument._mark_as_changed``. #1369
|
||||
- Add support for pickling ``QuerySet`` instances. #1397
|
||||
- Fix connecting to a list of hosts. #1389
|
||||
- Fix a bug where accessing broken references wouldn't raise a ``DoesNotExist`` error. #1334
|
||||
- Fix not being able to specify ``use_db_field=False`` on ``ListField(EmbeddedDocumentField)`` instances. #1218
|
||||
- Improvements to the dictionary field's docs. #1383
|
||||
|
||||
Changes in 0.10.6
|
||||
=================
|
||||
- Add support for mocking MongoEngine based on mongomock. #1151
|
||||
- Fixed not being able to run tests on Windows. #1153
|
||||
- Fix not being able to run tests on Windows. #1153
|
||||
- Allow creation of sparse compound indexes. #1114
|
||||
- count on ListField of EmbeddedDocumentField fails. #1187
|
||||
|
||||
Changes in 0.10.5
|
||||
=================
|
||||
@@ -87,12 +222,12 @@ Changes in 0.10.5
|
||||
|
||||
Changes in 0.10.4
|
||||
=================
|
||||
- SaveConditionError is now importable from the top level package. #1165
|
||||
- upsert_one method added. #1157
|
||||
- ``SaveConditionError`` is now importable from the top level package. #1165
|
||||
- Add a ``QuerySet.upsert_one`` method. #1157
|
||||
|
||||
Changes in 0.10.3
|
||||
=================
|
||||
- Fix `read_preference` (it had chaining issues with PyMongo 2.x and it didn't work at all with PyMongo 3.x) #1042
|
||||
- Fix ``read_preference`` (it had chaining issues with PyMongo v2.x and it didn't work at all with PyMongo v3.x). #1042
|
||||
|
||||
Changes in 0.10.2
|
||||
=================
|
||||
@@ -102,16 +237,16 @@ Changes in 0.10.2
|
||||
|
||||
Changes in 0.10.1
|
||||
=================
|
||||
- Fix infinite recursion with CASCADE delete rules under specific conditions. #1046
|
||||
- Fix CachedReferenceField bug when loading cached docs as DBRef but failing to save them. #1047
|
||||
- Fix ignored chained options #842
|
||||
- Document save's save_condition error raises `SaveConditionError` exception #1070
|
||||
- Fix Document.reload for DynamicDocument. #1050
|
||||
- StrictDict & SemiStrictDict are shadowed at init time. #1105
|
||||
- Fix ListField minus index assignment does not work. #1119
|
||||
- Remove code that marks field as changed when the field has default but not existed in database #1126
|
||||
- Remove test dependencies (nose and rednose) from install dependencies list. #1079
|
||||
- Recursively build query when using elemMatch operator. #1130
|
||||
- Fix infinite recursion with cascade delete rules under specific conditions. #1046
|
||||
- Fix ``CachedReferenceField`` bug when loading cached docs as ``DBRef`` but failing to save them. #1047
|
||||
- Fix ignored chained options. #842
|
||||
- ``Document.save``'s ``save_condition`` error raises a ``SaveConditionError`` exception. #1070
|
||||
- Fix ``Document.reload`` for the ``DynamicDocument``. #1050
|
||||
- ``StrictDict`` & ``SemiStrictDict`` are shadowed at init time. #1105
|
||||
- Fix ``ListField`` negative index assignment not working. #1119
|
||||
- Remove code that marks a field as changed when the field has a default value but does not exist in the database. #1126
|
||||
- Remove test dependencies (nose and rednose) from install dependencies. #1079
|
||||
- Recursively build a query when using the ``elemMatch`` operator. #1130
|
||||
- Fix instance back references for lists of embedded documents. #1131
|
||||
|
||||
Changes in 0.10.0
|
||||
@@ -122,7 +257,7 @@ Changes in 0.10.0
|
||||
- Removed get_or_create() deprecated since 0.8.0. #300
|
||||
- Improve Document._created status when switching collection and db #1020
|
||||
- Queryset update doesn't go through field validation #453
|
||||
- Added support for specifying authentication source as option `authSource` in URI. #967
|
||||
- Added support for specifying authentication source as option ``authSource`` in URI. #967
|
||||
- Fixed mark_as_changed to handle changes in higher/lower level fields. #927
|
||||
- ListField of embedded docs doesn't set the _instance attribute when iterating over it #914
|
||||
- Support += and *= for ListField #595
|
||||
@@ -138,7 +273,7 @@ Changes in 0.10.0
|
||||
- Fixes an internal _id handling issue. #961
|
||||
- Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652
|
||||
- Capped collection size must be a multiple of 256. #1011
|
||||
- Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods.
|
||||
- Added ``BaseQuerySet.aggregate_sum`` and ``BaseQuerySet.aggregate_average`` methods.
|
||||
- Fix for delete with write_concern {'w': 0}. #1008
|
||||
- Allow dynamic lookup for more than two parts. #882
|
||||
- Added support for min_distance on geo queries. #831
|
||||
@@ -147,10 +282,10 @@ Changes in 0.10.0
|
||||
Changes in 0.9.0
|
||||
================
|
||||
- Update FileField when creating a new file #714
|
||||
- Added `EmbeddedDocumentListField` for Lists of Embedded Documents. #826
|
||||
- Added ``EmbeddedDocumentListField`` for Lists of Embedded Documents. #826
|
||||
- ComplexDateTimeField should fall back to None when null=True #864
|
||||
- Added support for the $min and $max field update operators #863
|
||||
- `BaseDict` does not follow `setdefault` #866
|
||||
- ``BaseDict`` does not follow ``setdefault`` #866
|
||||
- Add support for $type operator #766
|
||||
- Fix tests for pymongo 2.8+ #877
|
||||
- No module named 'django.utils.importlib' (Django dev) #872
|
||||
@@ -171,13 +306,13 @@ Changes in 0.9.0
|
||||
- Stop ensure_indexes running on secondaries unless connection is through mongos #746
|
||||
- Not overriding default values when loading a subset of fields #399
|
||||
- Saving document doesn't create new fields in existing collection #620
|
||||
- Added `Queryset.aggregate` wrapper to aggregation framework #703
|
||||
- Added ``Queryset.aggregate`` wrapper to aggregation framework #703
|
||||
- Added support to show original model fields on to_json calls instead of db_field #697
|
||||
- Added Queryset.search_text for text index searches #700
|
||||
- Fixed tests for Django 1.7 #696
|
||||
- Follow ReferenceFields in EmbeddedDocuments with select_related #690
|
||||
- Added preliminary support for text indexes #680
|
||||
- Added `elemMatch` operator as well - `match` is too obscure #653
|
||||
- Added ``elemMatch`` operator as well - ``match`` is too obscure #653
|
||||
- Added support for progressive JPEG #486 #548
|
||||
- Allow strings to be used in index creation #675
|
||||
- Fixed EmbeddedDoc weakref proxy issue #592
|
||||
@@ -213,11 +348,11 @@ Changes in 0.9.0
|
||||
- Increase email field length to accommodate new TLDs #726
|
||||
- index_cls is ignored when deciding to set _cls as index prefix #733
|
||||
- Make 'db' argument to connection optional #737
|
||||
- Allow atomic update for the entire `DictField` #742
|
||||
- Allow atomic update for the entire ``DictField`` #742
|
||||
- Added MultiPointField, MultiLineField, MultiPolygonField
|
||||
- Fix multiple connections aliases being rewritten #748
|
||||
- Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791
|
||||
- Make `in_bulk()` respect `no_dereference()` #775
|
||||
- Make ``in_bulk()`` respect ``no_dereference()`` #775
|
||||
- Handle None from model __str__; Fixes #753 #754
|
||||
- _get_changed_fields fix for embedded documents with id field. #925
|
||||
|
||||
@@ -271,18 +406,18 @@ Changes in 0.8.4
|
||||
|
||||
Changes in 0.8.3
|
||||
================
|
||||
- Fixed EmbeddedDocuments with `id` also storing `_id` (#402)
|
||||
- Fixed EmbeddedDocuments with ``id`` also storing ``_id`` (#402)
|
||||
- Added get_proxy_object helper to filefields (#391)
|
||||
- Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365)
|
||||
- Fixed sum and average mapreduce dot notation support (#375, #376, #393)
|
||||
- Fixed as_pymongo to return the id (#386)
|
||||
- Document.select_related() now respects `db_alias` (#377)
|
||||
- Document.select_related() now respects ``db_alias`` (#377)
|
||||
- Reload uses shard_key if applicable (#384)
|
||||
- Dynamic fields are ordered based on creation and stored in _fields_ordered (#396)
|
||||
|
||||
**Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3
|
||||
|
||||
- Fixed pickling dynamic documents `_dynamic_fields` (#387)
|
||||
- Fixed pickling dynamic documents ``_dynamic_fields`` (#387)
|
||||
- Fixed ListField setslice and delslice dirty tracking (#390)
|
||||
- Added Django 1.5 PY3 support (#392)
|
||||
- Added match ($elemMatch) support for EmbeddedDocuments (#379)
|
||||
@@ -323,7 +458,7 @@ Changes in 0.8.0
|
||||
================
|
||||
- Fixed querying ReferenceField custom_id (#317)
|
||||
- Fixed pickle issues with collections (#316)
|
||||
- Added `get_next_value` preview for SequenceFields (#319)
|
||||
- Added ``get_next_value`` preview for SequenceFields (#319)
|
||||
- Added no_sub_classes context manager and queryset helper (#312)
|
||||
- Querysets now utilise a local cache
|
||||
- Changed __len__ behaviour in the queryset (#247, #311)
|
||||
@@ -352,7 +487,7 @@ Changes in 0.8.0
|
||||
- Updated connection to use MongoClient (#262, #274)
|
||||
- Fixed db_alias and inherited Documents (#143)
|
||||
- Documentation update for document errors (#124)
|
||||
- Deprecated `get_or_create` (#35)
|
||||
- Deprecated ``get_or_create`` (#35)
|
||||
- Inheritable objects created by upsert now contain _cls (#118)
|
||||
- Added support for creating documents with embedded documents in a single operation (#6)
|
||||
- Added to_json and from_json to Document (#1)
|
||||
@@ -473,7 +608,7 @@ Changes in 0.7.0
|
||||
- Fixed UnboundLocalError in composite index with pk field (#88)
|
||||
- Updated ReferenceFields to optionally store ObjectId strings;
  this will become the default in 0.8 (#89)
|
||||
- Added FutureWarning - save will default to `cascade=False` in 0.8
|
||||
- Added FutureWarning - save will default to ``cascade=False`` in 0.8
|
||||
- Added example of indexing embedded document fields (#75)
|
||||
- Fixed ImageField resizing when forcing size (#80)
|
||||
- Add flexibility for fields handling bad data (#78)
|
||||
@@ -569,7 +704,7 @@ Changes in 0.6.8
|
||||
================
|
||||
- Fixed FileField losing reference when no default set
|
||||
- Removed possible race condition from FileField (grid_file)
|
||||
- Added assignment to save, can now do: `b = MyDoc(**kwargs).save()`
|
||||
- Added assignment to save, can now do: ``b = MyDoc(**kwargs).save()``
|
||||
- Added support for pull operations on nested EmbeddedDocuments
|
||||
- Added support for choices with GenericReferenceFields
|
||||
- Added support for choices with GenericEmbeddedDocumentFields
|
||||
@@ -584,7 +719,7 @@ Changes in 0.6.7
|
||||
- Fixed indexing on '_id' or 'pk' or 'id'
|
||||
- Invalid data from the DB now raises an InvalidDocumentError
|
||||
- Cleaned up the Validation Error - docs and code
|
||||
- Added meta `auto_create_index` so you can disable index creation
|
||||
- Added meta ``auto_create_index`` so you can disable index creation
|
||||
- Added write concern options to inserts
|
||||
- Fixed typo in meta for index options
|
||||
- Bug fix: read preference is now passed correctly
|
||||
@@ -625,7 +760,6 @@ Changes in 0.6.1
|
||||
|
||||
Changes in 0.6
|
||||
==============
|
||||
|
||||
- Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
|
||||
- Added support for covered indexes when inheritance is off
|
||||
- No longer always upsert on save for items with a '_id'
|
||||
@@ -850,7 +984,6 @@ Changes in v0.1.3
|
||||
querying takes place
|
||||
- A few minor bugfixes
|
||||
|
||||
|
||||
Changes in v0.1.2
|
||||
=================
|
||||
- Query values may be processed before being used in queries
|
||||
@@ -859,7 +992,6 @@ Changes in v0.1.2
|
||||
- Added ``BooleanField``
|
||||
- Added ``Document.reload()`` method
|
||||
|
||||
|
||||
Changes in v0.1.1
|
||||
=================
|
||||
- Documents may now use capped collections
|
||||
|
||||
@@ -1,16 +1,19 @@
|
||||
from mongoengine import *
|
||||
|
||||
connect('tumblelog')
|
||||
connect("tumblelog")
|
||||
|
||||
|
||||
class Comment(EmbeddedDocument):
|
||||
content = StringField()
|
||||
name = StringField(max_length=120)
|
||||
|
||||
|
||||
class User(Document):
|
||||
email = StringField(required=True)
|
||||
first_name = StringField(max_length=50)
|
||||
last_name = StringField(max_length=50)
|
||||
|
||||
|
||||
class Post(Document):
|
||||
title = StringField(max_length=120, required=True)
|
||||
author = ReferenceField(User)
|
||||
@@ -18,54 +21,57 @@ class Post(Document):
|
||||
comments = ListField(EmbeddedDocumentField(Comment))
|
||||
|
||||
# bugfix
|
||||
meta = {'allow_inheritance': True}
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
|
||||
class TextPost(Post):
|
||||
content = StringField()
|
||||
|
||||
|
||||
class ImagePost(Post):
|
||||
image_path = StringField()
|
||||
|
||||
|
||||
class LinkPost(Post):
|
||||
link_url = StringField()
|
||||
|
||||
|
||||
Post.drop_collection()
|
||||
|
||||
john = User(email='jdoe@example.com', first_name='John', last_name='Doe')
|
||||
john = User(email="jdoe@example.com", first_name="John", last_name="Doe")
|
||||
john.save()
|
||||
|
||||
post1 = TextPost(title='Fun with MongoEngine', author=john)
|
||||
post1.content = 'Took a look at MongoEngine today, looks pretty cool.'
|
||||
post1.tags = ['mongodb', 'mongoengine']
|
||||
post1 = TextPost(title="Fun with MongoEngine", author=john)
|
||||
post1.content = "Took a look at MongoEngine today, looks pretty cool."
|
||||
post1.tags = ["mongodb", "mongoengine"]
|
||||
post1.save()
|
||||
|
||||
post2 = LinkPost(title='MongoEngine Documentation', author=john)
|
||||
post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs'
|
||||
post2.tags = ['mongoengine']
|
||||
post2 = LinkPost(title="MongoEngine Documentation", author=john)
|
||||
post2.link_url = "http://tractiondigital.com/labs/mongoengine/docs"
|
||||
post2.tags = ["mongoengine"]
|
||||
post2.save()
|
||||
|
||||
print 'ALL POSTS'
|
||||
print
|
||||
print("ALL POSTS")
|
||||
print()
|
||||
for post in Post.objects:
|
||||
print post.title
|
||||
#print '=' * post.title.count()
|
||||
print "=" * 20
|
||||
print(post.title)
|
||||
# print '=' * post.title.count()
|
||||
print("=" * 20)
|
||||
|
||||
if isinstance(post, TextPost):
|
||||
print post.content
|
||||
print(post.content)
|
||||
|
||||
if isinstance(post, LinkPost):
|
||||
print 'Link:', post.link_url
|
||||
print("Link:", post.link_url)
|
||||
|
||||
print
|
||||
print
|
||||
print()
|
||||
print()
|
||||
|
||||
print 'POSTS TAGGED \'MONGODB\''
|
||||
print
|
||||
for post in Post.objects(tags='mongodb'):
|
||||
print post.title
|
||||
print
|
||||
print("POSTS TAGGED 'MONGODB'")
|
||||
print()
|
||||
for post in Post.objects(tags="mongodb"):
|
||||
print(post.title)
|
||||
print()
|
||||
|
||||
num_posts = Post.objects(tags='mongodb').count()
|
||||
print 'Found %d posts with tag "mongodb"' % num_posts
|
||||
num_posts = Post.objects(tags="mongodb").count()
|
||||
print('Found %d posts with tag "mongodb"' % num_posts)
|
||||
|
||||
93 docs/conf.py
@@ -20,29 +20,29 @@ import mongoengine
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
sys.path.insert(0, os.path.abspath('..'))
|
||||
sys.path.insert(0, os.path.abspath(".."))
|
||||
|
||||
# -- General configuration -----------------------------------------------------
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be extensions
|
||||
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo']
|
||||
extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo"]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
templates_path = ["_templates"]
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
source_suffix = ".rst"
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8'
|
||||
# source_encoding = 'utf-8'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
master_doc = "index"
|
||||
|
||||
# General information about the project.
|
||||
project = u'MongoEngine'
|
||||
copyright = u'2009, MongoEngine Authors'
|
||||
project = u"MongoEngine"
|
||||
copyright = u"2009, MongoEngine Authors"
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
@@ -55,68 +55,66 @@ release = mongoengine.get_version()
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#language = None
|
||||
# language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
# today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of documents that shouldn't be included in the build.
|
||||
#unused_docs = []
|
||||
# unused_docs = []
|
||||
|
||||
# List of directories, relative to source directory, that shouldn't be searched
|
||||
# for source files.
|
||||
exclude_trees = ['_build']
|
||||
exclude_trees = ["_build"]
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||
#default_role = None
|
||||
# default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
# add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
#add_module_names = True
|
||||
# add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
#show_authors = False
|
||||
# show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
pygments_style = "sphinx"
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
#modindex_common_prefix = []
|
||||
# modindex_common_prefix = []
|
||||
|
||||
|
||||
# -- Options for HTML output ---------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. Major themes that come with
|
||||
# Sphinx are currently 'default' and 'sphinxdoc'.
|
||||
html_theme = 'sphinx_rtd_theme'
|
||||
html_theme = "sphinx_rtd_theme"
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
html_theme_options = {
|
||||
'canonical_url': 'http://docs.mongoengine.org/en/latest/'
|
||||
}
|
||||
html_theme_options = {"canonical_url": "http://docs.mongoengine.org/en/latest/"}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
# html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
# html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
# html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
@@ -126,11 +124,11 @@ html_favicon = "favicon.ico"
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
#html_static_path = ['_static']
|
||||
# html_static_path = ['_static']
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
# html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
@@ -138,69 +136,68 @@ html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
html_sidebars = {
|
||||
'index': ['globaltoc.html', 'searchbox.html'],
|
||||
'**': ['localtoc.html', 'relations.html', 'searchbox.html']
|
||||
"index": ["globaltoc.html", "searchbox.html"],
|
||||
"**": ["localtoc.html", "relations.html", "searchbox.html"],
|
||||
}
|
||||
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
# html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_use_modindex = True
|
||||
# html_use_modindex = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
# html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
# html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
# html_show_sourcelink = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
# html_use_opensearch = ''
|
||||
|
||||
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = ''
|
||||
# html_file_suffix = ''
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'MongoEnginedoc'
|
||||
htmlhelp_basename = "MongoEnginedoc"
|
||||
|
||||
|
||||
# -- Options for LaTeX output --------------------------------------------------
|
||||
|
||||
# The paper size ('letter' or 'a4').
|
||||
latex_paper_size = 'a4'
|
||||
latex_paper_size = "a4"
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#latex_font_size = '10pt'
|
||||
# latex_font_size = '10pt'
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title, author, documentclass [howto/manual]).
|
||||
latex_documents = [
|
||||
('index', 'MongoEngine.tex', 'MongoEngine Documentation',
|
||||
'Ross Lawley', 'manual'),
|
||||
("index", "MongoEngine.tex", "MongoEngine Documentation", "Ross Lawley", "manual")
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
# latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
# latex_use_parts = False
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#latex_preamble = ''
|
||||
# latex_preamble = ''
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
# latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_use_modindex = True
|
||||
# latex_use_modindex = True
|
||||
|
||||
autoclass_content = 'both'
|
||||
autoclass_content = "both"
|
||||
|
||||
@@ -4,9 +4,11 @@
|
||||
Connecting to MongoDB
|
||||
=====================
|
||||
|
||||
To connect to a running instance of :program:`mongod`, use the
|
||||
:func:`~mongoengine.connect` function. The first argument is the name of the
|
||||
database to connect to::
|
||||
Connections in MongoEngine are registered globally and are identified with aliases.
|
||||
If no `alias` is provided when connecting, "default" is used as the alias.
|
||||
|
||||
To connect to a running instance of :program:`mongod`, use the :func:`~mongoengine.connect`
|
||||
function. The first argument is the name of the database to connect to::
|
||||
|
||||
from mongoengine import connect
|
||||
connect('project1')
|
||||
@@ -18,10 +20,10 @@ provide the :attr:`host` and :attr:`port` arguments to
|
||||
|
||||
connect('project1', host='192.168.1.35', port=12345)
|
||||
|
||||
If the database requires authentication, :attr:`username` and :attr:`password`
|
||||
arguments should be provided::
|
||||
If the database requires authentication, :attr:`username`, :attr:`password`
|
||||
and :attr:`authentication_source` arguments should be provided::
|
||||
|
||||
connect('project1', username='webapp', password='pwd123')
|
||||
connect('project1', username='webapp', password='pwd123', authentication_source='admin')
|
||||
|
||||
URI style connections are also supported -- just supply the URI as
|
||||
the :attr:`host` to
|
||||
@@ -42,6 +44,9 @@ the :attr:`host` to
|
||||
will establish a connection to the ``production`` database using the
``admin`` username and ``qwerty`` password.
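
Such a URI looks like the following sketch (the credentials and database name
are the illustrative ones from the sentence above)::

    connect(host='mongodb://admin:qwerty@localhost/production')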
|
||||
|
||||
.. note:: Calling :func:`~mongoengine.connect` without argument will establish
|
||||
a connection to the "test" database by default
|
||||
|
||||
Replica Sets
|
||||
============
|
||||
|
||||
@@ -71,28 +76,61 @@ is used.
|
||||
In the background this uses :func:`~mongoengine.register_connection` to
|
||||
store the data and you can register all aliases up front if required.
|
||||
|
||||
Individual documents can also support multiple databases by providing a
|
||||
Documents defined in different databases
----------------------------------------
|
||||
Individual documents can be attached to different databases by providing a
|
||||
`db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef`
|
||||
objects to point across databases and collections. Below is an example schema,
|
||||
using 3 different databases to store data::
|
||||
|
||||
connect(alias='user-db-alias', db='user-db')
|
||||
connect(alias='book-db-alias', db='book-db')
|
||||
connect(alias='users-books-db-alias', db='users-books-db')
|
||||
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
|
||||
meta = {'db_alias': 'user-db'}
|
||||
meta = {'db_alias': 'user-db-alias'}
|
||||
|
||||
class Book(Document):
|
||||
name = StringField()
|
||||
|
||||
meta = {'db_alias': 'book-db'}
|
||||
meta = {'db_alias': 'book-db-alias'}
|
||||
|
||||
class AuthorBooks(Document):
|
||||
author = ReferenceField(User)
|
||||
book = ReferenceField(Book)
|
||||
|
||||
meta = {'db_alias': 'users-books-db'}
|
||||
meta = {'db_alias': 'users-books-db-alias'}
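
With these aliases registered, each save is routed to the document's own
database (a sketch; the instances are illustrative)::

    bob = User(name='Bob').save()                    # saved into 'user-db'
    harry_potter = Book(name='Harry Potter').save()  # saved into 'book-db'

    # The reference document lives in 'users-books-db' and points across databases
    AuthorBooks(author=bob, book=harry_potter).save()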
|
||||
|
||||
|
||||
Disconnecting an existing connection
|
||||
------------------------------------
|
||||
The function :func:`~mongoengine.disconnect` can be used to
|
||||
disconnect a particular connection. This can be used to change a
|
||||
connection globally::
|
||||
|
||||
from mongoengine import connect, disconnect
|
||||
connect('a_db', alias='db1')
|
||||
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
meta = {'db_alias': 'db1'}
|
||||
|
||||
disconnect(alias='db1')
|
||||
|
||||
connect('another_db', alias='db1')
|
||||
|
||||
.. note:: Calling :func:`~mongoengine.disconnect` without argument
|
||||
will disconnect the "default" connection
|
||||
|
||||
.. note:: Since connections get registered globally, it is important
|
||||
to use the `disconnect` function from MongoEngine and not the
|
||||
`disconnect()` method of an existing connection (pymongo.MongoClient)
|
||||
|
||||
.. note:: :class:`~mongoengine.Document` classes cache the pymongo collection;
    using `disconnect` ensures that the cached collection is cleared as well
|
||||
|
||||
Context Managers
|
||||
================
|
||||
Sometimes you may want to switch the database or collection to query against.
|
||||
@@ -119,7 +157,7 @@ access to the same User document across databases::
|
||||
|
||||
Switch Collection
|
||||
-----------------
|
||||
The :class:`~mongoengine.context_managers.switch_collection` context manager
|
||||
The :func:`~mongoengine.context_managers.switch_collection` context manager
|
||||
allows you to change the collection for a given class allowing quick and easy
|
||||
access to the same Group document across collections::
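
    # A sketch of typical usage -- the Group class and the collection names
    # here are illustrative, not part of the original hunk:
    from mongoengine.context_managers import switch_collection

    class Group(Document):
        name = StringField()

    Group(name='hello - default').save()  # saved into the default 'group' collection

    with switch_collection(Group, 'group2000') as Group:
        Group(name='hello - group2000').save()  # saved into the 'group2000' collection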
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ objects** as class attributes to the document class::
|
||||
|
||||
class Page(Document):
|
||||
title = StringField(max_length=200, required=True)
|
||||
date_modified = DateTimeField(default=datetime.datetime.now)
|
||||
date_modified = DateTimeField(default=datetime.datetime.utcnow)
|
||||
|
||||
As BSON (the binary format for storing data in mongodb) is order dependent,
|
||||
documents are serialized based on their field order.
|
||||
@@ -80,13 +80,16 @@ are as follows:
|
||||
* :class:`~mongoengine.fields.FloatField`
|
||||
* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
|
||||
* :class:`~mongoengine.fields.GenericReferenceField`
|
||||
* :class:`~mongoengine.fields.GenericLazyReferenceField`
|
||||
* :class:`~mongoengine.fields.GeoPointField`
|
||||
* :class:`~mongoengine.fields.ImageField`
|
||||
* :class:`~mongoengine.fields.IntField`
|
||||
* :class:`~mongoengine.fields.ListField`
|
||||
* :class:`~mongoengine.fields.LongField`
|
||||
* :class:`~mongoengine.fields.MapField`
|
||||
* :class:`~mongoengine.fields.ObjectIdField`
|
||||
* :class:`~mongoengine.fields.ReferenceField`
|
||||
* :class:`~mongoengine.fields.LazyReferenceField`
|
||||
* :class:`~mongoengine.fields.SequenceField`
|
||||
* :class:`~mongoengine.fields.SortedListField`
|
||||
* :class:`~mongoengine.fields.StringField`
|
||||
@@ -153,7 +156,7 @@ arguments can be set on all fields:
|
||||
An iterable (e.g. list, tuple or set) of choices to which the value of this
|
||||
field should be limited.
|
||||
|
||||
Can be either be a nested tuples of value (stored in mongo) and a
|
||||
Can either be nested tuples of value (stored in mongo) and a
|
||||
human readable key ::
|
||||
|
||||
SIZE = (('S', 'Small'),
|
||||
@@ -173,6 +176,21 @@ arguments can be set on all fields:
|
||||
class Shirt(Document):
|
||||
size = StringField(max_length=3, choices=SIZE)
|
||||
|
||||
:attr:`validation` (Optional)
|
||||
A callable to validate the value of the field.
|
||||
The callable takes the value as a parameter and should raise a ValidationError
|
||||
if validation fails
|
||||
|
||||
e.g.::
|
||||
|
||||
def _not_empty(val):
|
||||
if not val:
|
||||
raise ValidationError('value can not be empty')
|
||||
|
||||
class Person(Document):
|
||||
name = StringField(validation=_not_empty)
|
||||
|
||||
|
||||
:attr:`**kwargs` (Optional)
|
||||
You can supply additional metadata as arbitrary additional keyword
|
||||
arguments. You cannot override existing attributes, however. Common
|
||||
@@ -224,7 +242,7 @@ store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate
|
||||
user = ReferenceField(User)
|
||||
answers = DictField()
|
||||
|
||||
survey_response = SurveyResponse(date=datetime.now(), user=request.user)
|
||||
survey_response = SurveyResponse(date=datetime.utcnow(), user=request.user)
|
||||
response_form = ResponseForm(request.POST)
|
||||
survey_response.answers = response_form.cleaned_data()
|
||||
survey_response.save()
|
||||
@@ -490,7 +508,9 @@ the field name with a **#**::
|
||||
]
|
||||
}
|
||||
|
||||
If a dictionary is passed then the following options are available:
|
||||
If a dictionary is passed then additional options become available. Valid options include,
|
||||
but are not limited to:
|
||||
|
||||
|
||||
:attr:`fields` (Default: None)
|
||||
The fields to index. Specified in the same format as described above.
|
||||
@@ -511,8 +531,15 @@ If a dictionary is passed then the following options are available:
|
||||
Allows you to automatically expire data from a collection by setting the
|
||||
time in seconds after which the field expires.
|
||||
|
||||
:attr:`name` (Optional)
|
||||
Allows you to specify a name for the index
|
||||
|
||||
:attr:`collation` (Optional)
|
||||
Allows you to create case-insensitive indexes (MongoDB v3.4+ only)
|
||||
|
||||
.. note::
|
||||
|
||||
Additional options are forwarded as **kwargs to pymongo's create_index method.
|
||||
Inheritance adds extra field indices; see :ref:`document-inheritance`.
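
Combining a few of these options, a dictionary-based index specification might
look like the following sketch (the document and index names are illustrative)::

    class City(Document):
        name = StringField()

        meta = {
            'indexes': [
                {
                    'fields': ['name'],
                    'name': 'name_case_insensitive_idx',
                    # strength=2 compares case-insensitively but respects diacritics
                    'collation': {'locale': 'en', 'strength': 2},
                }
            ]
        }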
|
||||
|
||||
Global index default options
|
||||
@@ -524,15 +551,16 @@ There are a few top level defaults for all indexes that can be set::
|
||||
title = StringField()
|
||||
rating = StringField()
|
||||
meta = {
|
||||
'index_options': {},
|
||||
'index_opts': {},
|
||||
'index_background': True,
|
||||
'index_cls': False,
|
||||
'auto_create_index': True,
|
||||
'index_drop_dups': True,
|
||||
'index_cls': False
|
||||
}
|
||||
|
||||
|
||||
:attr:`index_options` (Optional)
|
||||
Set any default index options - see the `full options list <http://docs.mongodb.org/manual/reference/method/db.collection.ensureIndex/#db.collection.ensureIndex>`_
|
||||
:attr:`index_opts` (Optional)
|
||||
Set any default index options - see the `full options list <https://docs.mongodb.com/manual/reference/method/db.collection.createIndex/#db.collection.createIndex>`_
|
||||
|
||||
:attr:`index_background` (Optional)
|
||||
Set the default value for whether an index should be built in the background
|
||||
@@ -540,10 +568,15 @@ There are a few top level defaults for all indexes that can be set::
|
||||
:attr:`index_cls` (Optional)
|
||||
A way to turn off a specific index for _cls.
|
||||
|
||||
:attr:`auto_create_index` (Optional)
|
||||
When this is True (default), MongoEngine will ensure that the correct
|
||||
indexes exist in MongoDB each time a command is run. This can be disabled
|
||||
in systems where indexes are managed separately. Disabling this will improve
|
||||
performance.
|
||||
|
||||
:attr:`index_drop_dups` (Optional)
|
||||
Set the default value for whether an index should drop duplicates
|
||||
|
||||
.. note:: Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning
|
||||
Since MongoDB 3.0, drop_dups is not supported anymore. It raises a Warning
|
||||
and has no effect
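
Putting a couple of these defaults together (a sketch; the document class is
illustrative)::

    class BlogPost(Document):
        title = StringField()

        meta = {
            'index_background': True,    # build indexes in the background by default
            'auto_create_index': False,  # indexes are managed outside MongoEngine
        }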
|
||||
|
||||
|
||||
@@ -618,7 +651,7 @@ collection after a given period. See the official
|
||||
documentation for more information. A common use case might be session data::
|
||||
|
||||
class Session(Document):
|
||||
created = DateTimeField(default=datetime.now)
|
||||
created = DateTimeField(default=datetime.utcnow)
|
||||
meta = {
|
||||
'indexes': [
|
||||
{'fields': ['created'], 'expireAfterSeconds': 3600}
|
||||
@@ -681,11 +714,16 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::
|
||||
Shard keys
|
||||
==========
|
||||
|
||||
If your collection is sharded, then you need to specify the shard key as a tuple,
|
||||
using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`.
|
||||
This ensures that the shard key is sent with the query when calling the
|
||||
:meth:`~mongoengine.document.Document.save` or
|
||||
:meth:`~mongoengine.document.Document.update` method on an existing
|
||||
If your collection is sharded by multiple keys, then you can improve shard
|
||||
routing (and thus the performance of your application) by specifying the shard
|
||||
key, using the :attr:`shard_key` attribute of
|
||||
:attr:`~mongoengine.Document.meta`. The shard key should be defined as a tuple.
|
||||
|
||||
This ensures that the full shard key is sent with the query when calling
|
||||
methods such as :meth:`~mongoengine.document.Document.save`,
|
||||
:meth:`~mongoengine.document.Document.update`,
|
||||
:meth:`~mongoengine.document.Document.modify`, or
|
||||
:meth:`~mongoengine.document.Document.delete` on an existing
|
||||
:class:`~mongoengine.Document` instance::
|
||||
|
||||
class LogEntry(Document):
|
||||
@@ -695,7 +733,8 @@ This ensures that the shard key is sent with the query when calling the
|
||||
data = StringField()
|
||||
|
||||
meta = {
|
||||
'shard_key': ('machine', 'timestamp',)
|
||||
'shard_key': ('machine', 'timestamp'),
|
||||
'indexes': ('machine', 'timestamp'),
|
||||
}
|
||||
|
||||
.. _document-inheritance:
|
||||
@@ -725,6 +764,9 @@ document.::
|
||||
.. note:: From 0.8 onwards :attr:`allow_inheritance` defaults
|
||||
to False, meaning you must set it to True to use inheritance.
|
||||
|
||||
Setting :attr:`allow_inheritance` to True is also required on an
:class:`~mongoengine.EmbeddedDocument` class if you need to subclass it
|
||||
|
||||
Working with existing data
|
||||
--------------------------
|
||||
As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and
|
||||
|
||||
@@ -57,7 +57,8 @@ document values for example::
|
||||
|
||||
def clean(self):
|
||||
"""Ensures that only published essays have a `pub_date` and
|
||||
automatically sets the pub_date if published and not set"""
|
||||
automatically sets `pub_date` if essay is published and `pub_date`
|
||||
is not set"""
|
||||
if self.status == 'Draft' and self.pub_date is not None:
|
||||
msg = 'Draft entries should not have a publication date.'
|
||||
raise ValidationError(msg)
|
||||
|
||||
@@ -53,7 +53,8 @@ Deletion
|
||||
|
||||
Deleting stored files is achieved with the :func:`delete` method::
|
||||
|
||||
marmot.photo.delete()
|
||||
marmot.photo.delete() # Deletes the GridFS document
|
||||
marmot.save() # Saves the GridFS reference (being None) contained in the marmot instance
|
||||
|
||||
.. warning::
|
||||
|
||||
@@ -71,4 +72,5 @@ Files can be replaced with the :func:`replace` method. This works just like
|
||||
the :func:`put` method so even metadata can (and should) be replaced::
|
||||
|
||||
another_marmot = open('another_marmot.png', 'rb')
|
||||
marmot.photo.replace(another_marmot, content_type='image/png')
|
||||
marmot.photo.replace(another_marmot, content_type='image/png') # Replaces the GridFS document
|
||||
marmot.save() # Replaces the GridFS reference contained in marmot instance
|
||||
|
||||
@@ -19,3 +19,30 @@ or with an alias:
|
||||
|
||||
connect('mongoenginetest', host='mongomock://localhost', alias='testdb')
|
||||
conn = get_connection('testdb')
|
||||
|
||||
Example of test file:
---------------------
|
||||
.. code-block:: python
|
||||
|
||||
import unittest
|
||||
from mongoengine import connect, disconnect
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
|
||||
class TestPerson(unittest.TestCase):
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
connect('mongoenginetest', host='mongomock://localhost')
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
disconnect()
|
||||
|
||||
def test_thing(self):
|
||||
pers = Person(name='John')
|
||||
pers.save()
|
||||
|
||||
fresh_pers = Person.objects().first()
|
||||
self.assertEqual(fresh_pers.name, 'John')
|
||||
|
||||
@@ -64,7 +64,7 @@ Available operators are as follows:
|
||||
* ``gt`` -- greater than
|
||||
* ``gte`` -- greater than or equal to
|
||||
* ``not`` -- negate a standard check, may be used before other operators (e.g.
|
||||
``Q(age__not__mod=5)``)
|
||||
``Q(age__not__mod=(5, 0))``)
|
||||
* ``in`` -- value is in list (a list of values should be provided)
|
||||
* ``nin`` -- value is not in list (a list of values should be provided)
|
||||
* ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values
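
For example, the corrected ``not``/``mod`` form above excludes documents whose
value is divisible by 5 (a sketch, assuming a ``User`` document with an
``age`` field)::

    from mongoengine import Q

    # mod=(5, 0) matches age % 5 == 0; `not` negates it, so this
    # returns users whose age is NOT a multiple of 5
    users = User.objects(Q(age__not__mod=(5, 0)))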
|
||||
@@ -456,14 +456,14 @@ data. To turn off dereferencing of the results of a query use
|
||||
:func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so::
|
||||
|
||||
post = Post.objects.no_dereference().first()
|
||||
assert(isinstance(post.author, ObjectId))
|
||||
assert(isinstance(post.author, DBRef))
|
||||
|
||||
You can also turn off all dereferencing for a fixed period by using the
|
||||
:class:`~mongoengine.context_managers.no_dereference` context manager::
|
||||
|
||||
with no_dereference(Post) as Post:
|
||||
post = Post.objects.first()
|
||||
assert(isinstance(post.author, ObjectId))
|
||||
assert(isinstance(post.author, DBRef))
|
||||
|
||||
# Outside the context manager dereferencing occurs.
|
||||
assert(isinstance(post.author, User))
|
||||
@@ -565,6 +565,15 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`::
|
||||
>>> post.tags
|
||||
['database', 'mongodb']
|
||||
|
||||
From MongoDB version 2.6, the push operator supports a $position value, which
allows pushing values at a given index.
|
||||
>>> post = BlogPost(title="Test", tags=["mongo"])
|
||||
>>> post.save()
|
||||
>>> post.update(push__tags__0=["database", "code"])
|
||||
>>> post.reload()
|
||||
>>> post.tags
|
||||
['database', 'code', 'mongo']
|
||||
|
||||
.. note::
|
||||
Currently only top level lists are handled, future versions of mongodb /
|
||||
pymongo plan to support nested positional operators. See `The $ positional
|
||||
|
||||
@@ -43,10 +43,10 @@ Available signals include:
|
||||
has taken place but before saving.
|
||||
|
||||
`post_save`
|
||||
Called within :meth:`~mongoengine.Document.save` after all actions
|
||||
(validation, insert/update, cascades, clearing dirty flags) have completed
|
||||
successfully. Passed the additional boolean keyword argument `created` to
|
||||
indicate if the save was an insert or an update.
|
||||
Called within :meth:`~mongoengine.Document.save` after most actions
|
||||
(validation, insert/update, and cascades, but not clearing dirty flags) have
|
||||
completed successfully. Passed the additional boolean keyword argument
|
||||
`created` to indicate if the save was an insert or an update.
|
||||
|
||||
`pre_delete`
|
||||
Called within :meth:`~mongoengine.Document.delete` prior to
|
||||
@@ -113,6 +113,10 @@ handlers within your subclass::
|
||||
signals.pre_save.connect(Author.pre_save, sender=Author)
|
||||
signals.post_save.connect(Author.post_save, sender=Author)
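
As a sketch, a handler can use the ``created`` flag described above (the
handler body here is illustrative, not the original example)::

    class Author(Document):
        name = StringField()

        @classmethod
        def post_save(cls, sender, document, **kwargs):
            # `created` is True when the save was an insert, False for an update
            if kwargs.get('created'):
                print('%s was inserted' % document.name)
            else:
                print('%s was updated' % document.name)

    signals.post_save.connect(Author.post_save, sender=Author)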
|
||||
|
||||
.. warning::
|
||||
|
||||
Note that EmbeddedDocument only supports pre/post_init signals. pre/post_save, etc. should be attached to Document classes only; attaching pre_save to an EmbeddedDocument is silently ignored.
|
||||
|
||||
Finally, you can also use this small decorator to quickly create a number of
|
||||
signals and attach them to your :class:`~mongoengine.Document` or
|
||||
:class:`~mongoengine.EmbeddedDocument` subclasses as class decorators::
|
||||
|
||||
@@ -48,4 +48,4 @@ Ordering by text score
|
||||
|
||||
::
|
||||
|
||||
objects = News.objects.search('mongo').order_by('$text_score')
|
||||
objects = News.objects.search_text('mongo').order_by('$text_score')
|
||||
|
||||
@@ -86,7 +86,7 @@ of them stand out as particularly intuitive solutions.
|
||||
Posts
|
||||
^^^^^
|
||||
|
||||
Happily mongoDB *isn't* a relational database, so we're not going to do it that
|
||||
Happily MongoDB *isn't* a relational database, so we're not going to do it that
|
||||
way. As it turns out, we can use MongoDB's schemaless nature to provide us with
|
||||
a much nicer solution. We will store all of the posts in *one collection* and
|
||||
each post type will only store the fields it needs. If we later want to add
|
||||
@@ -153,7 +153,7 @@ post. This works, but there is no real reason to be storing the comments
|
||||
separately from their associated posts, other than to work around the
|
||||
relational model. Using MongoDB we can store the comments as a list of
|
||||
*embedded documents* directly on a post document. An embedded document should
|
||||
be treated no differently that a regular document; it just doesn't have its own
|
||||
be treated no differently than a regular document; it just doesn't have its own
|
||||
collection in the database. Using MongoEngine, we can define the structure of
|
||||
embedded documents, along with utility methods, in exactly the same way we do
|
||||
with regular documents::
|
||||
|
||||
@@ -6,6 +6,23 @@ Development
|
||||
***********
|
||||
(Fill this out whenever you introduce breaking changes to MongoEngine)
|
||||
|
||||
URLField's constructor no longer takes `verify_exists`
|
||||
|
||||
0.15.0
|
||||
******
|
||||
|
||||
0.14.0
|
||||
******
|
||||
This release includes a few bug fixes and a significant code cleanup. The most
|
||||
important change is that `QuerySet.as_pymongo` no longer supports a
|
||||
`coerce_types` mode. If you used it in the past, a) please let us know of your
|
||||
use case, b) you'll need to override `as_pymongo` to get the desired outcome.
|
||||
|
||||
This release also makes the EmbeddedDocument not hashable by default. If you
|
||||
use embedded documents in sets or dictionaries, you might have to override
|
||||
`__hash__` and implement a hashing logic specific to your use case. See #1528
|
||||
for the reason behind this change.
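
A minimal sketch of such an override (the fields hashed on are an assumption;
pick whatever defines identity in your application)::

    class Address(EmbeddedDocument):
        street = StringField()
        city = StringField()

        def __hash__(self):
            # hash on the fields that define identity for your use case
            return hash((self.street, self.city))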
|
||||
|
||||
0.13.0
|
||||
******
|
||||
This release adds Unicode support to the `EmailField` and changes its
|
||||
|
||||
@@ -18,19 +18,25 @@ from mongoengine.queryset import *
|
||||
from mongoengine.signals import *
|
||||
|
||||
|
||||
__all__ = (list(document.__all__) + list(fields.__all__) +
|
||||
list(connection.__all__) + list(queryset.__all__) +
|
||||
list(signals.__all__) + list(errors.__all__))
|
||||
__all__ = (
|
||||
list(document.__all__)
|
||||
+ list(fields.__all__)
|
||||
+ list(connection.__all__)
|
||||
+ list(queryset.__all__)
|
||||
+ list(signals.__all__)
|
||||
+ list(errors.__all__)
|
||||
)
|
||||
|
||||
|
||||
VERSION = (0, 13, 0)
|
||||
VERSION = (0, 18, 2)
|
||||
|
||||
|
||||
def get_version():
|
||||
"""Return the VERSION as a string, e.g. for VERSION == (0, 10, 7),
|
||||
return '0.10.7'.
|
||||
"""Return the VERSION as a string.
|
||||
|
||||
For example, if `VERSION == (0, 10, 7)`, return '0.10.7'.
|
||||
"""
|
||||
return '.'.join(map(str, VERSION))
|
||||
return ".".join(map(str, VERSION))
|
||||
|
||||
|
||||
__version__ = get_version()
|
||||
|
||||
@@ -12,17 +12,22 @@ from mongoengine.base.metaclasses import *
|
||||
|
||||
__all__ = (
|
||||
# common
|
||||
'UPDATE_OPERATORS', '_document_registry', 'get_document',
|
||||
|
||||
"UPDATE_OPERATORS",
|
||||
"_document_registry",
|
||||
"get_document",
|
||||
# datastructures
|
||||
'BaseDict', 'BaseList', 'EmbeddedDocumentList',
|
||||
|
||||
"BaseDict",
|
||||
"BaseList",
|
||||
"EmbeddedDocumentList",
|
||||
"LazyReference",
|
||||
# document
|
||||
'BaseDocument',
|
||||
|
||||
"BaseDocument",
|
||||
# fields
|
||||
'BaseField', 'ComplexBaseField', 'ObjectIdField', 'GeoJsonBaseField',
|
||||
|
||||
"BaseField",
|
||||
"ComplexBaseField",
|
||||
"ObjectIdField",
|
||||
"GeoJsonBaseField",
|
||||
# metaclasses
|
||||
'DocumentMetaclass', 'TopLevelDocumentMetaclass'
|
||||
"DocumentMetaclass",
|
||||
"TopLevelDocumentMetaclass",
|
||||
)
|
||||
|
||||
@@ -1,31 +1,62 @@
|
||||
from mongoengine.errors import NotRegistered
|
||||
|
||||
__all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry')
|
||||
__all__ = ("UPDATE_OPERATORS", "get_document", "_document_registry")
|
||||
|
||||
|
||||
UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
|
||||
'push_all', 'pull', 'pull_all', 'add_to_set',
|
||||
'set_on_insert', 'min', 'max', 'rename'])
|
||||
UPDATE_OPERATORS = {
|
||||
"set",
|
||||
"unset",
|
||||
"inc",
|
||||
"dec",
|
||||
"mul",
|
||||
"pop",
|
||||
"push",
|
||||
"push_all",
|
||||
"pull",
|
||||
"pull_all",
|
||||
"add_to_set",
|
||||
"set_on_insert",
|
||||
"min",
|
||||
"max",
|
||||
"rename",
|
||||
}
|
||||
|
||||
|
||||
_document_registry = {}
|
||||
|
||||
|
||||
def get_document(name):
|
||||
"""Get a document class by name."""
|
||||
"""Get a registered Document class by name."""
|
||||
doc = _document_registry.get(name, None)
|
||||
if not doc:
|
||||
# Possible old style name
|
||||
single_end = name.split('.')[-1]
|
||||
compound_end = '.%s' % single_end
|
||||
possible_match = [k for k in _document_registry.keys()
|
||||
if k.endswith(compound_end) or k == single_end]
|
||||
single_end = name.split(".")[-1]
|
||||
compound_end = ".%s" % single_end
|
||||
possible_match = [
|
||||
k for k in _document_registry if k.endswith(compound_end) or k == single_end
|
||||
]
|
||||
if len(possible_match) == 1:
|
||||
doc = _document_registry.get(possible_match.pop(), None)
|
||||
if not doc:
|
||||
raise NotRegistered("""
|
||||
raise NotRegistered(
|
||||
"""
|
||||
`%s` has not been registered in the document registry.
|
||||
Importing the document class automatically registers it, has it
|
||||
been imported?
|
||||
""".strip() % name)
|
||||
""".strip()
|
||||
% name
|
||||
)
|
||||
return doc
|
||||
|
||||
|
||||
def _get_documents_by_db(connection_alias, default_connection_alias):
|
||||
"""Get all registered Documents class attached to a given database"""
|
||||
|
||||
def get_doc_alias(doc_cls):
|
||||
return doc_cls._meta.get("db_alias", default_connection_alias)
|
||||
|
||||
return [
|
||||
doc_cls
|
||||
for doc_cls in _document_registry.values()
|
||||
if get_doc_alias(doc_cls) == connection_alias
|
||||
]
|
||||
|
||||
@@ -1,12 +1,43 @@
|
||||
import itertools
|
||||
import weakref
|
||||
|
||||
from bson import DBRef
|
||||
import six
|
||||
from six import iteritems
|
||||
|
||||
from mongoengine.common import _import_class
|
||||
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
|
||||
|
||||
__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList')
|
||||
__all__ = (
|
||||
"BaseDict",
|
||||
"StrictDict",
|
||||
"BaseList",
|
||||
"EmbeddedDocumentList",
|
||||
"LazyReference",
|
||||
)
|
||||
|
||||
|
||||
def mark_as_changed_wrapper(parent_method):
|
||||
"""Decorator that ensures _mark_as_changed method gets called."""
|
||||
|
||||
def wrapper(self, *args, **kwargs):
|
||||
# Can't use super() in the decorator.
|
||||
result = parent_method(self, *args, **kwargs)
|
||||
self._mark_as_changed()
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def mark_key_as_changed_wrapper(parent_method):
|
||||
"""Decorator that ensures _mark_as_changed method gets called with the key argument"""
|
||||
|
||||
def wrapper(self, key, *args, **kwargs):
|
||||
# Can't use super() in the decorator.
|
||||
result = parent_method(self, key, *args, **kwargs)
|
||||
self._mark_as_changed(key)
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
 class BaseDict(dict):
@@ -17,46 +48,36 @@ class BaseDict(dict):
     _name = None

     def __init__(self, dict_items, instance, name):
-        Document = _import_class('Document')
-        EmbeddedDocument = _import_class('EmbeddedDocument')
+        BaseDocument = _import_class("BaseDocument")

-        if isinstance(instance, (Document, EmbeddedDocument)):
+        if isinstance(instance, BaseDocument):
             self._instance = weakref.proxy(instance)
         self._name = name
         super(BaseDict, self).__init__(dict_items)

-    def __getitem__(self, key, *args, **kwargs):
+    def get(self, key, default=None):
+        # get does not use __getitem__ by default so we must override it as well
+        try:
+            return self.__getitem__(key)
+        except KeyError:
+            return default
+
+    def __getitem__(self, key):
         value = super(BaseDict, self).__getitem__(key)

-        EmbeddedDocument = _import_class('EmbeddedDocument')
+        EmbeddedDocument = _import_class("EmbeddedDocument")
         if isinstance(value, EmbeddedDocument) and value._instance is None:
             value._instance = self._instance
-        elif not isinstance(value, BaseDict) and isinstance(value, dict):
-            value = BaseDict(value, None, '%s.%s' % (self._name, key))
+        elif isinstance(value, dict) and not isinstance(value, BaseDict):
+            value = BaseDict(value, None, "%s.%s" % (self._name, key))
             super(BaseDict, self).__setitem__(key, value)
             value._instance = self._instance
-        elif not isinstance(value, BaseList) and isinstance(value, list):
-            value = BaseList(value, None, '%s.%s' % (self._name, key))
+        elif isinstance(value, list) and not isinstance(value, BaseList):
+            value = BaseList(value, None, "%s.%s" % (self._name, key))
             super(BaseDict, self).__setitem__(key, value)
             value._instance = self._instance
         return value

-    def __setitem__(self, key, value, *args, **kwargs):
-        self._mark_as_changed(key)
-        return super(BaseDict, self).__setitem__(key, value)
-
-    def __delete__(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).__delete__(*args, **kwargs)
-
-    def __delitem__(self, key, *args, **kwargs):
-        self._mark_as_changed(key)
-        return super(BaseDict, self).__delitem__(key)
-
-    def __delattr__(self, key, *args, **kwargs):
-        self._mark_as_changed(key)
-        return super(BaseDict, self).__delattr__(key)
-
     def __getstate__(self):
         self.instance = None
         self._dereferenced = False
@@ -66,30 +87,19 @@ class BaseDict(dict):
         self = state
         return self

-    def clear(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).clear()
-
-    def pop(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).pop(*args, **kwargs)
-
-    def popitem(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).popitem()
-
-    def setdefault(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).setdefault(*args, **kwargs)
-
-    def update(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).update(*args, **kwargs)
+    __setitem__ = mark_key_as_changed_wrapper(dict.__setitem__)
+    __delattr__ = mark_key_as_changed_wrapper(dict.__delattr__)
+    __delitem__ = mark_key_as_changed_wrapper(dict.__delitem__)
+    pop = mark_as_changed_wrapper(dict.pop)
+    clear = mark_as_changed_wrapper(dict.clear)
+    update = mark_as_changed_wrapper(dict.update)
+    popitem = mark_as_changed_wrapper(dict.popitem)
+    setdefault = mark_as_changed_wrapper(dict.setdefault)

     def _mark_as_changed(self, key=None):
-        if hasattr(self._instance, '_mark_as_changed'):
+        if hasattr(self._instance, "_mark_as_changed"):
             if key:
-                self._instance._mark_as_changed('%s.%s' % (self._name, key))
+                self._instance._mark_as_changed("%s.%s" % (self._name, key))
             else:
                 self._instance._mark_as_changed(self._name)
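
A sketch of the resulting change tracking, using a hypothetical `FakeDoc` stand-in instead of a real mongoengine document (a real `BaseDict` holds a `weakref.proxy` to its owner):

class FakeDoc(object):
    """Hypothetical owner exposing only the _mark_as_changed hook."""

    def __init__(self):
        self.changed = []

    def _mark_as_changed(self, key=None):
        self.changed.append(key)

doc = FakeDoc()
info = BaseDict({"address": {"city": "Paris"}}, None, "info")
info._instance = doc              # normally a weakref.proxy to the document

info["address"]["city"] = "Lyon"  # nested dict is rewrapped as BaseDict("info.address")
info["zip"] = 75001
assert doc.changed == ["info.address.city", "info.zip"]
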
@@ -102,52 +112,39 @@ class BaseList(list):
     _name = None

     def __init__(self, list_items, instance, name):
-        Document = _import_class('Document')
-        EmbeddedDocument = _import_class('EmbeddedDocument')
+        BaseDocument = _import_class("BaseDocument")

-        if isinstance(instance, (Document, EmbeddedDocument)):
+        if isinstance(instance, BaseDocument):
             self._instance = weakref.proxy(instance)
         self._name = name
         super(BaseList, self).__init__(list_items)

-    def __getitem__(self, key, *args, **kwargs):
+    def __getitem__(self, key):
         value = super(BaseList, self).__getitem__(key)

-        EmbeddedDocument = _import_class('EmbeddedDocument')
+        if isinstance(key, slice):
+            # When receiving a slice operator, we don't convert the structure and bind
+            # to parent's instance. This is buggy for now but would require more work to be handled properly
+            return value
+
+        EmbeddedDocument = _import_class("EmbeddedDocument")
         if isinstance(value, EmbeddedDocument) and value._instance is None:
             value._instance = self._instance
-        elif not isinstance(value, BaseDict) and isinstance(value, dict):
-            value = BaseDict(value, None, '%s.%s' % (self._name, key))
+        elif isinstance(value, dict) and not isinstance(value, BaseDict):
+            # Replace dict by BaseDict
+            value = BaseDict(value, None, "%s.%s" % (self._name, key))
             super(BaseList, self).__setitem__(key, value)
             value._instance = self._instance
-        elif not isinstance(value, BaseList) and isinstance(value, list):
-            value = BaseList(value, None, '%s.%s' % (self._name, key))
+        elif isinstance(value, list) and not isinstance(value, BaseList):
+            # Replace list by BaseList
+            value = BaseList(value, None, "%s.%s" % (self._name, key))
             super(BaseList, self).__setitem__(key, value)
             value._instance = self._instance
         return value

     def __iter__(self):
-        for i in xrange(self.__len__()):
-            yield self[i]
-
-    def __setitem__(self, key, value, *args, **kwargs):
-        if isinstance(key, slice):
-            self._mark_as_changed()
-        else:
-            self._mark_as_changed(key)
-        return super(BaseList, self).__setitem__(key, value)
-
-    def __delitem__(self, key, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).__delitem__(key)
-
-    def __setslice__(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).__setslice__(*args, **kwargs)
-
-    def __delslice__(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).__delslice__(*args, **kwargs)
+        for v in super(BaseList, self).__iter__():
+            yield v

     def __getstate__(self):
         self.instance = None
@@ -158,53 +155,53 @@ class BaseList(list):
         self = state
         return self

-    def __iadd__(self, other):
-        self._mark_as_changed()
-        return super(BaseList, self).__iadd__(other)
+    def __setitem__(self, key, value):
+        changed_key = key
+        if isinstance(key, slice):
+            # In case of slice, we don't bother to identify the exact elements being updated
+            # instead, we simply marks the whole list as changed
+            changed_key = None

-    def __imul__(self, other):
-        self._mark_as_changed()
-        return super(BaseList, self).__imul__(other)
+        result = super(BaseList, self).__setitem__(key, value)
+        self._mark_as_changed(changed_key)
+        return result

-    def append(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).append(*args, **kwargs)
+    append = mark_as_changed_wrapper(list.append)
+    extend = mark_as_changed_wrapper(list.extend)
+    insert = mark_as_changed_wrapper(list.insert)
+    pop = mark_as_changed_wrapper(list.pop)
+    remove = mark_as_changed_wrapper(list.remove)
+    reverse = mark_as_changed_wrapper(list.reverse)
+    sort = mark_as_changed_wrapper(list.sort)
+    __delitem__ = mark_as_changed_wrapper(list.__delitem__)
+    __iadd__ = mark_as_changed_wrapper(list.__iadd__)
+    __imul__ = mark_as_changed_wrapper(list.__imul__)

-    def extend(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).extend(*args, **kwargs)
+    if six.PY2:
+        # Under py3 __setslice__, __delslice__ and __getslice__
+        # are replaced by __setitem__, __delitem__ and __getitem__ with a slice as parameter
+        # so we mimic this under python 2
+        def __setslice__(self, i, j, sequence):
+            return self.__setitem__(slice(i, j), sequence)

-    def insert(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).insert(*args, **kwargs)
+        def __delslice__(self, i, j):
+            return self.__delitem__(slice(i, j))

-    def pop(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).pop(*args, **kwargs)
-
-    def remove(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).remove(*args, **kwargs)
-
-    def reverse(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).reverse()
-
-    def sort(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).sort(*args, **kwargs)
+        def __getslice__(self, i, j):
+            return self.__getitem__(slice(i, j))

     def _mark_as_changed(self, key=None):
-        if hasattr(self._instance, '_mark_as_changed'):
+        if hasattr(self._instance, "_mark_as_changed"):
             if key:
-                self._instance._mark_as_changed(
-                    '%s.%s' % (self._name, key % len(self))
-                )
+                self._instance._mark_as_changed("%s.%s" % (self._name, key % len(self)))
             else:
                 self._instance._mark_as_changed(self._name)
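
The slice behaviour called out in the comments above can be sketched as follows (again with a hypothetical `FakeDoc` owner); note how an integer key is normalised with `key % len(self)` in `_mark_as_changed`, so negative indexes resolve to their positive position:

class FakeDoc(object):
    """Hypothetical owner exposing only the _mark_as_changed hook."""

    def __init__(self):
        self.changed = []

    def _mark_as_changed(self, key=None):
        self.changed.append(key)

doc = FakeDoc()
scores = BaseList([1, 2, 3], None, "scores")
scores._instance = doc    # normally a weakref.proxy to the document

scores[1] = 20            # marked as "scores.1"
scores[-1] = 30           # -1 % 3 == 2, so marked as "scores.2"
scores[0:2] = [5, 6]      # slice: the whole "scores" field is marked
assert doc.changed == ["scores.1", "scores.2", "scores"]
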
 class EmbeddedDocumentList(BaseList):
+    def __init__(self, list_items, instance, name):
+        super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
+        self._instance = instance
+
     @classmethod
     def __match_all(cls, embedded_doc, kwargs):
@@ -224,15 +221,14 @@ class EmbeddedDocumentList(BaseList):
             return embedded_docs
         return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)]

-    def __init__(self, list_items, instance, name):
-        super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
-        self._instance = instance
-
     def filter(self, **kwargs):
         """
         Filters the list by only including embedded documents with the
         given keyword arguments.

+        This method only supports simple comparison (e.g: .filter(name='John Doe'))
+        and does not support operators like __gte, __lte, __icontains like queryset.filter does
+
         :param kwargs: The keyword arguments corresponding to the fields to
             filter on. *Multiple arguments are treated as if they are ANDed
             together.*
@@ -287,12 +283,10 @@ class EmbeddedDocumentList(BaseList):
         """
         values = self.__only_matches(self, kwargs)
         if len(values) == 0:
-            raise DoesNotExist(
-                '%s matching query does not exist.' % self._name
-            )
+            raise DoesNotExist("%s matching query does not exist." % self._name)
         elif len(values) > 1:
             raise MultipleObjectsReturned(
-                '%d items returned, instead of 1' % len(values)
+                "%d items returned, instead of 1" % len(values)
             )

         return values[0]
@@ -350,7 +344,8 @@ class EmbeddedDocumentList(BaseList):

     def update(self, **update):
         """
-        Updates the embedded documents with the given update values.
+        Updates the embedded documents with the given replacement values. This
+        function does not support mongoDB update operators such as ``inc__``.

         .. note::
             The embedded document changes are not automatically saved
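
A hedged usage sketch of `filter()`/`get()` on an `EmbeddedDocumentList` (the `Post`/`Comment` models are illustrative only):

from mongoengine import Document, EmbeddedDocument, EmbeddedDocumentListField, StringField

class Comment(EmbeddedDocument):
    author = StringField()
    message = StringField()

class Post(Document):
    comments = EmbeddedDocumentListField(Comment)

post = Post(comments=[
    Comment(author="Ross", message="hi"),
    Comment(author="Ross", message="hello again"),
    Comment(author="Rachel", message="hey"),
])

# Simple equality only -- no __gte / __icontains style operators here.
assert len(post.comments.filter(author="Ross")) == 2

# get() returns exactly one match, or raises DoesNotExist /
# MultipleObjectsReturned, mirroring QuerySet.get().
rachel = post.comments.get(author="Rachel")
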
@@ -372,22 +367,22 @@ class EmbeddedDocumentList(BaseList):

 class StrictDict(object):
     __slots__ = ()
-    _special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create'])
+    _special_fields = {"get", "pop", "iteritems", "items", "keys", "create"}
     _classes = {}

     def __init__(self, **kwargs):
-        for k, v in kwargs.iteritems():
+        for k, v in iteritems(kwargs):
             setattr(self, k, v)

     def __getitem__(self, key):
-        key = '_reserved_' + key if key in self._special_fields else key
+        key = "_reserved_" + key if key in self._special_fields else key
         try:
             return getattr(self, key)
         except AttributeError:
             raise KeyError(key)

     def __setitem__(self, key, value):
-        key = '_reserved_' + key if key in self._special_fields else key
+        key = "_reserved_" + key if key in self._special_fields else key
         return setattr(self, key, value)

     def __contains__(self, key):
@@ -424,7 +419,7 @@ class StrictDict(object):
         return (key for key in self.__slots__ if hasattr(self, key))

     def __len__(self):
-        return len(list(self.iteritems()))
+        return len(list(iteritems(self)))

     def __eq__(self, other):
         return self.items() == other.items()
@@ -434,53 +429,60 @@ class StrictDict(object):

     @classmethod
     def create(cls, allowed_keys):
-        allowed_keys_tuple = tuple(('_reserved_' + k if k in cls._special_fields else k) for k in allowed_keys)
+        allowed_keys_tuple = tuple(
+            ("_reserved_" + k if k in cls._special_fields else k) for k in allowed_keys
+        )
         allowed_keys = frozenset(allowed_keys_tuple)
         if allowed_keys not in cls._classes:

             class SpecificStrictDict(cls):
                 __slots__ = allowed_keys_tuple

                 def __repr__(self):
-                    return '{%s}' % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items())
+                    return "{%s}" % ", ".join(
+                        '"{0!s}": {1!r}'.format(k, v) for k, v in self.items()
+                    )

             cls._classes[allowed_keys] = SpecificStrictDict
         return cls._classes[allowed_keys]
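
A short sketch of `StrictDict.create`: it manufactures a `__slots__`-based subclass per distinct key set and caches it in `_classes`:

PointDict = StrictDict.create(("x", "y"))

p = PointDict(x=1, y=2)
p["x"] = 10                      # stored via __slots__, no instance __dict__
assert p["x"] == 10 and p.y == 2

# The same key set returns the same cached class:
assert StrictDict.create(("x", "y")) is PointDict

# Keys outside the declared set are rejected by the slotted class:
try:
    p["z"] = 3
except AttributeError:
    pass
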

-class SemiStrictDict(StrictDict):
-    __slots__ = ('_extras', )
-    _classes = {}
+class LazyReference(DBRef):
+    __slots__ = ("_cached_doc", "passthrough", "document_type")

-    def __getattr__(self, attr):
-        try:
-            super(SemiStrictDict, self).__getattr__(attr)
-        except AttributeError:
-            try:
-                return self.__getattribute__('_extras')[attr]
-            except KeyError as e:
-                raise AttributeError(e)
+    def fetch(self, force=False):
+        if not self._cached_doc or force:
+            self._cached_doc = self.document_type.objects.get(pk=self.pk)
+            if not self._cached_doc:
+                raise DoesNotExist("Trying to dereference unknown document %s" % (self))
+        return self._cached_doc

-    def __setattr__(self, attr, value):
-        try:
-            super(SemiStrictDict, self).__setattr__(attr, value)
-        except AttributeError:
-            try:
-                self._extras[attr] = value
-            except AttributeError:
-                self._extras = {attr: value}
+    @property
+    def pk(self):
+        return self.id

-    def __delattr__(self, attr):
-        try:
-            super(SemiStrictDict, self).__delattr__(attr)
-        except AttributeError:
-            try:
-                del self._extras[attr]
-            except KeyError as e:
-                raise AttributeError(e)
+    def __init__(self, document_type, pk, cached_doc=None, passthrough=False):
+        self.document_type = document_type
+        self._cached_doc = cached_doc
+        self.passthrough = passthrough
+        super(LazyReference, self).__init__(
+            self.document_type._get_collection_name(), pk
+        )

-    def __iter__(self):
+    def __getitem__(self, name):
+        if not self.passthrough:
+            raise KeyError()
+        document = self.fetch()
+        return document[name]
+
+    def __getattr__(self, name):
+        if not object.__getattribute__(self, "passthrough"):
+            raise AttributeError()
+        document = self.fetch()
         try:
-            extras_iter = iter(self.__getattribute__('_extras'))
-        except AttributeError:
-            extras_iter = ()
-        return itertools.chain(super(SemiStrictDict, self).__iter__(), extras_iter)
+            return document[name]
+        except KeyError:
+            raise AttributeError()
+
+    def __repr__(self):
+        return "<LazyReference(%s, %r)>" % (self.document_type, self.pk)
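
Usage sketch for `LazyReference` (illustrative only: instances are normally produced by a `LazyReferenceField`, and `User`/`user_id` are assumed names):

ref = LazyReference(User, user_id)

ref.pk                        # primary key of the target, no database hit
doc = ref.fetch()             # first call queries and caches in _cached_doc
doc = ref.fetch()             # served from the cache
doc = ref.fetch(force=True)   # bypass the cache and re-query

# With passthrough enabled, attribute and item access fetch transparently:
ref = LazyReference(User, user_id, passthrough=True)
name = ref.name               # __getattr__ -> fetch() -> document["name"]
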
(File diff suppressed because it is too large.)
@@ -5,16 +5,14 @@ import weakref
 from bson import DBRef, ObjectId, SON
 import pymongo
 import six
+from six import iteritems

 from mongoengine.base.common import UPDATE_OPERATORS
-from mongoengine.base.datastructures import (BaseDict, BaseList,
-                                             EmbeddedDocumentList)
+from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList
 from mongoengine.common import _import_class
-from mongoengine.errors import ValidationError
+from mongoengine.errors import DeprecatedError, ValidationError


-__all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField',
-           'GeoJsonBaseField')
+__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField")


 class BaseField(object):
@@ -23,6 +21,7 @@ class BaseField(object):

     .. versionchanged:: 0.5 - added verbose and help text
     """
+
     name = None
     _geo_index = False
     _auto_gen = False  # Call `generate` to generate a value
@@ -34,10 +33,21 @@ class BaseField(object):
     creation_counter = 0
     auto_creation_counter = -1

-    def __init__(self, db_field=None, name=None, required=False, default=None,
-                 unique=False, unique_with=None, primary_key=False,
-                 validation=None, choices=None, null=False, sparse=False,
-                 **kwargs):
+    def __init__(
+        self,
+        db_field=None,
+        name=None,
+        required=False,
+        default=None,
+        unique=False,
+        unique_with=None,
+        primary_key=False,
+        validation=None,
+        choices=None,
+        null=False,
+        sparse=False,
+        **kwargs
+    ):
         """
         :param db_field: The database field to store this field in
             (defaults to the name of the field)
@@ -52,10 +62,10 @@ class BaseField(object):
             unique with.
         :param primary_key: Mark this field as the primary key. Defaults to False.
         :param validation: (optional) A callable to validate the value of the
-            field. Generally this is deprecated in favour of the
-            `FIELD.validate` method
+            field. The callable takes the value as parameter and should raise
+            a ValidationError if validation fails
         :param choices: (optional) The valid choices
-        :param null: (optional) Is the field value can be null. If no and there is a default value
+        :param null: (optional) If the field value can be null. If no and there is a default value
             then the default value is set
         :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False`
             means that uniqueness won't be enforced for `None` values
@@ -65,7 +75,7 @@ class BaseField(object):
             existing attributes. Common metadata includes `verbose_name` and
             `help_text`.
         """
-        self.db_field = (db_field or name) if not primary_key else '_id'
+        self.db_field = (db_field or name) if not primary_key else "_id"

         if name:
             msg = 'Field\'s "name" attribute deprecated in favour of "db_field"'
@@ -81,11 +91,17 @@ class BaseField(object):
         self.sparse = sparse
         self._owner_document = None

-        # Validate the db_field
+        # Make sure db_field is a string (if it's explicitly defined).
+        if self.db_field is not None and not isinstance(
+            self.db_field, six.string_types
+        ):
+            raise TypeError("db_field should be a string.")
+
+        # Make sure db_field doesn't contain any forbidden characters.
         if isinstance(self.db_field, six.string_types) and (
-            '.' in self.db_field or
-            '\0' in self.db_field or
-            self.db_field.startswith('$')
+            "." in self.db_field
+            or "\0" in self.db_field
+            or self.db_field.startswith("$")
         ):
             raise ValueError(
                 'field names cannot contain dots (".") or null characters '
@@ -95,15 +111,17 @@ class BaseField(object):
         # Detect and report conflicts between metadata and base properties.
         conflicts = set(dir(self)) & set(kwargs)
         if conflicts:
-            raise TypeError('%s already has attribute(s): %s' % (
-                self.__class__.__name__, ', '.join(conflicts)))
+            raise TypeError(
+                "%s already has attribute(s): %s"
+                % (self.__class__.__name__, ", ".join(conflicts))
+            )

         # Assign metadata to the instance
         # This efficient method is available because no __slots__ are defined.
         self.__dict__.update(kwargs)

         # Adjust the appropriate creation counter, and save our local copy.
-        if self.db_field == '_id':
+        if self.db_field == "_id":
             self.creation_counter = BaseField.auto_creation_counter
             BaseField.auto_creation_counter -= 1
         else:
@@ -121,11 +139,9 @@ class BaseField(object):
         return instance._data.get(self.name)

     def __set__(self, instance, value):
-        """Descriptor for assigning a value to a field in a document.
-        """
-
-        # If setting to None and there is a default
-        # Then set the value to the default value
+        """Descriptor for assigning a value to a field in a document."""
+        # If setting to None and there is a default value provided for this
+        # field, then set the value to the default value.
         if value is None:
             if self.null:
                 value = None
@@ -136,24 +152,29 @@ class BaseField(object):

         if instance._initialised:
             try:
-                if (self.name not in instance._data or
-                        instance._data[self.name] != value):
+                value_has_changed = (
+                    self.name not in instance._data
+                    or instance._data[self.name] != value
+                )
+                if value_has_changed:
                     instance._mark_as_changed(self.name)
             except Exception:
-                # Values cant be compared eg: naive and tz datetimes
-                # So mark it as changed
+                # Some values can't be compared and throw an error when we
+                # attempt to do so (e.g. tz-naive and tz-aware datetimes).
+                # Mark the field as changed in such cases.
                 instance._mark_as_changed(self.name)

-        EmbeddedDocument = _import_class('EmbeddedDocument')
+        EmbeddedDocument = _import_class("EmbeddedDocument")
         if isinstance(value, EmbeddedDocument):
             value._instance = weakref.proxy(instance)
         elif isinstance(value, (list, tuple)):
             for v in value:
                 if isinstance(v, EmbeddedDocument):
                     v._instance = weakref.proxy(instance)

         instance._data[self.name] = value

-    def error(self, message='', errors=None, field_name=None):
+    def error(self, message="", errors=None, field_name=None):
         """Raise a ValidationError."""
         field_name = field_name if field_name else self.name
         raise ValidationError(message, errors=errors, field_name=field_name)
@@ -170,11 +191,11 @@ class BaseField(object):
         """Helper method to call to_mongo with proper inputs."""
         f_inputs = self.to_mongo.__code__.co_varnames
         ex_vars = {}
-        if 'fields' in f_inputs:
-            ex_vars['fields'] = fields
+        if "fields" in f_inputs:
+            ex_vars["fields"] = fields

-        if 'use_db_field' in f_inputs:
-            ex_vars['use_db_field'] = use_db_field
+        if "use_db_field" in f_inputs:
+            ex_vars["use_db_field"] = use_db_field

         return self.to_mongo(value, **ex_vars)
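
A sketch of what this signature introspection buys: subclasses may declare `to_mongo` with or without the extra keyword arguments, and `_to_mongo_safe_call` only forwards what the override actually accepts (the field classes here are hypothetical):

class PlainField(BaseField):
    def to_mongo(self, value):
        return value

class FancyField(BaseField):
    def to_mongo(self, value, use_db_field=True, fields=None):
        return value

# co_varnames of PlainField.to_mongo lacks the extra names, so only the
# value is passed through; FancyField receives both keyword arguments.
PlainField()._to_mongo_safe_call(42)
FancyField()._to_mongo_safe_call(42, fields=["a"])
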
@@ -189,8 +210,8 @@ class BaseField(object):
         pass

     def _validate_choices(self, value):
-        Document = _import_class('Document')
-        EmbeddedDocument = _import_class('EmbeddedDocument')
+        Document = _import_class("Document")
+        EmbeddedDocument = _import_class("EmbeddedDocument")

         choice_list = self.choices
         if isinstance(next(iter(choice_list)), (list, tuple)):
@@ -201,13 +222,13 @@ class BaseField(object):
         if isinstance(value, (Document, EmbeddedDocument)):
             if not any(isinstance(value, c) for c in choice_list):
                 self.error(
-                    'Value must be an instance of %s' % (
-                        six.text_type(choice_list)
-                    )
+                    "Value must be an instance of %s" % (six.text_type(choice_list))
                 )
         # Choices which are types other than Documents
-        elif value not in choice_list:
-            self.error('Value must be one of %s' % six.text_type(choice_list))
+        else:
+            values = value if isinstance(value, (list, tuple)) else [value]
+            if len(set(values) - set(choice_list)):
+                self.error("Value must be one of %s" % six.text_type(choice_list))
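
The user-visible effect of the choices check, as a hedged sketch (model is illustrative):

from mongoengine import Document, StringField, ValidationError

class Shirt(Document):
    size = StringField(choices=("S", "M", "L"))

Shirt(size="M").validate()       # passes

try:
    Shirt(size="XS").validate()  # rejected by _validate_choices
except ValidationError:
    pass
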

     def _validate(self, value, **kwargs):
         # Check the Choices Constraint
@@ -217,11 +238,23 @@ class BaseField(object):
         # check validation argument
         if self.validation is not None:
             if callable(self.validation):
-                if not self.validation(value):
-                    self.error('Value does not match custom validation method')
+                try:
+                    # breaking change of 0.18
+                    # Get rid of True/False-type return for the validation method
+                    # in favor of having validation raising a ValidationError
+                    ret = self.validation(value)
+                    if ret is not None:
+                        raise DeprecatedError(
+                            "validation argument for `%s` must not return anything, "
+                            "it should raise a ValidationError if validation fails"
+                            % self.name
+                        )
+                except ValidationError as ex:
+                    self.error(str(ex))
             else:
-                raise ValueError('validation argument for "%s" must be a '
-                                 'callable.' % self.name)
+                raise ValueError(
+                    'validation argument for `"%s"` must be a ' "callable." % self.name
+                )

         self.validate(value, **kwargs)
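
The 0.18 breaking change above means a custom `validation` callable must raise `ValidationError` instead of returning a boolean; a sketch:

from mongoengine import Document, StringField
from mongoengine.errors import ValidationError

def validate_lowercase(value):
    # New style: raise on failure, return nothing on success.
    if value != value.lower():
        raise ValidationError("value must be lowercase")

class Tag(Document):
    name = StringField(validation=validate_lowercase)

Tag(name="ok").validate()        # passes

# An old-style callable returning True/False would now trigger
# DeprecatedError instead of being interpreted as a pass/fail flag.
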
@@ -255,28 +288,41 @@ class ComplexBaseField(BaseField):
             # Document class being used rather than a document object
             return self

-        ReferenceField = _import_class('ReferenceField')
-        GenericReferenceField = _import_class('GenericReferenceField')
-        EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
-        dereference = (self._auto_dereference and
-                       (self.field is None or isinstance(self.field,
-                        (GenericReferenceField, ReferenceField))))
+        ReferenceField = _import_class("ReferenceField")
+        GenericReferenceField = _import_class("GenericReferenceField")
+        EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField")

-        _dereference = _import_class('DeReference')()
+        auto_dereference = instance._fields[self.name]._auto_dereference

-        self._auto_dereference = instance._fields[self.name]._auto_dereference
-        if instance._initialised and dereference and instance._data.get(self.name):
+        dereference = auto_dereference and (
+            self.field is None
+            or isinstance(self.field, (GenericReferenceField, ReferenceField))
+        )
+
+        _dereference = _import_class("DeReference")()
+
+        if (
+            instance._initialised
+            and dereference
+            and instance._data.get(self.name)
+            and not getattr(instance._data[self.name], "_dereferenced", False)
+        ):
             instance._data[self.name] = _dereference(
-                instance._data.get(self.name), max_depth=1, instance=instance,
-                name=self.name
+                instance._data.get(self.name),
+                max_depth=1,
+                instance=instance,
+                name=self.name,
             )
+            if hasattr(instance._data[self.name], "_dereferenced"):
+                instance._data[self.name]._dereferenced = True

         value = super(ComplexBaseField, self).__get__(instance, owner)

         # Convert lists / values so we can watch for any changes on them
         if isinstance(value, (list, tuple)):
-            if (issubclass(type(self), EmbeddedDocumentListField) and
-                    not isinstance(value, EmbeddedDocumentList)):
+            if issubclass(type(self), EmbeddedDocumentListField) and not isinstance(
+                value, EmbeddedDocumentList
+            ):
                 value = EmbeddedDocumentList(value, instance, self.name)
             elif not isinstance(value, BaseList):
                 value = BaseList(value, instance, self.name)
@@ -285,12 +331,13 @@ class ComplexBaseField(BaseField):
             value = BaseDict(value, instance, self.name)
             instance._data[self.name] = value

-        if (self._auto_dereference and instance._initialised and
-                isinstance(value, (BaseList, BaseDict)) and
-                not value._dereferenced):
-            value = _dereference(
-                value, max_depth=1, instance=instance, name=self.name
-            )
+        if (
+            auto_dereference
+            and instance._initialised
+            and isinstance(value, (BaseList, BaseDict))
+            and not value._dereferenced
+        ):
+            value = _dereference(value, max_depth=1, instance=instance, name=self.name)
             value._dereferenced = True
             instance._data[self.name] = value

@@ -301,63 +348,72 @@ class ComplexBaseField(BaseField):
         if isinstance(value, six.string_types):
             return value

-        if hasattr(value, 'to_python'):
+        if hasattr(value, "to_python"):
             return value.to_python()

+        BaseDocument = _import_class("BaseDocument")
+        if isinstance(value, BaseDocument):
+            # Something is wrong, return the value as it is
+            return value
+
         is_list = False
-        if not hasattr(value, 'items'):
+        if not hasattr(value, "items"):
             try:
                 is_list = True
-                value = {k: v for k, v in enumerate(value)}
+                value = {idx: v for idx, v in enumerate(value)}
             except TypeError:  # Not iterable return the value
                 return value

         if self.field:
             self.field._auto_dereference = self._auto_dereference
-            value_dict = {key: self.field.to_python(item)
-                          for key, item in value.items()}
+            value_dict = {
+                key: self.field.to_python(item) for key, item in value.items()
+            }
         else:
-            Document = _import_class('Document')
+            Document = _import_class("Document")
             value_dict = {}
             for k, v in value.items():
                 if isinstance(v, Document):
                     # We need the id from the saved object to create the DBRef
                     if v.pk is None:
-                        self.error('You can only reference documents once they'
-                                   ' have been saved to the database')
+                        self.error(
+                            "You can only reference documents once they"
+                            " have been saved to the database"
+                        )
                     collection = v._get_collection_name()
                     value_dict[k] = DBRef(collection, v.pk)
-                elif hasattr(v, 'to_python'):
+                elif hasattr(v, "to_python"):
                     value_dict[k] = v.to_python()
                 else:
                     value_dict[k] = self.to_python(v)

         if is_list:  # Convert back to a list
-            return [v for _, v in sorted(value_dict.items(),
-                                         key=operator.itemgetter(0))]
+            return [
+                v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0))
+            ]
         return value_dict

     def to_mongo(self, value, use_db_field=True, fields=None):
         """Convert a Python type to a MongoDB-compatible type."""
-        Document = _import_class('Document')
-        EmbeddedDocument = _import_class('EmbeddedDocument')
-        GenericReferenceField = _import_class('GenericReferenceField')
+        Document = _import_class("Document")
+        EmbeddedDocument = _import_class("EmbeddedDocument")
+        GenericReferenceField = _import_class("GenericReferenceField")

         if isinstance(value, six.string_types):
             return value

-        if hasattr(value, 'to_mongo'):
+        if hasattr(value, "to_mongo"):
             if isinstance(value, Document):
                 return GenericReferenceField().to_mongo(value)
             cls = value.__class__
             val = value.to_mongo(use_db_field, fields)
             # If it's a document that is not inherited add _cls
             if isinstance(value, EmbeddedDocument):
-                val['_cls'] = cls.__name__
+                val["_cls"] = cls.__name__
             return val

         is_list = False
-        if not hasattr(value, 'items'):
+        if not hasattr(value, "items"):
             try:
                 is_list = True
                 value = {k: v for k, v in enumerate(value)}
@@ -367,48 +423,51 @@ class ComplexBaseField(BaseField):
         if self.field:
             value_dict = {
                 key: self.field._to_mongo_safe_call(item, use_db_field, fields)
-                for key, item in value.iteritems()
+                for key, item in iteritems(value)
             }
         else:
             value_dict = {}
-            for k, v in value.iteritems():
+            for k, v in iteritems(value):
                 if isinstance(v, Document):
                     # We need the id from the saved object to create the DBRef
                     if v.pk is None:
-                        self.error('You can only reference documents once they'
-                                   ' have been saved to the database')
+                        self.error(
+                            "You can only reference documents once they"
+                            " have been saved to the database"
+                        )

                     # If its a document that is not inheritable it won't have
                     # any _cls data so make it a generic reference allows
                     # us to dereference
-                    meta = getattr(v, '_meta', {})
-                    allow_inheritance = meta.get('allow_inheritance')
+                    meta = getattr(v, "_meta", {})
+                    allow_inheritance = meta.get("allow_inheritance")
                     if not allow_inheritance and not self.field:
                         value_dict[k] = GenericReferenceField().to_mongo(v)
                     else:
                         collection = v._get_collection_name()
                         value_dict[k] = DBRef(collection, v.pk)
-                elif hasattr(v, 'to_mongo'):
+                elif hasattr(v, "to_mongo"):
                     cls = v.__class__
                     val = v.to_mongo(use_db_field, fields)
                     # If it's a document that is not inherited add _cls
                     if isinstance(v, (Document, EmbeddedDocument)):
-                        val['_cls'] = cls.__name__
+                        val["_cls"] = cls.__name__
                     value_dict[k] = val
                 else:
                     value_dict[k] = self.to_mongo(v, use_db_field, fields)

         if is_list:  # Convert back to a list
-            return [v for _, v in sorted(value_dict.items(),
-                                         key=operator.itemgetter(0))]
+            return [
+                v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0))
+            ]
         return value_dict

     def validate(self, value):
         """If field is provided ensure the value is valid."""
         errors = {}
         if self.field:
-            if hasattr(value, 'iteritems') or hasattr(value, 'items'):
-                sequence = value.iteritems()
+            if hasattr(value, "iteritems") or hasattr(value, "items"):
+                sequence = iteritems(value)
             else:
                 sequence = enumerate(value)
             for k, v in sequence:
@@ -421,11 +480,10 @@ class ComplexBaseField(BaseField):

         if errors:
             field_class = self.field.__class__.__name__
-            self.error('Invalid %s item (%s)' % (field_class, value),
-                       errors=errors)
+            self.error("Invalid %s item (%s)" % (field_class, value), errors=errors)
         # Don't allow empty values if required
         if self.required and not value:
-            self.error('Field is required and cannot be empty')
+            self.error("Field is required and cannot be empty")

     def prepare_query_value(self, op, value):
         return self.to_mongo(value)
@@ -468,7 +526,7 @@ class ObjectIdField(BaseField):
         try:
             ObjectId(six.text_type(value))
         except Exception:
-            self.error('Invalid Object ID')
+            self.error("Invalid Object ID")

 class GeoJsonBaseField(BaseField):
@@ -478,14 +536,14 @@ class GeoJsonBaseField(BaseField):
     """

     _geo_index = pymongo.GEOSPHERE
-    _type = 'GeoBase'
+    _type = "GeoBase"

     def __init__(self, auto_index=True, *args, **kwargs):
         """
         :param bool auto_index: Automatically create a '2dsphere' index.\
             Defaults to `True`.
         """
-        self._name = '%sField' % self._type
+        self._name = "%sField" % self._type
         if not auto_index:
             self._geo_index = False
         super(GeoJsonBaseField, self).__init__(*args, **kwargs)
@@ -493,57 +551,58 @@ class GeoJsonBaseField(BaseField):
     def validate(self, value):
         """Validate the GeoJson object based on its type."""
         if isinstance(value, dict):
-            if set(value.keys()) == set(['type', 'coordinates']):
-                if value['type'] != self._type:
-                    self.error('%s type must be "%s"' %
-                               (self._name, self._type))
-                return self.validate(value['coordinates'])
+            if set(value.keys()) == {"type", "coordinates"}:
+                if value["type"] != self._type:
+                    self.error('%s type must be "%s"' % (self._name, self._type))
+                return self.validate(value["coordinates"])
             else:
-                self.error('%s can only accept a valid GeoJson dictionary'
-                           ' or lists of (x, y)' % self._name)
+                self.error(
+                    "%s can only accept a valid GeoJson dictionary"
+                    " or lists of (x, y)" % self._name
+                )
                 return
         elif not isinstance(value, (list, tuple)):
-            self.error('%s can only accept lists of [x, y]' % self._name)
+            self.error("%s can only accept lists of [x, y]" % self._name)
             return

-        validate = getattr(self, '_validate_%s' % self._type.lower())
+        validate = getattr(self, "_validate_%s" % self._type.lower())
         error = validate(value)
         if error:
             self.error(error)

     def _validate_polygon(self, value, top_level=True):
         if not isinstance(value, (list, tuple)):
-            return 'Polygons must contain list of linestrings'
+            return "Polygons must contain list of linestrings"

         # Quick and dirty validator
         try:
             value[0][0][0]
         except (TypeError, IndexError):
-            return 'Invalid Polygon must contain at least one valid linestring'
+            return "Invalid Polygon must contain at least one valid linestring"

         errors = []
         for val in value:
             error = self._validate_linestring(val, False)
             if not error and val[0] != val[-1]:
-                error = 'LineStrings must start and end at the same point'
+                error = "LineStrings must start and end at the same point"
             if error and error not in errors:
                 errors.append(error)
         if errors:
             if top_level:
-                return 'Invalid Polygon:\n%s' % ', '.join(errors)
+                return "Invalid Polygon:\n%s" % ", ".join(errors)
             else:
-                return '%s' % ', '.join(errors)
+                return "%s" % ", ".join(errors)

     def _validate_linestring(self, value, top_level=True):
         """Validate a linestring."""
         if not isinstance(value, (list, tuple)):
-            return 'LineStrings must contain list of coordinate pairs'
+            return "LineStrings must contain list of coordinate pairs"

         # Quick and dirty validator
         try:
             value[0][0]
         except (TypeError, IndexError):
-            return 'Invalid LineString must contain at least one valid point'
+            return "Invalid LineString must contain at least one valid point"

         errors = []
         for val in value:
@@ -552,29 +611,30 @@ class GeoJsonBaseField(BaseField):
             errors.append(error)
         if errors:
             if top_level:
-                return 'Invalid LineString:\n%s' % ', '.join(errors)
+                return "Invalid LineString:\n%s" % ", ".join(errors)
             else:
-                return '%s' % ', '.join(errors)
+                return "%s" % ", ".join(errors)

     def _validate_point(self, value):
         """Validate each set of coords"""
         if not isinstance(value, (list, tuple)):
-            return 'Points must be a list of coordinate pairs'
+            return "Points must be a list of coordinate pairs"
         elif not len(value) == 2:
-            return 'Value (%s) must be a two-dimensional point' % repr(value)
-        elif (not isinstance(value[0], (float, int)) or
-              not isinstance(value[1], (float, int))):
-            return 'Both values (%s) in point must be float or int' % repr(value)
+            return "Value (%s) must be a two-dimensional point" % repr(value)
+        elif not isinstance(value[0], (float, int)) or not isinstance(
+            value[1], (float, int)
+        ):
+            return "Both values (%s) in point must be float or int" % repr(value)

     def _validate_multipoint(self, value):
         if not isinstance(value, (list, tuple)):
-            return 'MultiPoint must be a list of Point'
+            return "MultiPoint must be a list of Point"

         # Quick and dirty validator
         try:
             value[0][0]
         except (TypeError, IndexError):
-            return 'Invalid MultiPoint must contain at least one valid point'
+            return "Invalid MultiPoint must contain at least one valid point"

         errors = []
         for point in value:
@@ -583,17 +643,17 @@ class GeoJsonBaseField(BaseField):
             errors.append(error)

         if errors:
-            return '%s' % ', '.join(errors)
+            return "%s" % ", ".join(errors)

     def _validate_multilinestring(self, value, top_level=True):
         if not isinstance(value, (list, tuple)):
-            return 'MultiLineString must be a list of LineString'
+            return "MultiLineString must be a list of LineString"

         # Quick and dirty validator
         try:
             value[0][0][0]
         except (TypeError, IndexError):
-            return 'Invalid MultiLineString must contain at least one valid linestring'
+            return "Invalid MultiLineString must contain at least one valid linestring"

         errors = []
         for linestring in value:
@@ -603,19 +663,19 @@ class GeoJsonBaseField(BaseField):

         if errors:
             if top_level:
-                return 'Invalid MultiLineString:\n%s' % ', '.join(errors)
+                return "Invalid MultiLineString:\n%s" % ", ".join(errors)
             else:
-                return '%s' % ', '.join(errors)
+                return "%s" % ", ".join(errors)

     def _validate_multipolygon(self, value):
         if not isinstance(value, (list, tuple)):
-            return 'MultiPolygon must be a list of Polygon'
+            return "MultiPolygon must be a list of Polygon"

         # Quick and dirty validator
         try:
             value[0][0][0][0]
         except (TypeError, IndexError):
-            return 'Invalid MultiPolygon must contain at least one valid Polygon'
+            return "Invalid MultiPolygon must contain at least one valid Polygon"

         errors = []
         for polygon in value:
@@ -624,9 +684,9 @@ class GeoJsonBaseField(BaseField):
             errors.append(error)

         if errors:
-            return 'Invalid MultiPolygon:\n%s' % ', '.join(errors)
+            return "Invalid MultiPolygon:\n%s" % ", ".join(errors)

     def to_mongo(self, value):
         if isinstance(value, dict):
             return value
-        return SON([('type', self._type), ('coordinates', value)])
+        return SON([("type", self._type), ("coordinates", value)])
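
A hedged sketch of the shapes this validation accepts, through the public geo fields:

from mongoengine import Document, PointField, PolygonField

class Place(Document):
    location = PointField()   # validated by _validate_point
    area = PolygonField()     # validated by _validate_polygon

Place(
    # Either a bare coordinate pair (floats or ints) ...
    location=[2.3522, 48.8566],
    # ... or a list of linestring rings; each ring must end where it
    # starts, as the LineString check above enforces.
    area=[[[0, 0], [0, 1], [1, 1], [0, 0]]],
).validate()

# A full GeoJSON dict also validates, but only with exactly these keys:
Place(location={"type": "Point", "coordinates": [2.3522, 48.8566]}).validate()
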
@@ -1,68 +1,75 @@
+import itertools
 import warnings

 import six
+from six import iteritems, itervalues

 from mongoengine.base.common import _document_registry
 from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
 from mongoengine.common import _import_class
 from mongoengine.errors import InvalidDocumentError
-from mongoengine.queryset import (DO_NOTHING, DoesNotExist,
-                                  MultipleObjectsReturned,
-                                  QuerySetManager)
+from mongoengine.queryset import (
+    DO_NOTHING,
+    DoesNotExist,
+    MultipleObjectsReturned,
+    QuerySetManager,
+)


-__all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')
+__all__ = ("DocumentMetaclass", "TopLevelDocumentMetaclass")


 class DocumentMetaclass(type):
     """Metaclass for all documents."""

-    def __new__(cls, name, bases, attrs):
-        flattened_bases = cls._get_bases(bases)
-        super_new = super(DocumentMetaclass, cls).__new__
+    # TODO lower complexity of this method
+    def __new__(mcs, name, bases, attrs):
+        flattened_bases = mcs._get_bases(bases)
+        super_new = super(DocumentMetaclass, mcs).__new__

         # If a base class just call super
-        metaclass = attrs.get('my_metaclass')
+        metaclass = attrs.get("my_metaclass")
         if metaclass and issubclass(metaclass, DocumentMetaclass):
-            return super_new(cls, name, bases, attrs)
+            return super_new(mcs, name, bases, attrs)

-        attrs['_is_document'] = attrs.get('_is_document', False)
-        attrs['_cached_reference_fields'] = []
+        attrs["_is_document"] = attrs.get("_is_document", False)
+        attrs["_cached_reference_fields"] = []

         # EmbeddedDocuments could have meta data for inheritance
-        if 'meta' in attrs:
-            attrs['_meta'] = attrs.pop('meta')
+        if "meta" in attrs:
+            attrs["_meta"] = attrs.pop("meta")

         # EmbeddedDocuments should inherit meta data
-        if '_meta' not in attrs:
+        if "_meta" not in attrs:
             meta = MetaDict()
             for base in flattened_bases[::-1]:
                 # Add any mixin metadata from plain objects
-                if hasattr(base, 'meta'):
+                if hasattr(base, "meta"):
                     meta.merge(base.meta)
-                elif hasattr(base, '_meta'):
+                elif hasattr(base, "_meta"):
                     meta.merge(base._meta)
-            attrs['_meta'] = meta
-            attrs['_meta']['abstract'] = False  # 789: EmbeddedDocument shouldn't inherit abstract
+            attrs["_meta"] = meta
+            attrs["_meta"][
+                "abstract"
+            ] = False  # 789: EmbeddedDocument shouldn't inherit abstract

         # If allow_inheritance is True, add a "_cls" string field to the attrs
-        if attrs['_meta'].get('allow_inheritance'):
-            StringField = _import_class('StringField')
-            attrs['_cls'] = StringField()
+        if attrs["_meta"].get("allow_inheritance"):
+            StringField = _import_class("StringField")
+            attrs["_cls"] = StringField()

         # Handle document Fields

         # Merge all fields from subclasses
         doc_fields = {}
         for base in flattened_bases[::-1]:
-            if hasattr(base, '_fields'):
+            if hasattr(base, "_fields"):
                 doc_fields.update(base._fields)

             # Standard object mixin - merge in any Fields
-            if not hasattr(base, '_meta'):
+            if not hasattr(base, "_meta"):
                 base_fields = {}
-                for attr_name, attr_value in base.__dict__.iteritems():
+                for attr_name, attr_value in iteritems(base.__dict__):
                     if not isinstance(attr_value, BaseField):
                         continue
                     attr_value.name = attr_name
@@ -74,7 +81,7 @@ class DocumentMetaclass(type):

         # Discover any document fields
         field_names = {}
-        for attr_name, attr_value in attrs.iteritems():
+        for attr_name, attr_value in iteritems(attrs):
             if not isinstance(attr_value, BaseField):
                 continue
             attr_value.name = attr_name
@@ -83,27 +90,31 @@ class DocumentMetaclass(type):
             doc_fields[attr_name] = attr_value

             # Count names to ensure no db_field redefinitions
-            field_names[attr_value.db_field] = field_names.get(
-                attr_value.db_field, 0) + 1
+            field_names[attr_value.db_field] = (
+                field_names.get(attr_value.db_field, 0) + 1
+            )

         # Ensure no duplicate db_fields
         duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
         if duplicate_db_fields:
-            msg = ('Multiple db_fields defined for: %s ' %
-                   ', '.join(duplicate_db_fields))
+            msg = "Multiple db_fields defined for: %s " % ", ".join(duplicate_db_fields)
             raise InvalidDocumentError(msg)

         # Set _fields and db_field maps
-        attrs['_fields'] = doc_fields
-        attrs['_db_field_map'] = {k: getattr(v, 'db_field', k)
-                                  for k, v in doc_fields.items()}
-        attrs['_reverse_db_field_map'] = {
-            v: k for k, v in attrs['_db_field_map'].items()
+        attrs["_fields"] = doc_fields
+        attrs["_db_field_map"] = {
+            k: getattr(v, "db_field", k) for k, v in doc_fields.items()
+        }
+        attrs["_reverse_db_field_map"] = {
+            v: k for k, v in attrs["_db_field_map"].items()
         }

-        attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
-            (v.creation_counter, v.name)
-            for v in doc_fields.itervalues()))
+        attrs["_fields_ordered"] = tuple(
+            i[1]
+            for i in sorted(
+                (v.creation_counter, v.name) for v in itervalues(doc_fields)
+            )
+        )

         #
         # Set document hierarchy
@@ -111,34 +122,37 @@ class DocumentMetaclass(type):
         superclasses = ()
         class_name = [name]
         for base in flattened_bases:
-            if (not getattr(base, '_is_base_cls', True) and
-                    not getattr(base, '_meta', {}).get('abstract', True)):
+            if not getattr(base, "_is_base_cls", True) and not getattr(
+                base, "_meta", {}
+            ).get("abstract", True):
                 # Collate hierarchy for _cls and _subclasses
                 class_name.append(base.__name__)

-            if hasattr(base, '_meta'):
+            if hasattr(base, "_meta"):
                 # Warn if allow_inheritance isn't set and prevent
                 # inheritance of classes where inheritance is set to False
-                allow_inheritance = base._meta.get('allow_inheritance')
-                if not allow_inheritance and not base._meta.get('abstract'):
-                    raise ValueError('Document %s may not be subclassed' %
-                                     base.__name__)
+                allow_inheritance = base._meta.get("allow_inheritance")
+                if not allow_inheritance and not base._meta.get("abstract"):
+                    raise ValueError(
+                        "Document %s may not be subclassed. "
+                        'To enable inheritance, use the "allow_inheritance" meta attribute.'
+                        % base.__name__
+                    )

         # Get superclasses from last base superclass
-        document_bases = [b for b in flattened_bases
-                          if hasattr(b, '_class_name')]
+        document_bases = [b for b in flattened_bases if hasattr(b, "_class_name")]
         if document_bases:
             superclasses = document_bases[0]._superclasses
-            superclasses += (document_bases[0]._class_name, )
+            superclasses += (document_bases[0]._class_name,)

-        _cls = '.'.join(reversed(class_name))
-        attrs['_class_name'] = _cls
-        attrs['_superclasses'] = superclasses
-        attrs['_subclasses'] = (_cls, )
-        attrs['_types'] = attrs['_subclasses']  # TODO depreciate _types
+        _cls = ".".join(reversed(class_name))
+        attrs["_class_name"] = _cls
+        attrs["_superclasses"] = superclasses
+        attrs["_subclasses"] = (_cls,)
+        attrs["_types"] = attrs["_subclasses"]  # TODO depreciate _types

         # Create the new_class
-        new_class = super_new(cls, name, bases, attrs)
+        new_class = super_new(mcs, name, bases, attrs)

         # Set _subclasses
         for base in document_bases:
@@ -146,8 +160,12 @@ class DocumentMetaclass(type):
             base._subclasses += (_cls,)
             base._types = base._subclasses  # TODO depreciate _types

-        (Document, EmbeddedDocument, DictField,
-         CachedReferenceField) = cls._import_classes()
+        (
+            Document,
+            EmbeddedDocument,
+            DictField,
+            CachedReferenceField,
+        ) = mcs._import_classes()

         if issubclass(new_class, Document):
             new_class._collection = None
@@ -166,86 +184,83 @@ class DocumentMetaclass(type):
         for val in new_class.__dict__.values():
             if isinstance(val, classmethod):
                 f = val.__get__(new_class)
-                if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
-                    f.__dict__.update({'im_func': getattr(f, '__func__')})
-                if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
-                    f.__dict__.update({'im_self': getattr(f, '__self__')})
+                if hasattr(f, "__func__") and not hasattr(f, "im_func"):
+                    f.__dict__.update({"im_func": getattr(f, "__func__")})
+                if hasattr(f, "__self__") and not hasattr(f, "im_self"):
+                    f.__dict__.update({"im_self": getattr(f, "__self__")})

         # Handle delete rules
-        for field in new_class._fields.itervalues():
+        for field in itervalues(new_class._fields):
             f = field
             if f.owner_document is None:
                 f.owner_document = new_class
-            delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
+            delete_rule = getattr(f, "reverse_delete_rule", DO_NOTHING)
             if isinstance(f, CachedReferenceField):

                 if issubclass(new_class, EmbeddedDocument):
-                    raise InvalidDocumentError('CachedReferenceFields is not '
-                                               'allowed in EmbeddedDocuments')
-                if not f.document_type:
                     raise InvalidDocumentError(
-                        'Document is not available to sync')
+                        "CachedReferenceFields is not allowed in EmbeddedDocuments"
+                    )

                 if f.auto_sync:
                     f.start_listener()

                 f.document_type._cached_reference_fields.append(f)

-            if isinstance(f, ComplexBaseField) and hasattr(f, 'field'):
-                delete_rule = getattr(f.field,
-                                      'reverse_delete_rule',
-                                      DO_NOTHING)
+            if isinstance(f, ComplexBaseField) and hasattr(f, "field"):
+                delete_rule = getattr(f.field, "reverse_delete_rule", DO_NOTHING)
                 if isinstance(f, DictField) and delete_rule != DO_NOTHING:
-                    msg = ('Reverse delete rules are not supported '
-                           'for %s (field: %s)' %
-                           (field.__class__.__name__, field.name))
+                    msg = (
+                        "Reverse delete rules are not supported "
+                        "for %s (field: %s)" % (field.__class__.__name__, field.name)
+                    )
                     raise InvalidDocumentError(msg)

                 f = field.field

             if delete_rule != DO_NOTHING:
                 if issubclass(new_class, EmbeddedDocument):
-                    msg = ('Reverse delete rules are not supported for '
-                           'EmbeddedDocuments (field: %s)' % field.name)
+                    msg = (
+                        "Reverse delete rules are not supported for "
+                        "EmbeddedDocuments (field: %s)" % field.name
+                    )
                    raise InvalidDocumentError(msg)
-                f.document_type.register_delete_rule(new_class,
-                                                     field.name, delete_rule)
+                f.document_type.register_delete_rule(new_class, field.name, delete_rule)

-            if (field.name and hasattr(Document, field.name) and
-                    EmbeddedDocument not in new_class.mro()):
-                msg = ('%s is a document method and not a valid '
-                       'field name' % field.name)
+            if (
+                field.name
+                and hasattr(Document, field.name)
+                and EmbeddedDocument not in new_class.mro()
+            ):
+                msg = "%s is a document method and not a valid field name" % field.name
                 raise InvalidDocumentError(msg)

         return new_class

     def add_to_class(self, name, value):
         setattr(self, name, value)

     @classmethod
-    def _get_bases(cls, bases):
+    def _get_bases(mcs, bases):
         if isinstance(bases, BasesTuple):
             return bases
         seen = []
-        bases = cls.__get_bases(bases)
+        bases = mcs.__get_bases(bases)
         unique_bases = (b for b in bases if not (b in seen or seen.append(b)))
         return BasesTuple(unique_bases)

     @classmethod
-    def __get_bases(cls, bases):
+    def __get_bases(mcs, bases):
         for base in bases:
             if base is object:
                 continue
             yield base
-            for child_base in cls.__get_bases(base.__bases__):
+            for child_base in mcs.__get_bases(base.__bases__):
                 yield child_base

     @classmethod
-    def _import_classes(cls):
-        Document = _import_class('Document')
-        EmbeddedDocument = _import_class('EmbeddedDocument')
-        DictField = _import_class('DictField')
-        CachedReferenceField = _import_class('CachedReferenceField')
+    def _import_classes(mcs):
+        Document = _import_class("Document")
+        EmbeddedDocument = _import_class("EmbeddedDocument")
+        DictField = _import_class("DictField")
+        CachedReferenceField = _import_class("CachedReferenceField")
         return Document, EmbeddedDocument, DictField, CachedReferenceField

@@ -254,193 +269,214 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
     collection in the database.
     """

-    def __new__(cls, name, bases, attrs):
-        flattened_bases = cls._get_bases(bases)
-        super_new = super(TopLevelDocumentMetaclass, cls).__new__
+    def __new__(mcs, name, bases, attrs):
+        flattened_bases = mcs._get_bases(bases)
+        super_new = super(TopLevelDocumentMetaclass, mcs).__new__

         # Set default _meta data if base class, otherwise get user defined meta
-        if attrs.get('my_metaclass') == TopLevelDocumentMetaclass:
+        if attrs.get("my_metaclass") == TopLevelDocumentMetaclass:
             # defaults
-            attrs['_meta'] = {
-                'abstract': True,
-                'max_documents': None,
-                'max_size': None,
-                'ordering': [],  # default ordering applied at runtime
-                'indexes': [],  # indexes to be ensured at runtime
-                'id_field': None,
-                'index_background': False,
-                'index_drop_dups': False,
-                'index_opts': None,
-                'delete_rules': None,
+            attrs["_meta"] = {
+                "abstract": True,
+                "max_documents": None,
+                "max_size": None,
+                "ordering": [],  # default ordering applied at runtime
+                "indexes": [],  # indexes to be ensured at runtime
+                "id_field": None,
+                "index_background": False,
+                "index_drop_dups": False,
+                "index_opts": None,
+                "delete_rules": None,
                 # allow_inheritance can be True, False, and None. True means
                 # "allow inheritance", False means "don't allow inheritance",
                 # None means "do whatever your parent does, or don't allow
                 # inheritance if you're a top-level class".
-                'allow_inheritance': None,
+                "allow_inheritance": None,
             }
-            attrs['_is_base_cls'] = True
-            attrs['_meta'].update(attrs.get('meta', {}))
+            attrs["_is_base_cls"] = True
+            attrs["_meta"].update(attrs.get("meta", {}))
         else:
-            attrs['_meta'] = attrs.get('meta', {})
+            attrs["_meta"] = attrs.get("meta", {})
             # Explicitly set abstract to false unless set
-            attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False)
-            attrs['_is_base_cls'] = False
+            attrs["_meta"]["abstract"] = attrs["_meta"].get("abstract", False)
+            attrs["_is_base_cls"] = False

         # Set flag marking as document class - as opposed to an object mixin
-        attrs['_is_document'] = True
+        attrs["_is_document"] = True

         # Ensure queryset_class is inherited
-        if 'objects' in attrs:
-            manager = attrs['objects']
-            if hasattr(manager, 'queryset_class'):
-                attrs['_meta']['queryset_class'] = manager.queryset_class
+        if "objects" in attrs:
+            manager = attrs["objects"]
+            if hasattr(manager, "queryset_class"):
+                attrs["_meta"]["queryset_class"] = manager.queryset_class

         # Clean up top level meta
-        if 'meta' in attrs:
-            del attrs['meta']
+        if "meta" in attrs:
+            del attrs["meta"]

         # Find the parent document class
-        parent_doc_cls = [b for b in flattened_bases
-                          if b.__class__ == TopLevelDocumentMetaclass]
+        parent_doc_cls = [
+            b for b in flattened_bases if b.__class__ == TopLevelDocumentMetaclass
+        ]
         parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0]

         # Prevent classes setting collection different to their parents
         # If parent wasn't an abstract class
-        if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and
-                not parent_doc_cls._meta.get('abstract', True)):
-            msg = 'Trying to set a collection on a subclass (%s)' % name
+        if (
+            parent_doc_cls
+            and "collection" in attrs.get("_meta", {})
+            and not parent_doc_cls._meta.get("abstract", True)
+        ):
+            msg = "Trying to set a collection on a subclass (%s)" % name
             warnings.warn(msg, SyntaxWarning)
-            del attrs['_meta']['collection']
+            del attrs["_meta"]["collection"]

         # Ensure abstract documents have abstract bases
-        if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
-            if (parent_doc_cls and
-                    not parent_doc_cls._meta.get('abstract', False)):
-                msg = 'Abstract document cannot have non-abstract base'
+        if attrs.get("_is_base_cls") or attrs["_meta"].get("abstract"):
+            if parent_doc_cls and not parent_doc_cls._meta.get("abstract", False):
+                msg = "Abstract document cannot have non-abstract base"
                 raise ValueError(msg)
-            return super_new(cls, name, bases, attrs)
+            return super_new(mcs, name, bases, attrs)

         # Merge base class metas.
         # Uses a special MetaDict that handles various merging rules
         meta = MetaDict()
         for base in flattened_bases[::-1]:
             # Add any mixin metadata from plain objects
-            if hasattr(base, 'meta'):
+            if hasattr(base, "meta"):
                 meta.merge(base.meta)
-            elif hasattr(base, '_meta'):
+            elif hasattr(base, "_meta"):
                 meta.merge(base._meta)

             # Set collection in the meta if its callable
-            if (getattr(base, '_is_document', False) and
-                    not base._meta.get('abstract')):
-                collection = meta.get('collection', None)
+            if getattr(base, "_is_document", False) and not base._meta.get("abstract"):
+                collection = meta.get("collection", None)
                 if callable(collection):
-                    meta['collection'] = collection(base)
+                    meta["collection"] = collection(base)

-        meta.merge(attrs.get('_meta', {}))  # Top level meta
+        meta.merge(attrs.get("_meta", {}))  # Top level meta

         # Only simple classes (i.e. direct subclasses of Document) may set
# allow_inheritance to False. If the base Document allows inheritance,
|
||||
# none of its subclasses can override allow_inheritance to False.
|
||||
simple_class = all([b._meta.get('abstract')
|
||||
for b in flattened_bases if hasattr(b, '_meta')])
|
||||
simple_class = all(
|
||||
[b._meta.get("abstract") for b in flattened_bases if hasattr(b, "_meta")]
|
||||
)
|
||||
if (
|
||||
not simple_class and
|
||||
meta['allow_inheritance'] is False and
|
||||
not meta['abstract']
|
||||
not simple_class
|
||||
and meta["allow_inheritance"] is False
|
||||
and not meta["abstract"]
|
||||
):
|
||||
raise ValueError('Only direct subclasses of Document may set '
|
||||
'"allow_inheritance" to False')
|
||||
raise ValueError(
|
||||
"Only direct subclasses of Document may set "
|
||||
'"allow_inheritance" to False'
|
||||
)
|
||||
|
||||
# Set default collection name
|
||||
if 'collection' not in meta:
|
||||
meta['collection'] = ''.join('_%s' % c if c.isupper() else c
|
||||
for c in name).strip('_').lower()
|
||||
attrs['_meta'] = meta
|
||||
if "collection" not in meta:
|
||||
meta["collection"] = (
|
||||
"".join("_%s" % c if c.isupper() else c for c in name)
|
||||
.strip("_")
|
||||
.lower()
|
||||
)
|
||||
attrs["_meta"] = meta
|
||||
|
||||
# Call super and get the new class
|
||||
new_class = super_new(cls, name, bases, attrs)
|
||||
new_class = super_new(mcs, name, bases, attrs)
|
||||
|
||||
meta = new_class._meta
|
||||
|
||||
# Set index specifications
|
||||
meta['index_specs'] = new_class._build_index_specs(meta['indexes'])
|
||||
meta["index_specs"] = new_class._build_index_specs(meta["indexes"])
|
||||
|
||||
# If collection is a callable - call it and set the value
|
||||
collection = meta.get('collection')
|
||||
collection = meta.get("collection")
|
||||
if callable(collection):
|
||||
new_class._meta['collection'] = collection(new_class)
|
||||
new_class._meta["collection"] = collection(new_class)
|
||||
|
||||
# Provide a default queryset unless exists or one has been set
|
||||
if 'objects' not in dir(new_class):
|
||||
if "objects" not in dir(new_class):
|
||||
new_class.objects = QuerySetManager()
|
||||
|
||||
# Validate the fields and set primary key if needed
|
||||
for field_name, field in new_class._fields.iteritems():
|
||||
for field_name, field in iteritems(new_class._fields):
|
||||
if field.primary_key:
|
||||
# Ensure only one primary key is set
|
||||
current_pk = new_class._meta.get('id_field')
|
||||
current_pk = new_class._meta.get("id_field")
|
||||
if current_pk and current_pk != field_name:
|
||||
raise ValueError('Cannot override primary key field')
|
||||
raise ValueError("Cannot override primary key field")
|
||||
|
||||
# Set primary key
|
||||
if not current_pk:
|
||||
new_class._meta['id_field'] = field_name
|
||||
new_class._meta["id_field"] = field_name
|
||||
new_class.id = field
|
||||
|
||||
# Set primary key if not defined by the document
|
||||
new_class._auto_id_field = getattr(parent_doc_cls,
|
||||
'_auto_id_field', False)
|
||||
if not new_class._meta.get('id_field'):
|
||||
# After 0.10, find not existing names, instead of overwriting
|
||||
id_name, id_db_name = cls.get_auto_id_names(new_class)
|
||||
new_class._auto_id_field = True
|
||||
new_class._meta['id_field'] = id_name
|
||||
# If the document doesn't explicitly define a primary key field, create
|
||||
# one. Make it an ObjectIdField and give it a non-clashing name ("id"
|
||||
# by default, but can be different if that one's taken).
|
||||
if not new_class._meta.get("id_field"):
|
||||
id_name, id_db_name = mcs.get_auto_id_names(new_class)
|
||||
new_class._meta["id_field"] = id_name
|
||||
new_class._fields[id_name] = ObjectIdField(db_field=id_db_name)
|
||||
new_class._fields[id_name].name = id_name
|
||||
new_class.id = new_class._fields[id_name]
|
||||
new_class._db_field_map[id_name] = id_db_name
|
||||
new_class._reverse_db_field_map[id_db_name] = id_name
|
||||
# Prepend id field to _fields_ordered
|
||||
new_class._fields_ordered = (id_name, ) + new_class._fields_ordered
|
||||
|
||||
# Merge in exceptions with parent hierarchy
|
||||
# Prepend the ID field to _fields_ordered (so that it's *always*
|
||||
# the first field).
|
||||
new_class._fields_ordered = (id_name,) + new_class._fields_ordered
|
||||
|
||||
# Merge in exceptions with parent hierarchy.
|
||||
exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
|
||||
module = attrs.get('__module__')
|
||||
module = attrs.get("__module__")
|
||||
for exc in exceptions_to_merge:
|
||||
name = exc.__name__
|
||||
parents = tuple(getattr(base, name) for base in flattened_bases
|
||||
if hasattr(base, name)) or (exc,)
|
||||
# Create new exception and set to new_class
|
||||
exception = type(name, parents, {'__module__': module})
|
||||
parents = tuple(
|
||||
getattr(base, name) for base in flattened_bases if hasattr(base, name)
|
||||
) or (exc,)
|
||||
|
||||
# Create a new exception and set it as an attribute on the new
|
||||
# class.
|
||||
exception = type(name, parents, {"__module__": module})
|
||||
setattr(new_class, name, exception)
|
||||
|
||||
return new_class
|
||||
|
||||
@classmethod
|
||||
def get_auto_id_names(cls, new_class):
|
||||
id_name, id_db_name = ('id', '_id')
|
||||
if id_name not in new_class._fields and \
|
||||
id_db_name not in (v.db_field for v in new_class._fields.values()):
|
||||
def get_auto_id_names(mcs, new_class):
|
||||
"""Find a name for the automatic ID field for the given new class.
|
||||
|
||||
Return a two-element tuple where the first item is the field name (i.e.
|
||||
the attribute name on the object) and the second element is the DB
|
||||
field name (i.e. the name of the key stored in MongoDB).
|
||||
|
||||
Defaults to ('id', '_id'), or generates a non-clashing name in the form
|
||||
of ('auto_id_X', '_auto_id_X') if the default name is already taken.
|
||||
"""
|
||||
id_name, id_db_name = ("id", "_id")
|
||||
existing_fields = {field_name for field_name in new_class._fields}
|
||||
existing_db_fields = {v.db_field for v in new_class._fields.values()}
|
||||
if id_name not in existing_fields and id_db_name not in existing_db_fields:
|
||||
return id_name, id_db_name
|
||||
id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0
|
||||
while id_name in new_class._fields or \
|
||||
id_db_name in (v.db_field for v in new_class._fields.values()):
|
||||
id_name = '{0}_{1}'.format(id_basename, i)
|
||||
id_db_name = '{0}_{1}'.format(id_db_basename, i)
|
||||
i += 1
|
||||
return id_name, id_db_name
|
||||
|
||||
id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0)
|
||||
for i in itertools.count():
|
||||
id_name = "{0}_{1}".format(id_basename, i)
|
||||
id_db_name = "{0}_{1}".format(id_db_basename, i)
|
||||
if id_name not in existing_fields and id_db_name not in existing_db_fields:
|
||||
return id_name, id_db_name
|
||||
|
||||
|
||||
class MetaDict(dict):
|
||||
"""Custom dictionary for meta classes.
|
||||
Handles the merging of set indexes
|
||||
"""
|
||||
_merge_options = ('indexes',)
|
||||
|
||||
_merge_options = ("indexes",)
|
||||
|
||||
def merge(self, new_options):
|
||||
for k, v in new_options.iteritems():
|
||||
for k, v in iteritems(new_options):
|
||||
if k in self._merge_options:
|
||||
self[k] = self.get(k, []) + v
|
||||
else:
|
||||
@@ -449,4 +485,5 @@ class MetaDict(dict):
|
||||
|
||||
class BasesTuple(tuple):
|
||||
"""Special class to handle introspection of bases tuple in __new__"""
|
||||
|
||||
pass
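
# Aside (not part of the diff): what the metaclass above buys you in practice.
# A sketch; `BlogPost` is illustrative, and no database connection is needed
# just to define the class.
from mongoengine import Document, StringField

class BlogPost(Document):
    title = StringField()

# The default collection name is the snake_cased class name, and an
# ObjectIdField primary key named "id" (stored as "_id") is added by
# get_auto_id_names() because no field declared primary_key=True.
assert BlogPost._get_collection_name() == "blog_post"
assert BlogPost._meta["id_field"] == "id"
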
 22  mongoengine/base/utils.py  (new file)
@@ -0,0 +1,22 @@
+import re
+
+
+class LazyRegexCompiler(object):
+    """Descriptor to allow lazy compilation of regex"""
+
+    def __init__(self, pattern, flags=0):
+        self._pattern = pattern
+        self._flags = flags
+        self._compiled_regex = None
+
+    @property
+    def compiled_regex(self):
+        if self._compiled_regex is None:
+            self._compiled_regex = re.compile(self._pattern, self._flags)
+        return self._compiled_regex
+
+    def __get__(self, instance, owner):
+        return self.compiled_regex
+
+    def __set__(self, instance, value):
+        raise AttributeError("Can not set attribute LazyRegexCompiler")
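
# Aside (not part of the diff): a sketch of how a descriptor like
# LazyRegexCompiler is typically used — the pattern is compiled on first
# attribute access, not at class-definition time. `EmailLike` is illustrative.
class EmailLike(object):
    USER_PART = LazyRegexCompiler(r"^[^@]+$")

assert EmailLike.USER_PART.match("alice") is not None  # triggers compilation
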
@@ -19,38 +19,44 @@ def _import_class(cls_name):
     if cls_name in _class_registry_cache:
         return _class_registry_cache.get(cls_name)

-    doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument',
-                   'MapReduceDocument')
+    doc_classes = (
+        "Document",
+        "DynamicEmbeddedDocument",
+        "EmbeddedDocument",
+        "MapReduceDocument",
+    )

     # Field Classes
     if not _field_list_cache:
         from mongoengine.fields import __all__ as fields
+
         _field_list_cache.extend(fields)
         from mongoengine.base.fields import __all__ as fields
+
         _field_list_cache.extend(fields)

     field_classes = _field_list_cache

     queryset_classes = ('OperationError',)
-    deref_classes = ('DeReference',)
+    deref_classes = ("DeReference",)

-    if cls_name == 'BaseDocument':
+    if cls_name == "BaseDocument":
         from mongoengine.base import document as module
-        import_classes = ['BaseDocument']
+        import_classes = ["BaseDocument"]
     elif cls_name in doc_classes:
         from mongoengine import document as module
         import_classes = doc_classes
     elif cls_name in field_classes:
         from mongoengine import fields as module
         import_classes = field_classes
     elif cls_name in queryset_classes:
         from mongoengine import queryset as module
         import_classes = queryset_classes
     elif cls_name in deref_classes:
         from mongoengine import dereference as module
         import_classes = deref_classes
     else:
-        raise ValueError('No import set for: ' % cls_name)
+        raise ValueError("No import set for: %s" % cls_name)

     for cls in import_classes:
         _class_registry_cache[cls] = getattr(module, cls)
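
# Aside (not part of the diff): `_import_class` memoizes lookups in
# `_class_registry_cache` to dodge circular imports, so repeated calls are
# cheap. A sketch, assuming a working mongoengine install:
from mongoengine.common import _import_class

Document = _import_class("Document")          # first call fills the cache
assert _import_class("Document") is Document  # later calls hit the cache
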
@@ -1,74 +1,96 @@
 from pymongo import MongoClient, ReadPreference, uri_parser
+from pymongo.database import _check_name
 import six

-from mongoengine.python_support import IS_PYMONGO_3

-__all__ = ['MongoEngineConnectionError', 'connect', 'register_connection',
-           'DEFAULT_CONNECTION_NAME']
+__all__ = [
+    "DEFAULT_CONNECTION_NAME",
+    "DEFAULT_DATABASE_NAME",
+    "ConnectionFailure",
+    "connect",
+    "disconnect",
+    "disconnect_all",
+    "get_connection",
+    "get_db",
+    "register_connection",
+]


-DEFAULT_CONNECTION_NAME = 'default'
-
-if IS_PYMONGO_3:
-    READ_PREFERENCE = ReadPreference.PRIMARY
-else:
-    from pymongo import MongoReplicaSetClient
-    READ_PREFERENCE = False
-
-
-class MongoEngineConnectionError(Exception):
-    """Error raised when the database connection can't be established or
-    when a connection with a requested alias can't be retrieved.
-    """
-    pass
+DEFAULT_CONNECTION_NAME = "default"
+DEFAULT_DATABASE_NAME = "test"
+DEFAULT_HOST = "localhost"
+DEFAULT_PORT = 27017

 _connection_settings = {}
 _connections = {}
 _dbs = {}

+READ_PREFERENCE = ReadPreference.PRIMARY

-def register_connection(alias, name=None, host=None, port=None,
-                        read_preference=READ_PREFERENCE,
-                        username=None, password=None,
-                        authentication_source=None,
-                        authentication_mechanism=None,
-                        **kwargs):
-    """Add a connection.
-
-    :param alias: the name that will be used to refer to this connection
-        throughout MongoEngine
-    :param name: the name of the specific database to use
-    :param host: the host name of the :program:`mongod` instance to connect to
-    :param port: the port that the :program:`mongod` instance is running on
-    :param read_preference: The read preference for the collection
-       ** Added pymongo 2.1
-    :param username: username to authenticate with
-    :param password: password to authenticate with
-    :param authentication_source: database to authenticate against
-    :param authentication_mechanism: database authentication mechanisms.
+
+class ConnectionFailure(Exception):
+    """Error raised when the database connection can't be established or
+    when a connection with a requested alias can't be retrieved.
+    """
+
+    pass
+
+
+def _check_db_name(name):
+    """Check if a database name is valid.
+    This functionality is copied from pymongo Database class constructor.
+    """
+    if not isinstance(name, six.string_types):
+        raise TypeError("name must be an instance of %s" % six.string_types)
+    elif name != "$external":
+        _check_name(name)
+
+
+def _get_connection_settings(
+    db=None,
+    name=None,
+    host=None,
+    port=None,
+    read_preference=READ_PREFERENCE,
+    username=None,
+    password=None,
+    authentication_source=None,
+    authentication_mechanism=None,
+    **kwargs
+):
+    """Get the connection settings as a dict
+
+    :param db: the name of the database to use, for compatibility with connect
+    :param name: the name of the specific database to use
+    :param host: the host name of the :program:`mongod` instance to connect to
+    :param port: the port that the :program:`mongod` instance is running on
+    :param read_preference: The read preference for the collection
+    :param username: username to authenticate with
+    :param password: password to authenticate with
+    :param authentication_source: database to authenticate against
+    :param authentication_mechanism: database authentication mechanisms.
         By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
         MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
     :param is_mock: explicitly use mongomock for this connection
         (can also be done by using `mongomock://` as db host prefix)
     :param kwargs: ad-hoc parameters to be passed into the pymongo driver,
         for example maxpoolsize, tz_aware, etc. See the documentation
         for pymongo's `MongoClient` for a full list.

     .. versionchanged:: 0.10.6 - added mongomock support
     """
     conn_settings = {
-        'name': name or 'test',
-        'host': host or 'localhost',
-        'port': port or 27017,
-        'read_preference': read_preference,
-        'username': username,
-        'password': password,
-        'authentication_source': authentication_source,
-        'authentication_mechanism': authentication_mechanism
+        "name": name or db or DEFAULT_DATABASE_NAME,
+        "host": host or DEFAULT_HOST,
+        "port": port or DEFAULT_PORT,
+        "read_preference": read_preference,
+        "username": username,
+        "password": password,
+        "authentication_source": authentication_source,
+        "authentication_mechanism": authentication_mechanism,
     }

-    conn_host = conn_settings['host']
+    _check_db_name(conn_settings["name"])
+    conn_host = conn_settings["host"]

     # Host can be a list or a string, so if string, force to a list.
     if isinstance(conn_host, six.string_types):
@@ -78,51 +100,152 @@ def register_connection(alias, name=None, host=None, port=None,
     for entity in conn_host:

         # Handle Mongomock
-        if entity.startswith('mongomock://'):
-            conn_settings['is_mock'] = True
+        if entity.startswith("mongomock://"):
+            conn_settings["is_mock"] = True
             # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://`
-            resolved_hosts.append(entity.replace('mongomock://', 'mongodb://', 1))
+            new_entity = entity.replace("mongomock://", "mongodb://", 1)
+            resolved_hosts.append(new_entity)
+
+            uri_dict = uri_parser.parse_uri(new_entity)
+
+            database = uri_dict.get("database")
+            if database:
+                conn_settings["name"] = database

         # Handle URI style connections, only updating connection params which
         # were explicitly specified in the URI.
-        elif '://' in entity:
+        elif "://" in entity:
             uri_dict = uri_parser.parse_uri(entity)
             resolved_hosts.append(entity)

-            if uri_dict.get('database'):
-                conn_settings['name'] = uri_dict.get('database')
+            database = uri_dict.get("database")
+            if database:
+                conn_settings["name"] = database

-            for param in ('read_preference', 'username', 'password'):
+            for param in ("read_preference", "username", "password"):
                 if uri_dict.get(param):
                     conn_settings[param] = uri_dict[param]

-            uri_options = uri_dict['options']
-            if 'replicaset' in uri_options:
-                conn_settings['replicaSet'] = uri_options['replicaset']
-            if 'authsource' in uri_options:
-                conn_settings['authentication_source'] = uri_options['authsource']
-            if 'authmechanism' in uri_options:
-                conn_settings['authentication_mechanism'] = uri_options['authmechanism']
+            uri_options = uri_dict["options"]
+            if "replicaset" in uri_options:
+                conn_settings["replicaSet"] = uri_options["replicaset"]
+            if "authsource" in uri_options:
+                conn_settings["authentication_source"] = uri_options["authsource"]
+            if "authmechanism" in uri_options:
+                conn_settings["authentication_mechanism"] = uri_options["authmechanism"]
+            if "readpreference" in uri_options:
+                read_preferences = (
+                    ReadPreference.NEAREST,
+                    ReadPreference.PRIMARY,
+                    ReadPreference.PRIMARY_PREFERRED,
+                    ReadPreference.SECONDARY,
+                    ReadPreference.SECONDARY_PREFERRED,
+                )
+
+                # Starting with PyMongo v3.5, the "readpreference" option is
+                # returned as a string (e.g. "secondaryPreferred") and not an
+                # int (e.g. 3).
+                # TODO simplify the code below once we drop support for
+                # PyMongo v3.4.
+                read_pf_mode = uri_options["readpreference"]
+                if isinstance(read_pf_mode, six.string_types):
+                    read_pf_mode = read_pf_mode.lower()
+                for preference in read_preferences:
+                    if (
+                        preference.name.lower() == read_pf_mode
+                        or preference.mode == read_pf_mode
+                    ):
+                        conn_settings["read_preference"] = preference
+                        break
         else:
             resolved_hosts.append(entity)
-    conn_settings['host'] = resolved_hosts
+    conn_settings["host"] = resolved_hosts

     # Deprecated parameters that should not be passed on
-    kwargs.pop('slaves', None)
-    kwargs.pop('is_slave', None)
+    kwargs.pop("slaves", None)
+    kwargs.pop("is_slave", None)

     conn_settings.update(kwargs)
     return conn_settings
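
# Aside (not part of the diff): a sketch of what the settings resolution above
# produces. A `mongomock://` host flips `is_mock` and is rewritten to
# `mongodb://`, and a database name embedded in the URI overrides the default.
# `_get_connection_settings` is a private helper, so this is illustrative only.
from mongoengine.connection import _get_connection_settings

settings = _get_connection_settings(host="mongomock://localhost/testdb")
assert settings["is_mock"] is True
assert settings["host"] == ["mongodb://localhost/testdb"]
assert settings["name"] == "testdb"
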
+def register_connection(
+    alias,
+    db=None,
+    name=None,
+    host=None,
+    port=None,
+    read_preference=READ_PREFERENCE,
+    username=None,
+    password=None,
+    authentication_source=None,
+    authentication_mechanism=None,
+    **kwargs
+):
+    """Register the connection settings.
+
+    :param alias: the name that will be used to refer to this connection
+        throughout MongoEngine
+    :param db: the name of the database to use, for compatibility with connect
+    :param name: the name of the specific database to use
+    :param host: the host name of the :program:`mongod` instance to connect to
+    :param port: the port that the :program:`mongod` instance is running on
+    :param read_preference: The read preference for the collection
+    :param username: username to authenticate with
+    :param password: password to authenticate with
+    :param authentication_source: database to authenticate against
+    :param authentication_mechanism: database authentication mechanisms.
+        By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
+        MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
+    :param is_mock: explicitly use mongomock for this connection
+        (can also be done by using `mongomock://` as db host prefix)
+    :param kwargs: ad-hoc parameters to be passed into the pymongo driver,
+        for example maxpoolsize, tz_aware, etc. See the documentation
+        for pymongo's `MongoClient` for a full list.
+
+    .. versionchanged:: 0.10.6 - added mongomock support
+    """
+    conn_settings = _get_connection_settings(
+        db=db,
+        name=name,
+        host=host,
+        port=port,
+        read_preference=read_preference,
+        username=username,
+        password=password,
+        authentication_source=authentication_source,
+        authentication_mechanism=authentication_mechanism,
+        **kwargs
+    )
+    _connection_settings[alias] = conn_settings
+
+
+def disconnect(alias=DEFAULT_CONNECTION_NAME):
+    """Close the connection with a given alias."""
+    from mongoengine.base.common import _get_documents_by_db
+    from mongoengine import Document
+
+    if alias in _connections:
+        get_connection(alias=alias).close()
+        del _connections[alias]
+
+    if alias in _dbs:
+        # Detach all cached collections in Documents
+        for doc_cls in _get_documents_by_db(alias, DEFAULT_CONNECTION_NAME):
+            if issubclass(doc_cls, Document):  # Skip EmbeddedDocument
+                doc_cls._disconnect()
+
+        del _dbs[alias]
+
+    if alias in _connection_settings:
+        del _connection_settings[alias]
+
+
+def disconnect_all():
+    """Close all registered database."""
+    for alias in list(_connections.keys()):
+        disconnect(alias)


 def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
     """Return a connection with a given alias."""
@@ -137,84 +260,93 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
         return _connections[alias]

     # Validate that the requested alias exists in the _connection_settings.
-    # Raise MongoEngineConnectionError if it doesn't.
+    # Raise ConnectionFailure if it doesn't.
     if alias not in _connection_settings:
         if alias == DEFAULT_CONNECTION_NAME:
-            msg = 'You have not defined a default connection'
+            msg = "You have not defined a default connection"
         else:
             msg = 'Connection with alias "%s" has not been defined' % alias
-        raise MongoEngineConnectionError(msg)
+        raise ConnectionFailure(msg)

     def _clean_settings(settings_dict):
-        irrelevant_fields = set([
-            'name', 'username', 'password', 'authentication_source',
-            'authentication_mechanism'
-        ])
-        return {
-            k: v for k, v in settings_dict.items()
-            if k not in irrelevant_fields
-        }
+        irrelevant_fields_set = {
+            "name",
+            "username",
+            "password",
+            "authentication_source",
+            "authentication_mechanism",
+        }
+        return {
+            k: v for k, v in settings_dict.items() if k not in irrelevant_fields_set
+        }
+
+    raw_conn_settings = _connection_settings[alias].copy()

     # Retrieve a copy of the connection settings associated with the requested
     # alias and remove the database name and authentication info (we don't
     # care about them at this point).
-    conn_settings = _clean_settings(_connection_settings[alias].copy())
+    conn_settings = _clean_settings(raw_conn_settings)

     # Determine if we should use PyMongo's or mongomock's MongoClient.
-    is_mock = conn_settings.pop('is_mock', False)
+    is_mock = conn_settings.pop("is_mock", False)
     if is_mock:
         try:
             import mongomock
         except ImportError:
-            raise RuntimeError('You need mongomock installed to mock '
-                               'MongoEngine.')
+            raise RuntimeError("You need mongomock installed to mock MongoEngine.")
         connection_class = mongomock.MongoClient
     else:
         connection_class = MongoClient

-        # For replica set connections with PyMongo 2.x, use
-        # MongoReplicaSetClient.
-        # TODO remove this once we stop supporting PyMongo 2.x.
-        if 'replicaSet' in conn_settings and not IS_PYMONGO_3:
-            connection_class = MongoReplicaSetClient
-            conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
+    # Re-use existing connection if one is suitable.
+    existing_connection = _find_existing_connection(raw_conn_settings)
+    if existing_connection:
+        connection = existing_connection
+    else:
+        connection = _create_connection(
+            alias=alias, connection_class=connection_class, **conn_settings
+        )
+    _connections[alias] = connection
+    return _connections[alias]

-            # hosts_or_uri has to be a string, so if 'host' was provided
-            # as a list, join its parts and separate them by ','
-            if isinstance(conn_settings['hosts_or_uri'], list):
-                conn_settings['hosts_or_uri'] = ','.join(
-                    conn_settings['hosts_or_uri'])

-            # Discard port since it can't be used on MongoReplicaSetClient
-            conn_settings.pop('port', None)
+def _create_connection(alias, connection_class, **connection_settings):
+    """
+    Create the new connection for this alias. Raise
+    ConnectionFailure if it can't be established.
+    """
+    try:
+        return connection_class(**connection_settings)
+    except Exception as e:
+        raise ConnectionFailure("Cannot connect to database %s :\n%s" % (alias, e))

-    # Iterate over all of the connection settings and if a connection with
-    # the same parameters is already established, use it instead of creating
-    # a new one.
-    existing_connection = None
-    connection_settings_iterator = (
+
+def _find_existing_connection(connection_settings):
+    """
+    Check if an existing connection could be reused
+
+    Iterate over all of the connection settings and if an existing connection
+    with the same parameters is suitable, return it
+
+    :param connection_settings: the settings of the new connection
+    :return: An existing connection or None
+    """
+    connection_settings_bis = (
         (db_alias, settings.copy())
         for db_alias, settings in _connection_settings.items()
     )
-    for db_alias, connection_settings in connection_settings_iterator:
-        connection_settings = _clean_settings(connection_settings)
-        if conn_settings == connection_settings and _connections.get(db_alias):
-            existing_connection = _connections[db_alias]
-            break

-    # If an existing connection was found, assign it to the new alias
-    if existing_connection:
-        _connections[alias] = existing_connection
-    else:
-        # Otherwise, create the new connection for this alias. Raise
-        # MongoEngineConnectionError if it can't be established.
-        try:
-            _connections[alias] = connection_class(**conn_settings)
-        except Exception as e:
-            raise MongoEngineConnectionError(
-                'Cannot connect to database %s :\n%s' % (alias, e))
+    def _clean_settings(settings_dict):
+        # Only remove the name but it's important to
+        # keep the username/password/authentication_source/authentication_mechanism
+        # to identify if the connection could be shared (cfr https://github.com/MongoEngine/mongoengine/issues/2047)
+        return {k: v for k, v in settings_dict.items() if k != "name"}

-    return _connections[alias]
+    cleaned_conn_settings = _clean_settings(connection_settings)
+    for db_alias, connection_settings in connection_settings_bis:
+        db_conn_settings = _clean_settings(connection_settings)
+        if cleaned_conn_settings == db_conn_settings and _connections.get(db_alias):
+            return _connections[db_alias]
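
# Aside (not part of the diff): the sharing rule above compares settings with
# everything except "name", so two aliases that differ only in database name
# reuse one MongoClient. A sketch, assuming a local mongod:
from mongoengine import connect

conn_a = connect("db_a", alias="a")
conn_b = connect("db_b", alias="b")  # same host/port/credentials
assert conn_a is conn_b  # one underlying client shared across both aliases
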
@@ -224,14 +356,18 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
     if alias not in _dbs:
         conn = get_connection(alias)
         conn_settings = _connection_settings[alias]
-        db = conn[conn_settings['name']]
-        auth_kwargs = {'source': conn_settings['authentication_source']}
-        if conn_settings['authentication_mechanism'] is not None:
-            auth_kwargs['mechanism'] = conn_settings['authentication_mechanism']
+        db = conn[conn_settings["name"]]
+        auth_kwargs = {"source": conn_settings["authentication_source"]}
+        if conn_settings["authentication_mechanism"] is not None:
+            auth_kwargs["mechanism"] = conn_settings["authentication_mechanism"]
         # Authenticate if necessary
-        if conn_settings['username'] and (conn_settings['password'] or
-                conn_settings['authentication_mechanism'] == 'MONGODB-X509'):
-            db.authenticate(conn_settings['username'], conn_settings['password'], **auth_kwargs)
+        if conn_settings["username"] and (
+            conn_settings["password"]
+            or conn_settings["authentication_mechanism"] == "MONGODB-X509"
+        ):
+            db.authenticate(
+                conn_settings["username"], conn_settings["password"], **auth_kwargs
+            )
         _dbs[alias] = db
     return _dbs[alias]

@@ -244,14 +380,27 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
     provide username and password arguments as well.

     Multiple databases are supported by using aliases. Provide a separate
     `alias` to connect to a different instance of :program:`mongod`.

+    In order to replace a connection identified by a given alias, you'll
+    need to call ``disconnect`` first
+
     See the docstring for `register_connection` for more details about all
     supported kwargs.

     .. versionchanged:: 0.6 - added multiple database support.
     """
-    if alias not in _connections:
+    if alias in _connections:
+        prev_conn_setting = _connection_settings[alias]
+        new_conn_settings = _get_connection_settings(db, **kwargs)
+
+        if new_conn_settings != prev_conn_setting:
+            err_msg = (
+                u"A different connection with alias `{}` was already "
+                u"registered. Use disconnect() first"
+            ).format(alias)
+            raise ConnectionFailure(err_msg)
+    else:
+        register_connection(alias, db, **kwargs)

     return get_connection(alias)
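
# Aside (not part of the diff): the guard above makes re-connecting an
# explicit two-step operation. A sketch, assuming a local mongod:
from mongoengine import connect, disconnect

connect("prod_db", alias="default")
# connect("other_db", alias="default")  # would raise ConnectionFailure
disconnect(alias="default")
connect("other_db", alias="default")    # fine after disconnect()
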
@@ -1,9 +1,20 @@
 from contextlib import contextmanager
+
+from pymongo.write_concern import WriteConcern
+from six import iteritems
+
 from mongoengine.common import _import_class
 from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
+from mongoengine.pymongo_support import count_documents

-__all__ = ('switch_db', 'switch_collection', 'no_dereference',
-           'no_sub_classes', 'query_counter')
+__all__ = (
+    "switch_db",
+    "switch_collection",
+    "no_dereference",
+    "no_sub_classes",
+    "query_counter",
+    "set_write_concern",
+)


 class switch_db(object):
@@ -33,17 +44,17 @@ class switch_db(object):
         self.cls = cls
         self.collection = cls._get_collection()
         self.db_alias = db_alias
-        self.ori_db_alias = cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME)
+        self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)

     def __enter__(self):
         """Change the db_alias and clear the cached collection."""
-        self.cls._meta['db_alias'] = self.db_alias
+        self.cls._meta["db_alias"] = self.db_alias
         self.cls._collection = None
         return self.cls

     def __exit__(self, t, value, traceback):
         """Reset the db_alias and collection."""
-        self.cls._meta['db_alias'] = self.ori_db_alias
+        self.cls._meta["db_alias"] = self.ori_db_alias
         self.cls._collection = self.collection
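
# Aside (not part of the diff): typical switch_db usage — route one block of
# queries at a different alias, then restore the original on exit. `User` and
# the "archive" alias are illustrative and assume registered connections.
from mongoengine.context_managers import switch_db

with switch_db(User, "archive") as ArchivedUser:
    ArchivedUser(name="Ross").save()  # saved via the "archive" connection
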
@@ -106,14 +117,15 @@ class no_dereference(object):
         """
         self.cls = cls

-        ReferenceField = _import_class('ReferenceField')
-        GenericReferenceField = _import_class('GenericReferenceField')
-        ComplexBaseField = _import_class('ComplexBaseField')
+        ReferenceField = _import_class("ReferenceField")
+        GenericReferenceField = _import_class("GenericReferenceField")
+        ComplexBaseField = _import_class("ComplexBaseField")

-        self.deref_fields = [k for k, v in self.cls._fields.iteritems()
-                             if isinstance(v, (ReferenceField,
-                                               GenericReferenceField,
-                                               ComplexBaseField))]
+        self.deref_fields = [
+            k
+            for k, v in iteritems(self.cls._fields)
+            if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField))
+        ]

     def __enter__(self):
         """Change the objects default and _auto_dereference values."""
@@ -143,66 +155,82 @@ class no_sub_classes(object):
         :param cls: the class to turn querying sub classes on
         """
         self.cls = cls
+        self.cls_initial_subclasses = None

     def __enter__(self):
         """Change the objects default and _auto_dereference values."""
-        self.cls._all_subclasses = self.cls._subclasses
-        self.cls._subclasses = (self.cls,)
+        self.cls_initial_subclasses = self.cls._subclasses
+        self.cls._subclasses = (self.cls._class_name,)
         return self.cls

     def __exit__(self, t, value, traceback):
         """Reset the default and _auto_dereference values."""
-        self.cls._subclasses = self.cls._all_subclasses
-        delattr(self.cls, '_all_subclasses')
-        return self.cls
+        self.cls._subclasses = self.cls_initial_subclasses
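
# Aside (not part of the diff): with the change above, no_sub_classes stashes
# the original `_subclasses` on the context manager instead of on the class.
# Illustrative usage, assuming `Animal` has subclasses such as Dog and Cat:
from mongoengine.context_managers import no_sub_classes

with no_sub_classes(Animal):
    n_animals = Animal.objects.count()  # counts Animal only, not Dog/Cat
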
 class query_counter(object):
-    """Query_counter context manager to get the number of queries."""
+    """Query_counter context manager to get the number of queries.
+    This works by updating the `profiling_level` of the database so that all queries get logged,
+    resetting the db.system.profile collection at the beginning of the context and counting the new entries.
+
+    This was designed for debugging purposes. In fact it is a global counter so queries issued by other
+    threads/processes can interfere with it
+
+    Be aware that:
+    - Iterating over a large amount of documents (>101) makes pymongo issue `getmore` queries to fetch the next
+      batch of documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches)
+    - Some queries are ignored by default by the counter (killcursors, db.system.indexes)
+    """

     def __init__(self):
-        """Construct the query_counter."""
-        self.counter = 0
+        """Construct the query_counter
+        """
         self.db = get_db()
+        self.initial_profiling_level = None
+        self._ctx_query_counter = 0  # number of queries issued by the context

-    def __enter__(self):
-        """On every with block we need to drop the profile collection."""
+        self._ignored_query = {
+            "ns": {"$ne": "%s.system.indexes" % self.db.name},
+            "op": {"$ne": "killcursors"},  # MONGODB < 3.2
+            "command.killCursors": {"$exists": False},  # MONGODB >= 3.2
+        }
+
+    def _turn_on_profiling(self):
+        self.initial_profiling_level = self.db.profiling_level()
+        self.db.set_profiling_level(0)
+        self.db.system.profile.drop()
+        self.db.set_profiling_level(2)
+
+    def _resets_profiling(self):
+        self.db.set_profiling_level(self.initial_profiling_level)
+
+    def __enter__(self):
+        self._turn_on_profiling()
         return self

     def __exit__(self, t, value, traceback):
-        """Reset the profiling level."""
-        self.db.set_profiling_level(0)
+        self._resets_profiling()

     def __eq__(self, value):
         """== Compare querycounter."""
         counter = self._get_count()
         return value == counter

     def __ne__(self, value):
         """!= Compare querycounter."""
         return not self.__eq__(value)

     def __lt__(self, value):
         """< Compare querycounter."""
         return self._get_count() < value

     def __le__(self, value):
         """<= Compare querycounter."""
         return self._get_count() <= value

     def __gt__(self, value):
         """> Compare querycounter."""
         return self._get_count() > value

     def __ge__(self, value):
         """>= Compare querycounter."""
         return self._get_count() >= value

     def __int__(self):
         """int representation."""
         return self._get_count()

     def __repr__(self):
@@ -210,8 +238,22 @@ class query_counter(object):
         return u"%s" % self._get_count()

     def _get_count(self):
-        """Get the number of queries."""
-        ignore_query = {'ns': {'$ne': '%s.system.indexes' % self.db.name}}
-        count = self.db.system.profile.find(ignore_query).count() - self.counter
-        self.counter += 1
+        """Get the number of queries by counting the current number of entries in db.system.profile
+        and subtracting the queries issued by this context. In fact every time this is called, 1 query is
+        issued so we need to balance that
+        """
+        count = (
+            count_documents(self.db.system.profile, self._ignored_query)
+            - self._ctx_query_counter
+        )
+        self._ctx_query_counter += (
+            1
+        )  # Account for the query we just issued to gather the information
         return count
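
# Aside (not part of the diff): typical query_counter assertion style in
# tests, relying on the comparison operators defined above. Assumes a default
# connection and an existing `Person` document class (illustrative).
from mongoengine.context_managers import query_counter

with query_counter() as q:
    assert q == 0
    Person.objects.first()
    assert q == 1  # one query logged in db.system.profile
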
+
+
+@contextmanager
+def set_write_concern(collection, write_concerns):
+    combined_concerns = dict(collection.write_concern.document.items())
+    combined_concerns.update(write_concerns)
+    yield collection.with_options(write_concern=WriteConcern(**combined_concerns))
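
# Aside (not part of the diff): set_write_concern merges extra write-concern
# options into the collection's existing ones and yields a reconfigured
# collection. A sketch with an illustrative document class:
with set_write_concern(Person._get_collection(), {"w": "majority"}) as coll:
    coll.insert_one({"name": "Ross"})  # acknowledged by a majority of nodes
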
@@ -1,9 +1,15 @@
 from collections import OrderedDict
+
 from bson import DBRef, SON
 import six
+from six import iteritems

-from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList,
-                              TopLevelDocumentMetaclass, get_document)
+from mongoengine.base import (
+    BaseDict,
+    BaseList,
+    EmbeddedDocumentList,
+    TopLevelDocumentMetaclass,
+    get_document,
+)
+from mongoengine.base.datastructures import LazyReference
 from mongoengine.connection import get_db
 from mongoengine.document import Document, EmbeddedDocument
 from mongoengine.fields import DictField, ListField, MapField, ReferenceField
@@ -35,43 +41,59 @@ class DeReference(object):
         self.max_depth = max_depth
         doc_type = None

-        if instance and isinstance(instance, (Document, EmbeddedDocument,
-                                              TopLevelDocumentMetaclass)):
+        if instance and isinstance(
+            instance, (Document, EmbeddedDocument, TopLevelDocumentMetaclass)
+        ):
             doc_type = instance._fields.get(name)
-            while hasattr(doc_type, 'field'):
+            while hasattr(doc_type, "field"):
                 doc_type = doc_type.field

             if isinstance(doc_type, ReferenceField):
                 field = doc_type
                 doc_type = doc_type.document_type
-                is_list = not hasattr(items, 'items')
+                is_list = not hasattr(items, "items")

                 if is_list and all([i.__class__ == doc_type for i in items]):
                     return items
                 elif not is_list and all(
-                        [i.__class__ == doc_type for i in items.values()]):
+                    [i.__class__ == doc_type for i in items.values()]
+                ):
                     return items
                 elif not field.dbref:
-                    if not hasattr(items, 'items'):
-                        # We must turn the ObjectIds into DBRefs
-
-                        def _get_items(items):
-                            new_items = []
-                            for v in items:
-                                if isinstance(v, list):
-                                    new_items.append(_get_items(v))
-                                elif not isinstance(v, (DBRef, Document)):
-                                    new_items.append(field.to_python(v))
-                                else:
-                                    new_items.append(v)
-                            return new_items
+                    # Recursively dig into the sub items of a list/dict
+                    # to turn the ObjectIds into DBRefs
+                    def _get_items_from_list(items):
+                        new_items = []
+                        for v in items:
+                            value = v
+                            if isinstance(v, dict):
+                                value = _get_items_from_dict(v)
+                            elif isinstance(v, list):
+                                value = _get_items_from_list(v)
+                            elif not isinstance(v, (DBRef, Document)):
+                                value = field.to_python(v)
+                            new_items.append(value)
+                        return new_items

-                        items = _get_items(items)
+                    def _get_items_from_dict(items):
+                        new_items = {}
+                        for k, v in iteritems(items):
+                            value = v
+                            if isinstance(v, list):
+                                value = _get_items_from_list(v)
+                            elif isinstance(v, dict):
+                                value = _get_items_from_dict(v)
+                            elif not isinstance(v, (DBRef, Document)):
+                                value = field.to_python(v)
+                            new_items[k] = value
+                        return new_items
+
+                    if not hasattr(items, "items"):
+                        items = _get_items_from_list(items)
                     else:
-                        items = {
-                            k: (v if isinstance(v, (DBRef, Document))
-                                else field.to_python(v))
-                            for k, v in items.iteritems()
-                        }
+                        items = _get_items_from_dict(items)

         self.reference_map = self._find_references(items)
         self.object_map = self._fetch_objects(doc_type=doc_type)
@@ -98,26 +120,40 @@ class DeReference(object):
         depth += 1
         for item in iterator:
             if isinstance(item, (Document, EmbeddedDocument)):
-                for field_name, field in item._fields.iteritems():
+                for field_name, field in iteritems(item._fields):
                     v = item._data.get(field_name, None)
-                    if isinstance(v, DBRef):
+                    if isinstance(v, LazyReference):
+                        # LazyReference inherits DBRef but should not be dereferenced here !
+                        continue
+                    elif isinstance(v, DBRef):
                         reference_map.setdefault(field.document_type, set()).add(v.id)
-                    elif isinstance(v, (dict, SON)) and '_ref' in v:
-                        reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id)
+                    elif isinstance(v, (dict, SON)) and "_ref" in v:
+                        reference_map.setdefault(get_document(v["_cls"]), set()).add(
+                            v["_ref"].id
+                        )
                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
-                        field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
+                        field_cls = getattr(
+                            getattr(field, "field", None), "document_type", None
+                        )
                         references = self._find_references(v, depth)
-                        for key, refs in references.iteritems():
-                            if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
+                        for key, refs in iteritems(references):
+                            if isinstance(
+                                field_cls, (Document, TopLevelDocumentMetaclass)
+                            ):
                                 key = field_cls
                             reference_map.setdefault(key, set()).update(refs)
+            elif isinstance(item, LazyReference):
+                # LazyReference inherits DBRef but should not be dereferenced here !
+                continue
             elif isinstance(item, DBRef):
                 reference_map.setdefault(item.collection, set()).add(item.id)
-            elif isinstance(item, (dict, SON)) and '_ref' in item:
-                reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id)
+            elif isinstance(item, (dict, SON)) and "_ref" in item:
+                reference_map.setdefault(get_document(item["_cls"]), set()).add(
+                    item["_ref"].id
+                )
             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
                 references = self._find_references(item, depth - 1)
-                for key, refs in references.iteritems():
+                for key, refs in iteritems(references):
                     reference_map.setdefault(key, set()).update(refs)

         return reference_map
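
# Aside (not part of the diff): the map built above groups referenced ids by
# their document class (or by collection name for generic references), e.g.
# roughly:
#   {User: {ObjectId("..."), ObjectId("...")}, "comments": {ObjectId("...")}}
# so that _fetch_objects can load each group with a single in_bulk / $in query
# instead of one query per reference.
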
@@ -126,35 +162,44 @@ class DeReference(object):
         """Fetch all references and convert to their document objects
         """
         object_map = {}
-        for collection, dbrefs in self.reference_map.iteritems():
-            if hasattr(collection, 'objects'):  # We have a document class for the refs
+        for collection, dbrefs in iteritems(self.reference_map):
+
+            # we use getattr instead of hasattr because hasattr swallows any exception under python2
+            # so it could hide nasty things without raising exceptions (cfr bug #1688))
+            ref_document_cls_exists = getattr(collection, "objects", None) is not None
+
+            if ref_document_cls_exists:
                 col_name = collection._get_collection_name()
-                refs = [dbref for dbref in dbrefs
-                        if (col_name, dbref) not in object_map]
+                refs = [
+                    dbref for dbref in dbrefs if (col_name, dbref) not in object_map
+                ]
                 references = collection.objects.in_bulk(refs)
-                for key, doc in references.iteritems():
+                for key, doc in iteritems(references):
                     object_map[(col_name, key)] = doc
             else:  # Generic reference: use the refs data to convert to document
-                if isinstance(doc_type, (ListField, DictField, MapField,)):
+                if isinstance(doc_type, (ListField, DictField, MapField)):
                     continue

-                refs = [dbref for dbref in dbrefs
-                        if (collection, dbref) not in object_map]
+                refs = [
+                    dbref for dbref in dbrefs if (collection, dbref) not in object_map
+                ]

                 if doc_type:
-                    references = doc_type._get_db()[collection].find({'_id': {'$in': refs}})
+                    references = doc_type._get_db()[collection].find(
+                        {"_id": {"$in": refs}}
+                    )
                     for ref in references:
                         doc = doc_type._from_son(ref)
                         object_map[(collection, doc.id)] = doc
                 else:
-                    references = get_db()[collection].find({'_id': {'$in': refs}})
+                    references = get_db()[collection].find({"_id": {"$in": refs}})
                     for ref in references:
-                        if '_cls' in ref:
-                            doc = get_document(ref['_cls'])._from_son(ref)
+                        if "_cls" in ref:
+                            doc = get_document(ref["_cls"])._from_son(ref)
                         elif doc_type is None:
                             doc = get_document(
-                                ''.join(x.capitalize()
-                                        for x in collection.split('_')))._from_son(ref)
+                                "".join(x.capitalize() for x in collection.split("_"))
+                            )._from_son(ref)
                         else:
                             doc = doc_type._from_son(ref)
                         object_map[(collection, doc.id)] = doc
@@ -182,19 +227,20 @@ class DeReference(object):
             return BaseList(items, instance, name)

         if isinstance(items, (dict, SON)):
-            if '_ref' in items:
+            if "_ref" in items:
                 return self.object_map.get(
-                    (items['_ref'].collection, items['_ref'].id), items)
-            elif '_cls' in items:
-                doc = get_document(items['_cls'])._from_son(items)
-                _cls = doc._data.pop('_cls', None)
-                del items['_cls']
+                    (items["_ref"].collection, items["_ref"].id), items
+                )
+            elif "_cls" in items:
+                doc = get_document(items["_cls"])._from_son(items)
+                _cls = doc._data.pop("_cls", None)
+                del items["_cls"]
                 doc._data = self._attach_objects(doc._data, depth, doc, None)
                 if _cls is not None:
-                    doc._data['_cls'] = _cls
+                    doc._data["_cls"] = _cls
                 return doc

-        if not hasattr(items, 'items'):
+        if not hasattr(items, "items"):
             is_list = True
             list_type = BaseList
             if isinstance(items, EmbeddedDocumentList):
@@ -202,13 +248,9 @@ class DeReference(object):
             as_tuple = isinstance(items, tuple)
             iterator = enumerate(items)
             data = []
-        elif isinstance(items, OrderedDict):
-            is_list = False
-            iterator = items.iteritems()
-            data = OrderedDict()
         else:
             is_list = False
-            iterator = items.iteritems()
+            iterator = iteritems(items)
             data = {}

         depth += 1
@@ -225,17 +267,25 @@ class DeReference(object):
                         v = data[k]._data.get(field_name, None)
                         if isinstance(v, DBRef):
                             data[k]._data[field_name] = self.object_map.get(
-                                (v.collection, v.id), v)
-                        elif isinstance(v, (dict, SON)) and '_ref' in v:
+                                (v.collection, v.id), v
+                            )
+                        elif isinstance(v, (dict, SON)) and "_ref" in v:
                             data[k]._data[field_name] = self.object_map.get(
-                                (v['_ref'].collection, v['_ref'].id), v)
+                                (v["_ref"].collection, v["_ref"].id), v
+                            )
                         elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
-                            item_name = six.text_type('{0}.{1}.{2}').format(name, k, field_name)
-                            data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name)
+                            item_name = six.text_type("{0}.{1}.{2}").format(
+                                name, k, field_name
+                            )
+                            data[k]._data[field_name] = self._attach_objects(
+                                v, depth, instance=instance, name=item_name
+                            )
                 elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
-                    item_name = '%s.%s' % (name, k) if name else name
-                    data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name)
-                elif hasattr(v, 'id'):
+                    item_name = "%s.%s" % (name, k) if name else name
+                    data[k] = self._attach_objects(
+                        v, depth - 1, instance=instance, name=item_name
+                    )
+                elif isinstance(v, DBRef) and hasattr(v, "id"):
                     data[k] = self.object_map.get((v.collection, v.id), v)

         if instance and name:
 (File diff suppressed because it is too large)
@@ -1,11 +1,23 @@
 from collections import defaultdict

 import six
+from six import iteritems

-__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
-           'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',
-           'OperationError', 'NotUniqueError', 'FieldDoesNotExist',
-           'ValidationError', 'SaveConditionError')
+__all__ = (
+    "NotRegistered",
+    "InvalidDocumentError",
+    "LookUpError",
+    "DoesNotExist",
+    "MultipleObjectsReturned",
+    "InvalidQueryError",
+    "OperationError",
+    "NotUniqueError",
+    "BulkWriteError",
+    "FieldDoesNotExist",
+    "ValidationError",
+    "SaveConditionError",
+    "DeprecatedError",
+)


 class NotRegistered(Exception):
@@ -40,6 +52,10 @@ class NotUniqueError(OperationError):
     pass


+class BulkWriteError(OperationError):
+    pass
+
+
 class SaveConditionError(OperationError):
     pass

@@ -70,24 +86,25 @@ class ValidationError(AssertionError):
     field_name = None
     _message = None

-    def __init__(self, message='', **kwargs):
-        self.errors = kwargs.get('errors', {})
-        self.field_name = kwargs.get('field_name')
+    def __init__(self, message="", **kwargs):
+        super(ValidationError, self).__init__(message)
+        self.errors = kwargs.get("errors", {})
+        self.field_name = kwargs.get("field_name")
         self.message = message

     def __str__(self):
         return six.text_type(self.message)

     def __repr__(self):
-        return '%s(%s,)' % (self.__class__.__name__, self.message)
+        return "%s(%s,)" % (self.__class__.__name__, self.message)

     def __getattribute__(self, name):
         message = super(ValidationError, self).__getattribute__(name)
-        if name == 'message':
+        if name == "message":
             if self.field_name:
-                message = '%s' % message
+                message = "%s" % message
             if self.errors:
-                message = '%s(%s)' % (message, self._format_errors())
+                message = "%s(%s)" % (message, self._format_errors())
         return message

     def _get_message(self):
@@ -108,11 +125,8 @@ class ValidationError(AssertionError):

         def build_dict(source):
             errors_dict = {}
-            if not source:
-                return errors_dict
-
             if isinstance(source, dict):
-                for field_name, error in source.iteritems():
+                for field_name, error in iteritems(source):
                     errors_dict[field_name] = build_dict(error)
             elif isinstance(source, ValidationError) and source.errors:
                 return build_dict(source.errors)
@@ -129,17 +143,22 @@ class ValidationError(AssertionError):
     def _format_errors(self):
         """Returns a string listing all errors within a document"""

-        def generate_key(value, prefix=''):
+        def generate_key(value, prefix=""):
             if isinstance(value, list):
-                value = ' '.join([generate_key(k) for k in value])
+                value = " ".join([generate_key(k) for k in value])
             elif isinstance(value, dict):
-                value = ' '.join(
-                    [generate_key(v, k) for k, v in value.iteritems()])
+                value = " ".join([generate_key(v, k) for k, v in iteritems(value)])

-            results = '%s.%s' % (prefix, value) if prefix else value
+            results = "%s.%s" % (prefix, value) if prefix else value
             return results

         error_dict = defaultdict(list)
-        for k, v in self.to_dict().iteritems():
+        for k, v in iteritems(self.to_dict()):
             error_dict[generate_key(v)].append(k)
-        return ' '.join(['%s: %s' % (k, v) for k, v in error_dict.iteritems()])
+        return " ".join(["%s: %s" % (k, v) for k, v in iteritems(error_dict)])
+
+
+class DeprecatedError(Exception):
+    """Raise when a user uses a feature that has been Deprecated"""
+
+    pass
|
||||
|
||||
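Editor's sketch (not part of the diff): how the nested `errors` dict feeds `_format_errors()` in the class above; the field name "name" and the messages are hypothetical.

from mongoengine.errors import ValidationError

inner = ValidationError("Too long", field_name="name")
outer = ValidationError("ValidationError", errors={"name": inner})
# __getattribute__ appends the formatted errors to the message:
print(outer)  # roughly: ValidationError(Too long: ['name'])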
File diff suppressed because it is too large
mongoengine/mongodb_support.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+"""
+Helper functions, constants, and types to aid with MongoDB version support
+"""
+from mongoengine.connection import get_connection
+
+
+# Constant that can be used to compare the version retrieved with
+# get_mongodb_version()
+MONGODB_34 = (3, 4)
+MONGODB_36 = (3, 6)
+
+
+def get_mongodb_version():
+    """Return the version of the connected mongoDB (first 2 digits)
+
+    :return: tuple(int, int)
+    """
+    version_list = get_connection().server_info()["versionArray"][:2]  # e.g: (3, 2)
+    return tuple(version_list)
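Editor's sketch (not part of the diff): gating behavior on the server version with the helpers above; assumes a local mongod and the "mongoenginetest" database.

from mongoengine import connect
from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version

connect(db="mongoenginetest")  # assumption: a MongoDB server is reachable

if get_mongodb_version() >= MONGODB_36:
    # safe to rely on a feature introduced in MongoDB 3.6
    pass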
mongoengine/pymongo_support.py (new file, 32 lines)
@@ -0,0 +1,32 @@
+"""
+Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support.
+"""
+import pymongo
+
+_PYMONGO_37 = (3, 7)
+
+PYMONGO_VERSION = tuple(pymongo.version_tuple[:2])
+
+IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37
+
+
+def count_documents(collection, filter):
+    """Pymongo>3.7 deprecates count in favour of count_documents"""
+    if IS_PYMONGO_GTE_37:
+        return collection.count_documents(filter)
+    else:
+        count = collection.find(filter).count()
+        return count
+
+
+def list_collection_names(db, include_system_collections=False):
+    """Pymongo>3.7 deprecates collection_names in favour of list_collection_names"""
+    if IS_PYMONGO_GTE_37:
+        collections = db.list_collection_names()
+    else:
+        collections = db.collection_names()
+
+    if not include_system_collections:
+        collections = [c for c in collections if not c.startswith("system.")]
+
+    return collections
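Editor's sketch (not part of the diff): the new compatibility shims in use; the "person" collection and filter are hypothetical, and a local mongod is assumed.

from mongoengine import connect
from mongoengine.connection import get_db
from mongoengine.pymongo_support import count_documents, list_collection_names

connect(db="mongoenginetest")
db = get_db()

print(list_collection_names(db))                # system.* filtered out by default
coll = db["person"]
print(count_documents(coll, {"name": "Test"}))  # same call on PyMongo < 3.7 and >= 3.7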
mongoengine/python_support.py
@@ -1,17 +1,8 @@
 """
-Helper functions, constants, and types to aid with Python v2.7 - v3.x and
-PyMongo v2.7 - v3.x support.
+Helper functions, constants, and types to aid with Python v2.7 - v3.x support
 """
-import pymongo
 import six
 
 
-if pymongo.version_tuple[0] < 3:
-    IS_PYMONGO_3 = False
-else:
-    IS_PYMONGO_3 = True
-
-
 # six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3.
 StringIO = six.BytesIO
 
@@ -23,3 +14,10 @@ if not six.PY3:
         pass
     else:
         StringIO = cStringIO.StringIO
 
 
+if six.PY3:
+    from collections.abc import Hashable
+else:
+    # raises DeprecationWarnings in Python >=3.7
+    from collections import Hashable
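Editor's sketch (not part of the diff): the same import shim, shown standalone, so isinstance checks against Hashable work on both Python 2 and Python 3.7+ without deprecation warnings.

import six

if six.PY3:
    from collections.abc import Hashable
else:
    from collections import Hashable  # deprecated location on Python >= 3.3

assert isinstance("abc", Hashable)   # strings are hashable
assert not isinstance([], Hashable)  # lists are not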
mongoengine/queryset/__init__.py
@@ -7,11 +7,22 @@ from mongoengine.queryset.visitor import *
 
 # Expose just the public subset of all imported objects and constants.
 __all__ = (
-    'QuerySet', 'QuerySetNoCache', 'Q', 'queryset_manager', 'QuerySetManager',
-    'QueryFieldList', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL',
+    "QuerySet",
+    "QuerySetNoCache",
+    "Q",
+    "queryset_manager",
+    "QuerySetManager",
+    "QueryFieldList",
+    "DO_NOTHING",
+    "NULLIFY",
+    "CASCADE",
+    "DENY",
+    "PULL",
     # Errors that might be related to a queryset, mostly here for backward
     # compatibility
-    'DoesNotExist', 'InvalidQueryError', 'MultipleObjectsReturned',
-    'NotUniqueError', 'OperationError',
+    "DoesNotExist",
+    "InvalidQueryError",
+    "MultipleObjectsReturned",
+    "NotUniqueError",
+    "OperationError",
 )
File diff suppressed because it is too large
mongoengine/queryset/field_list.py
@@ -1,12 +1,15 @@
-__all__ = ('QueryFieldList',)
+__all__ = ("QueryFieldList",)
 
 
 class QueryFieldList(object):
     """Object that handles combinations of .only() and .exclude() calls"""
+
     ONLY = 1
     EXCLUDE = 0
 
-    def __init__(self, fields=None, value=ONLY, always_include=None, _only_called=False):
+    def __init__(
+        self, fields=None, value=ONLY, always_include=None, _only_called=False
+    ):
         """The QueryFieldList builder
 
         :param fields: A list of fields used in `.only()` or `.exclude()`
@@ -49,7 +52,7 @@ class QueryFieldList(object):
             self.fields = f.fields - self.fields
             self._clean_slice()
 
-        if '_id' in f.fields:
+        if "_id" in f.fields:
             self._id = f.value
 
         if self.always_include:
@@ -59,19 +62,21 @@ class QueryFieldList(object):
             else:
                 self.fields -= self.always_include
 
-        if getattr(f, '_only_called', False):
+        if getattr(f, "_only_called", False):
             self._only_called = True
         return self
 
-    def __nonzero__(self):
+    def __bool__(self):
         return bool(self.fields)
 
+    __nonzero__ = __bool__  # For Py2 support
+
     def as_dict(self):
         field_list = {field: self.value for field in self.fields}
         if self.slice:
             field_list.update(self.slice)
         if self._id is not None:
-            field_list['_id'] = self._id
+            field_list["_id"] = self._id
         return field_list
 
     def reset(self):
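Editor's sketch (not part of the diff): the Py2/Py3 truthiness pattern used above, standalone. Python 3 calls __bool__ while Python 2 calls __nonzero__, so aliasing one to the other keeps a single implementation working on both; FieldSet is a hypothetical stand-in for QueryFieldList.

class FieldSet(object):
    def __init__(self, fields=()):
        self.fields = set(fields)

    def __bool__(self):
        return bool(self.fields)

    __nonzero__ = __bool__  # For Py2 support

assert not FieldSet()        # empty -> falsy
assert FieldSet(["name"])    # non-empty -> truthy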
mongoengine/queryset/manager.py
@@ -1,7 +1,7 @@
 from functools import partial
 from mongoengine.queryset.queryset import QuerySet
 
-__all__ = ('queryset_manager', 'QuerySetManager')
+__all__ = ("queryset_manager", "QuerySetManager")
 
 
 class QuerySetManager(object):
@@ -33,10 +33,10 @@ class QuerySetManager(object):
             return self
 
         # owner is the document that contains the QuerySetManager
-        queryset_class = owner._meta.get('queryset_class', self.default)
+        queryset_class = owner._meta.get("queryset_class", self.default)
         queryset = queryset_class(owner, owner._get_collection())
         if self.get_queryset:
-            arg_count = self.get_queryset.func_code.co_argcount
+            arg_count = self.get_queryset.__code__.co_argcount
             if arg_count == 1:
                 queryset = self.get_queryset(queryset)
             elif arg_count == 2:
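Editor's sketch (not part of the diff): a custom manager whose argument count is inspected via __code__.co_argcount above; the Post document is hypothetical.

from mongoengine import Document, StringField, queryset_manager

class Post(Document):
    title = StringField()

    @queryset_manager
    def live_posts(doc_cls, queryset):
        # two arguments -> dispatched through the arg_count == 2 branch
        return queryset.filter(title__ne="")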
mongoengine/queryset/queryset.py
@@ -1,9 +1,24 @@
-from mongoengine.errors import OperationError
-from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING,
-                                       NULLIFY, PULL)
+import six
 
-__all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE',
-           'DENY', 'PULL')
+from mongoengine.errors import OperationError
+from mongoengine.queryset.base import (
+    BaseQuerySet,
+    CASCADE,
+    DENY,
+    DO_NOTHING,
+    NULLIFY,
+    PULL,
+)
+
+__all__ = (
+    "QuerySet",
+    "QuerySetNoCache",
+    "DO_NOTHING",
+    "NULLIFY",
+    "CASCADE",
+    "DENY",
+    "PULL",
+)
 
 # The maximum number of items to display in a QuerySet.__repr__
 REPR_OUTPUT_SIZE = 20
@@ -55,12 +70,12 @@ class QuerySet(BaseQuerySet):
     def __repr__(self):
         """Provide a string representation of the QuerySet"""
         if self._iter:
-            return '.. queryset mid-iteration ..'
+            return ".. queryset mid-iteration .."
 
         self._populate_cache()
-        data = self._result_cache[:REPR_OUTPUT_SIZE + 1]
+        data = self._result_cache[: REPR_OUTPUT_SIZE + 1]
         if len(data) > REPR_OUTPUT_SIZE:
-            data[-1] = '...(remaining elements truncated)...'
+            data[-1] = "...(remaining elements truncated)..."
         return repr(data)
 
     def _iter_results(self):
@@ -87,10 +102,10 @@ class QuerySet(BaseQuerySet):
                 yield self._result_cache[pos]
                 pos += 1
 
-            # Raise StopIteration if we already established there were no more
+            # return if we already established there were no more
             # docs in the db cursor.
             if not self._has_more:
-                raise StopIteration
+                return
 
             # Otherwise, populate more of the cache and repeat.
             if len(self._result_cache) <= pos:
@@ -112,8 +127,8 @@ class QuerySet(BaseQuerySet):
         # Pull in ITER_CHUNK_SIZE docs from the database and store them in
         # the result cache.
         try:
-            for _ in xrange(ITER_CHUNK_SIZE):
-                self._result_cache.append(self.next())
+            for _ in six.moves.range(ITER_CHUNK_SIZE):
+                self._result_cache.append(six.next(self))
         except StopIteration:
             # Getting this exception means there are no more docs in the
             # db cursor. Set _has_more to False so that we can use that
@@ -141,10 +156,9 @@ class QuerySet(BaseQuerySet):
         .. versionadded:: 0.8.3 Convert to non caching queryset
         """
         if self._result_cache is not None:
-            raise OperationError('QuerySet already cached')
+            raise OperationError("QuerySet already cached")
 
-        return self._clone_into(QuerySetNoCache(self._document,
-                                                self._collection))
+        return self._clone_into(QuerySetNoCache(self._document, self._collection))
 
 
 class QuerySetNoCache(BaseQuerySet):
@@ -163,17 +177,17 @@ class QuerySetNoCache(BaseQuerySet):
         .. versionchanged:: 0.6.13 Now doesnt modify the cursor
         """
         if self._iter:
-            return '.. queryset mid-iteration ..'
+            return ".. queryset mid-iteration .."
 
         data = []
-        for _ in xrange(REPR_OUTPUT_SIZE + 1):
+        for _ in six.moves.range(REPR_OUTPUT_SIZE + 1):
             try:
-                data.append(self.next())
+                data.append(six.next(self))
             except StopIteration:
                 break
 
         if len(data) > REPR_OUTPUT_SIZE:
-            data[-1] = '...(remaining elements truncated)...'
+            data[-1] = "...(remaining elements truncated)..."
 
         self.rewind()
         return repr(data)
@@ -184,10 +198,3 @@ class QuerySetNoCache(BaseQuerySet):
         queryset = self.clone()
         queryset.rewind()
         return queryset
-
-
-class QuerySetNoDeRef(QuerySet):
-    """Special no_dereference QuerySet"""
-
-    def __dereference(items, max_depth=1, instance=None, name=None):
-        return items
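Editor's sketch (not part of the diff): the caching/no-caching split above from a user's perspective; the Person document is hypothetical and a local mongod is assumed.

from mongoengine import Document, StringField, connect

connect(db="mongoenginetest")

class Person(Document):
    name = StringField()

qs = Person.objects.no_cache()  # returns a QuerySetNoCache clone
print(qs.first())               # results are never kept in _result_cache
# Conversely, iterating Person.objects first and then calling .no_cache()
# raises OperationError("QuerySet already cached"), per the hunk above.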
mongoengine/queryset/transform.py
@@ -4,28 +4,60 @@ from bson import ObjectId, SON
 from bson.dbref import DBRef
 import pymongo
 import six
+from six import iteritems
 
 from mongoengine.base import UPDATE_OPERATORS
 from mongoengine.common import _import_class
-from mongoengine.connection import get_connection
 from mongoengine.errors import InvalidQueryError
-from mongoengine.python_support import IS_PYMONGO_3
 
-__all__ = ('query', 'update')
+__all__ = ("query", "update")
 
-COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
-                        'all', 'size', 'exists', 'not', 'elemMatch', 'type')
-GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
-                 'within_box', 'within_polygon', 'near', 'near_sphere',
-                 'max_distance', 'min_distance', 'geo_within', 'geo_within_box',
-                 'geo_within_polygon', 'geo_within_center',
-                 'geo_within_sphere', 'geo_intersects')
-STRING_OPERATORS = ('contains', 'icontains', 'startswith',
-                    'istartswith', 'endswith', 'iendswith',
-                    'exact', 'iexact')
-CUSTOM_OPERATORS = ('match',)
-MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
-                   STRING_OPERATORS + CUSTOM_OPERATORS)
+COMPARISON_OPERATORS = (
+    "ne",
+    "gt",
+    "gte",
+    "lt",
+    "lte",
+    "in",
+    "nin",
+    "mod",
+    "all",
+    "size",
+    "exists",
+    "not",
+    "elemMatch",
+    "type",
+)
+GEO_OPERATORS = (
+    "within_distance",
+    "within_spherical_distance",
+    "within_box",
+    "within_polygon",
+    "near",
+    "near_sphere",
+    "max_distance",
+    "min_distance",
+    "geo_within",
+    "geo_within_box",
+    "geo_within_polygon",
+    "geo_within_center",
+    "geo_within_sphere",
+    "geo_intersects",
+)
+STRING_OPERATORS = (
+    "contains",
+    "icontains",
+    "startswith",
+    "istartswith",
+    "endswith",
+    "iendswith",
+    "exact",
+    "iexact",
+)
+CUSTOM_OPERATORS = ("match",)
+MATCH_OPERATORS = (
+    COMPARISON_OPERATORS + GEO_OPERATORS + STRING_OPERATORS + CUSTOM_OPERATORS
+)
 
 
 # TODO make this less complex
@@ -34,11 +66,11 @@ def query(_doc_cls=None, **kwargs):
     mongo_query = {}
     merge_query = defaultdict(list)
     for key, value in sorted(kwargs.items()):
-        if key == '__raw__':
+        if key == "__raw__":
             mongo_query.update(value)
             continue
 
-        parts = key.rsplit('__')
+        parts = key.rsplit("__")
         indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
         parts = [part for part in parts if not part.isdigit()]
         # Check for an operator and transform to mongo-style if there is
@@ -47,11 +79,11 @@ def query(_doc_cls=None, **kwargs):
             op = parts.pop()
 
             # Allow to escape operator-like field name by __
-            if len(parts) > 1 and parts[-1] == '':
+            if len(parts) > 1 and parts[-1] == "":
                 parts.pop()
 
         negate = False
-        if len(parts) > 1 and parts[-1] == 'not':
+        if len(parts) > 1 and parts[-1] == "not":
             parts.pop()
             negate = True
 
@@ -63,8 +95,8 @@ def query(_doc_cls=None, **kwargs):
                 raise InvalidQueryError(e)
             parts = []
 
-            CachedReferenceField = _import_class('CachedReferenceField')
-            GenericReferenceField = _import_class('GenericReferenceField')
+            CachedReferenceField = _import_class("CachedReferenceField")
+            GenericReferenceField = _import_class("GenericReferenceField")
 
             cleaned_fields = []
             for field in fields:
@@ -74,7 +106,7 @@ def query(_doc_cls=None, **kwargs):
                     append_field = False
                 # is last and CachedReferenceField
                 elif isinstance(field, CachedReferenceField) and fields[-1] == field:
-                    parts.append('%s._id' % field.db_field)
+                    parts.append("%s._id" % field.db_field)
                 else:
                     parts.append(field.db_field)
 
@@ -84,38 +116,17 @@ def query(_doc_cls=None, **kwargs):
             # Convert value to proper value
             field = cleaned_fields[-1]
 
-            singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not']
+            singular_ops = [None, "ne", "gt", "gte", "lt", "lte", "not"]
             singular_ops += STRING_OPERATORS
             if op in singular_ops:
-                if isinstance(field, six.string_types):
-                    if (op in STRING_OPERATORS and
-                            isinstance(value, six.string_types)):
-                        StringField = _import_class('StringField')
-                        value = StringField.prepare_query_value(op, value)
-                    else:
-                        value = field
-                else:
-                    value = field.prepare_query_value(op, value)
+                value = field.prepare_query_value(op, value)
 
-                    if isinstance(field, CachedReferenceField) and value:
-                        value = value['_id']
+                if isinstance(field, CachedReferenceField) and value:
+                    value = value["_id"]
 
-            elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
-                # Raise an error if the in/nin/all/near param is not iterable. We need a
-                # special check for BaseDocument, because - although it's iterable - using
-                # it as such in the context of this method is most definitely a mistake.
-                BaseDocument = _import_class('BaseDocument')
-                if isinstance(value, BaseDocument):
-                    raise TypeError("When using the `in`, `nin`, `all`, or "
-                                    "`near`-operators you can\'t use a "
-                                    "`Document`, you must wrap your object "
-                                    "in a list (object -> [object]).")
-                elif not hasattr(value, '__iter__'):
-                    raise TypeError("The `in`, `nin`, `all`, or "
-                                    "`near`-operators must be applied to an "
-                                    "iterable (e.g. a list).")
-                else:
-                    value = [field.prepare_query_value(op, v) for v in value]
+            elif op in ("in", "nin", "all", "near") and not isinstance(value, dict):
+                # Raise an error if the in/nin/all/near param is not iterable.
+                value = _prepare_query_for_iterable(field, op, value)
 
             # If we're querying a GenericReferenceField, we need to alter the
             # key depending on the value:
@@ -123,73 +134,77 @@ def query(_doc_cls=None, **kwargs):
             # * If the value is an ObjectId, the key should be "field_name._ref.$id".
             if isinstance(field, GenericReferenceField):
                 if isinstance(value, DBRef):
-                    parts[-1] += '._ref'
+                    parts[-1] += "._ref"
                 elif isinstance(value, ObjectId):
-                    parts[-1] += '._ref.$id'
+                    parts[-1] += "._ref.$id"
 
         # if op and op not in COMPARISON_OPERATORS:
         if op:
             if op in GEO_OPERATORS:
                 value = _geo_operator(field, op, value)
-            elif op in ('match', 'elemMatch'):
-                ListField = _import_class('ListField')
-                EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
+            elif op in ("match", "elemMatch"):
+                ListField = _import_class("ListField")
+                EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
                 if (
-                    isinstance(value, dict) and
-                    isinstance(field, ListField) and
-                    isinstance(field.field, EmbeddedDocumentField)
+                    isinstance(value, dict)
+                    and isinstance(field, ListField)
+                    and isinstance(field.field, EmbeddedDocumentField)
                 ):
                     value = query(field.field.document_type, **value)
                 else:
                     value = field.prepare_query_value(op, value)
-                value = {'$elemMatch': value}
+                value = {"$elemMatch": value}
             elif op in CUSTOM_OPERATORS:
-                NotImplementedError('Custom method "%s" has not '
-                                    'been implemented' % op)
+                NotImplementedError(
+                    'Custom method "%s" has not ' "been implemented" % op
+                )
             elif op not in STRING_OPERATORS:
-                value = {'$' + op: value}
+                value = {"$" + op: value}
 
         if negate:
-            value = {'$not': value}
+            value = {"$not": value}
 
         for i, part in indices:
             parts.insert(i, part)
 
-        key = '.'.join(parts)
+        key = ".".join(parts)
 
         if op is None or key not in mongo_query:
             mongo_query[key] = value
         elif key in mongo_query:
-            if isinstance(mongo_query[key], dict):
+            if isinstance(mongo_query[key], dict) and isinstance(value, dict):
                 mongo_query[key].update(value)
                 # $max/minDistance needs to come last - convert to SON
                 value_dict = mongo_query[key]
-                if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \
-                        ('$near' in value_dict or '$nearSphere' in value_dict):
+                if ("$maxDistance" in value_dict or "$minDistance" in value_dict) and (
+                    "$near" in value_dict or "$nearSphere" in value_dict
+                ):
                     value_son = SON()
-                    for k, v in value_dict.iteritems():
-                        if k == '$maxDistance' or k == '$minDistance':
+                    for k, v in iteritems(value_dict):
+                        if k == "$maxDistance" or k == "$minDistance":
                             continue
                         value_son[k] = v
                     # Required for MongoDB >= 2.6, may fail when combining
                     # PyMongo 3+ and MongoDB < 2.6
                     near_embedded = False
-                    for near_op in ('$near', '$nearSphere'):
-                        if isinstance(value_dict.get(near_op), dict) and (
-                                IS_PYMONGO_3 or get_connection().max_wire_version > 1):
+                    for near_op in ("$near", "$nearSphere"):
+                        if isinstance(value_dict.get(near_op), dict):
                             value_son[near_op] = SON(value_son[near_op])
-                            if '$maxDistance' in value_dict:
-                                value_son[near_op][
-                                    '$maxDistance'] = value_dict['$maxDistance']
-                            if '$minDistance' in value_dict:
-                                value_son[near_op][
-                                    '$minDistance'] = value_dict['$minDistance']
+                            if "$maxDistance" in value_dict:
+                                value_son[near_op]["$maxDistance"] = value_dict[
+                                    "$maxDistance"
+                                ]
+                            if "$minDistance" in value_dict:
+                                value_son[near_op]["$minDistance"] = value_dict[
+                                    "$minDistance"
+                                ]
                             near_embedded = True
 
                     if not near_embedded:
-                        if '$maxDistance' in value_dict:
-                            value_son['$maxDistance'] = value_dict['$maxDistance']
-                        if '$minDistance' in value_dict:
-                            value_son['$minDistance'] = value_dict['$minDistance']
+                        if "$maxDistance" in value_dict:
+                            value_son["$maxDistance"] = value_dict["$maxDistance"]
+                        if "$minDistance" in value_dict:
+                            value_son["$minDistance"] = value_dict["$minDistance"]
                     mongo_query[key] = value_son
             else:
                 # Store for manually merging later
@@ -201,10 +216,10 @@ def query(_doc_cls=None, **kwargs):
         del mongo_query[k]
         if isinstance(v, list):
             value = [{k: val} for val in v]
-            if '$and' in mongo_query.keys():
-                mongo_query['$and'].extend(value)
+            if "$and" in mongo_query.keys():
+                mongo_query["$and"].extend(value)
             else:
-                mongo_query['$and'] = value
+                mongo_query["$and"] = value
 
     return mongo_query
 
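Editor's sketch (not part of the diff): what query() produces when called without a document class, so values pass through untransformed; the keys are hypothetical.

from mongoengine.queryset.transform import query

print(query(age__gte=18, name__in=["Ada", "Grace"]))
# -> {'age': {'$gte': 18}, 'name': {'$in': ['Ada', 'Grace']}}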
@@ -214,37 +229,44 @@ def update(_doc_cls=None, **update):
     format.
     """
     mongo_update = {}
 
     for key, value in update.items():
-        if key == '__raw__':
+        if key == "__raw__":
             mongo_update.update(value)
             continue
-        parts = key.split('__')
+
+        parts = key.split("__")
 
         # if there is no operator, default to 'set'
         if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
-            parts.insert(0, 'set')
+            parts.insert(0, "set")
 
         # Check for an operator and transform to mongo-style if there is
         op = None
         if parts[0] in UPDATE_OPERATORS:
             op = parts.pop(0)
             # Convert Pythonic names to Mongo equivalents
-            if op in ('push_all', 'pull_all'):
-                op = op.replace('_all', 'All')
-            elif op == 'dec':
+            operator_map = {
+                "push_all": "pushAll",
+                "pull_all": "pullAll",
+                "dec": "inc",
+                "add_to_set": "addToSet",
+                "set_on_insert": "setOnInsert",
+            }
+            if op == "dec":
                 # Support decrement by flipping a positive value's sign
                 # and using 'inc'
-                op = 'inc'
                 value = -value
-            elif op == 'add_to_set':
-                op = 'addToSet'
-            elif op == 'set_on_insert':
-                op = 'setOnInsert'
+            # If the operator doesn't found from operator map, the op value
+            # will stay unchanged
+            op = operator_map.get(op, op)
 
         match = None
         if parts[-1] in COMPARISON_OPERATORS:
             match = parts.pop()
 
         # Allow to escape operator-like field name by __
-        if len(parts) > 1 and parts[-1] == '':
+        if len(parts) > 1 and parts[-1] == "":
             parts.pop()
 
         if _doc_cls:
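Editor's sketch (not part of the diff): the operator_map rewrite in action. 'dec' flips the sign and becomes '$inc', while bare keys default to '$set'; the field names are hypothetical.

from mongoengine.queryset.transform import update

print(update(dec__count=1, name="Ada"))
# -> {'$inc': {'count': -1}, '$set': {'name': 'Ada'}}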
@@ -261,8 +283,8 @@ def update(_doc_cls=None, **update):
             append_field = True
             if isinstance(field, six.string_types):
                 # Convert the S operator to $
-                if field == 'S':
-                    field = '$'
+                if field == "S":
+                    field = "$"
                 parts.append(field)
                 append_field = False
             else:
@@ -270,7 +292,7 @@ def update(_doc_cls=None, **update):
             if append_field:
                 appended_sub_field = False
                 cleaned_fields.append(field)
-                if hasattr(field, 'field'):
+                if hasattr(field, "field"):
                     cleaned_fields.append(field.field)
                     appended_sub_field = True
 
@@ -280,63 +302,91 @@ def update(_doc_cls=None, **update):
             else:
                 field = cleaned_fields[-1]
 
-            GeoJsonBaseField = _import_class('GeoJsonBaseField')
+            GeoJsonBaseField = _import_class("GeoJsonBaseField")
             if isinstance(field, GeoJsonBaseField):
                 value = field.to_mongo(value)
 
-            if op in (None, 'set', 'push', 'pull'):
+            if op == "pull":
                 if field.required or value is not None:
+                    if match in ("in", "nin") and not isinstance(value, dict):
+                        value = _prepare_query_for_iterable(field, op, value)
+                    else:
+                        value = field.prepare_query_value(op, value)
+            elif op == "push" and isinstance(value, (list, tuple, set)):
+                value = [field.prepare_query_value(op, v) for v in value]
+            elif op in (None, "set", "push"):
+                if field.required or value is not None:
                     value = field.prepare_query_value(op, value)
-            elif op in ('pushAll', 'pullAll'):
+            elif op in ("pushAll", "pullAll"):
                 value = [field.prepare_query_value(op, v) for v in value]
-            elif op in ('addToSet', 'setOnInsert'):
+            elif op in ("addToSet", "setOnInsert"):
                 if isinstance(value, (list, tuple, set)):
                     value = [field.prepare_query_value(op, v) for v in value]
                 elif field.required or value is not None:
                     value = field.prepare_query_value(op, value)
-            elif op == 'unset':
+            elif op == "unset":
                 value = 1
+            elif op == "inc":
+                value = field.prepare_query_value(op, value)
 
         if match:
-            match = '$' + match
+            match = "$" + match
             value = {match: value}
 
-        key = '.'.join(parts)
+        key = ".".join(parts)
 
         if not op:
             raise InvalidQueryError('Updates must supply an operation '
                                     'eg: set__FIELD=value')
 
-        if 'pull' in op and '.' in key:
+        if "pull" in op and "." in key:
             # Dot operators don't work on pull operations
             # unless they point to a list field
             # Otherwise it uses nested dict syntax
-            if op == 'pullAll':
-                raise InvalidQueryError('pullAll operations only support '
-                                        'a single field depth')
+            if op == "pullAll":
+                raise InvalidQueryError(
+                    "pullAll operations only support a single field depth"
+                )
 
             # Look for the last list field and use dot notation until there
             field_classes = [c.__class__ for c in cleaned_fields]
             field_classes.reverse()
-            ListField = _import_class('ListField')
-            if ListField in field_classes:
-                # Join all fields via dot notation to the last ListField
+            ListField = _import_class("ListField")
+            EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField")
+            if ListField in field_classes or EmbeddedDocumentListField in field_classes:
+                # Join all fields via dot notation to the last ListField or EmbeddedDocumentListField
                 # Then process as normal
-                last_listField = len(
-                    cleaned_fields) - field_classes.index(ListField)
-                key = '.'.join(parts[:last_listField])
+                if ListField in field_classes:
+                    _check_field = ListField
+                else:
+                    _check_field = EmbeddedDocumentListField
+
+                last_listField = len(cleaned_fields) - field_classes.index(_check_field)
+                key = ".".join(parts[:last_listField])
                 parts = parts[last_listField:]
                 parts.insert(0, key)
 
             parts.reverse()
             for key in parts:
                 value = {key: value}
-        elif op == 'addToSet' and isinstance(value, list):
-            value = {key: {'$each': value}}
+        elif op == "addToSet" and isinstance(value, list):
+            value = {key: {"$each": value}}
+        elif op in ("push", "pushAll"):
+            if parts[-1].isdigit():
+                key = ".".join(parts[0:-1])
+                position = int(parts[-1])
+                # $position expects an iterable. If pushing a single value,
+                # wrap it in a list.
+                if not isinstance(value, (set, tuple, list)):
+                    value = [value]
+                value = {key: {"$each": value, "$position": position}}
+            else:
+                if op == "pushAll":
+                    op = "push"  # convert to non-deprecated keyword
+                if not isinstance(value, (set, tuple, list)):
+                    value = [value]
+                value = {key: {"$each": value}}
         else:
             value = {key: value}
 
-        key = '$' + op
+        key = "$" + op
         if key not in mongo_update:
             mongo_update[key] = value
         elif key in mongo_update and isinstance(mongo_update[key], dict):
@@ -347,45 +397,45 @@ def update(_doc_cls=None, **update):
 
 def _geo_operator(field, op, value):
     """Helper to return the query for a given geo query."""
-    if op == 'max_distance':
-        value = {'$maxDistance': value}
-    elif op == 'min_distance':
-        value = {'$minDistance': value}
+    if op == "max_distance":
+        value = {"$maxDistance": value}
+    elif op == "min_distance":
+        value = {"$minDistance": value}
     elif field._geo_index == pymongo.GEO2D:
-        if op == 'within_distance':
-            value = {'$within': {'$center': value}}
-        elif op == 'within_spherical_distance':
-            value = {'$within': {'$centerSphere': value}}
-        elif op == 'within_polygon':
-            value = {'$within': {'$polygon': value}}
-        elif op == 'near':
-            value = {'$near': value}
-        elif op == 'near_sphere':
-            value = {'$nearSphere': value}
-        elif op == 'within_box':
-            value = {'$within': {'$box': value}}
-        else:
-            raise NotImplementedError('Geo method "%s" has not been '
-                                      'implemented for a GeoPointField' % op)
+        if op == "within_distance":
+            value = {"$within": {"$center": value}}
+        elif op == "within_spherical_distance":
+            value = {"$within": {"$centerSphere": value}}
+        elif op == "within_polygon":
+            value = {"$within": {"$polygon": value}}
+        elif op == "near":
+            value = {"$near": value}
+        elif op == "near_sphere":
+            value = {"$nearSphere": value}
+        elif op == "within_box":
+            value = {"$within": {"$box": value}}
+        else:
+            raise NotImplementedError(
+                'Geo method "%s" has not been ' "implemented for a GeoPointField" % op
+            )
     else:
-        if op == 'geo_within':
-            value = {'$geoWithin': _infer_geometry(value)}
-        elif op == 'geo_within_box':
-            value = {'$geoWithin': {'$box': value}}
-        elif op == 'geo_within_polygon':
-            value = {'$geoWithin': {'$polygon': value}}
-        elif op == 'geo_within_center':
-            value = {'$geoWithin': {'$center': value}}
-        elif op == 'geo_within_sphere':
-            value = {'$geoWithin': {'$centerSphere': value}}
-        elif op == 'geo_intersects':
-            value = {'$geoIntersects': _infer_geometry(value)}
-        elif op == 'near':
-            value = {'$near': _infer_geometry(value)}
+        if op == "geo_within":
+            value = {"$geoWithin": _infer_geometry(value)}
+        elif op == "geo_within_box":
+            value = {"$geoWithin": {"$box": value}}
+        elif op == "geo_within_polygon":
+            value = {"$geoWithin": {"$polygon": value}}
+        elif op == "geo_within_center":
+            value = {"$geoWithin": {"$center": value}}
+        elif op == "geo_within_sphere":
+            value = {"$geoWithin": {"$centerSphere": value}}
+        elif op == "geo_intersects":
+            value = {"$geoIntersects": _infer_geometry(value)}
+        elif op == "near":
+            value = {"$near": _infer_geometry(value)}
+        else:
+            raise NotImplementedError(
+                'Geo method "%s" has not been implemented for a %s ' % (op, field._name)
+            )
     return value
 
@@ -395,33 +445,58 @@ def _infer_geometry(value):
     given value.
     """
     if isinstance(value, dict):
-        if '$geometry' in value:
+        if "$geometry" in value:
             return value
-        elif 'coordinates' in value and 'type' in value:
-            return {'$geometry': value}
-        raise InvalidQueryError('Invalid $geometry dictionary should have '
-                                'type and coordinates keys')
+        elif "coordinates" in value and "type" in value:
+            return {"$geometry": value}
+        raise InvalidQueryError(
+            "Invalid $geometry dictionary should have type and coordinates keys"
+        )
     elif isinstance(value, (list, set)):
+        # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?
+        # TODO: should both TypeError and IndexError be alike interpreted?
+
         try:
             value[0][0][0]
-            return {'$geometry': {'type': 'Polygon', 'coordinates': value}}
+            return {"$geometry": {"type": "Polygon", "coordinates": value}}
         except (TypeError, IndexError):
             pass
 
         try:
             value[0][0]
-            return {'$geometry': {'type': 'LineString', 'coordinates': value}}
+            return {"$geometry": {"type": "LineString", "coordinates": value}}
         except (TypeError, IndexError):
             pass
 
         try:
             value[0]
-            return {'$geometry': {'type': 'Point', 'coordinates': value}}
+            return {"$geometry": {"type": "Point", "coordinates": value}}
         except (TypeError, IndexError):
             pass
 
-    raise InvalidQueryError('Invalid $geometry data. Can be either a '
-                            'dictionary or (nested) lists of coordinate(s)')
+    raise InvalidQueryError(
+        "Invalid $geometry data. Can be either a "
+        "dictionary or (nested) lists of coordinate(s)"
+    )
+
+
+def _prepare_query_for_iterable(field, op, value):
+    # We need a special check for BaseDocument, because - although it's iterable - using
+    # it as such in the context of this method is most definitely a mistake.
+    BaseDocument = _import_class("BaseDocument")
+
+    if isinstance(value, BaseDocument):
+        raise TypeError(
+            "When using the `in`, `nin`, `all`, or "
+            "`near`-operators you can't use a "
+            "`Document`, you must wrap your object "
+            "in a list (object -> [object])."
+        )
+
+    if not hasattr(value, "__iter__"):
+        raise TypeError(
+            "The `in`, `nin`, `all`, or "
+            "`near`-operators must be applied to an "
+            "iterable (e.g. a list)."
+        )
+
+    return [field.prepare_query_value(op, v) for v in value]
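Editor's sketch (not part of the diff): how _infer_geometry classifies nesting depth into GeoJSON types; the coordinates are arbitrary sample values.

from mongoengine.queryset.transform import _infer_geometry

print(_infer_geometry([40.0, 5.0]))
# -> {'$geometry': {'type': 'Point', 'coordinates': [40.0, 5.0]}}
print(_infer_geometry([[40.0, 5.0], [41.0, 6.0]]))
# -> {'$geometry': {'type': 'LineString', 'coordinates': [[40.0, 5.0], [41.0, 6.0]]}}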
mongoengine/queryset/visitor.py
@@ -3,7 +3,7 @@ import copy
 from mongoengine.errors import InvalidQueryError
 from mongoengine.queryset import transform
 
-__all__ = ('Q',)
+__all__ = ("Q", "QNode")
 
 
 class QNodeVisitor(object):
@@ -69,9 +69,9 @@ class QueryCompilerVisitor(QNodeVisitor):
         self.document = document
 
     def visit_combination(self, combination):
-        operator = '$and'
+        operator = "$and"
         if combination.operation == combination.OR:
-            operator = '$or'
+            operator = "$or"
         return {operator: combination.children}
 
     def visit_query(self, query):
@@ -96,7 +96,7 @@ class QNode(object):
         """Combine this node with another node into a QCombination
         object.
         """
-        if getattr(other, 'empty', True):
+        if getattr(other, "empty", True):
             return self
 
         if self.empty:
@@ -131,6 +131,10 @@ class QCombination(QNode):
         else:
             self.children.append(node)
 
+    def __repr__(self):
+        op = " & " if self.operation is self.AND else " | "
+        return "(%s)" % op.join([repr(node) for node in self.children])
+
     def accept(self, visitor):
         for i in range(len(self.children)):
             if isinstance(self.children[i], QNode):
@@ -151,6 +155,9 @@ class Q(QNode):
     def __init__(self, **query):
         self.query = query
 
+    def __repr__(self):
+        return "Q(**%s)" % repr(self.query)
+
     def accept(self, visitor):
         return visitor.visit_query(self)
 
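Editor's sketch (not part of the diff): the new __repr__ methods make combined Q objects readable; the query keys are hypothetical.

from mongoengine.queryset.visitor import Q

q = Q(name="Ada") | Q(age__gte=18)
print(repr(q))
# -> (Q(**{'name': 'Ada'}) | Q(**{'age__gte': 18}))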
mongoengine/signals.py
@@ -1,5 +1,12 @@
-__all__ = ('pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
-           'post_save', 'pre_delete', 'post_delete')
+__all__ = (
+    "pre_init",
+    "post_init",
+    "pre_save",
+    "pre_save_post_validation",
+    "post_save",
+    "pre_delete",
+    "post_delete",
+)
 
 signals_available = False
 try:
@@ -7,6 +14,7 @@ try:
 
     signals_available = True
 except ImportError:
+
     class Namespace(object):
         def signal(self, name, doc=None):
             return _FakeSignal(name, doc)
@@ -23,13 +31,16 @@ except ImportError:
             self.__doc__ = doc
 
         def _fail(self, *args, **kwargs):
-            raise RuntimeError('signalling support is unavailable '
-                               'because the blinker library is '
-                               'not installed.')
+            raise RuntimeError(
+                "signalling support is unavailable "
+                "because the blinker library is "
+                "not installed."
+            )
 
         send = lambda *a, **kw: None  # noqa
-        connect = disconnect = has_receivers_for = receivers_for = \
-            temporarily_connected_to = _fail
+        connect = (
+            disconnect
+        ) = has_receivers_for = receivers_for = temporarily_connected_to = _fail
         del _fail
 
 
@@ -37,12 +48,12 @@ except ImportError:
 # not put signals in here. Create your own namespace instead.
 _signals = Namespace()
 
-pre_init = _signals.signal('pre_init')
-post_init = _signals.signal('post_init')
-pre_save = _signals.signal('pre_save')
-pre_save_post_validation = _signals.signal('pre_save_post_validation')
-post_save = _signals.signal('post_save')
-pre_delete = _signals.signal('pre_delete')
-post_delete = _signals.signal('post_delete')
-pre_bulk_insert = _signals.signal('pre_bulk_insert')
-post_bulk_insert = _signals.signal('post_bulk_insert')
+pre_init = _signals.signal("pre_init")
+post_init = _signals.signal("post_init")
+pre_save = _signals.signal("pre_save")
+pre_save_post_validation = _signals.signal("pre_save_post_validation")
+post_save = _signals.signal("post_save")
+pre_delete = _signals.signal("pre_delete")
+post_delete = _signals.signal("post_delete")
+pre_bulk_insert = _signals.signal("pre_bulk_insert")
+post_bulk_insert = _signals.signal("post_bulk_insert")
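Editor's sketch (not part of the diff): connecting a receiver to one of the signals declared above; requires the blinker library, and the Author document is hypothetical.

from mongoengine import Document, StringField, signals

class Author(Document):
    name = StringField()

def log_save(sender, document, **kwargs):
    print("saved:", document.name)

signals.post_save.connect(log_save, sender=Author)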
requirements-lint.txt (new file, 3 lines)
@@ -0,0 +1,3 @@
+black
+flake8
+flake8-import-order
requirements.txt
@@ -1,7 +1,5 @@
 nose
-pymongo>=2.7.1
+pymongo>=3.4
 six==1.10.0
-flake8
-flake8-import-order
 Sphinx==1.5.5
 sphinx-rtd-theme==0.2.4
setup.cfg
@@ -1,11 +1,11 @@
 [nosetests]
 verbosity=2
 detailed-errors=1
-tests=tests
+#tests=tests
 cover-package=mongoengine
 
 [flake8]
-ignore=E501,F401,F403,F405,I201
+ignore=E501,F401,F403,F405,I201,I202,W504, W605, W503
 exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
 max-complexity=47
 application-import-names=mongoengine,tests
setup.py (71 lines changed)
@@ -8,13 +8,10 @@ try:
 except ImportError:
     pass
 
-DESCRIPTION = (
-    'MongoEngine is a Python Object-Document '
-    'Mapper for working with MongoDB.'
-)
+DESCRIPTION = "MongoEngine is a Python Object-Document Mapper for working with MongoDB."
 
 try:
-    with open('README.rst') as fin:
+    with open("README.rst") as fin:
         LONG_DESCRIPTION = fin.read()
 except Exception:
     LONG_DESCRIPTION = None
@@ -24,64 +21,64 @@ def get_version(version_tuple):
     """Return the version tuple as a string, e.g. for (0, 10, 7),
     return '0.10.7'.
     """
-    return '.'.join(map(str, version_tuple))
+    return ".".join(map(str, version_tuple))
 
 
 # Dirty hack to get version number from monogengine/__init__.py - we can't
 # import it as it depends on PyMongo and PyMongo isn't installed until this
 # file is read
-init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
-version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]
+init = os.path.join(os.path.dirname(__file__), "mongoengine", "__init__.py")
+version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0]
 
-VERSION = get_version(eval(version_line.split('=')[-1]))
+VERSION = get_version(eval(version_line.split("=")[-1]))
 
 CLASSIFIERS = [
-    'Development Status :: 4 - Beta',
-    'Intended Audience :: Developers',
-    'License :: OSI Approved :: MIT License',
-    'Operating System :: OS Independent',
-    'Programming Language :: Python',
+    "Development Status :: 4 - Beta",
+    "Intended Audience :: Developers",
+    "License :: OSI Approved :: MIT License",
+    "Operating System :: OS Independent",
+    "Programming Language :: Python",
     "Programming Language :: Python :: 2",
     "Programming Language :: Python :: 2.7",
     "Programming Language :: Python :: 3",
     "Programming Language :: Python :: 3.3",
     "Programming Language :: Python :: 3.4",
     "Programming Language :: Python :: 3.5",
     "Programming Language :: Python :: 3.6",
     "Programming Language :: Python :: Implementation :: CPython",
     "Programming Language :: Python :: Implementation :: PyPy",
-    'Topic :: Database',
-    'Topic :: Software Development :: Libraries :: Python Modules',
+    "Topic :: Database",
+    "Topic :: Software Development :: Libraries :: Python Modules",
 ]
 
 extra_opts = {
-    'packages': find_packages(exclude=['tests', 'tests.*']),
-    'tests_require': ['nose', 'coverage==4.2', 'blinker', 'Pillow>=2.0.0']
+    "packages": find_packages(exclude=["tests", "tests.*"]),
+    "tests_require": ["nose", "coverage==4.2", "blinker", "Pillow>=2.0.0"],
 }
 if sys.version_info[0] == 3:
-    extra_opts['use_2to3'] = True
-    if 'test' in sys.argv or 'nosetests' in sys.argv:
-        extra_opts['packages'] = find_packages()
-        extra_opts['package_data'] = {
-            'tests': ['fields/mongoengine.png', 'fields/mongodb_leaf.png']}
+    extra_opts["use_2to3"] = True
+    if "test" in sys.argv or "nosetests" in sys.argv:
+        extra_opts["packages"] = find_packages()
+        extra_opts["package_data"] = {
+            "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]
+        }
 else:
-    extra_opts['tests_require'] += ['python-dateutil']
+    extra_opts["tests_require"] += ["python-dateutil"]
 
 setup(
-    name='mongoengine',
+    name="mongoengine",
     version=VERSION,
-    author='Harry Marr',
-    author_email='harry.marr@{nospam}gmail.com',
-    maintainer="Ross Lawley",
-    maintainer_email="ross.lawley@{nospam}gmail.com",
-    url='http://mongoengine.org/',
-    download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
-    license='MIT',
+    author="Harry Marr",
+    author_email="harry.marr@gmail.com",
+    maintainer="Stefan Wojcik",
+    maintainer_email="wojcikstefan@gmail.com",
+    url="http://mongoengine.org/",
+    download_url="https://github.com/MongoEngine/mongoengine/tarball/master",
+    license="MIT",
     include_package_data=True,
     description=DESCRIPTION,
     long_description=LONG_DESCRIPTION,
-    platforms=['any'],
+    platforms=["any"],
    classifiers=CLASSIFIERS,
-    install_requires=['pymongo>=2.7.1', 'six'],
-    test_suite='nose.collector',
+    install_requires=["pymongo>=3.4", "six"],
+    test_suite="nose.collector",
     **extra_opts
 )
tests/__init__.py
@@ -1,4 +1,4 @@
-from all_warnings import AllWarnings
-from document import *
-from queryset import *
-from fields import *
+from .all_warnings import AllWarnings
+from .document import *
+from .queryset import *
+from .fields import *
tests/all_warnings/__init__.py
@@ -9,34 +9,32 @@ import warnings
 from mongoengine import *
 
 
-__all__ = ('AllWarnings', )
+__all__ = ("AllWarnings",)
 
 
 class AllWarnings(unittest.TestCase):
 
     def setUp(self):
-        connect(db='mongoenginetest')
+        connect(db="mongoenginetest")
         self.warning_list = []
         self.showwarning_default = warnings.showwarning
         warnings.showwarning = self.append_to_warning_list
 
     def append_to_warning_list(self, message, category, *args):
-        self.warning_list.append({"message": message,
-                                  "category": category})
+        self.warning_list.append({"message": message, "category": category})
 
     def tearDown(self):
         # restore default handling of warnings
         warnings.showwarning = self.showwarning_default
 
     def test_document_collection_syntax_warning(self):
 
         class NonAbstractBase(Document):
-            meta = {'allow_inheritance': True}
+            meta = {"allow_inheritance": True}
 
         class InheritedDocumentFailTest(NonAbstractBase):
-            meta = {'collection': 'fail'}
+            meta = {"collection": "fail"}
 
         warning = self.warning_list[0]
         self.assertEqual(SyntaxWarning, warning["category"])
-        self.assertEqual('non_abstract_base',
-                         InheritedDocumentFailTest._get_collection_name())
+        self.assertEqual(
+            "non_abstract_base", InheritedDocumentFailTest._get_collection_name()
+        )
tests/document/__init__.py
@@ -1,13 +1,13 @@
 import unittest
 
-from class_methods import *
-from delta import *
-from dynamic import *
-from indexes import *
-from inheritance import *
-from instance import *
-from json_serialisation import *
-from validation import *
+from .class_methods import *
+from .delta import *
+from .dynamic import *
+from .indexes import *
+from .inheritance import *
+from .instance import *
+from .json_serialisation import *
+from .validation import *
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
tests/document/class_methods.py
@@ -2,17 +2,17 @@
 import unittest
 
 from mongoengine import *
+from mongoengine.pymongo_support import list_collection_names
 
 from mongoengine.queryset import NULLIFY, PULL
 from mongoengine.connection import get_db
 
-__all__ = ("ClassMethodsTest", )
+__all__ = ("ClassMethodsTest",)
 
 
 class ClassMethodsTest(unittest.TestCase):
 
     def setUp(self):
-        connect(db='mongoenginetest')
+        connect(db="mongoenginetest")
         self.db = get_db()
 
         class Person(Document):
@@ -26,19 +26,19 @@ class ClassMethodsTest(unittest.TestCase):
         self.Person = Person
 
     def tearDown(self):
-        for collection in self.db.collection_names():
-            if 'system.' in collection:
-                continue
+        for collection in list_collection_names(self.db):
             self.db.drop_collection(collection)
 
     def test_definition(self):
         """Ensure that document may be defined using fields.
         """
-        self.assertEqual(['_cls', 'age', 'id', 'name'],
-                         sorted(self.Person._fields.keys()))
-        self.assertEqual(["IntField", "ObjectIdField", "StringField", "StringField"],
-                         sorted([x.__class__.__name__ for x in
-                                 self.Person._fields.values()]))
+        self.assertEqual(
+            ["_cls", "age", "id", "name"], sorted(self.Person._fields.keys())
+        )
+        self.assertEqual(
+            ["IntField", "ObjectIdField", "StringField", "StringField"],
+            sorted([x.__class__.__name__ for x in self.Person._fields.values()]),
+        )
 
     def test_get_db(self):
         """Ensure that get_db returns the expected db.
@@ -50,38 +50,40 @@ class ClassMethodsTest(unittest.TestCase):
         """Ensure that get_collection_name returns the expected collection
         name.
         """
-        collection_name = 'person'
+        collection_name = "person"
         self.assertEqual(collection_name, self.Person._get_collection_name())
 
     def test_get_collection(self):
         """Ensure that get_collection returns the expected collection.
         """
-        collection_name = 'person'
+        collection_name = "person"
         collection = self.Person._get_collection()
         self.assertEqual(self.db[collection_name], collection)
 
     def test_drop_collection(self):
         """Ensure that the collection may be dropped from the database.
         """
-        collection_name = 'person'
-        self.Person(name='Test').save()
-        self.assertTrue(collection_name in self.db.collection_names())
+        collection_name = "person"
+        self.Person(name="Test").save()
+        self.assertIn(collection_name, list_collection_names(self.db))
 
         self.Person.drop_collection()
-        self.assertFalse(collection_name in self.db.collection_names())
+        self.assertNotIn(collection_name, list_collection_names(self.db))
 
     def test_register_delete_rule(self):
         """Ensure that register delete rule adds a delete rule to the document
         meta.
         """
 
         class Job(Document):
             employee = ReferenceField(self.Person)
 
-        self.assertEqual(self.Person._meta.get('delete_rules'), None)
+        self.assertEqual(self.Person._meta.get("delete_rules"), None)
 
-        self.Person.register_delete_rule(Job, 'employee', NULLIFY)
-        self.assertEqual(self.Person._meta['delete_rules'],
-                         {(Job, 'employee'): NULLIFY})
+        self.Person.register_delete_rule(Job, "employee", NULLIFY)
+        self.assertEqual(
+            self.Person._meta["delete_rules"], {(Job, "employee"): NULLIFY}
+        )
 
     def test_compare_indexes(self):
         """ Ensure that the indexes are properly created and that
@@ -94,23 +96,27 @@ class ClassMethodsTest(unittest.TestCase):
             description = StringField()
             tags = StringField()
 
-            meta = {
-                'indexes': [('author', 'title')]
-            }
+            meta = {"indexes": [("author", "title")]}
 
         BlogPost.drop_collection()
 
         BlogPost.ensure_indexes()
-        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
+        self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []})
 
-        BlogPost.ensure_index(['author', 'description'])
-        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('author', 1), ('description', 1)]] })
+        BlogPost.ensure_index(["author", "description"])
+        self.assertEqual(
+            BlogPost.compare_indexes(),
+            {"missing": [], "extra": [[("author", 1), ("description", 1)]]},
+        )
 
-        BlogPost._get_collection().drop_index('author_1_description_1')
-        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
+        BlogPost._get_collection().drop_index("author_1_description_1")
+        self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []})
 
-        BlogPost._get_collection().drop_index('author_1_title_1')
-        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('author', 1), ('title', 1)]], 'extra': [] })
+        BlogPost._get_collection().drop_index("author_1_title_1")
+        self.assertEqual(
+            BlogPost.compare_indexes(),
+            {"missing": [[("author", 1), ("title", 1)]], "extra": []},
+        )
 
     def test_compare_indexes_inheritance(self):
         """ Ensure that the indexes are properly created and that
@@ -123,32 +129,34 @@ class ClassMethodsTest(unittest.TestCase):
             title = StringField()
             description = StringField()
 
-            meta = {
-                'allow_inheritance': True
-            }
+            meta = {"allow_inheritance": True}
 
         class BlogPostWithTags(BlogPost):
             tags = StringField()
             tag_list = ListField(StringField())
 
-            meta = {
-                'indexes': [('author', 'tags')]
-            }
+            meta = {"indexes": [("author", "tags")]}
 
         BlogPost.drop_collection()
 
         BlogPost.ensure_indexes()
         BlogPostWithTags.ensure_indexes()
-        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
+        self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []})
 
-        BlogPostWithTags.ensure_index(['author', 'tag_list'])
-        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]] })
+        BlogPostWithTags.ensure_index(["author", "tag_list"])
+        self.assertEqual(
+            BlogPost.compare_indexes(),
+            {"missing": [], "extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]]},
+        )
 
-        BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1')
-        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
+        BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tag_list_1")
+        self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []})
 
-        BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1')
-        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': [] })
+        BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tags_1")
+        self.assertEqual(
+            BlogPost.compare_indexes(),
+            {"missing": [[("_cls", 1), ("author", 1), ("tags", 1)]], "extra": []},
+        )
 
     def test_compare_indexes_multiple_subclasses(self):
         """ Ensure that compare_indexes behaves correctly if called from a
@@ -160,32 +168,52 @@ class ClassMethodsTest(unittest.TestCase):
|
||||
title = StringField()
|
||||
description = StringField()
|
||||
|
||||
meta = {
|
||||
'allow_inheritance': True
|
||||
}
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class BlogPostWithTags(BlogPost):
|
||||
tags = StringField()
|
||||
tag_list = ListField(StringField())
|
||||
|
||||
meta = {
|
||||
'indexes': [('author', 'tags')]
|
||||
}
|
||||
meta = {"indexes": [("author", "tags")]}
|
||||
|
||||
class BlogPostWithCustomField(BlogPost):
|
||||
custom = DictField()
|
||||
|
||||
meta = {
|
||||
'indexes': [('author', 'custom')]
|
||||
}
|
||||
meta = {"indexes": [("author", "custom")]}
|
||||
|
||||
BlogPost.ensure_indexes()
|
||||
BlogPostWithTags.ensure_indexes()
|
||||
BlogPostWithCustomField.ensure_indexes()
|
||||
|
||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
|
||||
self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] })
|
||||
self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] })
|
||||
self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []})
|
||||
self.assertEqual(
|
||||
BlogPostWithTags.compare_indexes(), {"missing": [], "extra": []}
|
||||
)
|
||||
self.assertEqual(
|
||||
BlogPostWithCustomField.compare_indexes(), {"missing": [], "extra": []}
|
||||
)
|
||||
|
||||
def test_compare_indexes_for_text_indexes(self):
|
||||
""" Ensure that compare_indexes behaves correctly for text indexes """
|
||||
|
||||
class Doc(Document):
|
||||
a = StringField()
|
||||
b = StringField()
|
||||
meta = {
|
||||
"indexes": [
|
||||
{
|
||||
"fields": ["$a", "$b"],
|
||||
"default_language": "english",
|
||||
"weights": {"a": 10, "b": 2},
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Doc.drop_collection()
|
||||
Doc.ensure_indexes()
|
||||
actual = Doc.compare_indexes()
|
||||
expected = {"missing": [], "extra": []}
|
||||
self.assertEqual(actual, expected)
|
||||
|
||||
def test_list_indexes_inheritance(self):
|
||||
""" ensure that all of the indexes are listed regardless of the super-
|
||||
@@ -197,23 +225,17 @@ class ClassMethodsTest(unittest.TestCase):
title = StringField()
description = StringField()

meta = {
'allow_inheritance': True
}
meta = {"allow_inheritance": True}

class BlogPostWithTags(BlogPost):
tags = StringField()

meta = {
'indexes': [('author', 'tags')]
}
meta = {"indexes": [("author", "tags")]}

class BlogPostWithTagsAndExtraText(BlogPostWithTags):
extra_text = StringField()

meta = {
'indexes': [('author', 'tags', 'extra_text')]
}
meta = {"indexes": [("author", "tags", "extra_text")]}

BlogPost.drop_collection()

@@ -221,17 +243,21 @@ class ClassMethodsTest(unittest.TestCase):
BlogPostWithTags.ensure_indexes()
BlogPostWithTagsAndExtraText.ensure_indexes()

self.assertEqual(BlogPost.list_indexes(),
BlogPostWithTags.list_indexes())
self.assertEqual(BlogPost.list_indexes(),
BlogPostWithTagsAndExtraText.list_indexes())
self.assertEqual(BlogPost.list_indexes(),
[[('_cls', 1), ('author', 1), ('tags', 1)],
[('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)],
[(u'_id', 1)], [('_cls', 1)]])
self.assertEqual(BlogPost.list_indexes(), BlogPostWithTags.list_indexes())
self.assertEqual(
BlogPost.list_indexes(), BlogPostWithTagsAndExtraText.list_indexes()
)
self.assertEqual(
BlogPost.list_indexes(),
[
[("_cls", 1), ("author", 1), ("tags", 1)],
[("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)],
[(u"_id", 1)],
[("_cls", 1)],
],
)

def test_register_delete_rule_inherited(self):

class Vaccine(Document):
name = StringField(required=True)

@@ -239,15 +265,17 @@ class ClassMethodsTest(unittest.TestCase):

class Animal(Document):
family = StringField(required=True)
vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL))
vaccine_made = ListField(
ReferenceField("Vaccine", reverse_delete_rule=PULL)
)

meta = {"allow_inheritance": True, "indexes": ["family"]}

class Cat(Animal):
name = StringField(required=True)

self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL)
self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL)
self.assertEqual(Vaccine._meta["delete_rules"][(Animal, "vaccine_made")], PULL)
self.assertEqual(Vaccine._meta["delete_rules"][(Cat, "vaccine_made")], PULL)

def test_collection_naming(self):
"""Ensure that a collection with a specified name may be used.
@@ -255,80 +283,79 @@ class ClassMethodsTest(unittest.TestCase):

class DefaultNamingTest(Document):
pass
self.assertEqual('default_naming_test',
DefaultNamingTest._get_collection_name())

self.assertEqual(
"default_naming_test", DefaultNamingTest._get_collection_name()
)

class CustomNamingTest(Document):
meta = {'collection': 'pimp_my_collection'}
meta = {"collection": "pimp_my_collection"}

self.assertEqual('pimp_my_collection',
CustomNamingTest._get_collection_name())
self.assertEqual("pimp_my_collection", CustomNamingTest._get_collection_name())

class DynamicNamingTest(Document):
meta = {'collection': lambda c: "DYNAMO"}
self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name())
meta = {"collection": lambda c: "DYNAMO"}

self.assertEqual("DYNAMO", DynamicNamingTest._get_collection_name())

# Use Abstract class to handle backwards compatibility
class BaseDocument(Document):
meta = {
'abstract': True,
'collection': lambda c: c.__name__.lower()
}
meta = {"abstract": True, "collection": lambda c: c.__name__.lower()}

class OldNamingConvention(BaseDocument):
pass
self.assertEqual('oldnamingconvention',
OldNamingConvention._get_collection_name())

self.assertEqual(
"oldnamingconvention", OldNamingConvention._get_collection_name()
)

class InheritedAbstractNamingTest(BaseDocument):
meta = {'collection': 'wibble'}
self.assertEqual('wibble',
InheritedAbstractNamingTest._get_collection_name())
meta = {"collection": "wibble"}

self.assertEqual("wibble", InheritedAbstractNamingTest._get_collection_name())

# Mixin tests
class BaseMixin(object):
meta = {
'collection': lambda c: c.__name__.lower()
}
meta = {"collection": lambda c: c.__name__.lower()}

class OldMixinNamingConvention(Document, BaseMixin):
pass
self.assertEqual('oldmixinnamingconvention',
OldMixinNamingConvention._get_collection_name())

self.assertEqual(
"oldmixinnamingconvention", OldMixinNamingConvention._get_collection_name()
)

class BaseMixin(object):
meta = {
'collection': lambda c: c.__name__.lower()
}
meta = {"collection": lambda c: c.__name__.lower()}

class BaseDocument(Document, BaseMixin):
meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

class MyDocument(BaseDocument):
pass

self.assertEqual('basedocument', MyDocument._get_collection_name())
self.assertEqual("basedocument", MyDocument._get_collection_name())

def test_custom_collection_name_operations(self):
"""Ensure that a collection with a specified name is used as expected.
"""
collection_name = 'personCollTest'
collection_name = "personCollTest"

class Person(Document):
name = StringField()
meta = {'collection': collection_name}
meta = {"collection": collection_name}

Person(name="Test User").save()
self.assertTrue(collection_name in self.db.collection_names())
self.assertIn(collection_name, list_collection_names(self.db))

user_obj = self.db[collection_name].find_one()
self.assertEqual(user_obj['name'], "Test User")
self.assertEqual(user_obj["name"], "Test User")

user_obj = Person.objects[0]
self.assertEqual(user_obj.name, "Test User")

Person.drop_collection()
self.assertFalse(collection_name in self.db.collection_names())
self.assertNotIn(collection_name, list_collection_names(self.db))

def test_collection_name_and_primary(self):
"""Ensure that a collection with a specified name may be used.
@@ -336,7 +363,7 @@ class ClassMethodsTest(unittest.TestCase):

class Person(Document):
name = StringField(primary_key=True)
meta = {'collection': 'app'}
meta = {"collection": "app"}

Person(name="Test User").save()

@@ -346,5 +373,5 @@ class ClassMethodsTest(unittest.TestCase):
Person.drop_collection()


if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()
File diff suppressed because it is too large
@@ -1,20 +1,18 @@
import unittest

from mongoengine import *
from mongoengine.connection import get_db
from tests.utils import MongoDBTestCase

__all__ = ("DynamicTest", )
__all__ = ("TestDynamicDocument",)


class DynamicTest(unittest.TestCase):

class TestDynamicDocument(MongoDBTestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()
super(TestDynamicDocument, self).setUp()

class Person(DynamicDocument):
name = StringField()
meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

Person.drop_collection()

@@ -27,8 +25,7 @@ class DynamicTest(unittest.TestCase):
p.name = "James"
p.age = 34

self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James",
"age": 34})
self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James", "age": 34})
self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"])
p.save()
self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"])
@@ -36,7 +33,7 @@ class DynamicTest(unittest.TestCase):
self.assertEqual(self.Person.objects.first().age, 34)

# Confirm no changes to self.Person
self.assertFalse(hasattr(self.Person, 'age'))
self.assertFalse(hasattr(self.Person, "age"))

def test_change_scope_of_variable(self):
"""Test changing the scope of a dynamic field has no adverse effects"""
@@ -46,11 +43,11 @@ class DynamicTest(unittest.TestCase):
p.save()

p = self.Person.objects.get()
p.misc = {'hello': 'world'}
p.misc = {"hello": "world"}
p.save()

p = self.Person.objects.get()
self.assertEqual(p.misc, {'hello': 'world'})
self.assertEqual(p.misc, {"hello": "world"})

def test_delete_dynamic_field(self):
"""Test deleting a dynamic field works"""
@@ -61,23 +58,23 @@ class DynamicTest(unittest.TestCase):
p.save()

p = self.Person.objects.get()
p.misc = {'hello': 'world'}
p.misc = {"hello": "world"}
p.save()

p = self.Person.objects.get()
self.assertEqual(p.misc, {'hello': 'world'})
self.assertEqual(p.misc, {"hello": "world"})
collection = self.db[self.Person._get_collection_name()]
obj = collection.find_one()
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])
self.assertEqual(sorted(obj.keys()), ["_cls", "_id", "misc", "name"])

del p.misc
p.save()

p = self.Person.objects.get()
self.assertFalse(hasattr(p, 'misc'))
self.assertFalse(hasattr(p, "misc"))

obj = collection.find_one()
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name'])
self.assertEqual(sorted(obj.keys()), ["_cls", "_id", "name"])

def test_reload_after_unsetting(self):
p = self.Person()
@@ -92,11 +89,52 @@ class DynamicTest(unittest.TestCase):
p.update(age=1)

self.assertEqual(len(p._data), 3)
self.assertEqual(sorted(p._data.keys()), ['_cls', 'id', 'name'])
self.assertEqual(sorted(p._data.keys()), ["_cls", "id", "name"])

p.reload()
self.assertEqual(len(p._data), 4)
self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name'])
self.assertEqual(sorted(p._data.keys()), ["_cls", "age", "id", "name"])

def test_fields_without_underscore(self):
"""Ensure we can query dynamic fields"""
Person = self.Person

p = self.Person(name="Dean")
p.save()

raw_p = Person.objects.as_pymongo().get(id=p.id)
self.assertEqual(raw_p, {"_cls": u"Person", "_id": p.id, "name": u"Dean"})

p.name = "OldDean"
p.newattr = "garbage"
p.save()
raw_p = Person.objects.as_pymongo().get(id=p.id)
self.assertEqual(
raw_p,
{"_cls": u"Person", "_id": p.id, "name": "OldDean", "newattr": u"garbage"},
)

def test_fields_containing_underscore(self):
"""Ensure we can query dynamic fields"""

class WeirdPerson(DynamicDocument):
name = StringField()
_name = StringField()

WeirdPerson.drop_collection()

p = WeirdPerson(name="Dean", _name="Dean")
p.save()

raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
self.assertEqual(raw_p, {"_id": p.id, "_name": u"Dean", "name": u"Dean"})

p.name = "OldDean"
p._name = "NewDean"
p._newattr1 = "garbage" # Unknown fields won't be added
p.save()
raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
self.assertEqual(raw_p, {"_id": p.id, "_name": u"NewDean", "name": u"OldDean"})

def test_dynamic_document_queries(self):
"""Ensure we can query dynamic fields"""
@@ -128,26 +166,25 @@ class DynamicTest(unittest.TestCase):
p2.age = 10
p2.save()

self.assertEqual(Person.objects(age__icontains='ten').count(), 2)
self.assertEqual(Person.objects(age__icontains="ten").count(), 2)
self.assertEqual(Person.objects(age__gte=10).count(), 1)

def test_complex_data_lookups(self):
"""Ensure you can query dynamic document dynamic fields"""
p = self.Person()
p.misc = {'hello': 'world'}
p.misc = {"hello": "world"}
p.save()

self.assertEqual(1, self.Person.objects(misc__hello='world').count())
self.assertEqual(1, self.Person.objects(misc__hello="world").count())

def test_three_level_complex_data_lookups(self):
"""Ensure you can query three level document dynamic fields"""
p = self.Person.objects.create(
misc={'hello': {'hello2': 'world'}}
)
self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count())
p = self.Person.objects.create(misc={"hello": {"hello2": "world"}})
self.assertEqual(1, self.Person.objects(misc__hello__hello2="world").count())

def test_complex_embedded_document_validation(self):
"""Ensure embedded dynamic documents may be validated"""

class Embedded(DynamicEmbeddedDocument):
content = URLField()

@@ -157,10 +194,10 @@ class DynamicTest(unittest.TestCase):
Doc.drop_collection()
doc = Doc()

embedded_doc_1 = Embedded(content='http://mongoengine.org')
embedded_doc_1 = Embedded(content="http://mongoengine.org")
embedded_doc_1.validate()

embedded_doc_2 = Embedded(content='this is not a url')
embedded_doc_2 = Embedded(content="this is not a url")
self.assertRaises(ValidationError, embedded_doc_2.validate)

doc.embedded_field_1 = embedded_doc_1
@@ -169,15 +206,17 @@ class DynamicTest(unittest.TestCase):

def test_inheritance(self):
"""Ensure that dynamic document plays nice with inheritance"""

class Employee(self.Person):
salary = IntField()

Employee.drop_collection()

self.assertTrue('name' in Employee._fields)
self.assertTrue('salary' in Employee._fields)
self.assertEqual(Employee._get_collection_name(),
self.Person._get_collection_name())
self.assertIn("name", Employee._fields)
self.assertIn("salary", Employee._fields)
self.assertEqual(
Employee._get_collection_name(), self.Person._get_collection_name()
)

joe_bloggs = Employee()
joe_bloggs.name = "Joe Bloggs"
@@ -189,10 +228,11 @@ class DynamicTest(unittest.TestCase):
self.assertEqual(1, Employee.objects(age=20).count())

joe_bloggs = self.Person.objects.first()
self.assertTrue(isinstance(joe_bloggs, Employee))
self.assertIsInstance(joe_bloggs, Employee)

def test_embedded_dynamic_document(self):
"""Test dynamic embedded documents"""

class Embedded(DynamicEmbeddedDocument):
pass

@@ -203,78 +243,88 @@ class DynamicTest(unittest.TestCase):
doc = Doc()

embedded_1 = Embedded()
embedded_1.string_field = 'hello'
embedded_1.string_field = "hello"
embedded_1.int_field = 1
embedded_1.dict_field = {'hello': 'world'}
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
embedded_1.dict_field = {"hello": "world"}
embedded_1.list_field = ["1", 2, {"hello": "world"}]
doc.embedded_field = embedded_1

self.assertEqual(doc.to_mongo(), {
"embedded_field": {
"_cls": "Embedded",
"string_field": "hello",
"int_field": 1,
"dict_field": {"hello": "world"},
"list_field": ['1', 2, {'hello': 'world'}]
}
})
doc.save()

doc = Doc.objects.first()
self.assertEqual(doc.embedded_field.__class__, Embedded)
self.assertEqual(doc.embedded_field.string_field, "hello")
self.assertEqual(doc.embedded_field.int_field, 1)
self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
self.assertEqual(doc.embedded_field.list_field,
['1', 2, {'hello': 'world'}])

def test_complex_embedded_documents(self):
"""Test complex dynamic embedded documents setups"""
class Embedded(DynamicEmbeddedDocument):
pass

class Doc(DynamicDocument):
pass

Doc.drop_collection()
doc = Doc()

embedded_1 = Embedded()
embedded_1.string_field = 'hello'
embedded_1.int_field = 1
embedded_1.dict_field = {'hello': 'world'}

embedded_2 = Embedded()
embedded_2.string_field = 'hello'
embedded_2.int_field = 1
embedded_2.dict_field = {'hello': 'world'}
embedded_2.list_field = ['1', 2, {'hello': 'world'}]

embedded_1.list_field = ['1', 2, embedded_2]
doc.embedded_field = embedded_1

self.assertEqual(doc.to_mongo(), {
"embedded_field": {
"_cls": "Embedded",
"string_field": "hello",
"int_field": 1,
"dict_field": {"hello": "world"},
"list_field": ['1', 2,
{"_cls": "Embedded",
self.assertEqual(
doc.to_mongo(),
{
"embedded_field": {
"_cls": "Embedded",
"string_field": "hello",
"int_field": 1,
"dict_field": {"hello": "world"},
"list_field": ['1', 2, {'hello': 'world'}]}
]
}
})
"list_field": ["1", 2, {"hello": "world"}],
}
},
)
doc.save()

doc = Doc.objects.first()
self.assertEqual(doc.embedded_field.__class__, Embedded)
self.assertEqual(doc.embedded_field.string_field, "hello")
self.assertEqual(doc.embedded_field.int_field, 1)
self.assertEqual(doc.embedded_field.dict_field, {"hello": "world"})
self.assertEqual(doc.embedded_field.list_field, ["1", 2, {"hello": "world"}])

def test_complex_embedded_documents(self):
"""Test complex dynamic embedded documents setups"""

class Embedded(DynamicEmbeddedDocument):
pass

class Doc(DynamicDocument):
pass

Doc.drop_collection()
doc = Doc()

embedded_1 = Embedded()
embedded_1.string_field = "hello"
embedded_1.int_field = 1
embedded_1.dict_field = {"hello": "world"}

embedded_2 = Embedded()
embedded_2.string_field = "hello"
embedded_2.int_field = 1
embedded_2.dict_field = {"hello": "world"}
embedded_2.list_field = ["1", 2, {"hello": "world"}]

embedded_1.list_field = ["1", 2, embedded_2]
doc.embedded_field = embedded_1

self.assertEqual(
doc.to_mongo(),
{
"embedded_field": {
"_cls": "Embedded",
"string_field": "hello",
"int_field": 1,
"dict_field": {"hello": "world"},
"list_field": [
"1",
2,
{
"_cls": "Embedded",
"string_field": "hello",
"int_field": 1,
"dict_field": {"hello": "world"},
"list_field": ["1", 2, {"hello": "world"}],
},
],
}
},
)
doc.save()
doc = Doc.objects.first()
self.assertEqual(doc.embedded_field.__class__, Embedded)
self.assertEqual(doc.embedded_field.string_field, "hello")
self.assertEqual(doc.embedded_field.int_field, 1)
self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
self.assertEqual(doc.embedded_field.list_field[0], '1')
self.assertEqual(doc.embedded_field.dict_field, {"hello": "world"})
self.assertEqual(doc.embedded_field.list_field[0], "1")
self.assertEqual(doc.embedded_field.list_field[1], 2)

embedded_field = doc.embedded_field.list_field[2]
@@ -282,9 +332,8 @@ class DynamicTest(unittest.TestCase):
self.assertEqual(embedded_field.__class__, Embedded)
self.assertEqual(embedded_field.string_field, "hello")
self.assertEqual(embedded_field.int_field, 1)
self.assertEqual(embedded_field.dict_field, {'hello': 'world'})
self.assertEqual(embedded_field.list_field, ['1', 2,
{'hello': 'world'}])
self.assertEqual(embedded_field.dict_field, {"hello": "world"})
self.assertEqual(embedded_field.list_field, ["1", 2, {"hello": "world"}])

def test_dynamic_and_embedded(self):
"""Ensure embedded documents play nicely"""
@@ -327,10 +376,15 @@ class DynamicTest(unittest.TestCase):

Person.drop_collection()

Person(name="Eric", address=Address(city="San Francisco", street_number="1337")).save()
Person(
name="Eric", address=Address(city="San Francisco", street_number="1337")
).save()

self.assertEqual(Person.objects.first().address.street_number, '1337')
self.assertEqual(Person.objects.only('address__street_number').first().address.street_number, '1337')
self.assertEqual(Person.objects.first().address.street_number, "1337")
self.assertEqual(
Person.objects.only("address__street_number").first().address.street_number,
"1337",
)

def test_dynamic_and_embedded_dict_access(self):
"""Ensure embedded dynamic documents work with dict[] style access"""
@@ -369,5 +423,6 @@ class DynamicTest(unittest.TestCase):
person.save()
self.assertEqual(Person.objects.first().age, 35)

if __name__ == '__main__':

if __name__ == "__main__":
unittest.main()
File diff suppressed because it is too large
@@ -2,190 +2,282 @@
import unittest
import warnings

from datetime import datetime
from six import iteritems

from mongoengine import (
BooleanField,
Document,
EmbeddedDocument,
EmbeddedDocumentField,
GenericReferenceField,
IntField,
ReferenceField,
StringField,
)
from mongoengine.pymongo_support import list_collection_names
from tests.utils import MongoDBTestCase
from tests.fixtures import Base

from mongoengine import Document, EmbeddedDocument, connect
from mongoengine.connection import get_db
from mongoengine.fields import (BooleanField, GenericReferenceField,
IntField, StringField)

__all__ = ('InheritanceTest', )
__all__ = ("InheritanceTest",)


class InheritanceTest(unittest.TestCase):

def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()

class InheritanceTest(MongoDBTestCase):
def tearDown(self):
for collection in self.db.collection_names():
if 'system.' in collection:
continue
for collection in list_collection_names(self.db):
self.db.drop_collection(collection)

def test_constructor_cls(self):
# Ensures _cls is properly set during construction
# and when object gets reloaded (prevent regression of #1950)
class EmbedData(EmbeddedDocument):
data = StringField()
meta = {"allow_inheritance": True}

class DataDoc(Document):
name = StringField()
embed = EmbeddedDocumentField(EmbedData)
meta = {"allow_inheritance": True}

test_doc = DataDoc(name="test", embed=EmbedData(data="data"))
self.assertEqual(test_doc._cls, "DataDoc")
self.assertEqual(test_doc.embed._cls, "EmbedData")
test_doc.save()
saved_doc = DataDoc.objects.with_id(test_doc.id)
self.assertEqual(test_doc._cls, saved_doc._cls)
self.assertEqual(test_doc.embed._cls, saved_doc.embed._cls)
test_doc.delete()

def test_superclasses(self):
"""Ensure that the correct list of superclasses is assembled.
"""

class Animal(Document):
meta = {'allow_inheritance': True}
class Fish(Animal): pass
class Guppy(Fish): pass
class Mammal(Animal): pass
class Dog(Mammal): pass
class Human(Mammal): pass
meta = {"allow_inheritance": True}

class Fish(Animal):
pass

class Guppy(Fish):
pass

class Mammal(Animal):
pass

class Dog(Mammal):
pass

class Human(Mammal):
pass

self.assertEqual(Animal._superclasses, ())
self.assertEqual(Fish._superclasses, ('Animal',))
self.assertEqual(Guppy._superclasses, ('Animal', 'Animal.Fish'))
self.assertEqual(Mammal._superclasses, ('Animal',))
self.assertEqual(Dog._superclasses, ('Animal', 'Animal.Mammal'))
self.assertEqual(Human._superclasses, ('Animal', 'Animal.Mammal'))
self.assertEqual(Fish._superclasses, ("Animal",))
self.assertEqual(Guppy._superclasses, ("Animal", "Animal.Fish"))
self.assertEqual(Mammal._superclasses, ("Animal",))
self.assertEqual(Dog._superclasses, ("Animal", "Animal.Mammal"))
self.assertEqual(Human._superclasses, ("Animal", "Animal.Mammal"))

def test_external_superclasses(self):
"""Ensure that the correct list of super classes is assembled when
importing part of the model.
"""
class Animal(Base): pass
class Fish(Animal): pass
class Guppy(Fish): pass
class Mammal(Animal): pass
class Dog(Mammal): pass
class Human(Mammal): pass

self.assertEqual(Animal._superclasses, ('Base', ))
self.assertEqual(Fish._superclasses, ('Base', 'Base.Animal',))
self.assertEqual(Guppy._superclasses, ('Base', 'Base.Animal',
'Base.Animal.Fish'))
self.assertEqual(Mammal._superclasses, ('Base', 'Base.Animal',))
self.assertEqual(Dog._superclasses, ('Base', 'Base.Animal',
'Base.Animal.Mammal'))
self.assertEqual(Human._superclasses, ('Base', 'Base.Animal',
'Base.Animal.Mammal'))
class Animal(Base):
pass

class Fish(Animal):
pass

class Guppy(Fish):
pass

class Mammal(Animal):
pass

class Dog(Mammal):
pass

class Human(Mammal):
pass

self.assertEqual(Animal._superclasses, ("Base",))
self.assertEqual(Fish._superclasses, ("Base", "Base.Animal"))
self.assertEqual(
Guppy._superclasses, ("Base", "Base.Animal", "Base.Animal.Fish")
)
self.assertEqual(Mammal._superclasses, ("Base", "Base.Animal"))
self.assertEqual(
Dog._superclasses, ("Base", "Base.Animal", "Base.Animal.Mammal")
)
self.assertEqual(
Human._superclasses, ("Base", "Base.Animal", "Base.Animal.Mammal")
)

def test_subclasses(self):
"""Ensure that the correct list of _subclasses (subclasses) is
assembled.
"""
class Animal(Document):
meta = {'allow_inheritance': True}
class Fish(Animal): pass
class Guppy(Fish): pass
class Mammal(Animal): pass
class Dog(Mammal): pass
class Human(Mammal): pass

self.assertEqual(Animal._subclasses, ('Animal',
'Animal.Fish',
'Animal.Fish.Guppy',
'Animal.Mammal',
'Animal.Mammal.Dog',
'Animal.Mammal.Human'))
self.assertEqual(Fish._subclasses, ('Animal.Fish',
'Animal.Fish.Guppy',))
self.assertEqual(Guppy._subclasses, ('Animal.Fish.Guppy',))
self.assertEqual(Mammal._subclasses, ('Animal.Mammal',
'Animal.Mammal.Dog',
'Animal.Mammal.Human'))
self.assertEqual(Human._subclasses, ('Animal.Mammal.Human',))
class Animal(Document):
meta = {"allow_inheritance": True}

class Fish(Animal):
pass

class Guppy(Fish):
pass

class Mammal(Animal):
pass

class Dog(Mammal):
pass

class Human(Mammal):
pass

self.assertEqual(
Animal._subclasses,
(
"Animal",
"Animal.Fish",
"Animal.Fish.Guppy",
"Animal.Mammal",
"Animal.Mammal.Dog",
"Animal.Mammal.Human",
),
)
self.assertEqual(Fish._subclasses, ("Animal.Fish", "Animal.Fish.Guppy"))
self.assertEqual(Guppy._subclasses, ("Animal.Fish.Guppy",))
self.assertEqual(
Mammal._subclasses,
("Animal.Mammal", "Animal.Mammal.Dog", "Animal.Mammal.Human"),
)
self.assertEqual(Human._subclasses, ("Animal.Mammal.Human",))

def test_external_subclasses(self):
"""Ensure that the correct list of _subclasses (subclasses) is
assembled when importing part of the model.
"""
class Animal(Base): pass
class Fish(Animal): pass
class Guppy(Fish): pass
class Mammal(Animal): pass
class Dog(Mammal): pass
class Human(Mammal): pass

self.assertEqual(Animal._subclasses, ('Base.Animal',
'Base.Animal.Fish',
'Base.Animal.Fish.Guppy',
'Base.Animal.Mammal',
'Base.Animal.Mammal.Dog',
'Base.Animal.Mammal.Human'))
self.assertEqual(Fish._subclasses, ('Base.Animal.Fish',
'Base.Animal.Fish.Guppy',))
self.assertEqual(Guppy._subclasses, ('Base.Animal.Fish.Guppy',))
self.assertEqual(Mammal._subclasses, ('Base.Animal.Mammal',
'Base.Animal.Mammal.Dog',
'Base.Animal.Mammal.Human'))
self.assertEqual(Human._subclasses, ('Base.Animal.Mammal.Human',))
class Animal(Base):
pass

class Fish(Animal):
pass

class Guppy(Fish):
pass

class Mammal(Animal):
pass

class Dog(Mammal):
pass

class Human(Mammal):
pass

self.assertEqual(
Animal._subclasses,
(
"Base.Animal",
"Base.Animal.Fish",
"Base.Animal.Fish.Guppy",
"Base.Animal.Mammal",
"Base.Animal.Mammal.Dog",
"Base.Animal.Mammal.Human",
),
)
self.assertEqual(
Fish._subclasses, ("Base.Animal.Fish", "Base.Animal.Fish.Guppy")
)
self.assertEqual(Guppy._subclasses, ("Base.Animal.Fish.Guppy",))
self.assertEqual(
Mammal._subclasses,
(
"Base.Animal.Mammal",
"Base.Animal.Mammal.Dog",
"Base.Animal.Mammal.Human",
),
)
self.assertEqual(Human._subclasses, ("Base.Animal.Mammal.Human",))

def test_dynamic_declarations(self):
"""Test that declaring an extra class updates meta data"""

class Animal(Document):
meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

self.assertEqual(Animal._superclasses, ())
self.assertEqual(Animal._subclasses, ('Animal',))
self.assertEqual(Animal._subclasses, ("Animal",))

# Test dynamically adding a class changes the meta data
class Fish(Animal):
pass

self.assertEqual(Animal._superclasses, ())
self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish'))
self.assertEqual(Animal._subclasses, ("Animal", "Animal.Fish"))

self.assertEqual(Fish._superclasses, ('Animal', ))
self.assertEqual(Fish._subclasses, ('Animal.Fish',))
self.assertEqual(Fish._superclasses, ("Animal",))
self.assertEqual(Fish._subclasses, ("Animal.Fish",))

# Test dynamically adding an inherited class changes the meta data
class Pike(Fish):
pass

self.assertEqual(Animal._superclasses, ())
self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish',
'Animal.Fish.Pike'))
self.assertEqual(
Animal._subclasses, ("Animal", "Animal.Fish", "Animal.Fish.Pike")
)

self.assertEqual(Fish._superclasses, ('Animal', ))
self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike'))
self.assertEqual(Fish._superclasses, ("Animal",))
self.assertEqual(Fish._subclasses, ("Animal.Fish", "Animal.Fish.Pike"))

self.assertEqual(Pike._superclasses, ('Animal', 'Animal.Fish'))
self.assertEqual(Pike._subclasses, ('Animal.Fish.Pike',))
self.assertEqual(Pike._superclasses, ("Animal", "Animal.Fish"))
self.assertEqual(Pike._subclasses, ("Animal.Fish.Pike",))

def test_inheritance_meta_data(self):
"""Ensure that document may inherit fields from a superclass document.
"""

class Person(Document):
name = StringField()
age = IntField()

meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

class Employee(Person):
salary = IntField()

self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
sorted(Employee._fields.keys()))
self.assertEqual(Employee._get_collection_name(),
Person._get_collection_name())
self.assertEqual(
["_cls", "age", "id", "name", "salary"], sorted(Employee._fields.keys())
)
self.assertEqual(Employee._get_collection_name(), Person._get_collection_name())

def test_inheritance_to_mongo_keys(self):
"""Ensure that document may inherit fields from a superclass document.
"""

class Person(Document):
name = StringField()
age = IntField()

meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

class Employee(Person):
salary = IntField()

self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
sorted(Employee._fields.keys()))
self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
['_cls', 'name', 'age'])
self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
['_cls', 'name', 'age', 'salary'])
self.assertEqual(Employee._get_collection_name(),
Person._get_collection_name())
self.assertEqual(
["_cls", "age", "id", "name", "salary"], sorted(Employee._fields.keys())
)
self.assertEqual(
Person(name="Bob", age=35).to_mongo().keys(), ["_cls", "name", "age"]
)
self.assertEqual(
Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
["_cls", "name", "age", "salary"],
)
self.assertEqual(Employee._get_collection_name(), Person._get_collection_name())

def test_indexes_and_multiple_inheritance(self):
""" Ensure that all of the indexes are created for a document with
@@ -195,18 +287,12 @@ class InheritanceTest(unittest.TestCase):
class A(Document):
a = StringField()

meta = {
'allow_inheritance': True,
'indexes': ['a']
}
meta = {"allow_inheritance": True, "indexes": ["a"]}

class B(Document):
b = StringField()

meta = {
'allow_inheritance': True,
'indexes': ['b']
}
meta = {"allow_inheritance": True, "indexes": ["b"]}

class C(A, B):
pass
@@ -218,8 +304,12 @@ class InheritanceTest(unittest.TestCase):
C.ensure_indexes()

self.assertEqual(
sorted([idx['key'] for idx in C._get_collection().index_information().values()]),
sorted([[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]])
sorted(
[idx["key"] for idx in C._get_collection().index_information().values()]
),
sorted(
[[(u"_cls", 1), (u"b", 1)], [(u"_id", 1)], [(u"_cls", 1), (u"a", 1)]]
),
)

def test_polymorphic_queries(self):
@@ -227,11 +317,19 @@ class InheritanceTest(unittest.TestCase):
"""

class Animal(Document):
meta = {'allow_inheritance': True}
class Fish(Animal): pass
class Mammal(Animal): pass
class Dog(Mammal): pass
class Human(Mammal): pass
meta = {"allow_inheritance": True}

class Fish(Animal):
pass

class Mammal(Animal):
pass

class Dog(Mammal):
pass

class Human(Mammal):
pass

Animal.drop_collection()

@@ -254,153 +352,192 @@ class InheritanceTest(unittest.TestCase):
"""Ensure that inheritance is disabled by default on simple
classes and that _cls will not be used.
"""

class Animal(Document):
name = StringField()

# can't inherit because Animal didn't explicitly allow inheritance
with self.assertRaises(ValueError):
with self.assertRaises(ValueError) as cm:

class Dog(Animal):
pass

self.assertIn("Document Animal may not be subclassed", str(cm.exception))

# Check that _cls etc aren't present on simple documents
dog = Animal(name='dog').save()
self.assertEqual(dog.to_mongo().keys(), ['_id', 'name'])
dog = Animal(name="dog").save()
self.assertEqual(dog.to_mongo().keys(), ["_id", "name"])

collection = self.db[Animal._get_collection_name()]
obj = collection.find_one()
self.assertFalse('_cls' in obj)
self.assertNotIn("_cls", obj)

def test_cant_turn_off_inheritance_on_subclass(self):
"""Ensure if inheritance is on in a subclass you cant turn it off.
"""

class Animal(Document):
name = StringField()
meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

with self.assertRaises(ValueError) as cm:

with self.assertRaises(ValueError):
class Mammal(Animal):
meta = {'allow_inheritance': False}
meta = {"allow_inheritance": False}

self.assertEqual(
str(cm.exception),
'Only direct subclasses of Document may set "allow_inheritance" to False',
)

def test_allow_inheritance_abstract_document(self):
"""Ensure that abstract documents can set inheritance rules and that
_cls will not be used.
"""

class FinalDocument(Document):
meta = {'abstract': True,
'allow_inheritance': False}
meta = {"abstract": True, "allow_inheritance": False}

class Animal(FinalDocument):
name = StringField()

with self.assertRaises(ValueError):
with self.assertRaises(ValueError) as cm:

class Mammal(Animal):
pass

# Check that _cls isn't present in simple documents
doc = Animal(name='dog')
self.assertFalse('_cls' in doc.to_mongo())
doc = Animal(name="dog")
self.assertNotIn("_cls", doc.to_mongo())

def test_using_abstract_class_in_reference_field(self):
# Ensures no regression of #1920
class AbstractHuman(Document):
meta = {"abstract": True}

class Dad(AbstractHuman):
name = StringField()

class Home(Document):
dad = ReferenceField(AbstractHuman) # Referencing the abstract class
address = StringField()

dad = Dad(name="5").save()
Home(dad=dad, address="street").save()

home = Home.objects.first()
home.address = "garbage"
home.save() # Was failing with ValidationError

def test_abstract_class_referencing_self(self):
# Ensures no regression of #1920
class Human(Document):
meta = {"abstract": True}
creator = ReferenceField("self", dbref=True)

class User(Human):
name = StringField()

user = User(name="John").save()
user2 = User(name="Foo", creator=user).save()

user2 = User.objects.with_id(user2.id)
user2.name = "Bar"
user2.save() # Was failing with ValidationError

def test_abstract_handle_ids_in_metaclass_properly(self):

class City(Document):
continent = StringField()
meta = {'abstract': True,
'allow_inheritance': False}
meta = {"abstract": True, "allow_inheritance": False}

class EuropeanCity(City):
name = StringField()

berlin = EuropeanCity(name='Berlin', continent='Europe')
berlin = EuropeanCity(name="Berlin", continent="Europe")
self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._fields_ordered), 3)
self.assertEqual(berlin._fields_ordered[0], 'id')
self.assertEqual(berlin._fields_ordered[0], "id")

def test_auto_id_not_set_if_specific_in_parent_class(self):

class City(Document):
continent = StringField()
city_id = IntField(primary_key=True)
meta = {'abstract': True,
'allow_inheritance': False}
meta = {"abstract": True, "allow_inheritance": False}

class EuropeanCity(City):
name = StringField()

berlin = EuropeanCity(name='Berlin', continent='Europe')
berlin = EuropeanCity(name="Berlin", continent="Europe")
self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._fields_ordered), 3)
self.assertEqual(berlin._fields_ordered[0], 'city_id')
self.assertEqual(berlin._fields_ordered[0], "city_id")

def test_auto_id_vs_non_pk_id_field(self):

class City(Document):
continent = StringField()
id = IntField()
meta = {'abstract': True,
'allow_inheritance': False}
meta = {"abstract": True, "allow_inheritance": False}

class EuropeanCity(City):
name = StringField()

berlin = EuropeanCity(name='Berlin', continent='Europe')
berlin = EuropeanCity(name="Berlin", continent="Europe")
self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
self.assertEqual(len(berlin._fields_ordered), 4)
self.assertEqual(berlin._fields_ordered[0], 'auto_id_0')
self.assertEqual(berlin._fields_ordered[0], "auto_id_0")
berlin.save()
self.assertEqual(berlin.pk, berlin.auto_id_0)

def test_abstract_document_creation_does_not_fail(self):
class City(Document):
continent = StringField()
meta = {'abstract': True,
'allow_inheritance': False}
meta = {"abstract": True, "allow_inheritance": False}

bkk = City(continent='asia')
self.assertEqual(None, bkk.pk)
city = City(continent="asia")
self.assertEqual(None, city.pk)
# TODO: expected error? Shouldn't we create a new error type?
with self.assertRaises(KeyError):
setattr(bkk, 'pk', 1)
setattr(city, "pk", 1)

def test_allow_inheritance_embedded_document(self):
"""Ensure embedded documents respect inheritance."""

class Comment(EmbeddedDocument):
content = StringField()

with self.assertRaises(ValueError):

class SpecialComment(Comment):
pass

doc = Comment(content='test')
self.assertFalse('_cls' in doc.to_mongo())
doc = Comment(content="test")
self.assertNotIn("_cls", doc.to_mongo())

class Comment(EmbeddedDocument):
content = StringField()
meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

doc = Comment(content='test')
self.assertTrue('_cls' in doc.to_mongo())
doc = Comment(content="test")
self.assertIn("_cls", doc.to_mongo())

def test_document_inheritance(self):
"""Ensure mutliple inheritance of abstract documents
"""

class DateCreatedDocument(Document):
meta = {
'allow_inheritance': True,
'abstract': True,
}
meta = {"allow_inheritance": True, "abstract": True}

class DateUpdatedDocument(Document):
meta = {
'allow_inheritance': True,
'abstract': True,
}
meta = {"allow_inheritance": True, "abstract": True}

try:

class MyDocument(DateCreatedDocument, DateUpdatedDocument):
pass

except Exception:
self.assertTrue(False, "Couldn't create MyDocument class")

@@ -408,47 +545,55 @@ class InheritanceTest(unittest.TestCase):
"""Ensure that a document superclass can be marked as abstract
thereby not using it as the name for the collection."""

defaults = {'index_background': True,
'index_drop_dups': True,
'index_opts': {'hello': 'world'},
'allow_inheritance': True,
'queryset_class': 'QuerySet',
'db_alias': 'myDB',
'shard_key': ('hello', 'world')}
defaults = {
"index_background": True,
"index_drop_dups": True,
"index_opts": {"hello": "world"},
"allow_inheritance": True,
"queryset_class": "QuerySet",
"db_alias": "myDB",
"shard_key": ("hello", "world"),
}

meta_settings = {'abstract': True}
meta_settings = {"abstract": True}
meta_settings.update(defaults)

class Animal(Document):
name = StringField()
meta = meta_settings

class Fish(Animal): pass
class Guppy(Fish): pass
class Fish(Animal):
pass

class Guppy(Fish):
pass

class Mammal(Animal):
meta = {'abstract': True}
class Human(Mammal): pass
meta = {"abstract": True}

for k, v in defaults.iteritems():
class Human(Mammal):
pass

for k, v in iteritems(defaults):
for cls in [Animal, Fish, Guppy]:
self.assertEqual(cls._meta[k], v)

self.assertFalse('collection' in Animal._meta)
self.assertFalse('collection' in Mammal._meta)
self.assertNotIn("collection", Animal._meta)
self.assertNotIn("collection", Mammal._meta)

self.assertEqual(Animal._get_collection_name(), None)
self.assertEqual(Mammal._get_collection_name(), None)

self.assertEqual(Fish._get_collection_name(), 'fish')
self.assertEqual(Guppy._get_collection_name(), 'fish')
self.assertEqual(Human._get_collection_name(), 'human')
self.assertEqual(Fish._get_collection_name(), "fish")
self.assertEqual(Guppy._get_collection_name(), "fish")
self.assertEqual(Human._get_collection_name(), "human")

# ensure that a subclass of a non-abstract class can't be abstract
with self.assertRaises(ValueError):

class EvilHuman(Human):
evil = BooleanField(default=True)
meta = {'abstract': True}
meta = {"abstract": True}

def test_abstract_embedded_documents(self):
# 789: EmbeddedDocument shouldn't inherit abstract
@@ -467,7 +612,7 @@ class InheritanceTest(unittest.TestCase):

class Drink(Document):
name = StringField()
meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

class Drinker(Document):
drink = GenericReferenceField()
@@ -476,13 +621,13 @@ class InheritanceTest(unittest.TestCase):
warnings.simplefilter("error")

class AcloholicDrink(Drink):
meta = {'collection': 'booze'}
meta = {"collection": "booze"}

except SyntaxWarning:
warnings.simplefilter("ignore")

class AlcoholicDrink(Drink):
meta = {'collection': 'booze'}
meta = {"collection": "booze"}

else:
raise AssertionError("SyntaxWarning should be triggered")
@@ -493,13 +638,13 @@ class InheritanceTest(unittest.TestCase):
AlcoholicDrink.drop_collection()
Drinker.drop_collection()

red_bull = Drink(name='Red Bull')
red_bull = Drink(name="Red Bull")
red_bull.save()

programmer = Drinker(drink=red_bull)
programmer.save()

beer = AlcoholicDrink(name='Beer')
beer = AlcoholicDrink(name="Beer")
beer.save()
real_person = Drinker(drink=beer)
real_person.save()
@@ -508,5 +653,5 @@ class InheritanceTest(unittest.TestCase):
self.assertEqual(Drinker.objects[1].drink.name, beer.name)


if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()
File diff suppressed because it is too large
@@ -13,9 +13,8 @@ __all__ = ("TestJson",)


class TestJson(unittest.TestCase):

def setUp(self):
connect(db='mongoenginetest')
connect(db="mongoenginetest")

def test_json_names(self):
"""
@@ -25,22 +24,24 @@ class TestJson(unittest.TestCase):
a to_json with the original class names and not the abreviated
mongodb document keys
"""

class Embedded(EmbeddedDocument):
string = StringField(db_field='s')
string = StringField(db_field="s")

class Doc(Document):
string = StringField(db_field='s')
embedded = EmbeddedDocumentField(Embedded, db_field='e')
string = StringField(db_field="s")
embedded = EmbeddedDocumentField(Embedded, db_field="e")

doc = Doc( string="Hello", embedded=Embedded(string="Inner Hello"))
doc_json = doc.to_json(sort_keys=True, use_db_field=False,separators=(',', ':'))
doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello"))
doc_json = doc.to_json(
sort_keys=True, use_db_field=False, separators=(",", ":")
)

expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}"""

self.assertEqual( doc_json, expected_json)
self.assertEqual(doc_json, expected_json)

def test_json_simple(self):

class Embedded(EmbeddedDocument):
string = StringField()

@@ -49,22 +50,20 @@ class TestJson(unittest.TestCase):
embedded_field = EmbeddedDocumentField(Embedded)

def __eq__(self, other):
return (self.string == other.string and
self.embedded_field == other.embedded_field)
return (
self.string == other.string
and self.embedded_field == other.embedded_field
)

doc = Doc(string="Hi", embedded_field=Embedded(string="Hi"))

doc_json = doc.to_json(sort_keys=True, separators=(',', ':'))
doc_json = doc.to_json(sort_keys=True, separators=(",", ":"))
expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}"""
self.assertEqual(doc_json, expected_json)

self.assertEqual(doc, Doc.from_json(doc.to_json()))

def test_json_complex(self):

if pymongo.version_tuple[0] <= 2 and pymongo.version_tuple[1] <= 3:
raise SkipTest("Need pymongo 2.4 as has a fix for DBRefs")

class EmbeddedDoc(EmbeddedDocument):
pass

@@ -72,41 +71,43 @@ class TestJson(unittest.TestCase):
pass

class Doc(Document):
string_field = StringField(default='1')
string_field = StringField(default="1")
int_field = IntField(default=1)
float_field = FloatField(default=1.1)
boolean_field = BooleanField(default=True)
datetime_field = DateTimeField(default=datetime.now)
embedded_document_field = EmbeddedDocumentField(EmbeddedDoc,
default=lambda: EmbeddedDoc())
embedded_document_field = EmbeddedDocumentField(
EmbeddedDoc, default=lambda: EmbeddedDoc()
)
list_field = ListField(default=lambda: [1, 2, 3])
dict_field = DictField(default=lambda: {"hello": "world"})
objectid_field = ObjectIdField(default=ObjectId)
reference_field = ReferenceField(Simple, default=lambda:
Simple().save())
reference_field = ReferenceField(Simple, default=lambda: Simple().save())
map_field = MapField(IntField(), default=lambda: {"simple": 1})
decimal_field = DecimalField(default=1.0)
complex_datetime_field = ComplexDateTimeField(default=datetime.now)
url_field = URLField(default="http://mongoengine.org")
dynamic_field = DynamicField(default=1)
generic_reference_field = GenericReferenceField(
default=lambda: Simple().save())
sorted_list_field = SortedListField(IntField(),
default=lambda: [1, 2, 3])
default=lambda: Simple().save()
)
sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3])
email_field = EmailField(default="ross@example.com")
geo_point_field = GeoPointField(default=lambda: [1, 2])
sequence_field = SequenceField()
uuid_field = UUIDField(default=uuid.uuid4)
generic_embedded_document_field = GenericEmbeddedDocumentField(
default=lambda: EmbeddedDoc())
default=lambda: EmbeddedDoc()
)

def __eq__(self, other):
import json

return json.loads(self.to_json()) == json.loads(other.to_json())

doc = Doc()
self.assertEqual(doc, Doc.from_json(doc.to_json()))


if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()

@@ -8,49 +8,56 @@ __all__ = ("ValidatorErrorTest",)
|
||||
|
||||
|
||||
class ValidatorErrorTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
connect(db="mongoenginetest")
|
||||
|
||||
def test_to_dict(self):
|
||||
"""Ensure a ValidationError handles error to_dict correctly.
|
||||
"""
|
||||
error = ValidationError('root')
|
||||
error = ValidationError("root")
|
||||
self.assertEqual(error.to_dict(), {})
|
||||
|
||||
# 1st level error schema
|
||||
error.errors = {'1st': ValidationError('bad 1st'), }
|
||||
self.assertTrue('1st' in error.to_dict())
|
||||
self.assertEqual(error.to_dict()['1st'], 'bad 1st')
|
||||
error.errors = {"1st": ValidationError("bad 1st")}
|
||||
self.assertIn("1st", error.to_dict())
|
||||
self.assertEqual(error.to_dict()["1st"], "bad 1st")
|
||||
|
||||
# 2nd level error schema
|
||||
error.errors = {'1st': ValidationError('bad 1st', errors={
|
||||
'2nd': ValidationError('bad 2nd'),
|
||||
})}
|
||||
self.assertTrue('1st' in error.to_dict())
|
||||
self.assertTrue(isinstance(error.to_dict()['1st'], dict))
|
||||
self.assertTrue('2nd' in error.to_dict()['1st'])
|
||||
self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd')
|
||||
error.errors = {
|
||||
"1st": ValidationError(
|
||||
"bad 1st", errors={"2nd": ValidationError("bad 2nd")}
|
||||
)
|
||||
}
|
||||
self.assertIn("1st", error.to_dict())
|
||||
self.assertIsInstance(error.to_dict()["1st"], dict)
|
||||
self.assertIn("2nd", error.to_dict()["1st"])
|
||||
self.assertEqual(error.to_dict()["1st"]["2nd"], "bad 2nd")
|
||||
|
||||
# moar levels
|
||||
error.errors = {'1st': ValidationError('bad 1st', errors={
|
||||
'2nd': ValidationError('bad 2nd', errors={
|
||||
'3rd': ValidationError('bad 3rd', errors={
|
||||
'4th': ValidationError('Inception'),
|
||||
}),
|
||||
}),
|
||||
})}
|
||||
self.assertTrue('1st' in error.to_dict())
|
||||
self.assertTrue('2nd' in error.to_dict()['1st'])
|
||||
self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
|
||||
self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
|
||||
self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'],
|
||||
'Inception')
|
||||
error.errors = {
|
||||
"1st": ValidationError(
|
||||
"bad 1st",
|
||||
errors={
|
||||
"2nd": ValidationError(
|
||||
"bad 2nd",
|
||||
errors={
|
||||
"3rd": ValidationError(
|
||||
"bad 3rd", errors={"4th": ValidationError("Inception")}
|
||||
)
|
||||
},
|
||||
)
|
||||
},
|
||||
)
|
||||
}
|
||||
self.assertIn("1st", error.to_dict())
|
||||
self.assertIn("2nd", error.to_dict()["1st"])
|
||||
self.assertIn("3rd", error.to_dict()["1st"]["2nd"])
|
||||
self.assertIn("4th", error.to_dict()["1st"]["2nd"]["3rd"])
|
||||
self.assertEqual(error.to_dict()["1st"]["2nd"]["3rd"]["4th"], "Inception")
|
||||
|
||||
self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])")
|
||||
|
||||
    def test_model_validation(self):

        class User(Document):
            username = StringField(primary_key=True)
            name = StringField(required=True)
@@ -58,25 +65,25 @@ class ValidatorErrorTest(unittest.TestCase):
        try:
            User().validate()
        except ValidationError as e:
            self.assertTrue("User:None" in e.message)
            self.assertEqual(e.to_dict(), {
                'username': 'Field is required',
                'name': 'Field is required'})
            self.assertIn("User:None", e.message)
            self.assertEqual(
                e.to_dict(),
                {"username": "Field is required", "name": "Field is required"},
            )

        user = User(username="RossC0", name="Ross").save()
        user.name = None
        try:
            user.save()
        except ValidationError as e:
            self.assertTrue("User:RossC0" in e.message)
            self.assertEqual(e.to_dict(), {
                'name': 'Field is required'})
            self.assertIn("User:RossC0", e.message)
            self.assertEqual(e.to_dict(), {"name": "Field is required"})

    def test_fields_rewrite(self):
        class BasePerson(Document):
            name = StringField()
            age = IntField()
            meta = {'abstract': True}
            meta = {"abstract": True}

        class Person(BasePerson):
            name = StringField(required=True)
@@ -87,6 +94,7 @@ class ValidatorErrorTest(unittest.TestCase):
    def test_embedded_document_validation(self):
        """Ensure that embedded documents may be validated.
        """

        class Comment(EmbeddedDocument):
            date = DateTimeField()
            content = StringField(required=True)
@@ -94,7 +102,7 @@ class ValidatorErrorTest(unittest.TestCase):
        comment = Comment()
        self.assertRaises(ValidationError, comment.validate)

        comment.content = 'test'
        comment.content = "test"
        comment.validate()

        comment.date = 4
@@ -105,20 +113,20 @@ class ValidatorErrorTest(unittest.TestCase):
        self.assertEqual(comment._instance, None)

    def test_embedded_db_field_validate(self):

        class SubDoc(EmbeddedDocument):
            val = IntField(required=True)

        class Doc(Document):
            id = StringField(primary_key=True)
            e = EmbeddedDocumentField(SubDoc, db_field='eb')
            e = EmbeddedDocumentField(SubDoc, db_field="eb")

        try:
            Doc(id="bad").validate()
        except ValidationError as e:
            self.assertTrue("SubDoc:None" in e.message)
            self.assertEqual(e.to_dict(), {
                "e": {'val': 'OK could not be converted to int'}})
            self.assertIn("SubDoc:None", e.message)
            self.assertEqual(
                e.to_dict(), {"e": {"val": "OK could not be converted to int"}}
            )

        Doc.drop_collection()
@@ -127,24 +135,24 @@ class ValidatorErrorTest(unittest.TestCase):
        doc = Doc.objects.first()
        keys = doc._data.keys()
        self.assertEqual(2, len(keys))
        self.assertTrue('e' in keys)
        self.assertTrue('id' in keys)
        self.assertIn("e", keys)
        self.assertIn("id", keys)

        doc.e.val = "OK"
        try:
            doc.save()
        except ValidationError as e:
            self.assertTrue("Doc:test" in e.message)
            self.assertEqual(e.to_dict(), {
                "e": {'val': 'OK could not be converted to int'}})
            self.assertIn("Doc:test", e.message)
            self.assertEqual(
                e.to_dict(), {"e": {"val": "OK could not be converted to int"}}
            )

    def test_embedded_weakref(self):

        class SubDoc(EmbeddedDocument):
            val = IntField(required=True)

        class Doc(Document):
            e = EmbeddedDocumentField(SubDoc, db_field='eb')
            e = EmbeddedDocumentField(SubDoc, db_field="eb")

        Doc.drop_collection()

@@ -167,9 +175,10 @@ class ValidatorErrorTest(unittest.TestCase):
        Test to ensure a ReferenceField can store a reference to a parent
        class when inherited. Issue #954.
        """

        class Parent(Document):
            meta = {'allow_inheritance': True}
            reference = ReferenceField('self')
            meta = {"allow_inheritance": True}
            reference = ReferenceField("self")

        class Child(Parent):
            pass
@@ -190,9 +199,10 @@ class ValidatorErrorTest(unittest.TestCase):
        Test to ensure a ReferenceField can store a reference to a parent
        class when inherited and when set via attribute. Issue #954.
        """

        class Parent(Document):
            meta = {'allow_inheritance': True}
            reference = ReferenceField('self')
            meta = {"allow_inheritance": True}
            reference = ReferenceField("self")

        class Child(Parent):
            pass
@@ -210,5 +220,5 @@ class ValidatorErrorTest(unittest.TestCase):
            self.fail("ValidationError raised: %s" % e.message)


if __name__ == '__main__':
if __name__ == "__main__":
    unittest.main()

@@ -1,3 +1,3 @@
from fields import *
from file_tests import *
from geo import *
from .fields import *
from .file_tests import *
from .geo import *
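# Note (sketch, not part of the diff): the bare "from fields import *" form is
# an implicit relative import that Python 3 rejects; the dotted
# "from .fields import *" form resolves inside the package on both Python 2
# and Python 3.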
File diff suppressed because it is too large
@@ -14,26 +14,37 @@ from mongoengine.python_support import StringIO

try:
    from PIL import Image

    HAS_PIL = True
except ImportError:
    HAS_PIL = False

from tests.utils import MongoDBTestCase

TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png')
TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), "mongoengine.png")
TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), "mongodb_leaf.png")


def get_file(path):
    """Use a BytesIO instead of a file so callers get a one-liner
    and the file does not remain open."""
    bytes_io = StringIO()
    with open(path, "rb") as f:
        bytes_io.write(f.read())
    bytes_io.seek(0)
    return bytes_io
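# Usage sketch (assumes the helper above): get_file() hands back a rewound
# in-memory buffer, so GridFS can read the bytes after the on-disk file is
# already closed.
#
#     buf = get_file(TEST_IMAGE_PATH)
#     some_document.the_file.put(buf, content_type="image/png")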
class FileTest(MongoDBTestCase):

    def tearDown(self):
        self.db.drop_collection('fs.files')
        self.db.drop_collection('fs.chunks')
        self.db.drop_collection("fs.files")
        self.db.drop_collection("fs.chunks")

    def test_file_field_optional(self):
        # Make sure FileField is optional and not required
        class DemoFile(Document):
            the_file = FileField()

        DemoFile.objects.create()

    def test_file_fields(self):
@@ -45,16 +56,19 @@ class FileTest(MongoDBTestCase):

        PutFile.drop_collection()

        text = six.b('Hello, World!')
        content_type = 'text/plain'
        text = six.b("Hello, World!")
        content_type = "text/plain"

        putfile = PutFile()
        putfile.the_file.put(text, content_type=content_type, filename="hello")
        putfile.save()

        result = PutFile.objects.first()
        self.assertTrue(putfile == result)
        self.assertEqual("%s" % result.the_file, "<GridFSProxy: hello>")
        self.assertEqual(putfile, result)
        self.assertEqual(
            "%s" % result.the_file,
            "<GridFSProxy: hello (%s)>" % result.the_file.grid_id,
        )
        self.assertEqual(result.the_file.read(), text)
        self.assertEqual(result.the_file.content_type, content_type)
        result.the_file.delete()  # Remove file from GridFS
@@ -71,7 +85,7 @@ class FileTest(MongoDBTestCase):
        putfile.save()

        result = PutFile.objects.first()
        self.assertTrue(putfile == result)
        self.assertEqual(putfile, result)
        self.assertEqual(result.the_file.read(), text)
        self.assertEqual(result.the_file.content_type, content_type)
        result.the_file.delete()
@@ -79,14 +93,15 @@ class FileTest(MongoDBTestCase):
    def test_file_fields_stream(self):
        """Ensure that file fields can be written to and their data retrieved
        """

        class StreamFile(Document):
            the_file = FileField()

        StreamFile.drop_collection()

        text = six.b('Hello, World!')
        more_text = six.b('Foo Bar')
        content_type = 'text/plain'
        text = six.b("Hello, World!")
        more_text = six.b("Foo Bar")
        content_type = "text/plain"

        streamfile = StreamFile()
        streamfile.the_file.new_file(content_type=content_type)
@@ -96,7 +111,7 @@ class FileTest(MongoDBTestCase):
        streamfile.save()

        result = StreamFile.objects.first()
        self.assertTrue(streamfile == result)
        self.assertEqual(streamfile, result)
        self.assertEqual(result.the_file.read(), text + more_text)
        self.assertEqual(result.the_file.content_type, content_type)
        result.the_file.seek(0)
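        # Sketch of the FileField lifecycle exercised above (assumes a
        # connected test database): put() streams bytes into GridFS, read()
        # returns them, delete() removes the stored file.
        #
        #     putfile.the_file.put(six.b("Hello, World!"), content_type="text/plain")
        #     putfile.save()
        #     assert putfile.the_file.read() == six.b("Hello, World!")
        #     putfile.the_file.delete()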
@@ -114,14 +129,15 @@ class FileTest(MongoDBTestCase):
        """Ensure that a file field can be written to after it has been saved as
        None
        """

        class StreamFile(Document):
            the_file = FileField()

        StreamFile.drop_collection()

        text = six.b('Hello, World!')
        more_text = six.b('Foo Bar')
        content_type = 'text/plain'
        text = six.b("Hello, World!")
        more_text = six.b("Foo Bar")
        content_type = "text/plain"

        streamfile = StreamFile()
        streamfile.save()
@@ -132,7 +148,7 @@ class FileTest(MongoDBTestCase):
        streamfile.save()

        result = StreamFile.objects.first()
        self.assertTrue(streamfile == result)
        self.assertEqual(streamfile, result)
        self.assertEqual(result.the_file.read(), text + more_text)
        # self.assertEqual(result.the_file.content_type, content_type)
        result.the_file.seek(0)
@@ -147,12 +163,11 @@ class FileTest(MongoDBTestCase):
        self.assertTrue(result.the_file.read() is None)

    def test_file_fields_set(self):

        class SetFile(Document):
            the_file = FileField()

        text = six.b('Hello, World!')
        more_text = six.b('Foo Bar')
        text = six.b("Hello, World!")
        more_text = six.b("Foo Bar")

        SetFile.drop_collection()

@@ -161,7 +176,7 @@ class FileTest(MongoDBTestCase):
        setfile.save()

        result = SetFile.objects.first()
        self.assertTrue(setfile == result)
        self.assertEqual(setfile, result)
        self.assertEqual(result.the_file.read(), text)

        # Try replacing file with new one
@@ -169,12 +184,11 @@ class FileTest(MongoDBTestCase):
        result.save()

        result = SetFile.objects.first()
        self.assertTrue(setfile == result)
        self.assertEqual(setfile, result)
        self.assertEqual(result.the_file.read(), more_text)
        result.the_file.delete()

    def test_file_field_no_default(self):

        class GridDocument(Document):
            the_file = FileField()
@@ -189,7 +203,7 @@ class FileTest(MongoDBTestCase):
        doc_a.save()

        doc_b = GridDocument.objects.with_id(doc_a.id)
        doc_b.the_file.replace(f, filename='doc_b')
        doc_b.the_file.replace(f, filename="doc_b")
        doc_b.save()
        self.assertNotEqual(doc_b.the_file.grid_id, None)

@@ -198,13 +212,13 @@ class FileTest(MongoDBTestCase):
        self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id)

        # Test with default
        doc_d = GridDocument(the_file=six.b(''))
        doc_d = GridDocument(the_file=six.b(""))
        doc_d.save()

        doc_e = GridDocument.objects.with_id(doc_d.id)
        self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id)

        doc_e.the_file.replace(f, filename='doc_e')
        doc_e.the_file.replace(f, filename="doc_e")
        doc_e.save()

        doc_f = GridDocument.objects.with_id(doc_e.id)
@@ -212,11 +226,12 @@ class FileTest(MongoDBTestCase):

        db = GridDocument._get_db()
        grid_fs = gridfs.GridFS(db)
        self.assertEqual(['doc_b', 'doc_e'], grid_fs.list())
        self.assertEqual(["doc_b", "doc_e"], grid_fs.list())

    def test_file_uniqueness(self):
        """Ensure that each instance of a FileField is unique
        """

        class TestFile(Document):
            name = StringField()
            the_file = FileField()
@@ -224,15 +239,15 @@ class FileTest(MongoDBTestCase):
        # First instance
        test_file = TestFile()
        test_file.name = "Hello, World!"
        test_file.the_file.put(six.b('Hello, World!'))
        test_file.the_file.put(six.b("Hello, World!"))
        test_file.save()

        # Second instance
        test_file_dupe = TestFile()
        data = test_file_dupe.the_file.read()  # Should be None

        self.assertTrue(test_file.name != test_file_dupe.name)
        self.assertTrue(test_file.the_file.read() != data)
        self.assertNotEqual(test_file.name, test_file_dupe.name)
        self.assertNotEqual(test_file.the_file.read(), data)

        TestFile.drop_collection()

@@ -245,40 +260,43 @@ class FileTest(MongoDBTestCase):
        photo = FileField()

        Animal.drop_collection()
        marmot = Animal(genus='Marmota', family='Sciuridae')
        marmot = Animal(genus="Marmota", family="Sciuridae")

        marmot_photo = open(TEST_IMAGE_PATH, 'rb')  # Retrieve a photo from disk
        marmot.photo.put(marmot_photo, content_type='image/jpeg', foo='bar')
        marmot_photo_content = get_file(TEST_IMAGE_PATH)  # Retrieve a photo from disk
        marmot.photo.put(marmot_photo_content, content_type="image/jpeg", foo="bar")
        marmot.photo.close()
        marmot.save()

        marmot = Animal.objects.get()
        self.assertEqual(marmot.photo.content_type, 'image/jpeg')
        self.assertEqual(marmot.photo.foo, 'bar')
        self.assertEqual(marmot.photo.content_type, "image/jpeg")
        self.assertEqual(marmot.photo.foo, "bar")
    def test_file_reassigning(self):
        class TestFile(Document):
            the_file = FileField()

        TestFile.drop_collection()

        test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save()
        test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save()
        self.assertEqual(test_file.the_file.get().length, 8313)

        test_file = TestFile.objects.first()
        test_file.the_file = open(TEST_IMAGE2_PATH, 'rb')
        test_file.the_file = get_file(TEST_IMAGE2_PATH)
        test_file.save()
        self.assertEqual(test_file.the_file.get().length, 4971)

    def test_file_boolean(self):
        """Ensure that a boolean test of a FileField indicates its presence
        """

        class TestFile(Document):
            the_file = FileField()

        TestFile.drop_collection()

        test_file = TestFile()
        self.assertFalse(bool(test_file.the_file))
        test_file.the_file.put(six.b('Hello, World!'), content_type='text/plain')
        test_file.the_file.put(six.b("Hello, World!"), content_type="text/plain")
        test_file.save()
        self.assertTrue(bool(test_file.the_file))

@@ -287,19 +305,21 @@ class FileTest(MongoDBTestCase):

    def test_file_cmp(self):
        """Test comparing against other types"""

        class TestFile(Document):
            the_file = FileField()

        test_file = TestFile()
        self.assertFalse(test_file.the_file in [{"test": 1}])
        self.assertNotIn(test_file.the_file, [{"test": 1}])

    def test_file_disk_space(self):
        """ Test disk space usage when we delete/replace a file """

        class TestFile(Document):
            the_file = FileField()

        text = six.b('Hello, World!')
        content_type = 'text/plain'
        text = six.b("Hello, World!")
        content_type = "text/plain"

        testfile = TestFile()
        testfile.the_file.put(text, content_type=content_type, filename="hello")
@@ -310,16 +330,16 @@ class FileTest(MongoDBTestCase):

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEquals(len(list(files)), 1)
        self.assertEquals(len(list(chunks)), 1)
        self.assertEqual(len(list(files)), 1)
        self.assertEqual(len(list(chunks)), 1)

        # Deleting the document should delete the files
        testfile.delete()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEquals(len(list(files)), 0)
        self.assertEquals(len(list(chunks)), 0)
        self.assertEqual(len(list(files)), 0)
        self.assertEqual(len(list(chunks)), 0)

        # Test case where we don't store a file in the first place
        testfile = TestFile()
@@ -327,40 +347,40 @@ class FileTest(MongoDBTestCase):

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEquals(len(list(files)), 0)
        self.assertEquals(len(list(chunks)), 0)
        self.assertEqual(len(list(files)), 0)
        self.assertEqual(len(list(chunks)), 0)

        testfile.delete()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEquals(len(list(files)), 0)
        self.assertEquals(len(list(chunks)), 0)
        self.assertEqual(len(list(files)), 0)
        self.assertEqual(len(list(chunks)), 0)

        # Test case where we overwrite the file
        testfile = TestFile()
        testfile.the_file.put(text, content_type=content_type, filename="hello")
        testfile.save()

        text = six.b('Bonjour, World!')
        text = six.b("Bonjour, World!")
        testfile.the_file.replace(text, content_type=content_type, filename="hello")
        testfile.save()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEquals(len(list(files)), 1)
        self.assertEquals(len(list(chunks)), 1)
        self.assertEqual(len(list(files)), 1)
        self.assertEqual(len(list(chunks)), 1)

        testfile.delete()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEquals(len(list(files)), 0)
        self.assertEquals(len(list(chunks)), 0)
        self.assertEqual(len(list(files)), 0)
        self.assertEqual(len(list(chunks)), 0)
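        # Note (sketch, not part of the diff): replace() drops the old GridFS
        # file before storing the new bytes, which is why fs.files and
        # fs.chunks stay at a single entry after the overwrite above.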
    def test_image_field(self):
        if not HAS_PIL:
            raise SkipTest('PIL not installed')
            raise SkipTest("PIL not installed")

        class TestImage(Document):
            image = ImageField()
@@ -376,15 +396,17 @@ class FileTest(MongoDBTestCase):
                t.image.put(f)
                self.fail("Should have raised an invalidation error")
            except ValidationError as e:
                self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f)
                self.assertEqual(
                    "%s" % e, "Invalid image: cannot identify image file %s" % f
                )

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.image.put(get_file(TEST_IMAGE_PATH))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, 'PNG')
        self.assertEqual(t.image.format, "PNG")

        w, h = t.image.size
        self.assertEqual(w, 371)
@@ -394,23 +416,24 @@ class FileTest(MongoDBTestCase):

    def test_image_field_reassigning(self):
        if not HAS_PIL:
            raise SkipTest('PIL not installed')
            raise SkipTest("PIL not installed")

        class TestFile(Document):
            the_file = ImageField()

        TestFile.drop_collection()

        test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save()
        test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save()
        self.assertEqual(test_file.the_file.size, (371, 76))

        test_file = TestFile.objects.first()
        test_file.the_file = open(TEST_IMAGE2_PATH, 'rb')
        test_file.the_file = get_file(TEST_IMAGE2_PATH)
        test_file.save()
        self.assertEqual(test_file.the_file.size, (45, 101))

    def test_image_field_resize(self):
        if not HAS_PIL:
            raise SkipTest('PIL not installed')
            raise SkipTest("PIL not installed")

        class TestImage(Document):
            image = ImageField(size=(185, 37))
@@ -418,12 +441,12 @@ class FileTest(MongoDBTestCase):
        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.image.put(get_file(TEST_IMAGE_PATH))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, 'PNG')
        self.assertEqual(t.image.format, "PNG")
        w, h = t.image.size

        self.assertEqual(w, 185)
@@ -433,7 +456,7 @@ class FileTest(MongoDBTestCase):

    def test_image_field_resize_force(self):
        if not HAS_PIL:
            raise SkipTest('PIL not installed')
            raise SkipTest("PIL not installed")

        class TestImage(Document):
            image = ImageField(size=(185, 37, True))
@@ -441,12 +464,12 @@ class FileTest(MongoDBTestCase):
        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.image.put(get_file(TEST_IMAGE_PATH))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, 'PNG')
        self.assertEqual(t.image.format, "PNG")
        w, h = t.image.size

        self.assertEqual(w, 185)
@@ -456,7 +479,7 @@ class FileTest(MongoDBTestCase):

    def test_image_field_thumbnail(self):
        if not HAS_PIL:
            raise SkipTest('PIL not installed')
            raise SkipTest("PIL not installed")

        class TestImage(Document):
            image = ImageField(thumbnail_size=(92, 18))
@@ -464,24 +487,23 @@ class FileTest(MongoDBTestCase):
        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.image.put(get_file(TEST_IMAGE_PATH))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.thumbnail.format, 'PNG')
        self.assertEqual(t.image.thumbnail.format, "PNG")
        self.assertEqual(t.image.thumbnail.width, 92)
        self.assertEqual(t.image.thumbnail.height, 18)

        t.image.delete()
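    # Sketch (not part of the diff): the resize parameters above are set at
    # declaration time; size=(w, h, force) constrains the stored image and
    # thumbnail_size=(w, h) stores an extra thumbnail next to it, e.g.
    #
    #     class TestImage(Document):
    #         image = ImageField(size=(185, 37, True), thumbnail_size=(92, 18))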
    def test_file_multidb(self):
        register_connection('test_files', 'test_files')
        register_connection("test_files", "test_files")

        class TestFile(Document):
            name = StringField()
            the_file = FileField(db_alias="test_files",
                                 collection_name="macumba")
            the_file = FileField(db_alias="test_files", collection_name="macumba")

        TestFile.drop_collection()

@@ -492,23 +514,21 @@ class FileTest(MongoDBTestCase):
        # First instance
        test_file = TestFile()
        test_file.name = "Hello, World!"
        test_file.the_file.put(six.b('Hello, World!'),
                               name="hello.txt")
        test_file.the_file.put(six.b("Hello, World!"), name="hello.txt")
        test_file.save()

        data = get_db("test_files").macumba.files.find_one()
        self.assertEqual(data.get('name'), 'hello.txt')
        self.assertEqual(data.get("name"), "hello.txt")

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.read(), six.b('Hello, World!'))
        self.assertEqual(test_file.the_file.read(), six.b("Hello, World!"))

        test_file = TestFile.objects.first()
        test_file.the_file = six.b('HELLO, WORLD!')
        test_file.the_file = six.b("HELLO, WORLD!")
        test_file.save()

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.read(),
                         six.b('HELLO, WORLD!'))
        self.assertEqual(test_file.the_file.read(), six.b("HELLO, WORLD!"))

    def test_copyable(self):
        class PutFile(Document):
@@ -516,8 +536,8 @@ class FileTest(MongoDBTestCase):

        PutFile.drop_collection()

        text = six.b('Hello, World!')
        content_type = 'text/plain'
        text = six.b("Hello, World!")
        content_type = "text/plain"

        putfile = PutFile()
        putfile.the_file.put(text, content_type=content_type)
@@ -532,7 +552,7 @@ class FileTest(MongoDBTestCase):
    def test_get_image_by_grid_id(self):

        if not HAS_PIL:
            raise SkipTest('PIL not installed')
            raise SkipTest("PIL not installed")

        class TestImage(Document):

@@ -542,15 +562,16 @@ class FileTest(MongoDBTestCase):
        TestImage.drop_collection()

        t = TestImage()
        t.image1.put(open(TEST_IMAGE_PATH, 'rb'))
        t.image2.put(open(TEST_IMAGE2_PATH, 'rb'))
        t.image1.put(get_file(TEST_IMAGE_PATH))
        t.image2.put(get_file(TEST_IMAGE2_PATH))
        t.save()

        test = TestImage.objects.first()
        grid_id = test.image1.grid_id

        self.assertEqual(1, TestImage.objects(Q(image1=grid_id)
                                              or Q(image2=grid_id)).count())
        self.assertEqual(
            1, TestImage.objects(Q(image1=grid_id) or Q(image2=grid_id)).count()
        )

    def test_complex_field_filefield(self):
        """Ensure you can add meta data to file"""
@@ -561,22 +582,21 @@ class FileTest(MongoDBTestCase):
        photos = ListField(FileField())

        Animal.drop_collection()
        marmot = Animal(genus='Marmota', family='Sciuridae')
        marmot = Animal(genus="Marmota", family="Sciuridae")

        marmot_photo = open(TEST_IMAGE_PATH, 'rb')  # Retrieve a photo from disk

        photos_field = marmot._fields['photos'].field
        new_proxy = photos_field.get_proxy_obj('photos', marmot)
        new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar')
        marmot_photo.close()
        with open(TEST_IMAGE_PATH, "rb") as marmot_photo:  # Retrieve a photo from disk
            photos_field = marmot._fields["photos"].field
            new_proxy = photos_field.get_proxy_obj("photos", marmot)
            new_proxy.put(marmot_photo, content_type="image/jpeg", foo="bar")

        marmot.photos.append(new_proxy)
        marmot.save()

        marmot = Animal.objects.get()
        self.assertEqual(marmot.photos[0].content_type, 'image/jpeg')
        self.assertEqual(marmot.photos[0].foo, 'bar')
        self.assertEqual(marmot.photos[0].content_type, "image/jpeg")
        self.assertEqual(marmot.photos[0].foo, "bar")
        self.assertEqual(marmot.photos[0].get().length, 8313)


if __name__ == '__main__':

if __name__ == "__main__":
    unittest.main()
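# Note (sketch, not part of the diff): test_file_multidb shows how GridFS
# storage is routed; db_alias selects the registered connection and
# collection_name the GridFS prefix, so the bytes land in the "macumba.files"
# and "macumba.chunks" collections of the "test_files" database instead of
# the default "fs" collections.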
@@ -4,28 +4,27 @@ import unittest
from mongoengine import *
from mongoengine.connection import get_db

__all__ = ("GeoFieldTest", )
__all__ = ("GeoFieldTest",)


class GeoFieldTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        connect(db="mongoenginetest")
        self.db = get_db()

    def _test_for_expected_error(self, Cls, loc, expected):
        try:
            Cls(loc=loc).validate()
            self.fail('Should not validate the location {0}'.format(loc))
            self.fail("Should not validate the location {0}".format(loc))
        except ValidationError as e:
            self.assertEqual(expected, e.to_dict()['loc'])
            self.assertEqual(expected, e.to_dict()["loc"])
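    # Usage sketch (assumes the helper above): each validation test feeds in a
    # bad value together with the exact message it expects, e.g.
    #
    #     self._test_for_expected_error(
    #         Location, 5, "GeoPointField can only accept tuples or lists of (x, y)"
    #     )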
    def test_geopoint_validation(self):
        class Location(Document):
            loc = GeoPointField()

        invalid_coords = [{"x": 1, "y": 2}, 5, "a"]
        expected = 'GeoPointField can only accept tuples or lists of (x, y)'
        expected = "GeoPointField can only accept tuples or lists of (x, y)"

        for coord in invalid_coords:
            self._test_for_expected_error(Location, coord, expected)
@@ -40,12 +39,19 @@ class GeoFieldTest(unittest.TestCase):
            expected = "Both values (%s) in point must be float or int" % repr(coord)
            self._test_for_expected_error(Location, coord, expected)

        invalid_coords = [21, 4, "a"]
        for coord in invalid_coords:
            expected = "GeoPointField can only accept tuples or lists of (x, y)"
            self._test_for_expected_error(Location, coord, expected)

    def test_point_validation(self):
        class Location(Document):
            loc = PointField()

        invalid_coords = {"x": 1, "y": 2}
        expected = 'PointField can only accept a valid GeoJson dictionary or lists of (x, y)'
        expected = (
            "PointField can only accept a valid GeoJson dictionary or lists of (x, y)"
        )
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MadeUp", "coordinates": []}
@@ -72,19 +78,16 @@ class GeoFieldTest(unittest.TestCase):
            self._test_for_expected_error(Location, coord, expected)

        Location(loc=[1, 2]).validate()
        Location(loc={
            "type": "Point",
            "coordinates": [
                81.4471435546875,
                23.61432859499169
            ]}).validate()
        Location(
            loc={"type": "Point", "coordinates": [81.4471435546875, 23.61432859499169]}
        ).validate()

    def test_linestring_validation(self):
        class Location(Document):
            loc = LineStringField()

        invalid_coords = {"x": 1, "y": 2}
        expected = 'LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
        expected = "LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
@@ -92,7 +95,9 @@ class GeoFieldTest(unittest.TestCase):
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]}
        expected = "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
        expected = (
            "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
        )
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [5, "a"]
@@ -100,16 +105,25 @@ class GeoFieldTest(unittest.TestCase):
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[1]]
        expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
        expected = (
            "Invalid LineString:\nValue (%s) must be a two-dimensional point"
            % repr(invalid_coords[0])
        )
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[1, 2, 3]]
        expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
        expected = (
            "Invalid LineString:\nValue (%s) must be a two-dimensional point"
            % repr(invalid_coords[0])
        )
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[{}, {}]], [("a", "b")]]
        for coord in invalid_coords:
            expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0])
            expected = (
                "Invalid LineString:\nBoth values (%s) in point must be float or int"
                % repr(coord[0])
            )
            self._test_for_expected_error(Location, coord, expected)

        Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate()
@@ -119,7 +133,9 @@ class GeoFieldTest(unittest.TestCase):
            loc = PolygonField()

        invalid_coords = {"x": 1, "y": 2}
        expected = 'PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
        expected = (
            "PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)"
        )
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
@@ -131,7 +147,9 @@ class GeoFieldTest(unittest.TestCase):
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[5, "a"]]]
        expected = "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int"
        expected = (
            "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int"
        )
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[]]]
@@ -157,7 +175,7 @@ class GeoFieldTest(unittest.TestCase):
            loc = MultiPointField()

        invalid_coords = {"x": 1, "y": 2}
        expected = 'MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)'
        expected = "MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
@@ -183,19 +201,19 @@ class GeoFieldTest(unittest.TestCase):
            self._test_for_expected_error(Location, coord, expected)

        Location(loc=[[1, 2]]).validate()
        Location(loc={
            "type": "MultiPoint",
            "coordinates": [
                [1, 2],
                [81.4471435546875, 23.61432859499169]
            ]}).validate()
        Location(
            loc={
                "type": "MultiPoint",
                "coordinates": [[1, 2], [81.4471435546875, 23.61432859499169]],
            }
        ).validate()

    def test_multilinestring_validation(self):
        class Location(Document):
            loc = MultiLineStringField()

        invalid_coords = {"x": 1, "y": 2}
        expected = 'MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
        expected = "MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
@@ -211,16 +229,25 @@ class GeoFieldTest(unittest.TestCase):
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[1]]]
        expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
        expected = (
            "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point"
            % repr(invalid_coords[0][0])
        )
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[1, 2, 3]]]
        expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
        expected = (
            "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point"
            % repr(invalid_coords[0][0])
        )
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
        for coord in invalid_coords:
            expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0])
            expected = (
                "Invalid MultiLineString:\nBoth values (%s) in point must be float or int"
                % repr(coord[0][0])
            )
            self._test_for_expected_error(Location, coord, expected)

        Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()
@@ -230,7 +257,7 @@ class GeoFieldTest(unittest.TestCase):
            loc = MultiPolygonField()

        invalid_coords = {"x": 1, "y": 2}
        expected = 'MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
        expected = "MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
@@ -238,7 +265,9 @@ class GeoFieldTest(unittest.TestCase):
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MultiPolygon", "coordinates": [[[[1, 2, 3]]]]}
        expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
        expected = (
            "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
        )
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[[5, "a"]]]]
@@ -250,7 +279,9 @@ class GeoFieldTest(unittest.TestCase):
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[[1, 2, 3]]]]
        expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
        expected = (
            "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
        )
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
@@ -258,7 +289,9 @@ class GeoFieldTest(unittest.TestCase):
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[[1, 2], [3, 4]]]]
        expected = "Invalid MultiPolygon:\nLineStrings must start and end at the same point"
        expected = (
            "Invalid MultiPolygon:\nLineStrings must start and end at the same point"
        )
        self._test_for_expected_error(Location, invalid_coords, expected)

        Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate()
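        # Sketch (not part of the diff): the coordinate nesting the cases above
        # walk through grows one list level per ring/"Multi":
        #
        #     Point         [x, y]
        #     LineString    [[x, y], ...]
        #     Polygon       [[[x, y], ...]]     (a list of closed rings)
        #     MultiPolygon  [[[[x, y], ...]]]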
@@ -266,17 +299,19 @@ class GeoFieldTest(unittest.TestCase):
    def test_indexes_geopoint(self):
        """Ensure that indexes are created automatically for GeoPointFields.
        """

        class Event(Document):
            title = StringField()
            location = GeoPointField()

        geo_indicies = Event._geo_indices()
        self.assertEqual(geo_indicies, [{'fields': [('location', '2d')]}])
        self.assertEqual(geo_indicies, [{"fields": [("location", "2d")]}])

    def test_geopoint_embedded_indexes(self):
        """Ensure that indexes are created automatically for GeoPointFields on
        embedded documents.
        """

        class Venue(EmbeddedDocument):
            location = GeoPointField()
            name = StringField()
@@ -286,11 +321,12 @@ class GeoFieldTest(unittest.TestCase):
            venue = EmbeddedDocumentField(Venue)

        geo_indicies = Event._geo_indices()
        self.assertEqual(geo_indicies, [{'fields': [('venue.location', '2d')]}])
        self.assertEqual(geo_indicies, [{"fields": [("venue.location", "2d")]}])

    def test_indexes_2dsphere(self):
        """Ensure that indexes are created automatically for GeoPointFields.
        """

        class Event(Document):
            title = StringField()
            point = PointField()
@@ -298,13 +334,14 @@ class GeoFieldTest(unittest.TestCase):
            polygon = PolygonField()

        geo_indicies = Event._geo_indices()
        self.assertTrue({'fields': [('line', '2dsphere')]} in geo_indicies)
        self.assertTrue({'fields': [('polygon', '2dsphere')]} in geo_indicies)
        self.assertTrue({'fields': [('point', '2dsphere')]} in geo_indicies)
        self.assertIn({"fields": [("line", "2dsphere")]}, geo_indicies)
        self.assertIn({"fields": [("polygon", "2dsphere")]}, geo_indicies)
        self.assertIn({"fields": [("point", "2dsphere")]}, geo_indicies)

    def test_indexes_2dsphere_embedded(self):
        """Ensure that indexes are created automatically for GeoPointFields.
        """

        class Venue(EmbeddedDocument):
            name = StringField()
            point = PointField()
@@ -316,12 +353,11 @@ class GeoFieldTest(unittest.TestCase):
            venue = EmbeddedDocumentField(Venue)

        geo_indicies = Event._geo_indices()
        self.assertTrue({'fields': [('venue.line', '2dsphere')]} in geo_indicies)
        self.assertTrue({'fields': [('venue.polygon', '2dsphere')]} in geo_indicies)
        self.assertTrue({'fields': [('venue.point', '2dsphere')]} in geo_indicies)
        self.assertIn({"fields": [("venue.line", "2dsphere")]}, geo_indicies)
        self.assertIn({"fields": [("venue.polygon", "2dsphere")]}, geo_indicies)
        self.assertIn({"fields": [("venue.point", "2dsphere")]}, geo_indicies)

    def test_geo_indexes_recursion(self):

        class Location(Document):
            name = StringField()
            location = GeoPointField()
@@ -333,11 +369,11 @@ class GeoFieldTest(unittest.TestCase):
        Location.drop_collection()
        Parent.drop_collection()

        Parent(name='Berlin').save()
        Parent(name="Berlin").save()
        info = Parent._get_collection().index_information()
        self.assertFalse('location_2d' in info)
        self.assertNotIn("location_2d", info)
        info = Location._get_collection().index_information()
        self.assertTrue('location_2d' in info)
        self.assertIn("location_2d", info)

        self.assertEqual(len(Parent._geo_indices()), 0)
        self.assertEqual(len(Location._geo_indices()), 1)
@@ -349,9 +385,7 @@ class GeoFieldTest(unittest.TestCase):
        location = PointField(auto_index=False)
        datetime = DateTimeField()

        meta = {
            'indexes': [[("location", "2dsphere"), ("datetime", 1)]]
        }
        meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]}

        self.assertEqual([], Log._geo_indices())

@@ -359,8 +393,10 @@ class GeoFieldTest(unittest.TestCase):
        Log.ensure_indexes()

        info = Log._get_collection().index_information()
        self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
                         [('location', '2dsphere'), ('datetime', 1)])
        self.assertEqual(
            info["location_2dsphere_datetime_1"]["key"],
            [("location", "2dsphere"), ("datetime", 1)],
        )

        # Test listing explicitly
        class Log(Document):
@@ -368,9 +404,7 @@ class GeoFieldTest(unittest.TestCase):
            datetime = DateTimeField()

            meta = {
                'indexes': [
                    {'fields': [("location", "2dsphere"), ("datetime", 1)]}
                ]
                "indexes": [{"fields": [("location", "2dsphere"), ("datetime", 1)]}]
            }

        self.assertEqual([], Log._geo_indices())
@@ -379,9 +413,11 @@ class GeoFieldTest(unittest.TestCase):
        Log.ensure_indexes()

        info = Log._get_collection().index_information()
        self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
                         [('location', '2dsphere'), ('datetime', 1)])
        self.assertEqual(
            info["location_2dsphere_datetime_1"]["key"],
            [("location", "2dsphere"), ("datetime", 1)],
        )


if __name__ == '__main__':
if __name__ == "__main__":
    unittest.main()
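# Sketch (not part of the diff): with auto_index=False the 2dsphere index is
# only built once it is listed in meta and ensure_indexes() runs:
#
#     class Log(Document):
#         location = PointField(auto_index=False)
#         datetime = DateTimeField()
#         meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]}
#
#     Log.ensure_indexes()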
tests/fields/test_binary_field.py (new file, +150 lines)
@@ -0,0 +1,150 @@
# -*- coding: utf-8 -*-
import uuid

from nose.plugins.skip import SkipTest
import six

from bson import Binary

from mongoengine import *
from tests.utils import MongoDBTestCase

BIN_VALUE = six.b(
    "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5"
)


class TestBinaryField(MongoDBTestCase):
    def test_binary_fields(self):
        """Ensure that binary fields can be stored and retrieved.
        """

        class Attachment(Document):
            content_type = StringField()
            blob = BinaryField()

        BLOB = six.b("\xe6\x00\xc4\xff\x07")
        MIME_TYPE = "application/octet-stream"

        Attachment.drop_collection()

        attachment = Attachment(content_type=MIME_TYPE, blob=BLOB)
        attachment.save()

        attachment_1 = Attachment.objects().first()
        self.assertEqual(MIME_TYPE, attachment_1.content_type)
        self.assertEqual(BLOB, six.binary_type(attachment_1.blob))

    def test_validation_succeeds(self):
        """Ensure that valid values can be assigned to binary fields.
        """

        class AttachmentRequired(Document):
            blob = BinaryField(required=True)

        class AttachmentSizeLimit(Document):
            blob = BinaryField(max_bytes=4)

        attachment_required = AttachmentRequired()
        self.assertRaises(ValidationError, attachment_required.validate)
        attachment_required.blob = Binary(six.b("\xe6\x00\xc4\xff\x07"))
        attachment_required.validate()

        _5_BYTES = six.b("\xe6\x00\xc4\xff\x07")
        _4_BYTES = six.b("\xe6\x00\xc4\xff")
        self.assertRaises(ValidationError, AttachmentSizeLimit(blob=_5_BYTES).validate)
        AttachmentSizeLimit(blob=_4_BYTES).validate()

    def test_validation_fails(self):
        """Ensure that invalid values cannot be assigned to binary fields."""

        class Attachment(Document):
            blob = BinaryField()

        for invalid_data in (2, u"Im_a_unicode", ["some_str"]):
            self.assertRaises(ValidationError, Attachment(blob=invalid_data).validate)

    def test__primary(self):
        class Attachment(Document):
            id = BinaryField(primary_key=True)

        Attachment.drop_collection()
        binary_id = uuid.uuid4().bytes
        att = Attachment(id=binary_id).save()
        self.assertEqual(1, Attachment.objects.count())
        self.assertEqual(1, Attachment.objects.filter(id=att.id).count())
        att.delete()
        self.assertEqual(0, Attachment.objects.count())

    def test_primary_filter_by_binary_pk_as_str(self):
        raise SkipTest("Querying by id as string is not currently supported")

        class Attachment(Document):
            id = BinaryField(primary_key=True)

        Attachment.drop_collection()
        binary_id = uuid.uuid4().bytes
        att = Attachment(id=binary_id).save()
        self.assertEqual(1, Attachment.objects.filter(id=binary_id).count())
        att.delete()
        self.assertEqual(0, Attachment.objects.count())

    def test_match_querying_with_bytes(self):
        class MyDocument(Document):
            bin_field = BinaryField()

        MyDocument.drop_collection()

        doc = MyDocument(bin_field=BIN_VALUE).save()
        matched_doc = MyDocument.objects(bin_field=BIN_VALUE).first()
        self.assertEqual(matched_doc.id, doc.id)

    def test_match_querying_with_binary(self):
        class MyDocument(Document):
            bin_field = BinaryField()

        MyDocument.drop_collection()

        doc = MyDocument(bin_field=BIN_VALUE).save()

        matched_doc = MyDocument.objects(bin_field=Binary(BIN_VALUE)).first()
        self.assertEqual(matched_doc.id, doc.id)

    def test_modify_operation__set(self):
        """Ensures no regression of bug #1127"""

        class MyDocument(Document):
            some_field = StringField()
            bin_field = BinaryField()

        MyDocument.drop_collection()

        doc = MyDocument.objects(some_field="test").modify(
            upsert=True, new=True, set__bin_field=BIN_VALUE
        )
        self.assertEqual(doc.some_field, "test")
        if six.PY3:
            self.assertEqual(doc.bin_field, BIN_VALUE)
        else:
            self.assertEqual(doc.bin_field, Binary(BIN_VALUE))

    def test_update_one(self):
        """Ensures no regression of bug #1127"""

        class MyDocument(Document):
            bin_field = BinaryField()

        MyDocument.drop_collection()

        bin_data = six.b("\xe6\x00\xc4\xff\x07")
        doc = MyDocument(bin_field=bin_data).save()

        n_updated = MyDocument.objects(bin_field=bin_data).update_one(
            bin_field=BIN_VALUE
        )
        self.assertEqual(n_updated, 1)
        fetched = MyDocument.objects.with_id(doc.id)
        if six.PY3:
            self.assertEqual(fetched.bin_field, BIN_VALUE)
        else:
            self.assertEqual(fetched.bin_field, Binary(BIN_VALUE))
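# Note (sketch, not part of the diff): the six.PY3 branches above exist
# because pymongo hands BinaryField values back as bson.Binary on Python 2,
# while on Python 3 they round-trip as plain bytes.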
tests/fields/test_boolean_field.py (new file, +48 lines)
@@ -0,0 +1,48 @@
# -*- coding: utf-8 -*-
from mongoengine import *

from tests.utils import MongoDBTestCase, get_as_pymongo


class TestBooleanField(MongoDBTestCase):
    def test_storage(self):
        class Person(Document):
            admin = BooleanField()

        person = Person(admin=True)
        person.save()
        self.assertEqual(get_as_pymongo(person), {"_id": person.id, "admin": True})

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to boolean
        fields.
        """

        class Person(Document):
            admin = BooleanField()

        person = Person()
        person.admin = True
        person.validate()

        person.admin = 2
        self.assertRaises(ValidationError, person.validate)
        person.admin = "Yes"
        self.assertRaises(ValidationError, person.validate)
        person.admin = "False"
        self.assertRaises(ValidationError, person.validate)

    def test_weirdness_constructor(self):
        """When an attribute is set in the constructor, it gets cast into a
        bool, which causes some weird behavior. We don't necessarily want to
        maintain this behavior, but it's a known issue.
        """

        class Person(Document):
            admin = BooleanField()

        new_person = Person(admin="False")
        self.assertTrue(new_person.admin)

        new_person = Person(admin="0")
        self.assertTrue(new_person.admin)
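# Sketch (not part of the diff): the constructor cast above is plain Python
# truthiness; bool("False") and bool("0") are both True because any non-empty
# string is truthy, which is exactly the "weirdness" the test pins down.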
tests/fields/test_cached_reference_field.py (new file, +394 lines)
@@ -0,0 +1,394 @@
# -*- coding: utf-8 -*-
from decimal import Decimal

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestCachedReferenceField(MongoDBTestCase):
    def test_get_and_save(self):
        """
        Tests #1047: CachedReferenceField creates DBRefs on to_python,
        but can't save them on to_mongo.
        """

        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal)

        Animal.drop_collection()
        Ocorrence.drop_collection()

        Ocorrence(
            person="testte", animal=Animal(name="Leopard", tag="heavy").save()
        ).save()
        p = Ocorrence.objects.get()
        p.person = "new_testte"
        p.save()

    def test_general_things(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal, fields=["tag"])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(name="Leopard", tag="heavy")
        a.save()

        self.assertEqual(Animal._cached_reference_fields, [Ocorrence.animal])
        o = Ocorrence(person="teste", animal=a)
        o.save()

        p = Ocorrence(person="Wilson")
        p.save()

        self.assertEqual(Ocorrence.objects(animal=None).count(), 1)

        self.assertEqual(a.to_mongo(fields=["tag"]), {"tag": "heavy", "_id": a.pk})

        self.assertEqual(o.to_mongo()["animal"]["tag"], "heavy")

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        count = Ocorrence.objects(animal__tag="heavy").count()
        self.assertEqual(count, 1)

        ocorrence = Ocorrence.objects(animal__tag="heavy").first()
        self.assertEqual(ocorrence.person, "teste")
        self.assertIsInstance(ocorrence.animal, Animal)

    def test_with_decimal(self):
        class PersonAuto(Document):
            name = StringField()
            salary = DecimalField()

        class SocialTest(Document):
            group = StringField()
            person = CachedReferenceField(PersonAuto, fields=("salary",))

        PersonAuto.drop_collection()
        SocialTest.drop_collection()

        p = PersonAuto(name="Alberto", salary=Decimal("7000.00"))
        p.save()

        s = SocialTest(group="dev", person=p)
        s.save()

        self.assertEqual(
            SocialTest.objects._collection.find_one({"person.salary": 7000.00}),
            {"_id": s.pk, "group": s.group, "person": {"_id": p.pk, "salary": 7000.00}},
        )

    def test_cached_reference_field_reference(self):
        class Group(Document):
            name = StringField()

        class Person(Document):
            name = StringField()
            group = ReferenceField(Group)

        class SocialData(Document):
            obs = StringField()
            tags = ListField(StringField())
            person = CachedReferenceField(Person, fields=("group",))

        Group.drop_collection()
        Person.drop_collection()
        SocialData.drop_collection()

        g1 = Group(name="dev")
        g1.save()

        g2 = Group(name="designers")
        g2.save()

        p1 = Person(name="Alberto", group=g1)
        p1.save()

        p2 = Person(name="Andre", group=g1)
        p2.save()

        p3 = Person(name="Afro design", group=g2)
        p3.save()

        s1 = SocialData(obs="testing 123", person=p1, tags=["tag1", "tag2"])
        s1.save()

        s2 = SocialData(obs="testing 321", person=p3, tags=["tag3", "tag4"])
        s2.save()

        self.assertEqual(
            SocialData.objects._collection.find_one({"tags": "tag2"}),
            {
                "_id": s1.pk,
                "obs": "testing 123",
                "tags": ["tag1", "tag2"],
                "person": {"_id": p1.pk, "group": g1.pk},
            },
        )

        self.assertEqual(SocialData.objects(person__group=g2).count(), 1)
        self.assertEqual(SocialData.objects(person__group=g2).first(), s2)
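    # Sketch (not part of the diff): a CachedReferenceField denormalizes the
    # listed fields into the referring document, so a query such as
    # Ocorrence.objects(animal__tag="heavy") matches against the embedded
    # copy ({"animal.tag": "heavy"}) instead of dereferencing each document.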
    def test_cached_reference_field_push_with_fields(self):
        class Product(Document):
            name = StringField()

        Product.drop_collection()

        class Basket(Document):
            products = ListField(CachedReferenceField(Product, fields=["name"]))

        Basket.drop_collection()
        product1 = Product(name="abc").save()
        product2 = Product(name="def").save()
        basket = Basket(products=[product1]).save()
        self.assertEqual(
            Basket.objects._collection.find_one(),
            {
                "_id": basket.pk,
                "products": [{"_id": product1.pk, "name": product1.name}],
            },
        )
        # push to list
        basket.update(push__products=product2)
        basket.reload()
        self.assertEqual(
            Basket.objects._collection.find_one(),
            {
                "_id": basket.pk,
                "products": [
                    {"_id": product1.pk, "name": product1.name},
                    {"_id": product2.pk, "name": product2.name},
                ],
            },
        )

    def test_cached_reference_field_update_all(self):
        class Person(Document):
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(choices=TYPES)
            father = CachedReferenceField("self", fields=("tp",))

        Person.drop_collection()

        a1 = Person(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Person(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        a2 = Person.objects.with_id(a2.id)
        self.assertEqual(a2.father.tp, a1.tp)

        self.assertEqual(
            dict(a2.to_mongo()),
            {
                "_id": a2.pk,
                "name": u"Wilson Junior",
                "tp": u"pf",
                "father": {"_id": a1.pk, "tp": u"pj"},
            },
        )

        self.assertEqual(Person.objects(father=a1)._query, {"father._id": a1.pk})
        self.assertEqual(Person.objects(father=a1).count(), 1)

        Person.objects.update(set__tp="pf")
        Person.father.sync_all()

        a2.reload()
        self.assertEqual(
            dict(a2.to_mongo()),
            {
                "_id": a2.pk,
                "name": u"Wilson Junior",
                "tp": u"pf",
                "father": {"_id": a1.pk, "tp": u"pf"},
            },
        )

    def test_cached_reference_fields_on_embedded_documents(self):
        with self.assertRaises(InvalidDocumentError):

            class Test(Document):
                name = StringField()

            type(
                "WrongEmbeddedDocument",
                (EmbeddedDocument,),
                {"test": CachedReferenceField(Test)},
            )

    def test_cached_reference_auto_sync(self):
        class Person(Document):
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(choices=TYPES)

            father = CachedReferenceField("self", fields=("tp",))

        Person.drop_collection()

        a1 = Person(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Person(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        a1.tp = "pf"
        a1.save()

        a2.reload()
        self.assertEqual(
            dict(a2.to_mongo()),
            {
                "_id": a2.pk,
                "name": "Wilson Junior",
                "tp": "pf",
                "father": {"_id": a1.pk, "tp": "pf"},
            },
        )

    def test_cached_reference_auto_sync_disabled(self):
        class Persone(Document):
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(choices=TYPES)

            father = CachedReferenceField("self", fields=("tp",), auto_sync=False)

        Persone.drop_collection()

        a1 = Persone(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Persone(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        a1.tp = "pf"
        a1.save()

        self.assertEqual(
            Persone.objects._collection.find_one({"_id": a2.pk}),
            {
                "_id": a2.pk,
                "name": "Wilson Junior",
                "tp": "pf",
                "father": {"_id": a1.pk, "tp": "pj"},
            },
        )
def test_cached_reference_embedded_fields(self):
|
||||
class Owner(EmbeddedDocument):
|
||||
TPS = (("n", "Normal"), ("u", "Urgent"))
|
||||
name = StringField()
|
||||
tp = StringField(verbose_name="Type", db_field="t", choices=TPS)
|
||||
|
||||
class Animal(Document):
|
||||
name = StringField()
|
||||
tag = StringField()
|
||||
|
||||
owner = EmbeddedDocumentField(Owner)
|
||||
|
||||
class Ocorrence(Document):
|
||||
person = StringField()
|
||||
animal = CachedReferenceField(Animal, fields=["tag", "owner.tp"])
|
||||
|
||||
Animal.drop_collection()
|
||||
Ocorrence.drop_collection()
|
||||
|
||||
a = Animal(
|
||||
name="Leopard", tag="heavy", owner=Owner(tp="u", name="Wilson Júnior")
|
||||
)
|
||||
a.save()
|
||||
|
||||
o = Ocorrence(person="teste", animal=a)
|
||||
o.save()
|
||||
self.assertEqual(
|
||||
dict(a.to_mongo(fields=["tag", "owner.tp"])),
|
||||
{"_id": a.pk, "tag": "heavy", "owner": {"t": "u"}},
|
||||
)
|
||||
self.assertEqual(o.to_mongo()["animal"]["tag"], "heavy")
|
||||
self.assertEqual(o.to_mongo()["animal"]["owner"]["t"], "u")
|
||||
|
||||
# Check to_mongo with fields
|
||||
self.assertNotIn("animal", o.to_mongo(fields=["person"]))
|
||||
|
||||
# counts
|
||||
Ocorrence(person="teste 2").save()
|
||||
Ocorrence(person="teste 3").save()
|
||||
|
||||
count = Ocorrence.objects(animal__tag="heavy", animal__owner__tp="u").count()
|
||||
self.assertEqual(count, 1)
|
||||
|
||||
ocorrence = Ocorrence.objects(
|
||||
animal__tag="heavy", animal__owner__tp="u"
|
||||
).first()
|
||||
self.assertEqual(ocorrence.person, "teste")
|
||||
self.assertIsInstance(ocorrence.animal, Animal)
|
||||
|
||||
def test_cached_reference_embedded_list_fields(self):
|
||||
class Owner(EmbeddedDocument):
|
||||
name = StringField()
|
||||
tags = ListField(StringField())
|
||||
|
||||
class Animal(Document):
|
||||
name = StringField()
|
||||
tag = StringField()
|
||||
|
||||
owner = EmbeddedDocumentField(Owner)
|
||||
|
||||
class Ocorrence(Document):
|
||||
person = StringField()
|
||||
animal = CachedReferenceField(Animal, fields=["tag", "owner.tags"])
|
||||
|
||||
Animal.drop_collection()
|
||||
Ocorrence.drop_collection()
|
||||
|
||||
a = Animal(
|
||||
name="Leopard",
|
||||
tag="heavy",
|
||||
owner=Owner(tags=["cool", "funny"], name="Wilson Júnior"),
|
||||
)
|
||||
a.save()
|
||||
|
||||
o = Ocorrence(person="teste 2", animal=a)
|
||||
o.save()
|
||||
self.assertEqual(
|
||||
dict(a.to_mongo(fields=["tag", "owner.tags"])),
|
||||
{"_id": a.pk, "tag": "heavy", "owner": {"tags": ["cool", "funny"]}},
|
||||
)
|
||||
|
||||
self.assertEqual(o.to_mongo()["animal"]["tag"], "heavy")
|
||||
self.assertEqual(o.to_mongo()["animal"]["owner"]["tags"], ["cool", "funny"])
|
||||
|
||||
# counts
|
||||
Ocorrence(person="teste 2").save()
|
||||
Ocorrence(person="teste 3").save()
|
||||
|
||||
query = Ocorrence.objects(
|
||||
animal__tag="heavy", animal__owner__tags="cool"
|
||||
)._query
|
||||
self.assertEqual(query, {"animal.owner.tags": "cool", "animal.tag": "heavy"})
|
||||
|
||||
ocorrence = Ocorrence.objects(
|
||||
animal__tag="heavy", animal__owner__tags="cool"
|
||||
).first()
|
||||
self.assertEqual(ocorrence.person, "teste 2")
|
||||
self.assertIsInstance(ocorrence.animal, Animal)
|
||||
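# Note: as the assertions above demonstrate, CachedReferenceField
# denormalizes the referenced document - only "_id" plus the whitelisted
# `fields` are copied into the owning document, which is why lookups such as
# `person__group` or `animal__owner__tp` resolve against the cached copy
# without any dereferencing. A minimal sketch of the pattern (hypothetical
# Author/Book models, not part of this test suite):
#
#   class Author(Document):
#       name = StringField()
#
#   class Book(Document):
#       author = CachedReferenceField(Author, fields=["name"])
#
#   # stored as {"_id": ..., "author": {"_id": <author pk>, "name": ...}}
#   Book.objects(author__name="Alice")  # served from the cached subdocument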
193  tests/fields/test_complex_datetime_field.py  Normal file
@@ -0,0 +1,193 @@
# -*- coding: utf-8 -*-
import datetime
import math
import itertools
import re

from mongoengine import *

from tests.utils import MongoDBTestCase


class ComplexDateTimeFieldTest(MongoDBTestCase):
    def test_complexdatetime_storage(self):
        """Tests for complex datetime fields - which can handle
        microseconds without rounding.
        """

        class LogEntry(Document):
            date = ComplexDateTimeField()
            date_with_dots = ComplexDateTimeField(separator=".")

        LogEntry.drop_collection()

        # Post UTC - with default DateTimeFields, microseconds are rounded
        # (down) to the nearest millisecond and dropped
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1)

        # Post UTC - with default DateTimeFields, microseconds are rounded
        # (down) to the nearest millisecond
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1)

        # Pre UTC dates - with default DateTimeFields, microseconds below
        # 1000 are dropped
        d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1)

        # Pre UTC dates with microseconds above 1000 are wonky with default
        # DateTimeFields - log.date would come back with an invalid
        # microsecond value, so no comparison date can be constructed there.
        for i in range(1001, 3113, 33):
            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i)
            log.date = d1
            log.save()
            log.reload()
            self.assertEqual(log.date, d1)
            log1 = LogEntry.objects.get(date=d1)
            self.assertEqual(log, log1)

        # Test string padding
        microsecond = map(int, [math.pow(10, x) for x in range(6)])
        mm = dd = hh = ii = ss = [1, 10]

        for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond):
            stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"]
            self.assertTrue(
                re.match(r"^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored)
                is not None
            )

        # Test separator
        stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()[
            "date_with_dots"
        ]
        self.assertTrue(
            re.match(r"^\d{4}\.\d{2}\.\d{2}\.\d{2}\.\d{2}\.\d{2}\.\d{6}$", stored)
            is not None
        )

    def test_complexdatetime_usage(self):
        """Tests for complex datetime fields - which can handle
        microseconds without rounding.
        """

        class LogEntry(Document):
            date = ComplexDateTimeField()

        LogEntry.drop_collection()

        d1 = datetime.datetime(1950, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()

        log1 = LogEntry.objects.get(date=d1)
        self.assertEqual(log, log1)

        # create extra 59 log entries for a total of 60
        for i in range(1951, 2010):
            d = datetime.datetime(i, 1, 1, 0, 0, 1, 999)
            LogEntry(date=d).save()

        self.assertEqual(LogEntry.objects.count(), 60)

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 59:
            self.assertTrue(logs[i].date <= logs[i + 1].date)
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 59:
            self.assertTrue(logs[i].date >= logs[i + 1].date)
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 30)

        logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 30)

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2011, 1, 1),
            date__gte=datetime.datetime(2000, 1, 1),
        )
        self.assertEqual(logs.count(), 10)

        LogEntry.drop_collection()

        # Test microsecond-level ordering/filtering
        for microsecond in (99, 999, 9999, 10000):
            LogEntry(date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)).save()

        logs = list(LogEntry.objects.order_by("date"))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            self.assertTrue(log.date < next_log.date)

        logs = list(LogEntry.objects.order_by("-date"))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            self.assertTrue(log.date > next_log.date)

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000)
        )
        self.assertEqual(logs.count(), 4)

    def test_no_default_value(self):
        class Log(Document):
            timestamp = ComplexDateTimeField()

        Log.drop_collection()

        log = Log()
        self.assertIsNone(log.timestamp)
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        self.assertIsNone(fetched_log.timestamp)

    def test_default_static_value(self):
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=NOW)

        Log.drop_collection()

        log = Log()
        self.assertEqual(log.timestamp, NOW)
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        self.assertEqual(fetched_log.timestamp, NOW)

    def test_default_callable(self):
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=datetime.datetime.utcnow)

        Log.drop_collection()

        log = Log()
        self.assertGreaterEqual(log.timestamp, NOW)
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        self.assertGreaterEqual(fetched_log.timestamp, NOW)
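# Note: the padding assertions above pin down ComplexDateTimeField's storage
# format - a datetime is serialized as a zero-padded, separator-joined string
# (e.g. datetime(2014, 1, 1) -> "2014,01,01,00,00,00,000000"), so MongoDB's
# lexicographic string ordering matches chronological order down to the
# microsecond, which is exactly what the ordering/filtering tests rely on.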
171  tests/fields/test_date_field.py  Normal file
@@ -0,0 +1,171 @@
# -*- coding: utf-8 -*-
import datetime
import six

try:
    import dateutil
except ImportError:
    dateutil = None

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestDateField(MongoDBTestCase):
    def test_date_from_empty_string(self):
        """
        Ensure an exception is raised when trying to
        cast an empty string to datetime.
        """

        class MyDoc(Document):
            dt = DateField()

        md = MyDoc(dt="")
        self.assertRaises(ValidationError, md.save)

    def test_date_from_whitespace_string(self):
        """
        Ensure an exception is raised when trying to
        cast a whitespace-only string to datetime.
        """

        class MyDoc(Document):
            dt = DateField()

        md = MyDoc(dt=" ")
        self.assertRaises(ValidationError, md.save)

    def test_default_values_today(self):
        """Ensure that default field values are used when creating
        a document.
        """

        class Person(Document):
            day = DateField(default=datetime.date.today)

        person = Person()
        person.validate()
        self.assertEqual(person.day, person.day)
        self.assertEqual(person.day, datetime.date.today())
        self.assertEqual(person._data["day"], person.day)

    def test_date(self):
        """Tests showing pymongo date fields

        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
        """

        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        # Test can save dates
        log = LogEntry()
        log.date = datetime.date.today()
        log.save()
        log.reload()
        self.assertEqual(log.date, datetime.date.today())

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        d2 = datetime.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1.date())
        self.assertEqual(log.date, d2.date())

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000)
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1.date())
        self.assertEqual(log.date, d2.date())

        if not six.PY3:
            # Pre UTC dates microseconds below 1000 are dropped
            # This does not seem to be true in PY3
            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
            d2 = datetime.datetime(1969, 12, 31, 23, 59, 59)
            log.date = d1
            log.save()
            log.reload()
            self.assertEqual(log.date, d1.date())
            self.assertEqual(log.date, d2.date())

    def test_regular_usage(self):
        """Tests for regular datetime fields"""

        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.validate()
        log.save()

        for query in (d1, d1.isoformat(" ")):
            log1 = LogEntry.objects.get(date=query)
            self.assertEqual(log, log1)

        if dateutil:
            log1 = LogEntry.objects.get(date=d1.isoformat("T"))
            self.assertEqual(log, log1)

        # create additional 19 log entries for a total of 20
        for i in range(1971, 1990):
            d = datetime.datetime(i, 1, 1, 0, 0, 1)
            LogEntry(date=d).save()

        self.assertEqual(LogEntry.objects.count(), 20)

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 19:
            self.assertTrue(logs[i].date <= logs[i + 1].date)
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 19:
            self.assertTrue(logs[i].date >= logs[i + 1].date)
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 10)

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to datetime
        fields.
        """

        class LogEntry(Document):
            time = DateField()

        log = LogEntry()
        log.time = datetime.datetime.now()
        log.validate()

        log.time = datetime.date.today()
        log.validate()

        log.time = datetime.datetime.now().isoformat(" ")
        log.validate()

        if dateutil:
            log.time = datetime.datetime.now().isoformat("T")
            log.validate()

        log.time = -1
        self.assertRaises(ValidationError, log.validate)
        log.time = "ABC"
        self.assertRaises(ValidationError, log.validate)
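# Note: as the assertions above show, DateField truncates any datetime it is
# given to a plain date on save - time-of-day and microseconds are discarded,
# which is why both d1 and d2 compare equal to the reloaded value via .date().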
238  tests/fields/test_datetime_field.py  Normal file
@@ -0,0 +1,238 @@
# -*- coding: utf-8 -*-
import datetime as dt
import six

try:
    import dateutil
except ImportError:
    dateutil = None

from mongoengine import *
from mongoengine import connection

from tests.utils import MongoDBTestCase


class TestDateTimeField(MongoDBTestCase):
    def test_datetime_from_empty_string(self):
        """
        Ensure an exception is raised when trying to
        cast an empty string to datetime.
        """

        class MyDoc(Document):
            dt = DateTimeField()

        md = MyDoc(dt="")
        self.assertRaises(ValidationError, md.save)

    def test_datetime_from_whitespace_string(self):
        """
        Ensure an exception is raised when trying to
        cast a whitespace-only string to datetime.
        """

        class MyDoc(Document):
            dt = DateTimeField()

        md = MyDoc(dt=" ")
        self.assertRaises(ValidationError, md.save)

    def test_default_value_utcnow(self):
        """Ensure that default field values are used when creating
        a document.
        """

        class Person(Document):
            created = DateTimeField(default=dt.datetime.utcnow)

        utcnow = dt.datetime.utcnow()
        person = Person()
        person.validate()
        person_created_t0 = person.created
        self.assertLess(person.created - utcnow, dt.timedelta(seconds=1))
        self.assertEqual(
            person_created_t0, person.created
        )  # make sure it does not change
        self.assertEqual(person._data["created"], person.created)

    def test_handling_microseconds(self):
        """Tests showing pymongo datetime fields handling of microseconds.
        Microseconds are rounded to the nearest millisecond and pre UTC
        handling is wonky.

        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
        """

        class LogEntry(Document):
            date = DateTimeField()

        LogEntry.drop_collection()

        # Test can save dates
        log = LogEntry()
        log.date = dt.date.today()
        log.save()
        log.reload()
        self.assertEqual(log.date.date(), dt.date.today())

        # Post UTC - microseconds are rounded (down) to the nearest
        # millisecond and dropped
        d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 999)
        d2 = dt.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        self.assertNotEqual(log.date, d1)
        self.assertEqual(log.date, d2)

        # Post UTC - microseconds are rounded (down) to the nearest
        # millisecond
        d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 9999)
        d2 = dt.datetime(1970, 1, 1, 0, 0, 1, 9000)
        log.date = d1
        log.save()
        log.reload()
        self.assertNotEqual(log.date, d1)
        self.assertEqual(log.date, d2)

        if not six.PY3:
            # Pre UTC dates microseconds below 1000 are dropped
            # This does not seem to be true in PY3
            d1 = dt.datetime(1969, 12, 31, 23, 59, 59, 999)
            d2 = dt.datetime(1969, 12, 31, 23, 59, 59)
            log.date = d1
            log.save()
            log.reload()
            self.assertNotEqual(log.date, d1)
            self.assertEqual(log.date, d2)

    def test_regular_usage(self):
        """Tests for regular datetime fields"""

        class LogEntry(Document):
            date = DateTimeField()

        LogEntry.drop_collection()

        d1 = dt.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.validate()
        log.save()

        for query in (d1, d1.isoformat(" ")):
            log1 = LogEntry.objects.get(date=query)
            self.assertEqual(log, log1)

        if dateutil:
            log1 = LogEntry.objects.get(date=d1.isoformat("T"))
            self.assertEqual(log, log1)

        # create additional 19 log entries for a total of 20
        for i in range(1971, 1990):
            d = dt.datetime(i, 1, 1, 0, 0, 1)
            LogEntry(date=d).save()

        self.assertEqual(LogEntry.objects.count(), 20)

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 19:
            self.assertTrue(logs[i].date <= logs[i + 1].date)
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 19:
            self.assertTrue(logs[i].date >= logs[i + 1].date)
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=dt.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 10)

        logs = LogEntry.objects.filter(date__lte=dt.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 10)

        logs = LogEntry.objects.filter(
            date__lte=dt.datetime(1980, 1, 1), date__gte=dt.datetime(1975, 1, 1)
        )
        self.assertEqual(logs.count(), 5)

    def test_datetime_validation(self):
        """Ensure that invalid values cannot be assigned to datetime
        fields.
        """

        class LogEntry(Document):
            time = DateTimeField()

        log = LogEntry()
        log.time = dt.datetime.now()
        log.validate()

        log.time = dt.date.today()
        log.validate()

        log.time = dt.datetime.now().isoformat(" ")
        log.validate()

        log.time = "2019-05-16 21:42:57.897847"
        log.validate()

        if dateutil:
            log.time = dt.datetime.now().isoformat("T")
            log.validate()

        log.time = -1
        self.assertRaises(ValidationError, log.validate)
        log.time = "ABC"
        self.assertRaises(ValidationError, log.validate)
        log.time = "2019-05-16 21:GARBAGE:12"
        self.assertRaises(ValidationError, log.validate)
        log.time = "2019-05-16 21:42:57.GARBAGE"
        self.assertRaises(ValidationError, log.validate)
        log.time = "2019-05-16 21:42:57.123.456"
        self.assertRaises(ValidationError, log.validate)

    def test_parse_datetime_as_str(self):
        class DTDoc(Document):
            date = DateTimeField()

        date_str = "2019-03-02 22:26:01"

        # make sure that passing a parsable datetime works
        dtd = DTDoc()
        dtd.date = date_str
        self.assertIsInstance(dtd.date, six.string_types)
        dtd.save()
        dtd.reload()

        self.assertIsInstance(dtd.date, dt.datetime)
        self.assertEqual(str(dtd.date), date_str)

        dtd.date = "January 1st, 9999999999"
        self.assertRaises(ValidationError, dtd.validate)


class TestDateTimeTzAware(MongoDBTestCase):
    def test_datetime_tz_aware_mark_as_changed(self):
        # Reset the connections
        connection._connection_settings = {}
        connection._connections = {}
        connection._dbs = {}

        connect(db="mongoenginetest", tz_aware=True)

        class LogEntry(Document):
            time = DateTimeField()

        LogEntry.drop_collection()

        LogEntry(time=dt.datetime(2013, 1, 1, 0, 0, 0)).save()

        log = LogEntry.objects.first()
        log.time = dt.datetime(2013, 1, 1, 0, 0, 0)
        self.assertEqual(["time"], log._changed_fields)
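# Note: the tz-aware test above documents a subtle gotcha - after
# connect(..., tz_aware=True) the reloaded value carries a tzinfo, so
# reassigning a naive datetime with the same wall-clock time still marks
# the field as changed.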
104  tests/fields/test_decimal_field.py  Normal file
@@ -0,0 +1,104 @@
# -*- coding: utf-8 -*-
from decimal import Decimal

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestDecimalField(MongoDBTestCase):
    def test_validation(self):
        """Ensure that invalid values cannot be assigned to decimal fields."""

        class Person(Document):
            height = DecimalField(min_value=Decimal("0.1"), max_value=Decimal("3.5"))

        Person.drop_collection()

        Person(height=Decimal("1.89")).save()
        person = Person.objects.first()
        self.assertEqual(person.height, Decimal("1.89"))

        person.height = "2.0"
        person.save()
        person.height = 0.01
        self.assertRaises(ValidationError, person.validate)
        person.height = Decimal("0.01")
        self.assertRaises(ValidationError, person.validate)
        person.height = Decimal("4.0")
        self.assertRaises(ValidationError, person.validate)
        person.height = "something invalid"
        self.assertRaises(ValidationError, person.validate)

        person_2 = Person(height="something invalid")
        self.assertRaises(ValidationError, person_2.validate)

    def test_comparison(self):
        class Person(Document):
            money = DecimalField()

        Person.drop_collection()

        Person(money=6).save()
        Person(money=7).save()
        Person(money=8).save()
        Person(money=10).save()

        self.assertEqual(2, Person.objects(money__gt=Decimal("7")).count())
        self.assertEqual(2, Person.objects(money__gt=7).count())
        self.assertEqual(2, Person.objects(money__gt="7").count())

        self.assertEqual(3, Person.objects(money__gte="7").count())

    def test_storage(self):
        class Person(Document):
            float_value = DecimalField(precision=4)
            string_value = DecimalField(precision=4, force_string=True)

        Person.drop_collection()
        values_to_store = [
            10,
            10.1,
            10.11,
            "10.111",
            Decimal("10.1111"),
            Decimal("10.11111"),
        ]
        for store_at_creation in [True, False]:
            for value in values_to_store:
                # to_python is called explicitly if values were sent in the
                # kwargs of __init__
                if store_at_creation:
                    Person(float_value=value, string_value=value).save()
                else:
                    person = Person.objects.create()
                    person.float_value = value
                    person.string_value = value
                    person.save()

        # How it's stored
        expected = [
            {"float_value": 10.0, "string_value": "10.0000"},
            {"float_value": 10.1, "string_value": "10.1000"},
            {"float_value": 10.11, "string_value": "10.1100"},
            {"float_value": 10.111, "string_value": "10.1110"},
            {"float_value": 10.1111, "string_value": "10.1111"},
            {"float_value": 10.1111, "string_value": "10.1111"},
        ]
        expected.extend(expected)
        actual = list(Person.objects.exclude("id").as_pymongo())
        self.assertEqual(expected, actual)

        # How it comes out locally
        expected = [
            Decimal("10.0000"),
            Decimal("10.1000"),
            Decimal("10.1100"),
            Decimal("10.1110"),
            Decimal("10.1111"),
            Decimal("10.1111"),
        ]
        expected.extend(expected)
        for field_name in ["float_value", "string_value"]:
            actual = list(Person.objects().scalar(field_name))
            self.assertEqual(expected, actual)
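# Note: the storage test above shows DecimalField's two representations -
# by default the value is rounded to `precision` decimal places and stored
# as a float, while force_string=True stores the padded string form (e.g.
# "10.1000"), trading native numeric comparisons for exact decimal
# round-trips.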
341  tests/fields/test_dict_field.py  Normal file
@@ -0,0 +1,341 @@
# -*- coding: utf-8 -*-
from mongoengine import *
from mongoengine.base import BaseDict

from tests.utils import MongoDBTestCase, get_as_pymongo


class TestDictField(MongoDBTestCase):
    def test_storage(self):
        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        info = {"testkey": "testvalue"}
        post = BlogPost(info=info).save()
        self.assertEqual(get_as_pymongo(post), {"_id": post.id, "info": info})

    def test_general_things(self):
        """Ensure that dict types work as expected."""

        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        post = BlogPost()
        post.info = "my post"
        self.assertRaises(ValidationError, post.validate)

        post.info = ["test", "test"]
        self.assertRaises(ValidationError, post.validate)

        post.info = {"$title": "test"}
        self.assertRaises(ValidationError, post.validate)

        post.info = {"nested": {"$title": "test"}}
        self.assertRaises(ValidationError, post.validate)

        post.info = {"the.title": "test"}
        self.assertRaises(ValidationError, post.validate)

        post.info = {"nested": {"the.title": "test"}}
        self.assertRaises(ValidationError, post.validate)

        post.info = {1: "test"}
        self.assertRaises(ValidationError, post.validate)

        post.info = {"title": "test"}
        post.save()

        post = BlogPost()
        post.info = {"title": "dollar_sign", "details": {"te$t": "test"}}
        post.save()

        post = BlogPost()
        post.info = {"details": {"test": "test"}}
        post.save()

        post = BlogPost()
        post.info = {"details": {"test": 3}}
        post.save()

        self.assertEqual(BlogPost.objects.count(), 4)
        self.assertEqual(BlogPost.objects.filter(info__title__exact="test").count(), 1)
        self.assertEqual(
            BlogPost.objects.filter(info__details__test__exact="test").count(), 1
        )

        post = BlogPost.objects.filter(info__title__exact="dollar_sign").first()
        self.assertIn("te$t", post["info"]["details"])

        # Confirm handles non strings or non existing keys
        self.assertEqual(
            BlogPost.objects.filter(info__details__test__exact=5).count(), 0
        )
        self.assertEqual(
            BlogPost.objects.filter(info__made_up__test__exact="test").count(), 0
        )

        post = BlogPost.objects.create(info={"title": "original"})
        post.info.update({"title": "updated"})
        post.save()
        post.reload()
        self.assertEqual("updated", post.info["title"])

        post.info.setdefault("authors", [])
        post.save()
        post.reload()
        self.assertEqual([], post.info["authors"])

    def test_dictfield_dump_document(self):
        """Ensure a DictField can handle another document's dump."""

        class Doc(Document):
            field = DictField()

        class ToEmbed(Document):
            id = IntField(primary_key=True, default=1)
            recursive = DictField()

        class ToEmbedParent(Document):
            id = IntField(primary_key=True, default=1)
            recursive = DictField()

            meta = {"allow_inheritance": True}

        class ToEmbedChild(ToEmbedParent):
            pass

        to_embed_recursive = ToEmbed(id=1).save()
        to_embed = ToEmbed(
            id=2, recursive=to_embed_recursive.to_mongo().to_dict()
        ).save()
        doc = Doc(field=to_embed.to_mongo().to_dict())
        doc.save()
        self.assertIsInstance(doc.field, dict)
        self.assertEqual(
            doc.field, {"_id": 2, "recursive": {"_id": 1, "recursive": {}}}
        )
        # Same thing with a Document with a _cls field
        to_embed_recursive = ToEmbedChild(id=1).save()
        to_embed_child = ToEmbedChild(
            id=2, recursive=to_embed_recursive.to_mongo().to_dict()
        ).save()
        doc = Doc(field=to_embed_child.to_mongo().to_dict())
        doc.save()
        self.assertIsInstance(doc.field, dict)
        expected = {
            "_id": 2,
            "_cls": "ToEmbedParent.ToEmbedChild",
            "recursive": {
                "_id": 1,
                "_cls": "ToEmbedParent.ToEmbedChild",
                "recursive": {},
            },
        }
        self.assertEqual(doc.field, expected)

    def test_dictfield_strict(self):
        """Ensure that dict field handles validation if provided a strict field type."""

        class Simple(Document):
            mapping = DictField(field=IntField())

        Simple.drop_collection()

        e = Simple()
        e.mapping["someint"] = 1
        e.save()

        # try creating an invalid mapping
        with self.assertRaises(ValidationError):
            e.mapping["somestring"] = "abc"
            e.save()

    def test_dictfield_complex(self):
        """Ensure that the dict field can handle the complex types."""

        class SettingBase(EmbeddedDocument):
            meta = {"allow_inheritance": True}

        class StringSetting(SettingBase):
            value = StringField()

        class IntegerSetting(SettingBase):
            value = IntField()

        class Simple(Document):
            mapping = DictField()

        Simple.drop_collection()

        e = Simple()
        e.mapping["somestring"] = StringSetting(value="foo")
        e.mapping["someint"] = IntegerSetting(value=42)
        e.mapping["nested_dict"] = {
            "number": 1,
            "string": "Hi!",
            "float": 1.001,
            "complex": IntegerSetting(value=42),
            "list": [IntegerSetting(value=42), StringSetting(value="foo")],
        }
        e.save()

        e2 = Simple.objects.get(id=e.id)
        self.assertIsInstance(e2.mapping["somestring"], StringSetting)
        self.assertIsInstance(e2.mapping["someint"], IntegerSetting)

        # Test querying
        self.assertEqual(Simple.objects.filter(mapping__someint__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__number=1).count(), 1
        )
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1
        )
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1
        )
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count(), 1
        )

        # Confirm can update
        Simple.objects().update(set__mapping={"someint": IntegerSetting(value=10)})
        Simple.objects().update(
            set__mapping__nested_dict__list__1=StringSetting(value="Boo")
        )
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count(), 0
        )
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value="Boo").count(), 1
        )

    def test_push_dict(self):
        class MyModel(Document):
            events = ListField(DictField())

        doc = MyModel(events=[{"a": 1}]).save()
        raw_doc = get_as_pymongo(doc)
        expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}]}
        self.assertEqual(raw_doc, expected_raw_doc)

        MyModel.objects(id=doc.id).update(push__events={})
        raw_doc = get_as_pymongo(doc)
        expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}, {}]}
        self.assertEqual(raw_doc, expected_raw_doc)

    def test_ensure_unique_default_instances(self):
        """Ensure that every field has its own unique default instance."""

        class D(Document):
            data = DictField()
            data2 = DictField(default=lambda: {})

        d1 = D()
        d1.data["foo"] = "bar"
        d1.data2["foo"] = "bar"
        d2 = D()
        self.assertEqual(d2.data, {})
        self.assertEqual(d2.data2, {})

    def test_dict_field_invalid_dict_value(self):
        class DictFieldTest(Document):
            dictionary = DictField(required=True)

        DictFieldTest.drop_collection()

        test = DictFieldTest(dictionary=None)
        test.dictionary  # Just access to test getter
        self.assertRaises(ValidationError, test.validate)

        test = DictFieldTest(dictionary=False)
        test.dictionary  # Just access to test getter
        self.assertRaises(ValidationError, test.validate)

    def test_dict_field_raises_validation_error_if_wrongly_assign_embedded_doc(self):
        class DictFieldTest(Document):
            dictionary = DictField(required=True)

        DictFieldTest.drop_collection()

        class Embedded(EmbeddedDocument):
            name = StringField()

        embed = Embedded(name="garbage")
        doc = DictFieldTest(dictionary=embed)
        with self.assertRaises(ValidationError) as ctx_err:
            doc.validate()
        self.assertIn("'dictionary'", str(ctx_err.exception))
        self.assertIn(
            "Only dictionaries may be used in a DictField", str(ctx_err.exception)
        )

    def test_atomic_update_dict_field(self):
        """Ensure that the entire DictField can be atomically updated."""

        class Simple(Document):
            mapping = DictField(field=ListField(IntField(required=True)))

        Simple.drop_collection()

        e = Simple()
        e.mapping["someints"] = [1, 2]
        e.save()
        e.update(set__mapping={"ints": [3, 4]})
        e.reload()
        self.assertEqual(BaseDict, type(e.mapping))
        self.assertEqual({"ints": [3, 4]}, e.mapping)

        # try creating an invalid mapping
        with self.assertRaises(ValueError):
            e.update(set__mapping={"somestrings": ["foo", "bar"]})

    def test_dictfield_with_referencefield_complex_nesting_cases(self):
        """Ensure complex nesting inside DictField handles dereferencing of ReferenceField(dbref=True | False)"""
        # Relates to Issue #1453
        class Doc(Document):
            s = StringField()

        class Simple(Document):
            mapping0 = DictField(ReferenceField(Doc, dbref=True))
            mapping1 = DictField(ReferenceField(Doc, dbref=False))
            mapping2 = DictField(ListField(ReferenceField(Doc, dbref=True)))
            mapping3 = DictField(ListField(ReferenceField(Doc, dbref=False)))
            mapping4 = DictField(DictField(field=ReferenceField(Doc, dbref=True)))
            mapping5 = DictField(DictField(field=ReferenceField(Doc, dbref=False)))
            mapping6 = DictField(ListField(DictField(ReferenceField(Doc, dbref=True))))
            mapping7 = DictField(ListField(DictField(ReferenceField(Doc, dbref=False))))
            mapping8 = DictField(
                ListField(DictField(ListField(ReferenceField(Doc, dbref=True))))
            )
            mapping9 = DictField(
                ListField(DictField(ListField(ReferenceField(Doc, dbref=False))))
            )

        Doc.drop_collection()
        Simple.drop_collection()

        d = Doc(s="aa").save()
        e = Simple()
        e.mapping0["someint"] = e.mapping1["someint"] = d
        e.mapping2["someint"] = e.mapping3["someint"] = [d]
        e.mapping4["someint"] = e.mapping5["someint"] = {"d": d}
        e.mapping6["someint"] = e.mapping7["someint"] = [{"d": d}]
        e.mapping8["someint"] = e.mapping9["someint"] = [{"d": [d]}]
        e.save()

        s = Simple.objects.first()
        self.assertIsInstance(s.mapping0["someint"], Doc)
        self.assertIsInstance(s.mapping1["someint"], Doc)
        self.assertIsInstance(s.mapping2["someint"][0], Doc)
        self.assertIsInstance(s.mapping3["someint"][0], Doc)
        self.assertIsInstance(s.mapping4["someint"]["d"], Doc)
        self.assertIsInstance(s.mapping5["someint"]["d"], Doc)
        self.assertIsInstance(s.mapping6["someint"][0]["d"], Doc)
        self.assertIsInstance(s.mapping7["someint"][0]["d"], Doc)
        self.assertIsInstance(s.mapping8["someint"][0]["d"][0], Doc)
        self.assertIsInstance(s.mapping9["someint"][0]["d"][0], Doc)
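# Note: test_general_things above pins down DictField key validation - keys
# must be strings, must not start with "$" and must not contain ".", at any
# nesting level; a "$" elsewhere in the key (e.g. "te$t") is accepted.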
131  tests/fields/test_email_field.py  Normal file
@@ -0,0 +1,131 @@
# -*- coding: utf-8 -*-
import sys
from unittest import SkipTest

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestEmailField(MongoDBTestCase):
    def test_generic_behavior(self):
        class User(Document):
            email = EmailField()

        user = User(email="ross@example.com")
        user.validate()

        user = User(email="ross@example.co.uk")
        user.validate()

        user = User(
            email="Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5SaJIazqqWkm7.net"
        )
        user.validate()

        user = User(email="new-tld@example.technology")
        user.validate()

        user = User(email="ross@example.com.")
        self.assertRaises(ValidationError, user.validate)

        # unicode domain
        user = User(email=u"user@пример.рф")
        user.validate()

        # invalid unicode domain
        user = User(email=u"user@пример")
        self.assertRaises(ValidationError, user.validate)

        # invalid data type
        user = User(email=123)
        self.assertRaises(ValidationError, user.validate)

    def test_email_field_unicode_user(self):
        # Don't run this test on pypy3, which doesn't support unicode regex:
        # https://bitbucket.org/pypy/pypy/issues/1821/regular-expression-doesnt-find-unicode
        if sys.version_info[:2] == (3, 2):
            raise SkipTest("unicode email addresses are not supported on PyPy 3")

        class User(Document):
            email = EmailField()

        # unicode user shouldn't validate by default...
        user = User(email=u"Dörte@Sörensen.example.com")
        self.assertRaises(ValidationError, user.validate)

        # ...but it should be fine with allow_utf8_user set to True
        class User(Document):
            email = EmailField(allow_utf8_user=True)

        user = User(email=u"Dörte@Sörensen.example.com")
        user.validate()

    def test_email_field_domain_whitelist(self):
        class User(Document):
            email = EmailField()

        # localhost domain shouldn't validate by default...
        user = User(email="me@localhost")
        self.assertRaises(ValidationError, user.validate)

        # ...but it should be fine if it's whitelisted
        class User(Document):
            email = EmailField(domain_whitelist=["localhost"])

        user = User(email="me@localhost")
        user.validate()

    def test_email_domain_validation_fails_if_invalid_idn(self):
        class User(Document):
            email = EmailField()

        invalid_idn = ".google.com"
        user = User(email="me@%s" % invalid_idn)
        with self.assertRaises(ValidationError) as ctx_err:
            user.validate()
        self.assertIn("domain failed IDN encoding", str(ctx_err.exception))

    def test_email_field_ip_domain(self):
        class User(Document):
            email = EmailField()

        valid_ipv4 = "email@[127.0.0.1]"
        valid_ipv6 = "email@[2001:dB8::1]"
        invalid_ip = "email@[324.0.0.1]"

        # IP address as a domain shouldn't validate by default...
        user = User(email=valid_ipv4)
        self.assertRaises(ValidationError, user.validate)

        user = User(email=valid_ipv6)
        self.assertRaises(ValidationError, user.validate)

        user = User(email=invalid_ip)
        self.assertRaises(ValidationError, user.validate)

        # ...but it should be fine with allow_ip_domain set to True
        class User(Document):
            email = EmailField(allow_ip_domain=True)

        user = User(email=valid_ipv4)
        user.validate()

        user = User(email=valid_ipv6)
        user.validate()

        # invalid IP should still fail validation
        user = User(email=invalid_ip)
        self.assertRaises(ValidationError, user.validate)

    def test_email_field_honors_regex(self):
        class User(Document):
            email = EmailField(regex=r"\w+@example.com")

        # Fails regex validation
        user = User(email="me@foo.com")
        self.assertRaises(ValidationError, user.validate)

        # Passes regex validation
        user = User(email="me@example.com")
        self.assertIsNone(user.validate())
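# Note: taken together, the tests above cover EmailField's validation knobs -
# allow_utf8_user for non-ASCII local parts, domain_whitelist for names such
# as "localhost", allow_ip_domain for bracketed IPv4/IPv6 literals, and a
# custom `regex` enforced on top of the built-in checks.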
350  tests/fields/test_embedded_document_field.py  Normal file
@@ -0,0 +1,350 @@
# -*- coding: utf-8 -*-
from mongoengine import (
    Document,
    StringField,
    ValidationError,
    EmbeddedDocument,
    EmbeddedDocumentField,
    InvalidQueryError,
    LookUpError,
    IntField,
    GenericEmbeddedDocumentField,
    ListField,
    EmbeddedDocumentListField,
    ReferenceField,
)

from tests.utils import MongoDBTestCase


class TestEmbeddedDocumentField(MongoDBTestCase):
    def test___init___(self):
        class MyDoc(EmbeddedDocument):
            name = StringField()

        field = EmbeddedDocumentField(MyDoc)
        self.assertEqual(field.document_type_obj, MyDoc)

        field2 = EmbeddedDocumentField("MyDoc")
        self.assertEqual(field2.document_type_obj, "MyDoc")

    def test___init___throw_error_if_document_type_is_not_EmbeddedDocument(self):
        with self.assertRaises(ValidationError):
            EmbeddedDocumentField(dict)

    def test_document_type_throw_error_if_not_EmbeddedDocument_subclass(self):
        class MyDoc(Document):
            name = StringField()

        emb = EmbeddedDocumentField("MyDoc")
        with self.assertRaises(ValidationError) as ctx:
            emb.document_type
        self.assertIn(
            "Invalid embedded document class provided to an EmbeddedDocumentField",
            str(ctx.exception),
        )

    def test_embedded_document_field_only_allow_subclasses_of_embedded_document(self):
        # Relates to #1661
        class MyDoc(Document):
            name = StringField()

        with self.assertRaises(ValidationError):

            class MyFailingDoc(Document):
                emb = EmbeddedDocumentField(MyDoc)

        with self.assertRaises(ValidationError):

            class MyFailingdoc2(Document):
                emb = EmbeddedDocumentField("MyDoc")

    def test_query_embedded_document_attribute(self):
        class AdminSettings(EmbeddedDocument):
            foo1 = StringField()
            foo2 = StringField()

        class Person(Document):
            settings = EmbeddedDocumentField(AdminSettings)
            name = StringField()

        Person.drop_collection()

        p = Person(settings=AdminSettings(foo1="bar1", foo2="bar2"), name="John").save()

        # Test non existing attribute
        with self.assertRaises(InvalidQueryError) as ctx_err:
            Person.objects(settings__notexist="bar").first()
        self.assertEqual(str(ctx_err.exception), 'Cannot resolve field "notexist"')

        with self.assertRaises(LookUpError):
            Person.objects.only("settings.notexist")

        # Test existing attribute
        self.assertEqual(Person.objects(settings__foo1="bar1").first().id, p.id)
        only_p = Person.objects.only("settings.foo1").first()
        self.assertEqual(only_p.settings.foo1, p.settings.foo1)
        self.assertIsNone(only_p.settings.foo2)
        self.assertIsNone(only_p.name)

        exclude_p = Person.objects.exclude("settings.foo1").first()
        self.assertIsNone(exclude_p.settings.foo1)
        self.assertEqual(exclude_p.settings.foo2, p.settings.foo2)
        self.assertEqual(exclude_p.name, p.name)

    def test_query_embedded_document_attribute_with_inheritance(self):
        class BaseSettings(EmbeddedDocument):
            meta = {"allow_inheritance": True}
            base_foo = StringField()

        class AdminSettings(BaseSettings):
            sub_foo = StringField()

        class Person(Document):
            settings = EmbeddedDocumentField(BaseSettings)

        Person.drop_collection()

        p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo"))
        p.save()

        # Test non existing attribute
        with self.assertRaises(InvalidQueryError) as ctx_err:
            self.assertEqual(Person.objects(settings__notexist="bar").first().id, p.id)
        self.assertEqual(str(ctx_err.exception), 'Cannot resolve field "notexist"')

        # Test existing attribute
        self.assertEqual(Person.objects(settings__base_foo="basefoo").first().id, p.id)
        self.assertEqual(Person.objects(settings__sub_foo="subfoo").first().id, p.id)

        only_p = Person.objects.only("settings.base_foo", "settings._cls").first()
        self.assertEqual(only_p.settings.base_foo, "basefoo")
        self.assertIsNone(only_p.settings.sub_foo)

    def test_query_list_embedded_document_with_inheritance(self):
        class Post(EmbeddedDocument):
            title = StringField(max_length=120, required=True)
            meta = {"allow_inheritance": True}

        class TextPost(Post):
            content = StringField()

        class MoviePost(Post):
            author = StringField()

        class Record(Document):
            posts = ListField(EmbeddedDocumentField(Post))

        record_movie = Record(posts=[MoviePost(author="John", title="foo")]).save()
        record_text = Record(posts=[TextPost(content="a", title="foo")]).save()

        records = list(Record.objects(posts__author=record_movie.posts[0].author))
        self.assertEqual(len(records), 1)
        self.assertEqual(records[0].id, record_movie.id)

        records = list(Record.objects(posts__content=record_text.posts[0].content))
        self.assertEqual(len(records), 1)
        self.assertEqual(records[0].id, record_text.id)

        self.assertEqual(Record.objects(posts__title="foo").count(), 2)


class TestGenericEmbeddedDocumentField(MongoDBTestCase):
    def test_generic_embedded_document(self):
        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            like = GenericEmbeddedDocumentField()

        Person.drop_collection()

        person = Person(name="Test User")
        person.like = Car(name="Fiat")
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.like, Car)

        person.like = Dish(food="arroz", number=15)
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.like, Dish)

    def test_generic_embedded_document_choices(self):
        """Ensure you can limit GenericEmbeddedDocument choices."""

        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            like = GenericEmbeddedDocumentField(choices=(Dish,))

        Person.drop_collection()

        person = Person(name="Test User")
        person.like = Car(name="Fiat")
        self.assertRaises(ValidationError, person.validate)

        person.like = Dish(food="arroz", number=15)
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.like, Dish)

    def test_generic_list_embedded_document_choices(self):
        """Ensure you can limit GenericEmbeddedDocument choices inside
        a list field.
        """

        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            likes = ListField(GenericEmbeddedDocumentField(choices=(Dish,)))

        Person.drop_collection()

        person = Person(name="Test User")
        person.likes = [Car(name="Fiat")]
        self.assertRaises(ValidationError, person.validate)

        person.likes = [Dish(food="arroz", number=15)]
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.likes[0], Dish)

    def test_choices_validation_documents(self):
        """
        Ensure fields with document choices validate given a valid choice.
        """

        class UserComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class BlogPost(Document):
            comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,)))

        # Ensure Validation Passes
        BlogPost(comments=[UserComments(author="user2", message="message2")]).save()

    def test_choices_validation_documents_invalid(self):
        """
        Ensure fields with document choices validate given an invalid choice.
        This should throw a ValidationError exception.
        """

        class UserComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class ModeratorComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class BlogPost(Document):
            comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,)))

        # Single Entry Failure
        post = BlogPost(comments=[ModeratorComments(author="mod1", message="message1")])
        self.assertRaises(ValidationError, post.save)

        # Mixed Entry Failure
        post = BlogPost(
            comments=[
                ModeratorComments(author="mod1", message="message1"),
                UserComments(author="user2", message="message2"),
            ]
        )
        self.assertRaises(ValidationError, post.save)

    def test_choices_validation_documents_inheritance(self):
        """
        Ensure fields with document choices validate given subclass of choice.
        """

        class Comments(EmbeddedDocument):
            meta = {"abstract": True}
            author = StringField()
            message = StringField()

        class UserComments(Comments):
            pass

        class BlogPost(Document):
            comments = ListField(GenericEmbeddedDocumentField(choices=(Comments,)))

        # Save Valid EmbeddedDocument Type
        BlogPost(comments=[UserComments(author="user2", message="message2")]).save()

    def test_query_generic_embedded_document_attribute(self):
        class AdminSettings(EmbeddedDocument):
            foo1 = StringField()

        class NonAdminSettings(EmbeddedDocument):
            foo2 = StringField()

        class Person(Document):
            settings = GenericEmbeddedDocumentField(
                choices=(AdminSettings, NonAdminSettings)
            )

        Person.drop_collection()

        p1 = Person(settings=AdminSettings(foo1="bar1")).save()
        p2 = Person(settings=NonAdminSettings(foo2="bar2")).save()

        # Test non existing attribute
        with self.assertRaises(InvalidQueryError) as ctx_err:
            Person.objects(settings__notexist="bar").first()
        self.assertEqual(str(ctx_err.exception), 'Cannot resolve field "notexist"')

        with self.assertRaises(LookUpError):
            Person.objects.only("settings.notexist")

        # Test existing attribute
        self.assertEqual(Person.objects(settings__foo1="bar1").first().id, p1.id)
        self.assertEqual(Person.objects(settings__foo2="bar2").first().id, p2.id)

    def test_query_generic_embedded_document_attribute_with_inheritance(self):
        class BaseSettings(EmbeddedDocument):
            meta = {"allow_inheritance": True}
            base_foo = StringField()

        class AdminSettings(BaseSettings):
            sub_foo = StringField()

        class Person(Document):
            settings = GenericEmbeddedDocumentField(choices=[BaseSettings])

        Person.drop_collection()

        p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo"))
        p.save()

        # Test non existing attribute
        with self.assertRaises(InvalidQueryError) as ctx_err:
            self.assertEqual(Person.objects(settings__notexist="bar").first().id, p.id)
        self.assertEqual(str(ctx_err.exception), 'Cannot resolve field "notexist"')

        # Test existing attribute
        self.assertEqual(Person.objects(settings__base_foo="basefoo").first().id, p.id)
        self.assertEqual(Person.objects(settings__sub_foo="subfoo").first().id, p.id)
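# Note: the projection tests above document how only()/exclude() interact
# with embedded documents - fields can be projected with dotted paths such as
# "settings.foo1", and un-projected embedded fields simply come back as None
# rather than raising.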
58  tests/fields/test_float_field.py  Normal file
@@ -0,0 +1,58 @@
# -*- coding: utf-8 -*-
import six

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestFloatField(MongoDBTestCase):
    def test_float_ne_operator(self):
        class TestDocument(Document):
            float_fld = FloatField()

        TestDocument.drop_collection()

        TestDocument(float_fld=None).save()
        TestDocument(float_fld=1).save()

        self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count())
        self.assertEqual(1, TestDocument.objects(float_fld__ne=1).count())

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to float fields."""

        class Person(Document):
            height = FloatField(min_value=0.1, max_value=3.5)

        class BigPerson(Document):
            height = FloatField()

        person = Person()
        person.height = 1.89
        person.validate()

        person.height = "2.0"
        self.assertRaises(ValidationError, person.validate)

        person.height = 0.01
        self.assertRaises(ValidationError, person.validate)

        person.height = 4.0
        self.assertRaises(ValidationError, person.validate)

        person_2 = Person(height="something invalid")
        self.assertRaises(ValidationError, person_2.validate)

        big_person = BigPerson()

        for value, value_type in enumerate(six.integer_types):
            big_person.height = value_type(value)
            big_person.validate()

        big_person.height = 2 ** 500
        big_person.validate()

        big_person.height = 2 ** 100000  # Too big for a float value
        self.assertRaises(ValidationError, big_person.validate)
tests/fields/test_int_field.py (new file, 42 lines)
@@ -0,0 +1,42 @@
# -*- coding: utf-8 -*-
from mongoengine import *

from tests.utils import MongoDBTestCase


class TestIntField(MongoDBTestCase):
    def test_int_validation(self):
        """Ensure that invalid values cannot be assigned to int fields."""

        class Person(Document):
            age = IntField(min_value=0, max_value=110)

        person = Person()
        person.age = 0
        person.validate()

        person.age = 50
        person.validate()

        person.age = 110
        person.validate()

        person.age = -1
        self.assertRaises(ValidationError, person.validate)
        person.age = 120
        self.assertRaises(ValidationError, person.validate)
        person.age = "ten"
        self.assertRaises(ValidationError, person.validate)

    def test_ne_operator(self):
        class TestDocument(Document):
            int_fld = IntField()

        TestDocument.drop_collection()

        TestDocument(int_fld=None).save()
        TestDocument(int_fld=1).save()

        self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count())
        self.assertEqual(1, TestDocument.objects(int_fld__ne=1).count())
tests/fields/test_lazy_reference_field.py (new file, 574 lines)
@@ -0,0 +1,574 @@
# -*- coding: utf-8 -*-
from bson import DBRef, ObjectId

from mongoengine import *
from mongoengine.base import LazyReference

from tests.utils import MongoDBTestCase


class TestLazyReferenceField(MongoDBTestCase):
    def test_lazy_reference_config(self):
        # Make sure LazyReferenceField only accepts a document class or a
        # string with a document class name.
        self.assertRaises(ValidationError, LazyReferenceField, EmbeddedDocument)

    def test___repr__(self):
        class Animal(Document):
            pass

        class Ocurrence(Document):
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal()
        oc = Ocurrence(animal=animal)
        self.assertIn("LazyReference", repr(oc.animal))

    def test___getattr___unknown_attr_raises_attribute_error(self):
        class Animal(Document):
            pass

        class Ocurrence(Document):
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal().save()
        oc = Ocurrence(animal=animal)
        with self.assertRaises(AttributeError):
            oc.animal.not_exist

    def test_lazy_reference_simple(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        fetched_animal = p.animal.fetch()
        self.assertEqual(fetched_animal, animal)
        # `fetch` keeps the referenced document cached by default...
        animal.tag = "not so heavy"
        animal.save()
        double_fetch = p.animal.fetch()
        self.assertIs(fetched_animal, double_fetch)
        self.assertEqual(double_fetch.tag, "heavy")
        # ...unless specified otherwise
        fetch_force = p.animal.fetch(force=True)
        self.assertIsNot(fetch_force, fetched_animal)
        self.assertEqual(fetch_force.tag, "not so heavy")

    def test_lazy_reference_fetch_invalid_ref(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        animal.delete()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        with self.assertRaises(DoesNotExist):
            p.animal.fetch()

    def test_lazy_reference_set(self):
        class Animal(Document):
            meta = {"allow_inheritance": True}

            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class SubAnimal(Animal):
            nick = StringField()

        animal = Animal(name="Leopard", tag="heavy").save()
        sub_animal = SubAnimal(nick="doggo", name="dog").save()
        for ref in (
            animal,
            animal.pk,
            DBRef(animal._get_collection_name(), animal.pk),
            LazyReference(Animal, animal.pk),
            sub_animal,
            sub_animal.pk,
            DBRef(sub_animal._get_collection_name(), sub_animal.pk),
            LazyReference(SubAnimal, sub_animal.pk),
        ):
            p = Ocurrence(person="test", animal=ref).save()
            p.reload()
            self.assertIsInstance(p.animal, LazyReference)
            p.animal.fetch()

    def test_lazy_reference_bad_set(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class BadDoc(Document):
            pass

        animal = Animal(name="Leopard", tag="heavy").save()
        baddoc = BadDoc().save()
        for bad in (
            42,
            "foo",
            baddoc,
            DBRef(baddoc._get_collection_name(), animal.pk),
            LazyReference(BadDoc, animal.pk),
        ):
            with self.assertRaises(ValidationError):
                p = Ocurrence(person="test", animal=bad).save()

    def test_lazy_reference_query_conversion(self):
        """Ensure that LazyReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
        """

        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = LazyReferenceField(Member, dbref=False)

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title="post 1", author=m1)
        post1.save()

        post2 = BlogPost(title="post 2", author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)

        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        self.assertEqual(post.id, post2.id)

    def test_lazy_reference_query_conversion_dbref(self):
        """Ensure that LazyReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
        """

        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = LazyReferenceField(Member, dbref=True)

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title="post 1", author=m1)
        post1.save()

        post2 = BlogPost(title="post 2", author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)

        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        self.assertEqual(post.id, post2.id)

    def test_lazy_reference_passthrough(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            animal = LazyReferenceField(Animal, passthrough=False)
            animal_passthrough = LazyReferenceField(Animal, passthrough=True)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(animal=animal, animal_passthrough=animal).save()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        with self.assertRaises(KeyError):
            p.animal["name"]
        with self.assertRaises(AttributeError):
            p.animal.name
        self.assertEqual(p.animal.pk, animal.pk)

        self.assertEqual(p.animal_passthrough.name, "Leopard")
        self.assertEqual(p.animal_passthrough["name"], "Leopard")

        # Should not be able to access referenced document's methods
        with self.assertRaises(AttributeError):
            p.animal.save
        with self.assertRaises(KeyError):
            p.animal["save"]

    def test_lazy_reference_not_set(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        Ocurrence(person="foo").save()
        p = Ocurrence.objects.get()
        self.assertIs(p.animal, None)

    def test_lazy_reference_equality(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        Animal.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        animalref = LazyReference(Animal, animal.pk)
        self.assertEqual(animal, animalref)
        self.assertEqual(animalref, animal)

        other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90"))
        self.assertNotEqual(animal, other_animalref)
        self.assertNotEqual(other_animalref, animal)

    def test_lazy_reference_embedded(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class EmbeddedOcurrence(EmbeddedDocument):
            in_list = ListField(LazyReferenceField(Animal))
            direct = LazyReferenceField(Animal)

        class Ocurrence(Document):
            in_list = ListField(LazyReferenceField(Animal))
            in_embedded = EmbeddedDocumentField(EmbeddedOcurrence)
            direct = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal1 = Animal(name="doggo").save()
        animal2 = Animal(name="cheeta").save()

        def check_fields_type(occ):
            self.assertIsInstance(occ.direct, LazyReference)
            for elem in occ.in_list:
                self.assertIsInstance(elem, LazyReference)
            self.assertIsInstance(occ.in_embedded.direct, LazyReference)
            for elem in occ.in_embedded.in_list:
                self.assertIsInstance(elem, LazyReference)

        occ = Ocurrence(
            in_list=[animal1, animal2],
            in_embedded={"in_list": [animal1, animal2], "direct": animal1},
            direct=animal1,
        ).save()
        check_fields_type(occ)
        occ.reload()
        check_fields_type(occ)
        occ.direct = animal1.id
        occ.in_list = [animal1.id, animal2.id]
        occ.in_embedded.direct = animal1.id
        occ.in_embedded.in_list = [animal1.id, animal2.id]
        check_fields_type(occ)


class TestGenericLazyReferenceField(MongoDBTestCase):
    def test_generic_lazy_reference_simple(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        fetched_animal = p.animal.fetch()
        self.assertEqual(fetched_animal, animal)
        # `fetch` keeps the referenced document cached by default...
        animal.tag = "not so heavy"
        animal.save()
        double_fetch = p.animal.fetch()
        self.assertIs(fetched_animal, double_fetch)
        self.assertEqual(double_fetch.tag, "heavy")
        # ...unless specified otherwise
        fetch_force = p.animal.fetch(force=True)
        self.assertIsNot(fetch_force, fetched_animal)
        self.assertEqual(fetch_force.tag, "not so heavy")

    def test_generic_lazy_reference_choices(self):
        class Animal(Document):
            name = StringField()

        class Vegetal(Document):
            name = StringField()

        class Mineral(Document):
            name = StringField()

        class Ocurrence(Document):
            living_thing = GenericLazyReferenceField(choices=[Animal, Vegetal])
            thing = GenericLazyReferenceField()

        Animal.drop_collection()
        Vegetal.drop_collection()
        Mineral.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard").save()
        vegetal = Vegetal(name="Oak").save()
        mineral = Mineral(name="Granite").save()

        occ_animal = Ocurrence(living_thing=animal, thing=animal).save()
        occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save()
        with self.assertRaises(ValidationError):
            Ocurrence(living_thing=mineral).save()

        occ = Ocurrence.objects.get(living_thing=animal)
        self.assertEqual(occ, occ_animal)
        self.assertIsInstance(occ.thing, LazyReference)
        self.assertIsInstance(occ.living_thing, LazyReference)

        occ.thing = vegetal
        occ.living_thing = vegetal
        occ.save()

        occ.thing = mineral
        occ.living_thing = mineral
        with self.assertRaises(ValidationError):
            occ.save()

    def test_generic_lazy_reference_set(self):
        class Animal(Document):
            meta = {"allow_inheritance": True}

            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class SubAnimal(Animal):
            nick = StringField()

        animal = Animal(name="Leopard", tag="heavy").save()
        sub_animal = SubAnimal(nick="doggo", name="dog").save()
        for ref in (
            animal,
            LazyReference(Animal, animal.pk),
            {"_cls": "Animal", "_ref": DBRef(animal._get_collection_name(), animal.pk)},
            sub_animal,
            LazyReference(SubAnimal, sub_animal.pk),
            {
                "_cls": "SubAnimal",
                "_ref": DBRef(sub_animal._get_collection_name(), sub_animal.pk),
            },
        ):
            p = Ocurrence(person="test", animal=ref).save()
            p.reload()
            self.assertIsInstance(p.animal, (LazyReference, Document))
            p.animal.fetch()

    def test_generic_lazy_reference_bad_set(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField(choices=["Animal"])

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class BadDoc(Document):
            pass

        animal = Animal(name="Leopard", tag="heavy").save()
        baddoc = BadDoc().save()
        for bad in (42, "foo", baddoc, LazyReference(BadDoc, animal.pk)):
            with self.assertRaises(ValidationError):
                p = Ocurrence(person="test", animal=bad).save()

    def test_generic_lazy_reference_query_conversion(self):
        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = GenericLazyReferenceField()

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title="post 1", author=m1)
        post1.save()

        post2 = BlogPost(title="post 2", author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)

        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        self.assertEqual(post.id, post2.id)

    def test_generic_lazy_reference_not_set(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        Ocurrence(person="foo").save()
        p = Ocurrence.objects.get()
        self.assertIs(p.animal, None)

    def test_generic_lazy_reference_accepts_string_instead_of_class(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField("Animal")

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal().save()
        Ocurrence(animal=animal).save()
        p = Ocurrence.objects.get()
        self.assertEqual(p.animal, animal)

    def test_generic_lazy_reference_embedded(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class EmbeddedOcurrence(EmbeddedDocument):
            in_list = ListField(GenericLazyReferenceField())
            direct = GenericLazyReferenceField()

        class Ocurrence(Document):
            in_list = ListField(GenericLazyReferenceField())
            in_embedded = EmbeddedDocumentField(EmbeddedOcurrence)
            direct = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal1 = Animal(name="doggo").save()
        animal2 = Animal(name="cheeta").save()

        def check_fields_type(occ):
            self.assertIsInstance(occ.direct, LazyReference)
            for elem in occ.in_list:
                self.assertIsInstance(elem, LazyReference)
            self.assertIsInstance(occ.in_embedded.direct, LazyReference)
            for elem in occ.in_embedded.in_list:
                self.assertIsInstance(elem, LazyReference)

        occ = Ocurrence(
            in_list=[animal1, animal2],
            in_embedded={"in_list": [animal1, animal2], "direct": animal1},
            direct=animal1,
        ).save()
        check_fields_type(occ)
        occ.reload()
        check_fields_type(occ)
        animal1_ref = {
            "_cls": "Animal",
            "_ref": DBRef(animal1._get_collection_name(), animal1.pk),
        }
        animal2_ref = {
            "_cls": "Animal",
            "_ref": DBRef(animal2._get_collection_name(), animal2.pk),
        }
        occ.direct = animal1_ref
        occ.in_list = [animal1_ref, animal2_ref]
        occ.in_embedded.direct = animal1_ref
        occ.in_embedded.in_list = [animal1_ref, animal2_ref]
        check_fields_type(occ)
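A minimal sketch of the fetch() caching contract these tests verify, reusing the Animal/Ocurrence fixtures defined above (illustrative only, not part of the diff):

occ = Ocurrence.objects.get()
first = occ.animal.fetch()            # one round-trip to MongoDB
cached = occ.animal.fetch()           # same cached instance, no new query
assert first is cached
fresh = occ.animal.fetch(force=True)  # bypasses the cache and re-queries
assert fresh is not first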
tests/fields/test_long_field.py (new file, 59 lines)
@@ -0,0 +1,59 @@
# -*- coding: utf-8 -*-
import six

try:
    from bson.int64 import Int64
except ImportError:
    Int64 = long

from mongoengine import *
from mongoengine.connection import get_db

from tests.utils import MongoDBTestCase


class TestLongField(MongoDBTestCase):
    def test_long_field_is_considered_as_int64(self):
        """
        Tests that long fields are stored as long in mongo, even if the long
        value is small enough to be an int.
        """

        class TestLongFieldConsideredAsInt64(Document):
            some_long = LongField()

        doc = TestLongFieldConsideredAsInt64(some_long=42).save()
        db = get_db()
        self.assertIsInstance(
            db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64
        )
        self.assertIsInstance(doc.some_long, six.integer_types)

    def test_long_validation(self):
        """Ensure that invalid values cannot be assigned to long fields."""

        class TestDocument(Document):
            value = LongField(min_value=0, max_value=110)

        doc = TestDocument()
        doc.value = 50
        doc.validate()

        doc.value = -1
        self.assertRaises(ValidationError, doc.validate)
        doc.value = 120
        self.assertRaises(ValidationError, doc.validate)
        doc.value = "ten"
        self.assertRaises(ValidationError, doc.validate)

    def test_long_ne_operator(self):
        class TestDocument(Document):
            long_fld = LongField()

        TestDocument.drop_collection()

        TestDocument(long_fld=None).save()
        TestDocument(long_fld=1).save()

        self.assertEqual(1, TestDocument.objects(long_fld__ne=None).count())
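A short sketch of the storage guarantee checked above: LongField values land in BSON as Int64 even when they would fit in a 32-bit int (assumes the TestLongFieldConsideredAsInt64 document from the first test; illustrative only):

raw = get_db().test_long_field_considered_as_int64.find_one()
assert isinstance(raw["some_long"], Int64)  # 64-bit on the wire, plain int in Python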
tests/fields/test_map_field.py (new file, 145 lines)
@@ -0,0 +1,145 @@
# -*- coding: utf-8 -*-
import datetime

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestMapField(MongoDBTestCase):
    def test_mapfield(self):
        """Ensure that the MapField handles the declared type."""

        class Simple(Document):
            mapping = MapField(IntField())

        Simple.drop_collection()

        e = Simple()
        e.mapping["someint"] = 1
        e.save()

        with self.assertRaises(ValidationError):
            e.mapping["somestring"] = "abc"
            e.save()

        with self.assertRaises(ValidationError):

            class NoDeclaredType(Document):
                mapping = MapField()

    def test_complex_mapfield(self):
        """Ensure that the MapField can handle complex declared types."""

        class SettingBase(EmbeddedDocument):
            meta = {"allow_inheritance": True}

        class StringSetting(SettingBase):
            value = StringField()

        class IntegerSetting(SettingBase):
            value = IntField()

        class Extensible(Document):
            mapping = MapField(EmbeddedDocumentField(SettingBase))

        Extensible.drop_collection()

        e = Extensible()
        e.mapping["somestring"] = StringSetting(value="foo")
        e.mapping["someint"] = IntegerSetting(value=42)
        e.save()

        e2 = Extensible.objects.get(id=e.id)
        self.assertIsInstance(e2.mapping["somestring"], StringSetting)
        self.assertIsInstance(e2.mapping["someint"], IntegerSetting)

        with self.assertRaises(ValidationError):
            e.mapping["someint"] = 123
            e.save()

    def test_embedded_mapfield_db_field(self):
        class Embedded(EmbeddedDocument):
            number = IntField(default=0, db_field="i")

        class Test(Document):
            my_map = MapField(field=EmbeddedDocumentField(Embedded), db_field="x")

        Test.drop_collection()

        test = Test()
        test.my_map["DICTIONARY_KEY"] = Embedded(number=1)
        test.save()

        Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1)

        test = Test.objects.get()
        self.assertEqual(test.my_map["DICTIONARY_KEY"].number, 2)
        doc = self.db.test.find_one()
        self.assertEqual(doc["x"]["DICTIONARY_KEY"]["i"], 2)

    def test_mapfield_numerical_index(self):
        """Ensure that MapField accepts numeric strings as indexes."""

        class Embedded(EmbeddedDocument):
            name = StringField()

        class Test(Document):
            my_map = MapField(EmbeddedDocumentField(Embedded))

        Test.drop_collection()

        test = Test()
        test.my_map["1"] = Embedded(name="test")
        test.save()
        test.my_map["1"].name = "test updated"
        test.save()

    def test_map_field_lookup(self):
        """Ensure MapField lookups succeed on Fields without a lookup
        method.
        """

        class Action(EmbeddedDocument):
            operation = StringField()
            object = StringField()

        class Log(Document):
            name = StringField()
            visited = MapField(DateTimeField())
            actions = MapField(EmbeddedDocumentField(Action))

        Log.drop_collection()
        Log(
            name="wilson",
            visited={"friends": datetime.datetime.now()},
            actions={"friends": Action(operation="drink", object="beer")},
        ).save()

        self.assertEqual(1, Log.objects(visited__friends__exists=True).count())

        self.assertEqual(
            1,
            Log.objects(
                actions__friends__operation="drink", actions__friends__object="beer"
            ).count(),
        )

    def test_map_field_unicode(self):
        class Info(EmbeddedDocument):
            description = StringField()
            value_list = ListField(field=StringField())

        class BlogPost(Document):
            info_dict = MapField(field=EmbeddedDocumentField(Info))

        BlogPost.drop_collection()

        tree = BlogPost(info_dict={u"éééé": {"description": u"VALUE: éééé"}})

        tree.save()

        self.assertEqual(
            BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description,
            u"VALUE: éééé",
        )
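A minimal sketch of the db_field renaming that test_embedded_mapfield_db_field relies on: the map itself is stored under "x" and each embedded "number" under "i" (reuses the Test/Embedded fixtures above; illustrative only):

raw = Test._get_collection().find_one()
assert "i" in raw["x"]["DICTIONARY_KEY"]  # renamed keys, not "my_map"/"number"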
tests/fields/test_reference_field.py (new file, 218 lines)
@@ -0,0 +1,218 @@
# -*- coding: utf-8 -*-
from bson import SON, DBRef

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestReferenceField(MongoDBTestCase):
    def test_reference_validation(self):
        """Ensure that invalid document objects cannot be assigned to
        reference fields.
        """

        class User(Document):
            name = StringField()

        class BlogPost(Document):
            content = StringField()
            author = ReferenceField(User)

        User.drop_collection()
        BlogPost.drop_collection()

        # Make sure ReferenceField only accepts a document class or a string
        # with a document class name.
        self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument)

        user = User(name="Test User")

        # Ensure that the referenced object must have been saved
        post1 = BlogPost(content="Chips and gravy taste good.")
        post1.author = user
        self.assertRaises(ValidationError, post1.save)

        # Check that an invalid object type cannot be used
        post2 = BlogPost(content="Chips and chilli taste good.")
        post1.author = post2
        self.assertRaises(ValidationError, post1.validate)

        # Ensure ObjectIds are accepted as references
        user_object_id = user.pk
        post3 = BlogPost(content="Chips and curry sauce taste good.")
        post3.author = user_object_id
        post3.save()

        # Make sure referencing a saved document of the right type works
        user.save()
        post1.author = user
        post1.save()

        # Make sure referencing a saved document of the *wrong* type fails
        post2.save()
        post1.author = post2
        self.assertRaises(ValidationError, post1.validate)

    def test_objectid_reference_fields(self):
        """Make sure storing Object ID references works."""

        class Person(Document):
            name = StringField()
            parent = ReferenceField("self")

        Person.drop_collection()

        p1 = Person(name="John").save()
        Person(name="Ross", parent=p1.pk).save()

        p = Person.objects.get(name="Ross")
        self.assertEqual(p.parent, p1)

    def test_dbref_reference_fields(self):
        """Make sure storing references as bson.dbref.DBRef works."""

        class Person(Document):
            name = StringField()
            parent = ReferenceField("self", dbref=True)

        Person.drop_collection()

        p1 = Person(name="John").save()
        Person(name="Ross", parent=p1).save()

        self.assertEqual(
            Person._get_collection().find_one({"name": "Ross"})["parent"],
            DBRef("person", p1.pk),
        )

        p = Person.objects.get(name="Ross")
        self.assertEqual(p.parent, p1)

    def test_dbref_to_mongo(self):
        """Make sure that calling to_mongo on a ReferenceField which
        has dbref=False, but actually contains a DBRef, returns
        the ID of that DBRef.
        """

        class Person(Document):
            name = StringField()
            parent = ReferenceField("self", dbref=False)

        p = Person(name="Steve", parent=DBRef("person", "abcdefghijklmnop"))
        self.assertEqual(
            p.to_mongo(), SON([("name", u"Steve"), ("parent", "abcdefghijklmnop")])
        )

    def test_objectid_reference_fields_without_dbref(self):
        class Person(Document):
            name = StringField()
            parent = ReferenceField("self", dbref=False)

        Person.drop_collection()

        p1 = Person(name="John").save()
        Person(name="Ross", parent=p1).save()

        col = Person._get_collection()
        data = col.find_one({"name": "Ross"})
        self.assertEqual(data["parent"], p1.pk)

        p = Person.objects.get(name="Ross")
        self.assertEqual(p.parent, p1)

    def test_undefined_reference(self):
        """Ensure that ReferenceFields may reference undefined Documents."""

        class Product(Document):
            name = StringField()
            company = ReferenceField("Company")

        class Company(Document):
            name = StringField()

        Product.drop_collection()
        Company.drop_collection()

        ten_gen = Company(name="10gen")
        ten_gen.save()
        mongodb = Product(name="MongoDB", company=ten_gen)
        mongodb.save()

        me = Product(name="MongoEngine")
        me.save()

        obj = Product.objects(company=ten_gen).first()
        self.assertEqual(obj, mongodb)
        self.assertEqual(obj.company, ten_gen)

        obj = Product.objects(company=None).first()
        self.assertEqual(obj, me)

        obj = Product.objects.get(company=None)
        self.assertEqual(obj, me)

    def test_reference_query_conversion(self):
        """Ensure that ReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
        """

        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = ReferenceField(Member, dbref=False)

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title="post 1", author=m1)
        post1.save()

        post2 = BlogPost(title="post 2", author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)

    def test_reference_query_conversion_dbref(self):
        """Ensure that ReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
        """

        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = ReferenceField(Member, dbref=True)

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title="post 1", author=m1)
        post1.save()

        post2 = BlogPost(title="post 2", author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)
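A minimal sketch of the two storage layouts the tests above compare, reusing the Person fixtures (illustrative only):

raw = Person._get_collection().find_one({"name": "Ross"})
# With dbref=True the raw value is DBRef("person", p1.pk);
# with dbref=False it is just p1.pk (a bare ObjectId).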
tests/fields/test_sequence_field.py (new file, 281 lines)
@@ -0,0 +1,281 @@
# -*- coding: utf-8 -*-

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestSequenceField(MongoDBTestCase):
    def test_sequence_field(self):
        class Person(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        self.assertEqual(c["next"], 10)

        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, range(1, 11))

        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        self.assertEqual(c["next"], 10)

        Person.id.set_next_value(1000)
        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        self.assertEqual(c["next"], 1000)

    def test_sequence_field_get_next_value(self):
        class Person(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        self.assertEqual(Person.id.get_next_value(), 11)
        self.db["mongoengine.counters"].drop()

        self.assertEqual(Person.id.get_next_value(), 1)

        class Person(Document):
            id = SequenceField(primary_key=True, value_decorator=str)
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        self.assertEqual(Person.id.get_next_value(), "11")
        self.db["mongoengine.counters"].drop()

        self.assertEqual(Person.id.get_next_value(), "1")

    def test_sequence_field_sequence_name(self):
        class Person(Document):
            id = SequenceField(primary_key=True, sequence_name="jelly")
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
        self.assertEqual(c["next"], 10)

        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, range(1, 11))

        c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
        self.assertEqual(c["next"], 10)

        Person.id.set_next_value(1000)
        c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
        self.assertEqual(c["next"], 1000)

    def test_multiple_sequence_fields(self):
        class Person(Document):
            id = SequenceField(primary_key=True)
            counter = SequenceField()
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        self.assertEqual(c["next"], 10)

        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, range(1, 11))

        counters = [i.counter for i in Person.objects]
        self.assertEqual(counters, range(1, 11))

        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        self.assertEqual(c["next"], 10)

        Person.id.set_next_value(1000)
        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        self.assertEqual(c["next"], 1000)

        Person.counter.set_next_value(999)
        c = self.db["mongoengine.counters"].find_one({"_id": "person.counter"})
        self.assertEqual(c["next"], 999)

    def test_sequence_fields_reload(self):
        class Animal(Document):
            counter = SequenceField()
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Animal.drop_collection()

        a = Animal(name="Boi").save()

        self.assertEqual(a.counter, 1)
        a.reload()
        self.assertEqual(a.counter, 1)

        a.counter = None
        self.assertEqual(a.counter, 2)
        a.save()

        self.assertEqual(a.counter, 2)

        a = Animal.objects.first()
        self.assertEqual(a.counter, 2)
        a.reload()
        self.assertEqual(a.counter, 2)

    def test_multiple_sequence_fields_on_docs(self):
        class Animal(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        class Person(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Animal.drop_collection()
        Person.drop_collection()

        for x in range(10):
            Animal(name="Animal %s" % x).save()
            Person(name="Person %s" % x).save()

        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        self.assertEqual(c["next"], 10)

        c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"})
        self.assertEqual(c["next"], 10)

        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, range(1, 11))

        id = [i.id for i in Animal.objects]
        self.assertEqual(id, range(1, 11))

        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        self.assertEqual(c["next"], 10)

        c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"})
        self.assertEqual(c["next"], 10)

    def test_sequence_field_value_decorator(self):
        class Person(Document):
            id = SequenceField(primary_key=True, value_decorator=str)
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Person.drop_collection()

        for x in range(10):
            p = Person(name="Person %s" % x)
            p.save()

        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        self.assertEqual(c["next"], 10)

        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, map(str, range(1, 11)))

        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        self.assertEqual(c["next"], 10)

    def test_embedded_sequence_field(self):
        class Comment(EmbeddedDocument):
            id = SequenceField()
            content = StringField(required=True)

        class Post(Document):
            title = StringField(required=True)
            comments = ListField(EmbeddedDocumentField(Comment))

        self.db["mongoengine.counters"].drop()
        Post.drop_collection()

        Post(
            title="MongoEngine",
            comments=[
                Comment(content="NoSQL Rocks"),
                Comment(content="MongoEngine Rocks"),
            ],
        ).save()
        c = self.db["mongoengine.counters"].find_one({"_id": "comment.id"})
        self.assertEqual(c["next"], 2)
        post = Post.objects.first()
        self.assertEqual(1, post.comments[0].id)
        self.assertEqual(2, post.comments[1].id)

    def test_inherited_sequencefield(self):
        class Base(Document):
            name = StringField()
            counter = SequenceField()
            meta = {"abstract": True}

        class Foo(Base):
            pass

        class Bar(Base):
            pass

        bar = Bar(name="Bar")
        bar.save()

        foo = Foo(name="Foo")
        foo.save()

        counter_ids = self.db["mongoengine.counters"].find().distinct("_id")
        self.assertIn("base.counter", counter_ids)
        self.assertNotIn("foo.counter", counter_ids)
        self.assertNotIn("bar.counter", counter_ids)
        self.assertNotEqual(foo.counter, bar.counter)
        self.assertEqual(foo._fields["counter"].owner_document, Base)
        self.assertEqual(bar._fields["counter"].owner_document, Base)

    def test_no_inherited_sequencefield(self):
        class Base(Document):
            name = StringField()
            meta = {"abstract": True}

        class Foo(Base):
            counter = SequenceField()

        class Bar(Base):
            counter = SequenceField()

        bar = Bar(name="Bar")
        bar.save()

        foo = Foo(name="Foo")
        foo.save()

        counter_ids = self.db["mongoengine.counters"].find().distinct("_id")
        self.assertNotIn("base.counter", counter_ids)
        self.assertIn("foo.counter", counter_ids)
        self.assertIn("bar.counter", counter_ids)
        self.assertEqual(foo.counter, bar.counter)
        self.assertEqual(foo._fields["counter"].owner_document, Foo)
        self.assertEqual(bar._fields["counter"].owner_document, Bar)
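A hedged sketch of the counter mechanics these assertions probe: each SequenceField keeps a {"_id": "<collection>.<field>", "next": N} document in "mongoengine.counters" and bumps it atomically, roughly like this (raw pymongo shown for illustration, not the library's exact code):

from pymongo import ReturnDocument

counter = db["mongoengine.counters"].find_one_and_update(
    {"_id": "person.id"},
    {"$inc": {"next": 1}},
    upsert=True,
    return_document=ReturnDocument.AFTER,
)
next_id = counter["next"]  # the value assigned to the next saved document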
tests/fields/test_url_field.py (new file, 63 lines)
@@ -0,0 +1,63 @@
# -*- coding: utf-8 -*-
from mongoengine import *

from tests.utils import MongoDBTestCase


class TestURLField(MongoDBTestCase):
    def test_validation(self):
        """Ensure that URLFields validate urls properly."""

        class Link(Document):
            url = URLField()

        link = Link()
        link.url = "google"
        self.assertRaises(ValidationError, link.validate)

        link.url = "http://www.google.com:8080"
        link.validate()

    def test_unicode_url_validation(self):
        """Ensure unicode URLs are validated properly."""

        class Link(Document):
            url = URLField()

        link = Link()
        link.url = u"http://привет.com"

        # TODO fix URL validation - this *IS* a valid URL
        # For now we just want to make sure that the error message is correct
        with self.assertRaises(ValidationError) as ctx_err:
            link.validate()
        self.assertEqual(
            unicode(ctx_err.exception),
            u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])",
        )

    def test_url_scheme_validation(self):
        """Ensure that URLFields validate urls with specific schemes properly."""

        class Link(Document):
            url = URLField()

        class SchemeLink(Document):
            url = URLField(schemes=["ws", "irc"])

        link = Link()
        link.url = "ws://google.com"
        self.assertRaises(ValidationError, link.validate)

        scheme_link = SchemeLink()
        scheme_link.url = "ws://google.com"
        scheme_link.validate()

    def test_underscore_allowed_in_domains_names(self):
        class Link(Document):
            url = URLField()

        link = Link()
        link.url = "https://san_leandro-ca.geebo.com"
        link.validate()
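A minimal sketch of the scheme restriction tested above, reusing the SchemeLink fixture (illustrative only):

SchemeLink(url="ws://example.com").validate()  # scheme in the allowed list
# SchemeLink(url="http://example.com").validate() would raise ValidationError,
# since only "ws" and "irc" were passed as schemes.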
tests/fields/test_uuid_field.py (new file, 66 lines)
@@ -0,0 +1,66 @@
# -*- coding: utf-8 -*-
import uuid

from mongoengine import *

from tests.utils import MongoDBTestCase, get_as_pymongo


class Person(Document):
    api_key = UUIDField(binary=False)


class TestUUIDField(MongoDBTestCase):
    def test_storage(self):
        uid = uuid.uuid4()
        person = Person(api_key=uid).save()
        self.assertEqual(
            get_as_pymongo(person), {"_id": person.id, "api_key": str(uid)}
        )

    def test_field_string(self):
        """Test UUID fields storing as String."""
        Person.drop_collection()

        uu = uuid.uuid4()
        Person(api_key=uu).save()
        self.assertEqual(1, Person.objects(api_key=uu).count())
        self.assertEqual(uu, Person.objects.first().api_key)

        person = Person()
        valid = (uuid.uuid4(), uuid.uuid1())
        for api_key in valid:
            person.api_key = api_key
            person.validate()

        invalid = (
            "9d159858-549b-4975-9f98-dd2f987c113g",
            "9d159858-549b-4975-9f98-dd2f987c113",
        )
        for api_key in invalid:
            person.api_key = api_key
            self.assertRaises(ValidationError, person.validate)

    def test_field_binary(self):
        """Test UUID fields storing as Binary object."""
        Person.drop_collection()

        uu = uuid.uuid4()
        Person(api_key=uu).save()
        self.assertEqual(1, Person.objects(api_key=uu).count())
        self.assertEqual(uu, Person.objects.first().api_key)

        person = Person()
        valid = (uuid.uuid4(), uuid.uuid1())
        for api_key in valid:
            person.api_key = api_key
            person.validate()

        invalid = (
            "9d159858-549b-4975-9f98-dd2f987c113g",
            "9d159858-549b-4975-9f98-dd2f987c113",
        )
        for api_key in invalid:
            person.api_key = api_key
            self.assertRaises(ValidationError, person.validate)
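A short sketch of the storage difference the binary flag controls, using the binary=False Person fixture above (illustrative only):

uid = uuid.uuid4()
person = Person(api_key=uid).save()
assert get_as_pymongo(person)["api_key"] == str(uid)  # stored as a string
# With the default binary=True the same value is stored as BSON Binary instead.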
@@ -11,7 +11,7 @@ class PickleEmbedded(EmbeddedDocument):
 
 class PickleTest(Document):
     number = IntField()
-    string = StringField(choices=(('One', '1'), ('Two', '2')))
+    string = StringField(choices=(("One", "1"), ("Two", "2")))
     embedded = EmbeddedDocumentField(PickleEmbedded)
     lists = ListField(StringField())
     photo = FileField()
@@ -19,7 +19,7 @@ class PickleTest(Document):
 
 class NewDocumentPickleTest(Document):
     number = IntField()
-    string = StringField(choices=(('One', '1'), ('Two', '2')))
+    string = StringField(choices=(("One", "1"), ("Two", "2")))
     embedded = EmbeddedDocumentField(PickleEmbedded)
     lists = ListField(StringField())
     photo = FileField()
@@ -36,7 +36,7 @@ class PickleDynamicTest(DynamicDocument):
 
 class PickleSignalsTest(Document):
     number = IntField()
-    string = StringField(choices=(('One', '1'), ('Two', '2')))
+    string = StringField(choices=(("One", "1"), ("Two", "2")))
     embedded = EmbeddedDocumentField(PickleEmbedded)
     lists = ListField(StringField())
@@ -48,6 +48,7 @@ class PickleSignalsTest(Document):
     def post_delete(self, sender, document, **kwargs):
         pickled = pickle.dumps(document)
 
+
 signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest)
 signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest)
 
@@ -57,4 +58,4 @@ class Mixin(object):
 
 
 class Base(Document):
-    meta = {'allow_inheritance': True}
+    meta = {"allow_inheritance": True}
@@ -1,6 +1,6 @@
-from transform import *
-from field_list import *
-from queryset import *
-from visitor import *
-from geo import *
-from modify import *
+from .transform import *
+from .field_list import *
+from .queryset import *
+from .visitor import *
+from .geo import *
+from .modify import *
@@ -7,79 +7,78 @@ __all__ = ("QueryFieldListTest", "OnlyExcludeAllTest")
 
 
 class QueryFieldListTest(unittest.TestCase):
-
     def test_empty(self):
         q = QueryFieldList()
         self.assertFalse(q)
 
-        q = QueryFieldList(always_include=['_cls'])
+        q = QueryFieldList(always_include=["_cls"])
         self.assertFalse(q)
 
     def test_include_include(self):
         q = QueryFieldList()
-        q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY, _only_called=True)
-        self.assertEqual(q.as_dict(), {'a': 1, 'b': 1})
-        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
-        self.assertEqual(q.as_dict(), {'a': 1, 'b': 1, 'c': 1})
+        q += QueryFieldList(
+            fields=["a", "b"], value=QueryFieldList.ONLY, _only_called=True
+        )
+        self.assertEqual(q.as_dict(), {"a": 1, "b": 1})
+        q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
+        self.assertEqual(q.as_dict(), {"a": 1, "b": 1, "c": 1})
 
     def test_include_exclude(self):
         q = QueryFieldList()
-        q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY)
-        self.assertEqual(q.as_dict(), {'a': 1, 'b': 1})
-        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE)
-        self.assertEqual(q.as_dict(), {'a': 1})
+        q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.ONLY)
+        self.assertEqual(q.as_dict(), {"a": 1, "b": 1})
+        q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.EXCLUDE)
+        self.assertEqual(q.as_dict(), {"a": 1})
 
     def test_exclude_exclude(self):
         q = QueryFieldList()
-        q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE)
-        self.assertEqual(q.as_dict(), {'a': 0, 'b': 0})
-        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE)
-        self.assertEqual(q.as_dict(), {'a': 0, 'b': 0, 'c': 0})
+        q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.EXCLUDE)
+        self.assertEqual(q.as_dict(), {"a": 0, "b": 0})
+        q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.EXCLUDE)
+        self.assertEqual(q.as_dict(), {"a": 0, "b": 0, "c": 0})
 
     def test_exclude_include(self):
         q = QueryFieldList()
-        q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE)
-        self.assertEqual(q.as_dict(), {'a': 0, 'b': 0})
-        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
-        self.assertEqual(q.as_dict(), {'c': 1})
+        q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.EXCLUDE)
+        self.assertEqual(q.as_dict(), {"a": 0, "b": 0})
+        q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
+        self.assertEqual(q.as_dict(), {"c": 1})
 
     def test_always_include(self):
-        q = QueryFieldList(always_include=['x', 'y'])
-        q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE)
-        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
-        self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1})
+        q = QueryFieldList(always_include=["x", "y"])
+        q += QueryFieldList(fields=["a", "b", "x"], value=QueryFieldList.EXCLUDE)
+        q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
+        self.assertEqual(q.as_dict(), {"x": 1, "y": 1, "c": 1})
 
     def test_reset(self):
-        q = QueryFieldList(always_include=['x', 'y'])
-        q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE)
-        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
-        self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1})
+        q = QueryFieldList(always_include=["x", "y"])
+        q += QueryFieldList(fields=["a", "b", "x"], value=QueryFieldList.EXCLUDE)
+        q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
+        self.assertEqual(q.as_dict(), {"x": 1, "y": 1, "c": 1})
         q.reset()
         self.assertFalse(q)
-        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
-        self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'b': 1, 'c': 1})
+        q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
+        self.assertEqual(q.as_dict(), {"x": 1, "y": 1, "b": 1, "c": 1})
 
     def test_using_a_slice(self):
         q = QueryFieldList()
-        q += QueryFieldList(fields=['a'], value={"$slice": 5})
-        self.assertEqual(q.as_dict(), {'a': {"$slice": 5}})
+        q += QueryFieldList(fields=["a"], value={"$slice": 5})
+        self.assertEqual(q.as_dict(), {"a": {"$slice": 5}})
 
 
 class OnlyExcludeAllTest(unittest.TestCase):
-
     def setUp(self):
-        connect(db='mongoenginetest')
+        connect(db="mongoenginetest")
 
         class Person(Document):
             name = StringField()
             age = IntField()
-            meta = {'allow_inheritance': True}
+            meta = {"allow_inheritance": True}
 
         Person.drop_collection()
         self.Person = Person
 
     def test_mixing_only_exclude(self):
 
         class MyDoc(Document):
             a = StringField()
             b = StringField()
@@ -88,32 +87,32 @@ class OnlyExcludeAllTest(unittest.TestCase):
             e = StringField()
             f = StringField()
 
-        include = ['a', 'b', 'c', 'd', 'e']
-        exclude = ['d', 'e']
-        only = ['b', 'c']
+        include = ["a", "b", "c", "d", "e"]
+        exclude = ["d", "e"]
+        only = ["b", "c"]
 
         qs = MyDoc.objects.fields(**{i: 1 for i in include})
-        self.assertEqual(qs._loaded_fields.as_dict(),
-                         {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1})
+        self.assertEqual(
+            qs._loaded_fields.as_dict(), {"a": 1, "b": 1, "c": 1, "d": 1, "e": 1}
+        )
         qs = qs.only(*only)
-        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
+        self.assertEqual(qs._loaded_fields.as_dict(), {"b": 1, "c": 1})
         qs = qs.exclude(*exclude)
-        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
+        self.assertEqual(qs._loaded_fields.as_dict(), {"b": 1, "c": 1})
 
         qs = MyDoc.objects.fields(**{i: 1 for i in include})
         qs = qs.exclude(*exclude)
-        self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
+        self.assertEqual(qs._loaded_fields.as_dict(), {"a": 1, "b": 1, "c": 1})
         qs = qs.only(*only)
-        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
+        self.assertEqual(qs._loaded_fields.as_dict(), {"b": 1, "c": 1})
 
         qs = MyDoc.objects.exclude(*exclude)
         qs = qs.fields(**{i: 1 for i in include})
-        self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
+        self.assertEqual(qs._loaded_fields.as_dict(), {"a": 1, "b": 1, "c": 1})
        qs = qs.only(*only)
-        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
+        self.assertEqual(qs._loaded_fields.as_dict(), {"b": 1, "c": 1})
 
     def test_slicing(self):
 
         class MyDoc(Document):
             a = ListField()
             b = ListField()
@@ -122,24 +121,23 @@ class OnlyExcludeAllTest(unittest.TestCase):
             e = ListField()
             f = ListField()
 
-        include = ['a', 'b', 'c', 'd', 'e']
-        exclude = ['d', 'e']
-        only = ['b', 'c']
+        include = ["a", "b", "c", "d", "e"]
+        exclude = ["d", "e"]
+        only = ["b", "c"]
 
         qs = MyDoc.objects.fields(**{i: 1 for i in include})
         qs = qs.exclude(*exclude)
         qs = qs.only(*only)
         qs = qs.fields(slice__b=5)
-        self.assertEqual(qs._loaded_fields.as_dict(),
-                         {'b': {'$slice': 5}, 'c': 1})
+        self.assertEqual(qs._loaded_fields.as_dict(), {"b": {"$slice": 5}, "c": 1})
 
         qs = qs.fields(slice__c=[5, 1])
-        self.assertEqual(qs._loaded_fields.as_dict(),
-                         {'b': {'$slice': 5}, 'c': {'$slice': [5, 1]}})
+        self.assertEqual(
+            qs._loaded_fields.as_dict(), {"b": {"$slice": 5}, "c": {"$slice": [5, 1]}}
+        )
 
-        qs = qs.exclude('c')
-        self.assertEqual(qs._loaded_fields.as_dict(),
-                         {'b': {'$slice': 5}})
+        qs = qs.exclude("c")
+        self.assertEqual(qs._loaded_fields.as_dict(), {"b": {"$slice": 5}})
 
     def test_mix_slice_with_other_fields(self):
         class MyDoc(Document):
@@ -148,43 +146,42 @@ class OnlyExcludeAllTest(unittest.TestCase):
             c = ListField()
 
         qs = MyDoc.objects.fields(a=1, b=0, slice__c=2)
-        self.assertEqual(qs._loaded_fields.as_dict(),
-                         {'c': {'$slice': 2}, 'a': 1})
+        self.assertEqual(qs._loaded_fields.as_dict(), {"c": {"$slice": 2}, "a": 1})
 
     def test_only(self):
         """Ensure that QuerySet.only only returns the requested fields.
         """
-        person = self.Person(name='test', age=25)
+        person = self.Person(name="test", age=25)
         person.save()
 
-        obj = self.Person.objects.only('name').get()
+        obj = self.Person.objects.only("name").get()
         self.assertEqual(obj.name, person.name)
         self.assertEqual(obj.age, None)
 
-        obj = self.Person.objects.only('age').get()
+        obj = self.Person.objects.only("age").get()
         self.assertEqual(obj.name, None)
         self.assertEqual(obj.age, person.age)
 
-        obj = self.Person.objects.only('name', 'age').get()
+        obj = self.Person.objects.only("name", "age").get()
         self.assertEqual(obj.name, person.name)
         self.assertEqual(obj.age, person.age)
 
-        obj = self.Person.objects.only(*('id', 'name',)).get()
+        obj = self.Person.objects.only(*("id", "name")).get()
         self.assertEqual(obj.name, person.name)
         self.assertEqual(obj.age, None)
 
         # Check polymorphism still works
         class Employee(self.Person):
-            salary = IntField(db_field='wage')
+            salary = IntField(db_field="wage")
 
-        employee = Employee(name='test employee', age=40, salary=30000)
+        employee = Employee(name="test employee", age=40, salary=30000)
         employee.save()
 
-        obj = self.Person.objects(id=employee.id).only('age').get()
-        self.assertTrue(isinstance(obj, Employee))
+        obj = self.Person.objects(id=employee.id).only("age").get()
+        self.assertIsInstance(obj, Employee)
 
         # Check field names are looked up properly
-        obj = Employee.objects(id=employee.id).only('salary').get()
+        obj = Employee.objects(id=employee.id).only("salary").get()
         self.assertEqual(obj.salary, employee.salary)
         self.assertEqual(obj.name, None)
 
@@ -197,39 +194,52 @@ class OnlyExcludeAllTest(unittest.TestCase):
             title = StringField()
             text = StringField()
 
         class VariousData(EmbeddedDocument):
             some = BooleanField()
 
         class BlogPost(Document):
             content = StringField()
             author = EmbeddedDocumentField(User)
             comments = ListField(EmbeddedDocumentField(Comment))
             various = MapField(field=EmbeddedDocumentField(VariousData))
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost(content='Had a good coffee today...')
|
||||
post.author = User(name='Test User')
|
||||
post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')]
|
||||
post = BlogPost(
|
||||
content="Had a good coffee today...",
|
||||
various={"test_dynamic": {"some": True}},
|
||||
)
|
||||
post.author = User(name="Test User")
|
||||
post.comments = [
|
||||
Comment(title="I aggree", text="Great post!"),
|
||||
Comment(title="Coffee", text="I hate coffee"),
|
||||
]
|
||||
post.save()
|
||||
|
||||
obj = BlogPost.objects.only('author.name',).get()
|
||||
obj = BlogPost.objects.only("author.name").get()
|
||||
self.assertEqual(obj.content, None)
|
||||
self.assertEqual(obj.author.email, None)
|
||||
self.assertEqual(obj.author.name, 'Test User')
|
||||
self.assertEqual(obj.author.name, "Test User")
|
||||
self.assertEqual(obj.comments, [])
|
||||
|
||||
obj = BlogPost.objects.only('content', 'comments.title',).get()
|
||||
self.assertEqual(obj.content, 'Had a good coffee today...')
|
||||
obj = BlogPost.objects.only("various.test_dynamic.some").get()
|
||||
self.assertEqual(obj.various["test_dynamic"].some, True)
|
||||
|
||||
obj = BlogPost.objects.only("content", "comments.title").get()
|
||||
self.assertEqual(obj.content, "Had a good coffee today...")
|
||||
self.assertEqual(obj.author, None)
|
||||
self.assertEqual(obj.comments[0].title, 'I aggree')
|
||||
self.assertEqual(obj.comments[1].title, 'Coffee')
|
||||
self.assertEqual(obj.comments[0].title, "I aggree")
|
||||
self.assertEqual(obj.comments[1].title, "Coffee")
|
||||
self.assertEqual(obj.comments[0].text, None)
|
||||
self.assertEqual(obj.comments[1].text, None)
|
||||
|
||||
obj = BlogPost.objects.only('comments',).get()
|
||||
obj = BlogPost.objects.only("comments").get()
|
||||
self.assertEqual(obj.content, None)
|
||||
self.assertEqual(obj.author, None)
|
||||
self.assertEqual(obj.comments[0].title, 'I aggree')
|
||||
self.assertEqual(obj.comments[1].title, 'Coffee')
|
||||
self.assertEqual(obj.comments[0].text, 'Great post!')
|
||||
self.assertEqual(obj.comments[1].text, 'I hate coffee')
|
||||
self.assertEqual(obj.comments[0].title, "I aggree")
|
||||
self.assertEqual(obj.comments[1].title, "Coffee")
|
||||
self.assertEqual(obj.comments[0].text, "Great post!")
|
||||
self.assertEqual(obj.comments[1].text, "I hate coffee")
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
@@ -249,15 +259,18 @@ class OnlyExcludeAllTest(unittest.TestCase):
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost(content='Had a good coffee today...')
|
||||
post.author = User(name='Test User')
|
||||
post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')]
|
||||
post = BlogPost(content="Had a good coffee today...")
|
||||
post.author = User(name="Test User")
|
||||
post.comments = [
|
||||
Comment(title="I aggree", text="Great post!"),
|
||||
Comment(title="Coffee", text="I hate coffee"),
|
||||
]
|
||||
post.save()
|
||||
|
||||
obj = BlogPost.objects.exclude('author', 'comments.text').get()
|
||||
obj = BlogPost.objects.exclude("author", "comments.text").get()
|
||||
self.assertEqual(obj.author, None)
|
||||
self.assertEqual(obj.content, 'Had a good coffee today...')
|
||||
self.assertEqual(obj.comments[0].title, 'I aggree')
|
||||
self.assertEqual(obj.content, "Had a good coffee today...")
|
||||
self.assertEqual(obj.comments[0].title, "I aggree")
|
||||
self.assertEqual(obj.comments[0].text, None)
|
||||
|
||||
BlogPost.drop_collection()
|
||||
@@ -276,32 +289,43 @@ class OnlyExcludeAllTest(unittest.TestCase):
|
||||
attachments = ListField(EmbeddedDocumentField(Attachment))
|
||||
|
||||
Email.drop_collection()
|
||||
email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain')
|
||||
email = Email(
|
||||
sender="me",
|
||||
to="you",
|
||||
subject="From Russia with Love",
|
||||
body="Hello!",
|
||||
content_type="text/plain",
|
||||
)
|
||||
email.attachments = [
|
||||
Attachment(name='file1.doc', content='ABC'),
|
||||
Attachment(name='file2.doc', content='XYZ'),
|
||||
Attachment(name="file1.doc", content="ABC"),
|
||||
Attachment(name="file2.doc", content="XYZ"),
|
||||
]
|
||||
email.save()
|
||||
|
||||
obj = Email.objects.exclude('content_type').exclude('body').get()
|
||||
self.assertEqual(obj.sender, 'me')
|
||||
self.assertEqual(obj.to, 'you')
|
||||
self.assertEqual(obj.subject, 'From Russia with Love')
|
||||
obj = Email.objects.exclude("content_type").exclude("body").get()
|
||||
self.assertEqual(obj.sender, "me")
|
||||
self.assertEqual(obj.to, "you")
|
||||
self.assertEqual(obj.subject, "From Russia with Love")
|
||||
self.assertEqual(obj.body, None)
|
||||
self.assertEqual(obj.content_type, None)
|
||||
|
||||
obj = Email.objects.only('sender', 'to').exclude('body', 'sender').get()
|
||||
obj = Email.objects.only("sender", "to").exclude("body", "sender").get()
|
||||
self.assertEqual(obj.sender, None)
|
||||
self.assertEqual(obj.to, 'you')
|
||||
self.assertEqual(obj.to, "you")
|
||||
self.assertEqual(obj.subject, None)
|
||||
self.assertEqual(obj.body, None)
|
||||
self.assertEqual(obj.content_type, None)
|
||||
|
||||
obj = Email.objects.exclude('attachments.content').exclude('body').only('to', 'attachments.name').get()
|
||||
self.assertEqual(obj.attachments[0].name, 'file1.doc')
|
||||
obj = (
|
||||
Email.objects.exclude("attachments.content")
|
||||
.exclude("body")
|
||||
.only("to", "attachments.name")
|
||||
.get()
|
||||
)
|
||||
self.assertEqual(obj.attachments[0].name, "file1.doc")
|
||||
self.assertEqual(obj.attachments[0].content, None)
|
||||
self.assertEqual(obj.sender, None)
|
||||
self.assertEqual(obj.to, 'you')
|
||||
self.assertEqual(obj.to, "you")
|
||||
self.assertEqual(obj.subject, None)
|
||||
self.assertEqual(obj.body, None)
|
||||
self.assertEqual(obj.content_type, None)
|
||||
@@ -309,7 +333,6 @@ class OnlyExcludeAllTest(unittest.TestCase):
|
||||
Email.drop_collection()
|
||||
|
||||
def test_all_fields(self):
|
||||
|
||||
class Email(Document):
|
||||
sender = StringField()
|
||||
to = StringField()
|
||||
@@ -319,21 +342,33 @@ class OnlyExcludeAllTest(unittest.TestCase):
|
||||
|
||||
Email.drop_collection()
|
||||
|
||||
email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain')
|
||||
email = Email(
|
||||
sender="me",
|
||||
to="you",
|
||||
subject="From Russia with Love",
|
||||
body="Hello!",
|
||||
content_type="text/plain",
|
||||
)
|
||||
email.save()
|
||||
|
||||
obj = Email.objects.exclude('content_type', 'body').only('to', 'body').all_fields().get()
|
||||
self.assertEqual(obj.sender, 'me')
|
||||
self.assertEqual(obj.to, 'you')
|
||||
self.assertEqual(obj.subject, 'From Russia with Love')
|
||||
self.assertEqual(obj.body, 'Hello!')
|
||||
self.assertEqual(obj.content_type, 'text/plain')
|
||||
obj = (
|
||||
Email.objects.exclude("content_type", "body")
|
||||
.only("to", "body")
|
||||
.all_fields()
|
||||
.get()
|
||||
)
|
||||
self.assertEqual(obj.sender, "me")
|
||||
self.assertEqual(obj.to, "you")
|
||||
self.assertEqual(obj.subject, "From Russia with Love")
|
||||
self.assertEqual(obj.body, "Hello!")
|
||||
self.assertEqual(obj.content_type, "text/plain")
|
||||
|
||||
Email.drop_collection()
|
||||
|
||||
def test_slicing_fields(self):
|
||||
"""Ensure that query slicing an array works.
|
||||
"""
|
||||
|
||||
class Numbers(Document):
|
||||
n = ListField(IntField())
|
||||
|
||||
@@ -406,13 +441,11 @@ class OnlyExcludeAllTest(unittest.TestCase):
|
||||
numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get()
|
||||
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])
|
||||
|
||||
|
||||
def test_exclude_from_subclasses_docs(self):
|
||||
|
||||
class Base(Document):
|
||||
username = StringField()
|
||||
|
||||
meta = {'allow_inheritance': True}
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class Anon(Base):
|
||||
anon = BooleanField()
|
||||
@@ -429,5 +462,6 @@ class OnlyExcludeAllTest(unittest.TestCase):
|
||||
|
||||
self.assertRaises(LookUpError, Base.objects.exclude, "made_up")
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
||||
@@ -3,16 +3,16 @@ import unittest

from mongoengine import *

from tests.utils import MongoDBTestCase, needs_mongodb_v3
from tests.utils import MongoDBTestCase


__all__ = ("GeoQueriesTest",)


class GeoQueriesTest(MongoDBTestCase):

    def _create_event_data(self, point_field_class=GeoPointField):
        """Create some sample data re-used in many of the tests below."""

        class Event(Document):
            title = StringField()
            date = DateTimeField()
@@ -28,15 +28,18 @@ class GeoQueriesTest(MongoDBTestCase):
        event1 = Event.objects.create(
            title="Coltrane Motion @ Double Door",
            date=datetime.datetime.now() - datetime.timedelta(days=1),
            location=[-87.677137, 41.909889])
            location=[-87.677137, 41.909889],
        )
        event2 = Event.objects.create(
            title="Coltrane Motion @ Bottom of the Hill",
            date=datetime.datetime.now() - datetime.timedelta(days=10),
            location=[-122.4194155, 37.7749295])
            location=[-122.4194155, 37.7749295],
        )
        event3 = Event.objects.create(
            title="Coltrane Motion @ Empty Bottle",
            date=datetime.datetime.now(),
            location=[-87.686638, 41.900474])
            location=[-87.686638, 41.900474],
        )

        return event1, event2, event3

@@ -65,14 +68,10 @@ class GeoQueriesTest(MongoDBTestCase):

        # find events within 10 degrees of san francisco
        point = [-122.415579, 37.7566023]
        events = self.Event.objects(location__near=point,
                                    location__max_distance=10)
        events = self.Event.objects(location__near=point, location__max_distance=10)
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0], event2)

    # $minDistance was added in MongoDB v2.6, but continued being buggy
    # until v3.0; skip for older versions
    @needs_mongodb_v3
    def test_near_and_min_distance(self):
        """Ensure the "min_distance" operator works alongside the "near"
        operator.
@@ -81,8 +80,7 @@ class GeoQueriesTest(MongoDBTestCase):

        # find events at least 10 degrees away of san francisco
        point = [-122.415579, 37.7566023]
        events = self.Event.objects(location__near=point,
                                    location__min_distance=10)
        events = self.Event.objects(location__near=point, location__min_distance=10)
        self.assertEqual(events.count(), 2)

    def test_within_distance(self):
@@ -91,31 +89,27 @@ class GeoQueriesTest(MongoDBTestCase):

        # find events within 5 degrees of pitchfork office, chicago
        point_and_distance = [[-87.67892, 41.9120459], 5]
        events = self.Event.objects(
            location__within_distance=point_and_distance)
        events = self.Event.objects(location__within_distance=point_and_distance)
        self.assertEqual(events.count(), 2)
        events = list(events)
        self.assertTrue(event2 not in events)
        self.assertTrue(event1 in events)
        self.assertTrue(event3 in events)
        self.assertNotIn(event2, events)
        self.assertIn(event1, events)
        self.assertIn(event3, events)

        # find events within 10 degrees of san francisco
        point_and_distance = [[-122.415579, 37.7566023], 10]
        events = self.Event.objects(
            location__within_distance=point_and_distance)
        events = self.Event.objects(location__within_distance=point_and_distance)
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0], event2)

        # find events within 1 degree of greenpoint, broolyn, nyc, ny
        point_and_distance = [[-73.9509714, 40.7237134], 1]
        events = self.Event.objects(
            location__within_distance=point_and_distance)
        events = self.Event.objects(location__within_distance=point_and_distance)
        self.assertEqual(events.count(), 0)

        # ensure ordering is respected by "within_distance"
        point_and_distance = [[-87.67892, 41.9120459], 10]
        events = self.Event.objects(
            location__within_distance=point_and_distance)
        events = self.Event.objects(location__within_distance=point_and_distance)
        events = events.order_by("-date")
        self.assertEqual(events.count(), 2)
        self.assertEqual(events[0], event3)
@@ -148,7 +142,7 @@ class GeoQueriesTest(MongoDBTestCase):
        polygon2 = [
            (-1.742249, 54.033586),
            (-1.225891, 52.792797),
            (-4.40094, 53.389881)
            (-4.40094, 53.389881),
        ]
        events = self.Event.objects(location__within_polygon=polygon2)
        self.assertEqual(events.count(), 0)
@@ -157,9 +151,7 @@ class GeoQueriesTest(MongoDBTestCase):
        """Make sure the "near" operator works with a PointField, which
        corresponds to a 2dsphere index.
        """
        event1, event2, event3 = self._create_event_data(
            point_field_class=PointField
        )
        event1, event2, event3 = self._create_event_data(point_field_class=PointField)

        # find all events "near" pitchfork office, chicago.
        # note that "near" will show the san francisco event, too,
@@ -178,26 +170,23 @@ class GeoQueriesTest(MongoDBTestCase):
        """Ensure the "max_distance" operator works alongside the "near"
        operator with a 2dsphere index.
        """
        event1, event2, event3 = self._create_event_data(
            point_field_class=PointField
        )
        event1, event2, event3 = self._create_event_data(point_field_class=PointField)

        # find events within 10km of san francisco
        point = [-122.415579, 37.7566023]
        events = self.Event.objects(location__near=point,
                                    location__max_distance=10000)
        events = self.Event.objects(location__near=point, location__max_distance=10000)
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0], event2)

        # find events within 1km of greenpoint, broolyn, nyc, ny
        events = self.Event.objects(location__near=[-73.9509714, 40.7237134],
                                    location__max_distance=1000)
        events = self.Event.objects(
            location__near=[-73.9509714, 40.7237134], location__max_distance=1000
        )
        self.assertEqual(events.count(), 0)

        # ensure ordering is respected by "near"
        events = self.Event.objects(
            location__near=[-87.67892, 41.9120459],
            location__max_distance=10000
            location__near=[-87.67892, 41.9120459], location__max_distance=10000
        ).order_by("-date")
        self.assertEqual(events.count(), 2)
        self.assertEqual(events[0], event3)
@@ -206,9 +195,7 @@ class GeoQueriesTest(MongoDBTestCase):
        """Ensure the "geo_within_box" operator works with a 2dsphere
        index.
        """
        event1, event2, event3 = self._create_event_data(
            point_field_class=PointField
        )
        event1, event2, event3 = self._create_event_data(point_field_class=PointField)

        # check that within_box works
        box = [(-125.0, 35.0), (-100.0, 40.0)]
@@ -220,9 +207,7 @@ class GeoQueriesTest(MongoDBTestCase):
        """Ensure the "geo_within_polygon" operator works with a
        2dsphere index.
        """
        event1, event2, event3 = self._create_event_data(
            point_field_class=PointField
        )
        event1, event2, event3 = self._create_event_data(point_field_class=PointField)

        polygon = [
            (-87.694445, 41.912114),
@@ -238,35 +223,29 @@ class GeoQueriesTest(MongoDBTestCase):
        polygon2 = [
            (-1.742249, 54.033586),
            (-1.225891, 52.792797),
            (-4.40094, 53.389881)
            (-4.40094, 53.389881),
        ]
        events = self.Event.objects(location__geo_within_polygon=polygon2)
        self.assertEqual(events.count(), 0)

    # $minDistance was added in MongoDB v2.6, but continued being buggy
    # until v3.0; skip for older versions
    @needs_mongodb_v3
    def test_2dsphere_near_and_min_max_distance(self):
        """Ensure "min_distace" and "max_distance" operators work well
        together with the "near" operator in a 2dsphere index.
        """
        event1, event2, event3 = self._create_event_data(
            point_field_class=PointField
        )
        event1, event2, event3 = self._create_event_data(point_field_class=PointField)

        # ensure min_distance and max_distance combine well
        events = self.Event.objects(
            location__near=[-87.67892, 41.9120459],
            location__min_distance=1000,
            location__max_distance=10000
            location__max_distance=10000,
        ).order_by("-date")
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0], event3)

        # ensure ordering is respected by "near" with "min_distance"
        events = self.Event.objects(
            location__near=[-87.67892, 41.9120459],
            location__min_distance=10000
            location__near=[-87.67892, 41.9120459], location__min_distance=10000
        ).order_by("-date")
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0], event2)
@@ -275,24 +254,22 @@ class GeoQueriesTest(MongoDBTestCase):
        """Make sure the "geo_within_center" operator works with a
        2dsphere index.
        """
        event1, event2, event3 = self._create_event_data(
            point_field_class=PointField
        )
        event1, event2, event3 = self._create_event_data(point_field_class=PointField)

        # find events within 5 degrees of pitchfork office, chicago
        point_and_distance = [[-87.67892, 41.9120459], 2]
        events = self.Event.objects(
            location__geo_within_center=point_and_distance)
        events = self.Event.objects(location__geo_within_center=point_and_distance)
        self.assertEqual(events.count(), 2)
        events = list(events)
        self.assertTrue(event2 not in events)
        self.assertTrue(event1 in events)
        self.assertTrue(event3 in events)
        self.assertNotIn(event2, events)
        self.assertIn(event1, events)
        self.assertIn(event3, events)

    def _test_embedded(self, point_field_class):
        """Helper test method ensuring given point field class works
        well in an embedded document.
        """

        class Venue(EmbeddedDocument):
            location = point_field_class()
            name = StringField()
@@ -306,12 +283,11 @@ class GeoQueriesTest(MongoDBTestCase):
        venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889])
        venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295])

        event1 = Event(title="Coltrane Motion @ Double Door",
                       venue=venue1).save()
        event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
                       venue=venue2).save()
        event3 = Event(title="Coltrane Motion @ Empty Bottle",
                       venue=venue1).save()
        event1 = Event(title="Coltrane Motion @ Double Door", venue=venue1).save()
        event2 = Event(
            title="Coltrane Motion @ Bottom of the Hill", venue=venue2
        ).save()
        event3 = Event(title="Coltrane Motion @ Empty Bottle", venue=venue1).save()

        # find all events "near" pitchfork office, chicago.
        # note that "near" will show the san francisco event, too,
@@ -328,10 +304,9 @@ class GeoQueriesTest(MongoDBTestCase):
        """Make sure PointField works properly in an embedded document."""
        self._test_embedded(point_field_class=PointField)

    # Needs MongoDB > 2.6.4 https://jira.mongodb.org/browse/SERVER-14039
    @needs_mongodb_v3
    def test_spherical_geospatial_operators(self):
        """Ensure that spherical geospatial queries are working."""

        class Point(Document):
            location = GeoPointField()

@@ -351,26 +326,26 @@ class GeoQueriesTest(MongoDBTestCase):

        # Same behavior for _within_spherical_distance
        points = Point.objects(
            location__within_spherical_distance=[
                [-122, 37.5],
                60 / earth_radius
            ]
            location__within_spherical_distance=[[-122, 37.5], 60 / earth_radius]
        )
        self.assertEqual(points.count(), 2)

        points = Point.objects(location__near_sphere=[-122, 37.5],
                               location__max_distance=60 / earth_radius)
        points = Point.objects(
            location__near_sphere=[-122, 37.5], location__max_distance=60 / earth_radius
        )
        self.assertEqual(points.count(), 2)

        # Test query works with max_distance, being farer from one point
        points = Point.objects(location__near_sphere=[-122, 37.8],
                               location__max_distance=60 / earth_radius)
        points = Point.objects(
            location__near_sphere=[-122, 37.8], location__max_distance=60 / earth_radius
        )
        close_point = points.first()
        self.assertEqual(points.count(), 1)

        # Test query works with min_distance, being farer from one point
        points = Point.objects(location__near_sphere=[-122, 37.8],
                               location__min_distance=60 / earth_radius)
        points = Point.objects(
            location__near_sphere=[-122, 37.8], location__min_distance=60 / earth_radius
        )
        self.assertEqual(points.count(), 1)
        far_point = points.first()
        self.assertNotEqual(close_point, far_point)
@@ -392,10 +367,7 @@ class GeoQueriesTest(MongoDBTestCase):
        # Finds only one point because only the first point is within 60km of
        # the reference point to the south.
        points = Point.objects(
            location__within_spherical_distance=[
                [-122, 36.5],
                60 / earth_radius
            ]
            location__within_spherical_distance=[[-122, 36.5], 60 / earth_radius]
        )
        self.assertEqual(points.count(), 1)
        self.assertEqual(points[0].id, south_point.id)
@@ -421,8 +393,10 @@ class GeoQueriesTest(MongoDBTestCase):
        self.assertEqual(1, roads)

        # Within
        polygon = {"type": "Polygon",
                   "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
        polygon = {
            "type": "Polygon",
            "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]],
        }
        roads = Road.objects.filter(line__geo_within=polygon["coordinates"]).count()
        self.assertEqual(1, roads)

@@ -433,8 +407,7 @@ class GeoQueriesTest(MongoDBTestCase):
        self.assertEqual(1, roads)

        # Intersects
        line = {"type": "LineString",
                "coordinates": [[40, 5], [40, 6]]}
        line = {"type": "LineString", "coordinates": [[40, 5], [40, 6]]}
        roads = Road.objects.filter(line__geo_intersects=line["coordinates"]).count()
        self.assertEqual(1, roads)

@@ -444,8 +417,10 @@ class GeoQueriesTest(MongoDBTestCase):
        roads = Road.objects.filter(line__geo_intersects={"$geometry": line}).count()
        self.assertEqual(1, roads)

        polygon = {"type": "Polygon",
                   "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
        polygon = {
            "type": "Polygon",
            "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]],
        }
        roads = Road.objects.filter(line__geo_intersects=polygon["coordinates"]).count()
        self.assertEqual(1, roads)

@@ -476,8 +451,10 @@ class GeoQueriesTest(MongoDBTestCase):
        self.assertEqual(1, roads)

        # Within
        polygon = {"type": "Polygon",
                   "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
        polygon = {
            "type": "Polygon",
            "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]],
        }
        roads = Road.objects.filter(poly__geo_within=polygon["coordinates"]).count()
        self.assertEqual(1, roads)

@@ -488,8 +465,7 @@ class GeoQueriesTest(MongoDBTestCase):
        self.assertEqual(1, roads)

        # Intersects
        line = {"type": "LineString",
                "coordinates": [[40, 5], [41, 6]]}
        line = {"type": "LineString", "coordinates": [[40, 5], [41, 6]]}
        roads = Road.objects.filter(poly__geo_intersects=line["coordinates"]).count()
        self.assertEqual(1, roads)

@@ -499,8 +475,10 @@ class GeoQueriesTest(MongoDBTestCase):
        roads = Road.objects.filter(poly__geo_intersects={"$geometry": line}).count()
        self.assertEqual(1, roads)

        polygon = {"type": "Polygon",
                   "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
        polygon = {
            "type": "Polygon",
            "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]],
        }
        roads = Road.objects.filter(poly__geo_intersects=polygon["coordinates"]).count()
        self.assertEqual(1, roads)

@@ -510,17 +488,35 @@ class GeoQueriesTest(MongoDBTestCase):
        roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count()
        self.assertEqual(1, roads)

    def test_aspymongo_with_only(self):
        """Ensure as_pymongo works with only"""

        class Place(Document):
            location = PointField()

        Place.drop_collection()
        p = Place(location=[24.946861267089844, 60.16311983618494])
        p.save()
        qs = Place.objects().only("location")
        self.assertDictEqual(
            qs.as_pymongo()[0]["location"],
            {
                u"type": u"Point",
                u"coordinates": [24.946861267089844, 60.16311983618494],
            },
        )

    def test_2dsphere_point_sets_correctly(self):
        class Location(Document):
            loc = PointField()

        Location.drop_collection()

        Location(loc=[1,2]).save()
        Location(loc=[1, 2]).save()
        loc = Location.objects.as_pymongo()[0]
        self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [1, 2]})

        Location.objects.update(set__loc=[2,1])
        Location.objects.update(set__loc=[2, 1])
        loc = Location.objects.as_pymongo()[0]
        self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [2, 1]})

@@ -532,11 +528,15 @@ class GeoQueriesTest(MongoDBTestCase):

        Location(line=[[1, 2], [2, 2]]).save()
        loc = Location.objects.as_pymongo()[0]
        self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[1, 2], [2, 2]]})
        self.assertEqual(
            loc["line"], {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}
        )

        Location.objects.update(set__line=[[2, 1], [1, 2]])
        loc = Location.objects.as_pymongo()[0]
        self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[2, 1], [1, 2]]})
        self.assertEqual(
            loc["line"], {"type": "LineString", "coordinates": [[2, 1], [1, 2]]}
        )

    def test_geojson_PolygonField(self):
        class Location(Document):
@@ -546,12 +546,18 @@ class GeoQueriesTest(MongoDBTestCase):

        Location(poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save()
        loc = Location.objects.as_pymongo()[0]
        self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})
        self.assertEqual(
            loc["poly"],
            {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]},
        )

        Location.objects.update(set__poly=[[[40, 4], [40, 6], [41, 6], [40, 4]]])
        loc = Location.objects.as_pymongo()[0]
        self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]]})
        self.assertEqual(
            loc["poly"],
            {"type": "Polygon", "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]]},
        )


if __name__ == '__main__':
if __name__ == "__main__":
    unittest.main()

@@ -1,6 +1,6 @@
import unittest

from mongoengine import connect, Document, IntField
from mongoengine import connect, Document, IntField, StringField, ListField

__all__ = ("FindAndModifyTest",)

@@ -11,7 +11,6 @@ class Doc(Document):


class FindAndModifyTest(unittest.TestCase):

    def setUp(self):
        connect(db="mongoenginetest")
        Doc.drop_collection()
@@ -82,9 +81,14 @@ class FindAndModifyTest(unittest.TestCase):

        old_doc = Doc.objects().order_by("-id").modify(set__value=-1)
        self.assertEqual(old_doc.to_json(), doc.to_json())
        self.assertDbEqual([
            {"_id": 0, "value": 3}, {"_id": 1, "value": 2},
            {"_id": 2, "value": 1}, {"_id": 3, "value": -1}])
        self.assertDbEqual(
            [
                {"_id": 0, "value": 3},
                {"_id": 1, "value": 2},
                {"_id": 2, "value": 1},
                {"_id": 3, "value": -1},
            ]
        )

    def test_modify_with_fields(self):
        Doc(id=0, value=0).save()
@@ -94,6 +98,34 @@ class FindAndModifyTest(unittest.TestCase):
        self.assertEqual(old_doc.to_mongo(), {"_id": 1})
        self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])

    def test_modify_with_push(self):
        class BlogPost(Document):
            tags = ListField(StringField())

if __name__ == '__main__':
        BlogPost.drop_collection()

        blog = BlogPost.objects.create()

        # Push a new tag via modify with new=False (default).
        BlogPost(id=blog.id).modify(push__tags="code")
        self.assertEqual(blog.tags, [])
        blog.reload()
        self.assertEqual(blog.tags, ["code"])

        # Push a new tag via modify with new=True.
        blog = BlogPost.objects(id=blog.id).modify(push__tags="java", new=True)
        self.assertEqual(blog.tags, ["code", "java"])

        # Push a new tag with a positional argument.
        blog = BlogPost.objects(id=blog.id).modify(push__tags__0="python", new=True)
        self.assertEqual(blog.tags, ["python", "code", "java"])

        # Push multiple new tags with a positional argument.
        blog = BlogPost.objects(id=blog.id).modify(
            push__tags__1=["go", "rust"], new=True
        )
        self.assertEqual(blog.tags, ["python", "go", "rust", "code", "java"])


if __name__ == "__main__":
    unittest.main()

@@ -4,29 +4,28 @@ from pymongo.mongo_client import MongoClient
from mongoengine import Document, StringField, IntField
from mongoengine.connection import connect

__author__ = 'stas'
__author__ = "stas"


class Person(Document):
    name = StringField()
    age = IntField()


class TestQuerysetPickable(unittest.TestCase):
    """
    Test for adding pickling support for QuerySet instances
    See issue https://github.com/MongoEngine/mongoengine/issues/442
    """

    def setUp(self):
        super(TestQuerysetPickable, self).setUp()

        connection = connect(db="test") #type: pymongo.mongo_client.MongoClient
        connection = connect(db="test")  # type: pymongo.mongo_client.MongoClient

        connection.drop_database("test")

        self.john = Person.objects.create(
            name="John",
            age=21
        )

        self.john = Person.objects.create(name="John", age=21)

    def test_picke_simple_qs(self):

@@ -46,22 +45,16 @@ class TestQuerysetPickable(unittest.TestCase):

        self.assertEqual(qs.count(), loadedQs.count())

        #can update loadedQs
        # can update loadedQs
        loadedQs.update(age=23)

        #check
        # check
        self.assertEqual(Person.objects.first().age, 23)

    def test_pickle_support_filtration(self):
        Person.objects.create(
            name="Alice",
            age=22
        )
        Person.objects.create(name="Alice", age=22)

        Person.objects.create(
            name="Bob",
            age=23
        )
        Person.objects.create(name="Bob", age=23)

        qs = Person.objects.filter(age__gte=22)
        self.assertEqual(qs.count(), 2)
@@ -70,9 +63,3 @@ class TestQuerysetPickable(unittest.TestCase):

        self.assertEqual(loaded.count(), 2)
        self.assertEqual(loaded.filter(name="Bob").first().age, 23)

File diff suppressed because it is too large
@@ -1,5 +1,7 @@
import unittest

from bson.son import SON

from mongoengine import *
from mongoengine.queryset import Q, transform

@@ -7,79 +9,117 @@ __all__ = ("TransformTest",)


class TransformTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        connect(db="mongoenginetest")

    def test_transform_query(self):
        """Ensure that the _transform_query function operates correctly.
        """
        self.assertEqual(transform.query(name='test', age=30),
                         {'name': 'test', 'age': 30})
        self.assertEqual(transform.query(age__lt=30),
                         {'age': {'$lt': 30}})
        self.assertEqual(transform.query(age__gt=20, age__lt=50),
                         {'age': {'$gt': 20, '$lt': 50}})
        self.assertEqual(transform.query(age=20, age__gt=50),
                         {'$and': [{'age': {'$gt': 50}}, {'age': 20}]})
        self.assertEqual(transform.query(friend__age__gte=30),
                         {'friend.age': {'$gte': 30}})
        self.assertEqual(transform.query(name__exists=True),
                         {'name': {'$exists': True}})
        self.assertEqual(
            transform.query(name="test", age=30), {"name": "test", "age": 30}
        )
        self.assertEqual(transform.query(age__lt=30), {"age": {"$lt": 30}})
        self.assertEqual(
            transform.query(age__gt=20, age__lt=50), {"age": {"$gt": 20, "$lt": 50}}
        )
        self.assertEqual(
            transform.query(age=20, age__gt=50),
            {"$and": [{"age": {"$gt": 50}}, {"age": 20}]},
        )
        self.assertEqual(
            transform.query(friend__age__gte=30), {"friend.age": {"$gte": 30}}
        )
        self.assertEqual(
            transform.query(name__exists=True), {"name": {"$exists": True}}
        )

    def test_transform_update(self):
        class LisDoc(Document):
            foo = ListField(StringField())

        class DicDoc(Document):
            dictField = DictField()

        class Doc(Document):
            pass

        LisDoc.drop_collection()
        DicDoc.drop_collection()
        Doc.drop_collection()

        DicDoc().save()
        doc = Doc().save()

        for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")):
        for k, v in (
            ("set", "$set"),
            ("set_on_insert", "$setOnInsert"),
            ("push", "$push"),
        ):
            update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc})
            self.assertTrue(isinstance(update[v]["dictField.test"], dict))
            self.assertIsInstance(update[v]["dictField.test"], dict)

        # Update special cases
        update = transform.update(DicDoc, unset__dictField__test=doc)
        self.assertEqual(update["$unset"]["dictField.test"], 1)

        update = transform.update(DicDoc, pull__dictField__test=doc)
        self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict))
        self.assertIsInstance(update["$pull"]["dictField"]["test"], dict)

        update = transform.update(LisDoc, pull__foo__in=["a"])
        self.assertEqual(update, {"$pull": {"foo": {"$in": ["a"]}}})

    def test_transform_update_push(self):
        """Ensure the differences in behvaior between 'push' and 'push_all'"""

        class BlogPost(Document):
            tags = ListField(StringField())

        update = transform.update(BlogPost, push__tags=["mongo", "db"])
        self.assertEqual(update, {"$push": {"tags": ["mongo", "db"]}})

        update = transform.update(BlogPost, push_all__tags=["mongo", "db"])
        self.assertEqual(update, {"$push": {"tags": {"$each": ["mongo", "db"]}}})

    def test_transform_update_no_operator_default_to_set(self):
        """Ensure the differences in behvaior between 'push' and 'push_all'"""

        class BlogPost(Document):
            tags = ListField(StringField())

        update = transform.update(BlogPost, tags=["mongo", "db"])
        self.assertEqual(update, {"$set": {"tags": ["mongo", "db"]}})

    def test_query_field_name(self):
        """Ensure that the correct field name is used when querying.
        """

        class Comment(EmbeddedDocument):
            content = StringField(db_field='commentContent')
            content = StringField(db_field="commentContent")

        class BlogPost(Document):
            title = StringField(db_field='postTitle')
            comments = ListField(EmbeddedDocumentField(Comment),
                                 db_field='postComments')
            title = StringField(db_field="postTitle")
            comments = ListField(
                EmbeddedDocumentField(Comment), db_field="postComments"
            )

        BlogPost.drop_collection()

        data = {'title': 'Post 1', 'comments': [Comment(content='test')]}
        data = {"title": "Post 1", "comments": [Comment(content="test")]}
        post = BlogPost(**data)
        post.save()

        self.assertTrue('postTitle' in
                        BlogPost.objects(title=data['title'])._query)
        self.assertFalse('title' in
                         BlogPost.objects(title=data['title'])._query)
        self.assertEqual(BlogPost.objects(title=data['title']).count(), 1)
        self.assertIn("postTitle", BlogPost.objects(title=data["title"])._query)
        self.assertFalse("title" in BlogPost.objects(title=data["title"])._query)
        self.assertEqual(BlogPost.objects(title=data["title"]).count(), 1)

        self.assertTrue('_id' in BlogPost.objects(pk=post.id)._query)
        self.assertIn("_id", BlogPost.objects(pk=post.id)._query)
        self.assertEqual(BlogPost.objects(pk=post.id).count(), 1)

        self.assertTrue('postComments.commentContent' in
                        BlogPost.objects(comments__content='test')._query)
        self.assertEqual(BlogPost.objects(comments__content='test').count(), 1)
        self.assertIn(
            "postComments.commentContent",
            BlogPost.objects(comments__content="test")._query,
        )
        self.assertEqual(BlogPost.objects(comments__content="test").count(), 1)

        BlogPost.drop_collection()

@@ -87,18 +127,19 @@ class TransformTest(unittest.TestCase):
        """Ensure that the correct "primary key" field name is used when
        querying
        """

        class BlogPost(Document):
            title = StringField(primary_key=True, db_field='postTitle')
            title = StringField(primary_key=True, db_field="postTitle")

        BlogPost.drop_collection()

        data = {'title': 'Post 1'}
        data = {"title": "Post 1"}
        post = BlogPost(**data)
        post.save()

        self.assertTrue('_id' in BlogPost.objects(pk=data['title'])._query)
        self.assertTrue('_id' in BlogPost.objects(title=data['title'])._query)
        self.assertEqual(BlogPost.objects(pk=data['title']).count(), 1)
        self.assertIn("_id", BlogPost.objects(pk=data["title"])._query)
        self.assertIn("_id", BlogPost.objects(title=data["title"])._query)
        self.assertEqual(BlogPost.objects(pk=data["title"]).count(), 1)

        BlogPost.drop_collection()

@@ -130,78 +171,125 @@ class TransformTest(unittest.TestCase):
        """
        Test raw plays nicely
        """

        class Foo(Document):
            name = StringField()
            a = StringField()
            b = StringField()
            c = StringField()

            meta = {
                'allow_inheritance': False
            }
            meta = {"allow_inheritance": False}

        query = Foo.objects(__raw__={'$nor': [{'name': 'bar'}]})._query
        self.assertEqual(query, {'$nor': [{'name': 'bar'}]})
        query = Foo.objects(__raw__={"$nor": [{"name": "bar"}]})._query
        self.assertEqual(query, {"$nor": [{"name": "bar"}]})

        q1 = {'$or': [{'a': 1}, {'b': 1}]}
        q1 = {"$or": [{"a": 1}, {"b": 1}]}
        query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query
        self.assertEqual(query, {'$or': [{'a': 1}, {'b': 1}], 'c': 1})
        self.assertEqual(query, {"$or": [{"a": 1}, {"b": 1}], "c": 1})

    def test_raw_and_merging(self):
        class Doc(Document):
            meta = {'allow_inheritance': False}
            meta = {"allow_inheritance": False}

        raw_query = Doc.objects(__raw__={
            'deleted': False,
            'scraped': 'yes',
            '$nor': [
                {'views.extracted': 'no'},
                {'attachments.views.extracted': 'no'}
            ]
        })._query
        raw_query = Doc.objects(
            __raw__={
                "deleted": False,
                "scraped": "yes",
                "$nor": [
                    {"views.extracted": "no"},
                    {"attachments.views.extracted": "no"},
                ],
            }
        )._query

        self.assertEqual(raw_query, {
            'deleted': False,
            'scraped': 'yes',
            '$nor': [
                {'views.extracted': 'no'},
                {'attachments.views.extracted': 'no'}
            ]
        })
        self.assertEqual(
            raw_query,
            {
                "deleted": False,
                "scraped": "yes",
                "$nor": [
                    {"views.extracted": "no"},
                    {"attachments.views.extracted": "no"},
                ],
            },
        )

    def test_geojson_PointField(self):
        class Location(Document):
            loc = PointField()

        update = transform.update(Location, set__loc=[1, 2])
        self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}})
        self.assertEqual(
            update, {"$set": {"loc": {"type": "Point", "coordinates": [1, 2]}}}
        )

        update = transform.update(Location, set__loc={"type": "Point", "coordinates": [1, 2]})
        self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}})
        update = transform.update(
            Location, set__loc={"type": "Point", "coordinates": [1, 2]}
        )
        self.assertEqual(
            update, {"$set": {"loc": {"type": "Point", "coordinates": [1, 2]}}}
        )

    def test_geojson_LineStringField(self):
        class Location(Document):
            line = LineStringField()

        update = transform.update(Location, set__line=[[1, 2], [2, 2]])
        self.assertEqual(update, {'$set': {'line': {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}})
        self.assertEqual(
            update,
            {"$set": {"line": {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}},
        )

        update = transform.update(Location, set__line={"type": "LineString", "coordinates": [[1, 2], [2, 2]]})
        self.assertEqual(update, {'$set': {'line': {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}})
        update = transform.update(
            Location, set__line={"type": "LineString", "coordinates": [[1, 2], [2, 2]]}
        )
        self.assertEqual(
            update,
            {"$set": {"line": {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}},
        )

    def test_geojson_PolygonField(self):
        class Location(Document):
            poly = PolygonField()

        update = transform.update(Location, set__poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]])
        self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}})
        update = transform.update(
            Location, set__poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]
        )
        self.assertEqual(
            update,
            {
                "$set": {
                    "poly": {
                        "type": "Polygon",
                        "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]],
                    }
                }
            },
        )

        update = transform.update(Location, set__poly={"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})
        self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}})
        update = transform.update(
            Location,
            set__poly={
                "type": "Polygon",
                "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]],
            },
        )
        self.assertEqual(
            update,
            {
                "$set": {
                    "poly": {
                        "type": "Polygon",
                        "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]],
                    }
                }
            },
        )

    def test_type(self):
        class Doc(Document):
            df = DynamicField()

        Doc(df=True).save()
        Doc(df=7).save()
        Doc(df="df").save()
@@ -226,7 +314,7 @@ class TransformTest(unittest.TestCase):
        self.assertEqual(1, Doc.objects(item__type__="axe").count())
        self.assertEqual(1, Doc.objects(item__name__="Heroic axe").count())

        Doc.objects(id=doc.id).update(set__item__type__='sword')
        Doc.objects(id=doc.id).update(set__item__type__="sword")
        self.assertEqual(1, Doc.objects(item__type__="sword").count())
        self.assertEqual(0, Doc.objects(item__type__="axe").count())

@@ -241,6 +329,45 @@ class TransformTest(unittest.TestCase):
        with self.assertRaises(InvalidQueryError):
            events.count()

    def test_update_pull_for_list_fields(self):
        """
        Test added to check pull operation in update for
        EmbeddedDocumentListField which is inside a EmbeddedDocumentField
        """

if __name__ == '__main__':
        class Word(EmbeddedDocument):
            word = StringField()
            index = IntField()

        class SubDoc(EmbeddedDocument):
            heading = ListField(StringField())
            text = EmbeddedDocumentListField(Word)

        class MainDoc(Document):
            title = StringField()
            content = EmbeddedDocumentField(SubDoc)

        word = Word(word="abc", index=1)
        update = transform.update(MainDoc, pull__content__text=word)
        self.assertEqual(
            update, {"$pull": {"content.text": SON([("word", u"abc"), ("index", 1)])}}
        )

        update = transform.update(MainDoc, pull__content__heading="xyz")
        self.assertEqual(update, {"$pull": {"content.heading": "xyz"}})

        update = transform.update(MainDoc, pull__content__text__word__in=["foo", "bar"])
        self.assertEqual(
            update, {"$pull": {"content.text": {"word": {"$in": ["foo", "bar"]}}}}
        )

        update = transform.update(
            MainDoc, pull__content__text__word__nin=["foo", "bar"]
        )
        self.assertEqual(
            update, {"$pull": {"content.text": {"word": {"$nin": ["foo", "bar"]}}}}
        )


if __name__ == "__main__":
    unittest.main()

@@ -12,14 +12,13 @@ __all__ = ("QTest",)


class QTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        connect(db="mongoenginetest")

        class Person(Document):
            name = StringField()
            age = IntField()
            meta = {'allow_inheritance': True}
            meta = {"allow_inheritance": True}

        Person.drop_collection()
        self.Person = Person
@@ -30,22 +29,22 @@ class QTest(unittest.TestCase):
        q1 = Q()
        q2 = Q(age__gte=18)
        q3 = Q()
        q4 = Q(name='test')
        q4 = Q(name="test")
        q5 = Q()

        class Person(Document):
            name = StringField()
            age = IntField()

        query = {'$or': [{'age': {'$gte': 18}}, {'name': 'test'}]}
        query = {"$or": [{"age": {"$gte": 18}}, {"name": "test"}]}
        self.assertEqual((q1 | q2 | q3 | q4 | q5).to_query(Person), query)

        query = {'age': {'$gte': 18}, 'name': 'test'}
        query = {"age": {"$gte": 18}, "name": "test"}
        self.assertEqual((q1 & q2 & q3 & q4 & q5).to_query(Person), query)

    def test_q_with_dbref(self):
        """Ensure Q objects handle DBRefs correctly"""
        connect(db='mongoenginetest')
        connect(db="mongoenginetest")

        class User(Document):
            pass
@@ -62,15 +61,18 @@ class QTest(unittest.TestCase):
    def test_and_combination(self):
        """Ensure that Q-objects correctly AND together.
        """

        class TestDoc(Document):
            x = IntField()
            y = StringField()

        query = (Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc)
        self.assertEqual(query, {'$and': [{'x': {'$lt': 7}}, {'x': {'$lt': 3}}]})
        self.assertEqual(query, {"$and": [{"x": {"$lt": 7}}, {"x": {"$lt": 3}}]})

        query = (Q(y="a") & Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc)
        self.assertEqual(query, {'$and': [{'y': "a"}, {'x': {'$lt': 7}}, {'x': {'$lt': 3}}]})
        self.assertEqual(
            query, {"$and": [{"y": "a"}, {"x": {"$lt": 7}}, {"x": {"$lt": 3}}]}
        )

        # Check normal cases work without an error
        query = Q(x__lt=7) & Q(x__gt=3)
@@ -78,69 +80,74 @@ class QTest(unittest.TestCase):
        q1 = Q(x__lt=7)
        q2 = Q(x__gt=3)
        query = (q1 & q2).to_query(TestDoc)
        self.assertEqual(query, {'x': {'$lt': 7, '$gt': 3}})
        self.assertEqual(query, {"x": {"$lt": 7, "$gt": 3}})

        # More complex nested example
        query = Q(x__lt=100) & Q(y__ne='NotMyString')
        query &= Q(y__in=['a', 'b', 'c']) & Q(x__gt=-100)
        query = Q(x__lt=100) & Q(y__ne="NotMyString")
        query &= Q(y__in=["a", "b", "c"]) & Q(x__gt=-100)
        mongo_query = {
            'x': {'$lt': 100, '$gt': -100},
            'y': {'$ne': 'NotMyString', '$in': ['a', 'b', 'c']},
            "x": {"$lt": 100, "$gt": -100},
            "y": {"$ne": "NotMyString", "$in": ["a", "b", "c"]},
        }
        self.assertEqual(query.to_query(TestDoc), mongo_query)

    def test_or_combination(self):
        """Ensure that Q-objects correctly OR together.
        """

        class TestDoc(Document):
            x = IntField()

        q1 = Q(x__lt=3)
        q2 = Q(x__gt=7)
        query = (q1 | q2).to_query(TestDoc)
        self.assertEqual(query, {
            '$or': [
                {'x': {'$lt': 3}},
                {'x': {'$gt': 7}},
            ]
        })
        self.assertEqual(query, {"$or": [{"x": {"$lt": 3}}, {"x": {"$gt": 7}}]})

    def test_and_or_combination(self):
        """Ensure that Q-objects handle ANDing ORed components.
        """

        class TestDoc(Document):
            x = IntField()
            y = BooleanField()

        TestDoc.drop_collection()

        query = (Q(x__gt=0) | Q(x__exists=False))
        query = Q(x__gt=0) | Q(x__exists=False)
        query &= Q(x__lt=100)
        self.assertEqual(query.to_query(TestDoc), {'$and': [
            {'$or': [{'x': {'$gt': 0}},
                     {'x': {'$exists': False}}]},
            {'x': {'$lt': 100}}]
        })
        self.assertEqual(
            query.to_query(TestDoc),
            {
                "$and": [
                    {"$or": [{"x": {"$gt": 0}}, {"x": {"$exists": False}}]},
                    {"x": {"$lt": 100}},
                ]
            },
        )

        q1 = (Q(x__gt=0) | Q(x__exists=False))
        q2 = (Q(x__lt=100) | Q(y=True))
        q1 = Q(x__gt=0) | Q(x__exists=False)
        q2 = Q(x__lt=100) | Q(y=True)
        query = (q1 & q2).to_query(TestDoc)

        TestDoc(x=101).save()
        TestDoc(x=10).save()
        TestDoc(y=True).save()

        self.assertEqual(query, {
            '$and': [
                {'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]},
                {'$or': [{'x': {'$lt': 100}}, {'y': True}]}
            ]
        })
        self.assertEqual(
            query,
            {
                "$and": [
                    {"$or": [{"x": {"$gt": 0}}, {"x": {"$exists": False}}]},
                    {"$or": [{"x": {"$lt": 100}}, {"y": True}]},
                ]
            },
        )
        self.assertEqual(2, TestDoc.objects(q1 & q2).count())

    def test_or_and_or_combination(self):
        """Ensure that Q-objects handle ORing ANDed ORed components. :)
        """

        class TestDoc(Document):
            x = IntField()
            y = BooleanField()
@@ -151,18 +158,29 @@ class QTest(unittest.TestCase):
        TestDoc(x=99, y=False).save()
        TestDoc(x=101, y=False).save()

        q1 = (Q(x__gt=0) & (Q(y=True) | Q(y__exists=False)))
        q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False)))
        q1 = Q(x__gt=0) & (Q(y=True) | Q(y__exists=False))
        q2 = Q(x__lt=100) & (Q(y=False) | Q(y__exists=False))
        query = (q1 | q2).to_query(TestDoc)

        self.assertEqual(query, {
            '$or': [
                {'$and': [{'x': {'$gt': 0}},
                          {'$or': [{'y': True}, {'y': {'$exists': False}}]}]},
                {'$and': [{'x': {'$lt': 100}},
                          {'$or': [{'y': False}, {'y': {'$exists': False}}]}]}
            ]
        })
        self.assertEqual(
            query,
            {
                "$or": [
                    {
                        "$and": [
                            {"x": {"$gt": 0}},
                            {"$or": [{"y": True}, {"y": {"$exists": False}}]},
                        ]
                    },
                    {
                        "$and": [
                            {"x": {"$lt": 100}},
                            {"$or": [{"y": False}, {"y": {"$exists": False}}]},
                        ]
                    },
                ]
            },
        )
        self.assertEqual(2, TestDoc.objects(q1 | q2).count())

    def test_multiple_occurence_in_field(self):
@@ -170,8 +188,8 @@ class QTest(unittest.TestCase):
            name = StringField(max_length=40)
            title = StringField(max_length=40)

        q1 = Q(name__contains='te') | Q(title__contains='te')
        q2 = Q(name__contains='12') | Q(title__contains='12')
        q1 = Q(name__contains="te") | Q(title__contains="te")
        q2 = Q(name__contains="12") | Q(title__contains="12")

        q3 = q1 & q2

@@ -180,7 +198,6 @@ class QTest(unittest.TestCase):
        self.assertEqual(query["$and"][1], q2.to_query(Test))

    def test_q_clone(self):

        class TestDoc(Document):
            x = IntField()

@@ -196,7 +213,7 @@ class QTest(unittest.TestCase):

        test2 = test.clone()
        self.assertEqual(test2.count(), 3)
        self.assertFalse(test2 == test)
        self.assertNotEqual(test2, test)

        test3 = test2.filter(x=6)
        self.assertEqual(test3.count(), 1)
@@ -205,6 +222,7 @@ class QTest(unittest.TestCase):
    def test_q(self):
        """Ensure that Q objects may be used to query for documents.
        """

        class BlogPost(Document):
            title = StringField()
            publish_date = DateTimeField()
@@ -212,22 +230,26 @@ class QTest(unittest.TestCase):

        BlogPost.drop_collection()

        post1 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 8), published=False)
        post1 = BlogPost(
            title="Test 1", publish_date=datetime.datetime(2010, 1, 8), published=False
        )
        post1.save()

        post2 = BlogPost(title='Test 2', publish_date=datetime.datetime(2010, 1, 15), published=True)
        post2 = BlogPost(
            title="Test 2", publish_date=datetime.datetime(2010, 1, 15), published=True
        )
        post2.save()

        post3 = BlogPost(title='Test 3', published=True)
        post3 = BlogPost(title="Test 3", published=True)
        post3.save()

        post4 = BlogPost(title='Test 4', publish_date=datetime.datetime(2010, 1, 8))
        post4 = BlogPost(title="Test 4", publish_date=datetime.datetime(2010, 1, 8))
        post4.save()

        post5 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 15))
        post5 = BlogPost(title="Test 1", publish_date=datetime.datetime(2010, 1, 15))
        post5.save()

        post6 = BlogPost(title='Test 1', published=False)
        post6 = BlogPost(title="Test 1", published=False)
        post6.save()

        # Check ObjectId lookup works
@@ -235,13 +257,13 @@ class QTest(unittest.TestCase):
        self.assertEqual(obj, post1)

        # Check Q object combination with one does not exist
        q = BlogPost.objects(Q(title='Test 5') | Q(published=True))
        q = BlogPost.objects(Q(title="Test 5") | Q(published=True))
        posts = [post.id for post in q]

        published_posts = (post2, post3)
        self.assertTrue(all(obj.id in posts for obj in published_posts))

        q = BlogPost.objects(Q(title='Test 1') | Q(published=True))
        q = BlogPost.objects(Q(title="Test 1") | Q(published=True))
        posts = [post.id for post in q]
        published_posts = (post1, post2, post3, post5, post6)
        self.assertTrue(all(obj.id in posts for obj in published_posts))
@@ -259,74 +281,91 @@ class QTest(unittest.TestCase):
        BlogPost.drop_collection()

        # Check the 'in' operator
        self.Person(name='user1', age=20).save()
        self.Person(name='user2', age=20).save()
|
||||
self.Person(name='user3', age=30).save()
|
||||
self.Person(name='user4', age=40).save()
|
||||
self.Person(name="user1", age=20).save()
|
||||
self.Person(name="user2", age=20).save()
|
||||
self.Person(name="user3", age=30).save()
|
||||
self.Person(name="user4", age=40).save()
|
||||
|
||||
self.assertEqual(self.Person.objects(Q(age__in=[20])).count(), 2)
|
||||
self.assertEqual(self.Person.objects(Q(age__in=[20, 30])).count(), 3)
|
||||
|
||||
# Test invalid query objs
|
||||
with self.assertRaises(InvalidQueryError):
|
||||
self.Person.objects('user1')
|
||||
self.Person.objects("user1")
|
||||
|
||||
# filter should fail, too
|
||||
with self.assertRaises(InvalidQueryError):
|
||||
self.Person.objects.filter('user1')
|
||||
|
||||
self.Person.objects.filter("user1")
|
||||
|
||||
def test_q_regex(self):
|
||||
"""Ensure that Q objects can be queried using regexes.
|
||||
"""
|
||||
person = self.Person(name='Guido van Rossum')
|
||||
person = self.Person(name="Guido van Rossum")
|
||||
person.save()
|
||||
|
||||
obj = self.Person.objects(Q(name=re.compile('^Gui'))).first()
|
||||
obj = self.Person.objects(Q(name=re.compile("^Gui"))).first()
|
||||
self.assertEqual(obj, person)
|
||||
obj = self.Person.objects(Q(name=re.compile('^gui'))).first()
|
||||
obj = self.Person.objects(Q(name=re.compile("^gui"))).first()
|
||||
self.assertEqual(obj, None)
|
||||
|
||||
obj = self.Person.objects(Q(name=re.compile('^gui', re.I))).first()
|
||||
obj = self.Person.objects(Q(name=re.compile("^gui", re.I))).first()
|
||||
self.assertEqual(obj, person)
|
||||
|
||||
obj = self.Person.objects(Q(name__not=re.compile('^bob'))).first()
|
||||
obj = self.Person.objects(Q(name__not=re.compile("^bob"))).first()
|
||||
self.assertEqual(obj, person)
|
||||
|
||||
obj = self.Person.objects(Q(name__not=re.compile('^Gui'))).first()
|
||||
obj = self.Person.objects(Q(name__not=re.compile("^Gui"))).first()
|
||||
self.assertEqual(obj, None)
|
||||
|
||||
def test_q_repr(self):
|
||||
self.assertEqual(repr(Q()), "Q(**{})")
|
||||
self.assertEqual(repr(Q(name="test")), "Q(**{'name': 'test'})")
|
||||
|
||||
self.assertEqual(
|
||||
repr(Q(name="test") & Q(age__gte=18)),
|
||||
"(Q(**{'name': 'test'}) & Q(**{'age__gte': 18}))",
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
repr(Q(name="test") | Q(age__gte=18)),
|
||||
"(Q(**{'name': 'test'}) | Q(**{'age__gte': 18}))",
|
||||
)
|
||||
|
||||
def test_q_lists(self):
|
||||
"""Ensure that Q objects query ListFields correctly.
|
||||
"""
|
||||
|
||||
class BlogPost(Document):
|
||||
tags = ListField(StringField())
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
BlogPost(tags=['python', 'mongo']).save()
|
||||
BlogPost(tags=['python']).save()
|
||||
BlogPost(tags=["python", "mongo"]).save()
|
||||
BlogPost(tags=["python"]).save()
|
||||
|
||||
self.assertEqual(BlogPost.objects(Q(tags='mongo')).count(), 1)
|
||||
self.assertEqual(BlogPost.objects(Q(tags='python')).count(), 2)
|
||||
self.assertEqual(BlogPost.objects(Q(tags="mongo")).count(), 1)
|
||||
self.assertEqual(BlogPost.objects(Q(tags="python")).count(), 2)
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_q_merge_queries_edge_case(self):
|
||||
|
||||
class User(Document):
|
||||
email = EmailField(required=False)
|
||||
name = StringField()
|
||||
|
||||
User.drop_collection()
|
||||
pk = ObjectId()
|
||||
User(email='example@example.com', pk=pk).save()
|
||||
User(email="example@example.com", pk=pk).save()
|
||||
|
||||
self.assertEqual(1, User.objects.filter(Q(email='example@example.com') |
|
||||
Q(name='John Doe')).limit(2).filter(pk=pk).count())
|
||||
self.assertEqual(
|
||||
1,
|
||||
User.objects.filter(Q(email="example@example.com") | Q(name="John Doe"))
|
||||
.limit(2)
|
||||
.filter(pk=pk)
|
||||
.count(),
|
||||
)
|
||||
|
||||
def test_chained_q_or_filtering(self):
|
||||
|
||||
class Post(EmbeddedDocument):
|
||||
name = StringField(required=True)
|
||||
|
||||
@@ -339,9 +378,16 @@ class QTest(unittest.TestCase):
|
||||
Item(postables=[Post(name="a"), Post(name="c")]).save()
|
||||
Item(postables=[Post(name="a"), Post(name="b"), Post(name="c")]).save()
|
||||
|
||||
self.assertEqual(Item.objects(Q(postables__name="a") & Q(postables__name="b")).count(), 2)
|
||||
self.assertEqual(Item.objects.filter(postables__name="a").filter(postables__name="b").count(), 2)
|
||||
self.assertEqual(
|
||||
Item.objects(Q(postables__name="a") & Q(postables__name="b")).count(), 2
|
||||
)
|
||||
self.assertEqual(
|
||||
Item.objects.filter(postables__name="a")
|
||||
.filter(postables__name="b")
|
||||
.count(),
|
||||
2,
|
||||
)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
||||
tests/test_common.py (new file, +14)
@@ -0,0 +1,14 @@
import unittest

from mongoengine.common import _import_class
from mongoengine import Document


class TestCommon(unittest.TestCase):
    def test__import_class(self):
        doc_cls = _import_class("Document")
        self.assertIs(doc_cls, Document)

    def test__import_class_raise_if_not_known(self):
        with self.assertRaises(ValueError):
            _import_class("UnknownClass")

tests/test_connection.py
@@ -1,5 +1,10 @@
import datetime
from pymongo.errors import OperationFailure

from pymongo import MongoClient
from pymongo.errors import OperationFailure, InvalidName
from pymongo import ReadPreference

from mongoengine import Document

try:
    import unittest2 as unittest
@@ -11,23 +16,35 @@ import pymongo
from bson.tz_util import utc

from mongoengine import (
    connect, register_connection,
    Document, DateTimeField
    connect,
    register_connection,
    Document,
    DateTimeField,
    disconnect_all,
    StringField,
)
from mongoengine.python_support import IS_PYMONGO_3
import mongoengine.connection
from mongoengine.connection import (MongoEngineConnectionError, get_db,
                                    get_connection)
from mongoengine.connection import (
    ConnectionFailure,
    get_db,
    get_connection,
    disconnect,
    DEFAULT_DATABASE_NAME,
)


def get_tz_awareness(connection):
    if not IS_PYMONGO_3:
        return connection.tz_aware
    else:
        return connection.codec_options.tz_aware
    return connection.codec_options.tz_aware


class ConnectionTest(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        disconnect_all()

    @classmethod
    def tearDownClass(cls):
        disconnect_all()

    def tearDown(self):
        mongoengine.connection._connection_settings = {}
@@ -36,18 +53,174 @@ class ConnectionTest(unittest.TestCase):

    def test_connect(self):
        """Ensure that the connect() method works properly."""
        connect('mongoenginetest')
        connect("mongoenginetest")

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')
        self.assertIsInstance(db, pymongo.database.Database)
        self.assertEqual(db.name, "mongoenginetest")

        connect('mongoenginetest2', alias='testdb')
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
        connect("mongoenginetest2", alias="testdb")
        conn = get_connection("testdb")
        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

    def test_connect_disconnect_works_properly(self):
        class History1(Document):
            name = StringField()
            meta = {"db_alias": "db1"}

        class History2(Document):
            name = StringField()
            meta = {"db_alias": "db2"}

        connect("db1", alias="db1")
        connect("db2", alias="db2")

        History1.drop_collection()
        History2.drop_collection()

        h = History1(name="default").save()
        h1 = History2(name="db1").save()

        self.assertEqual(
            list(History1.objects().as_pymongo()), [{"_id": h.id, "name": "default"}]
        )
        self.assertEqual(
            list(History2.objects().as_pymongo()), [{"_id": h1.id, "name": "db1"}]
        )

        disconnect("db1")
        disconnect("db2")

        with self.assertRaises(ConnectionFailure):
            list(History1.objects().as_pymongo())

        with self.assertRaises(ConnectionFailure):
            list(History2.objects().as_pymongo())

        connect("db1", alias="db1")
        connect("db2", alias="db2")

        self.assertEqual(
            list(History1.objects().as_pymongo()), [{"_id": h.id, "name": "default"}]
        )
        self.assertEqual(
            list(History2.objects().as_pymongo()), [{"_id": h1.id, "name": "db1"}]
        )

    def test_connect_different_documents_to_different_database(self):
        class History(Document):
            name = StringField()

        class History1(Document):
            name = StringField()
            meta = {"db_alias": "db1"}

        class History2(Document):
            name = StringField()
            meta = {"db_alias": "db2"}

        connect()
        connect("db1", alias="db1")
        connect("db2", alias="db2")

        History.drop_collection()
        History1.drop_collection()
        History2.drop_collection()

        h = History(name="default").save()
        h1 = History1(name="db1").save()
        h2 = History2(name="db2").save()

        self.assertEqual(History._collection.database.name, DEFAULT_DATABASE_NAME)
        self.assertEqual(History1._collection.database.name, "db1")
        self.assertEqual(History2._collection.database.name, "db2")

        self.assertEqual(
            list(History.objects().as_pymongo()), [{"_id": h.id, "name": "default"}]
        )
        self.assertEqual(
            list(History1.objects().as_pymongo()), [{"_id": h1.id, "name": "db1"}]
        )
        self.assertEqual(
            list(History2.objects().as_pymongo()), [{"_id": h2.id, "name": "db2"}]
        )

    def test_connect_fails_if_connect_2_times_with_default_alias(self):
        connect("mongoenginetest")

        with self.assertRaises(ConnectionFailure) as ctx_err:
            connect("mongoenginetest2")
        self.assertEqual(
            "A different connection with alias `default` was already registered. Use disconnect() first",
            str(ctx_err.exception),
        )

    def test_connect_fails_if_connect_2_times_with_custom_alias(self):
        connect("mongoenginetest", alias="alias1")

        with self.assertRaises(ConnectionFailure) as ctx_err:
            connect("mongoenginetest2", alias="alias1")

        self.assertEqual(
            "A different connection with alias `alias1` was already registered. Use disconnect() first",
            str(ctx_err.exception),
        )

    def test_connect_fails_if_similar_connection_settings_arent_defined_the_same_way(
        self
    ):
"""Intended to keep the detecton function simple but robust"""
        db_name = "mongoenginetest"
        db_alias = "alias1"
        connect(db=db_name, alias=db_alias, host="localhost", port=27017)

        with self.assertRaises(ConnectionFailure):
            connect(host="mongodb://localhost:27017/%s" % db_name, alias=db_alias)

    def test_connect_passes_silently_connect_multiple_times_with_same_config(self):
        # test default connection to `test`
        connect()
        connect()
        self.assertEqual(len(mongoengine.connection._connections), 1)
        connect("test01", alias="test01")
        connect("test01", alias="test01")
        self.assertEqual(len(mongoengine.connection._connections), 2)
        connect(host="mongodb://localhost:27017/mongoenginetest02", alias="test02")
        connect(host="mongodb://localhost:27017/mongoenginetest02", alias="test02")
        self.assertEqual(len(mongoengine.connection._connections), 3)

    def test_connect_with_invalid_db_name(self):
        """Ensure that connect() method fails fast if db name is invalid
        """
        with self.assertRaises(InvalidName):
            connect("mongomock://localhost")

    def test_connect_with_db_name_external(self):
        """Ensure that connect() works if db name is $external
        """
        """Ensure that the connect() method works properly."""
        connect("$external")

        conn = get_connection()
        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

        db = get_db()
        self.assertIsInstance(db, pymongo.database.Database)
        self.assertEqual(db.name, "$external")

        connect("$external", alias="testdb")
        conn = get_connection("testdb")
        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

    def test_connect_with_invalid_db_name_type(self):
        """Ensure that connect() method fails fast if db name has invalid type
        """
        with self.assertRaises(TypeError):
            non_string_db_name = ["e. g. list instead of a string"]
            connect(non_string_db_name)

    def test_connect_in_mocking(self):
        """Ensure that the connect() method works properly in mocking.
@@ -55,35 +228,68 @@ class ConnectionTest(unittest.TestCase):
        try:
            import mongomock
        except ImportError:
            raise SkipTest('you need mongomock installed to run this testcase')
            raise SkipTest("you need mongomock installed to run this testcase")

        connect('mongoenginetest', host='mongomock://localhost')
        connect("mongoenginetest", host="mongomock://localhost")
        conn = get_connection()
        self.assertTrue(isinstance(conn, mongomock.MongoClient))
        self.assertIsInstance(conn, mongomock.MongoClient)

        connect('mongoenginetest2', host='mongomock://localhost', alias='testdb2')
        conn = get_connection('testdb2')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))
        connect("mongoenginetest2", host="mongomock://localhost", alias="testdb2")
        conn = get_connection("testdb2")
        self.assertIsInstance(conn, mongomock.MongoClient)

        connect('mongoenginetest3', host='mongodb://localhost', is_mock=True, alias='testdb3')
        conn = get_connection('testdb3')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))
        connect(
            "mongoenginetest3",
            host="mongodb://localhost",
            is_mock=True,
            alias="testdb3",
        )
        conn = get_connection("testdb3")
        self.assertIsInstance(conn, mongomock.MongoClient)

        connect('mongoenginetest4', is_mock=True, alias='testdb4')
        conn = get_connection('testdb4')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))
        connect("mongoenginetest4", is_mock=True, alias="testdb4")
        conn = get_connection("testdb4")
        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host='mongodb://localhost:27017/mongoenginetest5', is_mock=True, alias='testdb5')
        conn = get_connection('testdb5')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))
        connect(
            host="mongodb://localhost:27017/mongoenginetest5",
            is_mock=True,
            alias="testdb5",
        )
        conn = get_connection("testdb5")
        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host='mongomock://localhost:27017/mongoenginetest6', alias='testdb6')
        conn = get_connection('testdb6')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))
        connect(host="mongomock://localhost:27017/mongoenginetest6", alias="testdb6")
        conn = get_connection("testdb6")
        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host='mongomock://localhost:27017/mongoenginetest7', is_mock=True, alias='testdb7')
        conn = get_connection('testdb7')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))
        connect(
            host="mongomock://localhost:27017/mongoenginetest7",
            is_mock=True,
            alias="testdb7",
        )
        conn = get_connection("testdb7")
        self.assertIsInstance(conn, mongomock.MongoClient)

    def test_default_database_with_mocking(self):
        """Ensure that the default database is correctly set when using mongomock.
        """
        try:
            import mongomock
        except ImportError:
            raise SkipTest("you need mongomock installed to run this testcase")

        disconnect_all()

        class SomeDocument(Document):
            pass

        conn = connect(host="mongomock://localhost:27017/mongoenginetest")
        some_document = SomeDocument()
        # database won't exist until we save a document
        some_document.save()
        self.assertEqual(conn.get_default_database().name, "mongoenginetest")
        self.assertEqual(conn.database_names()[0], "mongoenginetest")

    def test_connect_with_host_list(self):
        """Ensure that the connect() method works when host is a list
@@ -93,118 +299,241 @@ class ConnectionTest(unittest.TestCase):
        try:
            import mongomock
        except ImportError:
            raise SkipTest('you need mongomock installed to run this testcase')
            raise SkipTest("you need mongomock installed to run this testcase")

        connect(host=['mongomock://localhost'])
        connect(host=["mongomock://localhost"])
        conn = get_connection()
        self.assertTrue(isinstance(conn, mongomock.MongoClient))
        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host=['mongodb://localhost'], is_mock=True, alias='testdb2')
        conn = get_connection('testdb2')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))
        connect(host=["mongodb://localhost"], is_mock=True, alias="testdb2")
        conn = get_connection("testdb2")
        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host=['localhost'], is_mock=True, alias='testdb3')
        conn = get_connection('testdb3')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))
        connect(host=["localhost"], is_mock=True, alias="testdb3")
        conn = get_connection("testdb3")
        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host=['mongomock://localhost:27017', 'mongomock://localhost:27018'], alias='testdb4')
        conn = get_connection('testdb4')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))
        connect(
            host=["mongomock://localhost:27017", "mongomock://localhost:27018"],
            alias="testdb4",
        )
        conn = get_connection("testdb4")
        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host=['mongodb://localhost:27017', 'mongodb://localhost:27018'], is_mock=True, alias='testdb5')
        conn = get_connection('testdb5')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))
        connect(
            host=["mongodb://localhost:27017", "mongodb://localhost:27018"],
            is_mock=True,
            alias="testdb5",
        )
        conn = get_connection("testdb5")
        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host=['localhost:27017', 'localhost:27018'], is_mock=True, alias='testdb6')
        conn = get_connection('testdb6')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))
        connect(
            host=["localhost:27017", "localhost:27018"], is_mock=True, alias="testdb6"
        )
        conn = get_connection("testdb6")
        self.assertIsInstance(conn, mongomock.MongoClient)

    def test_disconnect(self):
        """Ensure that the disconnect() method works properly
        """
        conn1 = connect('mongoenginetest')
        mongoengine.connection.disconnect()
        conn2 = connect('mongoenginetest')
        self.assertTrue(conn1 is not conn2)
    def test_disconnect_cleans_globals(self):
        """Ensure that the disconnect() method cleans the globals objects"""
        connections = mongoengine.connection._connections
        dbs = mongoengine.connection._dbs
        connection_settings = mongoengine.connection._connection_settings

        connect("mongoenginetest")

        self.assertEqual(len(connections), 1)
        self.assertEqual(len(dbs), 0)
        self.assertEqual(len(connection_settings), 1)

        class TestDoc(Document):
            pass

        TestDoc.drop_collection()  # triggers the db
        self.assertEqual(len(dbs), 1)

        disconnect()
        self.assertEqual(len(connections), 0)
        self.assertEqual(len(dbs), 0)
        self.assertEqual(len(connection_settings), 0)

    def test_disconnect_cleans_cached_collection_attribute_in_document(self):
        """Ensure that the disconnect() method works properly"""
        conn1 = connect("mongoenginetest")

        class History(Document):
            pass

        self.assertIsNone(History._collection)

        History.drop_collection()

        History.objects.first()  # will trigger the caching of _collection attribute
        self.assertIsNotNone(History._collection)

        disconnect()

        self.assertIsNone(History._collection)

        with self.assertRaises(ConnectionFailure) as ctx_err:
            History.objects.first()
        self.assertEqual(
            "You have not defined a default connection", str(ctx_err.exception)
        )

    def test_connect_disconnect_works_on_same_document(self):
        """Ensure that the connect/disconnect works properly with a single Document"""
        db1 = "db1"
        db2 = "db2"

        # Ensure freshness of the 2 databases through pymongo
        client = MongoClient("localhost", 27017)
        client.drop_database(db1)
        client.drop_database(db2)

        # Save in db1
        connect(db1)

        class User(Document):
            name = StringField(required=True)

        user1 = User(name="John is in db1").save()
        disconnect()

        # Make sure save doesn't work at this stage
        with self.assertRaises(ConnectionFailure):
            User(name="Wont work").save()

        # Save in db2
        connect(db2)
        user2 = User(name="Bob is in db2").save()
        disconnect()

        db1_users = list(client[db1].user.find())
        self.assertEqual(db1_users, [{"_id": user1.id, "name": "John is in db1"}])
        db2_users = list(client[db2].user.find())
        self.assertEqual(db2_users, [{"_id": user2.id, "name": "Bob is in db2"}])

    def test_disconnect_silently_pass_if_alias_does_not_exist(self):
        connections = mongoengine.connection._connections
        self.assertEqual(len(connections), 0)
        disconnect(alias="not_exist")

    def test_disconnect_all(self):
        connections = mongoengine.connection._connections
        dbs = mongoengine.connection._dbs
        connection_settings = mongoengine.connection._connection_settings

        connect("mongoenginetest")
        connect("mongoenginetest2", alias="db1")

        class History(Document):
            pass

        class History1(Document):
            name = StringField()
            meta = {"db_alias": "db1"}

        History.drop_collection()  # will trigger the caching of _collection attribute
        History.objects.first()
        History1.drop_collection()
        History1.objects.first()

        self.assertIsNotNone(History._collection)
        self.assertIsNotNone(History1._collection)

        self.assertEqual(len(connections), 2)
        self.assertEqual(len(dbs), 2)
        self.assertEqual(len(connection_settings), 2)

        disconnect_all()

        self.assertIsNone(History._collection)
        self.assertIsNone(History1._collection)

        self.assertEqual(len(connections), 0)
        self.assertEqual(len(dbs), 0)
        self.assertEqual(len(connection_settings), 0)

        with self.assertRaises(ConnectionFailure):
            History.objects.first()

        with self.assertRaises(ConnectionFailure):
            History1.objects.first()

    def test_disconnect_all_silently_pass_if_no_connection_exist(self):
        disconnect_all()

    def test_sharing_connections(self):
        """Ensure that connections are shared when the connection settings are exactly the same
        """
        connect('mongoenginetests', alias='testdb1')
        expected_connection = get_connection('testdb1')
        connect("mongoenginetests", alias="testdb1")
        expected_connection = get_connection("testdb1")

        connect('mongoenginetests', alias='testdb2')
        actual_connection = get_connection('testdb2')
        connect("mongoenginetests", alias="testdb2")
        actual_connection = get_connection("testdb2")

        # Handle PyMongo 3+ Async Connection
        if IS_PYMONGO_3:
            # Ensure we are connected, throws ServerSelectionTimeoutError otherwise.
            # Purposely not catching exception to fail test if thrown.
            expected_connection.server_info()
        expected_connection.server_info()

        self.assertEqual(expected_connection, actual_connection)

    def test_connect_uri(self):
        """Ensure that the connect() method works properly with URIs."""
        c = connect(db='mongoenginetest', alias='admin')
        c.admin.system.users.remove({})
        c.mongoenginetest.system.users.remove({})
        c = connect(db="mongoenginetest", alias="admin")
        c.admin.system.users.delete_many({})
        c.mongoenginetest.system.users.delete_many({})

        c.admin.add_user("admin", "password")
        c.admin.command("createUser", "admin", pwd="password", roles=["root"])
        c.admin.authenticate("admin", "password")
        c.mongoenginetest.add_user("username", "password")
        c.admin.command("createUser", "username", pwd="password", roles=["dbOwner"])

        if not IS_PYMONGO_3:
            self.assertRaises(
                MongoEngineConnectionError, connect, 'testdb_uri_bad',
                host='mongodb://test:password@localhost'
            )

        connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')
        connect(
            "testdb_uri", host="mongodb://username:password@localhost/mongoenginetest"
        )

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')
        self.assertIsInstance(db, pymongo.database.Database)
        self.assertEqual(db.name, "mongoenginetest")

        c.admin.system.users.remove({})
        c.mongoenginetest.system.users.remove({})
        c.admin.system.users.delete_many({})
        c.mongoenginetest.system.users.delete_many({})

    def test_connect_uri_without_db(self):
        """Ensure connect() method works properly if the URI doesn't
        include a database name.
        """
        connect("mongoenginetest", host='mongodb://localhost/')
        connect("mongoenginetest", host="mongodb://localhost/")

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')
        self.assertIsInstance(db, pymongo.database.Database)
        self.assertEqual(db.name, "mongoenginetest")

    def test_connect_uri_default_db(self):
        """Ensure connect() defaults to the right database name if
        the URI and the database_name don't explicitly specify it.
        """
        connect(host='mongodb://localhost/')
        connect(host="mongodb://localhost/")

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'test')
        self.assertIsInstance(db, pymongo.database.Database)
        self.assertEqual(db.name, "test")

    def test_uri_without_credentials_doesnt_override_conn_settings(self):
        """Ensure connect() uses the username & password params if the URI
        doesn't explicitly specify them.
        """
        c = connect(host='mongodb://localhost/mongoenginetest',
                    username='user',
                    password='pass')
        c = connect(
            host="mongodb://localhost/mongoenginetest", username="user", password="pass"
        )

        # OperationFailure means that mongoengine attempted authentication
        # w/ the provided username/password and failed - that's the desired
@@ -216,143 +545,118 @@ class ConnectionTest(unittest.TestCase):
        option in the URI.
        """
        # Create users
        c = connect('mongoenginetest')
        c.admin.system.users.remove({})
        c.admin.add_user('username2', 'password')
        c = connect("mongoenginetest")

        c.admin.system.users.delete_many({})
        c.admin.command("createUser", "username2", pwd="password", roles=["dbOwner"])

        # Authentication fails without "authSource"
        if IS_PYMONGO_3:
            test_conn = connect(
                'mongoenginetest', alias='test1',
                host='mongodb://username2:password@localhost/mongoenginetest'
            )
            self.assertRaises(OperationFailure, test_conn.server_info)
        else:
            self.assertRaises(
                MongoEngineConnectionError,
                connect, 'mongoenginetest', alias='test1',
                host='mongodb://username2:password@localhost/mongoenginetest'
            )
            self.assertRaises(MongoEngineConnectionError, get_db, 'test1')
        test_conn = connect(
            "mongoenginetest",
            alias="test1",
            host="mongodb://username2:password@localhost/mongoenginetest",
        )
        self.assertRaises(OperationFailure, test_conn.server_info)

        # Authentication succeeds with "authSource"
        authd_conn = connect(
            'mongoenginetest', alias='test2',
            host=('mongodb://username2:password@localhost/'
                  'mongoenginetest?authSource=admin')
            "mongoenginetest",
            alias="test2",
            host=(
                "mongodb://username2:password@localhost/"
                "mongoenginetest?authSource=admin"
            ),
        )
        db = get_db('test2')
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')
        db = get_db("test2")
        self.assertIsInstance(db, pymongo.database.Database)
        self.assertEqual(db.name, "mongoenginetest")

        # Clear all users
        authd_conn.admin.system.users.remove({})
        authd_conn.admin.system.users.delete_many({})

    def test_register_connection(self):
        """Ensure that connections with different aliases may be registered.
        """
        register_connection('testdb', 'mongoenginetest2')
        register_connection("testdb", "mongoenginetest2")

        self.assertRaises(MongoEngineConnectionError, get_connection)
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
        self.assertRaises(ConnectionFailure, get_connection)
        conn = get_connection("testdb")
        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

        db = get_db('testdb')
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest2')
        db = get_db("testdb")
        self.assertIsInstance(db, pymongo.database.Database)
        self.assertEqual(db.name, "mongoenginetest2")

    def test_register_connection_defaults(self):
        """Ensure that defaults are used when the host and port are None.
        """
        register_connection('testdb', 'mongoenginetest', host=None, port=None)
        register_connection("testdb", "mongoenginetest", host=None, port=None)

        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
        conn = get_connection("testdb")
        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

    def test_connection_kwargs(self):
        """Ensure that connection kwargs get passed to pymongo."""
        connect('mongoenginetest', alias='t1', tz_aware=True)
        conn = get_connection('t1')
        connect("mongoenginetest", alias="t1", tz_aware=True)
        conn = get_connection("t1")

        self.assertTrue(get_tz_awareness(conn))

        connect('mongoenginetest2', alias='t2')
        conn = get_connection('t2')
        connect("mongoenginetest2", alias="t2")
        conn = get_connection("t2")
        self.assertFalse(get_tz_awareness(conn))

    def test_connection_pool_via_kwarg(self):
        """Ensure we can specify a max connection pool size using
        a connection kwarg.
        """
        # Use "max_pool_size" or "maxpoolsize" depending on PyMongo version
        # (former was changed to the latter as described in
        # https://jira.mongodb.org/browse/PYTHON-854).
        # TODO remove once PyMongo < 3.0 support is dropped
        if pymongo.version_tuple[0] >= 3:
            pool_size_kwargs = {'maxpoolsize': 100}
        else:
            pool_size_kwargs = {'max_pool_size': 100}
        pool_size_kwargs = {"maxpoolsize": 100}

        conn = connect('mongoenginetest', alias='max_pool_size_via_kwarg', **pool_size_kwargs)
        conn = connect(
            "mongoenginetest", alias="max_pool_size_via_kwarg", **pool_size_kwargs
        )
        self.assertEqual(conn.max_pool_size, 100)

    def test_connection_pool_via_uri(self):
        """Ensure we can specify a max connection pool size using
        an option in a connection URI.
        """
        if pymongo.version_tuple[0] == 2 and pymongo.version_tuple[1] < 9:
            raise SkipTest('maxpoolsize as a URI option is only supported in PyMongo v2.9+')

        conn = connect(host='mongodb://localhost/test?maxpoolsize=100', alias='max_pool_size_via_uri')
        conn = connect(
            host="mongodb://localhost/test?maxpoolsize=100",
            alias="max_pool_size_via_uri",
        )
        self.assertEqual(conn.max_pool_size, 100)

    def test_write_concern(self):
        """Ensure write concern can be specified in connect() via
        a kwarg or as part of the connection URI.
        """
        conn1 = connect(alias='conn1', host='mongodb://localhost/testing?w=1&j=true')
        conn2 = connect('testing', alias='conn2', w=1, j=True)
        if IS_PYMONGO_3:
            self.assertEqual(conn1.write_concern.document, {'w': 1, 'j': True})
            self.assertEqual(conn2.write_concern.document, {'w': 1, 'j': True})
        else:
            self.assertEqual(dict(conn1.write_concern), {'w': 1, 'j': True})
            self.assertEqual(dict(conn2.write_concern), {'w': 1, 'j': True})
        conn1 = connect(alias="conn1", host="mongodb://localhost/testing?w=1&j=true")
        conn2 = connect("testing", alias="conn2", w=1, j=True)
        self.assertEqual(conn1.write_concern.document, {"w": 1, "j": True})
        self.assertEqual(conn2.write_concern.document, {"w": 1, "j": True})

    def test_connect_with_replicaset_via_uri(self):
        """Ensure connect() works when specifying a replicaSet via the
        MongoDB URI.
        """
        if IS_PYMONGO_3:
            c = connect(host='mongodb://localhost/test?replicaSet=local-rs')
            db = get_db()
            self.assertTrue(isinstance(db, pymongo.database.Database))
            self.assertEqual(db.name, 'test')
        else:
            # PyMongo < v3.x raises an exception:
            # "localhost:27017 is not a member of replica set local-rs"
            with self.assertRaises(MongoEngineConnectionError):
                c = connect(host='mongodb://localhost/test?replicaSet=local-rs')
        c = connect(host="mongodb://localhost/test?replicaSet=local-rs")
        db = get_db()
        self.assertIsInstance(db, pymongo.database.Database)
        self.assertEqual(db.name, "test")

    def test_connect_with_replicaset_via_kwargs(self):
        """Ensure connect() works when specifying a replicaSet via the
        connection kwargs
        """
        if IS_PYMONGO_3:
            c = connect(replicaset='local-rs')
            self.assertEqual(c._MongoClient__options.replica_set_name,
                             'local-rs')
            db = get_db()
            self.assertTrue(isinstance(db, pymongo.database.Database))
            self.assertEqual(db.name, 'test')
        else:
            # PyMongo < v3.x raises an exception:
            # "localhost:27017 is not a member of replica set local-rs"
            with self.assertRaises(MongoEngineConnectionError):
                c = connect(replicaset='local-rs')
        c = connect(replicaset="local-rs")
        self.assertEqual(c._MongoClient__options.replica_set_name, "local-rs")
        db = get_db()
        self.assertIsInstance(db, pymongo.database.Database)
        self.assertEqual(db.name, "test")

    def test_datetime(self):
        connect('mongoenginetest', tz_aware=True)
    def test_connect_tz_aware(self):
        connect("mongoenginetest", tz_aware=True)
        d = datetime.datetime(2010, 5, 5, tzinfo=utc)

        class DateDoc(Document):
@@ -364,27 +668,40 @@ class ConnectionTest(unittest.TestCase):
        date_doc = DateDoc.objects.first()
        self.assertEqual(d, date_doc.the_date)

    def test_multiple_connection_settings(self):
        connect('mongoenginetest', alias='t1', host="localhost")
    def test_read_preference_from_parse(self):
        conn = connect(
            host="mongodb://a1.vpc,a2.vpc,a3.vpc/prod?readPreference=secondaryPreferred"
        )
        self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_PREFERRED)

        connect('mongoenginetest2', alias='t2', host="127.0.0.1")
    def test_multiple_connection_settings(self):
        connect("mongoenginetest", alias="t1", host="localhost")

        connect("mongoenginetest2", alias="t2", host="127.0.0.1")

        mongo_connections = mongoengine.connection._connections
        self.assertEqual(len(mongo_connections.items()), 2)
        self.assertTrue('t1' in mongo_connections.keys())
        self.assertTrue('t2' in mongo_connections.keys())
        if not IS_PYMONGO_3:
            self.assertEqual(mongo_connections['t1'].host, 'localhost')
            self.assertEqual(mongo_connections['t2'].host, '127.0.0.1')
        else:
            # Handle PyMongo 3+ Async Connection
            # Ensure we are connected, throws ServerSelectionTimeoutError otherwise.
            # Purposely not catching exception to fail test if thrown.
            mongo_connections['t1'].server_info()
            mongo_connections['t2'].server_info()
            self.assertEqual(mongo_connections['t1'].address[0], 'localhost')
            self.assertEqual(mongo_connections['t2'].address[0], '127.0.0.1')
        self.assertIn("t1", mongo_connections.keys())
        self.assertIn("t2", mongo_connections.keys())

        # Handle PyMongo 3+ Async Connection
        # Ensure we are connected, throws ServerSelectionTimeoutError otherwise.
        # Purposely not catching exception to fail test if thrown.
        mongo_connections["t1"].server_info()
        mongo_connections["t2"].server_info()
        self.assertEqual(mongo_connections["t1"].address[0], "localhost")
        self.assertEqual(mongo_connections["t2"].address[0], "127.0.0.1")

    def test_connect_2_databases_uses_same_client_if_only_dbname_differs(self):
        c1 = connect(alias="testdb1", db="testdb1")
        c2 = connect(alias="testdb2", db="testdb2")
        self.assertIs(c1, c2)

    def test_connect_2_databases_uses_different_client_if_different_parameters(self):
        c1 = connect(alias="testdb1", db="testdb1", username="u1")
        c2 = connect(alias="testdb2", db="testdb2", username="u2")
        self.assertIsNot(c1, c2)


if __name__ == '__main__':
if __name__ == "__main__":
    unittest.main()

tests/test_context_managers.py
@@ -2,16 +2,20 @@ import unittest

from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.context_managers import (switch_db, switch_collection,
                                          no_sub_classes, no_dereference,
                                          query_counter)
from mongoengine.context_managers import (
    switch_db,
    switch_collection,
    no_sub_classes,
    no_dereference,
    query_counter,
)
from mongoengine.pymongo_support import count_documents


class ContextManagersTest(unittest.TestCase):

    def test_switch_db_context_manager(self):
        connect('mongoenginetest')
        register_connection('testdb-1', 'mongoenginetest2')
        connect("mongoenginetest")
        register_connection("testdb-1", "mongoenginetest2")

        class Group(Document):
            name = StringField()
@@ -21,7 +25,7 @@ class ContextManagersTest(unittest.TestCase):
        Group(name="hello - default").save()
        self.assertEqual(1, Group.objects.count())

        with switch_db(Group, 'testdb-1') as Group:
        with switch_db(Group, "testdb-1") as Group:

            self.assertEqual(0, Group.objects.count())

@@ -35,20 +39,21 @@ class ContextManagersTest(unittest.TestCase):
        self.assertEqual(1, Group.objects.count())

    def test_switch_collection_context_manager(self):
        connect('mongoenginetest')
        register_connection('testdb-1', 'mongoenginetest2')
        connect("mongoenginetest")
        register_connection(alias="testdb-1", db="mongoenginetest2")

        class Group(Document):
            name = StringField()

        Group.drop_collection()
        with switch_collection(Group, 'group1') as Group:
            Group.drop_collection()
        Group.drop_collection()  # drops in default

        with switch_collection(Group, "group1") as Group:
            Group.drop_collection()  # drops in group1

        Group(name="hello - group").save()
        self.assertEqual(1, Group.objects.count())

        with switch_collection(Group, 'group1') as Group:
        with switch_collection(Group, "group1") as Group:

            self.assertEqual(0, Group.objects.count())

@@ -64,7 +69,7 @@ class ContextManagersTest(unittest.TestCase):
    def test_no_dereference_context_manager_object_id(self):
        """Ensure that DBRef items in ListFields aren't dereferenced.
        """
        connect('mongoenginetest')
        connect("mongoenginetest")

        class User(Document):
            name = StringField()
@@ -78,31 +83,31 @@ class ContextManagersTest(unittest.TestCase):
        Group.drop_collection()

        for i in range(1, 51):
            User(name='user %s' % i).save()
            User(name="user %s" % i).save()

        user = User.objects.first()
        Group(ref=user, members=User.objects, generic=user).save()

        with no_dereference(Group) as NoDeRefGroup:
            self.assertTrue(Group._fields['members']._auto_dereference)
            self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference)
            self.assertTrue(Group._fields["members"]._auto_dereference)
            self.assertFalse(NoDeRefGroup._fields["members"]._auto_dereference)

        with no_dereference(Group) as Group:
            group = Group.objects.first()
            self.assertTrue(all([not isinstance(m, User)
                                 for m in group.members]))
            self.assertFalse(isinstance(group.ref, User))
            self.assertFalse(isinstance(group.generic, User))
            for m in group.members:
                self.assertNotIsInstance(m, User)
            self.assertNotIsInstance(group.ref, User)
            self.assertNotIsInstance(group.generic, User)

        self.assertTrue(all([isinstance(m, User)
                             for m in group.members]))
        self.assertTrue(isinstance(group.ref, User))
        self.assertTrue(isinstance(group.generic, User))
        for m in group.members:
            self.assertIsInstance(m, User)
        self.assertIsInstance(group.ref, User)
        self.assertIsInstance(group.generic, User)

    def test_no_dereference_context_manager_dbref(self):
        """Ensure that DBRef items in ListFields aren't dereferenced.
        """
        connect('mongoenginetest')
        connect("mongoenginetest")

        class User(Document):
            name = StringField()
@@ -116,33 +121,29 @@ class ContextManagersTest(unittest.TestCase):
        Group.drop_collection()

        for i in range(1, 51):
            User(name='user %s' % i).save()
            User(name="user %s" % i).save()

        user = User.objects.first()
        Group(ref=user, members=User.objects, generic=user).save()

        with no_dereference(Group) as NoDeRefGroup:
            self.assertTrue(Group._fields['members']._auto_dereference)
            self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference)
            self.assertTrue(Group._fields["members"]._auto_dereference)
            self.assertFalse(NoDeRefGroup._fields["members"]._auto_dereference)

        with no_dereference(Group) as Group:
            group = Group.objects.first()
            self.assertTrue(all([not isinstance(m, User)
                                 for m in group.members]))
            self.assertFalse(isinstance(group.ref, User))
            self.assertFalse(isinstance(group.generic, User))
            self.assertTrue(all([not isinstance(m, User) for m in group.members]))
            self.assertNotIsInstance(group.ref, User)
            self.assertNotIsInstance(group.generic, User)

        self.assertTrue(all([isinstance(m, User)
                             for m in group.members]))
        self.assertTrue(isinstance(group.ref, User))
        self.assertTrue(isinstance(group.generic, User))
        self.assertTrue(all([isinstance(m, User) for m in group.members]))
        self.assertIsInstance(group.ref, User)
        self.assertIsInstance(group.generic, User)

    def test_no_sub_classes(self):
        class A(Document):
            x = IntField()
            y = IntField()

            meta = {'allow_inheritance': True}
            meta = {"allow_inheritance": True}

        class B(A):
            z = IntField()
@@ -152,29 +153,29 @@ class ContextManagersTest(unittest.TestCase):

        A.drop_collection()

        A(x=10, y=20).save()
        A(x=15, y=30).save()
        B(x=20, y=40).save()
        B(x=30, y=50).save()
        C(x=40, y=60).save()
        A(x=10).save()
        A(x=15).save()
        B(x=20).save()
        B(x=30).save()
        C(x=40).save()

        self.assertEqual(A.objects.count(), 5)
        self.assertEqual(B.objects.count(), 3)
        self.assertEqual(C.objects.count(), 1)

        with no_sub_classes(A) as A:
        with no_sub_classes(A):
            self.assertEqual(A.objects.count(), 2)

            for obj in A.objects:
                self.assertEqual(obj.__class__, A)

        with no_sub_classes(B) as B:
        with no_sub_classes(B):
            self.assertEqual(B.objects.count(), 2)

            for obj in B.objects:
                self.assertEqual(obj.__class__, B)

        with no_sub_classes(C) as C:
        with no_sub_classes(C):
            self.assertEqual(C.objects.count(), 1)

            for obj in C.objects:
@@ -185,18 +186,141 @@ class ContextManagersTest(unittest.TestCase):
        self.assertEqual(B.objects.count(), 3)
        self.assertEqual(C.objects.count(), 1)

    def test_query_counter(self):
        connect('mongoenginetest')
    def test_no_sub_classes_modification_to_document_class_are_temporary(self):
        class A(Document):
            x = IntField()
            meta = {"allow_inheritance": True}

        class B(A):
            z = IntField()

        self.assertEqual(A._subclasses, ("A", "A.B"))
        with no_sub_classes(A):
            self.assertEqual(A._subclasses, ("A",))
        self.assertEqual(A._subclasses, ("A", "A.B"))

        self.assertEqual(B._subclasses, ("A.B",))
        with no_sub_classes(B):
            self.assertEqual(B._subclasses, ("A.B",))
        self.assertEqual(B._subclasses, ("A.B",))

    def test_no_subclass_context_manager_does_not_swallow_exception(self):
        class User(Document):
            name = StringField()

        with self.assertRaises(TypeError):
            with no_sub_classes(User):
                raise TypeError()

    def test_query_counter_does_not_swallow_exception(self):

        with self.assertRaises(TypeError):
            with query_counter() as q:
                raise TypeError()

    def test_query_counter_temporarily_modifies_profiling_level(self):
        connect("mongoenginetest")
        db = get_db()
        db.test.find({})

        initial_profiling_level = db.profiling_level()

        try:
            NEW_LEVEL = 1
            db.set_profiling_level(NEW_LEVEL)
            self.assertEqual(db.profiling_level(), NEW_LEVEL)
            with query_counter() as q:
                self.assertEqual(db.profiling_level(), 2)
            self.assertEqual(db.profiling_level(), NEW_LEVEL)
        except Exception:
            db.set_profiling_level(
                initial_profiling_level
            )  # Ensures it gets reset no matter the outcome of the test
            raise

    def test_query_counter(self):
        connect("mongoenginetest")
        db = get_db()

        collection = db.query_counter
        collection.drop()

        def issue_1_count_query():
            count_documents(collection, {})

        def issue_1_insert_query():
            collection.insert_one({"test": "garbage"})

        def issue_1_find_query():
            collection.find_one()

        counter = 0
        with query_counter() as q:
            self.assertEqual(q, counter)
            self.assertEqual(
                q, counter
            )  # Ensures previous count query did not get counted

            for _ in range(10):
                issue_1_insert_query()
                counter += 1
            self.assertEqual(q, counter)

            for _ in range(4):
                issue_1_find_query()
                counter += 1
            self.assertEqual(q, counter)

            for _ in range(3):
                issue_1_count_query()
                counter += 1
            self.assertEqual(q, counter)

            self.assertEqual(int(q), counter)  # test __int__
            self.assertEqual(repr(q), str(int(q)))  # test __repr__
            self.assertGreater(q, -1)  # test __gt__
            self.assertGreaterEqual(q, int(q))  # test __gte__
            self.assertNotEqual(q, -1)
            self.assertLess(q, 1000)
            self.assertLessEqual(q, int(q))

    def test_query_counter_counts_getmore_queries(self):
        connect("mongoenginetest")
        db = get_db()

        collection = db.query_counter
        collection.drop()

        many_docs = [{"test": "garbage %s" % i} for i in range(150)]
        collection.insert_many(
            many_docs
        )  # first batch of documents contains 101 documents

        with query_counter() as q:
            self.assertEqual(0, q)
            self.assertEqual(q, 0)
            list(collection.find())
            self.assertEqual(q, 2)  # 1st select + 1 getmore

            for i in range(1, 51):
                db.test.find({}).count()
    def test_query_counter_ignores_particular_queries(self):
        connect("mongoenginetest")
        db = get_db()

            self.assertEqual(50, q)
        collection = db.query_counter
        collection.insert_many([{"test": "garbage %s" % i} for i in range(10)])

if __name__ == '__main__':
        with query_counter() as q:
            self.assertEqual(q, 0)
            cursor = collection.find()
            self.assertEqual(q, 0)  # cursor wasn't opened yet
            _ = next(cursor)  # opens the cursor and fires the find query
            self.assertEqual(q, 1)

            cursor.close()  # issues a `killcursors` query that is ignored by the context
            self.assertEqual(q, 1)
            _ = (
                db.system.indexes.find_one()
            )  # queries on db.system.indexes are ignored as well
            self.assertEqual(q, 1)


if __name__ == "__main__":
    unittest.main()

tests/test_datastructures.py
@@ -1,6 +1,360 @@
|
||||
import unittest
|
||||
from six import iterkeys
|
||||
|
||||
from mongoengine.base.datastructures import StrictDict, SemiStrictDict
|
||||
from mongoengine import Document
|
||||
from mongoengine.base.datastructures import StrictDict, BaseList, BaseDict
|
||||
|
||||
|
||||
class DocumentStub(object):
|
||||
def __init__(self):
|
||||
self._changed_fields = []
|
||||
|
||||
def _mark_as_changed(self, key):
|
||||
self._changed_fields.append(key)
|
||||
|
||||
|
||||
class TestBaseDict(unittest.TestCase):
    @staticmethod
    def _get_basedict(dict_items):
        """Get a BaseDict bound to a fake document instance"""
        fake_doc = DocumentStub()
        base_dict = BaseDict(dict_items, instance=None, name="my_name")
        base_dict._instance = (
            fake_doc
        )  # hack to inject the mock; it does not work via the constructor
        return base_dict

    def test___init___(self):
        class MyDoc(Document):
            pass

        dict_items = {"k": "v"}
        doc = MyDoc()
        base_dict = BaseDict(dict_items, instance=doc, name="my_name")
        self.assertIsInstance(base_dict._instance, Document)
        self.assertEqual(base_dict._name, "my_name")
        self.assertEqual(base_dict, dict_items)

    def test_setdefault_calls_mark_as_changed(self):
        base_dict = self._get_basedict({})
        base_dict.setdefault("k", "v")
        self.assertEqual(base_dict._instance._changed_fields, [base_dict._name])

    def test_popitem_calls_mark_as_changed(self):
        base_dict = self._get_basedict({"k": "v"})
        self.assertEqual(base_dict.popitem(), ("k", "v"))
        self.assertEqual(base_dict._instance._changed_fields, [base_dict._name])
        self.assertFalse(base_dict)

    def test_pop_calls_mark_as_changed(self):
        base_dict = self._get_basedict({"k": "v"})
        self.assertEqual(base_dict.pop("k"), "v")
        self.assertEqual(base_dict._instance._changed_fields, [base_dict._name])
        self.assertFalse(base_dict)

    def test_pop_does_not_mark_as_changed_when_it_fails(self):
        base_dict = self._get_basedict({"k": "v"})
        with self.assertRaises(KeyError):
            base_dict.pop("X")
        self.assertFalse(base_dict._instance._changed_fields)

    def test_clear_calls_mark_as_changed(self):
        base_dict = self._get_basedict({"k": "v"})
        base_dict.clear()
        self.assertEqual(base_dict._instance._changed_fields, ["my_name"])
        self.assertEqual(base_dict, {})

    def test___delitem___calls_mark_as_changed(self):
        base_dict = self._get_basedict({"k": "v"})
        del base_dict["k"]
        self.assertEqual(base_dict._instance._changed_fields, ["my_name.k"])
        self.assertEqual(base_dict, {})

    def test___getitem____KeyError(self):
        base_dict = self._get_basedict({})
        with self.assertRaises(KeyError):
            base_dict["new"]

    def test___getitem____simple_value(self):
        base_dict = self._get_basedict({"k": "v"})
        self.assertEqual(base_dict["k"], "v")

    def test___getitem____sublist_gets_converted_to_BaseList(self):
        base_dict = self._get_basedict({"k": [0, 1, 2]})
        sub_list = base_dict["k"]
        self.assertEqual(sub_list, [0, 1, 2])
        self.assertIsInstance(sub_list, BaseList)
        self.assertIs(sub_list._instance, base_dict._instance)
        self.assertEqual(sub_list._name, "my_name.k")
        self.assertEqual(base_dict._instance._changed_fields, [])

        # Challenge mark_as_changed from sublist
        sub_list[1] = None
        self.assertEqual(base_dict._instance._changed_fields, ["my_name.k.1"])

    def test___getitem____subdict_gets_converted_to_BaseDict(self):
        base_dict = self._get_basedict({"k": {"subk": "subv"}})
        sub_dict = base_dict["k"]
        self.assertEqual(sub_dict, {"subk": "subv"})
        self.assertIsInstance(sub_dict, BaseDict)
        self.assertIs(sub_dict._instance, base_dict._instance)
        self.assertEqual(sub_dict._name, "my_name.k")
        self.assertEqual(base_dict._instance._changed_fields, [])

        # Challenge mark_as_changed from subdict
        sub_dict["subk"] = None
        self.assertEqual(base_dict._instance._changed_fields, ["my_name.k.subk"])
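
# Aside (not part of the diff): the two tests above pin down the lazy
# wrapping strategy: nested values are stored raw and only wrapped in a
# BaseDict/BaseList at access time, under a dotted name ("my_name.k"), so a
# mutation at any depth reports its full path to the owning document.
# Standalone illustration using the same injection hack as _get_basedict:
from mongoengine.base.datastructures import BaseDict


class Owner(object):
    def __init__(self):
        self._changed_fields = []

    def _mark_as_changed(self, key):
        self._changed_fields.append(key)


owner = Owner()
d = BaseDict({"k": {"subk": 1}}, instance=None, name="my_name")
d._instance = owner
d["k"]["subk"] = 2  # the temporary BaseDict wrapper reports the full path
assert owner._changed_fields == ["my_name.k.subk"]
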
    def test_get_sublist_gets_converted_to_BaseList_just_like__getitem__(self):
        base_dict = self._get_basedict({"k": [0, 1, 2]})
        sub_list = base_dict.get("k")
        self.assertEqual(sub_list, [0, 1, 2])
        self.assertIsInstance(sub_list, BaseList)

    def test_get_returns_the_same_as___getitem__(self):
        base_dict = self._get_basedict({"k": [0, 1, 2]})
        get_ = base_dict.get("k")
        getitem_ = base_dict["k"]
        self.assertEqual(get_, getitem_)

    def test_get_default(self):
        base_dict = self._get_basedict({})
        sentinel = object()
        self.assertEqual(base_dict.get("new"), None)
        self.assertIs(base_dict.get("new", sentinel), sentinel)

    def test___setitem___calls_mark_as_changed(self):
        base_dict = self._get_basedict({})
        base_dict["k"] = "v"
        self.assertEqual(base_dict._instance._changed_fields, ["my_name.k"])
        self.assertEqual(base_dict, {"k": "v"})

    def test_update_calls_mark_as_changed(self):
        base_dict = self._get_basedict({})
        base_dict.update({"k": "v"})
        self.assertEqual(base_dict._instance._changed_fields, ["my_name"])

    def test___setattr____not_tracked_by_changes(self):
        base_dict = self._get_basedict({})
        base_dict.a_new_attr = "test"
        self.assertEqual(base_dict._instance._changed_fields, [])

    def test___delattr____tracked_by_changes(self):
        # This is probably a bug, as __setattr__ is not tracked.
        # Worse, an attribute could share its name with a key.
        base_dict = self._get_basedict({})
        base_dict.a_new_attr = "test"
        del base_dict.a_new_attr
        self.assertEqual(base_dict._instance._changed_fields, ["my_name.a_new_attr"])


class TestBaseList(unittest.TestCase):
    @staticmethod
    def _get_baselist(list_items):
        """Get a BaseList bound to a fake document instance"""
        fake_doc = DocumentStub()
        base_list = BaseList(list_items, instance=None, name="my_name")
        base_list._instance = (
            fake_doc
        )  # hack to inject the mock; it does not work via the constructor
        return base_list

    def test___init___(self):
        class MyDoc(Document):
            pass

        list_items = [True]
        doc = MyDoc()
        base_list = BaseList(list_items, instance=doc, name="my_name")
        self.assertIsInstance(base_list._instance, Document)
        self.assertEqual(base_list._name, "my_name")
        self.assertEqual(base_list, list_items)

    def test___iter__(self):
        values = [True, False, True, False]
        base_list = BaseList(values, instance=None, name="my_name")
        self.assertEqual(values, list(base_list))

    def test___iter___allows_modification_while_iterating_without_error(self):
        # A regular list allows this, so the subclass must comply with it.
        base_list = BaseList([True, False, True, False], instance=None, name="my_name")
        for idx, val in enumerate(base_list):
            if val:
                base_list.pop(idx)

    def test_append_calls_mark_as_changed(self):
        base_list = self._get_baselist([])
        self.assertFalse(base_list._instance._changed_fields)
        base_list.append(True)
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])

    def test_subclass_append(self):
        # Due to the way mark_as_changed_wrapper is implemented,
        # it is worth exercising subclasses explicitly.
        class SubBaseList(BaseList):
            pass

        base_list = SubBaseList([], instance=None, name="my_name")
        base_list.append(True)
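
# Aside (not part of the diff): the mutating methods are wrapped by a helper
# so they report changes. A decorator along these lines (a sketch; the real
# mark_as_changed_wrapper may differ) shows why subclassing still works:
def mark_as_changed_wrapper(parent_method):
    def wrapper(self, *args, **kwargs):
        result = parent_method(self, *args, **kwargs)
        self._mark_as_changed()
        return result

    return wrapper


class MiniTrackedList(list):
    def _mark_as_changed(self):
        print("changed")

    append = mark_as_changed_wrapper(list.append)


class SubMiniTrackedList(MiniTrackedList):
    pass


# Calling append on the subclass still prints "changed": parent_method was
# resolved against list once, and self remains the subclass instance.
SubMiniTrackedList().append(1)
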
    def test___getitem__using_simple_index(self):
        base_list = self._get_baselist([0, 1, 2])
        self.assertEqual(base_list[0], 0)
        self.assertEqual(base_list[1], 1)
        self.assertEqual(base_list[-1], 2)

    def test___getitem__using_slice(self):
        base_list = self._get_baselist([0, 1, 2])
        self.assertEqual(base_list[1:3], [1, 2])
        self.assertEqual(base_list[0:3:2], [0, 2])

    def test___getitem___using_slice_returns_list(self):
        # Bug: slicing does not properly handle the instance
        # and the mark_as_changed behaviour.
        base_list = self._get_baselist([0, 1, 2])
        sliced = base_list[1:3]
        self.assertEqual(sliced, [1, 2])
        self.assertIsInstance(sliced, list)
        self.assertEqual(base_list._instance._changed_fields, [])

    def test___getitem__sublist_returns_BaseList_bound_to_instance(self):
        base_list = self._get_baselist([[1, 2], [3, 4]])
        sub_list = base_list[0]
        self.assertEqual(sub_list, [1, 2])
        self.assertIsInstance(sub_list, BaseList)
        self.assertIs(sub_list._instance, base_list._instance)
        self.assertEqual(sub_list._name, "my_name.0")
        self.assertEqual(base_list._instance._changed_fields, [])

        # Challenge mark_as_changed from sublist
        sub_list[1] = None
        self.assertEqual(base_list._instance._changed_fields, ["my_name.0.1"])

    def test___getitem__subdict_returns_BaseDict_bound_to_instance(self):
        base_list = self._get_baselist([{"subk": "subv"}])
        sub_dict = base_list[0]
        self.assertEqual(sub_dict, {"subk": "subv"})
        self.assertIsInstance(sub_dict, BaseDict)
        self.assertIs(sub_dict._instance, base_list._instance)
        self.assertEqual(sub_dict._name, "my_name.0")
        self.assertEqual(base_list._instance._changed_fields, [])

        # Challenge mark_as_changed from subdict
        sub_dict["subk"] = None
        self.assertEqual(base_list._instance._changed_fields, ["my_name.0.subk"])

    def test_extend_calls_mark_as_changed(self):
        base_list = self._get_baselist([])
        base_list.extend([True])
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])

    def test_insert_calls_mark_as_changed(self):
        base_list = self._get_baselist([])
        base_list.insert(0, True)
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])

    def test_remove_calls_mark_as_changed(self):
        base_list = self._get_baselist([True])
        base_list.remove(True)
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])

    def test_remove_does_not_mark_as_changed_when_it_fails(self):
        base_list = self._get_baselist([True])
        with self.assertRaises(ValueError):
            base_list.remove(False)
        self.assertFalse(base_list._instance._changed_fields)

    def test_pop_calls_mark_as_changed(self):
        base_list = self._get_baselist([True])
        base_list.pop()
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])

    def test_reverse_calls_mark_as_changed(self):
        base_list = self._get_baselist([True, False])
        base_list.reverse()
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])

    def test___delitem___calls_mark_as_changed(self):
        base_list = self._get_baselist([True])
        del base_list[0]
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])

    def test___setitem___with_full_slice_calls_mark_as_changed(self):
        base_list = self._get_baselist([])
        base_list[:] = [
            0,
            1,
        ]  # Will use __setslice__ under py2 and __setitem__ under py3
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])
        self.assertEqual(base_list, [0, 1])
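
# Aside (not part of the diff): under py3 there is no __setslice__;
# lst[:] = ... reaches __setitem__ with a slice object, so one override
# covers every variant mentioned in the surrounding comments. Quick check:
class Probe(list):
    def __setitem__(self, key, value):
        print("key=%r" % (key,))
        super(Probe, self).__setitem__(key, value)


p = Probe([0, 1, 2])
p[:] = [9, 9]    # prints key=slice(None, None, None)
p[0:2] = [7, 7]  # prints key=slice(0, 2, None)
p[0] = 0         # prints key=0
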
    def test___setitem___with_partial_slice_calls_mark_as_changed(self):
        base_list = self._get_baselist([0, 1, 2])
        base_list[0:2] = [
            1,
            0,
        ]  # Will use __setslice__ under py2 and __setitem__ under py3
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])
        self.assertEqual(base_list, [1, 0, 2])

    def test___setitem___with_step_slice_calls_mark_as_changed(self):
        base_list = self._get_baselist([0, 1, 2])
        base_list[0:3:2] = [-1, -2]  # uses __setitem__ under both py2 and py3
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])
        self.assertEqual(base_list, [-1, 1, -2])

    def test___setitem___with_slice(self):
        base_list = self._get_baselist([0, 1, 2, 3, 4, 5])
        base_list[0:6:2] = [None, None, None]
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])
        self.assertEqual(base_list, [None, 1, None, 3, None, 5])

    def test___setitem___item_0_calls_mark_as_changed(self):
        base_list = self._get_baselist([True])
        base_list[0] = False
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])
        self.assertEqual(base_list, [False])

    def test___setitem___item_1_calls_mark_as_changed(self):
        base_list = self._get_baselist([True, True])
        base_list[1] = False
        self.assertEqual(base_list._instance._changed_fields, ["my_name.1"])
        self.assertEqual(base_list, [True, False])

    def test___delslice___calls_mark_as_changed(self):
        base_list = self._get_baselist([0, 1])
        del base_list[0:1]
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])
        self.assertEqual(base_list, [1])

    def test___iadd___calls_mark_as_changed(self):
        base_list = self._get_baselist([True])
        base_list += [False]
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])

    def test___imul___calls_mark_as_changed(self):
        base_list = self._get_baselist([True])
        self.assertEqual(base_list._instance._changed_fields, [])
        base_list *= 2
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])

    def test_sort_does_not_mark_as_changed_when_it_fails(self):
        base_list = self._get_baselist([True])
        with self.assertRaises(TypeError):
            base_list.sort(key=1)

        self.assertEqual(base_list._instance._changed_fields, [])

    def test_sort_calls_mark_as_changed(self):
        base_list = self._get_baselist([True, False])
        base_list.sort()
        self.assertEqual(base_list._instance._changed_fields, ["my_name"])

    def test_sort_calls_with_key(self):
        base_list = self._get_baselist([1, 2, 11])
        base_list.sort(key=lambda i: str(i))
        self.assertEqual(base_list, [1, 11, 2])


class TestStrictDict(unittest.TestCase):

@@ -14,6 +368,20 @@ class TestStrictDict(unittest.TestCase):
        d = self.dtype(a=1, b=1, c=1)
        self.assertEqual((d.a, d.b, d.c), (1, 1, 1))

    def test_iterkeys(self):
        d = self.dtype(a=1)
        self.assertEqual(list(iterkeys(d)), ["a"])

    def test_len(self):
        d = self.dtype(a=1)
        self.assertEqual(len(d), 1)

    def test_pop(self):
        d = self.dtype(a=1)
        self.assertIn("a", d)
        d.pop("a")
        self.assertNotIn("a", d)

    def test_repr(self):
        d = self.dtype(a=1, b=2, c=3)
        self.assertEqual(repr(d), '{"a": 1, "b": 2, "c": 3}')
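
# Aside (not part of the diff): StrictDict classes are generated per key set
# via StrictDict.create, as the comparison tests further down show; keys
# outside the declared set are rejected. A minimal usage sketch (self.dtype
# in these tests is presumably built the same way):
from mongoengine.base.datastructures import StrictDict

D = StrictDict.create(("a", "b", "c"))
d = D(a=1, b=2)
d.c = 3
assert (d.a, d.b, d.c) == (1, 2, 3)
try:
    d.x = 4  # not in the declared key set
except AttributeError:
    print("unknown attribute rejected")
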
@@ -47,7 +415,7 @@ class TestStrictDict(unittest.TestCase):
        d = self.dtype()
        d.a = 1
        self.assertEqual(d.a, 1)
        self.assertRaises(AttributeError, getattr, d, "b")

    def test_setattr_raises_on_nonexisting_attr(self):
        d = self.dtype()

@@ -61,59 +429,20 @@ class TestStrictDict(unittest.TestCase):

    def test_get(self):
        d = self.dtype(a=1)
        self.assertEqual(d.get("a"), 1)
        self.assertEqual(d.get("b", "bla"), "bla")

    def test_items(self):
        d = self.dtype(a=1)
        self.assertEqual(d.items(), [("a", 1)])
        d = self.dtype(a=1, b=2)
        self.assertEqual(d.items(), [("a", 1), ("b", 2)])

    def test_mappings_protocol(self):
        d = self.dtype(a=1, b=2)
        self.assertEqual(dict(d), {"a": 1, "b": 2})
        self.assertEqual(dict(**d), {"a": 1, "b": 2})
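
# Aside (not part of the diff): dict(**d) in the test above only works when
# d satisfies the mapping protocol, i.e. exposes keys() plus __getitem__.
# Standalone check with a hand-rolled mapping:
class MiniMapping(object):
    def __init__(self, data):
        self._data = data

    def keys(self):
        return list(self._data)

    def __getitem__(self, key):
        return self._data[key]


m = MiniMapping({"a": 1, "b": 2})
assert dict(**m) == {"a": 1, "b": 2}
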

class TestSemiStrictDict(TestStrictDict):
    def strict_dict_class(self, *args, **kwargs):
        return SemiStrictDict.create(*args, **kwargs)

    def test_init_fails_on_nonexisting_attrs(self):
        # disable irrelevant test
        pass

    def test_setattr_raises_on_nonexisting_attr(self):
        # disable irrelevant test
        pass

    def test_setattr_getattr_nonexisting_attr_succeeds(self):
        d = self.dtype()
        d.x = 1
        self.assertEqual(d.x, 1)

    def test_init_succeeds_with_nonexisting_attrs(self):
        d = self.dtype(a=1, b=1, c=1, x=2)
        self.assertEqual((d.a, d.b, d.c, d.x), (1, 1, 1, 2))

    def test_iter_with_nonexisting_attrs(self):
        d = self.dtype(a=1, b=1, c=1, x=2)
        self.assertEqual(list(d), ['a', 'b', 'c', 'x'])

    def test_iteritems_with_nonexisting_attrs(self):
        d = self.dtype(a=1, b=1, c=1, x=2)
        self.assertEqual(list(d.iteritems()), [('a', 1), ('b', 1), ('c', 1), ('x', 2)])

    def test_cmp_with_strict_dicts(self):
        d = self.dtype(a=1, b=1, c=1)
        dd = StrictDict.create(("a", "b", "c"))(a=1, b=1, c=1)
        self.assertEqual(d, dd)

    def test_cmp_with_strict_dict_with_nonexisting_attrs(self):
        d = self.dtype(a=1, b=1, c=1, x=2)
        dd = StrictDict.create(("a", "b", "c", "x"))(a=1, b=1, c=1, x=2)
        self.assertEqual(d, dd)
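
# Aside (not part of the diff): SemiStrictDict accepts attributes outside the
# declared key set, which is exactly what the overridden tests above exercise.
# A usage sketch (SemiStrictDict only exists in older MongoEngine releases,
# and this diff appears to be removing these tests):
from mongoengine.base.datastructures import SemiStrictDict

D = SemiStrictDict.create(("a", "b", "c"))
d = D(a=1)
d.x = 2  # a plain StrictDict would raise AttributeError here
assert (d.a, d.x) == (1, 2)
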

if __name__ == "__main__":
    unittest.main()

File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff