Mirror of https://github.com/TandoorRecipes/recipes.git (synced 2025-12-25 11:19:39 -05:00)

Compare commits: 1800 commits
(Commit list omitted: the snapshot preserved only the abbreviated SHA1 of each of the 1800 commits; the Author, Date, and message columns were empty.)

.dockerignore

@@ -3,7 +3,6 @@ npm-debug.log
Dockerfile*
docker-compose*
.dockerignore
.git
.gitignore
README.md
LICENSE

.env.template

@@ -2,6 +2,10 @@
# when unset: 1 (true) - dont unset this, just for development
DEBUG=0
SQL_DEBUG=0
DEBUG_TOOLBAR=0
# Gunicorn log level for debugging (default value is "info" when unset)
# (see https://docs.gunicorn.org/en/stable/settings.html#loglevel for available settings)
# GUNICORN_LOG_LEVEL="debug"

# HTTP port to bind to
# TANDOOR_PORT=8080
@@ -9,9 +13,18 @@ SQL_DEBUG=0
# hosts the application can run under e.g. recipes.mydomain.com,cooking.mydomain.com,...
ALLOWED_HOSTS=*

# Cross Site Request Forgery protection
# (https://docs.djangoproject.com/en/4.2/ref/settings/#std-setting-CSRF_TRUSTED_ORIGINS)
# CSRF_TRUSTED_ORIGINS = []

# Cross Origin Resource Sharing
# (https://github.com/adamchainz/django-cors-header)
# CORS_ALLOW_ALL_ORIGINS = True

# random secret key, use for example `base64 /dev/urandom | head -c50` to generate one
# ---------------------------- REQUIRED -------------------------
# ---------------------------- AT LEAST ONE REQUIRED -------------------------
SECRET_KEY=
SECRET_KEY_FILE=
# ---------------------------------------------------------------

# your default timezone See https://timezonedb.com/time-zones for a list of timezones
@@ -23,8 +36,9 @@ DB_ENGINE=django.db.backends.postgresql
POSTGRES_HOST=db_recipes
POSTGRES_PORT=5432
POSTGRES_USER=djangouser
# ---------------------------- REQUIRED -------------------------
# ---------------------------- AT LEAST ONE REQUIRED -------------------------
POSTGRES_PASSWORD=
POSTGRES_PASSWORD_FILE=
# ---------------------------------------------------------------
POSTGRES_DB=djangodb

@@ -68,6 +82,10 @@ SHOPPING_MIN_AUTOSYNC_INTERVAL=5
# when unset: 1 (true) - this is temporary until an appropriate amount of time has passed for everyone to migrate
GUNICORN_MEDIA=0

# GUNICORN SERVER RELATED SETTINGS (see https://docs.gunicorn.org/en/stable/design.html#how-many-workers for recommended settings)
# GUNICORN_WORKERS=1
# GUNICORN_THREADS=1

# S3 Media settings: store mediafiles in s3 or any compatible storage backend (e.g. minio)
# as long as S3_ACCESS_KEY is not set S3 features are disabled
# S3_ACCESS_KEY=
@@ -77,6 +95,7 @@ GUNICORN_MEDIA=0
# S3_QUERYSTRING_AUTH=1 # default true, set to 0 to serve media from a public bucket without signed urls
# S3_QUERYSTRING_EXPIRE=3600 # number of seconds querystring are valid for
# S3_ENDPOINT_URL= # when using a custom endpoint like minio
# S3_CUSTOM_DOMAIN= # when using a CDN/proxy to S3 (see https://github.com/TandoorRecipes/recipes/issues/1943)

# Email Settings, see https://docs.djangoproject.com/en/3.2/ref/settings/#email-host
# Required for email confirmation and password reset (automatically activates if host is set)
@@ -86,13 +105,17 @@ GUNICORN_MEDIA=0
# EMAIL_HOST_PASSWORD=
# EMAIL_USE_TLS=0
# EMAIL_USE_SSL=0
# DEFAULT_FROM_EMAIL= # email sender address (default 'webmaster@localhost')
# ACCOUNT_EMAIL_SUBJECT_PREFIX= # prefix used for account related emails (default "[Tandoor Recipes] ")
# email sender address (default 'webmaster@localhost')
# DEFAULT_FROM_EMAIL=
# prefix used for account related emails (default "[Tandoor Recipes] ")
# ACCOUNT_EMAIL_SUBJECT_PREFIX=

# allow authentication via reverse proxy (e.g. authelia), leave off if you dont know what you are doing
# see docs for more information https://vabene1111.github.io/recipes/features/authentication/
# allow authentication via the REMOTE-USER header (can be used for e.g. authelia).
# ATTENTION: Leave off if you don't know what you are doing! Enabling this without proper configuration will enable anybody
# to login with any username!
# See docs for additional information: https://docs.tandoor.dev/features/authentication/#reverse-proxy-authentication
# when unset: 0 (false)
REVERSE_PROXY_AUTH=0
REMOTE_USER_AUTH=0

# Default settings for spaces, apply per space and can be changed in the admin view
# SPACE_DEFAULT_MAX_RECIPES=0 # 0=unlimited recipes
@@ -100,7 +123,8 @@ REVERSE_PROXY_AUTH=0
# SPACE_DEFAULT_MAX_FILES=0 # Maximum file storage for space in MB. 0 for unlimited, -1 to disable file upload.
# SPACE_DEFAULT_ALLOW_SHARING=1 # Allow users to share recipes with public links

# allow people to create accounts on your application instance (without an invite link)
# allow people to create local accounts on your application instance (without an invite link)
# social accounts will always be able to sign up
# when unset: 0 (false)
# ENABLE_SIGNUP=0

@@ -119,7 +143,7 @@ REVERSE_PROXY_AUTH=0
# ENABLE_METRICS=0

# allows you to setup OAuth providers
# see docs for more information https://vabene1111.github.io/recipes/features/authentication/
# see docs for more information https://docs.tandoor.dev/features/authentication/
# SOCIAL_PROVIDERS = allauth.socialaccount.providers.github, allauth.socialaccount.providers.nextcloud,

# Should a newly created user from a social provider get assigned to the default space and given permission by default ?
@@ -150,6 +174,7 @@ REVERSE_PROXY_AUTH=0
#AUTH_LDAP_BIND_PASSWORD=
#AUTH_LDAP_USER_SEARCH_BASE_DN=
#AUTH_LDAP_TLS_CACERTFILE=
#AUTH_LDAP_START_TLS=

# Enables exporting PDF (see export docs)
# Disabled by default, uncomment to enable

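The template above now accepts either inline secret values or the `*_FILE` variants. A minimal bash sketch of both options, using the key-generation command suggested in the template comment; the `secrets/secret_key` path is illustrative, not something the template prescribes:

```bash
# Generate a random Django secret key (command taken from the template comment above).
SECRET_KEY="$(base64 /dev/urandom | head -c50)"

# Option A: put the value directly into .env.
echo "SECRET_KEY=${SECRET_KEY}" >> .env

# Option B (commented out): keep the value out of .env and point SECRET_KEY_FILE at a file,
# e.g. a Docker secret mounted at /run/secrets/<name>; POSTGRES_PASSWORD_FILE works the same way.
# mkdir -p secrets && printf '%s' "${SECRET_KEY}" > secrets/secret_key
# echo "SECRET_KEY_FILE=secrets/secret_key" >> .env
```
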
.github/dependabot.yml (vendored): 5 lines changed

@@ -14,3 +14,8 @@ updates:
    directory: "/vue/"
    schedule:
      interval: "monthly"

  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "monthly"

.github/workflows/build-docker-open-data.yml (vendored, new file): 110 lines

@@ -0,0 +1,110 @@
name: Build Docker Container with open data plugin installed

on: push

jobs:
  build-container:
    name: Build ${{ matrix.name }} Container
    runs-on: ubuntu-latest
    if: github.repository_owner == 'TandoorRecipes'
    continue-on-error: ${{ matrix.continue-on-error }}
    permissions:
      contents: read
      packages: write
    strategy:
      matrix:
        include:
          # Standard build config
          - name: Standard
            dockerfile: Dockerfile
            platforms: linux/amd64,linux/arm64
            suffix: ""
            continue-on-error: false
    steps:
      - uses: actions/checkout@v3

      - name: Get version number
        id: get_version
        run: |
          if [[ "$GITHUB_REF" = refs/tags/* ]]; then
            echo "VERSION=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_OUTPUT
          elif [[ "$GITHUB_REF" = refs/heads/beta ]]; then
            echo VERSION=beta >> $GITHUB_OUTPUT
          else
            echo VERSION=develop >> $GITHUB_OUTPUT
          fi

      # clone open data plugin
      - name: clone open data plugin repo
        uses: actions/checkout@master
        with:
          repository: TandoorRecipes/open_data_plugin
          ref: master
          path: ./recipes/plugins/open_data_plugin

      # Build Vue frontend
      - uses: actions/setup-node@v3
        with:
          node-version: '18'
          cache: yarn
          cache-dependency-path: vue/yarn.lock
      - name: Install dependencies
        working-directory: ./vue
        run: yarn install --frozen-lockfile
      - name: Build dependencies
        working-directory: ./vue
        run: yarn build

      - name: Setup Open Data Plugin Links
        working-directory: ./recipes/plugins/open_data_plugin
        run: python setup_repo.py

      - name: Build Open Data Frontend
        working-directory: ./recipes/plugins/open_data_plugin/vue
        run: yarn build

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        if: github.secret_source == 'Actions'
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        if: github.secret_source == 'Actions'
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ github.token }}
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            vabene1111/recipes
            ghcr.io/TandoorRecipes/recipes
          flavor: |
            latest=false
            suffix=${{ matrix.suffix }}
          tags: |
            type=raw,value=latest,suffix=-open-data-plugin,enable=${{ startsWith(github.ref, 'refs/tags/') }}
            type=semver,suffix=-open-data-plugin,pattern={{version}}
            type=semver,suffix=-open-data-plugin,pattern={{major}}.{{minor}}
            type=semver,suffix=-open-data-plugin,pattern={{major}}
            type=ref,suffix=-open-data-plugin,event=branch
      - name: Build and Push
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ${{ matrix.dockerfile }}
          pull: true
          push: ${{ github.secret_source == 'Actions' }}
          platforms: ${{ matrix.platforms }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

.github/workflows/build-docker.yml (vendored, new file): 126 lines

@@ -0,0 +1,126 @@
name: Build Docker Container

on: push

jobs:
  build-container:
    name: Build ${{ matrix.name }} Container
    runs-on: ubuntu-latest
    if: github.repository_owner == 'TandoorRecipes'
    continue-on-error: ${{ matrix.continue-on-error }}
    permissions:
      contents: read
      packages: write
    strategy:
      matrix:
        include:
          # Standard build config
          - name: Standard
            dockerfile: Dockerfile
            platforms: linux/amd64,linux/arm64,linux/arm/v7
            suffix: ""
            continue-on-error: false
    steps:
      - uses: actions/checkout@v3

      - name: Get version number
        id: get_version
        run: |
          if [[ "$GITHUB_REF" = refs/tags/* ]]; then
            echo "VERSION=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_OUTPUT
          elif [[ "$GITHUB_REF" = refs/heads/beta ]]; then
            echo VERSION=beta >> $GITHUB_OUTPUT
          else
            echo VERSION=develop >> $GITHUB_OUTPUT
          fi

      # Build Vue frontend
      - uses: actions/setup-node@v3
        with:
          node-version: '18'
          cache: yarn
          cache-dependency-path: vue/yarn.lock
      - name: Install dependencies
        working-directory: ./vue
        run: yarn install --frozen-lockfile
      - name: Build dependencies
        working-directory: ./vue
        run: yarn build

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        if: github.secret_source == 'Actions'
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        if: github.secret_source == 'Actions'
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ github.token }}
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            vabene1111/recipes
            ghcr.io/TandoorRecipes/recipes
          flavor: |
            latest=false
            suffix=${{ matrix.suffix }}
          tags: |
            type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') }}
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}}
            type=ref,event=branch
      - name: Build and Push
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ${{ matrix.dockerfile }}
          pull: true
          push: ${{ github.secret_source == 'Actions' }}
          platforms: ${{ matrix.platforms }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

  notify-stable:
    name: Notify Stable
    runs-on: ubuntu-latest
    needs: build-container
    if: startsWith(github.ref, 'refs/tags/')
    steps:
      - name: Set tag name
        run: |
          # Strip "refs/tags/" prefix
          echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
      # Send stable discord notification
      - name: Discord notification
        env:
          DISCORD_WEBHOOK: ${{ secrets.DISCORD_RELEASE_WEBHOOK }}
        uses: Ilshidur/action-discord@0.3.2
        with:
          args: '🚀 Version {{ VERSION }} of tandoor has been released 🥳 Check it out https://github.com/vabene1111/recipes/releases/tag/{{ VERSION }}'

  notify-beta:
    name: Notify Beta
    runs-on: ubuntu-latest
    needs: build-container
    if: github.ref == 'refs/heads/beta'
    steps:
      # Send beta discord notification
      - name: Discord notification
        env:
          DISCORD_WEBHOOK: ${{ secrets.DISCORD_BETA_WEBHOOK }}
        uses: Ilshidur/action-discord@0.3.2
        with:
          args: '🚀 The BETA Image has been updated! 🥳'

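This workflow replaces the older single-purpose publish workflows removed further down. For local testing, a rough bash equivalent of its build step, assuming Docker with Buildx and QEMU binfmt emulation is installed; the builder name and `recipes:local` tag are illustrative:

```bash
# Build the Vue frontend first, as the workflow does.
cd vue && yarn install --frozen-lockfile && yarn build && cd ..

# Multi-arch build roughly matching the workflow's platform list.
# Without --push the multi-arch result only lands in the build cache; drop the extra
# platforms and add --load to get a usable single-arch image in the local Docker daemon.
docker buildx create --use --name tandoor-builder 2>/dev/null || true
docker buildx build \
  --platform linux/amd64,linux/arm64,linux/arm/v7 \
  --file Dockerfile \
  --pull \
  --tag recipes:local \
  .
```
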
.github/workflows/ci.yml (vendored): 13 lines changed

@@ -1,6 +1,6 @@
name: Continuous Integration

on: [push]
on: [push, pull_request]

jobs:
  build:
@@ -12,15 +12,15 @@ jobs:
        python-version: ['3.10']

    steps:
      - uses: actions/checkout@v1
      - uses: actions/checkout@v3
      - name: Set up Python 3.10
        uses: actions/setup-python@v1
        uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      # Build Vue frontend
      - uses: actions/setup-node@v2
      - uses: actions/setup-node@v3
        with:
          node-version: '14'
          node-version: '18'
      - name: Install Vue dependencies
        working-directory: ./vue
        run: yarn install
@@ -29,7 +29,8 @@ jobs:
        run: yarn build
      - name: Install Django dependencies
        run: |
          sudo apt-get install -y libsasl2-dev python-dev libldap2-dev libssl-dev
          sudo apt-get -y update
          sudo apt-get install -y libsasl2-dev python3-dev libldap2-dev libssl-dev
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          python3 manage.py collectstatic --noinput

.github/workflows/codeql-analysis.yml
vendored
6
.github/workflows/codeql-analysis.yml
vendored
@@ -12,7 +12,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
# We must fetch at least the immediate parents so that if this is
|
||||
# a pull request then we can checkout the head.
|
||||
@@ -25,7 +25,7 @@ jobs:
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v1
|
||||
uses: github/codeql-action/init@v2
|
||||
# Override language selection by uncommenting this and choosing your languages
|
||||
with:
|
||||
languages: python, javascript
|
||||
@@ -47,6 +47,6 @@ jobs:
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v1
|
||||
uses: github/codeql-action/analyze@v2
|
||||
with:
|
||||
languages: javascript, python
|
||||
|
||||
.github/workflows/docker-publish-beta.yml (vendored, deleted): 46 lines

@@ -1,46 +0,0 @@
name: publish beta image docker
on:
  push:
    branches:
      - 'beta'
jobs:
  build:
    if: github.repository_owner == 'TandoorRecipes'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@master
      # Update Version number
      - name: Update version file
        uses: DamianReeves/write-file-action@v1.0
        with:
          path: recipes/version.py
          contents: |
            VERSION_NUMBER = 'beta'
            BUILD_REF = '${{ github.sha }}'
          write-mode: overwrite
      # Build Vue frontend
      - uses: actions/setup-node@v2
        with:
          node-version: '14'
      - name: Install dependencies
        working-directory: ./vue
        run: yarn install
      - name: Build dependencies
        working-directory: ./vue
        run: yarn build
      # Build container
      - name: Build and publish image
        uses: ilteoood/docker_buildx@master
        with:
          publish: true
          imageName: vabene1111/recipes
          tag: beta
          dockerUser: ${{ secrets.DOCKER_USERNAME }}
          dockerPassword: ${{ secrets.DOCKER_PASSWORD }}
      # Send discord notification
      - name: Discord notification
        env:
          DISCORD_WEBHOOK: ${{ secrets.DISCORD_BETA_WEBHOOK }}
        uses: Ilshidur/action-discord@0.3.2
        with:
          args: '🚀 The BETA Image has been updated! 🥳'

.github/workflows/docker-publish-dev.yml (vendored, deleted): 42 lines

@@ -1,42 +0,0 @@
name: publish dev image docker
on:
  push:
    branches:
      - '*'
      - '*/*'
      - '!master'
jobs:
  build:
    if: github.repository_owner == 'TandoorRecipes'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@master
      # Update Version number
      - name: Update version file
        uses: DamianReeves/write-file-action@v1.0
        with:
          path: recipes/version.py
          contents: |
            VERSION_NUMBER = 'develop'
            BUILD_REF = '${{ github.sha }}'
          write-mode: overwrite
      # Build Vue frontend
      - uses: actions/setup-node@v2
        with:
          node-version: '14'
      - name: Clear Cache
        working-directory: ./vue
        run: yarn cache clean --all
      - name: Install dependencies
        working-directory: ./vue
        run: yarn install
      - name: Build dependencies
        working-directory: ./vue
        run: yarn build
      # Build container
      - name: Publish to Registry
        uses: elgohr/Publish-Docker-Github-Action@2.13
        with:
          name: vabene1111/recipes
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

.github/workflows/docker-publish-latest.yml (vendored, deleted): 43 lines

@@ -1,43 +0,0 @@
name: publish latest image docker
on:
  push:
    tags:
      - '*'

jobs:
  build:
    if: github.repository_owner == 'TandoorRecipes'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@master
      - name: Get version number
        id: get_version
        run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}
      # Update Version number
      - name: Update version file
        uses: DamianReeves/write-file-action@v1.0
        with:
          path: recipes/version.py
          contents: |
            VERSION_NUMBER = '${{ steps.get_version.outputs.VERSION }}'
            BUILD_REF = '${{ github.sha }}'
          write-mode: overwrite
      # Build Vue frontend
      - uses: actions/setup-node@v2
        with:
          node-version: '14'
      - name: Install dependencies
        working-directory: ./vue
        run: yarn install
      - name: Build dependencies
        working-directory: ./vue
        run: yarn build
      # Build container
      - name: Build and publish image
        uses: ilteoood/docker_buildx@master
        with:
          publish: true
          imageName: vabene1111/recipes
          tag: latest
          dockerUser: ${{ secrets.DOCKER_USERNAME }}
          dockerPassword: ${{ secrets.DOCKER_PASSWORD }}

.github/workflows/docker-publish-release.yml (vendored, 52 lines changed)
@@ -1,52 +0,0 @@
name: publish tagged release docker

on:
  release:
    types: [published]

jobs:
  build:
    if: github.repository_owner == 'TandoorRecipes'
    runs-on: ubuntu-latest
    name: Build image job
    steps:
      - name: Checkout master
        uses: actions/checkout@master
      - name: Get version number
        id: get_version
        run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}
      # Update Version number
      - name: Update version file
        uses: DamianReeves/write-file-action@v1.0
        with:
          path: recipes/version.py
          contents: |
            VERSION_NUMBER = '${{ steps.get_version.outputs.VERSION }}'
            BUILD_REF = '${{ github.sha }}'
          write-mode: overwrite
      # Build Vue frontend
      - uses: actions/setup-node@v2
        with:
          node-version: '14'
      - name: Install dependencies
        working-directory: ./vue
        run: yarn install
      - name: Build dependencies
        working-directory: ./vue
        run: yarn build
      # Build container
      - name: Build and publish image
        uses: ilteoood/docker_buildx@master
        with:
          publish: true
          imageName: vabene1111/recipes
          tag: ${{ steps.get_version.outputs.VERSION }}
          dockerUser: ${{ secrets.DOCKER_USERNAME }}
          dockerPassword: ${{ secrets.DOCKER_PASSWORD }}
      # Send discord notification
      - name: Discord notification
        env:
          DISCORD_WEBHOOK: ${{ secrets.DISCORD_RELEASE_WEBHOOK }}
        uses: Ilshidur/action-discord@0.3.2
        with:
          args: '🚀 Version {{ EVENT_PAYLOAD.release.tag_name }} of tandoor has been released 🥳 Check it out https://github.com/vabene1111/recipes/releases/tag/{{ EVENT_PAYLOAD.release.tag_name }}'
.github/workflows/docs.yml (vendored, 5 lines changed)
@@ -3,15 +3,14 @@ on:
  push:
    branches:
      - master
      - develop

jobs:
  deploy:
    if: github.repository_owner == 'TandoorRecipes'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: 3.x
      - run: pip install mkdocs-material mkdocs-include-markdown-plugin
.gitignore (vendored, 3 lines changed)
@@ -74,13 +74,16 @@ mediafiles/
\.env
staticfiles/
postgresql/
data/


/docker-compose.override.yml
vue/node_modules
plugins
.vscode/
vetur.config.js
cookbook/static/vue
vue/webpack-stats.json
cookbook/templates/sw.js
.prettierignore
vue/.yarn
.idea/dictionaries/vaben.xml (generated, new file, 8 lines)
@@ -0,0 +1,8 @@
<component name="ProjectDictionaryState">
  <dictionary name="vaben">
    <words>
      <w>pinia</w>
      <w>selfhosted</w>
    </words>
  </dictionary>
</component>
.idea/dictionaries/vabene1111_PC.xml (generated, 1 line changed)
@@ -6,6 +6,7 @@
      <w>csrftoken</w>
      <w>gunicorn</w>
      <w>ical</w>
      <w>invitelink</w>
      <w>mealie</w>
      <w>pepperplate</w>
      <w>safron</w>
.idea/recipes.iml (generated, 2 lines changed)
@@ -18,7 +18,7 @@
    <excludeFolder url="file://$MODULE_DIR$/staticfiles" />
    <excludeFolder url="file://$MODULE_DIR$/venv" />
  </content>
  <orderEntry type="jdk" jdkName="Python 3.9 (recipes)" jdkType="Python SDK" />
  <orderEntry type="inheritedJdk" />
  <orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="TemplatesService">
.idea/vcs.xml (generated, 2 lines changed)
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
    <mapping directory="" vcs="Git" />
  </component>
</project>
@@ -1,7 +1,10 @@
# Contributers

Many thanks to everyone who contributed to this project! If you add something or help out feel free to add yourself
to this list.

## Code/Features

Please have a look at the [list of pull requests](https://github.com/vabene1111/recipes/pulls) for
a complete list of contributions.
Below are some of the larger contributions made yet.

@@ -20,46 +23,61 @@ Below are some of the larger contributions made yet.

## Translations

### Catalan

[Rubenix](https://www.transifex.com/user/profile/rubenix/)

### Dutch

[D0T1X](https://www.transifex.com/user/profile/D0T1X/)
[ikbenfrank](https://www.transifex.com/user/profile/ikbenfrank/)
[kampsj](https://www.transifex.com/user/profile/kampsj/)

### French

[jt117](https://www.transifex.com/user/profile/jt117/)
[nerdinator](https://www.transifex.com/user/profile/nerdinator/)
[agaume](https://www.transifex.com/user/profile/agaume/)

### German

[eTaurus](https://www.transifex.com/user/profile/eTaurus/)
[l0c4lh057](https://www.transifex.com/user/profile/l0c4lh057/)
[hyperbit00]
[hyperbit00](https://github.com/hyperbit00)

### Hungarian

[igazka](https://www.transifex.com/user/profile/igazka/)

### Italian

[SK3LA](https://www.transifex.com/user/profile/SK3LA/)
[auanasgheps](https://www.transifex.com/user/profile/auanasgheps/)

### Latvian

[melkypie](https://github.com/melkypie)

### Portuguese

[hds](https://www.transifex.com/user/profile/hds/)
[mlopezifu](https://www.transifex.com/user/profile/mlopezifu/)
[stormsz](https://www.transifex.com/user/profile/stormsz/)

### Russian

[amillerr](https://github.com/amillerr)

### Spanish

[albertocp](https://www.transifex.com/user/profile/albertocp/)
[alfa5](https://www.transifex.com/user/profile/alfa5/)
[mlopezifu](https://www.transifex.com/user/profile/mlopezifu/)
[sergio.laya](https://www.transifex.com/user/profile/sergio.laya/)

### Swedish

[makanz](https://github.com/makanz)

### Turkish
Dockerfile (16 lines changed)
@@ -1,7 +1,7 @@
FROM python:3.10-alpine3.15
FROM python:3.10-alpine3.18

#Install all dependencies.
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev py-cryptography openldap
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev openldap git

#Print all logs without buffering it.
ENV PYTHONUNBUFFERED 1
@@ -15,7 +15,11 @@ WORKDIR /opt/recipes

COPY requirements.txt ./

RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev libressl-dev libffi-dev cargo openssl-dev openldap-dev python3-dev && \
RUN \
    if [ `apk --print-arch` = "armv7" ]; then \
        printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf ; \
    fi
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev && \
    echo -n "INPUT ( libldap.so )" > /usr/lib/libldap_r.so && \
    python -m venv venv && \
    /opt/recipes/venv/bin/python -m pip install --upgrade pip && \
@@ -26,5 +30,11 @@ RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-de

#Copy project and execute it.
COPY . ./

# collect information from git repositories
RUN /opt/recipes/venv/bin/python version.py
# delete git repositories to reduce image size
RUN find . -type d -name ".git" | xargs rm -rf

RUN chmod +x boot.sh
ENTRYPOINT ["/opt/recipes/boot.sh"]
README.md (12 lines changed)
@@ -61,9 +61,17 @@ a public page.

Documentation can be found [here](https://docs.tandoor.dev/).

## Contributing
## Support our work
Tandoor is developed by volunteers in their free time just because it's fun. That said, earning
some money with the project allows us to spend more time on it and thus make improvements we otherwise couldn't.
Because of that, there are several ways you can support us:

You can help out with the ongoing development by looking for potential bugs in our code base, or by contributing new features. We are always welcoming new pull requests containing bug fixes, refactors and new features. We have a list of tasks and bugs on our issue tracker on GitHub. Please comment on issues you want to contribute to, to avoid duplicating effort.
- **GitHub Sponsors** You can sponsor contributors of this project on GitHub: [vabene1111](https://github.com/sponsors/vabene1111)
- **Host at Hetzner** We have been very happy customers of Hetzner for multiple years for all of our projects. If you want to get into self-hosting or are tired of the expensive big providers, their cloud servers are a great place to get started. When you sign up via our [referral link](https://hetzner.cloud/?ref=ISdlrLmr9kGj) you will get 20€ worth of cloud credits and we get a small kickback too.
- **Let us host for you** We are offering a [hosted version](https://app.tandoor.dev) where all profits support us and the development of Tandoor (currently only available in Germany).

## Contributing
Contributions are welcome but please read [this](https://docs.tandoor.dev/contribute/#contributing-code) **BEFORE** contributing anything!

## Your Feedback
@@ -6,5 +6,4 @@ Since this software is still considered beta/WIP support is always only given fo

## Reporting a Vulnerability

Please open a normal public issue if you have any security related concerns. If you feel like the issue should not be discussed in
public just open a generic issue and we will discuss further communication there (since GitHub does not allow everyone to create a security advisory :/).
Please use GitHub Security Advisories to report any kind of security vulnerabilities.
boot.sh (29 lines changed)
@@ -2,6 +2,9 @@
source venv/bin/activate

TANDOOR_PORT="${TANDOOR_PORT:-8080}"
GUNICORN_WORKERS="${GUNICORN_WORKERS:-3}"
GUNICORN_THREADS="${GUNICORN_THREADS:-2}"
GUNICORN_LOG_LEVEL="${GUNICORN_LOG_LEVEL:-'info'}"
NGINX_CONF_FILE=/opt/recipes/nginx/conf.d/Recipes.conf

display_warning() {
@@ -16,9 +19,14 @@ if [ ! -f "$NGINX_CONF_FILE" ] && [ $GUNICORN_MEDIA -eq 0 ]; then
    display_warning "Nginx configuration file could not be found at the default location!\nPath: ${NGINX_CONF_FILE}"
fi

# SECRET_KEY must be set in .env file
# SECRET_KEY (or a valid file at SECRET_KEY_FILE) must be set in .env file

if [ -f "${SECRET_KEY_FILE}" ]; then
    export SECRET_KEY=$(cat "$SECRET_KEY_FILE")
fi

if [ -z "${SECRET_KEY}" ]; then
    display_warning "The environment variable 'SECRET_KEY' is not set but REQUIRED for running Tandoor!"
    display_warning "The environment variable 'SECRET_KEY' (or 'SECRET_KEY_FILE' that points to an existing file) is not set but REQUIRED for running Tandoor!"
fi

@@ -27,14 +35,19 @@ echo "Waiting for database to be ready..."
attempt=0
max_attempts=20

if [ "${DB_ENGINE}" != 'django.db.backends.sqlite3' ]; then
if [ "${DB_ENGINE}" == 'django.db.backends.postgresql' ] || [ "${DATABASE_URL}" == 'postgres'* ]; then

    # POSTGRES_PASSWORD must be set in .env file
    if [ -z "${POSTGRES_PASSWORD}" ]; then
        display_warning "The environment variable 'POSTGRES_PASSWORD' is not set but REQUIRED for running Tandoor!"
    # POSTGRES_PASSWORD (or a valid file at POSTGRES_PASSWORD_FILE) must be set in .env file

    if [ -f "${POSTGRES_PASSWORD_FILE}" ]; then
        export POSTGRES_PASSWORD=$(cat "$POSTGRES_PASSWORD_FILE")
    fi

    while pg_isready --host=${POSTGRES_HOST} --port=${POSTGRES_PORT} -q; status=$?; attempt=$((attempt+1)); [ $status -ne 0 ] && [ $attempt -le $max_attempts ]; do
    if [ -z "${POSTGRES_PASSWORD}" ]; then
        display_warning "The environment variable 'POSTGRES_PASSWORD' (or 'POSTGRES_PASSWORD_FILE' that points to an existing file) is not set but REQUIRED for running Tandoor!"
    fi

    while pg_isready --host=${POSTGRES_HOST} --port=${POSTGRES_PORT} --user=${POSTGRES_USER} -q; status=$?; attempt=$((attempt+1)); [ $status -ne 0 ] && [ $attempt -le $max_attempts ]; do
        sleep 5
    done
fi
@@ -63,4 +76,4 @@ echo "Done"

chmod -R 755 /opt/recipes/mediafiles

exec gunicorn -b :$TANDOOR_PORT --access-logfile - --error-logfile - --log-level INFO recipes.wsgi
exec gunicorn -b :$TANDOOR_PORT --workers $GUNICORN_WORKERS --threads $GUNICORN_THREADS --access-logfile - --error-logfile - --log-level $GUNICORN_LOG_LEVEL recipes.wsgi
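Note on the new GUNICORN_WORKERS, GUNICORN_THREADS and GUNICORN_LOG_LEVEL variables: boot.sh simply forwards them to the gunicorn command line. For reference only, the same defaults could be expressed as a gunicorn config file; this is an illustrative sketch and not part of the diff (Tandoor itself keeps using CLI flags):

    # gunicorn.conf.py -- illustrative only, mirrors the defaults boot.sh sets above
    import os

    bind = f":{os.getenv('TANDOOR_PORT', '8080')}"      # same default port as boot.sh
    workers = int(os.getenv('GUNICORN_WORKERS', 3))     # worker processes
    threads = int(os.getenv('GUNICORN_THREADS', 2))     # threads per worker
    loglevel = os.getenv('GUNICORN_LOG_LEVEL', 'info')  # gunicorn log level
    accesslog = '-'                                     # log to stdout, as boot.sh does
    errorlog = '-'
    # run with: gunicorn -c gunicorn.conf.py recipes.wsgi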
@@ -10,12 +10,13 @@ from treebeard.forms import movenodeform_factory
|
||||
|
||||
from cookbook.managers import DICTIONARY
|
||||
|
||||
from .models import (BookmarkletImport, Comment, CookLog, Food, FoodInheritField, ImportLog,
|
||||
Ingredient, InviteLink, Keyword, MealPlan, MealType, NutritionInformation,
|
||||
from .models import (BookmarkletImport, Comment, CookLog, Food, ImportLog, Ingredient, InviteLink,
|
||||
Keyword, MealPlan, MealType, NutritionInformation, Property, PropertyType,
|
||||
Recipe, RecipeBook, RecipeBookEntry, RecipeImport, SearchPreference, ShareLink,
|
||||
ShoppingList, ShoppingListEntry, ShoppingListRecipe, Space, Step, Storage,
|
||||
Supermarket, SupermarketCategory, SupermarketCategoryRelation, Sync, SyncLog,
|
||||
TelegramBot, Unit, UserFile, UserPreference, ViewLog, Automation)
|
||||
TelegramBot, Unit, UnitConversion, UserFile, UserPreference, UserSpace,
|
||||
ViewLog)
|
||||
|
||||
|
||||
class CustomUserAdmin(UserAdmin):
|
||||
@@ -32,46 +33,14 @@ admin.site.unregister(Group)
|
||||
@admin.action(description='Delete all data from a space')
|
||||
def delete_space_action(modeladmin, request, queryset):
|
||||
for space in queryset:
|
||||
CookLog.objects.filter(space=space).delete()
|
||||
ViewLog.objects.filter(space=space).delete()
|
||||
ImportLog.objects.filter(space=space).delete()
|
||||
BookmarkletImport.objects.filter(space=space).delete()
|
||||
|
||||
Comment.objects.filter(recipe__space=space).delete()
|
||||
Keyword.objects.filter(space=space).delete()
|
||||
Ingredient.objects.filter(space=space).delete()
|
||||
Food.objects.filter(space=space).delete()
|
||||
Unit.objects.filter(space=space).delete()
|
||||
Step.objects.filter(space=space).delete()
|
||||
NutritionInformation.objects.filter(space=space).delete()
|
||||
RecipeBookEntry.objects.filter(book__space=space).delete()
|
||||
RecipeBook.objects.filter(space=space).delete()
|
||||
MealType.objects.filter(space=space).delete()
|
||||
MealPlan.objects.filter(space=space).delete()
|
||||
ShareLink.objects.filter(space=space).delete()
|
||||
Recipe.objects.filter(space=space).delete()
|
||||
|
||||
RecipeImport.objects.filter(space=space).delete()
|
||||
SyncLog.objects.filter(sync__space=space).delete()
|
||||
Sync.objects.filter(space=space).delete()
|
||||
Storage.objects.filter(space=space).delete()
|
||||
|
||||
ShoppingListEntry.objects.filter(shoppinglist__space=space).delete()
|
||||
ShoppingListRecipe.objects.filter(shoppinglist__space=space).delete()
|
||||
ShoppingList.objects.filter(space=space).delete()
|
||||
|
||||
SupermarketCategoryRelation.objects.filter(supermarket__space=space).delete()
|
||||
SupermarketCategory.objects.filter(space=space).delete()
|
||||
Supermarket.objects.filter(space=space).delete()
|
||||
|
||||
InviteLink.objects.filter(space=space).delete()
|
||||
UserFile.objects.filter(space=space).delete()
|
||||
Automation.objects.filter(space=space).delete()
|
||||
space.safe_delete()
|
||||
|
||||
|
||||
class SpaceAdmin(admin.ModelAdmin):
|
||||
list_display = ('name', 'created_by', 'max_recipes', 'max_users', 'max_file_storage_mb', 'allow_sharing')
|
||||
search_fields = ('name', 'created_by__username')
|
||||
autocomplete_fields = ('created_by',)
|
||||
filter_horizontal = ('food_inherit',)
|
||||
list_filter = ('max_recipes', 'max_users', 'max_file_storage_mb', 'allow_sharing')
|
||||
date_hierarchy = 'created_at'
|
||||
actions = [delete_space_action]
|
||||
@@ -80,15 +49,26 @@ class SpaceAdmin(admin.ModelAdmin):
|
||||
admin.site.register(Space, SpaceAdmin)
|
||||
|
||||
|
||||
class UserSpaceAdmin(admin.ModelAdmin):
|
||||
list_display = ('user', 'space',)
|
||||
search_fields = ('user__username', 'space__name',)
|
||||
filter_horizontal = ('groups',)
|
||||
autocomplete_fields = ('user', 'space',)
|
||||
|
||||
|
||||
admin.site.register(UserSpace, UserSpaceAdmin)
|
||||
|
||||
|
||||
class UserPreferenceAdmin(admin.ModelAdmin):
|
||||
list_display = ('name', 'space', 'theme', 'nav_color', 'default_page', 'search_style',) # TODO add new fields
|
||||
search_fields = ('user__username', 'space__name')
|
||||
list_filter = ('theme', 'nav_color', 'default_page', 'search_style')
|
||||
list_display = ('name', 'theme', 'nav_color', 'default_page',)
|
||||
search_fields = ('user__username',)
|
||||
list_filter = ('theme', 'nav_color', 'default_page',)
|
||||
date_hierarchy = 'created_at'
|
||||
filter_horizontal = ('plan_share', 'shopping_share',)
|
||||
|
||||
@staticmethod
|
||||
def name(obj):
|
||||
return obj.user.get_user_name()
|
||||
return obj.user.get_user_display_name()
|
||||
|
||||
|
||||
admin.site.register(UserPreference, UserPreferenceAdmin)
|
||||
@@ -101,7 +81,7 @@ class SearchPreferenceAdmin(admin.ModelAdmin):
|
||||
|
||||
@staticmethod
|
||||
def name(obj):
|
||||
return obj.user.get_user_name()
|
||||
return obj.user.get_user_display_name()
|
||||
|
||||
|
||||
admin.site.register(SearchPreference, SearchPreferenceAdmin)
|
||||
@@ -176,9 +156,16 @@ class KeywordAdmin(TreeAdmin):
|
||||
admin.site.register(Keyword, KeywordAdmin)
|
||||
|
||||
|
||||
@admin.action(description='Delete Steps not part of a Recipe.')
|
||||
def delete_unattached_steps(modeladmin, request, queryset):
|
||||
with scopes_disabled():
|
||||
Step.objects.filter(recipe=None).delete()
|
||||
|
||||
|
||||
class StepAdmin(admin.ModelAdmin):
|
||||
list_display = ('name', 'order',)
|
||||
search_fields = ('name',)
|
||||
actions = [delete_unattached_steps]
|
||||
|
||||
|
||||
admin.site.register(Step, StepAdmin)
|
||||
@@ -203,9 +190,9 @@ class RecipeAdmin(admin.ModelAdmin):
|
||||
|
||||
@staticmethod
|
||||
def created_by(obj):
|
||||
return obj.created_by.get_user_name()
|
||||
return obj.created_by.get_user_display_name()
|
||||
|
||||
if settings.DATABASES['default']['ENGINE'] in ['django.db.backends.postgresql_psycopg2', 'django.db.backends.postgresql']:
|
||||
if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.postgresql':
|
||||
actions = [rebuild_index]
|
||||
|
||||
|
||||
@@ -227,9 +214,24 @@ class FoodAdmin(TreeAdmin):
|
||||
admin.site.register(Food, FoodAdmin)
|
||||
|
||||
|
||||
class UnitConversionAdmin(admin.ModelAdmin):
|
||||
list_display = ('base_amount', 'base_unit', 'food', 'converted_amount', 'converted_unit')
|
||||
search_fields = ('food__name', 'unit__name')
|
||||
|
||||
|
||||
admin.site.register(UnitConversion, UnitConversionAdmin)
|
||||
|
||||
|
||||
@admin.action(description='Delete Ingredients not part of a Recipe.')
|
||||
def delete_unattached_ingredients(modeladmin, request, queryset):
|
||||
with scopes_disabled():
|
||||
Ingredient.objects.filter(step__recipe=None).delete()
|
||||
|
||||
|
||||
class IngredientAdmin(admin.ModelAdmin):
|
||||
list_display = ('food', 'amount', 'unit')
|
||||
search_fields = ('food__name', 'unit__name')
|
||||
actions = [delete_unattached_ingredients]
|
||||
|
||||
|
||||
admin.site.register(Ingredient, IngredientAdmin)
|
||||
@@ -237,12 +239,12 @@ admin.site.register(Ingredient, IngredientAdmin)
|
||||
|
||||
class CommentAdmin(admin.ModelAdmin):
|
||||
list_display = ('recipe', 'name', 'created_at')
|
||||
search_fields = ('text', 'user__username')
|
||||
search_fields = ('text', 'created_by__username')
|
||||
date_hierarchy = 'created_at'
|
||||
|
||||
@staticmethod
|
||||
def name(obj):
|
||||
return obj.created_by.get_user_name()
|
||||
return obj.created_by.get_user_display_name()
|
||||
|
||||
|
||||
admin.site.register(Comment, CommentAdmin)
|
||||
@@ -261,7 +263,7 @@ class RecipeBookAdmin(admin.ModelAdmin):
|
||||
|
||||
@staticmethod
|
||||
def user_name(obj):
|
||||
return obj.created_by.get_user_name()
|
||||
return obj.created_by.get_user_display_name()
|
||||
|
||||
|
||||
admin.site.register(RecipeBook, RecipeBookAdmin)
|
||||
@@ -275,11 +277,11 @@ admin.site.register(RecipeBookEntry, RecipeBookEntryAdmin)
|
||||
|
||||
|
||||
class MealPlanAdmin(admin.ModelAdmin):
|
||||
list_display = ('user', 'recipe', 'meal_type', 'date')
|
||||
list_display = ('user', 'recipe', 'meal_type', 'from_date', 'to_date')
|
||||
|
||||
@staticmethod
|
||||
def user(obj):
|
||||
return obj.created_by.get_user_name()
|
||||
return obj.created_by.get_user_display_name()
|
||||
|
||||
|
||||
admin.site.register(MealPlan, MealPlanAdmin)
|
||||
@@ -312,6 +314,7 @@ admin.site.register(InviteLink, InviteLinkAdmin)
|
||||
|
||||
class CookLogAdmin(admin.ModelAdmin):
|
||||
list_display = ('recipe', 'created_by', 'created_at', 'rating', 'servings')
|
||||
search_fields = ('recipe__name', 'space__name',)
|
||||
|
||||
|
||||
admin.site.register(CookLog, CookLogAdmin)
|
||||
@@ -345,6 +348,20 @@ class ShareLinkAdmin(admin.ModelAdmin):
|
||||
admin.site.register(ShareLink, ShareLinkAdmin)
|
||||
|
||||
|
||||
class PropertyTypeAdmin(admin.ModelAdmin):
|
||||
list_display = ('id', 'name')
|
||||
|
||||
|
||||
admin.site.register(PropertyType, PropertyTypeAdmin)
|
||||
|
||||
|
||||
class PropertyAdmin(admin.ModelAdmin):
|
||||
list_display = ('property_amount', 'property_type')
|
||||
|
||||
|
||||
admin.site.register(Property, PropertyAdmin)
|
||||
|
||||
|
||||
class NutritionInformationAdmin(admin.ModelAdmin):
|
||||
list_display = ('id',)
|
||||
|
||||
|
||||
@@ -1,81 +0,0 @@
|
||||
import django_filters
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.search import TrigramSimilarity
|
||||
from django.db.models import Q
|
||||
from django.utils.translation import gettext as _
|
||||
from django_scopes import scopes_disabled
|
||||
|
||||
from cookbook.forms import MultiSelectWidget
|
||||
from cookbook.models import Food, Keyword, Recipe, ShoppingList
|
||||
|
||||
with scopes_disabled():
|
||||
class RecipeFilter(django_filters.FilterSet):
|
||||
name = django_filters.CharFilter(method='filter_name')
|
||||
keywords = django_filters.ModelMultipleChoiceFilter(
|
||||
queryset=Keyword.objects.none(),
|
||||
widget=MultiSelectWidget,
|
||||
method='filter_keywords'
|
||||
)
|
||||
foods = django_filters.ModelMultipleChoiceFilter(
|
||||
queryset=Food.objects.none(),
|
||||
widget=MultiSelectWidget,
|
||||
method='filter_foods',
|
||||
label=_('Ingredients')
|
||||
)
|
||||
|
||||
def __init__(self, data=None, *args, **kwargs):
|
||||
space = kwargs.pop('space')
|
||||
super().__init__(data, *args, **kwargs)
|
||||
self.filters['foods'].queryset = Food.objects.filter(space=space).all()
|
||||
self.filters['keywords'].queryset = Keyword.objects.filter(space=space).all()
|
||||
|
||||
@staticmethod
|
||||
def filter_keywords(queryset, name, value):
|
||||
if not name == 'keywords':
|
||||
return queryset
|
||||
for x in value:
|
||||
queryset = queryset.filter(keywords=x)
|
||||
return queryset
|
||||
|
||||
@staticmethod
|
||||
def filter_foods(queryset, name, value):
|
||||
if not name == 'foods':
|
||||
return queryset
|
||||
for x in value:
|
||||
queryset = queryset.filter(steps__ingredients__food__name=x).distinct()
|
||||
return queryset
|
||||
|
||||
@staticmethod
|
||||
def filter_name(queryset, name, value):
|
||||
if not name == 'name':
|
||||
return queryset
|
||||
if settings.DATABASES['default']['ENGINE'] in ['django.db.backends.postgresql_psycopg2',
|
||||
'django.db.backends.postgresql']:
|
||||
queryset = queryset.annotate(similarity=TrigramSimilarity('name', value), ).filter(
|
||||
Q(similarity__gt=0.1) | Q(name__unaccent__icontains=value)).order_by('-similarity')
|
||||
else:
|
||||
queryset = queryset.filter(name__icontains=value)
|
||||
return queryset
|
||||
|
||||
class Meta:
|
||||
model = Recipe
|
||||
fields = ['name', 'keywords', 'foods', 'internal']
|
||||
|
||||
# class FoodFilter(django_filters.FilterSet):
|
||||
# name = django_filters.CharFilter(lookup_expr='icontains')
|
||||
|
||||
# class Meta:
|
||||
# model = Food
|
||||
# fields = ['name']
|
||||
|
||||
class ShoppingListFilter(django_filters.FilterSet):
|
||||
|
||||
def __init__(self, data=None, *args, **kwargs):
|
||||
if data is not None:
|
||||
data = data.copy()
|
||||
data.setdefault("finished", False)
|
||||
super().__init__(data, *args, **kwargs)
|
||||
|
||||
class Meta:
|
||||
model = ShoppingList
|
||||
fields = ['finished']
|
||||
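The RecipeFilter removed above relied on PostgreSQL trigram similarity for fuzzy name matching. As a reference for the pattern only (a sketch, not code from this repository; the unaccent lookup used in the original is omitted), the same queryset construction looks roughly like this:

    # Requires PostgreSQL with the pg_trgm extension and django.contrib.postgres installed.
    from django.contrib.postgres.search import TrigramSimilarity
    from django.db.models import Q

    def search_by_name(queryset, value):
        # Rank rows by trigram similarity to the search term; keep weak matches
        # only if they also contain the term as a substring.
        return (queryset
                .annotate(similarity=TrigramSimilarity('name', value))
                .filter(Q(similarity__gt=0.1) | Q(name__icontains=value))
                .order_by('-similarity'))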
@@ -9,8 +9,8 @@ from django_scopes import scopes_disabled
|
||||
from django_scopes.forms import SafeModelChoiceField, SafeModelMultipleChoiceField
|
||||
from hcaptcha.fields import hCaptchaField
|
||||
|
||||
from .models import (Comment, Food, InviteLink, Keyword, MealPlan, MealType, Recipe, RecipeBook,
|
||||
RecipeBookEntry, SearchPreference, Space, Storage, Sync, User, UserPreference)
|
||||
from .models import (Comment, Food, InviteLink, Keyword, Recipe, RecipeBook, RecipeBookEntry,
|
||||
SearchPreference, Space, Storage, Sync, User, UserPreference)
|
||||
|
||||
|
||||
class SelectWidget(widgets.Select):
|
||||
@@ -37,19 +37,15 @@ class UserPreferenceForm(forms.ModelForm):
|
||||
prefix = 'preference'
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if x := kwargs.get('instance', None):
|
||||
space = x.space
|
||||
else:
|
||||
space = kwargs.pop('space')
|
||||
space = kwargs.pop('space')
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields['plan_share'].queryset = User.objects.filter(userpreference__space=space).all()
|
||||
self.fields['plan_share'].queryset = User.objects.filter(userspace__space=space).all()
|
||||
|
||||
class Meta:
|
||||
model = UserPreference
|
||||
fields = (
|
||||
'default_unit', 'use_fractions', 'use_kj', 'theme', 'nav_color',
|
||||
'sticky_navbar', 'default_page', 'show_recent', 'search_style',
|
||||
'plan_share', 'ingredient_decimals', 'comments',
|
||||
'sticky_navbar', 'default_page', 'plan_share', 'ingredient_decimals', 'comments', 'left_handed', 'show_step_ingredients',
|
||||
)
|
||||
|
||||
labels = {
|
||||
@@ -60,35 +56,33 @@ class UserPreferenceForm(forms.ModelForm):
|
||||
'nav_color': _('Navbar color'),
|
||||
'sticky_navbar': _('Sticky navbar'),
|
||||
'default_page': _('Default page'),
|
||||
'show_recent': _('Show recent recipes'),
|
||||
'search_style': _('Search style'),
|
||||
'plan_share': _('Plan sharing'),
|
||||
'ingredient_decimals': _('Ingredient decimal places'),
|
||||
'shopping_auto_sync': _('Shopping list auto sync period'),
|
||||
'comments': _('Comments')
|
||||
'comments': _('Comments'),
|
||||
'left_handed': _('Left-handed mode'),
|
||||
'show_step_ingredients': _('Show step ingredients table')
|
||||
}
|
||||
|
||||
help_texts = {
|
||||
'nav_color': _('Color of the top navigation bar. Not all colors work with all themes, just try them out!'),
|
||||
# noqa: E501
|
||||
'default_unit': _('Default Unit to be used when inserting a new ingredient into a recipe.'), # noqa: E501
|
||||
'default_unit': _('Default Unit to be used when inserting a new ingredient into a recipe.'),
|
||||
'use_fractions': _(
|
||||
'Enables support for fractions in ingredient amounts (e.g. convert decimals to fractions automatically)'),
|
||||
# noqa: E501
|
||||
'use_kj': _('Display nutritional energy amounts in joules instead of calories'), # noqa: E501
|
||||
'use_kj': _('Display nutritional energy amounts in joules instead of calories'),
|
||||
'plan_share': _('Users with whom newly created meal plans should be shared by default.'),
|
||||
'shopping_share': _('Users with whom to share shopping lists.'),
|
||||
# noqa: E501
|
||||
'show_recent': _('Show recently viewed recipes on search page.'), # noqa: E501
|
||||
'ingredient_decimals': _('Number of decimals to round ingredients.'), # noqa: E501
|
||||
'comments': _('If you want to be able to create and see comments underneath recipes.'), # noqa: E501
|
||||
'ingredient_decimals': _('Number of decimals to round ingredients.'),
|
||||
'comments': _('If you want to be able to create and see comments underneath recipes.'),
|
||||
'shopping_auto_sync': _(
|
||||
'Setting to 0 will disable auto sync. When viewing a shopping list the list is updated every set seconds to sync changes someone else might have made. Useful when shopping with multiple people but might use a little bit ' # noqa: E501
|
||||
'of mobile data. If lower than instance limit it is reset when saving.' # noqa: E501
|
||||
'Setting to 0 will disable auto sync. When viewing a shopping list the list is updated every set seconds to sync changes someone else might have made. Useful when shopping with multiple people but might use a little bit '
|
||||
'of mobile data. If lower than instance limit it is reset when saving.'
|
||||
),
|
||||
'sticky_navbar': _('Makes the navbar stick to the top of the page.'), # noqa: E501
|
||||
'sticky_navbar': _('Makes the navbar stick to the top of the page.'),
|
||||
'mealplan_autoadd_shopping': _('Automatically add meal plan ingredients to shopping list.'),
|
||||
'mealplan_autoexclude_onhand': _('Exclude ingredients that are on hand.'),
|
||||
'left_handed': _('Will optimize the UI for use with your left hand.'),
|
||||
'show_step_ingredients': _('Add ingredients table next to recipe steps. Applies at creation time for manually created and URL imported recipes. Individual steps can be overridden in the edit recipe view.')
|
||||
}
|
||||
|
||||
widgets = {
|
||||
@@ -153,11 +147,14 @@ class ImportExportBase(forms.Form):
|
||||
RECIPESAGE = 'RECIPESAGE'
|
||||
DOMESTICA = 'DOMESTICA'
|
||||
MEALMASTER = 'MEALMASTER'
|
||||
MELARECIPES = 'MELARECIPES'
|
||||
REZKONV = 'REZKONV'
|
||||
OPENEATS = 'OPENEATS'
|
||||
PLANTOEAT = 'PLANTOEAT'
|
||||
COOKBOOKAPP = 'COOKBOOKAPP'
|
||||
COPYMETHAT = 'COPYMETHAT'
|
||||
COOKMATE = 'COOKMATE'
|
||||
REZEPTSUITEDE = 'REZEPTSUITEDE'
|
||||
PDF = 'PDF'
|
||||
|
||||
type = forms.ChoiceField(choices=(
|
||||
@@ -165,12 +162,31 @@ class ImportExportBase(forms.Form):
|
||||
(MEALIE, 'Mealie'), (CHOWDOWN, 'Chowdown'), (SAFFRON, 'Saffron'), (CHEFTAP, 'ChefTap'),
|
||||
(PEPPERPLATE, 'Pepperplate'), (RECETTETEK, 'RecetteTek'), (RECIPESAGE, 'Recipe Sage'), (DOMESTICA, 'Domestica'),
|
||||
(MEALMASTER, 'MealMaster'), (REZKONV, 'RezKonv'), (OPENEATS, 'Openeats'), (RECIPEKEEPER, 'Recipe Keeper'),
|
||||
(PLANTOEAT, 'Plantoeat'), (COOKBOOKAPP, 'CookBookApp'), (COPYMETHAT, 'CopyMeThat'), (PDF, 'PDF'),
|
||||
(PLANTOEAT, 'Plantoeat'), (COOKBOOKAPP, 'CookBookApp'), (COPYMETHAT, 'CopyMeThat'), (PDF, 'PDF'), (MELARECIPES, 'Melarecipes'),
|
||||
(COOKMATE, 'Cookmate'), (REZEPTSUITEDE, 'Recipesuite.de')
|
||||
))
|
||||
|
||||
|
||||
class MultipleFileInput(forms.ClearableFileInput):
|
||||
allow_multiple_selected = True
|
||||
|
||||
|
||||
class MultipleFileField(forms.FileField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs.setdefault("widget", MultipleFileInput())
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def clean(self, data, initial=None):
|
||||
single_file_clean = super().clean
|
||||
if isinstance(data, (list, tuple)):
|
||||
result = [single_file_clean(d, initial) for d in data]
|
||||
else:
|
||||
result = single_file_clean(data, initial)
|
||||
return result
|
||||
|
||||
|
||||
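The MultipleFileField above follows the pattern documented by Django after recent releases (the 4.2 line and the corresponding 3.2/4.1 security updates) stopped allowing multiple file uploads through a single ClearableFileInput. A hedged usage sketch; the form and view names below are invented for illustration and assume the MultipleFileField class defined above is in scope:

    from django import forms

    class BulkUploadForm(forms.Form):             # hypothetical form, not part of this diff
        files = MultipleFileField(required=True)  # field defined above

    def handle_bulk_upload(request):
        form = BulkUploadForm(request.POST, request.FILES)
        if form.is_valid():
            # clean() above returns a list when several files are submitted
            for uploaded in form.cleaned_data['files']:
                print(uploaded.name)              # placeholder for real processing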
class ImportForm(ImportExportBase):
|
||||
files = forms.FileField(required=True, widget=forms.ClearableFileInput(attrs={'multiple': True}))
|
||||
files = MultipleFileField(required=True)
|
||||
duplicates = forms.BooleanField(help_text=_(
|
||||
'To prevent duplicates recipes with the same name as existing ones are ignored. Check this box to import everything.'),
|
||||
required=False)
|
||||
@@ -307,50 +323,6 @@ class ImportRecipeForm(forms.ModelForm):
|
||||
}
|
||||
|
||||
|
||||
# TODO deprecate
|
||||
class MealPlanForm(forms.ModelForm):
|
||||
def __init__(self, *args, **kwargs):
|
||||
space = kwargs.pop('space')
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields['recipe'].queryset = Recipe.objects.filter(space=space).all()
|
||||
self.fields['meal_type'].queryset = MealType.objects.filter(space=space).all()
|
||||
self.fields['shared'].queryset = User.objects.filter(userpreference__space=space).all()
|
||||
|
||||
def clean(self):
|
||||
cleaned_data = super(MealPlanForm, self).clean()
|
||||
|
||||
if cleaned_data['title'] == '' and cleaned_data['recipe'] is None:
|
||||
raise forms.ValidationError(
|
||||
_('You must provide at least a recipe or a title.')
|
||||
)
|
||||
|
||||
return cleaned_data
|
||||
|
||||
class Meta:
|
||||
model = MealPlan
|
||||
fields = (
|
||||
'recipe', 'title', 'meal_type', 'note',
|
||||
'servings', 'date', 'shared'
|
||||
)
|
||||
|
||||
help_texts = {
|
||||
'shared': _('You can list default users to share recipes with in the settings.'), # noqa: E501
|
||||
'note': _('You can use markdown to format this field. See the <a href="/docs/markdown/">docs here</a>')
|
||||
# noqa: E501
|
||||
}
|
||||
|
||||
widgets = {
|
||||
'recipe': SelectWidget,
|
||||
'date': DateWidget,
|
||||
'shared': MultiSelectWidget
|
||||
}
|
||||
field_classes = {
|
||||
'recipe': SafeModelChoiceField,
|
||||
'meal_type': SafeModelChoiceField,
|
||||
'shared': SafeModelMultipleChoiceField,
|
||||
}
|
||||
|
||||
|
||||
class InviteLinkForm(forms.ModelForm):
|
||||
def __init__(self, *args, **kwargs):
|
||||
user = kwargs.pop('user')
|
||||
@@ -491,8 +463,8 @@ class ShoppingPreferenceForm(forms.ModelForm):
|
||||
help_texts = {
|
||||
'shopping_share': _('Users will see all items you add to your shopping list. They must add you to see items on their list.'),
|
||||
'shopping_auto_sync': _(
|
||||
'Setting to 0 will disable auto sync. When viewing a shopping list the list is updated every set seconds to sync changes someone else might have made. Useful when shopping with multiple people but might use a little bit ' # noqa: E501
|
||||
'of mobile data. If lower than instance limit it is reset when saving.' # noqa: E501
|
||||
'Setting to 0 will disable auto sync. When viewing a shopping list the list is updated every set seconds to sync changes someone else might have made. Useful when shopping with multiple people but might use a little bit '
|
||||
'of mobile data. If lower than instance limit it is reset when saving.'
|
||||
),
|
||||
'mealplan_autoadd_shopping': _('Automatically add meal plan ingredients to shopping list.'),
|
||||
'mealplan_autoinclude_related': _('When adding a meal plan to the shopping list (manually or automatically), include all related recipes.'),
|
||||
@@ -536,11 +508,12 @@ class SpacePreferenceForm(forms.ModelForm):
|
||||
class Meta:
|
||||
model = Space
|
||||
|
||||
fields = ('food_inherit', 'reset_food_inherit', 'show_facet_count')
|
||||
fields = ('food_inherit', 'reset_food_inherit', 'use_plural')
|
||||
|
||||
help_texts = {
|
||||
'food_inherit': _('Fields on food that should be inherited by default.'),
|
||||
'show_facet_count': _('Show recipe counts on search filters'), }
|
||||
'use_plural': _('Use the plural form for units and food inside this space.'),
|
||||
}
|
||||
|
||||
widgets = {
|
||||
'food_inherit': MultiSelectWidget
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
import datetime
|
||||
|
||||
from django.conf import settings
|
||||
from gettext import gettext as _
|
||||
|
||||
from allauth.account.adapter import DefaultAccountAdapter
|
||||
from django.conf import settings
|
||||
from django.contrib import messages
|
||||
from django.core.cache import caches
|
||||
from gettext import gettext as _
|
||||
|
||||
from cookbook.models import InviteLink
|
||||
|
||||
@@ -14,13 +13,16 @@ class AllAuthCustomAdapter(DefaultAccountAdapter):
|
||||
|
||||
def is_open_for_signup(self, request):
|
||||
"""
|
||||
Whether to allow sign ups.
|
||||
Whether to allow sign-ups.
|
||||
"""
|
||||
signup_token = False
|
||||
if 'signup_token' in request.session and InviteLink.objects.filter(valid_until__gte=datetime.datetime.today(), used_by=None, uuid=request.session['signup_token']).exists():
|
||||
if 'signup_token' in request.session and InviteLink.objects.filter(
|
||||
valid_until__gte=datetime.datetime.today(), used_by=None, uuid=request.session['signup_token']).exists():
|
||||
signup_token = True
|
||||
|
||||
if (request.resolver_match.view_name == 'account_signup' or request.resolver_match.view_name == 'socialaccount_signup') and not settings.ENABLE_SIGNUP and not signup_token:
|
||||
if request.resolver_match.view_name == 'account_signup' and not settings.ENABLE_SIGNUP and not signup_token:
|
||||
return False
|
||||
elif request.resolver_match.view_name == 'socialaccount_signup' and len(settings.SOCIAL_PROVIDERS) < 1:
|
||||
return False
|
||||
else:
|
||||
return super(AllAuthCustomAdapter, self).is_open_for_signup(request)
|
||||
@@ -31,7 +33,10 @@ class AllAuthCustomAdapter(DefaultAccountAdapter):
|
||||
default = datetime.datetime.now()
|
||||
c = caches['default'].get_or_set(email, default, timeout=360)
|
||||
if c == default:
|
||||
super(AllAuthCustomAdapter, self).send_mail(template_prefix, email, context)
|
||||
try:
|
||||
super(AllAuthCustomAdapter, self).send_mail(template_prefix, email, context)
|
||||
except Exception: # don't fail signup just because the confirmation mail could not be sent
|
||||
pass
|
||||
else:
|
||||
messages.add_message(self.request, messages.ERROR, _('In order to prevent spam, the requested email was not send. Please wait a few minutes and try again.'))
|
||||
else:
|
||||
|
||||
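The send_mail override above throttles outgoing account e-mails by using the default cache as a short-lived lock keyed on the address (timeout 360 seconds). As a standalone illustration of that Django cache idiom only, with an invented function name:

    # Minimal sketch of the cache-based throttle used in AllAuthCustomAdapter.send_mail.
    import datetime
    from django.core.cache import caches

    def allowed_to_send(email, timeout=360):
        """Return True at most once per `timeout` seconds for a given address."""
        marker = datetime.datetime.now()
        # get_or_set stores `marker` only when the key is absent and returns the stored
        # value, so equality means this call created the entry and may send the mail.
        return caches['default'].get_or_set(email, marker, timeout=timeout) == marker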
cookbook/helper/automation_helper.py (new file, 227 lines)
@@ -0,0 +1,227 @@
|
||||
import re
|
||||
|
||||
from django.core.cache import caches
|
||||
from django.db.models.functions import Lower
|
||||
|
||||
from cookbook.models import Automation
|
||||
|
||||
|
||||
class AutomationEngine:
|
||||
request = None
|
||||
source = None
|
||||
use_cache = None
|
||||
food_aliases = None
|
||||
keyword_aliases = None
|
||||
unit_aliases = None
|
||||
never_unit = None
|
||||
transpose_words = None
|
||||
regex_replace = {
|
||||
Automation.DESCRIPTION_REPLACE: None,
|
||||
Automation.INSTRUCTION_REPLACE: None,
|
||||
Automation.FOOD_REPLACE: None,
|
||||
Automation.UNIT_REPLACE: None,
|
||||
Automation.NAME_REPLACE: None,
|
||||
}
|
||||
|
||||
def __init__(self, request, use_cache=True, source=None):
|
||||
self.request = request
|
||||
self.use_cache = use_cache
|
||||
if not source:
|
||||
self.source = "default_string_to_avoid_false_regex_match"
|
||||
else:
|
||||
self.source = source
|
||||
|
||||
def apply_keyword_automation(self, keyword):
|
||||
keyword = keyword.strip()
|
||||
if self.use_cache and self.keyword_aliases is None:
|
||||
self.keyword_aliases = {}
|
||||
KEYWORD_CACHE_KEY = f'automation_keyword_alias_{self.request.space.pk}'
|
||||
if c := caches['default'].get(KEYWORD_CACHE_KEY, None):
|
||||
self.keyword_aliases = c
|
||||
caches['default'].touch(KEYWORD_CACHE_KEY, 30)
|
||||
else:
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.KEYWORD_ALIAS).only('param_1', 'param_2').order_by('order').all():
|
||||
self.keyword_aliases[a.param_1.lower()] = a.param_2
|
||||
caches['default'].set(KEYWORD_CACHE_KEY, self.keyword_aliases, 30)
|
||||
else:
|
||||
self.keyword_aliases = {}
|
||||
if self.keyword_aliases:
|
||||
try:
|
||||
keyword = self.keyword_aliases[keyword.lower()]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
if automation := Automation.objects.filter(space=self.request.space, type=Automation.KEYWORD_ALIAS, param_1__iexact=keyword, disabled=False).order_by('order').first():
|
||||
return automation.param_2
|
||||
return keyword
|
||||
|
||||
def apply_unit_automation(self, unit):
|
||||
unit = unit.strip()
|
||||
if self.use_cache and self.unit_aliases is None:
|
||||
self.unit_aliases = {}
|
||||
UNIT_CACHE_KEY = f'automation_unit_alias_{self.request.space.pk}'
|
||||
if c := caches['default'].get(UNIT_CACHE_KEY, None):
|
||||
self.unit_aliases = c
|
||||
caches['default'].touch(UNIT_CACHE_KEY, 30)
|
||||
else:
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.UNIT_ALIAS).only('param_1', 'param_2').order_by('order').all():
|
||||
self.unit_aliases[a.param_1.lower()] = a.param_2
|
||||
caches['default'].set(UNIT_CACHE_KEY, self.unit_aliases, 30)
|
||||
else:
|
||||
self.unit_aliases = {}
|
||||
if self.unit_aliases:
|
||||
try:
|
||||
unit = self.unit_aliases[unit.lower()]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
if automation := Automation.objects.filter(space=self.request.space, type=Automation.UNIT_ALIAS, param_1__iexact=unit, disabled=False).order_by('order').first():
|
||||
return automation.param_2
|
||||
return self.apply_regex_replace_automation(unit, Automation.UNIT_REPLACE)
|
||||
|
||||
def apply_food_automation(self, food):
|
||||
food = food.strip()
|
||||
if self.use_cache and self.food_aliases is None:
|
||||
self.food_aliases = {}
|
||||
FOOD_CACHE_KEY = f'automation_food_alias_{self.request.space.pk}'
|
||||
if c := caches['default'].get(FOOD_CACHE_KEY, None):
|
||||
self.food_aliases = c
|
||||
caches['default'].touch(FOOD_CACHE_KEY, 30)
|
||||
else:
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.FOOD_ALIAS).only('param_1', 'param_2').order_by('order').all():
|
||||
self.food_aliases[a.param_1.lower()] = a.param_2
|
||||
caches['default'].set(FOOD_CACHE_KEY, self.food_aliases, 30)
|
||||
else:
|
||||
self.food_aliases = {}
|
||||
|
||||
if self.food_aliases:
|
||||
try:
|
||||
return self.food_aliases[food.lower()]
|
||||
except KeyError:
|
||||
return food
|
||||
else:
|
||||
if automation := Automation.objects.filter(space=self.request.space, type=Automation.FOOD_ALIAS, param_1__iexact=food, disabled=False).order_by('order').first():
|
||||
return automation.param_2
|
||||
return self.apply_regex_replace_automation(food, Automation.FOOD_REPLACE)
|
||||
|
||||
def apply_never_unit_automation(self, tokens):
|
||||
"""
|
||||
Moves a string that should never be treated as a unit to next token and optionally replaced with default unit
|
||||
e.g. NEVER_UNIT: param1: egg, param2: None would modify ['1', 'egg', 'white'] to ['1', '', 'egg', 'white']
|
||||
or NEVER_UNIT: param1: egg, param2: pcs would modify ['1', 'egg', 'yolk'] to ['1', 'pcs', 'egg', 'yolk']
|
||||
:param1 string: string that should never be considered a unit, will be moved to token[2]
|
||||
:param2 (optional) unit as string: will insert unit string into token[1]
|
||||
:return: unit as string (possibly changed by automation)
|
||||
"""
|
||||
|
||||
if self.use_cache and self.never_unit is None:
|
||||
self.never_unit = {}
|
||||
NEVER_UNIT_CACHE_KEY = f'automation_never_unit_{self.request.space.pk}'
|
||||
if c := caches['default'].get(NEVER_UNIT_CACHE_KEY, None):
|
||||
self.never_unit = c
|
||||
caches['default'].touch(NEVER_UNIT_CACHE_KEY, 30)
|
||||
else:
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.NEVER_UNIT).only('param_1', 'param_2').order_by('order').all():
|
||||
self.never_unit[a.param_1.lower()] = a.param_2
|
||||
caches['default'].set(NEVER_UNIT_CACHE_KEY, self.never_unit, 30)
|
||||
else:
|
||||
self.never_unit = {}
|
||||
|
||||
new_unit = None
|
||||
alt_unit = self.apply_unit_automation(tokens[1])
|
||||
never_unit = False
|
||||
if self.never_unit:
|
||||
try:
|
||||
new_unit = self.never_unit[tokens[1].lower()]
|
||||
never_unit = True
|
||||
except KeyError:
|
||||
return tokens
|
||||
else:
|
||||
if a := Automation.objects.annotate(param_1_lower=Lower('param_1')).filter(space=self.request.space, type=Automation.NEVER_UNIT, param_1_lower__in=[
|
||||
tokens[1].lower(), alt_unit.lower()], disabled=False).order_by('order').first():
|
||||
new_unit = a.param_2
|
||||
never_unit = True
|
||||
|
||||
if never_unit:
|
||||
tokens.insert(1, new_unit)
|
||||
return tokens
|
||||
|
||||
def apply_transpose_automation(self, string):
|
||||
"""
|
||||
If two words (param_1 & param_2) are detected in sequence, swap their position in the ingredient string
|
||||
:param 1: first word to detect
|
||||
:param 2: second word to detect
|
||||
return: new ingredient string
|
||||
"""
|
||||
if self.use_cache and self.transpose_words is None:
|
||||
self.transpose_words = {}
|
||||
TRANSPOSE_WORDS_CACHE_KEY = f'automation_transpose_words_{self.request.space.pk}'
|
||||
if c := caches['default'].get(TRANSPOSE_WORDS_CACHE_KEY, None):
|
||||
self.transpose_words = c
|
||||
caches['default'].touch(TRANSPOSE_WORDS_CACHE_KEY, 30)
|
||||
else:
|
||||
i = 0
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.TRANSPOSE_WORDS).only(
|
||||
'param_1', 'param_2').order_by('order').all()[:512]:
|
||||
self.transpose_words[i] = [a.param_1.lower(), a.param_2.lower()]
|
||||
i += 1
|
||||
caches['default'].set(TRANSPOSE_WORDS_CACHE_KEY, self.transpose_words, 30)
|
||||
else:
|
||||
self.transpose_words = {}
|
||||
|
||||
tokens = [x.lower() for x in string.replace(',', ' ').split()]
|
||||
if self.transpose_words:
|
||||
for key, value in self.transpose_words.items():
|
||||
if value[0] in tokens and value[1] in tokens:
|
||||
string = re.sub(rf"\b({value[0]})\W*({value[1]})\b", r"\2 \1", string, flags=re.IGNORECASE)
|
||||
else:
|
||||
for rule in Automation.objects.filter(space=self.request.space, type=Automation.TRANSPOSE_WORDS, disabled=False) \
|
||||
.annotate(param_1_lower=Lower('param_1'), param_2_lower=Lower('param_2')) \
|
||||
.filter(param_1_lower__in=tokens, param_2_lower__in=tokens).order_by('order')[:512]:
|
||||
if rule.param_1 in tokens and rule.param_2 in tokens:
|
||||
string = re.sub(rf"\b({rule.param_1})\W*({rule.param_2})\b", r"\2 \1", string, flags=re.IGNORECASE)
|
||||
return string
|
||||
|
||||
def apply_regex_replace_automation(self, string, automation_type):
|
||||
# TODO add warning - maybe on SPACE page? when a max of 512 automations of a specific type is exceeded (ALIAS types excluded?)
|
||||
"""
|
||||
Replaces strings in a recipe field that are from a matched source
|
||||
field_type are Automation.type that apply regex replacements
|
||||
Automation.DESCRIPTION_REPLACE
|
||||
Automation.INSTRUCTION_REPLACE
|
||||
Automation.FOOD_REPLACE
|
||||
Automation.UNIT_REPLACE
|
||||
Automation.NAME_REPLACE
|
||||
|
||||
regex replacement utilizes the following fields from the Automation model
|
||||
:param 1: source that should apply the automation in regex format ('.*' for all)
|
||||
:param 2: regex pattern to match ()
|
||||
:param 3: replacement string (leave blank to delete)
|
||||
return: new string
|
||||
"""
|
||||
if self.use_cache and self.regex_replace[automation_type] is None:
|
||||
self.regex_replace[automation_type] = {}
|
||||
REGEX_REPLACE_CACHE_KEY = f'automation_regex_replace_{self.request.space.pk}'
|
||||
if c := caches['default'].get(REGEX_REPLACE_CACHE_KEY, None):
|
||||
self.regex_replace[automation_type] = c[automation_type]
|
||||
caches['default'].touch(REGEX_REPLACE_CACHE_KEY, 30)
|
||||
else:
|
||||
i = 0
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=automation_type).only(
|
||||
'param_1', 'param_2', 'param_3').order_by('order').all()[:512]:
|
||||
self.regex_replace[automation_type][i] = [a.param_1, a.param_2, a.param_3]
|
||||
i += 1
|
||||
caches['default'].set(REGEX_REPLACE_CACHE_KEY, self.regex_replace, 30)
|
||||
else:
|
||||
self.regex_replace[automation_type] = {}
|
||||
|
||||
if self.regex_replace[automation_type]:
|
||||
for rule in self.regex_replace[automation_type].values():
|
||||
if re.match(rule[0], (self.source)[:512]):
|
||||
string = re.sub(rule[1], rule[2], string, flags=re.IGNORECASE)
|
||||
else:
|
||||
for rule in Automation.objects.filter(space=self.request.space, disabled=False, type=automation_type).only(
|
||||
'param_1', 'param_2', 'param_3').order_by('order').all()[:512]:
|
||||
if re.match(rule.param_1, (self.source)[:512]):
|
||||
string = re.sub(rule.param_2, rule.param_3, string, flags=re.IGNORECASE)
|
||||
return string
|
||||
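For readers skimming apply_regex_replace_automation above: each rule is (param_1 = source pattern, param_2 = search pattern, param_3 = replacement), and a rule only fires when the recipe's source matches param_1. A toy, framework-free illustration of that matching logic; the rule values below are invented examples, not shipped automations:

    import re

    # (param_1: source regex, param_2: pattern to replace, param_3: replacement)
    rules = [
        (r'.*', r'\bapprox\.?\s*', ''),             # strip "approx." regardless of source
        (r'.*chefkoch\.de.*', r'\(.*?\)', ''),      # drop parenthesised notes for one site
    ]

    def apply_rules(source, text):
        for source_pattern, search, replacement in rules:
            if re.match(source_pattern, source[:512]):   # same source check as above
                text = re.sub(search, replacement, text, flags=re.IGNORECASE)
        return text

    print(apply_rules('https://www.chefkoch.de/rezepte/123', 'Mehl (Type 405) approx. 500 g'))
    # -> 'Mehl  500 g'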
cookbook/helper/cache_helper.py (new file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
class CacheHelper:
|
||||
space = None
|
||||
|
||||
BASE_UNITS_CACHE_KEY = None
|
||||
PROPERTY_TYPE_CACHE_KEY = None
|
||||
|
||||
def __init__(self, space):
|
||||
self.space = space
|
||||
|
||||
self.BASE_UNITS_CACHE_KEY = f'SPACE_{space.id}_BASE_UNITS'
|
||||
self.PROPERTY_TYPE_CACHE_KEY = f'SPACE_{space.id}_PROPERTY_TYPES'
|
||||
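CacheHelper above only builds per-space key names; it does not read or write the cache itself. A brief sketch of how such keys would typically be used with Django's cache framework (the 60-second timeout and the loader callable are arbitrary placeholders):

    # Illustrative use of the per-space cache keys built by CacheHelper (defined above).
    from django.core.cache import caches

    def get_base_units(space, loader):
        key = CacheHelper(space).BASE_UNITS_CACHE_KEY
        units = caches['default'].get(key)
        if units is None:
            units = loader(space)                  # loader() stands in for the real query
            caches['default'].set(key, units, 60)  # cache per space for 60 seconds
        return units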
@@ -10,4 +10,5 @@ def context_settings(request):
|
||||
'TERMS_URL': settings.TERMS_URL,
|
||||
'PRIVACY_URL': settings.PRIVACY_URL,
|
||||
'IMPRINT_URL': settings.IMPRINT_URL,
|
||||
'SHOPPING_MIN_AUTOSYNC_INTERVAL': settings.SHOPPING_MIN_AUTOSYNC_INTERVAL,
|
||||
}
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import os
|
||||
import sys
|
||||
from io import BytesIO
|
||||
|
||||
from PIL import Image
|
||||
from io import BytesIO
|
||||
|
||||
|
||||
def rescale_image_jpeg(image_object, base_width=1020):
|
||||
@@ -11,7 +10,7 @@ def rescale_image_jpeg(image_object, base_width=1020):
|
||||
width_percent = (base_width / float(img.size[0]))
|
||||
height = int((float(img.size[1]) * float(width_percent)))
|
||||
|
||||
img = img.resize((base_width, height), Image.ANTIALIAS)
|
||||
img = img.resize((base_width, height), Image.LANCZOS)
|
||||
img_bytes = BytesIO()
|
||||
img.save(img_bytes, 'JPEG', quality=90, optimize=True, icc_profile=icc_profile)
|
||||
|
||||
@@ -22,7 +21,7 @@ def rescale_image_png(image_object, base_width=1020):
|
||||
image_object = Image.open(image_object)
|
||||
wpercent = (base_width / float(image_object.size[0]))
|
||||
hsize = int((float(image_object.size[1]) * float(wpercent)))
|
||||
img = image_object.resize((base_width, hsize), Image.ANTIALIAS)
|
||||
img = image_object.resize((base_width, hsize), Image.LANCZOS)
|
||||
|
||||
im_io = BytesIO()
|
||||
img.save(im_io, 'PNG', quality=90)
|
||||
@@ -38,10 +37,17 @@ def get_filetype(name):
|
||||
|
||||
# TODO this whole file needs proper documentation, refactoring, and testing
|
||||
# TODO also add env variable to define which images sizes should be compressed
|
||||
def handle_image(request, image_object, filetype='.jpeg'):
|
||||
# filetype argument can not be optional, otherwise this function will treat all images as if they were a jpeg
|
||||
# Because it's no longer optional, no reason to return it
|
||||
def handle_image(request, image_object, filetype):
|
||||
try:
|
||||
Image.open(image_object).verify()
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
if (image_object.size / 1000) > 500: # if larger than 500 kb compress
|
||||
if filetype == '.jpeg' or filetype == '.jpg':
|
||||
return rescale_image_jpeg(image_object), filetype
|
||||
return rescale_image_jpeg(image_object)
|
||||
if filetype == '.png':
|
||||
return rescale_image_png(image_object), filetype
|
||||
return image_object, filetype
|
||||
return rescale_image_png(image_object)
|
||||
return image_object
|
||||
|
||||
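The Image.ANTIALIAS to Image.LANCZOS change above is needed for Pillow 10, which removed the ANTIALIAS alias; LANCZOS is the same resampling filter under its current name. A minimal standalone example of the resize call used by both rescale helpers (function name invented):

    from io import BytesIO
    from PIL import Image

    def downscale(path, base_width=1020):
        img = Image.open(path)
        ratio = base_width / float(img.size[0])
        height = int(img.size[1] * ratio)
        img = img.resize((base_width, height), Image.LANCZOS)  # ANTIALIAS was removed in Pillow 10
        buf = BytesIO()
        img.save(buf, 'JPEG', quality=90)
        return buf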
@@ -2,18 +2,16 @@ import re
|
||||
import string
|
||||
import unicodedata
|
||||
|
||||
from django.core.cache import caches
|
||||
|
||||
from cookbook.models import Unit, Food, Automation
|
||||
from cookbook.helper.automation_helper import AutomationEngine
|
||||
from cookbook.models import Food, Ingredient, Unit
|
||||
|
||||
|
||||
class IngredientParser:
|
||||
request = None
|
||||
ignore_rules = False
|
||||
food_aliases = {}
|
||||
unit_aliases = {}
|
||||
automation = None
|
||||
|
||||
def __init__(self, request, cache_mode, ignore_automations=False):
|
||||
def __init__(self, request, cache_mode=True, ignore_automations=False):
|
||||
"""
|
||||
Initialize ingredient parser
|
||||
:param request: request context (to control caching, rule ownership, etc.)
|
||||
@@ -22,65 +20,8 @@ class IngredientParser:
|
||||
"""
|
||||
self.request = request
|
||||
self.ignore_rules = ignore_automations
|
||||
if cache_mode:
|
||||
FOOD_CACHE_KEY = f'automation_food_alias_{self.request.space.pk}'
|
||||
if c := caches['default'].get(FOOD_CACHE_KEY, None):
|
||||
self.food_aliases = c
|
||||
caches['default'].touch(FOOD_CACHE_KEY, 30)
|
||||
else:
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.FOOD_ALIAS).only('param_1', 'param_2').all():
|
||||
self.food_aliases[a.param_1] = a.param_2
|
||||
caches['default'].set(FOOD_CACHE_KEY, self.food_aliases, 30)
|
||||
|
||||
UNIT_CACHE_KEY = f'automation_unit_alias_{self.request.space.pk}'
|
||||
if c := caches['default'].get(UNIT_CACHE_KEY, None):
|
||||
self.unit_aliases = c
|
||||
caches['default'].touch(UNIT_CACHE_KEY, 30)
|
||||
else:
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.UNIT_ALIAS).only('param_1', 'param_2').all():
|
||||
self.unit_aliases[a.param_1] = a.param_2
|
||||
caches['default'].set(UNIT_CACHE_KEY, self.unit_aliases, 30)
|
||||
else:
|
||||
self.food_aliases = {}
|
||||
self.unit_aliases = {}
|
||||
|
||||
def apply_food_automation(self, food):
|
||||
"""
|
||||
Apply food alias automations to passed foood
|
||||
:param food: unit as string
|
||||
:return: food as string (possibly changed by automation)
|
||||
"""
|
||||
if self.ignore_rules:
|
||||
return food
|
||||
else:
|
||||
if self.food_aliases:
|
||||
try:
|
||||
return self.food_aliases[food]
|
||||
except KeyError:
|
||||
return food
|
||||
else:
|
||||
if automation := Automation.objects.filter(space=self.request.space, type=Automation.FOOD_ALIAS, param_1=food, disabled=False).first():
|
||||
return automation.param_2
|
||||
return food
|
||||
|
||||
def apply_unit_automation(self, unit):
|
||||
"""
|
||||
Apply unit alias automations to passed unit
|
||||
:param unit: unit as string
|
||||
:return: unit as string (possibly changed by automation)
|
||||
"""
|
||||
if self.ignore_rules:
|
||||
return unit
|
||||
else:
|
||||
if self.unit_aliases:
|
||||
try:
|
||||
return self.unit_aliases[unit]
|
||||
except KeyError:
|
||||
return unit
|
||||
else:
|
||||
if automation := Automation.objects.filter(space=self.request.space, type=Automation.UNIT_ALIAS, param_1=unit, disabled=False).first():
|
||||
return automation.param_2
|
||||
return unit
|
||||
if not self.ignore_rules:
|
||||
self.automation = AutomationEngine(self.request, use_cache=cache_mode)
|
||||
|
||||
def get_unit(self, unit):
|
||||
"""
|
||||
@@ -91,7 +32,10 @@ class IngredientParser:
|
||||
if not unit:
|
||||
return None
|
||||
if len(unit) > 0:
|
||||
u, created = Unit.objects.get_or_create(name=self.apply_unit_automation(unit), space=self.request.space)
|
||||
if self.ignore_rules:
|
||||
u, created = Unit.objects.get_or_create(name=unit.strip(), space=self.request.space)
|
||||
else:
|
||||
u, created = Unit.objects.get_or_create(name=self.automation.apply_unit_automation(unit), space=self.request.space)
|
||||
return u
|
||||
return None
|
||||
|
||||
@@ -104,7 +48,10 @@ class IngredientParser:
|
||||
if not food:
|
||||
return None
|
||||
if len(food) > 0:
|
||||
f, created = Food.objects.get_or_create(name=self.apply_food_automation(food), space=self.request.space)
|
||||
if self.ignore_rules:
|
||||
f, created = Food.objects.get_or_create(name=food.strip(), space=self.request.space)
|
||||
else:
|
||||
f, created = Food.objects.get_or_create(name=self.automation.apply_food_automation(food), space=self.request.space)
|
||||
return f
|
||||
return None
|
||||
|
||||
@@ -124,17 +71,19 @@ class IngredientParser:
|
||||
|
||||
def parse_amount(self, x):
|
||||
amount = 0
|
||||
unit = ''
|
||||
unit = None
|
||||
note = ''
|
||||
if x.strip() == '':
|
||||
return amount, unit, note
|
||||
|
||||
did_check_frac = False
|
||||
end = 0
|
||||
while (end < len(x) and (x[end] in string.digits
|
||||
or (
|
||||
(x[end] == '.' or x[end] == ',' or x[end] == '/')
|
||||
and end + 1 < len(x)
|
||||
and x[end + 1] in string.digits
|
||||
))):
|
||||
(x[end] == '.' or x[end] == ',' or x[end] == '/')
|
||||
and end + 1 < len(x)
|
||||
and x[end + 1] in string.digits
|
||||
))):
|
||||
end += 1
|
||||
if end > 0:
|
||||
if "/" in x[:end]:
|
||||
@@ -155,33 +104,37 @@ class IngredientParser:
|
||||
except ValueError:
|
||||
unit = x[end:]
|
||||
|
||||
if unit.startswith('(') or unit.startswith('-'):  # I don't know any unit that starts with ( or -, so it's likely an alternative like 1L (500ml) Water or 2-3
|
||||
unit = ''
|
||||
if unit is not None and unit.strip() == '':
|
||||
unit = None
|
||||
|
||||
if unit is not None and (unit.startswith('(') or unit.startswith(
|
||||
'-')):  # I don't know any unit that starts with ( or -, so it's likely an alternative like 1L (500ml) Water or 2-3
|
||||
unit = None
|
||||
note = x
|
||||
return amount, unit, note
|
||||
|
||||
def parse_ingredient_with_comma(self, tokens):
|
||||
ingredient = ''
|
||||
def parse_food_with_comma(self, tokens):
|
||||
food = ''
|
||||
note = ''
|
||||
start = 0
|
||||
# search for first occurrence of an argument ending in a comma
|
||||
while start < len(tokens) and not tokens[start].endswith(','):
|
||||
start += 1
|
||||
if start == len(tokens):
|
||||
# no token ending in a comma found -> use everything as ingredient
|
||||
ingredient = ' '.join(tokens)
|
||||
# no token ending in a comma found -> use everything as food
|
||||
food = ' '.join(tokens)
|
||||
else:
|
||||
ingredient = ' '.join(tokens[:start + 1])[:-1]
|
||||
food = ' '.join(tokens[:start + 1])[:-1]
|
||||
note = ' '.join(tokens[start + 1:])
|
||||
return ingredient, note
|
||||
return food, note
|
||||
|
||||
def parse_ingredient(self, tokens):
|
||||
ingredient = ''
|
||||
def parse_food(self, tokens):
|
||||
food = ''
|
||||
note = ''
|
||||
if tokens[-1].endswith(')'):
|
||||
# Check if the matching opening bracket is in the same token
|
||||
if (not tokens[-1].startswith('(')) and ('(' in tokens[-1]):
|
||||
return self.parse_ingredient_with_comma(tokens)
|
||||
return self.parse_food_with_comma(tokens)
|
||||
# last argument ends with closing bracket -> look for opening bracket
|
||||
start = len(tokens) - 1
|
||||
while not tokens[start].startswith('(') and not start == 0:
|
||||
@@ -191,33 +144,62 @@ class IngredientParser:
|
||||
raise ValueError
|
||||
elif start < 0:
|
||||
# no opening bracket anywhere -> just ignore the last bracket
|
||||
ingredient, note = self.parse_ingredient_with_comma(tokens)
|
||||
food, note = self.parse_food_with_comma(tokens)
|
||||
else:
|
||||
# opening bracket found -> split in ingredient and note, remove brackets from note # noqa: E501
|
||||
# opening bracket found -> split in food and note, remove brackets from note # noqa: E501
|
||||
note = ' '.join(tokens[start:])[1:-1]
|
||||
ingredient = ' '.join(tokens[:start])
|
||||
food = ' '.join(tokens[:start])
|
||||
else:
|
||||
ingredient, note = self.parse_ingredient_with_comma(tokens)
|
||||
return ingredient, note
|
||||
food, note = self.parse_food_with_comma(tokens)
|
||||
return food, note
|
||||
|
||||
def parse(self, x):
|
||||
def parse(self, ingredient):
|
||||
"""
|
||||
Main parsing function, takes an ingredient string (e.g. '1 l Water') and extracts amount, unit, food, ...
|
||||
:param ingredient: string ingredient
|
||||
:return: amount, unit (can be None), food, note (can be empty)
|
||||
"""
|
||||
# initialize default values
|
||||
amount = 0
|
||||
unit = ''
|
||||
ingredient = ''
|
||||
unit = None
|
||||
food = ''
|
||||
note = ''
|
||||
unit_note = ''
|
||||
|
||||
if len(ingredient) == 0:
|
||||
raise ValueError('string to parse cannot be empty')
|
||||
|
||||
# some people/languages put amount and unit at the end of the ingredient string
|
||||
# if something like this is detected, move it to the beginning so the parser can handle it
|
||||
if len(ingredient) < 1000 and re.search(r'^([^\W\d_])+(.)*[1-9](\d)*\s*([^\W\d_])+', ingredient):
|
||||
match = re.search(r'[1-9](\d)*\s*([^\W\d_])+', ingredient)
|
||||
print(f'reordering from {ingredient} to {ingredient[match.start():match.end()] + " " + ingredient.replace(ingredient[match.start():match.end()], "")}')
|
||||
ingredient = ingredient[match.start():match.end()] + ' ' + ingredient.replace(ingredient[match.start():match.end()], '')
|
||||
|
||||
# if the string contains parentheses early on, remove them and place them at the end
|
||||
# because it's likely some kind of note
|
||||
if re.match('(.){1,6}\s\((.[^\(\)])+\)\s', x):
|
||||
match = re.search('\((.[^\(])+\)', x)
|
||||
x = x[:match.start()] + x[match.end():] + ' ' + x[match.start():match.end()]
|
||||
if re.match('(.){1,6}\\s\\((.[^\\(\\)])+\\)\\s', ingredient):
|
||||
match = re.search('\\((.[^\\(])+\\)', ingredient)
|
||||
ingredient = ingredient[:match.start()] + ingredient[match.end():] + ' ' + ingredient[match.start():match.end()]
|
||||
|
||||
tokens = x.split()
|
||||
# leading spaces before commas result in extra tokens, clean them out
|
||||
ingredient = ingredient.replace(' ,', ',')
|
||||
|
||||
# handle "(from) - (to)" amounts by using the minimum amount and adding the range to the description
|
||||
# "10.5 - 200 g XYZ" => "100 g XYZ (10.5 - 200)"
|
||||
ingredient = re.sub("^(\\d+|\\d+[\\.,]\\d+) - (\\d+|\\d+[\\.,]\\d+) (.*)", "\\1 \\3 (\\1 - \\2)", ingredient)
|
||||
|
||||
# if amount and unit are connected add space in between
|
||||
if re.match('([0-9])+([A-z])+\\s', ingredient):
|
||||
ingredient = re.sub(r'(?<=([a-z])|\d)(?=(?(1)\d|[a-z]))', ' ', ingredient)
|
||||
|
||||
if not self.ignore_rules:
|
||||
ingredient = self.automation.apply_transpose_automation(ingredient)
|
||||
|
||||
tokens = ingredient.split() # split at each space into tokens
|
||||
if len(tokens) == 1:
|
||||
# there only is one argument, that must be the ingredient
|
||||
ingredient = tokens[0]
|
||||
# there is only one argument, so it must be the food
|
||||
food = tokens[0]
|
||||
else:
|
||||
try:
|
||||
# try to parse first argument as amount
|
||||
@@ -226,49 +208,69 @@ class IngredientParser:
|
||||
# three arguments if it already has a unit there can't be
|
||||
# a fraction for the amount
|
||||
if len(tokens) > 2:
|
||||
if not self.ignore_rules:
|
||||
tokens = self.automation.apply_never_unit_automation(tokens)
|
||||
try:
|
||||
if not unit == '':
|
||||
if unit is not None:
|
||||
# a unit is already found, no need to try the second argument for a fraction
|
||||
# probably not the best method to do it, but I didn't want to make an if check and paste the exact same thing in the else as already is in the except # noqa: E501
|
||||
# probably not the cleanest approach, but it avoids duplicating the except branch in an else
|
||||
raise ValueError
|
||||
# try to parse second argument as amount and add that, in case of '2 1/2' or '2 ½'
|
||||
amount += self.parse_fraction(tokens[1])
|
||||
# assume that units can't end with a comma
|
||||
if len(tokens) > 3 and not tokens[2].endswith(','):
|
||||
# try to use third argument as unit and everything else as ingredient, use everything as ingredient if it fails # noqa: E501
|
||||
# try to use third argument as unit and everything else as food, use everything as food if it fails
|
||||
try:
|
||||
ingredient, note = self.parse_ingredient(tokens[3:])
|
||||
food, note = self.parse_food(tokens[3:])
|
||||
unit = tokens[2]
|
||||
except ValueError:
|
||||
ingredient, note = self.parse_ingredient(tokens[2:])
|
||||
food, note = self.parse_food(tokens[2:])
|
||||
else:
|
||||
ingredient, note = self.parse_ingredient(tokens[2:])
|
||||
food, note = self.parse_food(tokens[2:])
|
||||
except ValueError:
|
||||
# assume that units can't end with a comma
|
||||
if not tokens[1].endswith(','):
|
||||
# try to use second argument as unit and everything else as ingredient, use everything as ingredient if it fails # noqa: E501
|
||||
# try to use second argument as unit and everything else as food, use everything as food if it fails
|
||||
try:
|
||||
ingredient, note = self.parse_ingredient(tokens[2:])
|
||||
if unit == '':
|
||||
food, note = self.parse_food(tokens[2:])
|
||||
if unit is None:
|
||||
unit = tokens[1]
|
||||
else:
|
||||
note = tokens[1]
|
||||
except ValueError:
|
||||
ingredient, note = self.parse_ingredient(tokens[1:])
|
||||
food, note = self.parse_food(tokens[1:])
|
||||
else:
|
||||
ingredient, note = self.parse_ingredient(tokens[1:])
|
||||
food, note = self.parse_food(tokens[1:])
|
||||
else:
|
||||
# only two arguments, first one is the amount
|
||||
# which means this is the ingredient
|
||||
ingredient = tokens[1]
|
||||
# which means this is the food
|
||||
food = tokens[1]
|
||||
except ValueError:
|
||||
try:
|
||||
# can't parse first argument as amount
|
||||
# -> no unit -> parse everything as ingredient
|
||||
ingredient, note = self.parse_ingredient(tokens)
|
||||
# -> no unit -> parse everything as food
|
||||
food, note = self.parse_food(tokens)
|
||||
except ValueError:
|
||||
ingredient = ' '.join(tokens[1:])
|
||||
food = ' '.join(tokens[1:])
|
||||
|
||||
if unit_note not in note:
|
||||
note += ' ' + unit_note
|
||||
return amount, self.apply_unit_automation(unit.strip()), self.apply_food_automation(ingredient.strip()), note.strip()
|
||||
|
||||
if unit and not self.ignore_rules:
|
||||
unit = self.automation.apply_unit_automation(unit)
|
||||
|
||||
if food and not self.ignore_rules:
|
||||
food = self.automation.apply_food_automation(food)
|
||||
if len(food) > Food._meta.get_field('name').max_length:  # test if the food name is too long
|
||||
# try splitting it at a space and taking only the first arg
|
||||
if len(food.split()) > 1 and len(food.split()[0]) < Food._meta.get_field('name').max_length:
|
||||
note = ' '.join(food.split()[1:]) + ' ' + note
|
||||
food = food.split()[0]
|
||||
else:
|
||||
note = food + ' ' + note
|
||||
food = food[:Food._meta.get_field('name').max_length]
|
||||
|
||||
if len(food.strip()) == 0:
|
||||
raise ValueError(f'Error parsing string {ingredient}, food cannot be empty')
|
||||
|
||||
return amount, unit, food, note[:Ingredient._meta.get_field('note').max_length].strip()
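A minimal usage sketch of the reworked parser, based on the signatures in this hunk (the example string and the expected values are illustrative):

# Sketch only: exercising IngredientParser.parse(); request must carry a .space.
parser = IngredientParser(request, cache_mode=True)
amount, unit, food, note = parser.parse('2 1/2 tbsp olive oil, extra virgin')
# should yield roughly: amount=2.5, unit='tbsp', food='olive oil', note='extra virgin'
# unit and food come back as strings; get_unit() / get_food() turn them into model instances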
|
||||
|
||||
@@ -35,6 +35,7 @@ Negative examples:
|
||||
u'<p>del.icio.us</p>'
|
||||
|
||||
"""
|
||||
from xml.etree.ElementTree import Element
|
||||
|
||||
import markdown
|
||||
|
||||
@@ -64,7 +65,7 @@ class UrlizePattern(markdown.inlinepatterns.Pattern):
|
||||
else:
|
||||
url = 'http://' + url
|
||||
|
||||
el = markdown.util.etree.Element("a")
|
||||
el = Element("a")
|
||||
el.set('href', url)
|
||||
el.text = markdown.util.AtomicString(text)
|
||||
return el
|
||||
@@ -73,9 +74,9 @@ class UrlizePattern(markdown.inlinepatterns.Pattern):
|
||||
class UrlizeExtension(markdown.Extension):
|
||||
""" Urlize Extension for Python-Markdown. """
|
||||
|
||||
def extendMarkdown(self, md, md_globals):
|
||||
def extendMarkdown(self, md):
|
||||
""" Replace autolink with UrlizePattern """
|
||||
md.inlinePatterns['autolink'] = UrlizePattern(URLIZE_RE, md)
|
||||
md.inlinePatterns.register(UrlizePattern(URLIZE_RE, md), 'autolink', 120)
|
||||
|
||||
|
||||
def makeExtension(*args, **kwargs):
|
||||
|
||||
cookbook/helper/open_data_importer.py (new file, 210 lines)
@@ -0,0 +1,210 @@
|
||||
from cookbook.models import (Food, FoodProperty, Property, PropertyType, Supermarket,
|
||||
SupermarketCategory, SupermarketCategoryRelation, Unit, UnitConversion)
|
||||
|
||||
|
||||
class OpenDataImporter:
|
||||
request = None
|
||||
data = {}
|
||||
slug_id_cache = {}
|
||||
update_existing = False
|
||||
use_metric = True
|
||||
|
||||
def __init__(self, request, data, update_existing=False, use_metric=True):
|
||||
self.request = request
|
||||
self.data = data
|
||||
self.update_existing = update_existing
|
||||
self.use_metric = use_metric
|
||||
|
||||
def _update_slug_cache(self, object_class, datatype):
|
||||
self.slug_id_cache[datatype] = dict(object_class.objects.filter(space=self.request.space, open_data_slug__isnull=False).values_list('open_data_slug', 'id', ))
|
||||
|
||||
def import_units(self):
|
||||
datatype = 'unit'
|
||||
|
||||
insert_list = []
|
||||
for u in list(self.data[datatype].keys()):
|
||||
insert_list.append(Unit(
|
||||
name=self.data[datatype][u]['name'],
|
||||
plural_name=self.data[datatype][u]['plural_name'],
|
||||
base_unit=self.data[datatype][u]['base_unit'] if self.data[datatype][u]['base_unit'] != '' else None,
|
||||
open_data_slug=u,
|
||||
space=self.request.space
|
||||
))
|
||||
|
||||
if self.update_existing:
|
||||
return Unit.objects.bulk_create(insert_list, update_conflicts=True, update_fields=(
|
||||
'name', 'plural_name', 'base_unit', 'open_data_slug'), unique_fields=('space', 'name',))
|
||||
else:
|
||||
return Unit.objects.bulk_create(insert_list, update_conflicts=True, update_fields=('open_data_slug',), unique_fields=('space', 'name',))
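The importer relies on Django's bulk_create upsert support (update_conflicts / update_fields / unique_fields, available since Django 4.1). A minimal standalone sketch of that pattern using the Unit fields from this file; the space variable is assumed to be in scope:

# Sketch only: bulk "insert or update" mirroring import_units() above.
objs = [Unit(name='gram', plural_name='grams', open_data_slug='gram', space=space)]
Unit.objects.bulk_create(
    objs,
    update_conflicts=True,               # ON CONFLICT ... DO UPDATE instead of raising
    unique_fields=('space', 'name'),     # the constraint that triggers the conflict path
    update_fields=('open_data_slug',),   # only these columns are overwritten on conflict
)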
|
||||
|
||||
def import_category(self):
|
||||
datatype = 'category'
|
||||
|
||||
insert_list = []
|
||||
for k in list(self.data[datatype].keys()):
|
||||
insert_list.append(SupermarketCategory(
|
||||
name=self.data[datatype][k]['name'],
|
||||
open_data_slug=k,
|
||||
space=self.request.space
|
||||
))
|
||||
|
||||
return SupermarketCategory.objects.bulk_create(insert_list, update_conflicts=True, update_fields=('open_data_slug',), unique_fields=('space', 'name',))
|
||||
|
||||
def import_property(self):
|
||||
datatype = 'property'
|
||||
|
||||
insert_list = []
|
||||
for k in list(self.data[datatype].keys()):
|
||||
insert_list.append(PropertyType(
|
||||
name=self.data[datatype][k]['name'],
|
||||
unit=self.data[datatype][k]['unit'],
|
||||
open_data_slug=k,
|
||||
space=self.request.space
|
||||
))
|
||||
|
||||
return PropertyType.objects.bulk_create(insert_list, update_conflicts=True, update_fields=('open_data_slug',), unique_fields=('space', 'name',))
|
||||
|
||||
def import_supermarket(self):
|
||||
datatype = 'store'
|
||||
|
||||
self._update_slug_cache(SupermarketCategory, 'category')
|
||||
insert_list = []
|
||||
for k in list(self.data[datatype].keys()):
|
||||
insert_list.append(Supermarket(
|
||||
name=self.data[datatype][k]['name'],
|
||||
open_data_slug=k,
|
||||
space=self.request.space
|
||||
))
|
||||
|
||||
# always add open data slug if matching supermarket is found, otherwise relation might fail
|
||||
supermarkets = Supermarket.objects.bulk_create(insert_list, unique_fields=('space', 'name',), update_conflicts=True, update_fields=('open_data_slug',))
|
||||
self._update_slug_cache(Supermarket, 'store')
|
||||
|
||||
insert_list = []
|
||||
for k in list(self.data[datatype].keys()):
|
||||
relations = []
|
||||
order = 0
|
||||
for c in self.data[datatype][k]['categories']:
|
||||
relations.append(
|
||||
SupermarketCategoryRelation(
|
||||
supermarket_id=self.slug_id_cache[datatype][k],
|
||||
category_id=self.slug_id_cache['category'][c],
|
||||
order=order,
|
||||
)
|
||||
)
|
||||
order += 1
|
||||
|
||||
SupermarketCategoryRelation.objects.bulk_create(relations, ignore_conflicts=True, unique_fields=('supermarket', 'category',))
|
||||
|
||||
return supermarkets
|
||||
|
||||
def import_food(self):
|
||||
identifier_list = []
|
||||
datatype = 'food'
|
||||
for k in list(self.data[datatype].keys()):
|
||||
identifier_list.append(self.data[datatype][k]['name'])
|
||||
identifier_list.append(self.data[datatype][k]['plural_name'])
|
||||
|
||||
existing_objects_flat = []
|
||||
existing_objects = {}
|
||||
for f in Food.objects.filter(space=self.request.space).filter(name__in=identifier_list).values_list('id', 'name', 'plural_name'):
|
||||
existing_objects_flat.append(f[1])
|
||||
existing_objects_flat.append(f[2])
|
||||
existing_objects[f[1]] = f
|
||||
existing_objects[f[2]] = f
|
||||
|
||||
self._update_slug_cache(Unit, 'unit')
|
||||
self._update_slug_cache(PropertyType, 'property')
|
||||
|
||||
insert_list = []
|
||||
insert_list_flat = []
|
||||
update_list = []
|
||||
update_field_list = []
|
||||
for k in list(self.data[datatype].keys()):
|
||||
if not (self.data[datatype][k]['name'] in existing_objects_flat or self.data[datatype][k]['plural_name'] in existing_objects_flat):
|
||||
if not (self.data[datatype][k]['name'] in insert_list_flat or self.data[datatype][k]['plural_name'] in insert_list_flat):
|
||||
insert_list.append({'data': {
|
||||
'name': self.data[datatype][k]['name'],
|
||||
'plural_name': self.data[datatype][k]['plural_name'] if self.data[datatype][k]['plural_name'] != '' else None,
|
||||
'supermarket_category_id': self.slug_id_cache['category'][self.data[datatype][k]['store_category']],
|
||||
'fdc_id': self.data[datatype][k]['fdc_id'] if self.data[datatype][k]['fdc_id'] != '' else None,
|
||||
'open_data_slug': k,
|
||||
'space': self.request.space.id,
|
||||
}})
|
||||
# build a fake second flat array to prevent duplicate foods from being inserted.
|
||||
# trying to insert a duplicate would throw a db error :(
|
||||
insert_list_flat.append(self.data[datatype][k]['name'])
|
||||
insert_list_flat.append(self.data[datatype][k]['plural_name'])
|
||||
else:
|
||||
if self.data[datatype][k]['name'] in existing_objects:
|
||||
existing_food_id = existing_objects[self.data[datatype][k]['name']][0]
|
||||
else:
|
||||
existing_food_id = existing_objects[self.data[datatype][k]['plural_name']][0]
|
||||
|
||||
if self.update_existing:
|
||||
update_field_list = ['name', 'plural_name', 'preferred_unit_id', 'preferred_shopping_unit_id', 'supermarket_category_id', 'fdc_id', 'open_data_slug', ]
|
||||
update_list.append(Food(
|
||||
id=existing_food_id,
|
||||
name=self.data[datatype][k]['name'],
|
||||
plural_name=self.data[datatype][k]['plural_name'] if self.data[datatype][k]['plural_name'] != '' else None,
|
||||
supermarket_category_id=self.slug_id_cache['category'][self.data[datatype][k]['store_category']],
|
||||
fdc_id=self.data[datatype][k]['fdc_id'] if self.data[datatype][k]['fdc_id'] != '' else None,
|
||||
open_data_slug=k,
|
||||
))
|
||||
else:
|
||||
update_field_list = ['open_data_slug', ]
|
||||
update_list.append(Food(id=existing_food_id, open_data_slug=k, ))
|
||||
|
||||
Food.load_bulk(insert_list, None)
|
||||
if len(update_list) > 0:
|
||||
Food.objects.bulk_update(update_list, update_field_list)
|
||||
|
||||
self._update_slug_cache(Food, 'food')
|
||||
|
||||
food_property_list = []
|
||||
# alias_list = []
|
||||
|
||||
for k in list(self.data[datatype].keys()):
|
||||
for fp in self.data[datatype][k]['properties']['type_values']:
|
||||
# try/except here because sometimes key "k" is not set for the food cache
|
||||
try:
|
||||
food_property_list.append(Property(
|
||||
property_type_id=self.slug_id_cache['property'][fp['property_type']],
|
||||
property_amount=fp['property_value'],
|
||||
import_food_id=self.slug_id_cache['food'][k],
|
||||
space=self.request.space,
|
||||
))
|
||||
except KeyError:
|
||||
print(str(k) + ' is not in self.slug_id_cache["food"]')
|
||||
|
||||
Property.objects.bulk_create(food_property_list, ignore_conflicts=True, unique_fields=('space', 'import_food_id', 'property_type',))
|
||||
|
||||
property_food_relation_list = []
|
||||
for p in Property.objects.filter(space=self.request.space, import_food_id__isnull=False).values_list('import_food_id', 'id', ):
|
||||
property_food_relation_list.append(Food.properties.through(food_id=p[0], property_id=p[1]))
|
||||
|
||||
FoodProperty.objects.bulk_create(property_food_relation_list, ignore_conflicts=True, unique_fields=('food_id', 'property_id',))
|
||||
|
||||
return insert_list + update_list
|
||||
|
||||
def import_conversion(self):
|
||||
datatype = 'conversion'
|
||||
|
||||
insert_list = []
|
||||
for k in list(self.data[datatype].keys()):
|
||||
# try/except here because sometimes key "k" is not set for the food cache
|
||||
try:
|
||||
insert_list.append(UnitConversion(
|
||||
base_amount=self.data[datatype][k]['base_amount'],
|
||||
base_unit_id=self.slug_id_cache['unit'][self.data[datatype][k]['base_unit']],
|
||||
converted_amount=self.data[datatype][k]['converted_amount'],
|
||||
converted_unit_id=self.slug_id_cache['unit'][self.data[datatype][k]['converted_unit']],
|
||||
food_id=self.slug_id_cache['food'][self.data[datatype][k]['food']],
|
||||
open_data_slug=k,
|
||||
space=self.request.space,
|
||||
created_by=self.request.user,
|
||||
))
|
||||
except KeyError:
|
||||
print(str(k) + ' is not in self.slug_id_cache["food"]')
|
||||
|
||||
return UnitConversion.objects.bulk_create(insert_list, ignore_conflicts=True, unique_fields=('space', 'base_unit', 'converted_unit', 'food', 'open_data_slug'))
|
||||
@@ -1,18 +1,19 @@
|
||||
"""
|
||||
Source: https://djangosnippets.org/snippets/1703/
|
||||
"""
|
||||
import inspect
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib import messages
|
||||
from django.contrib.auth.decorators import user_passes_test
|
||||
from django.core.cache import caches
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.cache import cache
|
||||
from django.core.exceptions import ObjectDoesNotExist, ValidationError
|
||||
from django.http import HttpResponseRedirect
|
||||
from django.urls import reverse, reverse_lazy
|
||||
from django.utils.translation import gettext as _
|
||||
from oauth2_provider.contrib.rest_framework import TokenHasReadWriteScope, TokenHasScope
|
||||
from oauth2_provider.models import AccessToken
|
||||
from rest_framework import permissions
|
||||
from rest_framework.permissions import SAFE_METHODS
|
||||
|
||||
from cookbook.models import ShareLink
|
||||
from cookbook.models import Recipe, ShareLink, UserSpace
|
||||
|
||||
|
||||
def get_allowed_groups(groups_required):
|
||||
@@ -30,11 +31,12 @@ def get_allowed_groups(groups_required):
|
||||
return groups_allowed
|
||||
|
||||
|
||||
def has_group_permission(user, groups):
|
||||
def has_group_permission(user, groups, no_cache=False):
|
||||
"""
|
||||
Tests if a given user is a member of a certain group (or any higher group)
|
||||
Superusers always bypass permission checks.
|
||||
Unauthenticated users can't be members of any group, thus this always returns False.
|
||||
:param no_cache: (optional) do not return cached results, always check against the DB
|
||||
:param user: django auth user object
|
||||
:param groups: list or tuple of groups the user should be checked for
|
||||
:return: True if user is in allowed groups, false otherwise
|
||||
@@ -42,10 +44,23 @@ def has_group_permission(user, groups):
|
||||
if not user.is_authenticated:
|
||||
return False
|
||||
groups_allowed = get_allowed_groups(groups)
|
||||
|
||||
CACHE_KEY = hash((inspect.stack()[0][3], (user.pk, user.username, user.email), groups_allowed))
|
||||
if not no_cache:
|
||||
cached_result = cache.get(CACHE_KEY, default=None)
|
||||
if cached_result is not None:
|
||||
return cached_result
|
||||
|
||||
result = False
|
||||
if user.is_authenticated:
|
||||
if bool(user.groups.filter(name__in=groups_allowed)):
|
||||
return True
|
||||
return False
|
||||
if user_space := user.userspace_set.filter(active=True):
|
||||
if len(user_space) != 1:
|
||||
result = False # do not allow any group permission if more than one space is active, needs to be changed when simultaneous multi-space-tenancy is added
|
||||
elif bool(user_space.first().groups.filter(name__in=groups_allowed)):
|
||||
result = True
|
||||
|
||||
cache.set(CACHE_KEY, result, timeout=10)
|
||||
return result
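The group check now memoizes its result for 10 seconds under a key derived from the function name, the user's identity and the allowed groups. A minimal sketch of that caching idiom in isolation (function name and compute callback are illustrative):

# Sketch only: short-lived memoization with Django's cache, as used above.
import inspect

from django.core.cache import cache

def cached_check(user, groups_allowed, compute, no_cache=False):
    # groups_allowed must be hashable (e.g. a tuple) for the key derivation to work
    key = hash((inspect.stack()[0][3], (user.pk, user.username, user.email), groups_allowed))
    if not no_cache and (hit := cache.get(key, default=None)) is not None:
        return hit
    result = compute()                    # the real, DB-backed permission check
    cache.set(key, result, timeout=10)    # a stale result can persist for at most 10 seconds
    return result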
|
||||
|
||||
|
||||
def is_object_owner(user, obj):
|
||||
@@ -53,7 +68,6 @@ def is_object_owner(user, obj):
|
||||
Tests if a given user is the owner of a given object
|
||||
test performed by checking the user against the object's user
|
||||
and created_by field (if it exists)
|
||||
superusers bypass all checks, unauthenticated users cannot own anything
|
||||
:param user django auth user object
|
||||
:param obj any object that should be tested
|
||||
:return: true if user is owner of object, false otherwise
|
||||
@@ -66,11 +80,25 @@ def is_object_owner(user, obj):
|
||||
return False
|
||||
|
||||
|
||||
def is_space_owner(user, obj):
|
||||
"""
|
||||
Tests if a given user is the owner of the space of a given object
|
||||
:param user django auth user object
|
||||
:param obj any object that should be tested
|
||||
:return: true if user is owner of the object's space, false otherwise
|
||||
"""
|
||||
if not user.is_authenticated:
|
||||
return False
|
||||
try:
|
||||
return obj.get_space().get_owner() == user
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def is_object_shared(user, obj):
|
||||
"""
|
||||
Tests if a given object is shared with a given user
|
||||
test performed by checking the user against the object's shared table
|
||||
superusers bypass all checks, unauthenticated users cannot own anything
|
||||
:param user django auth user object
|
||||
:param obj any object that should be tested
|
||||
:return: true if user is shared for object, false otherwise
|
||||
@@ -91,15 +119,15 @@ def share_link_valid(recipe, share):
|
||||
"""
|
||||
try:
|
||||
CACHE_KEY = f'recipe_share_{recipe.pk}_{share}'
|
||||
if c := caches['default'].get(CACHE_KEY, False):
|
||||
if c := cache.get(CACHE_KEY, False):
|
||||
return c
|
||||
|
||||
if link := ShareLink.objects.filter(recipe=recipe, uuid=share, abuse_blocked=False).first():
|
||||
if 0 < settings.SHARING_LIMIT < link.request_count:
|
||||
if 0 < settings.SHARING_LIMIT < link.request_count and not link.space.no_sharing_limit:
|
||||
return False
|
||||
link.request_count += 1
|
||||
link.save()
|
||||
caches['default'].set(CACHE_KEY, True, timeout=3)
|
||||
cache.set(CACHE_KEY, True, timeout=3)
|
||||
return True
|
||||
return False
|
||||
except ValidationError:
|
||||
@@ -166,7 +194,7 @@ class OwnerRequiredMixin(object):
|
||||
|
||||
try:
|
||||
obj = self.get_object()
|
||||
if obj.get_space() != request.space:
|
||||
if not request.user.userspace.filter(space=obj.get_space()).exists():
|
||||
messages.add_message(request, messages.ERROR,
|
||||
_('You do not have the required permissions to view this page!'))
|
||||
return HttpResponseRedirect(reverse_lazy('index'))
|
||||
@@ -184,7 +212,7 @@ class CustomIsOwner(permissions.BasePermission):
|
||||
verifies user has ownership over object
|
||||
(either user or created_by or user is request user)
|
||||
"""
|
||||
message = _('You cannot interact with this object as it is not owned by you!') # noqa: E501
|
||||
message = _('You cannot interact with this object as it is not owned by you!')
|
||||
|
||||
def has_permission(self, request, view):
|
||||
return request.user.is_authenticated
|
||||
@@ -193,6 +221,28 @@ class CustomIsOwner(permissions.BasePermission):
|
||||
return is_object_owner(request.user, obj)
|
||||
|
||||
|
||||
class CustomIsOwnerReadOnly(CustomIsOwner):
|
||||
def has_permission(self, request, view):
|
||||
return super().has_permission(request, view) and request.method in SAFE_METHODS
|
||||
|
||||
def has_object_permission(self, request, view, obj):
|
||||
return super().has_object_permission(request, view, obj) and request.method in SAFE_METHODS
|
||||
|
||||
|
||||
class CustomIsSpaceOwner(permissions.BasePermission):
|
||||
"""
|
||||
Custom permission class for django rest framework views
|
||||
verifies if the user is the owner of the space the object belongs to
|
||||
"""
|
||||
message = _('You cannot interact with this object as it is not owned by you!')
|
||||
|
||||
def has_permission(self, request, view):
|
||||
return request.user.is_authenticated and request.space.created_by == request.user
|
||||
|
||||
def has_object_permission(self, request, view, obj):
|
||||
return is_space_owner(request.user, obj)
|
||||
|
||||
|
||||
# TODO function duplicate/too similar name
|
||||
class CustomIsShared(permissions.BasePermission):
|
||||
"""
|
||||
@@ -205,9 +255,6 @@ class CustomIsShared(permissions.BasePermission):
|
||||
return request.user.is_authenticated
|
||||
|
||||
def has_object_permission(self, request, view, obj):
|
||||
# # temporary hack to make old shopping list work with new shopping list
|
||||
# if obj.__class__.__name__ in ['ShoppingList', 'ShoppingListEntry']:
|
||||
# return is_object_shared(request.user, obj) or obj.created_by in list(request.user.get_shopping_share())
|
||||
return is_object_shared(request.user, obj)
|
||||
|
||||
|
||||
@@ -262,3 +309,134 @@ class CustomIsShare(permissions.BasePermission):
|
||||
if share:
|
||||
return share_link_valid(obj, share)
|
||||
return False
|
||||
|
||||
|
||||
class CustomRecipePermission(permissions.BasePermission):
|
||||
"""
|
||||
Custom permission class for recipe api endpoint
|
||||
"""
|
||||
message = _('You do not have the required permissions to view this page!')
|
||||
|
||||
def has_permission(self, request, view): # user is either at least a guest or a share link is given and the request is safe
|
||||
share = request.query_params.get('share', None)
|
||||
return ((has_group_permission(request.user, ['guest']) and request.method in SAFE_METHODS) or has_group_permission(
|
||||
request.user, ['user'])) or (share and request.method in SAFE_METHODS and 'pk' in view.kwargs)
|
||||
|
||||
def has_object_permission(self, request, view, obj):
|
||||
share = request.query_params.get('share', None)
|
||||
if share:
|
||||
return share_link_valid(obj, share)
|
||||
else:
|
||||
if obj.private:
|
||||
return ((obj.created_by == request.user) or (request.user in obj.shared.all())) and obj.space == request.space
|
||||
else:
|
||||
return ((has_group_permission(request.user, ['guest']) and request.method in SAFE_METHODS)
|
||||
or has_group_permission(request.user, ['user'])) and obj.space == request.space
|
||||
|
||||
|
||||
class CustomUserPermission(permissions.BasePermission):
|
||||
"""
|
||||
Custom permission class for user api endpoint
|
||||
"""
|
||||
message = _('You do not have the required permissions to view this page!')
|
||||
|
||||
def has_permission(self, request, view): # a space filtered user list is visible for everyone
|
||||
return has_group_permission(request.user, ['guest'])
|
||||
|
||||
def has_object_permission(self, request, view, obj): # object write permissions are only available for user
|
||||
if request.method in SAFE_METHODS and 'pk' in view.kwargs and has_group_permission(request.user, ['guest']) and request.space in obj.userspace_set.all():
|
||||
return True
|
||||
elif request.user == obj:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
class CustomTokenHasScope(TokenHasScope):
|
||||
"""
|
||||
Custom implementation of Django OAuth Toolkit TokenHasScope class
|
||||
Only difference: if any other authentication method except OAuth2Authentication is used the scope check is ignored
|
||||
IMPORTANT: do not use this class without any other permission class as it will not check anything besides token scopes
|
||||
"""
|
||||
|
||||
def has_permission(self, request, view):
|
||||
if isinstance(request.auth, AccessToken):
|
||||
return super().has_permission(request, view)
|
||||
else:
|
||||
return request.user.is_authenticated
|
||||
|
||||
|
||||
class CustomTokenHasReadWriteScope(TokenHasReadWriteScope):
|
||||
"""
|
||||
Custom implementation of Django OAuth Toolkit TokenHasReadWriteScope class
|
||||
Only difference: if any other authentication method except OAuth2Authentication is used the scope check is ignored
|
||||
IMPORTANT: do not use this class without any other permission class as it will not check anything besides token scopes
|
||||
"""
|
||||
|
||||
def has_permission(self, request, view):
|
||||
if isinstance(request.auth, AccessToken):
|
||||
return super().has_permission(request, view)
|
||||
else:
|
||||
return True
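Both scope classes warn that they must be paired with another permission class. A minimal sketch of how that pairing looks on a DRF viewset, using the classes defined in this module (the viewset itself is hypothetical and omits queryset/serializer setup):

# Sketch only: combining a scope check with a concrete permission class.
from rest_framework import viewsets

class RecipeViewSet(viewsets.ModelViewSet):
    # DRF supports composing permission classes with &; the scope class alone is not enough
    permission_classes = [CustomRecipePermission & CustomTokenHasReadWriteScope]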
|
||||
|
||||
|
||||
def above_space_limit(space): # TODO add file storage limit
|
||||
"""
|
||||
Test if the space has reached any limit (e.g. max recipes, users, ..)
|
||||
:param space: Space to test for limits
|
||||
:return: Tuple (True if at or above any limit else False, message)
|
||||
"""
|
||||
r_limit, r_msg = above_space_recipe_limit(space)
|
||||
u_limit, u_msg = above_space_user_limit(space)
|
||||
return r_limit or u_limit, (r_msg + ' ' + u_msg).strip()
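A short usage sketch based on the docstring above (view context and variable names are illustrative):

# Sketch only: consuming above_space_limit() in a view.
from django.contrib import messages

limit_reached, message = above_space_limit(request.space)
if limit_reached:
    messages.add_message(request, messages.WARNING, message)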
|
||||
|
||||
|
||||
def above_space_recipe_limit(space):
|
||||
"""
|
||||
Test if a space has reached its recipe limit
|
||||
:param space: Space to test for limits
|
||||
:return: Tuple (True if above or equal limit else false, message)
|
||||
"""
|
||||
limit = space.max_recipes != 0 and Recipe.objects.filter(space=space).count() >= space.max_recipes
|
||||
if limit:
|
||||
return True, _('You have reached the maximum number of recipes for your space.')
|
||||
return False, ''
|
||||
|
||||
|
||||
def above_space_user_limit(space):
|
||||
"""
|
||||
Test if a space has reached its user limit
|
||||
:param space: Space to test for limits
|
||||
:return: Tuple (True if above or equal limit else false, message)
|
||||
"""
|
||||
limit = space.max_users != 0 and UserSpace.objects.filter(space=space).count() > space.max_users
|
||||
if limit:
|
||||
return True, _('You have more users than allowed in your space.')
|
||||
return False, ''
|
||||
|
||||
|
||||
def switch_user_active_space(user, space):
|
||||
"""
|
||||
Switch the currently active space of a user by setting all spaces to inactive and activating the one passed
|
||||
:param user: user to change active space for
|
||||
:param space: space to activate user for
|
||||
:return user space object or none if not found/no permission
|
||||
"""
|
||||
try:
|
||||
us = UserSpace.objects.get(space=space, user=user)
|
||||
if not us.active:
|
||||
UserSpace.objects.filter(user=user).update(active=False)
|
||||
us.active = True
|
||||
us.save()
|
||||
return us
|
||||
else:
|
||||
return us
|
||||
except ObjectDoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
class IsReadOnlyDRF(permissions.BasePermission):
|
||||
message = 'You cannot interact with this object as it is not owned by you!'
|
||||
|
||||
def has_permission(self, request, view):
|
||||
return request.method in SAFE_METHODS
|
||||
|
||||
cookbook/helper/property_helper.py (new file, 74 lines)
@@ -0,0 +1,74 @@
|
||||
from django.core.cache import caches
|
||||
|
||||
from cookbook.helper.cache_helper import CacheHelper
|
||||
from cookbook.helper.unit_conversion_helper import UnitConversionHelper
|
||||
from cookbook.models import PropertyType
|
||||
|
||||
|
||||
class FoodPropertyHelper:
|
||||
space = None
|
||||
|
||||
def __init__(self, space):
|
||||
"""
|
||||
Helper to perform food property calculations
|
||||
:param space: space to limit scope to
|
||||
"""
|
||||
self.space = space
|
||||
|
||||
def calculate_recipe_properties(self, recipe):
|
||||
"""
|
||||
Calculate all food properties for a given recipe.
|
||||
:param recipe: recipe to calculate properties for
|
||||
:return: dict with property keys and total/food values for each available property
|
||||
"""
|
||||
ingredients = []
|
||||
computed_properties = {}
|
||||
|
||||
for s in recipe.steps.all():
|
||||
ingredients += s.ingredients.all()
|
||||
|
||||
property_types = caches['default'].get(CacheHelper(self.space).PROPERTY_TYPE_CACHE_KEY, None)
|
||||
|
||||
if not property_types:
|
||||
property_types = PropertyType.objects.filter(space=self.space).all()
|
||||
# cache is cleared on property type save signal so long duration is fine
|
||||
caches['default'].set(CacheHelper(self.space).PROPERTY_TYPE_CACHE_KEY, property_types, 60 * 60)
|
||||
|
||||
for fpt in property_types:
|
||||
computed_properties[fpt.id] = {'id': fpt.id, 'name': fpt.name, 'description': fpt.description,
|
||||
'unit': fpt.unit, 'order': fpt.order, 'food_values': {}, 'total_value': 0, 'missing_value': False}
|
||||
|
||||
uch = UnitConversionHelper(self.space)
|
||||
|
||||
for i in ingredients:
|
||||
if i.food is not None:
|
||||
conversions = uch.get_conversions(i)
|
||||
for pt in property_types:
|
||||
found_property = False
|
||||
if i.food.properties_food_amount == 0 or i.food.properties_food_unit is None:
|
||||
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': i.food.name, 'value': 0}
|
||||
computed_properties[pt.id]['missing_value'] = i.food.properties_food_unit is None
|
||||
else:
|
||||
for p in i.food.properties.all():
|
||||
if p.property_type == pt:
|
||||
for c in conversions:
|
||||
if c.unit == i.food.properties_food_unit:
|
||||
found_property = True
|
||||
computed_properties[pt.id]['total_value'] += (c.amount / i.food.properties_food_amount) * p.property_amount
|
||||
computed_properties[pt.id]['food_values'] = self.add_or_create(
|
||||
computed_properties[p.property_type.id]['food_values'], c.food.id, (c.amount / i.food.properties_food_amount) * p.property_amount, c.food)
|
||||
if not found_property:
|
||||
computed_properties[pt.id]['missing_value'] = True
|
||||
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': i.food.name, 'value': 0}
|
||||
|
||||
return computed_properties
|
||||
|
||||
# small dict helper to add to an existing key or create a new one; there is probably a better way of doing this
|
||||
# TODO move to central helper ?
|
||||
@staticmethod
|
||||
def add_or_create(d, key, value, food):
|
||||
if key in d:
|
||||
d[key]['value'] += value
|
||||
else:
|
||||
d[key] = {'id': food.id, 'food': food.name, 'value': value}
|
||||
return d
|
||||
@@ -1,194 +0,0 @@
|
||||
import json
|
||||
import re
|
||||
from json import JSONDecodeError
|
||||
from urllib.parse import unquote
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from bs4.element import Tag
|
||||
from recipe_scrapers._utils import get_host_name, normalize_string
|
||||
|
||||
from cookbook.helper import recipe_url_import as helper
|
||||
from cookbook.helper.scrapers.scrapers import text_scraper
|
||||
|
||||
|
||||
def get_recipe_from_source(text, url, request):
|
||||
def build_node(k, v):
|
||||
if isinstance(v, dict):
|
||||
node = {
|
||||
'name': k,
|
||||
'value': k,
|
||||
'children': get_children_dict(v)
|
||||
}
|
||||
elif isinstance(v, list):
|
||||
node = {
|
||||
'name': k,
|
||||
'value': k,
|
||||
'children': get_children_list(v)
|
||||
}
|
||||
else:
|
||||
node = {
|
||||
'name': k + ": " + normalize_string(str(v)),
|
||||
'value': normalize_string(str(v))
|
||||
}
|
||||
return node
|
||||
|
||||
def get_children_dict(children):
|
||||
kid_list = []
|
||||
for k, v in children.items():
|
||||
kid_list.append(build_node(k, v))
|
||||
return kid_list
|
||||
|
||||
def get_children_list(children):
|
||||
kid_list = []
|
||||
for kid in children:
|
||||
if type(kid) == list:
|
||||
node = {
|
||||
'name': "unknown list",
|
||||
'value': "unknown list",
|
||||
'children': get_children_list(kid)
|
||||
}
|
||||
kid_list.append(node)
|
||||
elif type(kid) == dict:
|
||||
for k, v in kid.items():
|
||||
kid_list.append(build_node(k, v))
|
||||
else:
|
||||
kid_list.append({
|
||||
'name': normalize_string(str(kid)),
|
||||
'value': normalize_string(str(kid))
|
||||
})
|
||||
return kid_list
|
||||
|
||||
recipe_json = {
|
||||
'name': '',
|
||||
'url': '',
|
||||
'description': '',
|
||||
'image': '',
|
||||
'keywords': [],
|
||||
'recipeIngredient': [],
|
||||
'recipeInstructions': '',
|
||||
'servings': '',
|
||||
'prepTime': '',
|
||||
'cookTime': ''
|
||||
}
|
||||
recipe_tree = []
|
||||
parse_list = []
|
||||
html_data = []
|
||||
images = []
|
||||
text = unquote(text)
|
||||
|
||||
try:
|
||||
parse_list.append(remove_graph(json.loads(text)))
|
||||
if not url and 'url' in parse_list[0]:
|
||||
url = parse_list[0]['url']
|
||||
scrape = text_scraper("<script type='application/ld+json'>" + text + "</script>", url=url)
|
||||
|
||||
except JSONDecodeError:
|
||||
soup = BeautifulSoup(text, "html.parser")
|
||||
html_data = get_from_html(soup)
|
||||
images += get_images_from_source(soup, url)
|
||||
for el in soup.find_all('script', type='application/ld+json'):
|
||||
el = remove_graph(el)
|
||||
if not url and 'url' in el:
|
||||
url = el['url']
|
||||
if type(el) == list:
|
||||
for le in el:
|
||||
parse_list.append(le)
|
||||
elif type(el) == dict:
|
||||
parse_list.append(el)
|
||||
for el in soup.find_all(type='application/json'):
|
||||
el = remove_graph(el)
|
||||
if type(el) == list:
|
||||
for le in el:
|
||||
parse_list.append(le)
|
||||
elif type(el) == dict:
|
||||
parse_list.append(el)
|
||||
scrape = text_scraper(text, url=url)
|
||||
|
||||
recipe_json = helper.get_from_scraper(scrape, request)
|
||||
|
||||
for el in parse_list:
|
||||
temp_tree = []
|
||||
if isinstance(el, Tag):
|
||||
try:
|
||||
el = json.loads(el.string)
|
||||
except TypeError:
|
||||
continue
|
||||
|
||||
for k, v in el.items():
|
||||
if isinstance(v, dict):
|
||||
node = {
|
||||
'name': k,
|
||||
'value': k,
|
||||
'children': get_children_dict(v)
|
||||
}
|
||||
elif isinstance(v, list):
|
||||
node = {
|
||||
'name': k,
|
||||
'value': k,
|
||||
'children': get_children_list(v)
|
||||
}
|
||||
else:
|
||||
node = {
|
||||
'name': k + ": " + normalize_string(str(v)),
|
||||
'value': normalize_string(str(v))
|
||||
}
|
||||
temp_tree.append(node)
|
||||
|
||||
if '@type' in el and el['@type'] == 'Recipe':
|
||||
recipe_tree += [{'name': 'ld+json', 'children': temp_tree}]
|
||||
else:
|
||||
recipe_tree += [{'name': 'json', 'children': temp_tree}]
|
||||
|
||||
return recipe_json, recipe_tree, html_data, images
|
||||
|
||||
|
||||
def get_from_html(soup):
|
||||
INVISIBLE_ELEMS = ('style', 'script', 'head', 'title')
|
||||
html = []
|
||||
for s in soup.strings:
|
||||
if ((s.parent.name not in INVISIBLE_ELEMS) and (len(s.strip()) > 0)):
|
||||
html.append(s)
|
||||
return html
|
||||
|
||||
|
||||
def get_images_from_source(soup, url):
|
||||
sources = ['src', 'srcset', 'data-src']
|
||||
images = []
|
||||
img_tags = soup.find_all('img')
|
||||
if url:
|
||||
site = get_host_name(url)
|
||||
prot = url.split(':')[0]
|
||||
|
||||
urls = []
|
||||
for img in img_tags:
|
||||
for src in sources:
|
||||
try:
|
||||
urls.append(img[src])
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
for u in urls:
|
||||
u = u.split('?')[0]
|
||||
filename = re.search(r'/([\w_-]+[.](jpg|jpeg|gif|png))$', u)
|
||||
if filename:
|
||||
if (('http' not in u) and (url)):
|
||||
# sometimes an image source can be relative
|
||||
# if it is provide the base url
|
||||
u = '{}://{}{}'.format(prot, site, u)
|
||||
if 'http' in u:
|
||||
images.append(u)
|
||||
return images
|
||||
|
||||
|
||||
def remove_graph(el):
|
||||
# recipes type might be wrapped in @graph type
|
||||
if isinstance(el, Tag):
|
||||
try:
|
||||
el = json.loads(el.string)
|
||||
if '@graph' in el:
|
||||
for x in el['@graph']:
|
||||
if '@type' in x and x['@type'] == 'Recipe':
|
||||
el = x
|
||||
except (TypeError, JSONDecodeError):
|
||||
pass
|
||||
return el
|
||||
@@ -1,46 +1,54 @@
|
||||
import json
|
||||
from collections import Counter
|
||||
from datetime import date, timedelta
|
||||
|
||||
from django.contrib.postgres.search import SearchQuery, SearchRank, SearchVector, TrigramSimilarity
|
||||
from django.core.cache import caches
|
||||
from django.db.models import Avg, Case, Count, F, Func, Max, OuterRef, Q, Subquery, Sum, Value, When
|
||||
from django.core.cache import cache
|
||||
from django.db.models import Avg, Case, Count, Exists, F, Max, OuterRef, Q, Subquery, Value, When
|
||||
from django.db.models.functions import Coalesce, Lower, Substr
|
||||
from django.utils import timezone, translation
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from cookbook.filters import RecipeFilter
|
||||
from cookbook.helper.HelperFunctions import Round, str2bool
|
||||
from cookbook.helper.permission_helper import has_group_permission
|
||||
from cookbook.managers import DICTIONARY
|
||||
from cookbook.models import (CookLog, CustomFilter, Food, Keyword, Recipe, RecipeBook, SearchFields,
|
||||
from cookbook.models import (CookLog, CustomFilter, Food, Keyword, Recipe, SearchFields,
|
||||
SearchPreference, ViewLog)
|
||||
from recipes import settings
|
||||
|
||||
|
||||
# TODO create extensive tests to make sure ORs ANDs and various filters, sorting, etc work as expected
|
||||
# TODO consider creating a simpleListRecipe API that only includes minimum of recipe info and minimal filtering
|
||||
class RecipeSearch():
|
||||
_postgres = settings.DATABASES['default']['ENGINE'] in ['django.db.backends.postgresql_psycopg2', 'django.db.backends.postgresql']
|
||||
_postgres = settings.DATABASES['default']['ENGINE'] == 'django.db.backends.postgresql'
|
||||
|
||||
def __init__(self, request, **params):
|
||||
def __init__(self, request, **params):
|
||||
self._request = request
|
||||
self._queryset = None
|
||||
if f := params.get('filter', None):
|
||||
custom_filter = CustomFilter.objects.filter(id=f, space=self._request.space).filter(Q(created_by=self._request.user) |
|
||||
Q(shared=self._request.user) | Q(recipebook__shared=self._request.user)).first()
|
||||
custom_filter = (
|
||||
CustomFilter.objects.filter(id=f, space=self._request.space)
|
||||
.filter(Q(created_by=self._request.user) | Q(shared=self._request.user) | Q(recipebook__shared=self._request.user))
|
||||
.first()
|
||||
)
|
||||
if custom_filter:
|
||||
self._params = {**json.loads(custom_filter.search)}
|
||||
self._original_params = {**(params or {})}
|
||||
# json.loads parses rating as an integer, but a string is expected here
|
||||
if isinstance(self._params.get('rating', None), int):
|
||||
self._params['rating'] = str(self._params['rating'])
|
||||
else:
|
||||
self._params = {**(params or {})}
|
||||
else:
|
||||
self._params = {**(params or {})}
|
||||
if self._request.user.is_authenticated:
|
||||
self._search_prefs = request.user.searchpreference
|
||||
CACHE_KEY = f'search_pref_{request.user.id}'
|
||||
cached_result = cache.get(CACHE_KEY, default=None)
|
||||
if cached_result is not None:
|
||||
self._search_prefs = cached_result
|
||||
else:
|
||||
self._search_prefs = request.user.searchpreference
|
||||
cache.set(CACHE_KEY, self._search_prefs, timeout=10)
|
||||
else:
|
||||
self._search_prefs = SearchPreference()
|
||||
self._string = self._params.get('query').strip() if self._params.get('query', None) else None
|
||||
self._string = self._params.get('query').strip(
|
||||
) if self._params.get('query', None) else None
|
||||
self._rating = self._params.get('rating', None)
|
||||
self._keywords = {
|
||||
'or': self._params.get('keywords_or', None) or self._params.get('keywords', None),
|
||||
@@ -69,7 +77,8 @@ class RecipeSearch():
|
||||
self._random = str2bool(self._params.get('random', False))
|
||||
self._new = str2bool(self._params.get('new', False))
|
||||
self._num_recent = int(self._params.get('num_recent', 0))
|
||||
self._include_children = str2bool(self._params.get('include_children', None))
|
||||
self._include_children = str2bool(
|
||||
self._params.get('include_children', None))
|
||||
self._timescooked = self._params.get('timescooked', None)
|
||||
self._cookedon = self._params.get('cookedon', None)
|
||||
self._createdon = self._params.get('createdon', None)
|
||||
@@ -77,9 +86,9 @@ class RecipeSearch():
|
||||
self._viewedon = self._params.get('viewedon', None)
|
||||
self._makenow = self._params.get('makenow', None)
|
||||
# this supports a hidden feature to find recipes missing X ingredients
|
||||
if type(self._makenow) == bool and self._makenow == True:
|
||||
if isinstance(self._makenow, bool) and self._makenow == True:
|
||||
self._makenow = 0
|
||||
elif type(self._makenow) == str and self._makenow in ["yes", "true"]:
|
||||
elif isinstance(self._makenow, str) and self._makenow in ["yes", "true"]:
|
||||
self._makenow = 0
|
||||
else:
|
||||
try:
|
||||
@@ -112,19 +121,20 @@ class RecipeSearch():
|
||||
)
|
||||
self.search_rank = None
|
||||
self.orderby = []
|
||||
self._default_sort = ['-favorite'] # TODO add user setting
|
||||
self._filters = None
|
||||
self._fuzzy_match = None
|
||||
|
||||
def get_queryset(self, queryset):
|
||||
self._queryset = queryset
|
||||
self._queryset = self._queryset.prefetch_related('keywords')
|
||||
|
||||
self._build_sort_order()
|
||||
self._recently_viewed(num_recent=self._num_recent)
|
||||
self._cooked_on_filter(cooked_date=self._cookedon)
|
||||
self._created_on_filter(created_date=self._createdon)
|
||||
self._updated_on_filter(updated_date=self._updatedon)
|
||||
self._viewed_on_filter(viewed_date=self._viewedon)
|
||||
self._favorite_recipes(timescooked=self._timescooked)
|
||||
self._favorite_recipes(times_cooked=self._timescooked)
|
||||
self._new_recipes()
|
||||
self.keyword_filters(**self._keywords)
|
||||
self.food_filters(**self._foods)
|
||||
@@ -135,7 +145,7 @@ class RecipeSearch():
|
||||
self.unit_filters(units=self._units)
|
||||
self._makenow_filter(missing=self._makenow)
|
||||
self.string_filters(string=self._string)
|
||||
return self._queryset.filter(space=self._request.space).distinct().order_by(*self.orderby)
|
||||
return self._queryset.filter(space=self._request.space).order_by(*self.orderby)
|
||||
|
||||
def _sort_includes(self, *args):
|
||||
for x in args:
|
||||
@@ -147,11 +157,11 @@ class RecipeSearch():
|
||||
|
||||
def _build_sort_order(self):
|
||||
if self._random:
|
||||
self._queryset = self._queryset.order_by("?")
|
||||
self.orderby = ['?']
|
||||
else:
|
||||
order = []
|
||||
# TODO add userpreference for default sort order and replace '-favorite'
|
||||
default_order = ['-favorite']
|
||||
default_order = ['name']
|
||||
# recent and new_recipe are always first; they float a few recipes to the top
|
||||
if self._num_recent:
|
||||
order += ['-recent']
|
||||
@@ -160,7 +170,6 @@ class RecipeSearch():
|
||||
|
||||
# if a sort order is provided by user - use that order
|
||||
if self._sort_order:
|
||||
|
||||
if not isinstance(self._sort_order, list):
|
||||
order += [self._sort_order]
|
||||
else:
|
||||
@@ -176,8 +185,10 @@ class RecipeSearch():
|
||||
# otherwise sort by the remaining order_by attributes or favorite by default
|
||||
else:
|
||||
order += default_order
|
||||
order[:] = [Lower('name').asc() if x == 'name' else x for x in order]
|
||||
order[:] = [Lower('name').desc() if x == '-name' else x for x in order]
|
||||
order[:] = [Lower('name').asc() if x ==
|
||||
'name' else x for x in order]
|
||||
order[:] = [Lower('name').desc() if x ==
|
||||
'-name' else x for x in order]
|
||||
self.orderby = order
|
||||
|
||||
def string_filters(self, string=None):
|
||||
@@ -194,7 +205,8 @@ class RecipeSearch():
|
||||
for f in self._filters:
|
||||
query_filter |= f
|
||||
|
||||
self._queryset = self._queryset.filter(query_filter).distinct() # this creates duplicate records which can screw up other aggregates, see makenow for workaround
|
||||
# this creates duplicate records which can screw up other aggregates, see makenow for workaround
|
||||
self._queryset = self._queryset.filter(query_filter).distinct()
|
||||
if self._fulltext_include:
|
||||
if self._fuzzy_match is None:
|
||||
self._queryset = self._queryset.annotate(score=Coalesce(Max(self.search_rank), 0.0))
|
||||
@@ -208,7 +220,7 @@ class RecipeSearch():
|
||||
else:
|
||||
self._queryset = self._queryset.annotate(simularity=Coalesce(Subquery(simularity), 0.0))
|
||||
if self._sort_includes('score') and self._fulltext_include and self._fuzzy_match is not None:
|
||||
self._queryset = self._queryset.annotate(score=F('rank')+F('simularity'))
|
||||
self._queryset = self._queryset.annotate(score=F('rank') + F('simularity'))
|
||||
else:
|
||||
query_filter = Q()
|
||||
for f in [x + '__unaccent__iexact' if x in self._unaccent_include else x + '__iexact' for x in SearchFields.objects.all().values_list('field', flat=True)]:
|
||||
@@ -222,12 +234,13 @@ class RecipeSearch():
|
||||
default = timezone.now() - timedelta(days=100000)
|
||||
else:
|
||||
default = timezone.now()
|
||||
self._queryset = self._queryset.annotate(lastcooked=Coalesce(
|
||||
Max(Case(When(cooklog__created_by=self._request.user, cooklog__space=self._request.space, then='cooklog__created_at'))), Value(default)))
|
||||
self._queryset = self._queryset.annotate(
|
||||
lastcooked=Coalesce(Max(Case(When(cooklog__created_by=self._request.user, cooklog__space=self._request.space, then='cooklog__created_at'))), Value(default))
|
||||
)
|
||||
if cooked_date is None:
|
||||
return
|
||||
|
||||
cooked_date = date(*[int(x) for x in cooked_date.split('-') if x != ''])
|
||||
cooked_date = date(*[int(x)for x in cooked_date.split('-') if x != ''])
|
||||
|
||||
if lessthan:
|
||||
self._queryset = self._queryset.filter(lastcooked__date__lte=cooked_date).exclude(lastcooked=default)
|
||||
@@ -248,7 +261,7 @@ class RecipeSearch():
|
||||
if updated_date is None:
|
||||
return
|
||||
lessthan = '-' in updated_date[:1]
|
||||
updated_date = date(*[int(x) for x in updated_date.split('-') if x != ''])
|
||||
updated_date = date(*[int(x)for x in updated_date.split('-') if x != ''])
|
||||
if lessthan:
|
||||
self._queryset = self._queryset.filter(updated_at__date__lte=updated_date)
|
||||
else:
|
||||
@@ -257,12 +270,13 @@ class RecipeSearch():
|
||||
def _viewed_on_filter(self, viewed_date=None):
|
||||
if self._sort_includes('lastviewed') or viewed_date:
|
||||
longTimeAgo = timezone.now() - timedelta(days=100000)
|
||||
self._queryset = self._queryset.annotate(lastviewed=Coalesce(
|
||||
Max(Case(When(viewlog__created_by=self._request.user, viewlog__space=self._request.space, then='viewlog__created_at'))), Value(longTimeAgo)))
|
||||
self._queryset = self._queryset.annotate(
|
||||
lastviewed=Coalesce(Max(Case(When(viewlog__created_by=self._request.user, viewlog__space=self._request.space, then='viewlog__created_at'))), Value(longTimeAgo))
|
||||
)
|
||||
if viewed_date is None:
|
||||
return
|
||||
lessthan = '-' in viewed_date[:1]
|
||||
viewed_date = date(*[int(x) for x in viewed_date.split('-') if x != ''])
|
||||
viewed_date = date(*[int(x)for x in viewed_date.split('-') if x != ''])
|
||||
|
||||
if lessthan:
|
||||
self._queryset = self._queryset.filter(lastviewed__date__lte=viewed_date).exclude(lastviewed=longTimeAgo)
|
||||
@@ -273,9 +287,11 @@ class RecipeSearch():
|
||||
# TODO make new days a user-setting
|
||||
if not self._new:
|
||||
return
|
||||
self._queryset = (
|
||||
self._queryset.annotate(new_recipe=Case(
|
||||
When(created_at__gte=(timezone.now() - timedelta(days=new_days)), then=('pk')), default=Value(0), ))
|
||||
self._queryset = self._queryset.annotate(
|
||||
new_recipe=Case(
|
||||
When(created_at__gte=(timezone.now() - timedelta(days=new_days)), then=('pk')),
|
||||
default=Value(0),
|
||||
)
|
||||
)
|
||||
|
||||
def _recently_viewed(self, num_recent=None):
|
||||
@@ -285,29 +301,35 @@ class RecipeSearch():
|
||||
Max(Case(When(viewlog__created_by=self._request.user, viewlog__space=self._request.space, then='viewlog__pk'))), Value(0)))
|
||||
return
|
||||
|
||||
num_recent_recipes = ViewLog.objects.filter(created_by=self._request.user, space=self._request.space).values(
|
||||
'recipe').annotate(recent=Max('created_at')).order_by('-recent')[:num_recent]
|
||||
num_recent_recipes = (
|
||||
ViewLog.objects.filter(created_by=self._request.user, space=self._request.space)
|
||||
.values('recipe').annotate(recent=Max('created_at')).order_by('-recent')[:num_recent]
|
||||
)
|
||||
self._queryset = self._queryset.annotate(recent=Coalesce(Max(Case(When(pk__in=num_recent_recipes.values('recipe'), then='viewlog__pk'))), Value(0)))
|
||||
|
||||
def _favorite_recipes(self, timescooked=None):
|
||||
if self._sort_includes('favorite') or timescooked:
|
||||
lessthan = '-' in (timescooked or []) or not self._sort_includes('-favorite')
|
||||
if lessthan:
|
||||
def _favorite_recipes(self, times_cooked=None):
|
||||
if self._sort_includes('favorite') or times_cooked:
|
||||
less_than = '-' in (times_cooked or []) and not self._sort_includes('-favorite')
|
||||
if less_than:
|
||||
default = 1000
|
||||
else:
|
||||
default = 0
|
||||
favorite_recipes = CookLog.objects.filter(created_by=self._request.user, space=self._request.space, recipe=OuterRef('pk')
|
||||
).values('recipe').annotate(count=Count('pk', distinct=True)).values('count')
|
||||
favorite_recipes = (
|
||||
CookLog.objects.filter(created_by=self._request.user, space=self._request.space, recipe=OuterRef('pk'))
|
||||
.values('recipe')
|
||||
.annotate(count=Count('pk', distinct=True))
|
||||
.values('count')
|
||||
)
|
||||
self._queryset = self._queryset.annotate(favorite=Coalesce(Subquery(favorite_recipes), default))
|
||||
if timescooked is None:
|
||||
if times_cooked is None:
|
||||
return
|
||||
|
||||
if timescooked == '0':
|
||||
if times_cooked == '0':
|
||||
self._queryset = self._queryset.filter(favorite=0)
|
||||
elif lessthan:
|
||||
self._queryset = self._queryset.filter(favorite__lte=int(timescooked[1:])).exclude(favorite=0)
|
||||
elif less_than:
|
||||
self._queryset = self._queryset.filter(favorite__lte=int(times_cooked.replace('-', ''))).exclude(favorite=0)
|
||||
else:
|
||||
self._queryset = self._queryset.filter(favorite__gte=int(timescooked))
|
||||
self._queryset = self._queryset.filter(favorite__gte=int(times_cooked))
|
||||
|
||||
def keyword_filters(self, **kwargs):
|
||||
if all([kwargs[x] is None for x in kwargs]):
|
||||
@@ -387,8 +409,9 @@ class RecipeSearch():
|
||||
|
||||
def rating_filter(self, rating=None):
|
||||
if rating or self._sort_includes('rating'):
|
||||
lessthan = self._sort_includes('-rating') or '-' in (rating or [])
|
||||
if lessthan:
|
||||
lessthan = '-' in (rating or [])
|
||||
reverse = 'rating' in (self._sort_order or []) and '-rating' not in (self._sort_order or [])
|
||||
if lessthan or reverse:
|
||||
default = 100
|
||||
else:
|
||||
default = 0
|
||||
@@ -440,7 +463,7 @@ class RecipeSearch():
|
||||
if not steps:
|
||||
return
|
||||
if not isinstance(steps, list):
|
||||
steps = [steps]
|
||||
steps = [steps]
|
||||
self._queryset = self._queryset.filter(steps__id__in=steps)
|
||||
|
||||
def build_fulltext_filters(self, string=None):
|
||||
@@ -497,269 +520,62 @@ class RecipeSearch():
|
||||
trigram += TrigramSimilarity(f, self._string)
|
||||
else:
|
||||
trigram = TrigramSimilarity(f, self._string)
|
||||
self._fuzzy_match = Recipe.objects.annotate(trigram=trigram).distinct(
|
||||
).annotate(simularity=Max('trigram')).values('id', 'simularity').filter(simularity__gt=self._search_prefs.trigram_threshold)
|
||||
self._fuzzy_match = (
|
||||
Recipe.objects.annotate(trigram=trigram)
|
||||
.distinct()
|
||||
.annotate(simularity=Max('trigram'))
|
||||
.values('id', 'simularity')
|
||||
.filter(simularity__gt=self._search_prefs.trigram_threshold)
|
||||
)
|
||||
self._filters += [Q(pk__in=self._fuzzy_match.values('pk'))]
|
||||
|
||||
def _makenow_filter(self, missing=None):
|
||||
if missing is None or (type(missing) == bool and missing == False):
|
||||
if missing is None or (isinstance(missing, bool) and missing == False):
|
||||
return
|
||||
shopping_users = [*self._request.user.get_shopping_share(), self._request.user]
|
||||
|
||||
onhand_filter = (
|
||||
Q(steps__ingredients__food__onhand_users__in=shopping_users) # food onhand
|
||||
| Q(steps__ingredients__food__substitute__onhand_users__in=shopping_users) # or substitute food onhand
|
||||
# or substitute food onhand
|
||||
| Q(steps__ingredients__food__substitute__onhand_users__in=shopping_users)
|
||||
| Q(steps__ingredients__food__in=self.__children_substitute_filter(shopping_users))
|
||||
| Q(steps__ingredients__food__in=self.__sibling_substitute_filter(shopping_users))
|
||||
)
|
||||
makenow_recipes = Recipe.objects.annotate(
|
||||
count_food=Count('steps__ingredients__food__pk', filter=Q(steps__ingredients__food__isnull=False), distinct=True),
|
||||
count_onhand=Count('steps__ingredients__food__pk', filter=onhand_filter, distinct=True),
|
||||
count_ignore_shopping=Count('steps__ingredients__food__pk', filter=Q(steps__ingredients__food__ignore_shopping=True,
|
||||
steps__ingredients__food__recipe__isnull=True), distinct=True),
|
||||
count_ignore_shopping=Count(
|
||||
'steps__ingredients__food__pk', filter=Q(steps__ingredients__food__ignore_shopping=True, steps__ingredients__food__recipe__isnull=True), distinct=True
|
||||
),
|
||||
has_child_sub=Case(When(steps__ingredients__food__in=self.__children_substitute_filter(shopping_users), then=Value(1)), default=Value(0)),
|
||||
has_sibling_sub=Case(When(steps__ingredients__food__in=self.__sibling_substitute_filter(shopping_users), then=Value(1)), default=Value(0))
|
||||
).annotate(missingfood=F('count_food')-F('count_onhand')-F('count_ignore_shopping')).filter(missingfood=missing)
|
||||
).annotate(missingfood=F('count_food') - F('count_onhand') - F('count_ignore_shopping')).filter(missingfood__lte=missing)
|
||||
self._queryset = self._queryset.distinct().filter(id__in=makenow_recipes.values('id'))
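# Worked example of the annotation above (hypothetical counts): a recipe with
# count_food=5 distinct foods, count_onhand=3 and count_ignore_shopping=1 gets
# missingfood = 5 - 3 - 1 = 1, so it survives the missingfood__lte filter whenever
# the requested `missing` value is 1 or more.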
|
||||
|
||||
@ staticmethod
|
||||
@staticmethod
|
||||
def __children_substitute_filter(shopping_users=None):
|
||||
children_onhand_subquery = Food.objects.filter(
|
||||
path__startswith=Substr(OuterRef('path'), 1, Food.steplen*OuterRef('depth')),
|
||||
depth__gt=OuterRef('depth'),
|
||||
onhand_users__in=shopping_users
|
||||
).annotate(child_onhand=Coalesce(Func('pk', function='Count'), 0)).values('child_onhand')
|
||||
return Food.objects.exclude( # list of foods that are onhand and children of: foods that are not onhand and are set to use children as substitutes
|
||||
Q(onhand_users__in=shopping_users)
|
||||
| Q(ignore_shopping=True, recipe__isnull=True)
|
||||
| Q(substitute__onhand_users__in=shopping_users)
|
||||
).exclude(depth=1, numchild=0).filter(substitute_children=True
|
||||
).annotate(child_onhand=Coalesce(Subquery(children_onhand_subquery), 0)).exclude(child_onhand=0)
|
||||
|
||||
@ staticmethod
|
||||
def __sibling_substitute_filter(shopping_users=None):
|
||||
sibling_onhand_subquery = Food.objects.filter(
|
||||
path__startswith=Substr(OuterRef('path'), 1, Food.steplen*(OuterRef('depth')-1)),
|
||||
depth=OuterRef('depth'),
|
||||
onhand_users__in=shopping_users
|
||||
).annotate(sibling_onhand=Coalesce(Func('pk', function='Count'), 0)).values('sibling_onhand')
|
||||
return Food.objects.exclude( # list of foods that are onhand and siblings of: foods that are not onhand and are set to use siblings as substitutes
|
||||
Q(onhand_users__in=shopping_users)
|
||||
| Q(ignore_shopping=True, recipe__isnull=True)
|
||||
| Q(substitute__onhand_users__in=shopping_users)
|
||||
).exclude(depth=1, numchild=0).filter(substitute_siblings=True
|
||||
).annotate(sibling_onhand=Coalesce(Subquery(sibling_onhand_subquery), 0)).exclude(sibling_onhand=0)
|
||||
|
||||
|
||||
class RecipeFacet():
|
||||
class CacheEmpty(Exception):
|
||||
pass
|
||||
|
||||
def __init__(self, request, queryset=None, hash_key=None, cache_timeout=3600):
|
||||
if hash_key is None and queryset is None:
|
||||
raise ValueError(_("One of queryset or hash_key must be provided"))
|
||||
|
||||
self._request = request
|
||||
self._queryset = queryset
|
||||
self.hash_key = hash_key or str(hash(frozenset(self._queryset.values_list('pk'))))
|
||||
self._SEARCH_CACHE_KEY = f"recipes_filter_{self.hash_key}"
|
||||
self._cache_timeout = cache_timeout
|
||||
self._cache = caches['default'].get(self._SEARCH_CACHE_KEY, {})
|
||||
if self._cache is None and self._queryset is None:
|
||||
raise self.CacheEmpty("No queryset provided and cache empty")
|
||||
|
||||
self.Keywords = self._cache.get('Keywords', None)
|
||||
self.Foods = self._cache.get('Foods', None)
|
||||
self.Books = self._cache.get('Books', None)
|
||||
self.Ratings = self._cache.get('Ratings', None)
|
||||
# TODO Move Recent to recipe annotation/serializer: requrires change in RecipeSearch(), RecipeSearchView.vue and serializer
|
||||
self.Recent = self._cache.get('Recent', None)
|
||||
|
||||
if self._queryset is not None:
|
||||
self._recipe_list = list(self._queryset.values_list('id', flat=True))
|
||||
self._search_params = {
|
||||
'keyword_list': self._request.query_params.getlist('keywords', []),
|
||||
'food_list': self._request.query_params.getlist('foods', []),
|
||||
'book_list': self._request.query_params.getlist('book', []),
|
||||
'search_keywords_or': str2bool(self._request.query_params.get('keywords_or', True)),
|
||||
'search_foods_or': str2bool(self._request.query_params.get('foods_or', True)),
|
||||
'search_books_or': str2bool(self._request.query_params.get('books_or', True)),
|
||||
'space': self._request.space,
|
||||
}
|
||||
elif self.hash_key is not None:
|
||||
self._recipe_list = self._cache.get('recipe_list', [])
|
||||
self._search_params = {
|
||||
'keyword_list': self._cache.get('keyword_list', None),
|
||||
'food_list': self._cache.get('food_list', None),
|
||||
'book_list': self._cache.get('book_list', None),
|
||||
'search_keywords_or': self._cache.get('search_keywords_or', None),
|
||||
'search_foods_or': self._cache.get('search_foods_or', None),
|
||||
'search_books_or': self._cache.get('search_books_or', None),
|
||||
'space': self._cache.get('space', None),
|
||||
}
|
||||
|
||||
self._cache = {
|
||||
**self._search_params,
|
||||
'recipe_list': self._recipe_list,
|
||||
'Ratings': self.Ratings,
|
||||
'Recent': self.Recent,
|
||||
'Keywords': self.Keywords,
|
||||
'Foods': self.Foods,
|
||||
'Books': self.Books
|
||||
|
||||
}
|
||||
caches['default'].set(self._SEARCH_CACHE_KEY, self._cache, self._cache_timeout)
|
||||
|
||||
def get_facets(self, from_cache=False):
|
||||
if from_cache:
|
||||
return {
|
||||
'cache_key': self.hash_key or '',
|
||||
'Ratings': self.Ratings or {},
|
||||
'Recent': self.Recent or [],
|
||||
'Keywords': self.Keywords or [],
|
||||
'Foods': self.Foods or [],
|
||||
'Books': self.Books or []
|
||||
}
|
||||
return {
|
||||
'cache_key': self.hash_key,
|
||||
'Ratings': self.get_ratings(),
|
||||
'Recent': self.get_recent(),
|
||||
'Keywords': self.get_keywords(),
|
||||
'Foods': self.get_foods(),
|
||||
'Books': self.get_books()
|
||||
}
|
||||
|
||||
def set_cache(self, key, value):
|
||||
self._cache = {**self._cache, key: value}
|
||||
caches['default'].set(
|
||||
self._SEARCH_CACHE_KEY,
|
||||
self._cache,
|
||||
self._cache_timeout
|
||||
children_onhand_subquery = Food.objects.filter(path__startswith=OuterRef('path'), depth__gt=OuterRef('depth'), onhand_users__in=shopping_users)
|
||||
return (
|
||||
Food.objects.exclude( # list of foods that are onhand and children of: foods that are not onhand and are set to use children as substitutes
|
||||
Q(onhand_users__in=shopping_users) | Q(ignore_shopping=True, recipe__isnull=True) | Q(substitute__onhand_users__in=shopping_users)
|
||||
)
|
||||
.exclude(depth=1, numchild=0)
|
||||
.filter(substitute_children=True)
|
||||
.annotate(child_onhand_count=Exists(children_onhand_subquery))
|
||||
.filter(child_onhand_count=True)
|
||||
)
|
||||
|
||||
def get_books(self):
|
||||
if self.Books is None:
|
||||
self.Books = []
|
||||
return self.Books
|
||||
|
||||
def get_keywords(self):
|
||||
if self.Keywords is None:
|
||||
if self._search_params['search_keywords_or']:
|
||||
keywords = Keyword.objects.filter(space=self._request.space).distinct()
|
||||
else:
|
||||
keywords = Keyword.objects.filter(Q(recipe__in=self._recipe_list) | Q(depth=1)).filter(space=self._request.space).distinct()
|
||||
|
||||
# set keywords to root objects only
|
||||
keywords = self._keyword_queryset(keywords)
|
||||
self.Keywords = [{**x, 'children': None} if x['numchild'] > 0 else x for x in list(keywords)]
|
||||
self.set_cache('Keywords', self.Keywords)
|
||||
return self.Keywords
|
||||
|
||||
def get_foods(self):
|
||||
if self.Foods is None:
|
||||
# # if using an OR search, will annotate all keywords, otherwise, just those that appear in results
|
||||
if self._search_params['search_foods_or']:
|
||||
foods = Food.objects.filter(space=self._request.space).distinct()
|
||||
else:
|
||||
foods = Food.objects.filter(Q(ingredient__step__recipe__in=self._recipe_list) | Q(depth=1)).filter(space=self._request.space).distinct()
|
||||
|
||||
# set keywords to root objects only
|
||||
foods = self._food_queryset(foods)
|
||||
|
||||
self.Foods = [{**x, 'children': None} if x['numchild'] > 0 else x for x in list(foods)]
|
||||
self.set_cache('Foods', self.Foods)
|
||||
return self.Foods
|
||||
|
||||
def get_books(self):
|
||||
if self.Books is None:
|
||||
self.Books = []
|
||||
return self.Books
|
||||
|
||||
def get_ratings(self):
|
||||
if self.Ratings is None:
|
||||
if not self._request.space.demo and self._request.space.show_facet_count:
|
||||
if self._queryset is None:
|
||||
self._queryset = Recipe.objects.filter(id__in=self._recipe_list)
|
||||
rating_qs = self._queryset.annotate(rating=Round(Avg(Case(When(cooklog__created_by=self._request.user, then='cooklog__rating'), default=Value(0)))))
|
||||
self.Ratings = dict(Counter(r.rating for r in rating_qs))
|
||||
else:
|
||||
self.Rating = {}
|
||||
self.set_cache('Ratings', self.Ratings)
|
||||
return self.Ratings
|
||||
|
||||
def get_recent(self):
|
||||
if self.Recent is None:
|
||||
# TODO make days of recent recipe a setting
|
||||
recent_recipes = ViewLog.objects.filter(created_by=self._request.user, space=self._request.space, created_at__gte=timezone.now() - timedelta(days=14)
|
||||
).values_list('recipe__pk', flat=True)
|
||||
self.Recent = list(recent_recipes)
|
||||
self.set_cache('Recent', self.Recent)
|
||||
return self.Recent
|
||||
|
||||
def add_food_children(self, id):
|
||||
try:
|
||||
food = Food.objects.get(id=id)
|
||||
nodes = food.get_ancestors()
|
||||
except Food.DoesNotExist:
|
||||
return self.get_facets()
|
||||
foods = self._food_queryset(food.get_children(), food)
|
||||
deep_search = self.Foods
|
||||
for node in nodes:
|
||||
index = next((i for i, x in enumerate(deep_search) if x["id"] == node.id), None)
|
||||
deep_search = deep_search[index]['children']
|
||||
index = next((i for i, x in enumerate(deep_search) if x["id"] == food.id), None)
|
||||
deep_search[index]['children'] = [{**x, 'children': None} if x['numchild'] > 0 else x for x in list(foods)]
|
||||
self.set_cache('Foods', self.Foods)
|
||||
return self.get_facets()
|
||||
|
||||
def add_keyword_children(self, id):
|
||||
try:
|
||||
keyword = Keyword.objects.get(id=id)
|
||||
nodes = keyword.get_ancestors()
|
||||
except Keyword.DoesNotExist:
|
||||
return self.get_facets()
|
||||
keywords = self._keyword_queryset(keyword.get_children(), keyword)
|
||||
deep_search = self.Keywords
|
||||
for node in nodes:
|
||||
index = next((i for i, x in enumerate(deep_search) if x["id"] == node.id), None)
|
||||
deep_search = deep_search[index]['children']
|
||||
index = next((i for i, x in enumerate(deep_search) if x["id"] == keyword.id), None)
|
||||
deep_search[index]['children'] = [{**x, 'children': None} if x['numchild'] > 0 else x for x in list(keywords)]
|
||||
self.set_cache('Keywords', self.Keywords)
|
||||
return self.get_facets()
|
||||
|
||||
def _recipe_count_queryset(self, field, depth=1, steplen=4):
|
||||
return Recipe.objects.filter(**{f'{field}__path__startswith': OuterRef('path'), f'{field}__depth__gte': depth}, id__in=self._recipe_list, space=self._request.space
|
||||
).annotate(count=Coalesce(Func('pk', function='Count'), 0)).values('count')
|
||||
|
||||
def _keyword_queryset(self, queryset, keyword=None):
|
||||
depth = getattr(keyword, 'depth', 0) + 1
|
||||
steplen = depth * Keyword.steplen
|
||||
|
||||
if not self._request.space.demo and self._request.space.show_facet_count:
|
||||
return queryset.annotate(count=Coalesce(Subquery(self._recipe_count_queryset('keywords', depth, steplen)), 0)
|
||||
).filter(depth=depth, count__gt=0
|
||||
).values('id', 'name', 'count', 'numchild').order_by(Lower('name').asc())[:200]
|
||||
else:
|
||||
return queryset.filter(depth=depth).values('id', 'name', 'numchild').order_by(Lower('name').asc())
|
||||
|
||||
def _food_queryset(self, queryset, food=None):
|
||||
depth = getattr(food, 'depth', 0) + 1
|
||||
steplen = depth * Food.steplen
|
||||
|
||||
if not self._request.space.demo and self._request.space.show_facet_count:
|
||||
return queryset.annotate(count=Coalesce(Subquery(self._recipe_count_queryset('steps__ingredients__food', depth, steplen)), 0)
|
||||
).filter(depth__lte=depth, count__gt=0
|
||||
).values('id', 'name', 'count', 'numchild').order_by(Lower('name').asc())[:200]
|
||||
else:
|
||||
return queryset.filter(depth__lte=depth).values('id', 'name', 'numchild').order_by(Lower('name').asc())
|
||||
|
||||
|
||||
def old_search(request):
|
||||
if has_group_permission(request.user, ('guest',)):
|
||||
params = dict(request.GET)
|
||||
params['internal'] = None
|
||||
f = RecipeFilter(params,
|
||||
queryset=Recipe.objects.filter(space=request.user.userpreference.space).all().order_by(Lower('name').asc()),
|
||||
space=request.space)
|
||||
return f.qs
|
||||
@staticmethod
|
||||
def __sibling_substitute_filter(shopping_users=None):
|
||||
sibling_onhand_subquery = Food.objects.filter(
|
||||
path__startswith=Substr(OuterRef('path'), 1, Food.steplen * (OuterRef('depth') - 1)), depth=OuterRef('depth'), onhand_users__in=shopping_users
|
||||
)
|
||||
return (
|
||||
Food.objects.exclude( # list of foods that are onhand and siblings of: foods that are not onhand and are set to use siblings as substitutes
|
||||
Q(onhand_users__in=shopping_users) | Q(ignore_shopping=True, recipe__isnull=True) | Q(substitute__onhand_users__in=shopping_users)
|
||||
)
|
||||
.exclude(depth=1, numchild=0)
|
||||
.filter(substitute_siblings=True)
|
||||
.annotate(sibling_onhand=Exists(sibling_onhand_subquery))
|
||||
.filter(sibling_onhand=True)
|
||||
)
|
||||
|
||||
@@ -1,24 +1,47 @@
|
||||
import random
|
||||
import re
|
||||
import traceback
|
||||
from html import unescape
|
||||
from unicodedata import decomposition
|
||||
|
||||
from django.utils.dateparse import parse_duration
|
||||
from django.utils.translation import gettext as _
|
||||
from isodate import parse_duration as iso_parse_duration
|
||||
from isodate.isoerror import ISO8601Error
|
||||
from recipe_scrapers._utils import get_minutes
|
||||
from pytube import YouTube
|
||||
from recipe_scrapers._utils import get_host_name, get_minutes
|
||||
|
||||
from cookbook.helper import recipe_url_import as helper
|
||||
from cookbook.helper.automation_helper import AutomationEngine
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.models import Keyword
|
||||
from cookbook.models import Automation, Keyword, PropertyType
|
||||
|
||||
|
||||
def get_from_scraper(scrape, request):
|
||||
# converting the scrape_me object to the existing json format based on ld+json
|
||||
recipe_json = {}
|
||||
|
||||
recipe_json = {
|
||||
'steps': [],
|
||||
'internal': True
|
||||
}
|
||||
keywords = []
|
||||
|
||||
# assign source URL
|
||||
try:
|
||||
recipe_json['name'] = parse_name(scrape.title() or None)
|
||||
source_url = scrape.canonical_url()
|
||||
except Exception:
|
||||
try:
|
||||
source_url = scrape.url
|
||||
except Exception:
|
||||
pass
|
||||
if source_url:
|
||||
recipe_json['source_url'] = source_url
|
||||
try:
|
||||
keywords.append(source_url.replace('http://', '').replace('https://', '').split('/')[0])
|
||||
except Exception:
|
||||
recipe_json['source_url'] = ''
|
||||
|
||||
automation_engine = AutomationEngine(request, source=recipe_json.get('source_url'))
|
||||
# assign recipe name
|
||||
try:
|
||||
recipe_json['name'] = parse_name(scrape.title()[:128] or None)
|
||||
except Exception:
|
||||
recipe_json['name'] = None
|
||||
if not recipe_json['name']:
|
||||
@@ -27,8 +50,15 @@ def get_from_scraper(scrape, request):
|
||||
except Exception:
|
||||
recipe_json['name'] = ''
|
||||
|
||||
if isinstance(recipe_json['name'], list) and len(recipe_json['name']) > 0:
|
||||
recipe_json['name'] = recipe_json['name'][0]
|
||||
|
||||
recipe_json['name'] = automation_engine.apply_regex_replace_automation(recipe_json['name'], Automation.NAME_REPLACE)
|
||||
|
||||
# assign recipe description
|
||||
# TODO notify user about limit if reached - >256 description will be truncated
|
||||
try:
|
||||
description = scrape.description() or None
|
||||
description = scrape.description() or None
|
||||
except Exception:
|
||||
description = None
|
||||
if not description:
|
||||
@@ -38,47 +68,44 @@ def get_from_scraper(scrape, request):
|
||||
description = ''
|
||||
|
||||
recipe_json['description'] = parse_description(description)
|
||||
recipe_json['description'] = automation_engine.apply_regex_replace_automation(recipe_json['description'], Automation.DESCRIPTION_REPLACE)
|
||||
|
||||
# assign servings attributes
|
||||
try:
|
||||
servings = scrape.yields() or None
|
||||
# dont use scrape.yields() as this will always return "x servings" or "x items", should be improved in scrapers directly
|
||||
servings = scrape.schema.data.get('recipeYield') or 1
|
||||
except Exception:
|
||||
servings = None
|
||||
if not servings:
|
||||
try:
|
||||
servings = scrape.schema.data.get('recipeYield') or 1
|
||||
except Exception:
|
||||
servings = 1
|
||||
if type(servings) != int:
|
||||
try:
|
||||
servings = int(re.findall(r'\b\d+\b', servings)[0])
|
||||
except Exception:
|
||||
servings = 1
|
||||
recipe_json['servings'] = max(servings, 1)
|
||||
servings = 1
|
||||
|
||||
recipe_json['servings'] = parse_servings(servings)
|
||||
recipe_json['servings_text'] = parse_servings_text(servings)
|
||||
|
||||
# assign time attributes
|
||||
try:
|
||||
recipe_json['prepTime'] = get_minutes(scrape.prep_time()) or 0
|
||||
recipe_json['working_time'] = get_minutes(scrape.prep_time()) or 0
|
||||
except Exception:
|
||||
try:
|
||||
recipe_json['prepTime'] = get_minutes(scrape.schema.data.get("prepTime")) or 0
|
||||
recipe_json['working_time'] = get_minutes(scrape.schema.data.get("prepTime")) or 0
|
||||
except Exception:
|
||||
recipe_json['prepTime'] = 0
|
||||
recipe_json['working_time'] = 0
|
||||
try:
|
||||
recipe_json['cookTime'] = get_minutes(scrape.cook_time()) or 0
|
||||
recipe_json['waiting_time'] = get_minutes(scrape.cook_time()) or 0
|
||||
except Exception:
|
||||
try:
|
||||
recipe_json['cookTime'] = get_minutes(scrape.schema.data.get("cookTime")) or 0
|
||||
recipe_json['waiting_time'] = get_minutes(scrape.schema.data.get("cookTime")) or 0
|
||||
except Exception:
|
||||
recipe_json['cookTime'] = 0
|
||||
recipe_json['waiting_time'] = 0
|
||||
|
||||
if recipe_json['cookTime'] + recipe_json['prepTime'] == 0:
|
||||
if recipe_json['working_time'] + recipe_json['waiting_time'] == 0:
|
||||
try:
|
||||
recipe_json['prepTime'] = get_minutes(scrape.total_time()) or 0
|
||||
recipe_json['working_time'] = get_minutes(scrape.total_time()) or 0
|
||||
except Exception:
|
||||
try:
|
||||
recipe_json['prepTime'] = get_minutes(scrape.schema.data.get("totalTime")) or 0
|
||||
recipe_json['working_time'] = get_minutes(scrape.schema.data.get("totalTime")) or 0
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# assign image
|
||||
try:
|
||||
recipe_json['image'] = parse_image(scrape.image()) or None
|
||||
except Exception:
|
||||
@@ -89,7 +116,7 @@ def get_from_scraper(scrape, request):
|
||||
except Exception:
|
||||
recipe_json['image'] = ''
|
||||
|
||||
keywords = []
|
||||
# assign keywords
|
||||
try:
|
||||
if scrape.schema.data.get("keywords"):
|
||||
keywords += listify_keywords(scrape.schema.data.get("keywords"))
|
||||
@@ -113,66 +140,141 @@ def get_from_scraper(scrape, request):
|
||||
keywords += listify_keywords(scrape.schema.data.get("recipeCuisine"))
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
recipe_json['keywords'] = parse_keywords(list(set(map(str.casefold, keywords))), request.space)
|
||||
if scrape.author():
|
||||
keywords.append(scrape.author())
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
recipe_json['keywords'] = parse_keywords(list(set(map(str.casefold, keywords))), request)
|
||||
except AttributeError:
|
||||
recipe_json['keywords'] = keywords
|
||||
|
||||
ingredient_parser = IngredientParser(request, True)
|
||||
|
||||
ingredients = []
|
||||
# assign steps
|
||||
try:
|
||||
for i in parse_instructions(scrape.instructions()):
|
||||
recipe_json['steps'].append({'instruction': i, 'ingredients': [], 'show_ingredients_table': request.user.userpreference.show_step_ingredients, })
|
||||
except Exception:
|
||||
pass
|
||||
if len(recipe_json['steps']) == 0:
|
||||
recipe_json['steps'].append({'instruction': '', 'ingredients': [], })
|
||||
|
||||
if len(recipe_json['description']) > 256: # split at 256 as long descriptions don't look good on recipe cards
|
||||
recipe_json['steps'][0]['instruction'] = f"*{recipe_json['description']}* \n\n" + recipe_json['steps'][0]['instruction']
|
||||
else:
|
||||
recipe_json['description'] = recipe_json['description'][:512]
|
||||
|
||||
try:
|
||||
for x in scrape.ingredients():
|
||||
try:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(x)
|
||||
ingredients.append(
|
||||
{
|
||||
if x.strip() != '':
|
||||
try:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(x)
|
||||
ingredient = {
|
||||
'amount': amount,
|
||||
'unit': {
|
||||
'text': unit,
|
||||
'id': random.randrange(10000, 99999)
|
||||
},
|
||||
'ingredient': {
|
||||
'text': ingredient,
|
||||
'id': random.randrange(10000, 99999)
|
||||
'food': {
|
||||
'name': ingredient,
|
||||
},
|
||||
'unit': None,
|
||||
'note': note,
|
||||
'original_text': x
|
||||
}
|
||||
)
|
||||
except Exception:
|
||||
ingredients.append(
|
||||
{
|
||||
'amount': 0,
|
||||
'unit': {
|
||||
'text': '',
|
||||
'id': random.randrange(10000, 99999)
|
||||
},
|
||||
'ingredient': {
|
||||
'text': x,
|
||||
'id': random.randrange(10000, 99999)
|
||||
},
|
||||
'note': '',
|
||||
'original_text': x
|
||||
}
|
||||
)
|
||||
recipe_json['recipeIngredient'] = ingredients
|
||||
if unit:
|
||||
ingredient['unit'] = {'name': unit, }
|
||||
recipe_json['steps'][0]['ingredients'].append(ingredient)
|
||||
except Exception:
|
||||
recipe_json['steps'][0]['ingredients'].append(
|
||||
{
|
||||
'amount': 0,
|
||||
'unit': None,
|
||||
'food': {
|
||||
'name': x,
|
||||
},
|
||||
'note': '',
|
||||
'original_text': x
|
||||
}
|
||||
)
|
||||
except Exception:
|
||||
recipe_json['recipeIngredient'] = ingredients
|
||||
pass
|
||||
|
||||
try:
|
||||
recipe_json['recipeInstructions'] = parse_instructions(scrape.instructions())
|
||||
recipe_json['properties'] = get_recipe_properties(request.space, scrape.schema.nutrients())
|
||||
print(recipe_json['properties'])
|
||||
except Exception:
|
||||
recipe_json['recipeInstructions'] = ""
|
||||
traceback.print_exc()
|
||||
pass
|
||||
|
||||
for s in recipe_json['steps']:
|
||||
s['instruction'] = automation_engine.apply_regex_replace_automation(s['instruction'], Automation.INSTRUCTION_REPLACE)
|
||||
# re.sub(a.param_2, a.param_3, s['instruction'])
|
||||
|
||||
if scrape.canonical_url():
|
||||
recipe_json['url'] = scrape.canonical_url()
|
||||
recipe_json['recipeInstructions'] += "\n\n" + _("Imported from") + ": " + scrape.canonical_url()
|
||||
return recipe_json
|
||||
|
||||
|
||||
def get_recipe_properties(space, property_data):
|
||||
# {'servingSize': '1', 'calories': '302 kcal', 'proteinContent': '7,66g', 'fatContent': '11,56g', 'carbohydrateContent': '41,33g'}
|
||||
properties = {
|
||||
"property-calories": "calories",
|
||||
"property-carbohydrates": "carbohydrateContent",
|
||||
"property-proteins": "proteinContent",
|
||||
"property-fats": "fatContent",
|
||||
}
|
||||
recipe_properties = []
|
||||
for pt in PropertyType.objects.filter(space=space, open_data_slug__in=list(properties.keys())).all():
|
||||
for p in list(properties.keys()):
|
||||
if pt.open_data_slug == p:
|
||||
if properties[p] in property_data:
|
||||
recipe_properties.append({
|
||||
'property_type': {
|
||||
'id': pt.id,
|
||||
'name': pt.name,
|
||||
},
|
||||
'property_amount': parse_servings(property_data[properties[p]]) / float(property_data['servingSize']),
|
||||
})
|
||||
|
||||
return recipe_properties
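# Illustrative walk-through with the sample nutrient dict in the comment above: for a
# space that has a PropertyType with open_data_slug 'property-proteins', the value
# '7,66g' is reduced by parse_servings() to 7 and divided by float(servingSize) = 1.0,
# so property_amount becomes 7.0 (decimal commas are not preserved by the digit regex).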
|
||||
|
||||
|
||||
def get_from_youtube_scraper(url, request):
|
||||
"""A YouTube Information Scraper."""
|
||||
kw, created = Keyword.objects.get_or_create(name='YouTube', space=request.space)
|
||||
default_recipe_json = {
|
||||
'name': '',
|
||||
'internal': True,
|
||||
'description': '',
|
||||
'servings': 1,
|
||||
'working_time': 0,
|
||||
'waiting_time': 0,
|
||||
'image': "",
|
||||
'keywords': [{'name': kw.name, 'label': kw.name, 'id': kw.pk}],
|
||||
'source_url': url,
|
||||
'steps': [
|
||||
{
|
||||
'ingredients': [],
|
||||
'instruction': ''
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
# TODO add automation here
|
||||
try:
|
||||
automation_engine = AutomationEngine(request, source=url)
|
||||
video = YouTube(url=url)
|
||||
default_recipe_json['name'] = automation_engine.apply_regex_replace_automation(video.title, Automation.NAME_REPLACE)
|
||||
default_recipe_json['image'] = video.thumbnail_url
|
||||
default_recipe_json['steps'][0]['instruction'] = automation_engine.apply_regex_replace_automation(video.description, Automation.INSTRUCTION_REPLACE)
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return default_recipe_json
|
||||
|
||||
|
||||
def parse_name(name):
|
||||
if type(name) == list:
|
||||
if isinstance(name, list):
|
||||
try:
|
||||
name = name[0]
|
||||
except Exception:
|
||||
@@ -180,113 +282,74 @@ def parse_name(name):
|
||||
return normalize_string(name)
|
||||
|
||||
|
||||
def parse_ingredients(ingredients):
|
||||
# some pages have comma separated ingredients in a single array entry
|
||||
try:
|
||||
if type(ingredients[0]) == dict:
|
||||
return ingredients
|
||||
except (KeyError, IndexError):
|
||||
pass
|
||||
|
||||
if (len(ingredients) == 1 and type(ingredients) == list):
|
||||
ingredients = ingredients[0].split(',')
|
||||
elif type(ingredients) == str:
|
||||
ingredients = ingredients.split(',')
|
||||
|
||||
for x in ingredients:
|
||||
if '\n' in x:
|
||||
ingredients.remove(x)
|
||||
for i in x.split('\n'):
|
||||
ingredients.insert(0, i)
|
||||
|
||||
ingredient_list = []
|
||||
|
||||
for x in ingredients:
|
||||
if x.replace(' ', '') != '':
|
||||
x = x.replace('½', "0.5").replace('¼', "0.25").replace('¾', "0.75")
|
||||
try:
|
||||
amount, unit, ingredient, note = parse_single_ingredient(x)
|
||||
if ingredient:
|
||||
ingredient_list.append(
|
||||
{
|
||||
'amount': amount,
|
||||
'unit': {
|
||||
'text': unit,
|
||||
'id': random.randrange(10000, 99999)
|
||||
},
|
||||
'ingredient': {
|
||||
'text': ingredient,
|
||||
'id': random.randrange(10000, 99999)
|
||||
},
|
||||
'note': note,
|
||||
'original_text': x
|
||||
}
|
||||
)
|
||||
except Exception:
|
||||
ingredient_list.append(
|
||||
{
|
||||
'amount': 0,
|
||||
'unit': {
|
||||
'text': '',
|
||||
'id': random.randrange(10000, 99999)
|
||||
},
|
||||
'ingredient': {
|
||||
'text': x,
|
||||
'id': random.randrange(10000, 99999)
|
||||
},
|
||||
'note': '',
|
||||
'original_text': x
|
||||
}
|
||||
)
|
||||
|
||||
ingredients = ingredient_list
|
||||
else:
|
||||
ingredients = []
|
||||
return ingredients
|
||||
|
||||
|
||||
def parse_description(description):
|
||||
return normalize_string(description)
|
||||
|
||||
|
||||
def parse_instructions(instructions):
|
||||
instruction_text = ''
|
||||
|
||||
# flatten instructions if they are in a list
|
||||
if type(instructions) == list:
|
||||
for i in instructions:
|
||||
if type(i) == str:
|
||||
instruction_text += i
|
||||
else:
|
||||
if 'text' in i:
|
||||
instruction_text += i['text'] + '\n\n'
|
||||
elif 'itemListElement' in i:
|
||||
for ile in i['itemListElement']:
|
||||
if type(ile) == str:
|
||||
instruction_text += ile + '\n\n'
|
||||
elif 'text' in ile:
|
||||
instruction_text += ile['text'] + '\n\n'
|
||||
else:
|
||||
instruction_text += str(i)
|
||||
instructions = instruction_text
|
||||
|
||||
normalized_string = normalize_string(instructions)
|
||||
def clean_instruction_string(instruction):
|
||||
# handle HTML tags that can be converted to markup
|
||||
normalized_string = instruction \
|
||||
.replace("<nobr>", "**") \
|
||||
.replace("</nobr>", "**") \
|
||||
.replace("<strong>", "**") \
|
||||
.replace("</strong>", "**")
|
||||
normalized_string = normalize_string(normalized_string)
|
||||
normalized_string = normalized_string.replace('\n', ' \n')
|
||||
normalized_string = normalized_string.replace(' \n \n', '\n\n')
|
||||
return normalized_string
|
||||
|
||||
# handle unsupported, special UTF8 character in Thermomix-specific instructions,
|
||||
# that happen in nearly every recipe on Cookidoo, Zaubertopf Club, Rezeptwelt
|
||||
# and in Thermomix-specific recipes on many other sites
|
||||
return normalized_string \
|
||||
.replace("", _('reverse rotation')) \
|
||||
.replace("", _('careful rotation')) \
|
||||
.replace("", _('knead')) \
|
||||
.replace("Andicken ", _('thicken')) \
|
||||
.replace("Erwärmen ", _('warm up')) \
|
||||
.replace("Fermentieren ", _('ferment')) \
|
||||
.replace("Sous-vide ", _("sous-vide"))
|
||||
|
||||
|
||||
def parse_instructions(instructions):
|
||||
"""
|
||||
Convert arbitrary instructions object from website import and turn it into a flat list of strings
|
||||
:param instructions: any instructions object from import
|
||||
:return: list of strings (from one to many elements depending on website)
|
||||
"""
|
||||
instruction_list = []
|
||||
|
||||
if isinstance(instructions, list):
|
||||
for i in instructions:
|
||||
if isinstance(i, str):
|
||||
instruction_list.append(clean_instruction_string(i))
|
||||
else:
|
||||
if 'text' in i:
|
||||
instruction_list.append(clean_instruction_string(i['text']))
|
||||
elif 'itemListElement' in i:
|
||||
for ile in i['itemListElement']:
|
||||
if isinstance(ile, str):
|
||||
instruction_list.append(clean_instruction_string(ile))
|
||||
elif 'text' in ile:
|
||||
instruction_list.append(clean_instruction_string(ile['text']))
|
||||
else:
|
||||
instruction_list.append(clean_instruction_string(str(i)))
|
||||
else:
|
||||
instruction_list.append(clean_instruction_string(instructions))
|
||||
|
||||
return instruction_list
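# Illustrative call with hypothetical schema.org-style input: passing
# [{'text': 'Chop the onions.'}, {'itemListElement': [{'text': 'Fry gently.'}]}]
# flattens to a list like ['Chop the onions.', 'Fry gently.'] after each element has
# gone through clean_instruction_string(); a plain string comes back as a one-element list.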
|
||||
|
||||
|
||||
def parse_image(image):
|
||||
# check if list of images is returned, take first if so
|
||||
if not image:
|
||||
return None
|
||||
if type(image) == list:
|
||||
if isinstance(image, list):
|
||||
for pic in image:
|
||||
if (type(pic) == str) and (pic[:4] == 'http'):
|
||||
if (isinstance(pic, str)) and (pic[:4] == 'http'):
|
||||
image = pic
|
||||
elif 'url' in pic:
|
||||
image = pic['url']
|
||||
elif type(image) == dict:
|
||||
elif isinstance(image, dict):
|
||||
if 'url' in image:
|
||||
image = image['url']
|
||||
|
||||
@@ -297,12 +360,12 @@ def parse_image(image):
|
||||
|
||||
|
||||
def parse_servings(servings):
|
||||
if type(servings) == str:
|
||||
if isinstance(servings, str):
|
||||
try:
|
||||
servings = int(re.search(r'\d+', servings).group())
|
||||
except AttributeError:
|
||||
servings = 1
|
||||
elif type(servings) == list:
|
||||
elif isinstance(servings, list):
|
||||
try:
|
||||
servings = int(re.findall(r'\b\d+\b', servings[0])[0])
|
||||
except KeyError:
|
||||
@@ -310,52 +373,53 @@ def parse_servings(servings):
|
||||
return servings
|
||||
|
||||
|
||||
def parse_cooktime(cooktime):
|
||||
if type(cooktime) not in [int, float]:
|
||||
def parse_servings_text(servings):
|
||||
if isinstance(servings, str):
|
||||
try:
|
||||
cooktime = float(re.search(r'\d+', cooktime).group())
|
||||
servings = re.sub("\\d+", '', servings).strip()
|
||||
except Exception:
|
||||
servings = ''
|
||||
if isinstance(servings, list):
|
||||
try:
|
||||
servings = parse_servings_text(servings[1])
|
||||
except Exception:
|
||||
pass
|
||||
return str(servings)[:32]
|
||||
|
||||
|
||||
def parse_time(recipe_time):
|
||||
if type(recipe_time) not in [int, float]:
|
||||
try:
|
||||
recipe_time = float(re.search(r'\d+', recipe_time).group())
|
||||
except (ValueError, AttributeError):
|
||||
try:
|
||||
cooktime = round(iso_parse_duration(cooktime).seconds / 60)
|
||||
recipe_time = round(iso_parse_duration(recipe_time).seconds / 60)
|
||||
except ISO8601Error:
|
||||
try:
|
||||
if (type(cooktime) == list and len(cooktime) > 0):
|
||||
cooktime = cooktime[0]
|
||||
cooktime = round(parse_duration(cooktime).seconds / 60)
|
||||
if (isinstance(recipe_time, list) and len(recipe_time) > 0):
|
||||
recipe_time = recipe_time[0]
|
||||
recipe_time = round(parse_duration(recipe_time).seconds / 60)
|
||||
except AttributeError:
|
||||
cooktime = 0
|
||||
recipe_time = 0
|
||||
|
||||
return cooktime
|
||||
return recipe_time
|
||||
|
||||
|
||||
def parse_preptime(preptime):
|
||||
if type(preptime) not in [int, float]:
|
||||
try:
|
||||
preptime = float(re.search(r'\d+', preptime).group())
|
||||
except ValueError:
|
||||
try:
|
||||
preptime = round(iso_parse_duration(preptime).seconds / 60)
|
||||
except ISO8601Error:
|
||||
try:
|
||||
if (type(preptime) == list and len(preptime) > 0):
|
||||
preptime = preptime[0]
|
||||
preptime = round(parse_duration(preptime).seconds / 60)
|
||||
except AttributeError:
|
||||
preptime = 0
|
||||
|
||||
return preptime
|
||||
|
||||
|
||||
def parse_keywords(keyword_json, space):
|
||||
def parse_keywords(keyword_json, request):
|
||||
keywords = []
|
||||
automation_engine = AutomationEngine(request)
|
||||
|
||||
# keywords as list
|
||||
for kw in keyword_json:
|
||||
kw = normalize_string(kw)
|
||||
# if alias exists use that instead
|
||||
|
||||
if len(kw) != 0:
|
||||
if k := Keyword.objects.filter(name=kw, space=space).first():
|
||||
keywords.append({'id': str(k.id), 'text': str(k.name)})
|
||||
automation_engine.apply_keyword_automation(kw)
|
||||
if k := Keyword.objects.filter(name=kw, space=request.space).first():
|
||||
keywords.append({'label': str(k), 'name': k.name, 'id': k.id})
|
||||
else:
|
||||
keywords.append({'id': random.randrange(1111111, 9999999, 1), 'text': kw})
|
||||
keywords.append({'label': kw, 'name': kw})
|
||||
|
||||
return keywords
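# Sketch of the returned structure (hypothetical data): a keyword 'dessert' that already
# exists in the space is returned as {'label': 'dessert', 'name': 'dessert', 'id': <pk>},
# while an unknown keyword is passed through as {'label': 'dessert', 'name': 'dessert'}
# without an id.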
|
||||
|
||||
@@ -363,15 +427,15 @@ def parse_keywords(keyword_json, space):
|
||||
def listify_keywords(keyword_list):
|
||||
# keywords as string
|
||||
try:
|
||||
if type(keyword_list[0]) == dict:
|
||||
if isinstance(keyword_list[0], dict):
|
||||
return keyword_list
|
||||
except (KeyError, IndexError):
|
||||
pass
|
||||
if type(keyword_list) == str:
|
||||
if isinstance(keyword_list, str):
|
||||
keyword_list = keyword_list.split(',')
|
||||
|
||||
# keywords as string in list
|
||||
if (type(keyword_list) == list and len(keyword_list) == 1 and ',' in keyword_list[0]):
|
||||
if (isinstance(keyword_list, list) and len(keyword_list) == 1 and ',' in keyword_list[0]):
|
||||
keyword_list = keyword_list[0].split(',')
|
||||
return [x.strip() for x in keyword_list]
|
||||
|
||||
@@ -393,3 +457,47 @@ def iso_duration_to_minutes(string):
|
||||
string
|
||||
).groupdict()
|
||||
return int(match['days'] or 0) * 24 * 60 + int(match['hours'] or 0) * 60 + int(match['minutes'] or 0)
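# Worked example of the return expression, assuming the regex above captures the named
# groups days/hours/minutes from an ISO 8601 duration: 'P0DT1H30M' gives
# 0 * 24 * 60 + 1 * 60 + 30 = 90 minutes.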
|
||||
|
||||
|
||||
def get_images_from_soup(soup, url):
|
||||
sources = ['src', 'srcset', 'data-src']
|
||||
images = []
|
||||
img_tags = soup.find_all('img')
|
||||
if url:
|
||||
site = get_host_name(url)
|
||||
prot = url.split(':')[0]
|
||||
|
||||
urls = []
|
||||
for img in img_tags:
|
||||
for src in sources:
|
||||
try:
|
||||
urls.append(img[src])
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
for u in urls:
|
||||
u = u.split('?')[0]
|
||||
filename = re.search(r'/([\w_-]+[.](jpg|jpeg|gif|png))$', u)
|
||||
if filename:
|
||||
if (('http' not in u) and (url)):
|
||||
# sometimes an image source can be relative
|
||||
# if it is provide the base url
|
||||
u = '{}://{}{}'.format(prot, site, u)
|
||||
if 'http' in u:
|
||||
images.append(u)
|
||||
return images
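# Illustrative behaviour with a hypothetical page: an <img src="/media/cake.jpg"> found on
# https://example.org/recipes/1 matches the filename regex, is rebuilt via the prot/site
# fallback as 'https://example.org/media/cake.jpg', and is therefore appended to images.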
|
||||
|
||||
|
||||
def clean_dict(input_dict, key):
|
||||
if isinstance(input_dict, dict):
|
||||
for x in list(input_dict):
|
||||
if x == key:
|
||||
del input_dict[x]
|
||||
elif isinstance(input_dict[x], dict):
|
||||
input_dict[x] = clean_dict(input_dict[x], key)
|
||||
elif isinstance(input_dict[x], list):
|
||||
temp_list = []
|
||||
for e in input_dict[x]:
|
||||
temp_list.append(clean_dict(e, key))
|
||||
|
||||
return input_dict
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
from django.urls import reverse
|
||||
from django_scopes import scope, scopes_disabled
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.authtoken.models import Token
|
||||
from oauth2_provider.contrib.rest_framework import OAuth2Authentication
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
|
||||
from cookbook.views import views
|
||||
@@ -14,6 +13,12 @@ class ScopeMiddleware:
|
||||
|
||||
def __call__(self, request):
|
||||
prefix = settings.JS_REVERSE_SCRIPT_PREFIX or ''
|
||||
|
||||
# need to disable scopes for writing requests into userpref and enable for loading ?
|
||||
if request.path.startswith(prefix + '/api/user-preference/'):
|
||||
with scopes_disabled():
|
||||
return self.get_response(request)
|
||||
|
||||
if request.user.is_authenticated:
|
||||
|
||||
if request.path.startswith(prefix + '/admin/'):
|
||||
@@ -26,24 +31,34 @@ class ScopeMiddleware:
|
||||
if request.path.startswith(prefix + '/accounts/'):
|
||||
return self.get_response(request)
|
||||
|
||||
with scopes_disabled():
|
||||
if request.user.userpreference.space is None and not reverse('account_logout') in request.path:
|
||||
return views.no_space(request)
|
||||
if request.path.startswith(prefix + '/switch-space/'):
|
||||
return self.get_response(request)
|
||||
|
||||
if request.user.groups.count() == 0 and not reverse('account_logout') in request.path:
|
||||
with scopes_disabled():
|
||||
if request.user.userspace_set.count() == 0 and not reverse('account_logout') in request.path:
|
||||
return views.space_overview(request)
|
||||
|
||||
# get active user space, if for some reason more than one space is active select first (group permission checks will fail, this is not intended at this point)
|
||||
user_space = request.user.userspace_set.filter(active=True).first()
|
||||
|
||||
if not user_space:
|
||||
return views.space_overview(request)
|
||||
|
||||
if user_space.groups.count() == 0 and not reverse('account_logout') in request.path:
|
||||
return views.no_groups(request)
|
||||
|
||||
request.space = request.user.userpreference.space
|
||||
# with scopes_disabled():
|
||||
request.space = user_space.space
|
||||
with scope(space=request.space):
|
||||
return self.get_response(request)
|
||||
else:
|
||||
if request.path.startswith(prefix + '/api/'):
|
||||
try:
|
||||
if auth := TokenAuthentication().authenticate(request):
|
||||
request.space = auth[0].userpreference.space
|
||||
with scope(space=request.space):
|
||||
return self.get_response(request)
|
||||
if auth := OAuth2Authentication().authenticate(request):
|
||||
user_space = auth[0].userspace_set.filter(active=True).first()
|
||||
if user_space:
|
||||
request.space = user_space.space
|
||||
with scope(space=request.space):
|
||||
return self.get_response(request)
|
||||
except AuthenticationFailed:
|
||||
pass
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from bs4 import BeautifulSoup
|
||||
from json import JSONDecodeError
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from recipe_scrapers import SCRAPERS, get_host_name
|
||||
from recipe_scrapers._factory import SchemaScraperFactory
|
||||
from recipe_scrapers._schemaorg import SchemaOrg
|
||||
@@ -26,17 +27,17 @@ def text_scraper(text, url=None):
|
||||
class TextScraper(scraper_class):
|
||||
def __init__(
|
||||
self,
|
||||
page_data,
|
||||
url=None
|
||||
html=None,
|
||||
url=None,
|
||||
):
|
||||
self.wild_mode = False
|
||||
self.meta_http_equiv = False
|
||||
self.soup = BeautifulSoup(page_data, "html.parser")
|
||||
self.soup = BeautifulSoup(html, "html.parser")
|
||||
self.url = url
|
||||
self.recipe = None
|
||||
try:
|
||||
self.schema = SchemaOrg(page_data)
|
||||
self.schema = SchemaOrg(html)
|
||||
except (JSONDecodeError, AttributeError):
|
||||
pass
|
||||
|
||||
return TextScraper(text, url)
|
||||
return TextScraper(url=url, html=text)
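# Minimal usage sketch (hypothetical HTML): text_scraper('<html>...ld+json...</html>',
# url='https://example.org/recipe') returns a TextScraper built from the raw markup;
# self.schema is only populated when SchemaOrg(html) parses the embedded structured data,
# otherwise the JSONDecodeError/AttributeError is swallowed and schema stays unset.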
|
||||
|
||||
@@ -1,16 +1,13 @@
|
||||
from datetime import timedelta
|
||||
from decimal import Decimal
|
||||
|
||||
from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.db.models import F, OuterRef, Q, Subquery, Value
|
||||
from django.db.models.functions import Coalesce
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from cookbook.helper.HelperFunctions import Round, str2bool
|
||||
from cookbook.models import (Ingredient, MealPlan, Recipe, ShoppingListEntry, ShoppingListRecipe,
|
||||
SupermarketCategoryRelation)
|
||||
from recipes import settings
|
||||
|
||||
|
||||
def shopping_helper(qs, request):
|
||||
@@ -47,6 +44,8 @@ class RecipeShoppingEditor():
|
||||
self.mealplan = self._kwargs.get('mealplan', None)
|
||||
if type(self.mealplan) in [int, float]:
|
||||
self.mealplan = MealPlan.objects.filter(id=self.mealplan, space=self.space)
|
||||
if isinstance(self.mealplan, dict):
|
||||
self.mealplan = MealPlan.objects.filter(id=self.mealplan['id'], space=self.space).first()
|
||||
self.id = self._kwargs.get('id', None)
|
||||
|
||||
self._shopping_list_recipe = self.get_shopping_list_recipe(self.id, self.created_by, self.space)
|
||||
@@ -67,11 +66,12 @@ class RecipeShoppingEditor():
|
||||
|
||||
@property
|
||||
def _recipe_servings(self):
|
||||
return getattr(self.recipe, 'servings', None) or getattr(getattr(self.mealplan, 'recipe', None), 'servings', None) or getattr(getattr(self._shopping_list_recipe, 'recipe', None), 'servings', None)
|
||||
return getattr(self.recipe, 'servings', None) or getattr(getattr(self.mealplan, 'recipe', None), 'servings',
|
||||
None) or getattr(getattr(self._shopping_list_recipe, 'recipe', None), 'servings', None)
|
||||
|
||||
@property
|
||||
def _servings_factor(self):
|
||||
return Decimal(self.servings)/Decimal(self._recipe_servings)
|
||||
return Decimal(self.servings) / Decimal(self._recipe_servings)
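# e.g. a recipe that serves 4, put on the list for 6 servings, yields a factor of
# Decimal(6) / Decimal(4) = 1.5, which is later used to scale ingredient amounts.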
|
||||
|
||||
@property
|
||||
def _shared_users(self):
|
||||
@@ -88,9 +88,10 @@ class RecipeShoppingEditor():
|
||||
|
||||
def get_recipe_ingredients(self, id, exclude_onhand=False):
|
||||
if exclude_onhand:
|
||||
return Ingredient.objects.filter(step__recipe__id=id, food__ignore_shopping=False, space=self.space).exclude(food__onhand_users__id__in=[x.id for x in self._shared_users])
|
||||
return Ingredient.objects.filter(step__recipe__id=id, food__ignore_shopping=False, space=self.space).exclude(
|
||||
food__onhand_users__id__in=[x.id for x in self._shared_users])
|
||||
else:
|
||||
return Ingredient.objects.filter(step__recipe__id=id, food__ignore_shopping=False, space=self.space)
|
||||
return Ingredient.objects.filter(step__recipe__id=id, food__ignore_shopping=False, space=self.space)
|
||||
|
||||
@property
|
||||
def _include_related(self):
|
||||
@@ -107,7 +108,10 @@ class RecipeShoppingEditor():
|
||||
self.servings = float(servings)
|
||||
|
||||
if mealplan := kwargs.get('mealplan', None):
|
||||
self.mealplan = mealplan
|
||||
if isinstance(mealplan, dict):
|
||||
self.mealplan = MealPlan.objects.filter(id=mealplan['id'], space=self.space).first()
|
||||
else:
|
||||
self.mealplan = mealplan
|
||||
self.recipe = mealplan.recipe
|
||||
elif recipe := kwargs.get('recipe', None):
|
||||
self.recipe = recipe
|
||||
@@ -165,14 +169,14 @@ class RecipeShoppingEditor():
|
||||
try:
|
||||
self._shopping_list_recipe.delete()
|
||||
return True
|
||||
except:
|
||||
except BaseException:
|
||||
return False
|
||||
|
||||
def _add_ingredients(self, ingredients=None):
|
||||
if not ingredients:
|
||||
return
|
||||
elif type(ingredients) == list:
|
||||
ingredients = Ingredient.objects.filter(id__in=ingredients)
|
||||
elif isinstance(ingredients, list):
|
||||
ingredients = Ingredient.objects.filter(id__in=ingredients, food__ignore_shopping=False)
|
||||
existing = self._shopping_list_recipe.entries.filter(ingredient__in=ingredients).values_list('ingredient__pk', flat=True)
|
||||
add_ingredients = ingredients.exclude(id__in=existing)
|
||||
|
||||
@@ -194,120 +198,3 @@ class RecipeShoppingEditor():
|
||||
to_delete = self._shopping_list_recipe.entries.exclude(ingredient__in=ingredients)
|
||||
ShoppingListEntry.objects.filter(id__in=to_delete).delete()
|
||||
self._shopping_list_recipe = self.get_shopping_list_recipe(self.id, self.created_by, self.space)
|
||||
|
||||
|
||||
# # TODO refactor as class
|
||||
# def list_from_recipe(list_recipe=None, recipe=None, mealplan=None, servings=None, ingredients=None, created_by=None, space=None, append=False):
|
||||
# """
|
||||
# Creates ShoppingListRecipe and associated ShoppingListEntrys from a recipe or a meal plan with a recipe
|
||||
# :param list_recipe: Modify an existing ShoppingListRecipe
|
||||
# :param recipe: Recipe to use as list of ingredients. One of [recipe, mealplan] are required
|
||||
# :param mealplan: alternatively use a mealplan recipe as source of ingredients
|
||||
# :param servings: Optional: Number of servings to use to scale shoppinglist. If servings = 0 an existing recipe list will be deleted
|
||||
# :param ingredients: Ingredients, list of ingredient IDs to include on the shopping list. When not provided all ingredients will be used
|
||||
# :param append: If False will remove any entries not included with ingredients, when True will append ingredients to the shopping list
|
||||
# """
|
||||
# r = recipe or getattr(mealplan, 'recipe', None) or getattr(list_recipe, 'recipe', None)
|
||||
# if not r:
|
||||
# raise ValueError(_("You must supply a recipe or mealplan"))
|
||||
|
||||
# created_by = created_by or getattr(ShoppingListEntry.objects.filter(list_recipe=list_recipe).first(), 'created_by', None)
|
||||
# if not created_by:
|
||||
# raise ValueError(_("You must supply a created_by"))
|
||||
|
||||
# try:
|
||||
# servings = float(servings)
|
||||
# except (ValueError, TypeError):
|
||||
# servings = getattr(mealplan, 'servings', 1.0)
|
||||
|
||||
# servings_factor = servings / r.servings
|
||||
|
||||
# shared_users = list(created_by.get_shopping_share())
|
||||
# shared_users.append(created_by)
|
||||
# if list_recipe:
|
||||
# created = False
|
||||
# else:
|
||||
# list_recipe = ShoppingListRecipe.objects.create(recipe=r, mealplan=mealplan, servings=servings)
|
||||
# created = True
|
||||
|
||||
# related_step_ing = []
|
||||
# if servings == 0 and not created:
|
||||
# list_recipe.delete()
|
||||
# return []
|
||||
# elif ingredients:
|
||||
# ingredients = Ingredient.objects.filter(pk__in=ingredients, space=space)
|
||||
# else:
|
||||
# ingredients = Ingredient.objects.filter(step__recipe=r, food__ignore_shopping=False, space=space)
|
||||
|
||||
# if exclude_onhand := created_by.userpreference.mealplan_autoexclude_onhand:
|
||||
# ingredients = ingredients.exclude(food__onhand_users__id__in=[x.id for x in shared_users])
|
||||
|
||||
# if related := created_by.userpreference.mealplan_autoinclude_related:
|
||||
# # TODO: add levels of related recipes (related recipes of related recipes) to use when auto-adding mealplans
|
||||
# related_recipes = r.get_related_recipes()
|
||||
|
||||
# for x in related_recipes:
|
||||
# # related recipe is a Step serving size is driven by recipe serving size
|
||||
# # TODO once/if Steps can have a serving size this needs to be refactored
|
||||
# if exclude_onhand:
|
||||
# # if steps are used more than once in a recipe or subrecipe - I don' think this results in the desired behavior
|
||||
# related_step_ing += Ingredient.objects.filter(step__recipe=x, space=space).exclude(food__onhand_users__id__in=[x.id for x in shared_users]).values_list('id', flat=True)
|
||||
# else:
|
||||
# related_step_ing += Ingredient.objects.filter(step__recipe=x, space=space).values_list('id', flat=True)
|
||||
|
||||
# x_ing = []
|
||||
# if ingredients.filter(food__recipe=x).exists():
|
||||
# for ing in ingredients.filter(food__recipe=x):
|
||||
# if exclude_onhand:
|
||||
# x_ing = Ingredient.objects.filter(step__recipe=x, food__ignore_shopping=False, space=space).exclude(food__onhand_users__id__in=[x.id for x in shared_users])
|
||||
# else:
|
||||
# x_ing = Ingredient.objects.filter(step__recipe=x, food__ignore_shopping=False, space=space).exclude(food__ignore_shopping=True)
|
||||
# for i in [x for x in x_ing]:
|
||||
# ShoppingListEntry.objects.create(
|
||||
# list_recipe=list_recipe,
|
||||
# food=i.food,
|
||||
# unit=i.unit,
|
||||
# ingredient=i,
|
||||
# amount=i.amount * Decimal(servings_factor),
|
||||
# created_by=created_by,
|
||||
# space=space,
|
||||
# )
|
||||
# # dont' add food to the shopping list that are actually recipes that will be added as ingredients
|
||||
# ingredients = ingredients.exclude(food__recipe=x)
|
||||
|
||||
# add_ingredients = list(ingredients.values_list('id', flat=True)) + related_step_ing
|
||||
# if not append:
|
||||
# existing_list = ShoppingListEntry.objects.filter(list_recipe=list_recipe)
|
||||
# # delete shopping list entries not included in ingredients
|
||||
# existing_list.exclude(ingredient__in=ingredients).delete()
|
||||
# # add shopping list entries that did not previously exist
|
||||
# add_ingredients = set(add_ingredients) - set(existing_list.values_list('ingredient__id', flat=True))
|
||||
# add_ingredients = Ingredient.objects.filter(id__in=add_ingredients, space=space)
|
||||
|
||||
# # if servings have changed, update the ShoppingListRecipe and existing Entries
|
||||
# if servings <= 0:
|
||||
# servings = 1
|
||||
|
||||
# if not created and list_recipe.servings != servings:
|
||||
# update_ingredients = set(ingredients.values_list('id', flat=True)) - set(add_ingredients.values_list('id', flat=True))
|
||||
# list_recipe.servings = servings
|
||||
# list_recipe.save()
|
||||
# for sle in ShoppingListEntry.objects.filter(list_recipe=list_recipe, ingredient__id__in=update_ingredients):
|
||||
# sle.amount = sle.ingredient.amount * Decimal(servings_factor)
|
||||
# sle.save()
|
||||
|
||||
# # add any missing Entries
|
||||
# for i in [x for x in add_ingredients if x.food]:
|
||||
|
||||
# ShoppingListEntry.objects.create(
|
||||
# list_recipe=list_recipe,
|
||||
# food=i.food,
|
||||
# unit=i.unit,
|
||||
# ingredient=i,
|
||||
# amount=i.amount * Decimal(servings_factor),
|
||||
# created_by=created_by,
|
||||
# space=space,
|
||||
# )
|
||||
|
||||
# # return all shopping list items
|
||||
# return list_recipe
|
||||
|
||||
@@ -2,7 +2,6 @@ from gettext import gettext as _
|
||||
|
||||
import bleach
|
||||
import markdown as md
|
||||
from bleach_allowlist import markdown_attrs, markdown_tags
|
||||
from jinja2 import Template, TemplateSyntaxError, UndefinedError
|
||||
from markdown.extensions.tables import TableExtension
|
||||
|
||||
@@ -22,10 +21,25 @@ class IngredientObject(object):
|
||||
else:
|
||||
self.amount = f"<scalable-number v-bind:number='{bleach.clean(str(ingredient.amount))}' v-bind:factor='ingredient_factor'></scalable-number>"
|
||||
if ingredient.unit:
|
||||
self.unit = bleach.clean(str(ingredient.unit))
|
||||
if ingredient.unit.plural_name in (None, ""):
|
||||
self.unit = bleach.clean(str(ingredient.unit))
|
||||
else:
|
||||
if ingredient.always_use_plural_unit or ingredient.amount > 1 and not ingredient.no_amount:
|
||||
self.unit = bleach.clean(ingredient.unit.plural_name)
|
||||
else:
|
||||
self.unit = bleach.clean(str(ingredient.unit))
|
||||
else:
|
||||
self.unit = ""
|
||||
self.food = bleach.clean(str(ingredient.food))
|
||||
if ingredient.food:
|
||||
if ingredient.food.plural_name in (None, ""):
|
||||
self.food = bleach.clean(str(ingredient.food))
|
||||
else:
|
||||
if ingredient.always_use_plural_food or ingredient.amount > 1 and not ingredient.no_amount:
|
||||
self.food = bleach.clean(str(ingredient.food.plural_name))
|
||||
else:
|
||||
self.food = bleach.clean(str(ingredient.food))
|
||||
else:
|
||||
self.food = ""
|
||||
self.note = bleach.clean(str(ingredient.note))
|
||||
|
||||
def __str__(self):
|
||||
@@ -38,9 +52,17 @@ class IngredientObject(object):
|
||||
def render_instructions(step): # TODO deduplicate markdown cleanup code
|
||||
instructions = step.instruction
|
||||
|
||||
tags = markdown_tags + [
|
||||
tags = {
|
||||
"h1", "h2", "h3", "h4", "h5", "h6",
|
||||
"b", "i", "strong", "em", "tt",
|
||||
"p", "br",
|
||||
"span", "div", "blockquote", "code", "pre", "hr",
|
||||
"ul", "ol", "li", "dd", "dt",
|
||||
"img",
|
||||
"a",
|
||||
"sub", "sup",
|
||||
'pre', 'table', 'td', 'tr', 'th', 'tbody', 'style', 'thead'
|
||||
]
|
||||
}
|
||||
parsed_md = md.markdown(
|
||||
instructions,
|
||||
extensions=[
|
||||
@@ -48,7 +70,11 @@ def render_instructions(step): # TODO deduplicate markdown cleanup code
|
||||
UrlizeExtension(), MarkdownFormatExtension()
|
||||
]
|
||||
)
|
||||
markdown_attrs['*'] = markdown_attrs['*'] + ['class']
|
||||
markdown_attrs = {
|
||||
"*": ["id", "class", 'width', 'height'],
|
||||
"img": ["src", "alt", "title"],
|
||||
"a": ["href", "alt", "title"],
|
||||
}
|
||||
|
||||
instructions = bleach.clean(parsed_md, tags, markdown_attrs)
|
||||
|
||||
|
||||
cookbook/helper/unit_conversion_helper.py (new file, 141 lines)
@@ -0,0 +1,141 @@
|
||||
from django.core.cache import caches
|
||||
from decimal import Decimal
|
||||
|
||||
from cookbook.helper.cache_helper import CacheHelper
|
||||
from cookbook.models import Ingredient, Unit
|
||||
|
||||
CONVERSION_TABLE = {
|
||||
'weight': {
|
||||
'g': 1000,
|
||||
'kg': 1,
|
||||
'ounce': 35.274,
|
||||
'pound': 2.20462
|
||||
},
|
||||
'volume': {
|
||||
'ml': 1000,
|
||||
'l': 1,
|
||||
'fluid_ounce': 33.814,
|
||||
'pint': 2.11338,
|
||||
'quart': 1.05669,
|
||||
'gallon': 0.264172,
|
||||
'tbsp': 67.628,
|
||||
'tsp': 202.884,
|
||||
'imperial_fluid_ounce': 35.1951,
|
||||
'imperial_pint': 1.75975,
|
||||
'imperial_quart': 0.879877,
|
||||
'imperial_gallon': 0.219969,
|
||||
'imperial_tbsp': 56.3121,
|
||||
'imperial_tsp': 168.936,
|
||||
},
|
||||
}
|
||||
|
||||
BASE_UNITS_WEIGHT = list(CONVERSION_TABLE['weight'].keys())
|
||||
BASE_UNITS_VOLUME = list(CONVERSION_TABLE['volume'].keys())
|
||||
|
||||
|
||||
class ConversionException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class UnitConversionHelper:
|
||||
space = None
|
||||
|
||||
def __init__(self, space):
|
||||
"""
|
||||
Initializes unit conversion helper
|
||||
:param space: space to perform conversions on
|
||||
"""
|
||||
self.space = space
|
||||
|
||||
@staticmethod
|
||||
def convert_from_to(from_unit, to_unit, amount):
|
||||
"""
|
||||
Convert from one base unit to another. Throws ConversionException if trying to convert between different systems (weight/volume) or if units are not supported.
|
||||
:param from_unit: str unit to convert from
|
||||
:param to_unit: str unit to convert to
|
||||
:param amount: amount to convert
|
||||
:return: Decimal converted amount
|
||||
"""
|
||||
system = None
|
||||
if from_unit in BASE_UNITS_WEIGHT and to_unit in BASE_UNITS_WEIGHT:
|
||||
system = 'weight'
|
||||
if from_unit in BASE_UNITS_VOLUME and to_unit in BASE_UNITS_VOLUME:
|
||||
system = 'volume'
|
||||
|
||||
if not system:
|
||||
raise ConversionException('Trying to convert units not existing or not in one unit system (weight/volume)')
|
||||
|
||||
return Decimal(amount / Decimal(CONVERSION_TABLE[system][from_unit] / CONVERSION_TABLE[system][to_unit]))
|
||||
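A minimal usage sketch for the static converter above (illustrative only, not part of the diff). The factors in CONVERSION_TABLE are expressed per 1 kg / 1 l, so 500 g comes out to roughly 17.64 ounces:

    from decimal import Decimal

    from cookbook.helper.unit_conversion_helper import UnitConversionHelper

    # 500 g -> ounces: 500 / (1000 / 35.274) = ~17.637
    print(UnitConversionHelper.convert_from_to('g', 'ounce', Decimal('500')))

    # mixing systems (e.g. 'g' -> 'ml') raises ConversionException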
|
||||
def base_conversions(self, ingredient_list):
|
||||
"""
|
||||
Calculates all possible base unit conversions for each ingredient given.
|
||||
Converts to all common base units IF they exist in the unit database of the space.
|
||||
For useful results all ingredients passed should be of the same food, otherwise filtering afterwards might be required.
|
||||
:param ingredient_list: list of ingredients to convert
|
||||
:return: ingredient list with appended conversions
|
||||
"""
|
||||
base_conversion_ingredient_list = ingredient_list.copy()
|
||||
for i in ingredient_list:
|
||||
try:
|
||||
conversion_unit = i.unit.name
|
||||
if i.unit.base_unit:
|
||||
conversion_unit = i.unit.base_unit
|
||||
|
||||
# TODO allow setting which units to convert to? possibly only once conversions become visible
|
||||
units = caches['default'].get(CacheHelper(self.space).BASE_UNITS_CACHE_KEY, None)
|
||||
if not units:
|
||||
units = Unit.objects.filter(space=self.space, base_unit__in=(BASE_UNITS_VOLUME + BASE_UNITS_WEIGHT)).all()
|
||||
caches['default'].set(CacheHelper(self.space).BASE_UNITS_CACHE_KEY, units, 60 * 60) # cache is cleared on unit save signal so long duration is fine
|
||||
|
||||
for u in units:
|
||||
try:
|
||||
ingredient = Ingredient(amount=self.convert_from_to(conversion_unit, u.base_unit, i.amount), unit=u, food=ingredient_list[0].food, )
|
||||
if not any((x.unit.name == ingredient.unit.name or x.unit.base_unit == ingredient.unit.name) for x in base_conversion_ingredient_list):
|
||||
base_conversion_ingredient_list.append(ingredient)
|
||||
except ConversionException:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return base_conversion_ingredient_list
|
||||
|
||||
def get_conversions(self, ingredient):
|
||||
"""
|
||||
Converts an ingredient to all possible conversions based on the custom unit conversion database.
|
||||
After that passes conversion to UnitConversionHelper.base_conversions() to get all base conversions possible.
|
||||
:param ingredient: Ingredient object
|
||||
:return: list of ingredients with all possible custom and base conversions
|
||||
"""
|
||||
conversions = [ingredient]
|
||||
if ingredient.unit:
|
||||
for c in ingredient.unit.unit_conversion_base_relation.all():
|
||||
if c.space == self.space:
|
||||
r = self._uc_convert(c, ingredient.amount, ingredient.unit, ingredient.food)
|
||||
if r and r not in conversions:
|
||||
conversions.append(r)
|
||||
for c in ingredient.unit.unit_conversion_converted_relation.all():
|
||||
if c.space == self.space:
|
||||
r = self._uc_convert(c, ingredient.amount, ingredient.unit, ingredient.food)
|
||||
if r and r not in conversions:
|
||||
conversions.append(r)
|
||||
|
||||
conversions = self.base_conversions(conversions)
|
||||
|
||||
return conversions
|
||||
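A sketch of how the conversion helper above might be called (assumed call site; `space` and `ingredient` stand in for an existing Space and a saved Ingredient with unit and food):

    from cookbook.helper.unit_conversion_helper import UnitConversionHelper

    uch = UnitConversionHelper(space)                 # space: the active Space (assumed available)
    for conv in uch.get_conversions(ingredient):      # original ingredient plus custom and base conversions
        print(conv.amount, conv.unit, conv.food)      # unsaved Ingredient objects, per the helper above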
|
||||
def _uc_convert(self, uc, amount, unit, food):
|
||||
"""
|
||||
Helper to calculate values for custom unit conversions.
|
||||
Converts given base values using the passed UnitConversion object into a converted Ingredient
|
||||
:param uc: UnitConversion object
|
||||
:param amount: base amount
|
||||
:param unit: base unit
|
||||
:param food: base food
|
||||
:return: converted ingredient object from base amount/unit/food
|
||||
"""
|
||||
if uc.food is None or uc.food == food:
|
||||
if unit == uc.base_unit:
|
||||
return Ingredient(amount=amount * (uc.converted_amount / uc.base_amount), unit=uc.converted_unit, food=food, space=self.space)
|
||||
else:
|
||||
return Ingredient(amount=amount * (uc.base_amount / uc.converted_amount), unit=uc.base_unit, food=food, space=self.space)
|
||||
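Worked example for the custom conversion math above (made-up values): given a UnitConversion with base_amount=1, base_unit=tbsp, converted_amount=15, converted_unit=g for a matching food, an ingredient of 2 tbsp converts to 2 * (15 / 1) = 30 g, while 30 g fed through the same record takes the else branch and converts back to 30 * (1 / 15) = 2 tbsp.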
@@ -36,7 +36,7 @@ class ChefTap(Integration):
|
||||
|
||||
recipe = Recipe.objects.create(name=title, created_by=self.request.user, internal=True, space=self.request.space, )
|
||||
|
||||
step = Step.objects.create(instruction='\n'.join(directions), space=self.request.space,)
|
||||
step = Step.objects.create(instruction='\n'.join(directions), space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,)
|
||||
|
||||
if source_url != '':
|
||||
step.instruction += '\n' + source_url
|
||||
|
||||
@@ -4,6 +4,7 @@ from zipfile import ZipFile
|
||||
|
||||
from cookbook.helper.image_processing import get_filetype
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text, parse_time
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
@@ -19,6 +20,10 @@ class Chowdown(Integration):
|
||||
direction_mode = False
|
||||
description_mode = False
|
||||
|
||||
description = None
|
||||
prep_time = None
|
||||
serving = None
|
||||
|
||||
ingredients = []
|
||||
directions = []
|
||||
descriptions = []
|
||||
@@ -26,6 +31,12 @@ class Chowdown(Integration):
|
||||
line = fl.decode("utf-8")
|
||||
if 'title:' in line:
|
||||
title = line.replace('title:', '').replace('"', '').strip()
|
||||
if 'description:' in line:
|
||||
description = line.replace('description:', '').replace('"', '').strip()
|
||||
if 'prep_time:' in line:
|
||||
prep_time = line.replace('prep_time:', '').replace('"', '').strip()
|
||||
if 'yield:' in line:
|
||||
serving = line.replace('yield:', '').replace('"', '').strip()
|
||||
if 'image:' in line:
|
||||
image = line.replace('image:', '').strip()
|
||||
if 'tags:' in line:
|
||||
@@ -48,15 +59,43 @@ class Chowdown(Integration):
|
||||
descriptions.append(line)
|
||||
|
||||
recipe = Recipe.objects.create(name=title, created_by=self.request.user, internal=True, space=self.request.space)
|
||||
if description:
|
||||
recipe.description = description
|
||||
|
||||
for k in tags.split(','):
|
||||
print(f'adding keyword {k.strip()}')
|
||||
keyword, created = Keyword.objects.get_or_create(name=k.strip(), space=self.request.space)
|
||||
recipe.keywords.add(keyword)
|
||||
|
||||
step = Step.objects.create(
|
||||
instruction='\n'.join(directions) + '\n\n' + '\n'.join(descriptions), space=self.request.space,
|
||||
)
|
||||
ingredients_added = False
|
||||
for direction in directions:
|
||||
if len(direction.strip()) > 0:
|
||||
step = Step.objects.create(
|
||||
instruction=direction, name='', space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
|
||||
)
|
||||
else:
|
||||
step = Step.objects.create(
|
||||
instruction=direction, space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
|
||||
)
|
||||
if not ingredients_added:
|
||||
ingredients_added = True
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
recipe.steps.add(step)
|
||||
|
||||
if serving:
|
||||
recipe.servings = parse_servings(serving)
|
||||
recipe.servings_text = 'servings'
|
||||
|
||||
if prep_time:
|
||||
recipe.working_time = parse_time(prep_time)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
@@ -76,6 +115,7 @@ class Chowdown(Integration):
|
||||
if re.match(f'^images/{image}$', z.filename):
|
||||
self.import_recipe_image(recipe, BytesIO(import_zip.read(z.filename)), filetype=get_filetype(z.filename))
|
||||
|
||||
recipe.save()
|
||||
return recipe
|
||||
|
||||
def get_file_from_recipe(self, recipe):
|
||||
|
||||
@@ -1,18 +1,15 @@
|
||||
import base64
|
||||
import gzip
|
||||
import json
|
||||
import re
|
||||
from gettext import gettext as _
|
||||
from io import BytesIO
|
||||
|
||||
import requests
|
||||
import yaml
|
||||
import validators
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_html_import import get_recipe_from_source
|
||||
from cookbook.helper.recipe_url_import import iso_duration_to_minutes
|
||||
from cookbook.helper.recipe_url_import import (get_from_scraper, get_images_from_soup,
|
||||
iso_duration_to_minutes)
|
||||
from cookbook.helper.scrapers.scrapers import text_scraper
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
from cookbook.models import Ingredient, Recipe, Step
|
||||
|
||||
|
||||
class CookBookApp(Integration):
|
||||
@@ -23,7 +20,9 @@ class CookBookApp(Integration):
|
||||
def get_recipe_from_file(self, file):
|
||||
recipe_html = file.getvalue().decode("utf-8")
|
||||
|
||||
recipe_json, recipe_tree, html_data, images = get_recipe_from_source(recipe_html, 'CookBookApp', self.request)
|
||||
scrape = text_scraper(text=recipe_html)
|
||||
recipe_json = get_from_scraper(scrape, self.request)
|
||||
images = list(dict.fromkeys(get_images_from_soup(scrape.soup, None)))
|
||||
|
||||
recipe = Recipe.objects.create(
|
||||
name=recipe_json['name'].strip(),
|
||||
@@ -32,7 +31,7 @@ class CookBookApp(Integration):
|
||||
|
||||
try:
|
||||
recipe.servings = re.findall('([0-9])+', recipe_json['recipeYield'])[0]
|
||||
except Exception as e:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
@@ -41,7 +40,9 @@ class CookBookApp(Integration):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
step = Step.objects.create(instruction=recipe_json['recipeInstructions'], space=self.request.space, )
|
||||
# assuming import files only contain single step
|
||||
step = Step.objects.create(instruction=recipe_json['steps'][0]['instruction'], space=self.request.space,
|
||||
show_ingredients_table=self.request.user.userpreference.show_step_ingredients, )
|
||||
|
||||
if 'nutrition' in recipe_json:
|
||||
step.instruction = step.instruction + '\n\n' + recipe_json['nutrition']
|
||||
@@ -50,17 +51,21 @@ class CookBookApp(Integration):
|
||||
recipe.steps.add(step)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in recipe_json['recipeIngredient']:
|
||||
f = ingredient_parser.get_food(ingredient['ingredient']['text'])
|
||||
u = ingredient_parser.get_unit(ingredient['unit']['text'])
|
||||
for ingredient in recipe_json['steps'][0]['ingredients']:
|
||||
f = ingredient_parser.get_food(ingredient['food']['name'])
|
||||
u = None
|
||||
if unit := ingredient.get('unit', None):
|
||||
u = ingredient_parser.get_unit(unit.get('name', None))
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=ingredient['amount'], note=ingredient['note'], space=self.request.space,
|
||||
food=f, unit=u, amount=ingredient.get('amount', None), note=ingredient.get('note', None), original_text=ingredient.get('original_text', None), space=self.request.space,
|
||||
))
|
||||
|
||||
if len(images) > 0:
|
||||
try:
|
||||
response = requests.get(images[0])
|
||||
self.import_recipe_image(recipe, BytesIO(response.content))
|
||||
url = images[0]
|
||||
if validators.url(url, public=True):
|
||||
response = requests.get(url)
|
||||
self.import_recipe_image(recipe, BytesIO(response.content))
|
||||
except Exception as e:
|
||||
print('failed to import image ', str(e))
|
||||
|
||||
|
||||
cookbook/integration/cookmate.py (new file, 83 lines)
@@ -0,0 +1,83 @@
|
||||
from io import BytesIO
|
||||
|
||||
import requests
|
||||
import validators
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text, parse_time
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Ingredient, Recipe, Step
|
||||
|
||||
|
||||
class Cookmate(Integration):
|
||||
|
||||
def import_file_name_filter(self, zip_info_object):
|
||||
return zip_info_object.filename.endswith('.xml')
|
||||
|
||||
def get_files_from_recipes(self, recipes, el, cookie):
|
||||
raise NotImplementedError('Method not implemented in storage integration')
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
recipe_xml = file
|
||||
|
||||
recipe = Recipe.objects.create(
|
||||
name=recipe_xml.find('title').text.strip(),
|
||||
created_by=self.request.user, internal=True, space=self.request.space)
|
||||
|
||||
if recipe_xml.find('preptime') is not None and recipe_xml.find('preptime').text is not None:
|
||||
recipe.working_time = parse_time(recipe_xml.find('preptime').text.strip())
|
||||
|
||||
if recipe_xml.find('cooktime') is not None and recipe_xml.find('cooktime').text is not None:
|
||||
recipe.waiting_time = parse_time(recipe_xml.find('cooktime').text.strip())
|
||||
|
||||
if recipe_xml.find('quantity') is not None and recipe_xml.find('quantity').text is not None:
|
||||
recipe.servings = parse_servings(recipe_xml.find('quantity').text.strip())
|
||||
recipe.servings_text = parse_servings_text(recipe_xml.find('quantity').text.strip())
|
||||
|
||||
if recipe_xml.find('url') is not None and recipe_xml.find('url').text is not None:
|
||||
recipe.source_url = recipe_xml.find('url').text.strip()
|
||||
|
||||
if recipe_xml.find('description') is not None: # description is a list of <li>'s with text
|
||||
if len(recipe_xml.find('description')) > 0:
|
||||
recipe.description = recipe_xml.find('description')[0].text[:512]
|
||||
|
||||
if recipe_text := recipe_xml.find('recipetext'):
|
||||
for step in recipe_text.getchildren():
|
||||
if step.text:
|
||||
step = Step.objects.create(
|
||||
instruction=step.text.strip(), space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
|
||||
)
|
||||
recipe.steps.add(step)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
|
||||
if recipe_ingredients := recipe_xml.find('ingredient'):
|
||||
ingredient_step = recipe.steps.first()
|
||||
if ingredient_step is None:
|
||||
ingredient_step = Step.objects.create(space=self.request.space, instruction='')
|
||||
|
||||
for ingredient in recipe_ingredients.getchildren():
|
||||
if ingredient.text:
|
||||
if ingredient.text.strip() != '':
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient.text.strip())
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
ingredient_step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient.text.strip(), space=self.request.space,
|
||||
))
|
||||
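The parse/get_food/get_unit pattern above recurs in every importer; a rough sketch of the intended flow (the split shown is an assumption about IngredientParser, not documented behaviour):

    parser = IngredientParser(request, True)                       # request: the current request (assumed)
    amount, unit, food, note = parser.parse('2 tbsp olive oil, extra virgin')
    # e.g. amount=2, unit='tbsp', food='olive oil', note='extra virgin'  (illustrative values only)
    f = parser.get_food(food)   # resolves/creates the Food in the current space (assumed behaviour)
    u = parser.get_unit(unit)   # resolves/creates the Unit in the current space (assumed behaviour)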
|
||||
if recipe_xml.find('imageurl') is not None:
|
||||
try:
|
||||
url = recipe_xml.find('imageurl').text.strip()
|
||||
if validators.url(url, public=True):
|
||||
response = requests.get(url)
|
||||
self.import_recipe_image(recipe, BytesIO(response.content))
|
||||
except Exception as e:
|
||||
print('failed to import image ', str(e))
|
||||
|
||||
recipe.save()
|
||||
|
||||
return recipe
|
||||
|
||||
def get_file_from_recipe(self, recipe):
|
||||
raise NotImplementedError('Method not implemented in storage integration')
|
||||
@@ -1,12 +1,10 @@
|
||||
import re
|
||||
from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
from bs4 import BeautifulSoup, Tag
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_html_import import get_recipe_from_source
|
||||
from cookbook.helper.recipe_url_import import iso_duration_to_minutes, parse_servings
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
@@ -22,7 +20,13 @@ class CopyMeThat(Integration):
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
# 'file' comes in as a BeautifulSoup object
|
||||
recipe = Recipe.objects.create(name=file.find("div", {"id": "name"}).text.strip(), created_by=self.request.user, internal=True, space=self.request.space, )
|
||||
try:
|
||||
source = file.find("a", {"id": "original_link"}).text
|
||||
except AttributeError:
|
||||
source = None
|
||||
|
||||
recipe = Recipe.objects.create(name=file.find("div", {"id": "name"}).text.strip(
|
||||
)[:128], source_url=source, created_by=self.request.user, internal=True, space=self.request.space, )
|
||||
|
||||
for category in file.find_all("span", {"class": "recipeCategory"}):
|
||||
keyword, created = Keyword.objects.get_or_create(name=category.text, space=self.request.space)
|
||||
@@ -32,40 +36,81 @@ class CopyMeThat(Integration):
|
||||
recipe.servings = parse_servings(file.find("a", {"id": "recipeYield"}).text.strip())
|
||||
recipe.working_time = iso_duration_to_minutes(file.find("span", {"meta": "prepTime"}).text.strip())
|
||||
recipe.waiting_time = iso_duration_to_minutes(file.find("span", {"meta": "cookTime"}).text.strip())
|
||||
recipe.save()
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
step = Step.objects.create(instruction='', space=self.request.space, )
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in file.find_all("li", {"class": "recipeIngredient"}):
|
||||
if ingredient.text == "":
|
||||
continue
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient.text.strip())
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient.text.strip(), space=self.request.space,
|
||||
))
|
||||
|
||||
for s in file.find_all("li", {"class": "instruction"}):
|
||||
if s.text == "":
|
||||
continue
|
||||
step.instruction += s.text.strip() + ' \n\n'
|
||||
|
||||
for s in file.find_all("li", {"class": "recipeNote"}):
|
||||
if s.text == "":
|
||||
continue
|
||||
step.instruction += s.text.strip() + ' \n\n'
|
||||
|
||||
try:
|
||||
if file.find("a", {"id": "original_link"}).text != '':
|
||||
step.instruction += "\n\n" + _("Imported from") + ": " + file.find("a", {"id": "original_link"}).text
|
||||
step.save()
|
||||
if len(file.find("span", {"id": "starred"}).text.strip()) > 0:
|
||||
recipe.keywords.add(Keyword.objects.get_or_create(space=self.request.space, name=_('Favorite'))[0])
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
try:
|
||||
if len(file.find("span", {"id": "made_this"}).text.strip()) > 0:
|
||||
recipe.keywords.add(Keyword.objects.get_or_create(space=self.request.space, name=_('I made this'))[0])
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
step = Step.objects.create(instruction='', space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients, )
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
|
||||
ingredients = file.find("ul", {"id": "recipeIngredients"})
|
||||
if isinstance(ingredients, Tag):
|
||||
for ingredient in ingredients.children:
|
||||
if not isinstance(ingredient, Tag) or not ingredient.text.strip() or "recipeIngredient_spacer" in ingredient['class']:
|
||||
continue
|
||||
if any(x in ingredient['class'] for x in ["recipeIngredient_subheader", "recipeIngredient_note"]):
|
||||
step.ingredients.add(
|
||||
Ingredient.objects.create(
|
||||
is_header=True,
|
||||
note=ingredient.text.strip()[
|
||||
:256],
|
||||
original_text=ingredient.text.strip(),
|
||||
space=self.request.space,
|
||||
))
|
||||
else:
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient.text.strip())
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(food=f, unit=u, amount=amount, note=note, original_text=ingredient.text.strip(), space=self.request.space, ))
|
||||
|
||||
instructions = file.find("ol", {"id": "recipeInstructions"})
|
||||
if isinstance(instructions, Tag):
|
||||
for instruction in instructions.children:
|
||||
if not isinstance(instruction, Tag) or instruction.text == "":
|
||||
continue
|
||||
if "instruction_subheader" in instruction['class']:
|
||||
if step.instruction:
|
||||
step.save()
|
||||
recipe.steps.add(step)
|
||||
step = Step.objects.create(instruction='', space=self.request.space, )
|
||||
|
||||
step.name = instruction.text.strip()[:128]
|
||||
else:
|
||||
step.instruction += instruction.text.strip() + ' \n\n'
|
||||
|
||||
notes = file.find_all("li", {"class": "recipeNote"})
|
||||
if notes:
|
||||
step.instruction += '*Notes:* \n\n'
|
||||
|
||||
for n in notes:
|
||||
if n.text == "":
|
||||
continue
|
||||
step.instruction += '*' + n.text.strip() + '* \n\n'
|
||||
|
||||
description = ''
|
||||
try:
|
||||
description = file.find("div", {"id": "description"}).text.strip()
|
||||
except AttributeError:
|
||||
pass
|
||||
if len(description) <= 512:
|
||||
recipe.description = description
|
||||
else:
|
||||
recipe.description = description[:480] + ' ... (full description below)'
|
||||
step.instruction += '*Description:* \n\n*' + description + '* \n\n'
|
||||
|
||||
step.save()
|
||||
recipe.steps.add(step)
|
||||
|
||||
# import the Primary recipe image that is stored in the Zip
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import json
|
||||
import traceback
|
||||
from io import BytesIO, StringIO
|
||||
from re import match
|
||||
from zipfile import ZipFile
|
||||
@@ -19,7 +20,10 @@ class Default(Integration):
|
||||
recipe = self.decode_recipe(recipe_string)
|
||||
images = list(filter(lambda v: match('image.*', v), recipe_zip.namelist()))
|
||||
if images:
|
||||
self.import_recipe_image(recipe, BytesIO(recipe_zip.read(images[0])), filetype=get_filetype(images[0]))
|
||||
try:
|
||||
self.import_recipe_image(recipe, BytesIO(recipe_zip.read(images[0])), filetype=get_filetype(images[0]))
|
||||
except AttributeError:
|
||||
traceback.print_exc()
|
||||
return recipe
|
||||
|
||||
def decode_recipe(self, string):
|
||||
@@ -54,7 +58,7 @@ class Default(Integration):
|
||||
|
||||
try:
|
||||
recipe_zip_obj.writestr(f'image{get_filetype(r.image.file.name)}', r.image.file.read())
|
||||
except ValueError:
|
||||
except (ValueError, FileNotFoundError):
|
||||
pass
|
||||
|
||||
recipe_zip_obj.close()
|
||||
@@ -67,4 +71,4 @@ class Default(Integration):
|
||||
|
||||
export_zip_obj.close()
|
||||
|
||||
return [[ self.get_export_file_name(), export_zip_stream.getvalue() ]]
|
||||
return [[self.get_export_file_name(), export_zip_stream.getvalue()]]
|
||||
|
||||
@@ -28,7 +28,7 @@ class Domestica(Integration):
|
||||
recipe.save()
|
||||
|
||||
step = Step.objects.create(
|
||||
instruction=file['directions'], space=self.request.space,
|
||||
instruction=file['directions'], space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
|
||||
)
|
||||
|
||||
if file['source'] != '':
|
||||
|
||||
@@ -1,14 +1,11 @@
|
||||
import time
|
||||
import datetime
|
||||
import json
|
||||
import traceback
|
||||
import uuid
|
||||
from io import BytesIO, StringIO
|
||||
from io import BytesIO
|
||||
from zipfile import BadZipFile, ZipFile
|
||||
from django.core.cache import cache
|
||||
import datetime
|
||||
|
||||
from bs4 import Tag
|
||||
from django.core.cache import cache
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.core.files import File
|
||||
from django.db import IntegrityError
|
||||
@@ -16,12 +13,11 @@ from django.http import HttpResponse
|
||||
from django.utils.formats import date_format
|
||||
from django.utils.translation import gettext as _
|
||||
from django_scopes import scope
|
||||
from lxml import etree
|
||||
|
||||
from cookbook.forms import ImportExportBase
|
||||
from cookbook.helper.image_processing import get_filetype, handle_image
|
||||
from cookbook.helper.image_processing import handle_image
|
||||
from cookbook.models import Keyword, Recipe
|
||||
from recipes.settings import DEBUG
|
||||
from recipes.settings import EXPORT_FILE_CACHE_DURATION
|
||||
from recipes.settings import DEBUG, EXPORT_FILE_CACHE_DURATION
|
||||
|
||||
|
||||
class Integration:
|
||||
@@ -40,8 +36,7 @@ class Integration:
|
||||
self.export_type = export_type
|
||||
self.ignored_recipes = []
|
||||
|
||||
description = f'Imported by {request.user.get_user_name()} at {date_format(datetime.datetime.now(), "DATETIME_FORMAT")}. Type: {export_type}'
|
||||
icon = '📥'
|
||||
description = f'Imported by {request.user.get_user_display_name()} at {date_format(datetime.datetime.now(), "DATETIME_FORMAT")}. Type: {export_type}'
|
||||
|
||||
try:
|
||||
last_kw = Keyword.objects.filter(name__regex=r'^(Import [0-9]+)', space=request.space).latest('created_at')
|
||||
@@ -54,23 +49,19 @@ class Integration:
|
||||
self.keyword = parent.add_child(
|
||||
name=name,
|
||||
description=description,
|
||||
icon=icon,
|
||||
space=request.space
|
||||
)
|
||||
except (IntegrityError, ValueError): # in case, for whatever reason, the name does exist append UUID to it. Not nice but works for now.
|
||||
self.keyword = parent.add_child(
|
||||
name=f'{name} {str(uuid.uuid4())[0:8]}',
|
||||
description=description,
|
||||
icon=icon,
|
||||
space=request.space
|
||||
)
|
||||
|
||||
|
||||
|
||||
def do_export(self, recipes, el):
|
||||
|
||||
with scope(space=self.request.space):
|
||||
el.total_recipes = len(recipes)
|
||||
el.total_recipes = len(recipes)
|
||||
el.cache_duration = EXPORT_FILE_CACHE_DURATION
|
||||
el.save()
|
||||
|
||||
@@ -82,7 +73,7 @@ class Integration:
|
||||
export_file = file
|
||||
|
||||
else:
|
||||
#zip the files if there is more then one file
|
||||
# zip the files if there is more than one file
|
||||
export_filename = self.get_export_file_name()
|
||||
export_stream = BytesIO()
|
||||
export_obj = ZipFile(export_stream, 'w')
|
||||
@@ -93,8 +84,7 @@ class Integration:
|
||||
export_obj.close()
|
||||
export_file = export_stream.getvalue()
|
||||
|
||||
|
||||
cache.set('export_file_'+str(el.pk), {'filename': export_filename, 'file': export_file}, EXPORT_FILE_CACHE_DURATION)
|
||||
cache.set('export_file_' + str(el.pk), {'filename': export_filename, 'file': export_file}, EXPORT_FILE_CACHE_DURATION)
|
||||
el.running = False
|
||||
el.save()
|
||||
|
||||
@@ -102,7 +92,6 @@ class Integration:
|
||||
response['Content-Disposition'] = 'attachment; filename="' + export_filename + '"'
|
||||
return response
|
||||
|
||||
|
||||
def import_file_name_filter(self, zip_info_object):
|
||||
"""
|
||||
Since zipfile.namelist() returns all files in all subdirectories this function allows filtering of files
|
||||
@@ -144,7 +133,7 @@ class Integration:
|
||||
il.imported_recipes += 1
|
||||
il.save()
|
||||
import_zip.close()
|
||||
elif '.zip' in f['name'] or '.paprikarecipes' in f['name']:
|
||||
elif '.zip' in f['name'] or '.paprikarecipes' in f['name'] or '.mcb' in f['name']:
|
||||
import_zip = ZipFile(f['file'])
|
||||
file_list = []
|
||||
for z in import_zip.filelist:
|
||||
@@ -157,9 +146,16 @@ class Integration:
|
||||
file_list = self.split_recipe_file(BytesIO(import_zip.read('recipes.html')))
|
||||
il.total_recipes += len(file_list)
|
||||
|
||||
if isinstance(self, cookbook.integration.cookmate.Cookmate):
|
||||
new_file_list = []
|
||||
for file in file_list:
|
||||
new_file_list += etree.parse(BytesIO(import_zip.read(file.filename))).getroot().getchildren()
|
||||
il.total_recipes = len(new_file_list)
|
||||
file_list = new_file_list
|
||||
|
||||
for z in file_list:
|
||||
try:
|
||||
if isinstance(z, Tag):
|
||||
if not hasattr(z, 'filename') or isinstance(z, Tag):
|
||||
recipe = self.get_recipe_from_file(z)
|
||||
else:
|
||||
recipe = self.get_recipe_from_file(BytesIO(import_zip.read(z.filename)))
|
||||
@@ -172,7 +168,7 @@ class Integration:
|
||||
traceback.print_exc()
|
||||
self.handle_exception(e, log=il, message=f'-------------------- \nERROR \n{e}\n--------------------\n')
|
||||
import_zip.close()
|
||||
elif '.json' in f['name'] or '.txt' in f['name'] or '.mmf' in f['name'] or '.rk' in f['name']:
|
||||
elif '.json' in f['name'] or '.xml' in f['name'] or '.txt' in f['name'] or '.mmf' in f['name'] or '.rk' in f['name'] or '.melarecipe' in f['name']:
|
||||
data_list = self.split_recipe_file(f['file'])
|
||||
il.total_recipes += len(data_list)
|
||||
for d in data_list:
|
||||
@@ -243,7 +239,7 @@ class Integration:
|
||||
:param image_file: ByteIO stream containing the image
|
||||
:param filetype: type of file to write bytes to, default to .jpeg if unknown
|
||||
"""
|
||||
recipe.image = File(handle_image(self.request, File(image_file, name='image'), filetype=filetype)[0], name=f'{uuid.uuid4()}_{recipe.pk}{filetype}')
|
||||
recipe.image = File(handle_image(self.request, File(image_file, name='image'), filetype=filetype), name=f'{uuid.uuid4()}_{recipe.pk}{filetype}')
|
||||
recipe.save()
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
@@ -293,7 +289,6 @@ class Integration:
|
||||
if DEBUG:
|
||||
traceback.print_exc()
|
||||
|
||||
|
||||
def get_export_file_name(self, format='zip'):
|
||||
return "export_{}.{}".format(datetime.datetime.now().strftime("%Y-%m-%d"), format)
|
||||
|
||||
|
||||
@@ -5,8 +5,9 @@ from zipfile import ZipFile
|
||||
|
||||
from cookbook.helper.image_processing import get_filetype
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text, parse_time
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Ingredient, Recipe, Step
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
|
||||
class Mealie(Integration):
|
||||
@@ -23,41 +24,66 @@ class Mealie(Integration):
|
||||
name=recipe_json['name'].strip(), description=description,
|
||||
created_by=self.request.user, internal=True, space=self.request.space)
|
||||
|
||||
# TODO parse times (given in PT2H3M )
|
||||
# @vabene check recipe_url_import.iso_duration_to_minutes I think it does what you are looking for
|
||||
|
||||
ingredients_added = False
|
||||
for s in recipe_json['recipe_instructions']:
|
||||
step = Step.objects.create(
|
||||
instruction=s['text'], space=self.request.space,
|
||||
)
|
||||
if not ingredients_added:
|
||||
ingredients_added = True
|
||||
|
||||
if len(recipe_json['description'].strip()) > 500:
|
||||
step.instruction = recipe_json['description'].strip() + '\n\n' + step.instruction
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in recipe_json['recipe_ingredient']:
|
||||
try:
|
||||
if ingredient['food']:
|
||||
f = ingredient_parser.get_food(ingredient['food'])
|
||||
u = ingredient_parser.get_unit(ingredient['unit'])
|
||||
amount = ingredient['quantity']
|
||||
note = ingredient['note']
|
||||
original_text = None
|
||||
else:
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient['note'])
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
original_text = ingredient['note']
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, original_text=original_text, space=self.request.space,
|
||||
))
|
||||
except Exception:
|
||||
pass
|
||||
step = Step.objects.create(instruction=s['text'], space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients, )
|
||||
recipe.steps.add(step)
|
||||
|
||||
step = recipe.steps.first()
|
||||
if not step: # if there is no step in the exported data
|
||||
step = Step.objects.create(instruction='', space=self.request.space, )
|
||||
recipe.steps.add(step)
|
||||
|
||||
if len(recipe_json['description'].strip()) > 500:
|
||||
step.instruction = recipe_json['description'].strip() + '\n\n' + step.instruction
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in recipe_json['recipe_ingredient']:
|
||||
try:
|
||||
if ingredient['food']:
|
||||
f = ingredient_parser.get_food(ingredient['food'])
|
||||
u = ingredient_parser.get_unit(ingredient['unit'])
|
||||
amount = ingredient['quantity']
|
||||
note = ingredient['note']
|
||||
original_text = None
|
||||
else:
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient['note'])
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
original_text = ingredient['note']
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, original_text=original_text, space=self.request.space,
|
||||
))
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if 'tags' in recipe_json and len(recipe_json['tags']) > 0:
|
||||
for k in recipe_json['tags']:
|
||||
if 'name' in k:
|
||||
keyword, created = Keyword.objects.get_or_create(name=k['name'].strip(), space=self.request.space)
|
||||
recipe.keywords.add(keyword)
|
||||
|
||||
if 'notes' in recipe_json and len(recipe_json['notes']) > 0:
|
||||
notes_text = "#### Notes \n\n"
|
||||
for n in recipe_json['notes']:
|
||||
notes_text += f'{n["text"]} \n'
|
||||
|
||||
step = Step.objects.create(
|
||||
instruction=notes_text, space=self.request.space,
|
||||
)
|
||||
recipe.steps.add(step)
|
||||
|
||||
if 'recipe_yield' in recipe_json:
|
||||
recipe.servings = parse_servings(recipe_json['recipe_yield'])
|
||||
recipe.servings_text = parse_servings_text(recipe_json['recipe_yield'])
|
||||
|
||||
if 'total_time' in recipe_json and recipe_json['total_time'] is not None:
|
||||
recipe.working_time = parse_time(recipe_json['total_time'])
|
||||
|
||||
if 'org_url' in recipe_json:
|
||||
recipe.source_url = recipe_json['org_url']
|
||||
|
||||
recipe.save()
|
||||
|
||||
for f in self.files:
|
||||
if '.zip' in f['name']:
|
||||
import_zip = ZipFile(f['file'])
|
||||
|
||||
@@ -39,7 +39,7 @@ class MealMaster(Integration):
|
||||
recipe.keywords.add(keyword)
|
||||
|
||||
step = Step.objects.create(
|
||||
instruction='\n'.join(directions) + '\n\n', space=self.request.space,
|
||||
instruction='\n'.join(directions) + '\n\n', space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
|
||||
)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
|
||||
cookbook/integration/melarecipes.py (new file, 83 lines)
@@ -0,0 +1,83 @@
|
||||
import base64
|
||||
import json
|
||||
from io import BytesIO
|
||||
|
||||
from gettext import gettext as _
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_url_import import parse_servings, parse_time
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
|
||||
class MelaRecipes(Integration):
|
||||
|
||||
def split_recipe_file(self, file):
|
||||
return [json.loads(file.getvalue().decode("utf-8"))]
|
||||
|
||||
def get_files_from_recipes(self, recipes, el, cookie):
|
||||
raise NotImplementedError('Method not implemented in storage integration')
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
recipe_json = file
|
||||
|
||||
recipe = Recipe.objects.create(
|
||||
name=recipe_json['title'].strip(),
|
||||
created_by=self.request.user, internal=True, space=self.request.space)
|
||||
|
||||
if 'yield' in recipe_json:
|
||||
recipe.servings = parse_servings(recipe_json['yield'])
|
||||
|
||||
if 'cookTime' in recipe_json:
|
||||
recipe.waiting_time = parse_time(recipe_json['cookTime'])
|
||||
|
||||
if 'prepTime' in recipe_json:
|
||||
recipe.working_time = parse_time(recipe_json['prepTime'])
|
||||
|
||||
if 'favorite' in recipe_json and recipe_json['favorite']:
|
||||
recipe.keywords.add(Keyword.objects.get_or_create(space=self.request.space, name=_('Favorite'))[0])
|
||||
|
||||
if 'categories' in recipe_json:
|
||||
try:
|
||||
for x in recipe_json['categories']:
|
||||
recipe.keywords.add(Keyword.objects.get_or_create(space=self.request.space, name=x)[0])
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
instruction = ''
|
||||
if 'text' in recipe_json:
|
||||
instruction += f'*{recipe_json["text"].strip()}* \n'
|
||||
|
||||
if 'instructions' in recipe_json:
|
||||
instruction += recipe_json["instructions"].strip() + ' \n'
|
||||
|
||||
if 'notes' in recipe_json:
|
||||
instruction += recipe_json["notes"].strip() + ' \n'
|
||||
|
||||
if 'link' in recipe_json:
|
||||
recipe.source_url = recipe_json['link']
|
||||
|
||||
step = Step.objects.create(
|
||||
instruction=instruction, space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients
|
||||
)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in recipe_json['ingredients'].split('\n'):
|
||||
if ingredient.strip() != '':
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
recipe.steps.add(step)
|
||||
|
||||
if recipe_json.get("images", None):
|
||||
try:
|
||||
self.import_recipe_image(recipe, BytesIO(base64.b64decode(recipe_json['images'][0])), filetype='.jpeg')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return recipe
|
||||
|
||||
def get_file_from_recipe(self, recipe):
|
||||
raise NotImplementedError('Method not implemented in storage integration')
|
||||
@@ -1,13 +1,15 @@
|
||||
import json
|
||||
import re
|
||||
from io import BytesIO
|
||||
from io import BytesIO, StringIO
|
||||
from zipfile import ZipFile
|
||||
|
||||
from PIL import Image
|
||||
|
||||
from cookbook.helper.image_processing import get_filetype
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_url_import import iso_duration_to_minutes
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
from cookbook.models import Ingredient, Keyword, NutritionInformation, Recipe, Step
|
||||
|
||||
|
||||
class NextcloudCookbook(Integration):
|
||||
@@ -31,6 +33,9 @@ class NextcloudCookbook(Integration):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if 'url' in recipe_json:
|
||||
recipe.source_url = recipe_json['url'].strip()
|
||||
|
||||
if 'recipeCategory' in recipe_json:
|
||||
try:
|
||||
recipe.keywords.add(Keyword.objects.get_or_create(space=self.request.space, name=recipe_json['recipeCategory'])[0])
|
||||
@@ -40,15 +45,21 @@ class NextcloudCookbook(Integration):
|
||||
if 'keywords' in recipe_json:
|
||||
try:
|
||||
for x in recipe_json['keywords'].split(','):
|
||||
recipe.keywords.add(Keyword.objects.get_or_create(space=self.request.space, name=x)[0])
|
||||
if x.strip() != '':
|
||||
recipe.keywords.add(Keyword.objects.get_or_create(space=self.request.space, name=x)[0])
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
ingredients_added = False
|
||||
for s in recipe_json['recipeInstructions']:
|
||||
step = Step.objects.create(
|
||||
instruction=s, space=self.request.space,
|
||||
)
|
||||
if 'text' in s:
|
||||
step = Step.objects.create(
|
||||
instruction=s['text'], name=s['name'], space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
|
||||
)
|
||||
else:
|
||||
step = Step.objects.create(
|
||||
instruction=s, space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
|
||||
)
|
||||
if not ingredients_added:
|
||||
if len(recipe_json['description'].strip()) > 500:
|
||||
step.instruction = recipe_json['description'].strip() + '\n\n' + step.instruction
|
||||
@@ -66,11 +77,20 @@ class NextcloudCookbook(Integration):
|
||||
recipe.steps.add(step)
|
||||
|
||||
if 'nutrition' in recipe_json:
|
||||
nutrition = {}
|
||||
try:
|
||||
recipe.nutrition.calories = recipe_json['nutrition']['calories'].replace(' kcal', '').replace(' ', '')
|
||||
recipe.nutrition.proteins = recipe_json['nutrition']['calories'].replace(' g', '').replace(',', '.').replace(' ', '')
|
||||
recipe.nutrition.fats = recipe_json['nutrition']['calories'].replace(' g', '').replace(',', '.').replace(' ', '')
|
||||
recipe.nutrition.carbohydrates = recipe_json['nutrition']['calories'].replace(' g', '').replace(',', '.').replace(' ', '')
|
||||
if 'calories' in recipe_json['nutrition']:
|
||||
nutrition['calories'] = int(re.search(r'\d+', recipe_json['nutrition']['calories']).group())
|
||||
if 'proteinContent' in recipe_json['nutrition']:
|
||||
nutrition['proteins'] = int(re.search(r'\d+', recipe_json['nutrition']['proteinContent']).group())
|
||||
if 'fatContent' in recipe_json['nutrition']:
|
||||
nutrition['fats'] = int(re.search(r'\d+', recipe_json['nutrition']['fatContent']).group())
|
||||
if 'carbohydrateContent' in recipe_json['nutrition']:
|
||||
nutrition['carbohydrates'] = int(re.search(r'\d+', recipe_json['nutrition']['carbohydrateContent']).group())
|
||||
|
||||
if nutrition != {}:
|
||||
recipe.nutrition = NutritionInformation.objects.create(**nutrition, space=self.request.space)
|
||||
recipe.save()
|
||||
except Exception:
|
||||
pass
|
||||
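The regex extraction above keeps only the first run of digits from strings such as '350 kcal'; a minimal illustration:

    import re

    int(re.search(r'\d+', '350 kcal').group())   # 350
    int(re.search(r'\d+', '12,5 g').group())     # 12 -- anything after the comma is dropped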
|
||||
@@ -83,5 +103,90 @@ class NextcloudCookbook(Integration):
|
||||
|
||||
return recipe
|
||||
|
||||
def formatTime(self, min):
|
||||
h = min // 60
|
||||
m = min % 60
|
||||
return f'PT{h}H{m}M0S'
|
||||
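For reference, formatTime above renders minutes as a Nextcloud-style duration: formatTime(95) gives 'PT1H35M0S' (95 // 60 = 1, 95 % 60 = 35) and formatTime(30) gives 'PT0H30M0S'.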
|
||||
def get_file_from_recipe(self, recipe):
|
||||
raise NotImplementedError('Method not implemented in storage integration')
|
||||
|
||||
export = {}
|
||||
export['name'] = recipe.name
|
||||
export['description'] = recipe.description
|
||||
export['url'] = recipe.source_url
|
||||
export['prepTime'] = self.formatTime(recipe.working_time)
|
||||
export['cookTime'] = self.formatTime(recipe.waiting_time)
|
||||
export['totalTime'] = self.formatTime(recipe.working_time + recipe.waiting_time)
|
||||
export['recipeYield'] = recipe.servings
|
||||
export['image'] = f'/Recipes/{recipe.name}/full.jpg'
|
||||
export['imageUrl'] = f'/Recipes/{recipe.name}/full.jpg'
|
||||
|
||||
recipeKeyword = []
|
||||
for k in recipe.keywords.all():
|
||||
recipeKeyword.append(k.name)
|
||||
|
||||
export['keywords'] = recipeKeyword
|
||||
|
||||
recipeInstructions = []
|
||||
recipeIngredient = []
|
||||
for s in recipe.steps.all():
|
||||
recipeInstructions.append(s.instruction)
|
||||
|
||||
for i in s.ingredients.all():
|
||||
recipeIngredient.append(f'{float(i.amount)} {i.unit} {i.food}')
|
||||
|
||||
export['recipeIngredient'] = recipeIngredient
|
||||
export['recipeInstructions'] = recipeInstructions
|
||||
|
||||
return "recipe.json", json.dumps(export)
|
||||
|
||||
def get_files_from_recipes(self, recipes, el, cookie):
|
||||
export_zip_stream = BytesIO()
|
||||
export_zip_obj = ZipFile(export_zip_stream, 'w')
|
||||
|
||||
for recipe in recipes:
|
||||
if recipe.internal and recipe.space == self.request.space:
|
||||
|
||||
recipe_stream = StringIO()
|
||||
filename, data = self.get_file_from_recipe(recipe)
|
||||
recipe_stream.write(data)
|
||||
export_zip_obj.writestr(f'{recipe.name}/{filename}', recipe_stream.getvalue())
|
||||
recipe_stream.close()
|
||||
|
||||
try:
|
||||
imageByte = recipe.image.file.read()
|
||||
export_zip_obj.writestr(f'{recipe.name}/full.jpg', self.getJPEG(imageByte))
|
||||
export_zip_obj.writestr(f'{recipe.name}/thumb.jpg', self.getThumb(171, imageByte))
|
||||
export_zip_obj.writestr(f'{recipe.name}/thumb16.jpg', self.getThumb(16, imageByte))
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
el.exported_recipes += 1
|
||||
el.msg += self.get_recipe_processed_msg(recipe)
|
||||
el.save()
|
||||
|
||||
export_zip_obj.close()
|
||||
|
||||
return [[self.get_export_file_name(), export_zip_stream.getvalue()]]
|
||||
|
||||
def getJPEG(self, imageByte):
|
||||
image = Image.open(BytesIO(imageByte))
|
||||
image = image.convert('RGB')
|
||||
|
||||
bytes = BytesIO()
|
||||
image.save(bytes, "JPEG")
|
||||
return bytes.getvalue()
|
||||
|
||||
def getThumb(self, size, imageByte):
|
||||
image = Image.open(BytesIO(imageByte))
|
||||
|
||||
w, h = image.size
|
||||
m = min(w, h)
|
||||
|
||||
image = image.crop(((w - m) // 2, (h - m) // 2, (w + m) // 2, (h + m) // 2))
|
||||
image = image.resize([size, size], Image.Resampling.LANCZOS)
|
||||
image = image.convert('RGB')
|
||||
|
||||
bytes = BytesIO()
|
||||
image.save(bytes, "JPEG")
|
||||
return bytes.getvalue()
|
||||
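The thumbnail helper above center-crops to a square before resizing: for a 400x300 source, m = 300 and the crop box is ((400-300)//2, (300-300)//2, (400+300)//2, (300+300)//2) = (50, 0, 350, 300), which is then scaled to size x size and re-encoded as JPEG.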
|
||||
@@ -1,27 +1,59 @@
|
||||
import json
|
||||
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Ingredient, Recipe, Step
|
||||
from cookbook.models import Comment, CookLog, Ingredient, Keyword, Recipe, Step
|
||||
|
||||
|
||||
class OpenEats(Integration):
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
recipe = Recipe.objects.create(name=file['name'].strip(), created_by=self.request.user, internal=True,
|
||||
|
||||
description = file['info']
|
||||
description_max_length = Recipe._meta.get_field('description').max_length
|
||||
if len(description) > description_max_length:
|
||||
description = description[0:description_max_length]
|
||||
|
||||
recipe = Recipe.objects.create(name=file['name'].strip(), description=description, created_by=self.request.user, internal=True,
|
||||
servings=file['servings'], space=self.request.space, waiting_time=file['cook_time'], working_time=file['prep_time'])
|
||||
|
||||
instructions = ''
|
||||
if file["info"] != '':
|
||||
instructions += file["info"]
|
||||
|
||||
if file["directions"] != '':
|
||||
instructions += file["directions"]
|
||||
|
||||
if file["source"] != '':
|
||||
instructions += file["source"]
|
||||
instructions += '\n' + _('Recipe source:') + f'[{file["source"]}]({file["source"]})'
|
||||
|
||||
step = Step.objects.create(instruction=instructions, space=self.request.space,)
|
||||
cuisine_keyword, created = Keyword.objects.get_or_create(name="Cuisine", space=self.request.space)
|
||||
if file["cuisine"] != '':
|
||||
keyword, created = Keyword.objects.get_or_create(name=file["cuisine"].strip(), space=self.request.space)
|
||||
if created:
|
||||
keyword.move(cuisine_keyword, pos="last-child")
|
||||
recipe.keywords.add(keyword)
|
||||
|
||||
course_keyword, created = Keyword.objects.get_or_create(name="Course", space=self.request.space)
|
||||
if file["course"] != '':
|
||||
keyword, created = Keyword.objects.get_or_create(name=file["course"].strip(), space=self.request.space)
|
||||
if created:
|
||||
keyword.move(course_keyword, pos="last-child")
|
||||
recipe.keywords.add(keyword)
|
||||
|
||||
for tag in file["tags"]:
|
||||
keyword, created = Keyword.objects.get_or_create(name=tag.strip(), space=self.request.space)
|
||||
recipe.keywords.add(keyword)
|
||||
|
||||
for comment in file['comments']:
|
||||
Comment.objects.create(recipe=recipe, text=comment['text'], created_by=self.request.user)
|
||||
CookLog.objects.create(recipe=recipe, rating=comment['rating'], created_by=self.request.user, space=self.request.space)
|
||||
|
||||
if file["photo"] != '':
|
||||
recipe.image = f'recipes/openeats-import/{file["photo"]}'
|
||||
recipe.save()
|
||||
|
||||
step = Step.objects.create(instruction=instructions, space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in file['ingredients']:
|
||||
@@ -38,6 +70,9 @@ class OpenEats(Integration):
|
||||
recipe_json = json.loads(file.read())
|
||||
recipe_dict = {}
|
||||
ingredient_group_dict = {}
|
||||
cuisine_group_dict = {}
|
||||
course_group_dict = {}
|
||||
tag_group_dict = {}
|
||||
|
||||
for o in recipe_json:
|
||||
if o['model'] == 'recipe.recipe':
|
||||
@@ -50,11 +85,27 @@ class OpenEats(Integration):
|
||||
'cook_time': o['fields']['cook_time'],
|
||||
'servings': o['fields']['servings'],
|
||||
'ingredients': [],
|
||||
'photo': o['fields']['photo'],
|
||||
'cuisine': o['fields']['cuisine'],
|
||||
'course': o['fields']['course'],
|
||||
'tags': o['fields']['tags'],
|
||||
'comments': [],
|
||||
}
|
||||
if o['model'] == 'ingredient.ingredientgroup':
|
||||
ingredient_group_dict[o['pk']] = o['fields']['recipe']
|
||||
if o['model'] == 'recipe_groups.cuisine':
|
||||
cuisine_group_dict[o['pk']] = o['fields']['title']
|
||||
if o['model'] == 'recipe_groups.course':
|
||||
course_group_dict[o['pk']] = o['fields']['title']
|
||||
if o['model'] == 'recipe_groups.tag':
|
||||
tag_group_dict[o['pk']] = o['fields']['title']
|
||||
|
||||
for o in recipe_json:
|
||||
if o['model'] == 'rating.rating':
|
||||
recipe_dict[o['fields']['recipe']]["comments"].append({
|
||||
"text": o['fields']['comment'],
|
||||
"rating": o['fields']['rating']
|
||||
})
|
||||
if o['model'] == 'ingredient.ingredient':
|
||||
ingredient = {
|
||||
'food': o['fields']['title'],
|
||||
@@ -63,6 +114,15 @@ class OpenEats(Integration):
|
||||
}
|
||||
recipe_dict[ingredient_group_dict[o['fields']['ingredient_group']]]['ingredients'].append(ingredient)
|
||||
|
||||
for k, r in recipe_dict.items():
|
||||
if r["cuisine"] in cuisine_group_dict:
|
||||
r["cuisine"] = cuisine_group_dict[r["cuisine"]]
|
||||
if r["course"] in course_group_dict:
|
||||
r["course"] = course_group_dict[r["course"]]
|
||||
for index in range(len(r["tags"])):
|
||||
if r["tags"][index] in tag_group_dict:
|
||||
r["tags"][index] = tag_group_dict[r["tags"][index]]
|
||||
|
||||
return list(recipe_dict.values())
|
||||
|
||||
def get_file_from_recipe(self, recipe):
|
||||
|
||||
@@ -5,7 +5,11 @@ import re
|
||||
from gettext import gettext as _
|
||||
from io import BytesIO
|
||||
|
||||
import requests
|
||||
import validators
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
@@ -26,10 +30,9 @@ class Paprika(Integration):
|
||||
recipe.description = '' if len(recipe_json['description'].strip()) > 500 else recipe_json['description'].strip()
|
||||
|
||||
try:
|
||||
if re.match(r'([0-9])+\s(.)*', recipe_json['servings']):
|
||||
s = recipe_json['servings'].split(' ')
|
||||
recipe.servings = s[0]
|
||||
recipe.servings_text = s[1]
|
||||
if 'servings' in recipe_json:
|
||||
recipe.servings = parse_servings(recipe_json['servings'])
|
||||
recipe.servings_text = parse_servings_text(recipe_json['servings'])
|
||||
|
||||
if len(recipe_json['cook_time'].strip()) > 0:
|
||||
recipe.waiting_time = re.findall(r'\d+', recipe_json['cook_time'])[0]
|
||||
@@ -55,7 +58,7 @@ class Paprika(Integration):
|
||||
pass
|
||||
|
||||
step = Step.objects.create(
|
||||
instruction=instructions, space=self.request.space,
|
||||
instruction=instructions, space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
|
||||
)
|
||||
|
||||
if 'description' in recipe_json and len(recipe_json['description'].strip()) > 500:
|
||||
@@ -81,7 +84,14 @@ class Paprika(Integration):
|
||||
|
||||
recipe.steps.add(step)
|
||||
|
||||
if recipe_json.get("photo_data", None):
|
||||
self.import_recipe_image(recipe, BytesIO(base64.b64decode(recipe_json['photo_data'])), filetype='.jpeg')
|
||||
try:
|
||||
if recipe_json.get("image_url", None):
|
||||
url = recipe_json.get("image_url", None)
|
||||
if validators.url(url, public=True):
|
||||
response = requests.get(url)
|
||||
self.import_recipe_image(recipe, BytesIO(response.content))
|
||||
except Exception:
|
||||
if recipe_json.get("photo_data", None):
|
||||
self.import_recipe_image(recipe, BytesIO(base64.b64decode(recipe_json['photo_data'])), filetype='.jpeg')
|
||||
|
||||
return recipe
|
||||
|
||||
@@ -1,21 +1,11 @@
import json
from io import BytesIO
from re import match
from zipfile import ZipFile
import asyncio
from pyppeteer import launch

from rest_framework.renderers import JSONRenderer

from cookbook.helper.image_processing import get_filetype
from cookbook.integration.integration import Integration
from cookbook.serializer import RecipeExportSerializer

from cookbook.models import ExportLog
from asgiref.sync import sync_to_async

import django.core.management.commands.runserver as runserver
import logging
from asgiref.sync import sync_to_async
from pyppeteer import launch

from cookbook.integration.integration import Integration


class PDFexport(Integration):

@@ -42,7 +32,6 @@ class PDFexport(Integration):
            }
        }

        files = []
        for recipe in recipes:

@@ -50,20 +39,18 @@ class PDFexport(Integration):
            await page.emulateMedia('print')
            await page.setCookie(cookies)

            await page.goto('http://'+cmd.default_addr+':'+cmd.default_port+'/view/recipe/'+str(recipe.id), {'waitUntil': 'domcontentloaded'})
            await page.waitForSelector('#printReady');
            await page.goto('http://' + cmd.default_addr + ':' + cmd.default_port + '/view/recipe/' + str(recipe.id), {'waitUntil': 'domcontentloaded'})
            await page.waitForSelector('#printReady')

            files.append([recipe.name + '.pdf', await page.pdf(options)])
            await page.close();
            await page.close()

            el.exported_recipes += 1
            el.msg += self.get_recipe_processed_msg(recipe)
            await sync_to_async(el.save, thread_sensitive=True)()

        await browser.close()
        return files

    def get_files_from_recipes(self, recipes, el, cookie):
        return asyncio.run(self.get_files_from_recipes_async(recipes, el, cookie))
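For context on the PDFexport hunks above: the export drives headless Chromium through pyppeteer, rendering each recipe view with print media emulation, waiting for the `#printReady` marker, and collecting the `page.pdf()` bytes. A stripped-down sketch of that flow — the URL and PDF options are placeholders, and unlike the real code it opens a fresh browser and sets no session cookie:

import asyncio

from pyppeteer import launch


async def render_recipe_pdf(url, selector='#printReady'):
    browser = await launch()
    try:
        page = await browser.newPage()
        await page.emulateMedia('print')
        await page.goto(url, {'waitUntil': 'domcontentloaded'})
        await page.waitForSelector(selector)
        return await page.pdf({'format': 'A4'})
    finally:
        await browser.close()


if __name__ == '__main__':
    pdf_bytes = asyncio.run(render_recipe_pdf('http://127.0.0.1:8000/view/recipe/1'))
    print(len(pdf_bytes), 'bytes')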
@@ -35,7 +35,7 @@ class Pepperplate(Integration):
        recipe = Recipe.objects.create(name=title, description=description, created_by=self.request.user, internal=True, space=self.request.space)

        step = Step.objects.create(
            instruction='\n'.join(directions) + '\n\n', space=self.request.space,
            instruction='\n'.join(directions) + '\n\n', space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
        )

        ingredient_parser = IngredientParser(self.request, True)
@@ -1,6 +1,7 @@
from io import BytesIO

import requests
import validators

from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.integration.integration import Integration

@@ -45,10 +46,11 @@ class Plantoeat(Integration):
        recipe = Recipe.objects.create(name=title, description=description, created_by=self.request.user, internal=True, space=self.request.space)

        step = Step.objects.create(
            instruction='\n'.join(directions) + '\n\n', space=self.request.space,
            instruction='\n'.join(directions) + '\n\n', space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
        )

        if tags:
            tags = tags.replace('^',',')
            for k in tags.split(','):
                keyword, created = Keyword.objects.get_or_create(name=k.strip(), space=self.request.space)
                recipe.keywords.add(keyword)

@@ -66,8 +68,9 @@ class Plantoeat(Integration):

        if image_url:
            try:
                response = requests.get(image_url)
                self.import_recipe_image(recipe, BytesIO(response.content))
                if validators.url(image_url, public=True):
                    response = requests.get(image_url)
                    self.import_recipe_image(recipe, BytesIO(response.content))
            except Exception as e:
                print('failed to import image ', str(e))

@@ -78,7 +81,11 @@ class Plantoeat(Integration):
        current_recipe = ''

        for fl in file.readlines():
            line = fl.decode("windows-1250")
            try:
                line = fl.decode("utf-8")
            except UnicodeDecodeError:
                line = fl.decode("windows-1250")

            if line.startswith('--------------'):
                if current_recipe != '':
                    recipe_list.append(current_recipe)
@@ -5,6 +5,7 @@ from io import BytesIO
from zipfile import ZipFile

import requests
import validators

from django.utils.translation import gettext as _
from cookbook.helper.image_processing import get_filetype

@@ -45,7 +46,7 @@ class RecetteTek(Integration):
        if not instructions:
            instructions = ''

        step = Step.objects.create(instruction=instructions, space=self.request.space,)
        step = Step.objects.create(instruction=instructions, space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,)

        # Append the original import url to the step (if it exists)
        try:

@@ -60,7 +61,7 @@ class RecetteTek(Integration):
        ingredient_parser = IngredientParser(self.request, True)
        for ingredient in file['ingredients'].split('\n'):
            if len(ingredient.strip()) > 0:
                amount, unit, food, note = ingredient_parser.parse(food)
                amount, unit, food, note = ingredient_parser.parse(ingredient.strip())
                f = ingredient_parser.get_food(ingredient)
                u = ingredient_parser.get_unit(unit)
                step.ingredients.add(Ingredient.objects.create(

@@ -123,11 +124,13 @@ class RecetteTek(Integration):
                    self.import_recipe_image(recipe, BytesIO(import_zip.read(image_file_name)), filetype=get_filetype(image_file_name))
                else:
                    if file['originalPicture'] != '':
                        response = requests.get(file['originalPicture'])
                        if imghdr.what(BytesIO(response.content)) is not None:
                            self.import_recipe_image(recipe, BytesIO(response.content), filetype=get_filetype(file['originalPicture']))
                        else:
                            raise Exception("Original image failed to download.")
                        url = file['originalPicture']
                        if validators.url(url, public=True):
                            response = requests.get(url)
                            if imghdr.what(BytesIO(response.content)) is not None:
                                self.import_recipe_image(recipe, BytesIO(response.content), filetype=get_filetype(file['originalPicture']))
                            else:
                                raise Exception("Original image failed to download.")
            except Exception as e:
                print(recipe.name, ': failed to import image ', str(e))
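The RecetteTek image hunk above now validates the URL first and still verifies, via `imghdr`, that the downloaded bytes really are an image before importing them. A small sketch of that check in isolation — the `timeout` is an added assumption, and note that `imghdr` is deprecated since Python 3.11:

import imghdr
from io import BytesIO

import requests
import validators


def fetch_verified_image(url):
    # Only accept the download if the URL is public and the payload sniffs as an image.
    if not validators.url(url, public=True):
        return None
    response = requests.get(url, timeout=10)
    if imghdr.what(BytesIO(response.content)) is None:
        raise Exception("Original image failed to download.")
    return BytesIO(response.content)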
@@ -41,7 +41,7 @@ class RecipeKeeper(Integration):
        except AttributeError:
            pass

        step = Step.objects.create(instruction='', space=self.request.space,)
        step = Step.objects.create(instruction='', space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients, )

        ingredient_parser = IngredientParser(self.request, True)
        for ingredient in file.find("div", {"itemprop": "recipeIngredients"}).findChildren("p"):

@@ -51,13 +51,20 @@ class RecipeKeeper(Integration):
            f = ingredient_parser.get_food(food)
            u = ingredient_parser.get_unit(unit)
            step.ingredients.add(Ingredient.objects.create(
                food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
                food=f, unit=u, amount=amount, note=note, original_text=str(ingredient).replace('<p>', '').replace('</p>', ''), space=self.request.space,
            ))

        for s in file.find("div", {"itemprop": "recipeDirections"}).find_all("p"):
            if s.text == "":
                continue
            step.instruction += s.text + ' \n'
        step.save()

        for s in file.find("div", {"itemprop": "recipeNotes"}).find_all("p"):
            if s.text == "":
                continue
            step.instruction += s.text + ' \n'
        step.save()

        if file.find("span", {"itemprop": "recipeSource"}).text != '':
            step.instruction += "\n\n" + _("Imported from") + ": " + file.find("span", {"itemprop": "recipeSource"}).text
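The RecipeKeeper hunks above read the export HTML through BeautifulSoup lookups keyed on `itemprop` attributes, now also pulling in the `recipeNotes` block and stripping the `<p>` tags from `original_text`. A hypothetical miniature of that parsing, with made-up input HTML:

from bs4 import BeautifulSoup

html = (
    '<div itemprop="recipeIngredients"><p>200 g flour</p><p>1 tsp salt</p></div>'
    '<div itemprop="recipeDirections"><p>Mix.</p><p></p><p>Bake.</p></div>'
    '<div itemprop="recipeNotes"><p>Keeps for a week.</p></div>'
    '<span itemprop="recipeSource">https://example.org/recipe</span>'
)

soup = BeautifulSoup(html, 'html.parser')
# Same lookups as above: locate elements by their itemprop attribute.
ingredients = [str(p).replace('<p>', '').replace('</p>', '')
               for p in soup.find('div', {'itemprop': 'recipeIngredients'}).findChildren('p')]
directions = [p.text for p in soup.find('div', {'itemprop': 'recipeDirections'}).find_all('p') if p.text != '']
notes = [p.text for p in soup.find('div', {'itemprop': 'recipeNotes'}).find_all('p') if p.text != '']
source = soup.find('span', {'itemprop': 'recipeSource'}).text

print(ingredients, directions, notes, source)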
@@ -2,8 +2,10 @@ import json
from io import BytesIO

import requests
import validators

from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text, parse_time
from cookbook.integration.integration import Integration
from cookbook.models import Ingredient, Recipe, Step

@@ -17,25 +19,27 @@ class RecipeSage(Integration):
                                       created_by=self.request.user, internal=True,
                                       space=self.request.space)

        if file['recipeYield'] != '':
            recipe.servings = parse_servings(file['recipeYield'])
            recipe.servings_text = parse_servings_text(file['recipeYield'])

        try:
            if file['recipeYield'] != '':
                recipe.servings = int(file['recipeYield'])
            if 'totalTime' in file and file['totalTime'] != '':
                recipe.working_time = parse_time(file['totalTime'])

            if file['totalTime'] != '':
                recipe.waiting_time = int(file['totalTime']) - int(file['timePrep'])

            if file['prepTime'] != '':
                recipe.working_time = int(file['timePrep'])

            recipe.save()
            if 'timePrep' in file and file['prepTime'] != '':
                recipe.working_time = parse_time(file['timePrep'])
                recipe.waiting_time = parse_time(file['totalTime']) - parse_time(file['timePrep'])
        except Exception as e:
            print('failed to parse yield or time ', str(e))
            print('failed to parse time ', str(e))

        recipe.save()

        ingredient_parser = IngredientParser(self.request, True)
        ingredients_added = False
        for s in file['recipeInstructions']:
            step = Step.objects.create(
                instruction=s['text'], space=self.request.space,
                instruction=s['text'], space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
            )
            if not ingredients_added:
                ingredients_added = True

@@ -51,8 +55,10 @@ class RecipeSage(Integration):

        if len(file['image']) > 0:
            try:
                response = requests.get(file['image'][0])
                self.import_recipe_image(recipe, BytesIO(response.content))
                url = file['image'][0]
                if validators.url(url, public=True):
                    response = requests.get(url)
                    self.import_recipe_image(recipe, BytesIO(response.content))
            except Exception as e:
                print('failed to import image ', str(e))

@@ -77,14 +83,13 @@ class RecipeSage(Integration):
        }

        for s in recipe.steps.all():
            if s.type != Step.TIME:
                data['recipeInstructions'].append({
                    '@type': 'HowToStep',
                    'text': s.instruction
                })
            data['recipeInstructions'].append({
                '@type': 'HowToStep',
                'text': s.instruction
            })

                for i in s.ingredients.all():
                    data['recipeIngredient'].append(f'{float(i.amount)} {i.unit} {i.food}')
            for i in s.ingredients.all():
                data['recipeIngredient'].append(f'{float(i.amount)} {i.unit} {i.food}')

        return data
cookbook/integration/rezeptsuitede.py (new file, 75 lines)
@@ -0,0 +1,75 @@
import base64
from io import BytesIO
from xml import etree

from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text
from cookbook.integration.integration import Integration
from cookbook.models import Ingredient, Keyword, Recipe, Step


class Rezeptsuitede(Integration):

    def split_recipe_file(self, file):
        return etree.parse(file).getroot().getchildren()

    def get_recipe_from_file(self, file):
        recipe_xml = file

        recipe = Recipe.objects.create(
            name=recipe_xml.find('head').attrib['title'].strip(),
            created_by=self.request.user, internal=True, space=self.request.space)

        try:
            if recipe_xml.find('head').attrib['servingtype']:
                recipe.servings = parse_servings(recipe_xml.find('head').attrib['servingtype'].strip())
                recipe.servings_text = parse_servings_text(recipe_xml.find('head').attrib['servingtype'].strip())
        except KeyError:
            pass

        if recipe_xml.find('remark') is not None:  # description is a list of <li>'s with text
            if recipe_xml.find('remark').find('line') is not None:
                recipe.description = recipe_xml.find('remark').find('line').text[:512]

        for prep in recipe_xml.findall('preparation'):
            try:
                if prep.find('step').text:
                    step = Step.objects.create(
                        instruction=prep.find('step').text.strip(), space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
                    )
                    recipe.steps.add(step)
            except Exception:
                pass

        ingredient_parser = IngredientParser(self.request, True)

        if recipe_xml.find('part').find('ingredient') is not None:
            ingredient_step = recipe.steps.first()
            if ingredient_step is None:
                ingredient_step = Step.objects.create(space=self.request.space, instruction='')

            for ingredient in recipe_xml.find('part').findall('ingredient'):
                f = ingredient_parser.get_food(ingredient.attrib['item'])
                u = ingredient_parser.get_unit(ingredient.attrib['unit'])
                amount = 0
                if ingredient.attrib['qty'].strip() != '':
                    amount, unit, note = ingredient_parser.parse_amount(ingredient.attrib['qty'])
                ingredient_step.ingredients.add(Ingredient.objects.create(food=f, unit=u, amount=amount, space=self.request.space, ))

        try:
            k, created = Keyword.objects.get_or_create(name=recipe_xml.find('head').find('cat').text.strip(), space=self.request.space)
            recipe.keywords.add(k)
        except Exception:
            pass

        recipe.save()

        try:
            self.import_recipe_image(recipe, BytesIO(base64.b64decode(recipe_xml.find('head').find('picbin').text)), filetype='.jpeg')
        except BaseException:
            pass

        return recipe

    def get_file_from_recipe(self, recipe):
        raise NotImplementedError('Method not implemented in storage integration')
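The new Rezeptsuitede importer above walks an XML export: one child element per recipe, with `head` attributes for title and servings, `preparation/step` text, and `part/ingredient` items. A hypothetical minimal document in that shape, parsed with the standard library's ElementTree — the `<rezeptsuite>` root tag and all field values here are invented for illustration and are not taken from a real export:

import xml.etree.ElementTree as ET

xml_doc = '''<rezeptsuite>
  <recipe>
    <head title="Pancakes" servingtype="4 portions"><cat>Breakfast</cat></head>
    <remark><line>Quick and easy.</line></remark>
    <preparation><step>Mix everything and fry.</step></preparation>
    <part>
      <ingredient item="Flour" unit="g" qty="200"/>
      <ingredient item="Milk" unit="ml" qty="300"/>
    </part>
  </recipe>
</rezeptsuite>'''

root = ET.fromstring(xml_doc)
for recipe_xml in root:  # one element per recipe, as split_recipe_file() yields them
    head = recipe_xml.find('head')
    print(head.attrib['title'], '-', head.find('cat').text)
    for ingredient in recipe_xml.find('part').findall('ingredient'):
        print(' ', ingredient.attrib['qty'], ingredient.attrib['unit'], ingredient.attrib['item'])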
@@ -12,33 +12,33 @@ class RezKonv(Integration):

        ingredients = []
        directions = []
        for line in file.replace('\r', '').split('\n'):
        for line in file.replace('\r', '').replace('\n\n', '\n').split('\n'):
            if 'Titel:' in line:
                title = line.replace('Titel:', '').strip()
            if 'Kategorien:' in line:
                tags = line.replace('Kategorien:', '').strip()
            if ingredient_mode and ('quelle' in line.lower() or 'source' in line.lower()):
            if ingredient_mode and (
                    'quelle' in line.lower() or 'source' in line.lower() or (line == '' and len(ingredients) > 0)):
                ingredient_mode = False
                direction_mode = True
            if ingredient_mode:
                if line != '' and '===' not in line and 'Zubereitung' not in line:
                    ingredients.append(line.strip())
            if direction_mode:
                if line.strip() != '' and line.strip() != '=====':
                    directions.append(line.strip())
            if 'Zutaten:' in line:
            if 'Zutaten:' in line or 'Ingredients' in line or 'Menge:' in line:
                ingredient_mode = True
            if 'Zubereitung:' in line:
                ingredient_mode = False
                direction_mode = True

        recipe = Recipe.objects.create(name=title, created_by=self.request.user, internal=True, space=self.request.space)
        recipe = Recipe.objects.create(name=title, created_by=self.request.user, internal=True,
                                       space=self.request.space)

        for k in tags.split(','):
            keyword, created = Keyword.objects.get_or_create(name=k.strip(), space=self.request.space)
            recipe.keywords.add(keyword)

        step = Step.objects.create(
            instruction='\n'.join(directions) + '\n\n', space=self.request.space,
            instruction=' \n'.join(directions) + '\n\n', space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
        )

        ingredient_parser = IngredientParser(self.request, True)

@@ -60,7 +60,8 @@ class RezKonv(Integration):
    def split_recipe_file(self, file):
        recipe_list = []
        current_recipe = ''
        encoding_list = ['windows-1250', 'latin-1'] #TODO build algorithm to try trough encodings and fail if none work, use for all importers
        # TODO build algorithm to try trough encodings and fail if none work, use for all importers
        # encoding_list = ['windows-1250', 'latin-1']
        encoding = 'windows-1250'
        for fl in file.readlines():
            try:
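The RezKonv hunk above leaves a TODO for trying several encodings instead of hard-coding one. A sketch of what such a helper could look like — the encoding list and the raised error type are assumptions, not project code:

def decode_with_fallback(raw, encodings=('utf-8', 'windows-1250', 'latin-1')):
    # Try each candidate encoding in order; fail only if none of them work.
    for encoding in encodings:
        try:
            return raw.decode(encoding)
        except UnicodeDecodeError:
            continue
    raise ValueError('none of the candidate encodings could decode this line')


# hypothetical usage inside split_recipe_file(): line = decode_with_fallback(fl)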
@@ -43,7 +43,7 @@ class Saffron(Integration):

        recipe = Recipe.objects.create(name=title, description=description, created_by=self.request.user, internal=True, space=self.request.space, )

        step = Step.objects.create(instruction='\n'.join(directions), space=self.request.space, )
        step = Step.objects.create(instruction='\n'.join(directions), space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients, )

        ingredient_parser = IngredientParser(self.request, True)
        for ingredient in ingredients:

@@ -59,11 +59,11 @@ class Saffron(Integration):

    def get_file_from_recipe(self, recipe):

        data = "Title: "+recipe.name if recipe.name else ""+"\n"
        data += "Description: "+recipe.description if recipe.description else ""+"\n"
        data = "Title: " + recipe.name if recipe.name else "" + "\n"
        data += "Description: " + recipe.description if recipe.description else "" + "\n"
        data += "Source: \n"
        data += "Original URL: \n"
        data += "Yield: "+str(recipe.servings)+"\n"
        data += "Yield: " + str(recipe.servings) + "\n"
        data += "Cookbook: \n"
        data += "Section: \n"
        data += "Image: \n"

@@ -71,21 +71,20 @@ class Saffron(Integration):
        recipeInstructions = []
        recipeIngredient = []
        for s in recipe.steps.all():
            if s.type != Step.TIME:
                recipeInstructions.append(s.instruction)
            recipeInstructions.append(s.instruction)

                for i in s.ingredients.all():
                    recipeIngredient.append(f'{float(i.amount)} {i.unit} {i.food}')
            for i in s.ingredients.all():
                recipeIngredient.append(f'{float(i.amount)} {i.unit} {i.food}')

        data += "Ingredients: \n"
        for ingredient in recipeIngredient:
            data += ingredient+"\n"
            data += ingredient + "\n"

        data += "Instructions: \n"
        for instruction in recipeInstructions:
            data += instruction+"\n"
            data += instruction + "\n"

        return recipe.name+'.txt', data
        return recipe.name + '.txt', data

    def get_files_from_recipes(self, recipes, el, cookie):
        files = []
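One subtlety worth noting in the Saffron export above: because a conditional expression binds looser than `+`, the line `data = "Title: " + recipe.name if recipe.name else "" + "\n"` evaluates as `("Title: " + recipe.name) if recipe.name else ("" + "\n")`, so no newline is appended when the name is set; the PEP 8 hunk only reformats the spacing and keeps that behaviour. A tiny illustration with placeholder variables:

name = 'Pancakes'

data = 'Title: ' + name if name else '' + '\n'     # parsed as ('Title: ' + name) if name else ('\n')
print(repr(data))                                  # 'Title: Pancakes'  -- no trailing newline

data = 'Title: ' + (name if name else '') + '\n'   # parenthesised variant that always ends the line
print(repr(data))                                  # 'Title: Pancakes\n'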
cookbook/locale/ar/LC_MESSAGES/django.mo (new binary file, not shown)
cookbook/locale/ar/LC_MESSAGES/django.po (new file, 2638 lines, diff too large to show)
cookbook/locale/bg/LC_MESSAGES/django.mo (new binary file, not shown)
cookbook/locale/bg/LC_MESSAGES/django.po (new file, 3018 lines, diff too large to show)
(further locale files here: binary files not shown or diffs suppressed as too large)
@@ -6,20 +6,22 @@
# Translators:
# Pavel Solař <pavelsolar86@gmail.com>, 2021
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2021-02-09 18:01+0100\n"
"PO-Revision-Date: 2020-06-02 19:28+0000\n"
"Last-Translator: Pavel Solař <pavelsolar86@gmail.com>, 2021\n"
"Language-Team: Czech (https://www.transifex.com/django-recipes/teams/110507/cs/)\n"
"PO-Revision-Date: 2023-07-31 14:19+0000\n"
"Last-Translator: Mára Štěpánek <stepanekm7@gmail.com>\n"
"Language-Team: Czech <http://translate.tandoor.dev/projects/tandoor/"
"recipes-backend/cs/>\n"
"Language: cs\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: cs\n"
"Plural-Forms: nplurals=4; plural=(n == 1 && n % 1 == 0) ? 0 : (n >= 2 && n <= 4 && n % 1 == 0) ? 1: (n % 1 != 0 ) ? 2 : 3;\n"
"Plural-Forms: nplurals=4; plural=(n == 1 && n % 1 == 0) ? 0 : (n >= 2 && n "
"<= 4 && n % 1 == 0) ? 1: (n % 1 != 0 ) ? 2 : 3;\n"
"X-Generator: Weblate 4.15\n"

#: .\cookbook\filters.py:22 .\cookbook\templates\base.html:87
#: .\cookbook\templates\forms\edit_internal_recipe.html:219
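The re-wrapped `Plural-Forms` header in the Czech catalogue above encodes four plural classes. A small transcription of that formula into Python, just to make the bucketing explicit (the class labels in the comment are an informal reading, not part of the header):

def czech_plural_index(n):
    # 0: n == 1, 1: integers 2..4, 2: fractional values, 3: everything else
    if n == 1 and n % 1 == 0:
        return 0
    if 2 <= n <= 4 and n % 1 == 0:
        return 1
    if n % 1 != 0:
        return 2
    return 3


print([czech_plural_index(n) for n in (1, 3, 5, 1.5)])  # [0, 1, 3, 2]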
@@ -34,7 +36,7 @@ msgid ""
"try them out!"
msgstr ""
"Barva horního navigačního menu. Některé barvy neladí se všemi tématy a je "
"třeba je vyzkoušet."
"třeba je vyzkoušet!"

#: .\cookbook\forms.py:45
msgid "Default Unit to be used when inserting a new ingredient into a recipe."

@@ -48,7 +50,7 @@ msgid ""
"to fractions automatically)"
msgstr ""
"Povolit podporu zlomků u množství ingrediencí (desetinná čísla budou "
"automaticky převedena na zlomky)."
"automaticky převedena na zlomky)"

#: .\cookbook\forms.py:47
msgid ""

@@ -173,7 +175,7 @@ msgstr "Potravina, která by měla být nahrazena."

#: .\cookbook\forms.py:198
msgid "Add your comment: "
msgstr "Přidat vlastní komentář:"
msgstr "Přidat vlastní komentář: "

#: .\cookbook\forms.py:229
msgid "Leave empty for dropbox and enter app password for nextcloud."

@@ -551,7 +553,7 @@ msgstr "Cesta musí být v následujícím formátu"

#: .\cookbook\templates\batch\monitor.html:27
msgid "Sync Now!"
msgstr "Zahájit synchronizaci"
msgstr "Zahájit synchronizaci!"

#: .\cookbook\templates\batch\waiting.html:4
#: .\cookbook\templates\batch\waiting.html:10

@@ -1034,7 +1036,7 @@ msgstr "Tento text je kurzívou"
#: .\cookbook\templates\markdown_info.html:61
#: .\cookbook\templates\markdown_info.html:77
msgid "Blockquotes are also possible"
msgstr "Lze použít i kvotace "
msgstr "Lze použít i kvotace"

#: .\cookbook\templates\markdown_info.html:84
msgid "Lists"

@@ -1104,8 +1106,8 @@ msgid ""
"rel=\"noreferrer noopener\" target=\"_blank\">this one.</a>"
msgstr ""
"Ruční vytváření tabulek pomocí značek je složité. Doporučujeme použít "
"například <a href=\"https://www.tablesgenerator.com/markdown_tables\" "
"rel=\"noreferrer noopener\" target=\"_blank\">tento tabulkový editor</a>."
"například <a href=\"https://www.tablesgenerator.com/markdown_tables\" rel="
"\"noreferrer noopener\" target=\"_blank\">tento tabulkový editor.</a>"

#: .\cookbook\templates\markdown_info.html:155
#: .\cookbook\templates\markdown_info.html:157

@@ -1254,22 +1256,36 @@ msgid ""
" "
msgstr ""
"\n"
" <p>Modul jídelníčku umožňuje plánovat jídlo pomocí receptů i poznámek.</p>\n"
" <p>Jednoduše vyberte recept ze seznamu naposledy navštívených receptů, nebo ho vyhledejte\n"
" s přetáhněte na požadovaný den v rozvrhu. Můžete také přidat poznámku s popiskem\n"
" a poté přetáhnout recept pro vytvoření plánu s vlatními popisky. Vytvořením samotné poznámky\n"
" je možné přetažením pole poznámky do rozvrhu.</p>\n"
" <p>Kliknutím na recept zobrazíte detailní náhled. Odtud lze také přidat položky\n"
" do nákupního seznamu. Do nákupního seznamu můžete také přidat všechny recepty na daný den\n"
" kliknutím na ikonu nákupního košíku na horní straně tabulky.</p>\n"
" <p>V běžném případě se jídelníček plánuje hromadně, proto můžete v nastavení definovat\n"
" se kterými uživateli si přejete jídelníčky sdílet.\n"
" <p>Modul jídelníčku umožňuje plánovat jídlo "
"pomocí receptů i poznámek.</p>\n"
" <p>Jednoduše vyberte recept ze seznamu naposledy "
"navštívených receptů, nebo ho vyhledejte\n"
" s přetáhněte na požadovaný den v rozvrhu. "
"Můžete také přidat poznámku s popiskem\n"
" a poté přetáhnout recept pro vytvoření plánu "
"s vlatními popisky. Vytvořením samotné poznámky\n"
" je možné přetažením pole poznámky do "
"rozvrhu.</p>\n"
" <p>Kliknutím na recept zobrazíte detailní "
"náhled. Odtud lze také přidat položky\n"
" do nákupního seznamu. Do nákupního seznamu "
"můžete také přidat všechny recepty na daný den\n"
" kliknutím na ikonu nákupního košíku na horní "
"straně tabulky.</p>\n"
" <p>V běžném případě se jídelníček plánuje "
"hromadně, proto můžete v nastavení definovat\n"
" se kterými uživateli si přejete jídelníčky "
"sdílet.\n"
" </p>\n"
" <p>Můžete také upravovat typy jídel, které si přejete naplánovat. Pokud budete sdílet jídelníček \n"
" <p>Můžete také upravovat typy jídel, které si "
"přejete naplánovat. Pokud budete sdílet jídelníček \n"
" s někým, kdo\n"
" má přidána jiná jídla, jeho typy jídel se objeví i ve vašem seznamu. Pro předcházení\n"
" má přidána jiná jídla, jeho typy jídel se "
"objeví i ve vašem seznamu. Pro předcházení\n"
" duplicitám (např. Ostatní, Jiná)\n"
" pojmenujte váš typ jídla stejně, jako uživatel se kterým své seznamy sdílíte. Tím budou seznamy sloučeny.</p>\n"
" pojmenujte váš typ jídla stejně, jako "
"uživatel se kterým své seznamy sdílíte. Tím budou seznamy\n"
" sloučeny.</p>\n"
" "

#: .\cookbook\templates\meal_plan_entry.html:6

@@ -1331,12 +1347,12 @@ msgstr "Obrázek receptu"
#: .\cookbook\templates\recipes_table.html:46
#: .\cookbook\templates\url_import.html:55
msgid "Preparation time ca."
msgstr "Doba přípravy cca"
msgstr "Doba přípravy cca."

#: .\cookbook\templates\recipes_table.html:52
#: .\cookbook\templates\url_import.html:60
msgid "Waiting time ca."
msgstr "Doba čekání cca"
msgstr "Doba čekání cca."

#: .\cookbook\templates\recipes_table.html:55
msgid "External"

@@ -1384,7 +1400,7 @@ msgid ""
" in the following examples:"
msgstr ""
"Použijte tajný klíč jako autorizační hlavičku definovanou slovním klíčem, "
"jak je uvedeno v následujících příkladech."
"jak je uvedeno v následujících příkladech:"

#: .\cookbook\templates\settings.html:94
msgid "or"

@@ -1806,7 +1822,7 @@ msgstr "Import není pro tohoto poskytovatele implementován!"

#: .\cookbook\views\import_export.py:58
msgid "Exporting is not implemented for this provider"
msgstr "Eport není pro tohoto poskytovatele implementován!"
msgstr "Export není pro tohoto poskytovatele implementován!"

#: .\cookbook\views\lists.py:42
msgid "Import Log"

@@ -1838,7 +1854,7 @@ msgstr "Komentář uložen!"

#: .\cookbook\views\views.py:152
msgid "This recipe is already linked to the book!"
msgstr "Tento recept už v kuchařce existuje."
msgstr "Tento recept už v kuchařce existuje!"

#: .\cookbook\views\views.py:158
msgid "Bookmark saved!"
cookbook/locale/da/LC_MESSAGES/django.mo (new binary file, not shown)
cookbook/locale/da/LC_MESSAGES/django.po (new file, 2982 lines, diff too large to show)
cookbook/locale/el/LC_MESSAGES/django.mo (new binary file, not shown)
cookbook/locale/el/LC_MESSAGES/django.po (new file, 2732 lines, diff too large to show)
(further locale files here: binary files not shown or diffs suppressed as too large)
@@ -8,8 +8,8 @@ msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2021-11-04 12:31+0100\n"
"PO-Revision-Date: 2021-11-06 14:06+0000\n"
"Last-Translator: Nicklas Yli-Länttä <admin@timanttikuutio.eu>\n"
"PO-Revision-Date: 2022-03-18 16:31+0000\n"
"Last-Translator: Stefan Werner <werner@iki.fi>\n"
"Language-Team: Finnish <http://translate.tandoor.dev/projects/tandoor/"
"recipes-backend/fi/>\n"
"Language: fi\n"

@@ -17,7 +17,7 @@ msgstr ""
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=n != 1;\n"
"X-Generator: Weblate 4.8\n"
"X-Generator: Weblate 4.10.1\n"

#: .\cookbook\filters.py:23 .\cookbook\templates\base.html:125
#: .\cookbook\templates\forms\ingredients.html:34

@@ -31,10 +31,12 @@ msgid ""
"Color of the top navigation bar. Not all colors work with all themes, just "
"try them out!"
msgstr ""
"Ylänavigointipalkin väri. Ei kaikki värit toimi kaikkien teemojen kanssa; "
"kokeile!"

#: .\cookbook\forms.py:55
msgid "Default Unit to be used when inserting a new ingredient into a recipe."
msgstr ""
msgstr "Oletusmittayksikkö uuden aineksen lisäämisessä."

#: .\cookbook\forms.py:57
msgid ""

@@ -2435,7 +2437,7 @@ msgid ""
msgstr ""

#: .\cookbook\views\new.py:225
msgid "Email to user could not be send, please share link manually."
msgid "Email could not be sent to user. Please share the link manually."
msgstr ""

#: .\cookbook\views\views.py:127
cookbook/locale/he/LC_MESSAGES/django.po (new file, 2529 lines, diff too large to show)
(surrounding locale files: binary files not shown or diffs suppressed as too large)
@@ -11,8 +11,8 @@ msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2021-02-09 18:01+0100\n"
"PO-Revision-Date: 2021-10-13 12:50+0000\n"
"Last-Translator: Hrachya Kocharyan <hkocharyan@ctemplar.com>\n"
"PO-Revision-Date: 2023-01-08 17:55+0000\n"
"Last-Translator: Joachim Weber <joachim.weber@gmx.de>\n"
"Language-Team: Armenian <http://translate.tandoor.dev/projects/tandoor/"
"recipes-backend/hy/>\n"
"Language: hy\n"

@@ -20,7 +20,7 @@ msgstr ""
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
"X-Generator: Weblate 4.8\n"
"X-Generator: Weblate 4.15\n"

#: .\cookbook\filters.py:22 .\cookbook\templates\base.html:87
#: .\cookbook\templates\forms\edit_internal_recipe.html:219

@@ -410,7 +410,7 @@ msgstr "Դուրս գալ"

#: .\cookbook\templates\account\logout.html:11
msgid "Are you sure you want to sign out?"
msgstr "Համոզվա՞ծ եք, որ ցանկանում եք դուրս գալ:"
msgstr "Համոզվա՞ծ եք, որ ցանկանում եք դուրս գալ՞"

#: .\cookbook\templates\account\password_reset.html:5
#: .\cookbook\templates\account\password_reset_done.html:5
cookbook/locale/id/LC_MESSAGES/django.mo (new binary file, not shown)
cookbook/locale/id/LC_MESSAGES/django.po (new file, 2647 lines, diff too large to show)
(further locale files here: binary files not shown or diffs suppressed as too large)
Some files were not shown because too many files have changed in this diff.