Mirror of https://github.com/TandoorRecipes/recipes.git (synced 2025-12-25 11:19:39 -05:00)
Compare commits
784 Commits
(Commit list: 784 commits, from 82d1a75d80 to 0ae1ecd867 as listed. Only the abbreviated SHA1 column survived this export; the author, date, and commit message columns are empty.)

@@ -68,6 +68,10 @@ SHOPPING_MIN_AUTOSYNC_INTERVAL=5
# when unset: 1 (true) - this is temporary until an appropriate amount of time has passed for everyone to migrate
GUNICORN_MEDIA=0

# GUNICORN SERVER RELATED SETTINGS (see https://docs.gunicorn.org/en/stable/design.html#how-many-workers for recommended settings)
# GUNICORN_WORKERS=1
# GUNICORN_THREADS=1

# S3 Media settings: store mediafiles in s3 or any compatible storage backend (e.g. minio)
# as long as S3_ACCESS_KEY is not set S3 features are disabled
# S3_ACCESS_KEY=
@@ -77,6 +81,7 @@ GUNICORN_MEDIA=0
# S3_QUERYSTRING_AUTH=1 # default true, set to 0 to serve media from a public bucket without signed urls
# S3_QUERYSTRING_EXPIRE=3600 # number of seconds querystring are valid for
# S3_ENDPOINT_URL= # when using a custom endpoint like minio
# S3_CUSTOM_DOMAIN= # when using a CDN/proxy to S3 (see https://github.com/TandoorRecipes/recipes/issues/1943)

# Email Settings, see https://docs.djangoproject.com/en/3.2/ref/settings/#email-host
# Required for email confirmation and password reset (automatically activates if host is set)
@@ -92,7 +97,7 @@ GUNICORN_MEDIA=0
# ACCOUNT_EMAIL_SUBJECT_PREFIX=

# allow authentication via reverse proxy (e.g. authelia), leave off if you dont know what you are doing
# see docs for more information https://vabene1111.github.io/recipes/features/authentication/
# see docs for more information https://docs.tandoor.dev/features/authentication/
# when unset: 0 (false)
REVERSE_PROXY_AUTH=0

@@ -121,7 +126,7 @@ REVERSE_PROXY_AUTH=0
# ENABLE_METRICS=0

# allows you to setup OAuth providers
# see docs for more information https://vabene1111.github.io/recipes/features/authentication/
# see docs for more information https://docs.tandoor.dev/features/authentication/
# SOCIAL_PROVIDERS = allauth.socialaccount.providers.github, allauth.socialaccount.providers.nextcloud,

# Should a newly created user from a social provider get assigned to the default space and given permission by default ?
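
The new Gunicorn and S3 variables follow the template's existing convention: optional, commented out, and inert until set. As an illustrative sketch only (this is not Tandoor's actual settings.py; the django-storages names below are an assumption), a Django settings module would typically consume them like this:

```python
import os

# Hypothetical sketch: read the optional variables from the template above.
# S3 support stays off as long as S3_ACCESS_KEY is empty, mirroring the comment
# "as long as S3_ACCESS_KEY is not set S3 features are disabled".
S3_ACCESS_KEY = os.getenv('S3_ACCESS_KEY', '')

if S3_ACCESS_KEY:
    AWS_ACCESS_KEY_ID = S3_ACCESS_KEY                                       # assumed django-storages setting names
    AWS_QUERYSTRING_AUTH = bool(int(os.getenv('S3_QUERYSTRING_AUTH', 1)))    # 0 = public bucket, no signed urls
    AWS_QUERYSTRING_EXPIRE = int(os.getenv('S3_QUERYSTRING_EXPIRE', 3600))   # signed url lifetime in seconds
    AWS_S3_ENDPOINT_URL = os.getenv('S3_ENDPOINT_URL') or None               # e.g. a MinIO endpoint
    AWS_S3_CUSTOM_DOMAIN = os.getenv('S3_CUSTOM_DOMAIN') or None             # CDN/proxy in front of the bucket
```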

.github/workflows/ci.yml | 11 (vendored)
@@ -1,6 +1,6 @@
name: Continuous Integration

on: [push]
on: [push, pull_request]

jobs:
build:
@@ -14,13 +14,13 @@ jobs:
steps:
- uses: actions/checkout@v1
- name: Set up Python 3.10
uses: actions/setup-python@v1
uses: actions/setup-python@v4
with:
python-version: '3.10'
# Build Vue frontend
- uses: actions/setup-node@v2
- uses: actions/setup-node@v3
with:
node-version: '14'
node-version: '16'
- name: Install Vue dependencies
working-directory: ./vue
run: yarn install
@@ -29,7 +29,8 @@ jobs:
run: yarn build
- name: Install Django dependencies
run: |
sudo apt-get install -y libsasl2-dev python-dev libldap2-dev libssl-dev
sudo apt-get -y update
sudo apt-get install -y libsasl2-dev python3-dev libldap2-dev libssl-dev
python -m pip install --upgrade pip
pip install -r requirements.txt
python3 manage.py collectstatic --noinput

.github/workflows/docker-publish-beta-raspi.yml | 48 (vendored, new file)
@@ -0,0 +1,48 @@
name: publish beta raspi image docker
on:
push:
branches:
- 'beta'
jobs:
build:
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
# Update Version number
- name: Update version file
uses: DamianReeves/write-file-action@v1.0
with:
path: recipes/version.py
contents: |
VERSION_NUMBER = 'beta'
BUILD_REF = '${{ github.sha }}'
write-mode: overwrite
# Build Vue frontend
- uses: actions/setup-node@v2
with:
node-version: '14'
- name: Install dependencies
working-directory: ./vue
run: yarn install
- name: Build dependencies
working-directory: ./vue
run: yarn build
# Build container
- name: Build and publish image
uses: ilteoood/docker_buildx@master
with:
publish: true
imageName: vabene1111/recipes
tag: beta-raspi
dockerFile: Dockerfile-raspi
platform: linux/arm/v7
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}
# Send discord notification
- name: Discord notification
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_BETA_WEBHOOK }}
uses: Ilshidur/action-discord@0.3.2
with:
args: '🚀 The BETA Image has been updated! 🥳'

.github/workflows/docker-publish-beta.yml | 1 (vendored)
@@ -35,6 +35,7 @@ jobs:
publish: true
imageName: vabene1111/recipes
tag: beta
platform: linux/amd64,linux/arm64
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}
# Send discord notification

.github/workflows/docker-publish-latest-raspi.yml | 45 (vendored, new file)
@@ -0,0 +1,45 @@
name: publish latest raspi image docker
on:
push:
tags:
- '*'

jobs:
build:
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: Get version number
id: get_version
run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}-raspi
# Update Version number
- name: Update version file
uses: DamianReeves/write-file-action@v1.0
with:
path: recipes/version.py
contents: |
VERSION_NUMBER = '${{ steps.get_version.outputs.VERSION }}-raspi'
BUILD_REF = '${{ github.sha }}'
write-mode: overwrite
# Build Vue frontend
- uses: actions/setup-node@v2
with:
node-version: '14'
- name: Install dependencies
working-directory: ./vue
run: yarn install
- name: Build dependencies
working-directory: ./vue
run: yarn build
# Build container
- name: Build and publish image
uses: ilteoood/docker_buildx@master
with:
publish: true
imageName: vabene1111/recipes
dockerFile: Dockerfile-raspi
platform: linux/arm/v7
tag: latest-raspi
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}

.github/workflows/docker-publish-latest.yml | 1 (vendored)
@@ -38,6 +38,7 @@ jobs:
with:
publish: true
imageName: vabene1111/recipes
platform: linux/amd64,linux/arm64
tag: latest
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}

.github/workflows/docker-publish-release-raspi.yml | 47 (vendored, new file)
@@ -0,0 +1,47 @@
name: publish tagged raspi release docker

on:
release:
types: [published]

jobs:
build:
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
name: Build image job
steps:
- name: Checkout master
uses: actions/checkout@master
- name: Get version number
id: get_version
run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}
# Update Version number
- name: Update version file
uses: DamianReeves/write-file-action@v1.0
with:
path: recipes/version.py
contents: |
VERSION_NUMBER = '${{ steps.get_version.outputs.VERSION }}'
BUILD_REF = '${{ github.sha }}'
write-mode: overwrite
# Build Vue frontend
- uses: actions/setup-node@v2
with:
node-version: '14'
- name: Install dependencies
working-directory: ./vue
run: yarn install
- name: Build dependencies
working-directory: ./vue
run: yarn build
# Build container
- name: Build and publish image
uses: ilteoood/docker_buildx@master
with:
publish: true
imageName: vabene1111/recipes
dockerFile: Dockerfile-raspi
platform: linux/arm/v7
tag: ${{ steps.get_version.outputs.VERSION }}-raspi
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}

.github/workflows/docker-publish-release.yml | 1 (vendored)
@@ -40,6 +40,7 @@ jobs:
with:
publish: true
imageName: vabene1111/recipes
platform: linux/amd64,linux/arm64
tag: ${{ steps.get_version.outputs.VERSION }}
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}

.gitignore | 1 (vendored)
@@ -84,3 +84,4 @@ cookbook/static/vue
vue/webpack-stats.json
cookbook/templates/sw.js
.prettierignore
vue/.yarn

.idea/dictionaries/vabene1111_PC.xml | 1 (generated)
@@ -6,6 +6,7 @@
<w>csrftoken</w>
<w>gunicorn</w>
<w>ical</w>
<w>invitelink</w>
<w>mealie</w>
<w>pepperplate</w>
<w>safron</w>

.idea/recipes.iml | 2 (generated)
@@ -18,7 +18,7 @@
<excludeFolder url="file://$MODULE_DIR$/staticfiles" />
<excludeFolder url="file://$MODULE_DIR$/venv" />
</content>
<orderEntry type="jdk" jdkName="Python 3.9 (recipes)" jdkType="Python SDK" />
<orderEntry type="jdk" jdkName="Python 3.11 (recipes)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="TemplatesService">

@@ -15,7 +15,7 @@ WORKDIR /opt/recipes

COPY requirements.txt ./

RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev libressl-dev libffi-dev cargo openssl-dev openldap-dev python3-dev && \
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev git && \
echo -n "INPUT ( libldap.so )" > /usr/lib/libldap_r.so && \
python -m venv venv && \
/opt/recipes/venv/bin/python -m pip install --upgrade pip && \

Dockerfile-raspi | 33 (new file)
@@ -0,0 +1,33 @@
# builds of cryptography for raspberry pi (or better arm v7) fail for some
FROM python:3.9-alpine3.15

#Install all dependencies.
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev py-cryptography openldap gcompat

#Print all logs without buffering it.
ENV PYTHONUNBUFFERED 1

#This port will be used by gunicorn.
EXPOSE 8080

#Create app dir and install requirements.
RUN mkdir /opt/recipes
WORKDIR /opt/recipes

COPY requirements.txt ./
RUN \
if [ `apk --print-arch` = "armv7" ]; then \
printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf ; \
fi
RUN apk add --no-cache --virtual .build-deps gcc musl-dev zlib-dev jpeg-dev libwebp-dev python3-dev git && \
echo -n "INPUT ( libldap.so )" > /usr/lib/libldap_r.so && \
python -m venv venv && \
/opt/recipes/venv/bin/python -m pip install --upgrade pip && \
venv/bin/pip install wheel==0.37.1 && \
venv/bin/pip install -r requirements.txt --no-cache-dir --no-binary=Pillow && \
apk --purge del .build-deps

#Copy project and execute it.
COPY . ./
RUN chmod +x boot.sh
ENTRYPOINT ["/opt/recipes/boot.sh"]

README.md | 12
@@ -61,9 +61,17 @@ a public page.

Documentation can be found [here](https://docs.tandoor.dev/).

## Contributing
## Support our work
Tandoor is developed by volunteers in their free time just because its fun. That said earning
some money with the project allows us to spend more time on it and thus make improvements we otherwise couldn't.
Because of that there are several ways you can support us

You can help out with the ongoing development by looking for potential bugs in our code base, or by contributing new features. We are always welcoming new pull requests containing bug fixes, refactors and new features. We have a list of tasks and bugs on our issue tracker on Github. Please comment on issues if you want to contribute with, to avoid duplicating effort.
- **GitHub Sponsors** You can sponsor contributors of this project on GitHub: [vabene1111](https://github.com/sponsors/vabene1111)
- **Host at Hetzner** We have been very happy customers of Hetzner for multiple years for all of our projects. If you want to get into self-hosting or are tired of the expensive big providers, their cloud servers are a great place to get started. When you sign up via our [referral link](https://hetzner.cloud/?ref=ISdlrLmr9kGj) you will get 20€ worth of cloud credits and we get a small kickback too.
- **Let us host for you** We are offering a [hosted version](https://app.tandoor.dev) where all profits support us and the development of tandoor (currently only available in germany).

## Contributing
Contributions are welcome but please read [this](https://docs.tandoor.dev/contribute/#contributing-code) **BEFORE** contributing anything!

## Your Feedback

@@ -6,5 +6,4 @@ Since this software is still considered beta/WIP support is always only given fo

## Reporting a Vulnerability

Please open a normal public issue if you have any security related concerns. If you feel like the issue should not be discussed in
public just open a generic issue and we will discuss further communication there (since GitHub does not allow everyone to create a security advisory :/).
Please use GitHub Security Advisories to report any kind of security vulnerabilities.

boot.sh | 4
@@ -2,6 +2,8 @@
source venv/bin/activate

TANDOOR_PORT="${TANDOOR_PORT:-8080}"
GUNICORN_WORKERS="${GUNICORN_WORKERS:-3}"
GUNICORN_THREADS="${GUNICORN_THREADS:-2}"
NGINX_CONF_FILE=/opt/recipes/nginx/conf.d/Recipes.conf

display_warning() {
@@ -63,4 +65,4 @@ echo "Done"

chmod -R 755 /opt/recipes/mediafiles

exec gunicorn -b :$TANDOOR_PORT --access-logfile - --error-logfile - --log-level INFO recipes.wsgi
exec gunicorn -b :$TANDOOR_PORT --workers $GUNICORN_WORKERS --threads $GUNICORN_THREADS --access-logfile - --error-logfile - --log-level INFO recipes.wsgi
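
boot.sh now defaults to 3 Gunicorn workers and 2 threads and forwards both to the exec line. The Gunicorn design docs linked from the .env template suggest roughly (2 x CPU cores) + 1 workers; a small illustrative helper for picking a value (this is a sketch, not part of the boot script):

```python
import multiprocessing

def recommended_gunicorn_workers(cores=None):
    """Gunicorn's rule of thumb from its design docs: (2 x cores) + 1."""
    cores = cores or multiprocessing.cpu_count()
    return 2 * cores + 1

if __name__ == '__main__':
    # A 2-core container would get 5 workers; the fixed default of 3 workers /
    # 2 threads in boot.sh is a conservative choice for small hosts.
    print(recommended_gunicorn_workers())
```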

@@ -15,7 +15,7 @@ from .models import (BookmarkletImport, Comment, CookLog, Food, FoodInheritField
Recipe, RecipeBook, RecipeBookEntry, RecipeImport, SearchPreference, ShareLink,
ShoppingList, ShoppingListEntry, ShoppingListRecipe, Space, Step, Storage,
Supermarket, SupermarketCategory, SupermarketCategoryRelation, Sync, SyncLog,
TelegramBot, Unit, UserFile, UserPreference, ViewLog, Automation)
TelegramBot, Unit, UserFile, UserPreference, ViewLog, Automation, UserSpace)


class CustomUserAdmin(UserAdmin):
@@ -32,45 +32,11 @@ admin.site.unregister(Group)
@admin.action(description='Delete all data from a space')
def delete_space_action(modeladmin, request, queryset):
for space in queryset:
CookLog.objects.filter(space=space).delete()
ViewLog.objects.filter(space=space).delete()
ImportLog.objects.filter(space=space).delete()
BookmarkletImport.objects.filter(space=space).delete()

Comment.objects.filter(recipe__space=space).delete()
Keyword.objects.filter(space=space).delete()
Ingredient.objects.filter(space=space).delete()
Food.objects.filter(space=space).delete()
Unit.objects.filter(space=space).delete()
Step.objects.filter(space=space).delete()
NutritionInformation.objects.filter(space=space).delete()
RecipeBookEntry.objects.filter(book__space=space).delete()
RecipeBook.objects.filter(space=space).delete()
MealType.objects.filter(space=space).delete()
MealPlan.objects.filter(space=space).delete()
ShareLink.objects.filter(space=space).delete()
Recipe.objects.filter(space=space).delete()

RecipeImport.objects.filter(space=space).delete()
SyncLog.objects.filter(sync__space=space).delete()
Sync.objects.filter(space=space).delete()
Storage.objects.filter(space=space).delete()

ShoppingListEntry.objects.filter(shoppinglist__space=space).delete()
ShoppingListRecipe.objects.filter(shoppinglist__space=space).delete()
ShoppingList.objects.filter(space=space).delete()

SupermarketCategoryRelation.objects.filter(supermarket__space=space).delete()
SupermarketCategory.objects.filter(space=space).delete()
Supermarket.objects.filter(space=space).delete()

InviteLink.objects.filter(space=space).delete()
UserFile.objects.filter(space=space).delete()
Automation.objects.filter(space=space).delete()
space.safe_delete()


class SpaceAdmin(admin.ModelAdmin):
list_display = ('name', 'created_by', 'max_recipes', 'max_users', 'max_file_storage_mb', 'allow_sharing')
list_display = ('name', 'created_by', 'max_recipes', 'max_users', 'max_file_storage_mb', 'allow_sharing', 'use_plural')
search_fields = ('name', 'created_by__username')
list_filter = ('max_recipes', 'max_users', 'max_file_storage_mb', 'allow_sharing')
date_hierarchy = 'created_at'
@@ -80,15 +46,23 @@ class SpaceAdmin(admin.ModelAdmin):
admin.site.register(Space, SpaceAdmin)


class UserSpaceAdmin(admin.ModelAdmin):
list_display = ('user', 'space',)
search_fields = ('user__username', 'space__name',)


admin.site.register(UserSpace, UserSpaceAdmin)


class UserPreferenceAdmin(admin.ModelAdmin):
list_display = ('name', 'space', 'theme', 'nav_color', 'default_page', 'search_style',) # TODO add new fields
search_fields = ('user__username', 'space__name')
list_filter = ('theme', 'nav_color', 'default_page', 'search_style')
list_display = ('name', 'theme', 'nav_color', 'default_page',)
search_fields = ('user__username',)
list_filter = ('theme', 'nav_color', 'default_page',)
date_hierarchy = 'created_at'

@staticmethod
def name(obj):
return obj.user.get_user_name()
return obj.user.get_user_display_name()


admin.site.register(UserPreference, UserPreferenceAdmin)
@@ -101,7 +75,7 @@ class SearchPreferenceAdmin(admin.ModelAdmin):

@staticmethod
def name(obj):
return obj.user.get_user_name()
return obj.user.get_user_display_name()


admin.site.register(SearchPreference, SearchPreferenceAdmin)
@@ -203,7 +177,7 @@ class RecipeAdmin(admin.ModelAdmin):

@staticmethod
def created_by(obj):
return obj.created_by.get_user_name()
return obj.created_by.get_user_display_name()

if settings.DATABASES['default']['ENGINE'] in ['django.db.backends.postgresql_psycopg2', 'django.db.backends.postgresql']:
actions = [rebuild_index]
@@ -242,7 +216,7 @@ class CommentAdmin(admin.ModelAdmin):

@staticmethod
def name(obj):
return obj.created_by.get_user_name()
return obj.created_by.get_user_display_name()


admin.site.register(Comment, CommentAdmin)
@@ -261,7 +235,7 @@ class RecipeBookAdmin(admin.ModelAdmin):

@staticmethod
def user_name(obj):
return obj.created_by.get_user_name()
return obj.created_by.get_user_display_name()


admin.site.register(RecipeBook, RecipeBookAdmin)
@@ -279,7 +253,7 @@ class MealPlanAdmin(admin.ModelAdmin):

@staticmethod
def user(obj):
return obj.created_by.get_user_name()
return obj.created_by.get_user_display_name()


admin.site.register(MealPlan, MealPlanAdmin)

@@ -1,62 +0,0 @@
import django_filters
from django.conf import settings
from django.contrib.postgres.search import TrigramSimilarity
from django.db.models import Q
from django.utils.translation import gettext as _
from django_scopes import scopes_disabled

from cookbook.forms import MultiSelectWidget
from cookbook.models import Food, Keyword, Recipe

with scopes_disabled():
class RecipeFilter(django_filters.FilterSet):
name = django_filters.CharFilter(method='filter_name')
keywords = django_filters.ModelMultipleChoiceFilter(
queryset=Keyword.objects.none(),
widget=MultiSelectWidget,
method='filter_keywords'
)
foods = django_filters.ModelMultipleChoiceFilter(
queryset=Food.objects.none(),
widget=MultiSelectWidget,
method='filter_foods',
label=_('Ingredients')
)

def __init__(self, data=None, *args, **kwargs):
space = kwargs.pop('space')
super().__init__(data, *args, **kwargs)
self.filters['foods'].queryset = Food.objects.filter(space=space).all()
self.filters['keywords'].queryset = Keyword.objects.filter(space=space).all()

@staticmethod
def filter_keywords(queryset, name, value):
if not name == 'keywords':
return queryset
for x in value:
queryset = queryset.filter(keywords=x)
return queryset

@staticmethod
def filter_foods(queryset, name, value):
if not name == 'foods':
return queryset
for x in value:
queryset = queryset.filter(steps__ingredients__food__name=x).distinct()
return queryset

@staticmethod
def filter_name(queryset, name, value):
if not name == 'name':
return queryset
if settings.DATABASES['default']['ENGINE'] in ['django.db.backends.postgresql_psycopg2',
'django.db.backends.postgresql']:
queryset = queryset.annotate(similarity=TrigramSimilarity('name', value), ).filter(
Q(similarity__gt=0.1) | Q(name__unaccent__icontains=value)).order_by('-similarity')
else:
queryset = queryset.filter(name__icontains=value)
return queryset

class Meta:
model = Recipe
fields = ['name', 'keywords', 'foods', 'internal']

@@ -37,19 +37,15 @@ class UserPreferenceForm(forms.ModelForm):
prefix = 'preference'

def __init__(self, *args, **kwargs):
if x := kwargs.get('instance', None):
space = x.space
else:
space = kwargs.pop('space')
space = kwargs.pop('space')
super().__init__(*args, **kwargs)
self.fields['plan_share'].queryset = User.objects.filter(userpreference__space=space).all()
self.fields['plan_share'].queryset = User.objects.filter(userspace__space=space).all()

class Meta:
model = UserPreference
fields = (
'default_unit', 'use_fractions', 'use_kj', 'theme', 'nav_color',
'sticky_navbar', 'default_page', 'show_recent', 'search_style',
'plan_share', 'ingredient_decimals', 'comments', 'left_handed',
'sticky_navbar', 'default_page', 'plan_share', 'ingredient_decimals', 'comments', 'left_handed',
)

labels = {
@@ -60,8 +56,6 @@ class UserPreferenceForm(forms.ModelForm):
'nav_color': _('Navbar color'),
'sticky_navbar': _('Sticky navbar'),
'default_page': _('Default page'),
'show_recent': _('Show recent recipes'),
'search_style': _('Search style'),
'plan_share': _('Plan sharing'),
'ingredient_decimals': _('Ingredient decimal places'),
'shopping_auto_sync': _('Shopping list auto sync period'),
@@ -71,23 +65,21 @@ class UserPreferenceForm(forms.ModelForm):

help_texts = {
'nav_color': _('Color of the top navigation bar. Not all colors work with all themes, just try them out!'),
# noqa: E501
'default_unit': _('Default Unit to be used when inserting a new ingredient into a recipe.'), # noqa: E501

'default_unit': _('Default Unit to be used when inserting a new ingredient into a recipe.'),
'use_fractions': _(
'Enables support for fractions in ingredient amounts (e.g. convert decimals to fractions automatically)'),
# noqa: E501
'use_kj': _('Display nutritional energy amounts in joules instead of calories'), # noqa: E501

'use_kj': _('Display nutritional energy amounts in joules instead of calories'),
'plan_share': _('Users with whom newly created meal plans should be shared by default.'),
'shopping_share': _('Users with whom to share shopping lists.'),
# noqa: E501
'show_recent': _('Show recently viewed recipes on search page.'), # noqa: E501
'ingredient_decimals': _('Number of decimals to round ingredients.'), # noqa: E501
'comments': _('If you want to be able to create and see comments underneath recipes.'), # noqa: E501
'ingredient_decimals': _('Number of decimals to round ingredients.'),
'comments': _('If you want to be able to create and see comments underneath recipes.'),
'shopping_auto_sync': _(
'Setting to 0 will disable auto sync. When viewing a shopping list the list is updated every set seconds to sync changes someone else might have made. Useful when shopping with multiple people but might use a little bit ' # noqa: E501
'of mobile data. If lower than instance limit it is reset when saving.' # noqa: E501
'Setting to 0 will disable auto sync. When viewing a shopping list the list is updated every set seconds to sync changes someone else might have made. Useful when shopping with multiple people but might use a little bit '
'of mobile data. If lower than instance limit it is reset when saving.'
),
'sticky_navbar': _('Makes the navbar stick to the top of the page.'), # noqa: E501
'sticky_navbar': _('Makes the navbar stick to the top of the page.'),
'mealplan_autoadd_shopping': _('Automatically add meal plan ingredients to shopping list.'),
'mealplan_autoexclude_onhand': _('Exclude ingredients that are on hand.'),
'left_handed': _('Will optimize the UI for use with your left hand.')
@@ -162,6 +154,7 @@ class ImportExportBase(forms.Form):
COOKBOOKAPP = 'COOKBOOKAPP'
COPYMETHAT = 'COPYMETHAT'
COOKMATE = 'COOKMATE'
REZEPTSUITEDE = 'REZEPTSUITEDE'
PDF = 'PDF'

type = forms.ChoiceField(choices=(
@@ -170,7 +163,7 @@ class ImportExportBase(forms.Form):
(PEPPERPLATE, 'Pepperplate'), (RECETTETEK, 'RecetteTek'), (RECIPESAGE, 'Recipe Sage'), (DOMESTICA, 'Domestica'),
(MEALMASTER, 'MealMaster'), (REZKONV, 'RezKonv'), (OPENEATS, 'Openeats'), (RECIPEKEEPER, 'Recipe Keeper'),
(PLANTOEAT, 'Plantoeat'), (COOKBOOKAPP, 'CookBookApp'), (COPYMETHAT, 'CopyMeThat'), (PDF, 'PDF'), (MELARECIPES, 'Melarecipes'),
(COOKMATE, 'Cookmate')
(COOKMATE, 'Cookmate'), (REZEPTSUITEDE, 'Recipesuite.de')
))


@@ -339,9 +332,9 @@ class MealPlanForm(forms.ModelForm):
)

help_texts = {
'shared': _('You can list default users to share recipes with in the settings.'), # noqa: E501
'shared': _('You can list default users to share recipes with in the settings.'),
'note': _('You can use markdown to format this field. See the <a href="/docs/markdown/">docs here</a>')
# noqa: E501

}

widgets = {
@@ -496,8 +489,8 @@ class ShoppingPreferenceForm(forms.ModelForm):
help_texts = {
'shopping_share': _('Users will see all items you add to your shopping list. They must add you to see items on their list.'),
'shopping_auto_sync': _(
'Setting to 0 will disable auto sync. When viewing a shopping list the list is updated every set seconds to sync changes someone else might have made. Useful when shopping with multiple people but might use a little bit ' # noqa: E501
'of mobile data. If lower than instance limit it is reset when saving.' # noqa: E501
'Setting to 0 will disable auto sync. When viewing a shopping list the list is updated every set seconds to sync changes someone else might have made. Useful when shopping with multiple people but might use a little bit '
'of mobile data. If lower than instance limit it is reset when saving.'
),
'mealplan_autoadd_shopping': _('Automatically add meal plan ingredients to shopping list.'),
'mealplan_autoinclude_related': _('When adding a meal plan to the shopping list (manually or automatically), include all related recipes.'),
@@ -541,11 +534,13 @@ class SpacePreferenceForm(forms.ModelForm):
class Meta:
model = Space

fields = ('food_inherit', 'reset_food_inherit', 'show_facet_count')
fields = ('food_inherit', 'reset_food_inherit', 'show_facet_count', 'use_plural')

help_texts = {
'food_inherit': _('Fields on food that should be inherited by default.'),
'show_facet_count': _('Show recipe counts on search filters'), }
'show_facet_count': _('Show recipe counts on search filters'),
'use_plural': _('Use the plural form for units and food inside this space.'),
}

widgets = {
'food_inherit': MultiSelectWidget

@@ -14,7 +14,7 @@ class AllAuthCustomAdapter(DefaultAccountAdapter):

def is_open_for_signup(self, request):
"""
Whether to allow sign ups.
Whether to allow sign-ups.
"""
signup_token = False
if 'signup_token' in request.session and InviteLink.objects.filter(valid_until__gte=datetime.datetime.today(), used_by=None, uuid=request.session['signup_token']).exists():
@@ -31,7 +31,10 @@ class AllAuthCustomAdapter(DefaultAccountAdapter):
default = datetime.datetime.now()
c = caches['default'].get_or_set(email, default, timeout=360)
if c == default:
super(AllAuthCustomAdapter, self).send_mail(template_prefix, email, context)
try:
super(AllAuthCustomAdapter, self).send_mail(template_prefix, email, context)
except Exception: # dont fail signup just because confirmation mail could not be send
pass
else:
messages.add_message(self.request, messages.ERROR, _('In order to prevent spam, the requested email was not send. Please wait a few minutes and try again.'))
else:
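
Two behavioural changes in the adapter: outgoing confirmation mails are throttled per address through a short-lived cache entry, and send_mail failures no longer abort signup. The throttle can be sketched on its own (an illustration only; it reuses Django's cache API as the diff does, but configures an in-memory cache so the snippet runs outside the project):

```python
import datetime

from django.conf import settings

# Stand-alone illustration: the real adapter uses the project's configured cache.
if not settings.configured:
    settings.configure(CACHES={'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}})

from django.core.cache import caches

def may_send_mail(email):
    """Allow at most one mail per address per 360 seconds, like the adapter above."""
    marker = datetime.datetime.now()
    cached = caches['default'].get_or_set(email, marker, timeout=360)
    # get_or_set returns our own marker only when nothing was cached yet.
    return cached == marker

print(may_send_mail('user@example.com'))  # True: the first mail goes out
print(may_send_mail('user@example.com'))  # False: throttled for the next few minutes
```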

@@ -10,4 +10,5 @@ def context_settings(request):
'TERMS_URL': settings.TERMS_URL,
'PRIVACY_URL': settings.PRIVACY_URL,
'IMPRINT_URL': settings.IMPRINT_URL,
'SHOPPING_MIN_AUTOSYNC_INTERVAL': settings.SHOPPING_MIN_AUTOSYNC_INTERVAL,
}

@@ -28,7 +28,7 @@ class IngredientParser:
self.food_aliases = c
caches['default'].touch(FOOD_CACHE_KEY, 30)
else:
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.FOOD_ALIAS).only('param_1', 'param_2').all():
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.FOOD_ALIAS).only('param_1', 'param_2').order_by('order').all():
self.food_aliases[a.param_1] = a.param_2
caches['default'].set(FOOD_CACHE_KEY, self.food_aliases, 30)

@@ -37,7 +37,7 @@ class IngredientParser:
self.unit_aliases = c
caches['default'].touch(UNIT_CACHE_KEY, 30)
else:
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.UNIT_ALIAS).only('param_1', 'param_2').all():
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.UNIT_ALIAS).only('param_1', 'param_2').order_by('order').all():
self.unit_aliases[a.param_1] = a.param_2
caches['default'].set(UNIT_CACHE_KEY, self.unit_aliases, 30)
else:
@@ -59,7 +59,7 @@ class IngredientParser:
except KeyError:
return food
else:
if automation := Automation.objects.filter(space=self.request.space, type=Automation.FOOD_ALIAS, param_1=food, disabled=False).first():
if automation := Automation.objects.filter(space=self.request.space, type=Automation.FOOD_ALIAS, param_1=food, disabled=False).order_by('order').first():
return automation.param_2
return food

@@ -78,7 +78,7 @@ class IngredientParser:
except KeyError:
return unit
else:
if automation := Automation.objects.filter(space=self.request.space, type=Automation.UNIT_ALIAS, param_1=unit, disabled=False).first():
if automation := Automation.objects.filter(space=self.request.space, type=Automation.UNIT_ALIAS, param_1=unit, disabled=False).order_by('order').first():
return automation.param_2
return unit

@@ -126,6 +126,8 @@ class IngredientParser:
amount = 0
unit = None
note = ''
if x.strip() == '':
return amount, unit, note

did_check_frac = False
end = 0
@@ -221,8 +223,8 @@ class IngredientParser:

# some people/languages put amount and unit at the end of the ingredient string
# if something like this is detected move it to the beginning so the parser can handle it
if len(ingredient) < 1000 and re.search(r'^([A-z])+(.)*[1-9](\d)*\s([A-z])+', ingredient):
match = re.search(r'[1-9](\d)*\s([A-z])+', ingredient)
if len(ingredient) < 1000 and re.search(r'^([^\W\d_])+(.)*[1-9](\d)*\s*([^\W\d_])+', ingredient):
match = re.search(r'[1-9](\d)*\s*([^\W\d_])+', ingredient)
print(f'reording from {ingredient} to {ingredient[match.start():match.end()] + " " + ingredient.replace(ingredient[match.start():match.end()], "")}')
ingredient = ingredient[match.start():match.end()] + ' ' + ingredient.replace(ingredient[match.start():match.end()], '')

@@ -232,6 +234,17 @@ class IngredientParser:
match = re.search('\((.[^\(])+\)', ingredient)
ingredient = ingredient[:match.start()] + ingredient[match.end():] + ' ' + ingredient[match.start():match.end()]

# leading spaces before commas result in extra tokens, clean them out
ingredient = ingredient.replace(' ,', ',')

# handle "(from) - (to)" amounts by using the minimum amount and adding the range to the description
# "10.5 - 200 g XYZ" => "100 g XYZ (10.5 - 200)"
ingredient = re.sub("^(\d+|\d+[\\.,]\d+) - (\d+|\d+[\\.,]\d+) (.*)", "\\1 \\3 (\\1 - \\2)", ingredient)

# if amount and unit are connected add space in between
if re.match('([0-9])+([A-z])+\s', ingredient):
ingredient = re.sub(r'(?<=([a-z])|\d)(?=(?(1)\d|[a-z]))', ' ', ingredient)

tokens = ingredient.split() # split at each space into tokens
if len(tokens) == 1:
# there only is one argument, that must be the food
@@ -303,4 +316,7 @@ class IngredientParser:
note = food + ' ' + note
food = food[:Food._meta.get_field('name').max_length]

if len(food.strip()) == 0:
raise ValueError(f'Error parsing string {ingredient}, food cannot be empty')

return amount, unit, food, note[:Ingredient._meta.get_field('note').max_length].strip()
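
Two of the new pre-processing steps are plain regular-expression rewrites: a "(from) - (to)" range collapses to its lower bound with the full range kept as a note, and a missing space between amount and unit is inserted. Running the substitutions from the hunk above on sample strings (the patterns are copied from the diff, rewritten as raw strings; the ingredient strings are made up):

```python
import re

ingredient = '10.5 - 200 g flour'
# Collapse "from - to" amounts to the lower bound; keep the full range as a note.
ingredient = re.sub(r'^(\d+|\d+[\.,]\d+) - (\d+|\d+[\.,]\d+) (.*)', r'\1 \3 (\1 - \2)', ingredient)
print(ingredient)  # 10.5 g flour (10.5 - 200)

connected = '100g sugar'
# If amount and unit are written together, insert a space between them.
if re.match(r'([0-9])+([A-z])+\s', connected):
    connected = re.sub(r'(?<=([a-z])|\d)(?=(?(1)\d|[a-z]))', ' ', connected)
print(connected)  # 100 g sugar
```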

@@ -35,6 +35,7 @@ Negative examples:
u'<p>del.icio.us</p>'

"""
from xml.etree.ElementTree import Element

import markdown

@@ -64,7 +65,7 @@ class UrlizePattern(markdown.inlinepatterns.Pattern):
else:
url = 'http://' + url

el = markdown.util.etree.Element("a")
el = Element("a")
el.set('href', url)
el.text = markdown.util.AtomicString(text)
return el
@@ -73,9 +74,9 @@ class UrlizePattern(markdown.inlinepatterns.Pattern):
class UrlizeExtension(markdown.Extension):
""" Urlize Extension for Python-Markdown. """

def extendMarkdown(self, md, md_globals):
def extendMarkdown(self, md):
""" Replace autolink with UrlizePattern """
md.inlinePatterns['autolink'] = UrlizePattern(URLIZE_RE, md)
md.inlinePatterns.register(UrlizePattern(URLIZE_RE, md), 'autolink', 120)


def makeExtension(*args, **kwargs):
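
The extension is ported to the current Python-Markdown API: `Element` now comes from `xml.etree.ElementTree`, `extendMarkdown` drops the `md_globals` argument, and the pattern is registered with an explicit priority instead of dictionary assignment. A hedged usage sketch (the import path is an assumption; it is not shown in this diff):

```python
import markdown

# Assumed module path for the extension shown above.
from cookbook.helper.mdx_urlize import UrlizeExtension

html = markdown.markdown(
    'Docs live at https://docs.tandoor.dev/',
    extensions=[UrlizeExtension()],
)
print(html)  # the bare URL comes back wrapped in an <a href="..."> tag
```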
@@ -1,15 +1,19 @@
|
||||
import inspect
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib import messages
|
||||
from django.contrib.auth.decorators import user_passes_test
|
||||
from django.core.cache import caches
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.cache import cache
|
||||
from django.core.exceptions import ValidationError, ObjectDoesNotExist
|
||||
from django.http import HttpResponseRedirect
|
||||
from django.urls import reverse, reverse_lazy
|
||||
from django.utils.translation import gettext as _
|
||||
from oauth2_provider.contrib.rest_framework import TokenHasScope, TokenHasReadWriteScope
|
||||
from oauth2_provider.models import AccessToken
|
||||
from rest_framework import permissions
|
||||
from rest_framework.permissions import SAFE_METHODS
|
||||
|
||||
from cookbook.models import ShareLink, Recipe, UserPreference
|
||||
from cookbook.models import ShareLink, Recipe, UserSpace
|
||||
|
||||
|
||||
def get_allowed_groups(groups_required):
|
||||
@@ -27,11 +31,12 @@ def get_allowed_groups(groups_required):
|
||||
return groups_allowed
|
||||
|
||||
|
||||
def has_group_permission(user, groups):
|
||||
def has_group_permission(user, groups, no_cache=False):
|
||||
"""
|
||||
Tests if a given user is member of a certain group (or any higher group)
|
||||
Superusers always bypass permission checks.
|
||||
Unauthenticated users can't be member of any group thus always return false.
|
||||
:param no_cache: (optional) do not return cached results, always check agains DB
|
||||
:param user: django auth user object
|
||||
:param groups: list or tuple of groups the user should be checked for
|
||||
:return: True if user is in allowed groups, false otherwise
|
||||
@@ -39,10 +44,23 @@ def has_group_permission(user, groups):
|
||||
if not user.is_authenticated:
|
||||
return False
|
||||
groups_allowed = get_allowed_groups(groups)
|
||||
|
||||
CACHE_KEY = hash((inspect.stack()[0][3], (user.pk, user.username, user.email), groups_allowed))
|
||||
if not no_cache:
|
||||
cached_result = cache.get(CACHE_KEY, default=None)
|
||||
if cached_result is not None:
|
||||
return cached_result
|
||||
|
||||
result = False
|
||||
if user.is_authenticated:
|
||||
if bool(user.groups.filter(name__in=groups_allowed)):
|
||||
return True
|
||||
return False
|
||||
if user_space := user.userspace_set.filter(active=True):
|
||||
if len(user_space) != 1:
|
||||
result = False # do not allow any group permission if more than one space is active, needs to be changed when simultaneous multi-space-tenancy is added
|
||||
elif bool(user_space.first().groups.filter(name__in=groups_allowed)):
|
||||
result = True
|
||||
|
||||
cache.set(CACHE_KEY, result, timeout=10)
|
||||
return result
|
||||
|
||||
|
||||
def is_object_owner(user, obj):
@@ -50,7 +68,6 @@ def is_object_owner(user, obj):
    Tests if a given user is the owner of a given object
    test performed by checking the user against the object's user
    and created_by field (if it exists)
    superusers bypass all checks, unauthenticated users cannot own anything
    :param user django auth user object
    :param obj any object that should be tested
    :return: true if user is owner of the object, false otherwise
@@ -63,11 +80,25 @@ def is_object_owner(user, obj):
        return False


def is_space_owner(user, obj):
    """
    Tests if a given user is the owner of the space of a given object
    :param user django auth user object
    :param obj any object that should be tested
    :return: true if user is owner of the object's space, false otherwise
    """
    if not user.is_authenticated:
        return False
    try:
        return obj.get_space().get_owner() == user
    except Exception:
        return False

def is_object_shared(user, obj):
    """
    Tests if a given object is shared with a given user
    test performed by checking the user against the object's shared table
    superusers bypass all checks, unauthenticated users cannot have anything shared with them
    :param user django auth user object
    :param obj any object that should be tested
    :return: true if the object is shared with the user, false otherwise
@@ -88,15 +119,15 @@ def share_link_valid(recipe, share):
|
||||
"""
|
||||
try:
|
||||
CACHE_KEY = f'recipe_share_{recipe.pk}_{share}'
|
||||
if c := caches['default'].get(CACHE_KEY, False):
|
||||
if c := cache.get(CACHE_KEY, False):
|
||||
return c
|
||||
|
||||
if link := ShareLink.objects.filter(recipe=recipe, uuid=share, abuse_blocked=False).first():
|
||||
if 0 < settings.SHARING_LIMIT < link.request_count:
|
||||
if 0 < settings.SHARING_LIMIT < link.request_count and not link.space.no_sharing_limit:
|
||||
return False
|
||||
link.request_count += 1
|
||||
link.save()
|
||||
caches['default'].set(CACHE_KEY, True, timeout=3)
|
||||
cache.set(CACHE_KEY, True, timeout=3)
|
||||
return True
|
||||
return False
|
||||
except ValidationError:
|
||||
@@ -163,7 +194,7 @@ class OwnerRequiredMixin(object):
|
||||
|
||||
try:
|
||||
obj = self.get_object()
|
||||
if obj.get_space() != request.space:
|
||||
if not request.user.userspace.filter(space=obj.get_space()).exists():
|
||||
messages.add_message(request, messages.ERROR,
|
||||
_('You do not have the required permissions to view this page!'))
|
||||
return HttpResponseRedirect(reverse_lazy('index'))
|
||||
@@ -181,7 +212,7 @@ class CustomIsOwner(permissions.BasePermission):
|
||||
verifies user has ownership over object
|
||||
(either user or created_by or user is request user)
|
||||
"""
|
||||
message = _('You cannot interact with this object as it is not owned by you!') # noqa: E501
|
||||
message = _('You cannot interact with this object as it is not owned by you!')
|
||||
|
||||
def has_permission(self, request, view):
|
||||
return request.user.is_authenticated
|
||||
@@ -190,6 +221,28 @@ class CustomIsOwner(permissions.BasePermission):
|
||||
return is_object_owner(request.user, obj)
|
||||
|
||||
|
||||
class CustomIsOwnerReadOnly(CustomIsOwner):
    def has_permission(self, request, view):
        return super().has_permission(request, view) and request.method in SAFE_METHODS

    def has_object_permission(self, request, view, obj):
        return super().has_object_permission(request, view, obj) and request.method in SAFE_METHODS

class CustomIsSpaceOwner(permissions.BasePermission):
|
||||
"""
|
||||
Custom permission class for django rest framework views
|
||||
verifies if the user is the owner of the space the object belongs to
|
||||
"""
|
||||
message = _('You cannot interact with this object as it is not owned by you!')
|
||||
|
||||
def has_permission(self, request, view):
|
||||
return request.user.is_authenticated and request.space.created_by == request.user
|
||||
|
||||
def has_object_permission(self, request, view, obj):
|
||||
return is_space_owner(request.user, obj)
|
||||
|
||||
|
||||
# TODO function duplicate/too similar name
|
||||
class CustomIsShared(permissions.BasePermission):
|
||||
"""
|
||||
@@ -261,6 +314,73 @@ class CustomIsShare(permissions.BasePermission):
|
||||
return False
|
||||
|
||||
|
||||
class CustomRecipePermission(permissions.BasePermission):
|
||||
"""
|
||||
Custom permission class for recipe api endpoint
|
||||
"""
|
||||
message = _('You do not have the required permissions to view this page!')
|
||||
|
||||
def has_permission(self, request, view): # user is either at least a guest or a share link is given and the request is safe
|
||||
share = request.query_params.get('share', None)
|
||||
return has_group_permission(request.user, ['guest']) or (share and request.method in SAFE_METHODS and 'pk' in view.kwargs)
|
||||
|
||||
def has_object_permission(self, request, view, obj):
|
||||
share = request.query_params.get('share', None)
|
||||
if share:
|
||||
return share_link_valid(obj, share)
|
||||
else:
|
||||
if obj.private:
|
||||
return ((obj.created_by == request.user) or (request.user in obj.shared.all())) and obj.space == request.space
|
||||
else:
|
||||
return has_group_permission(request.user, ['guest']) and obj.space == request.space
|
||||
|
||||
|
||||
class CustomUserPermission(permissions.BasePermission):
|
||||
"""
|
||||
Custom permission class for user api endpoint
|
||||
"""
|
||||
message = _('You do not have the required permissions to view this page!')
|
||||
|
||||
def has_permission(self, request, view): # a space filtered user list is visible for everyone
|
||||
return has_group_permission(request.user, ['guest'])
|
||||
|
||||
def has_object_permission(self, request, view, obj): # object write permissions are only available for user
|
||||
if request.method in SAFE_METHODS and 'pk' in view.kwargs and has_group_permission(request.user, ['guest']) and request.space in obj.userspace_set.all():
|
||||
return True
|
||||
elif request.user == obj:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
class CustomTokenHasScope(TokenHasScope):
|
||||
"""
|
||||
Custom implementation of Django OAuth Toolkit TokenHasScope class
|
||||
Only difference: if any other authentication method except OAuth2Authentication is used the scope check is ignored
|
||||
IMPORTANT: do not use this class without any other permission class as it will not check anything besides token scopes
|
||||
"""
|
||||
|
||||
def has_permission(self, request, view):
|
||||
if type(request.auth) == AccessToken:
|
||||
return super().has_permission(request, view)
|
||||
else:
|
||||
return request.user.is_authenticated
|
||||
|
||||
|
||||
class CustomTokenHasReadWriteScope(TokenHasReadWriteScope):
|
||||
"""
|
||||
Custom implementation of Django OAuth Toolkit TokenHasReadWriteScope class
|
||||
Only difference: if any other authentication method except OAuth2Authentication is used the scope check is ignored
|
||||
IMPORTANT: do not use this class without any other permission class as it will not check anything besides token scopes
|
||||
"""
|
||||
|
||||
def has_permission(self, request, view):
|
||||
if type(request.auth) == AccessToken:
|
||||
return super().has_permission(request, view)
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
def above_space_limit(space):  # TODO add file storage limit
    """
    Test if the space has reached any limit (e.g. max recipes, users, ..)
@@ -290,7 +410,27 @@ def above_space_user_limit(space):
    :param space: Space to test for limits
    :return: Tuple (True if above or equal to the limit else False, message)
    """
    limit = space.max_users != 0 and UserPreference.objects.filter(space=space).count() > space.max_users
    limit = space.max_users != 0 and UserSpace.objects.filter(space=space).count() > space.max_users
    if limit:
        return True, _('You have more users than allowed in your space.')
    return False, ''


def switch_user_active_space(user, space):
    """
    Switch the currently active space of a user by setting all spaces to inactive and activating the one passed
    :param user: user to change the active space for
    :param space: space to activate for the user
    :return: user space object, or None if not found / no permission
    """
    try:
        us = UserSpace.objects.get(space=space, user=user)
        if not us.active:
            UserSpace.objects.filter(user=user).update(active=False)
            us.active = True
            us.save()
            return us
        else:
            return us
    except ObjectDoesNotExist:
        return None

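A hypothetical call site for switch_user_active_space(), e.g. in a view that lets a member jump between spaces (the variable names are illustrative):

    user_space = switch_user_active_space(request.user, space)
    if user_space is None:
        ...  # the user is not a member of that space, nothing was switched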
@@ -1,182 +1,191 @@
|
||||
import json
|
||||
import re
|
||||
from json import JSONDecodeError
|
||||
from urllib.parse import unquote
|
||||
# import json
|
||||
# import re
|
||||
# from json import JSONDecodeError
|
||||
# from urllib.parse import unquote
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from bs4.element import Tag
|
||||
from recipe_scrapers._utils import get_host_name, normalize_string
|
||||
# from bs4 import BeautifulSoup
|
||||
# from bs4.element import Tag
|
||||
# from recipe_scrapers import scrape_html, scrape_me
|
||||
# from recipe_scrapers._exceptions import NoSchemaFoundInWildMode
|
||||
# from recipe_scrapers._utils import get_host_name, normalize_string
|
||||
|
||||
from cookbook.helper import recipe_url_import as helper
|
||||
from cookbook.helper.scrapers.scrapers import text_scraper
|
||||
# from cookbook.helper import recipe_url_import as helper
|
||||
# from cookbook.helper.scrapers.scrapers import text_scraper
|
||||
|
||||
|
||||
def get_recipe_from_source(text, url, request):
|
||||
def build_node(k, v):
|
||||
if isinstance(v, dict):
|
||||
node = {
|
||||
'name': k,
|
||||
'value': k,
|
||||
'children': get_children_dict(v)
|
||||
}
|
||||
elif isinstance(v, list):
|
||||
node = {
|
||||
'name': k,
|
||||
'value': k,
|
||||
'children': get_children_list(v)
|
||||
}
|
||||
else:
|
||||
node = {
|
||||
'name': k + ": " + normalize_string(str(v)),
|
||||
'value': normalize_string(str(v))
|
||||
}
|
||||
return node
|
||||
# def get_recipe_from_source(text, url, request):
|
||||
# def build_node(k, v):
|
||||
# if isinstance(v, dict):
|
||||
# node = {
|
||||
# 'name': k,
|
||||
# 'value': k,
|
||||
# 'children': get_children_dict(v)
|
||||
# }
|
||||
# elif isinstance(v, list):
|
||||
# node = {
|
||||
# 'name': k,
|
||||
# 'value': k,
|
||||
# 'children': get_children_list(v)
|
||||
# }
|
||||
# else:
|
||||
# node = {
|
||||
# 'name': k + ": " + normalize_string(str(v)),
|
||||
# 'value': normalize_string(str(v))
|
||||
# }
|
||||
# return node
|
||||
|
||||
def get_children_dict(children):
|
||||
kid_list = []
|
||||
for k, v in children.items():
|
||||
kid_list.append(build_node(k, v))
|
||||
return kid_list
|
||||
# def get_children_dict(children):
|
||||
# kid_list = []
|
||||
# for k, v in children.items():
|
||||
# kid_list.append(build_node(k, v))
|
||||
# return kid_list
|
||||
|
||||
def get_children_list(children):
|
||||
kid_list = []
|
||||
for kid in children:
|
||||
if type(kid) == list:
|
||||
node = {
|
||||
'name': "unknown list",
|
||||
'value': "unknown list",
|
||||
'children': get_children_list(kid)
|
||||
}
|
||||
kid_list.append(node)
|
||||
elif type(kid) == dict:
|
||||
for k, v in kid.items():
|
||||
kid_list.append(build_node(k, v))
|
||||
else:
|
||||
kid_list.append({
|
||||
'name': normalize_string(str(kid)),
|
||||
'value': normalize_string(str(kid))
|
||||
})
|
||||
return kid_list
|
||||
# def get_children_list(children):
|
||||
# kid_list = []
|
||||
# for kid in children:
|
||||
# if type(kid) == list:
|
||||
# node = {
|
||||
# 'name': "unknown list",
|
||||
# 'value': "unknown list",
|
||||
# 'children': get_children_list(kid)
|
||||
# }
|
||||
# kid_list.append(node)
|
||||
# elif type(kid) == dict:
|
||||
# for k, v in kid.items():
|
||||
# kid_list.append(build_node(k, v))
|
||||
# else:
|
||||
# kid_list.append({
|
||||
# 'name': normalize_string(str(kid)),
|
||||
# 'value': normalize_string(str(kid))
|
||||
# })
|
||||
# return kid_list
|
||||
|
||||
recipe_tree = []
|
||||
parse_list = []
|
||||
html_data = []
|
||||
images = []
|
||||
text = unquote(text)
|
||||
# recipe_tree = []
|
||||
# parse_list = []
|
||||
# soup = BeautifulSoup(text, "html.parser")
|
||||
# html_data = get_from_html(soup)
|
||||
# images = get_images_from_source(soup, url)
|
||||
# text = unquote(text)
|
||||
# scrape = None
|
||||
|
||||
try:
|
||||
parse_list.append(remove_graph(json.loads(text)))
|
||||
if not url and 'url' in parse_list[0]:
|
||||
url = parse_list[0]['url']
|
||||
scrape = text_scraper("<script type='application/ld+json'>" + text + "</script>", url=url)
|
||||
# if url and not text:
|
||||
# try:
|
||||
# scrape = scrape_me(url_path=url, wild_mode=True)
|
||||
# except(NoSchemaFoundInWildMode):
|
||||
# pass
|
||||
|
||||
except JSONDecodeError:
|
||||
soup = BeautifulSoup(text, "html.parser")
|
||||
html_data = get_from_html(soup)
|
||||
images += get_images_from_source(soup, url)
|
||||
for el in soup.find_all('script', type='application/ld+json'):
|
||||
el = remove_graph(el)
|
||||
if not url and 'url' in el:
|
||||
url = el['url']
|
||||
if type(el) == list:
|
||||
for le in el:
|
||||
parse_list.append(le)
|
||||
elif type(el) == dict:
|
||||
parse_list.append(el)
|
||||
for el in soup.find_all(type='application/json'):
|
||||
el = remove_graph(el)
|
||||
if type(el) == list:
|
||||
for le in el:
|
||||
parse_list.append(le)
|
||||
elif type(el) == dict:
|
||||
parse_list.append(el)
|
||||
scrape = text_scraper(text, url=url)
|
||||
# if not scrape:
|
||||
# try:
|
||||
# parse_list.append(remove_graph(json.loads(text)))
|
||||
# if not url and 'url' in parse_list[0]:
|
||||
# url = parse_list[0]['url']
|
||||
# scrape = text_scraper("<script type='application/ld+json'>" + text + "</script>", url=url)
|
||||
|
||||
recipe_json = helper.get_from_scraper(scrape, request)
|
||||
# except JSONDecodeError:
|
||||
# for el in soup.find_all('script', type='application/ld+json'):
|
||||
# el = remove_graph(el)
|
||||
# if not url and 'url' in el:
|
||||
# url = el['url']
|
||||
# if type(el) == list:
|
||||
# for le in el:
|
||||
# parse_list.append(le)
|
||||
# elif type(el) == dict:
|
||||
# parse_list.append(el)
|
||||
# for el in soup.find_all(type='application/json'):
|
||||
# el = remove_graph(el)
|
||||
# if type(el) == list:
|
||||
# for le in el:
|
||||
# parse_list.append(le)
|
||||
# elif type(el) == dict:
|
||||
# parse_list.append(el)
|
||||
# scrape = text_scraper(text, url=url)
|
||||
|
||||
for el in parse_list:
|
||||
temp_tree = []
|
||||
if isinstance(el, Tag):
|
||||
try:
|
||||
el = json.loads(el.string)
|
||||
except TypeError:
|
||||
continue
|
||||
# recipe_json = helper.get_from_scraper(scrape, request)
|
||||
|
||||
for k, v in el.items():
|
||||
if isinstance(v, dict):
|
||||
node = {
|
||||
'name': k,
|
||||
'value': k,
|
||||
'children': get_children_dict(v)
|
||||
}
|
||||
elif isinstance(v, list):
|
||||
node = {
|
||||
'name': k,
|
||||
'value': k,
|
||||
'children': get_children_list(v)
|
||||
}
|
||||
else:
|
||||
node = {
|
||||
'name': k + ": " + normalize_string(str(v)),
|
||||
'value': normalize_string(str(v))
|
||||
}
|
||||
temp_tree.append(node)
|
||||
# # TODO: DEPRECATE recipe_tree & html_data. first validate it isn't used anywhere
|
||||
# for el in parse_list:
|
||||
# temp_tree = []
|
||||
# if isinstance(el, Tag):
|
||||
# try:
|
||||
# el = json.loads(el.string)
|
||||
# except TypeError:
|
||||
# continue
|
||||
|
||||
if '@type' in el and el['@type'] == 'Recipe':
|
||||
recipe_tree += [{'name': 'ld+json', 'children': temp_tree}]
|
||||
else:
|
||||
recipe_tree += [{'name': 'json', 'children': temp_tree}]
|
||||
# for k, v in el.items():
|
||||
# if isinstance(v, dict):
|
||||
# node = {
|
||||
# 'name': k,
|
||||
# 'value': k,
|
||||
# 'children': get_children_dict(v)
|
||||
# }
|
||||
# elif isinstance(v, list):
|
||||
# node = {
|
||||
# 'name': k,
|
||||
# 'value': k,
|
||||
# 'children': get_children_list(v)
|
||||
# }
|
||||
# else:
|
||||
# node = {
|
||||
# 'name': k + ": " + normalize_string(str(v)),
|
||||
# 'value': normalize_string(str(v))
|
||||
# }
|
||||
# temp_tree.append(node)
|
||||
|
||||
return recipe_json, recipe_tree, html_data, images
|
||||
# if '@type' in el and el['@type'] == 'Recipe':
|
||||
# recipe_tree += [{'name': 'ld+json', 'children': temp_tree}]
|
||||
# else:
|
||||
# recipe_tree += [{'name': 'json', 'children': temp_tree}]
|
||||
|
||||
# return recipe_json, recipe_tree, html_data, images
|
||||
|
||||
|
||||
def get_from_html(soup):
|
||||
INVISIBLE_ELEMS = ('style', 'script', 'head', 'title')
|
||||
html = []
|
||||
for s in soup.strings:
|
||||
if ((s.parent.name not in INVISIBLE_ELEMS) and (len(s.strip()) > 0)):
|
||||
html.append(s)
|
||||
return html
|
||||
# def get_from_html(soup):
|
||||
# INVISIBLE_ELEMS = ('style', 'script', 'head', 'title')
|
||||
# html = []
|
||||
# for s in soup.strings:
|
||||
# if ((s.parent.name not in INVISIBLE_ELEMS) and (len(s.strip()) > 0)):
|
||||
# html.append(s)
|
||||
# return html
|
||||
|
||||
|
||||
def get_images_from_source(soup, url):
|
||||
sources = ['src', 'srcset', 'data-src']
|
||||
images = []
|
||||
img_tags = soup.find_all('img')
|
||||
if url:
|
||||
site = get_host_name(url)
|
||||
prot = url.split(':')[0]
|
||||
# def get_images_from_source(soup, url):
|
||||
# sources = ['src', 'srcset', 'data-src']
|
||||
# images = []
|
||||
# img_tags = soup.find_all('img')
|
||||
# if url:
|
||||
# site = get_host_name(url)
|
||||
# prot = url.split(':')[0]
|
||||
|
||||
urls = []
|
||||
for img in img_tags:
|
||||
for src in sources:
|
||||
try:
|
||||
urls.append(img[src])
|
||||
except KeyError:
|
||||
pass
|
||||
# urls = []
|
||||
# for img in img_tags:
|
||||
# for src in sources:
|
||||
# try:
|
||||
# urls.append(img[src])
|
||||
# except KeyError:
|
||||
# pass
|
||||
|
||||
for u in urls:
|
||||
u = u.split('?')[0]
|
||||
filename = re.search(r'/([\w_-]+[.](jpg|jpeg|gif|png))$', u)
|
||||
if filename:
|
||||
if (('http' not in u) and (url)):
|
||||
# sometimes an image source can be relative
|
||||
# if it is provide the base url
|
||||
u = '{}://{}{}'.format(prot, site, u)
|
||||
if 'http' in u:
|
||||
images.append(u)
|
||||
return images
|
||||
# for u in urls:
|
||||
# u = u.split('?')[0]
|
||||
# filename = re.search(r'/([\w_-]+[.](jpg|jpeg|gif|png))$', u)
|
||||
# if filename:
|
||||
# if (('http' not in u) and (url)):
|
||||
# # sometimes an image source can be relative
|
||||
# # if it is provide the base url
|
||||
# u = '{}://{}{}'.format(prot, site, u)
|
||||
# if 'http' in u:
|
||||
# images.append(u)
|
||||
# return images
|
||||
|
||||
|
||||
def remove_graph(el):
|
||||
# recipes type might be wrapped in @graph type
|
||||
if isinstance(el, Tag):
|
||||
try:
|
||||
el = json.loads(el.string)
|
||||
if '@graph' in el:
|
||||
for x in el['@graph']:
|
||||
if '@type' in x and x['@type'] == 'Recipe':
|
||||
el = x
|
||||
except (TypeError, JSONDecodeError):
|
||||
pass
|
||||
return el
|
||||
# def remove_graph(el):
|
||||
# # recipes type might be wrapped in @graph type
|
||||
# if isinstance(el, Tag):
|
||||
# try:
|
||||
# el = json.loads(el.string)
|
||||
# if '@graph' in el:
|
||||
# for x in el['@graph']:
|
||||
# if '@type' in x and x['@type'] == 'Recipe':
|
||||
# el = x
|
||||
# except (TypeError, JSONDecodeError):
|
||||
# pass
|
||||
# return el
|
||||
|
||||
@@ -3,17 +3,16 @@ from collections import Counter
|
||||
from datetime import date, timedelta
|
||||
|
||||
from django.contrib.postgres.search import SearchQuery, SearchRank, SearchVector, TrigramSimilarity
|
||||
from django.core.cache import cache
|
||||
from django.core.cache import caches
|
||||
from django.db.models import Avg, Case, Count, F, Func, Max, OuterRef, Q, Subquery, Sum, Value, When
|
||||
from django.db.models import (Avg, Case, Count, Exists, F, Func, Max, OuterRef, Q, Subquery, Value, When, FilteredRelation)
|
||||
from django.db.models.functions import Coalesce, Lower, Substr
|
||||
from django.utils import timezone, translation
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from cookbook.filters import RecipeFilter
|
||||
from cookbook.helper.HelperFunctions import Round, str2bool
|
||||
from cookbook.helper.permission_helper import has_group_permission
|
||||
from cookbook.managers import DICTIONARY
|
||||
from cookbook.models import (CookLog, CustomFilter, Food, Keyword, Recipe, RecipeBook, SearchFields,
|
||||
from cookbook.models import (CookLog, CustomFilter, Food, Keyword, Recipe, SearchFields,
|
||||
SearchPreference, ViewLog)
|
||||
from recipes import settings
|
||||
|
||||
@@ -23,7 +22,7 @@ from recipes import settings
|
||||
class RecipeSearch():
|
||||
_postgres = settings.DATABASES['default']['ENGINE'] in ['django.db.backends.postgresql_psycopg2', 'django.db.backends.postgresql']
|
||||
|
||||
def __init__(self, request, **params):
|
||||
def __init__(self, request, **params):
|
||||
self._request = request
|
||||
self._queryset = None
|
||||
if f := params.get('filter', None):
|
||||
@@ -37,7 +36,13 @@ class RecipeSearch():
|
||||
else:
|
||||
self._params = {**(params or {})}
|
||||
if self._request.user.is_authenticated:
|
||||
self._search_prefs = request.user.searchpreference
|
||||
CACHE_KEY = f'search_pref_{request.user.id}'
|
||||
cached_result = cache.get(CACHE_KEY, default=None)
|
||||
if cached_result is not None:
|
||||
self._search_prefs = cached_result
|
||||
else:
|
||||
self._search_prefs = request.user.searchpreference
|
||||
cache.set(CACHE_KEY, self._search_prefs, timeout=10)
|
||||
else:
|
||||
self._search_prefs = SearchPreference()
|
||||
self._string = self._params.get('query').strip() if self._params.get('query', None) else None
|
||||
@@ -112,19 +117,20 @@ class RecipeSearch():
|
||||
)
|
||||
self.search_rank = None
|
||||
self.orderby = []
|
||||
self._default_sort = ['-favorite'] # TODO add user setting
|
||||
self._filters = None
|
||||
self._fuzzy_match = None
|
||||
|
||||
def get_queryset(self, queryset):
|
||||
self._queryset = queryset
|
||||
self._queryset = self._queryset.prefetch_related('keywords')
|
||||
|
||||
self._build_sort_order()
|
||||
self._recently_viewed(num_recent=self._num_recent)
|
||||
self._cooked_on_filter(cooked_date=self._cookedon)
|
||||
self._created_on_filter(created_date=self._createdon)
|
||||
self._updated_on_filter(updated_date=self._updatedon)
|
||||
self._viewed_on_filter(viewed_date=self._viewedon)
|
||||
self._favorite_recipes(timescooked=self._timescooked)
|
||||
self._favorite_recipes(times_cooked=self._timescooked)
|
||||
self._new_recipes()
|
||||
self.keyword_filters(**self._keywords)
|
||||
self.food_filters(**self._foods)
|
||||
@@ -147,11 +153,11 @@ class RecipeSearch():
|
||||
|
||||
def _build_sort_order(self):
|
||||
if self._random:
|
||||
self._queryset = self._queryset.order_by("?")
|
||||
self.orderby = ['?']
|
||||
else:
|
||||
order = []
|
||||
# TODO add userpreference for default sort order and replace '-favorite'
|
||||
default_order = ['-favorite']
|
||||
default_order = ['-name']
|
||||
# recent and new_recipe are always first; they float a few recipes to the top
|
||||
if self._num_recent:
|
||||
order += ['-recent']
|
||||
@@ -208,7 +214,7 @@ class RecipeSearch():
|
||||
else:
|
||||
self._queryset = self._queryset.annotate(simularity=Coalesce(Subquery(simularity), 0.0))
|
||||
if self._sort_includes('score') and self._fulltext_include and self._fuzzy_match is not None:
|
||||
self._queryset = self._queryset.annotate(score=F('rank')+F('simularity'))
|
||||
self._queryset = self._queryset.annotate(score=F('rank') + F('simularity'))
|
||||
else:
|
||||
query_filter = Q()
|
||||
for f in [x + '__unaccent__iexact' if x in self._unaccent_include else x + '__iexact' for x in SearchFields.objects.all().values_list('field', flat=True)]:
|
||||
@@ -289,25 +295,25 @@ class RecipeSearch():
|
||||
'recipe').annotate(recent=Max('created_at')).order_by('-recent')[:num_recent]
|
||||
self._queryset = self._queryset.annotate(recent=Coalesce(Max(Case(When(pk__in=num_recent_recipes.values('recipe'), then='viewlog__pk'))), Value(0)))
|
||||
|
||||
def _favorite_recipes(self, timescooked=None):
|
||||
if self._sort_includes('favorite') or timescooked:
|
||||
lessthan = '-' in (timescooked or []) or not self._sort_includes('-favorite')
|
||||
if lessthan:
|
||||
def _favorite_recipes(self, times_cooked=None):
|
||||
if self._sort_includes('favorite') or times_cooked:
|
||||
less_than = '-' in (times_cooked or []) or not self._sort_includes('-favorite')
|
||||
if less_than:
|
||||
default = 1000
|
||||
else:
|
||||
default = 0
|
||||
favorite_recipes = CookLog.objects.filter(created_by=self._request.user, space=self._request.space, recipe=OuterRef('pk')
|
||||
).values('recipe').annotate(count=Count('pk', distinct=True)).values('count')
|
||||
self._queryset = self._queryset.annotate(favorite=Coalesce(Subquery(favorite_recipes), default))
|
||||
if timescooked is None:
|
||||
if times_cooked is None:
|
||||
return
|
||||
|
||||
if timescooked == '0':
|
||||
if times_cooked == '0':
|
||||
self._queryset = self._queryset.filter(favorite=0)
|
||||
elif lessthan:
|
||||
self._queryset = self._queryset.filter(favorite__lte=int(timescooked[1:])).exclude(favorite=0)
|
||||
elif less_than:
|
||||
self._queryset = self._queryset.filter(favorite__lte=int(times_cooked[1:])).exclude(favorite=0)
|
||||
else:
|
||||
self._queryset = self._queryset.filter(favorite__gte=int(timescooked))
|
||||
self._queryset = self._queryset.filter(favorite__gte=int(times_cooked))
|
||||
|
||||
def keyword_filters(self, **kwargs):
|
||||
if all([kwargs[x] is None for x in kwargs]):
|
||||
@@ -507,10 +513,10 @@ class RecipeSearch():
|
||||
shopping_users = [*self._request.user.get_shopping_share(), self._request.user]
|
||||
|
||||
onhand_filter = (
|
||||
Q(steps__ingredients__food__onhand_users__in=shopping_users) # food onhand
|
||||
| Q(steps__ingredients__food__substitute__onhand_users__in=shopping_users) # or substitute food onhand
|
||||
| Q(steps__ingredients__food__in=self.__children_substitute_filter(shopping_users))
|
||||
| Q(steps__ingredients__food__in=self.__sibling_substitute_filter(shopping_users))
|
||||
Q(steps__ingredients__food__onhand_users__in=shopping_users) # food onhand
|
||||
| Q(steps__ingredients__food__substitute__onhand_users__in=shopping_users) # or substitute food onhand
|
||||
| Q(steps__ingredients__food__in=self.__children_substitute_filter(shopping_users))
|
||||
| Q(steps__ingredients__food__in=self.__sibling_substitute_filter(shopping_users))
|
||||
)
|
||||
makenow_recipes = Recipe.objects.annotate(
|
||||
count_food=Count('steps__ingredients__food__pk', filter=Q(steps__ingredients__food__isnull=False), distinct=True),
|
||||
@@ -519,36 +525,40 @@ class RecipeSearch():
|
||||
steps__ingredients__food__recipe__isnull=True), distinct=True),
|
||||
has_child_sub=Case(When(steps__ingredients__food__in=self.__children_substitute_filter(shopping_users), then=Value(1)), default=Value(0)),
|
||||
has_sibling_sub=Case(When(steps__ingredients__food__in=self.__sibling_substitute_filter(shopping_users), then=Value(1)), default=Value(0))
|
||||
).annotate(missingfood=F('count_food')-F('count_onhand')-F('count_ignore_shopping')).filter(missingfood=missing)
|
||||
).annotate(missingfood=F('count_food') - F('count_onhand') - F('count_ignore_shopping')).filter(missingfood=missing)
|
||||
self._queryset = self._queryset.distinct().filter(id__in=makenow_recipes.values('id'))
|
||||
|
||||
@ staticmethod
|
||||
@staticmethod
|
||||
def __children_substitute_filter(shopping_users=None):
|
||||
children_onhand_subquery = Food.objects.filter(
|
||||
path__startswith=Substr(OuterRef('path'), 1, Food.steplen*OuterRef('depth')),
|
||||
path__startswith=OuterRef('path'),
|
||||
depth__gt=OuterRef('depth'),
|
||||
onhand_users__in=shopping_users
|
||||
).annotate(child_onhand=Coalesce(Func('pk', function='Count'), 0)).values('child_onhand')
|
||||
)
|
||||
return Food.objects.exclude( # list of foods that are onhand and children of: foods that are not onhand and are set to use children as substitutes
|
||||
Q(onhand_users__in=shopping_users)
|
||||
| Q(ignore_shopping=True, recipe__isnull=True)
|
||||
| Q(substitute__onhand_users__in=shopping_users)
|
||||
).exclude(depth=1, numchild=0).filter(substitute_children=True
|
||||
).annotate(child_onhand=Coalesce(Subquery(children_onhand_subquery), 0)).exclude(child_onhand=0)
|
||||
).exclude(depth=1, numchild=0
|
||||
).filter(substitute_children=True
|
||||
).annotate(child_onhand_count=Exists(children_onhand_subquery)
|
||||
).filter(child_onhand_count=True)
|
||||
|
||||
@ staticmethod
|
||||
@staticmethod
|
||||
def __sibling_substitute_filter(shopping_users=None):
|
||||
sibling_onhand_subquery = Food.objects.filter(
|
||||
path__startswith=Substr(OuterRef('path'), 1, Food.steplen*(OuterRef('depth')-1)),
|
||||
path__startswith=Substr(OuterRef('path'), 1, Food.steplen * (OuterRef('depth') - 1)),
|
||||
depth=OuterRef('depth'),
|
||||
onhand_users__in=shopping_users
|
||||
).annotate(sibling_onhand=Coalesce(Func('pk', function='Count'), 0)).values('sibling_onhand')
|
||||
)
|
||||
return Food.objects.exclude( # list of foods that are onhand and siblings of: foods that are not onhand and are set to use siblings as substitutes
|
||||
Q(onhand_users__in=shopping_users)
|
||||
| Q(ignore_shopping=True, recipe__isnull=True)
|
||||
| Q(substitute__onhand_users__in=shopping_users)
|
||||
).exclude(depth=1, numchild=0).filter(substitute_siblings=True
|
||||
).annotate(sibling_onhand=Coalesce(Subquery(sibling_onhand_subquery), 0)).exclude(sibling_onhand=0)
|
||||
).exclude(depth=1, numchild=0
|
||||
).filter(substitute_siblings=True
|
||||
).annotate(sibling_onhand=Exists(sibling_onhand_subquery)
|
||||
).filter(sibling_onhand=True)
|
||||
|
||||
|
||||
class RecipeFacet():
|
||||
@@ -561,7 +571,7 @@ class RecipeFacet():
|
||||
|
||||
self._request = request
|
||||
self._queryset = queryset
|
||||
self.hash_key = hash_key or str(hash(frozenset(self._queryset.values_list('pk'))))
|
||||
self.hash_key = hash_key or str(hash(self._queryset.query))
|
||||
self._SEARCH_CACHE_KEY = f"recipes_filter_{self.hash_key}"
|
||||
self._cache_timeout = cache_timeout
|
||||
self._cache = caches['default'].get(self._SEARCH_CACHE_KEY, {})
|
||||
@@ -741,7 +751,7 @@ class RecipeFacet():
|
||||
).filter(depth=depth, count__gt=0
|
||||
).values('id', 'name', 'count', 'numchild').order_by(Lower('name').asc())[:200]
|
||||
else:
|
||||
return queryset.filter(depth=depth).values('id', 'name', 'numchild').order_by(Lower('name').asc())
|
||||
return queryset.filter(depth=depth).values('id', 'name', 'numchild').order_by(Lower('name').asc())
|
||||
|
||||
def _food_queryset(self, queryset, food=None):
|
||||
depth = getattr(food, 'depth', 0) + 1
|
||||
@@ -753,13 +763,3 @@ class RecipeFacet():
|
||||
).values('id', 'name', 'count', 'numchild').order_by(Lower('name').asc())[:200]
|
||||
else:
|
||||
return queryset.filter(depth__lte=depth).values('id', 'name', 'numchild').order_by(Lower('name').asc())
|
||||
|
||||
|
||||
def old_search(request):
|
||||
if has_group_permission(request.user, ('guest',)):
|
||||
params = dict(request.GET)
|
||||
params['internal'] = None
|
||||
f = RecipeFilter(params,
|
||||
queryset=Recipe.objects.filter(space=request.user.userpreference.space).all().order_by(Lower('name').asc()),
|
||||
space=request.space)
|
||||
return f.qs
|
||||
|
||||
@@ -7,11 +7,12 @@ from django.utils.dateparse import parse_duration
|
||||
from django.utils.translation import gettext as _
|
||||
from isodate import parse_duration as iso_parse_duration
|
||||
from isodate.isoerror import ISO8601Error
|
||||
from recipe_scrapers._utils import get_minutes
|
||||
from pytube import YouTube
|
||||
from recipe_scrapers._utils import get_host_name, get_minutes
|
||||
|
||||
from cookbook.helper import recipe_url_import as helper
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.models import Keyword
|
||||
from cookbook.models import Keyword, Automation
|
||||
|
||||
|
||||
# from recipe_scrapers._utils import get_minutes ## temporary until/unless upstream incorporates get_minutes() PR
|
||||
@@ -21,7 +22,7 @@ def get_from_scraper(scrape, request):
|
||||
# converting the scrape_me object to the existing json format based on ld+json
|
||||
recipe_json = {}
|
||||
try:
|
||||
recipe_json['name'] = parse_name(scrape.title() or None)
|
||||
recipe_json['name'] = parse_name(scrape.title()[:128] or None)
|
||||
except Exception:
|
||||
recipe_json['name'] = None
|
||||
if not recipe_json['name']:
|
||||
@@ -43,14 +44,9 @@ def get_from_scraper(scrape, request):
|
||||
recipe_json['internal'] = True
|
||||
|
||||
try:
|
||||
servings = scrape.yields() or None
|
||||
servings = scrape.schema.data.get('recipeYield') or 1  # don't use scrape.yields() as this will always return "x servings" or "x items"; should be improved in scrapers directly
except Exception:
|
||||
servings = None
|
||||
if not servings:
|
||||
try:
|
||||
servings = scrape.schema.data.get('recipeYield') or 1
|
||||
except Exception:
|
||||
servings = 1
|
||||
servings = 1
|
||||
|
||||
recipe_json['servings'] = parse_servings(servings)
|
||||
recipe_json['servings_text'] = parse_servings_text(servings)
|
||||
@@ -114,12 +110,25 @@ def get_from_scraper(scrape, request):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if source_url := scrape.canonical_url():
|
||||
try:
|
||||
source_url = scrape.canonical_url()
|
||||
except Exception:
|
||||
try:
|
||||
source_url = scrape.url
|
||||
except Exception:
|
||||
pass
|
||||
if source_url:
|
||||
recipe_json['source_url'] = source_url
|
||||
try:
|
||||
keywords.append(source_url.replace('http://', '').replace('https://', '').split('/')[0])
|
||||
except Exception:
|
||||
pass
|
||||
recipe_json['source_url'] = ''
|
||||
|
||||
try:
|
||||
if scrape.author():
|
||||
keywords.append(scrape.author())
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
recipe_json['keywords'] = parse_keywords(list(set(map(str.casefold, keywords))), request.space)
|
||||
@@ -129,51 +138,101 @@ def get_from_scraper(scrape, request):
|
||||
ingredient_parser = IngredientParser(request, True)
|
||||
|
||||
recipe_json['steps'] = []
|
||||
|
||||
for i in parse_instructions(scrape.instructions()):
|
||||
recipe_json['steps'].append({'instruction': i, 'ingredients': [], })
|
||||
try:
|
||||
for i in parse_instructions(scrape.instructions()):
|
||||
recipe_json['steps'].append({'instruction': i, 'ingredients': [], })
|
||||
except Exception:
|
||||
pass
|
||||
if len(recipe_json['steps']) == 0:
|
||||
recipe_json['steps'].append({'instruction': '', 'ingredients': [], })
|
||||
|
||||
if len(parse_description(description)) > 256: # split at 256 as long descriptions dont look good on recipe cards
|
||||
recipe_json['steps'][0]['instruction'] = f'*{parse_description(description)}* \n\n' + recipe_json['steps'][0]['instruction']
|
||||
parsed_description = parse_description(description)
|
||||
# TODO notify user about limit if reached
|
||||
# limits exist to reduce the attack surface for DoS-style attacks
automations = Automation.objects.filter(type=Automation.DESCRIPTION_REPLACE, space=request.space, disabled=False).only('param_1', 'param_2', 'param_3').all().order_by('order')[:512]
|
||||
for a in automations:
|
||||
if re.match(a.param_1, (recipe_json['source_url'])[:512]):
|
||||
parsed_description = re.sub(a.param_2, a.param_3, parsed_description, count=1)
|
||||
|
||||
if len(parsed_description) > 256: # split at 256 as long descriptions don't look good on recipe cards
|
||||
recipe_json['steps'][0]['instruction'] = f'*{parsed_description}* \n\n' + recipe_json['steps'][0]['instruction']
|
||||
else:
|
||||
recipe_json['description'] = parse_description(description)[:512]
|
||||
recipe_json['description'] = parsed_description[:512]
|
||||
|
||||
try:
|
||||
for x in scrape.ingredients():
|
||||
try:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(x)
|
||||
ingredient = {
|
||||
'amount': amount,
|
||||
'food': {
|
||||
'name': ingredient,
|
||||
},
|
||||
'unit': None,
|
||||
'note': note,
|
||||
'original_text': x
|
||||
}
|
||||
if unit:
|
||||
ingredient['unit'] = {'name': unit, }
|
||||
recipe_json['steps'][0]['ingredients'].append(ingredient)
|
||||
except Exception:
|
||||
recipe_json['steps'][0]['ingredients'].append(
|
||||
{
|
||||
'amount': 0,
|
||||
'unit': None,
|
||||
if x.strip() != '':
|
||||
try:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(x)
|
||||
ingredient = {
|
||||
'amount': amount,
|
||||
'food': {
|
||||
'name': x,
|
||||
'name': ingredient,
|
||||
},
|
||||
'note': '',
|
||||
'unit': None,
|
||||
'note': note,
|
||||
'original_text': x
|
||||
}
|
||||
)
|
||||
if unit:
|
||||
ingredient['unit'] = {'name': unit, }
|
||||
recipe_json['steps'][0]['ingredients'].append(ingredient)
|
||||
except Exception:
|
||||
recipe_json['steps'][0]['ingredients'].append(
|
||||
{
|
||||
'amount': 0,
|
||||
'unit': None,
|
||||
'food': {
|
||||
'name': x,
|
||||
},
|
||||
'note': '',
|
||||
'original_text': x
|
||||
}
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if recipe_json['source_url']:
|
||||
automations = Automation.objects.filter(type=Automation.INSTRUCTION_REPLACE, space=request.space, disabled=False).only('param_1', 'param_2', 'param_3').order_by('order').all()[:512]
|
||||
for a in automations:
|
||||
if re.match(a.param_1, (recipe_json['source_url'])[:512]):
|
||||
for s in recipe_json['steps']:
|
||||
s['instruction'] = re.sub(a.param_2, a.param_3, s['instruction'])
|
||||
|
||||
return recipe_json
|
||||
|
||||
|
||||
def get_from_youtube_scraper(url, request):
|
||||
"""A YouTube Information Scraper."""
|
||||
kw, created = Keyword.objects.get_or_create(name='YouTube', space=request.space)
|
||||
default_recipe_json = {
|
||||
'name': '',
|
||||
'internal': True,
|
||||
'description': '',
|
||||
'servings': 1,
|
||||
'working_time': 0,
|
||||
'waiting_time': 0,
|
||||
'image': "",
|
||||
'keywords': [{'name': kw.name, 'label': kw.name, 'id': kw.pk}],
|
||||
'source_url': url,
|
||||
'steps': [
|
||||
{
|
||||
'ingredients': [],
|
||||
'instruction': ''
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
try:
|
||||
video = YouTube(url=url)
|
||||
default_recipe_json['name'] = video.title
|
||||
default_recipe_json['image'] = video.thumbnail_url
|
||||
default_recipe_json['steps'][0]['instruction'] = video.description
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return default_recipe_json
|
||||
|
||||
|
||||
def parse_name(name):
|
||||
if type(name) == list:
|
||||
try:
|
||||
@@ -263,7 +322,12 @@ def parse_servings_text(servings):
|
||||
servings = re.sub("\d+", '', servings).strip()
|
||||
except Exception:
|
||||
servings = ''
|
||||
return servings
|
||||
if type(servings) == list:
|
||||
try:
|
||||
servings = parse_servings_text(servings[1])
|
||||
except Exception:
|
||||
pass
|
||||
return str(servings)[:32]
|
||||
|
||||
|
||||
def parse_time(recipe_time):
|
||||
@@ -331,3 +395,32 @@ def iso_duration_to_minutes(string):
|
||||
string
|
||||
).groupdict()
|
||||
return int(match['days'] or 0) * 24 * 60 + int(match['hours'] or 0) * 60 + int(match['minutes'] or 0)
|
||||
|
||||
|
||||
def get_images_from_soup(soup, url):
|
||||
sources = ['src', 'srcset', 'data-src']
|
||||
images = []
|
||||
img_tags = soup.find_all('img')
|
||||
if url:
|
||||
site = get_host_name(url)
|
||||
prot = url.split(':')[0]
|
||||
|
||||
urls = []
|
||||
for img in img_tags:
|
||||
for src in sources:
|
||||
try:
|
||||
urls.append(img[src])
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
for u in urls:
|
||||
u = u.split('?')[0]
|
||||
filename = re.search(r'/([\w_-]+[.](jpg|jpeg|gif|png))$', u)
|
||||
if filename:
|
||||
if (('http' not in u) and (url)):
|
||||
# sometimes an image source can be relative;
# if it is, prepend the protocol and host taken from the base url
u = '{}://{}{}'.format(prot, site, u)
|
||||
if 'http' in u:
|
||||
images.append(u)
|
||||
return images
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from django.urls import reverse
|
||||
from django_scopes import scope, scopes_disabled
|
||||
from oauth2_provider.contrib.rest_framework import OAuth2Authentication
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.authtoken.models import Token
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
@@ -14,6 +15,12 @@ class ScopeMiddleware:
|
||||
|
||||
def __call__(self, request):
|
||||
prefix = settings.JS_REVERSE_SCRIPT_PREFIX or ''
|
||||
|
||||
# need to disable scopes for writing requests into userpref and enable for loading ?
|
||||
if request.path.startswith(prefix + '/api/user-preference/'):
|
||||
with scopes_disabled():
|
||||
return self.get_response(request)
|
||||
|
||||
if request.user.is_authenticated:
|
||||
|
||||
if request.path.startswith(prefix + '/admin/'):
|
||||
@@ -26,24 +33,35 @@ class ScopeMiddleware:
|
||||
if request.path.startswith(prefix + '/accounts/'):
|
||||
return self.get_response(request)
|
||||
|
||||
with scopes_disabled():
|
||||
if request.user.userpreference.space is None and not reverse('account_logout') in request.path:
|
||||
return views.no_space(request)
|
||||
if request.path.startswith(prefix + '/switch-space/'):
|
||||
return self.get_response(request)
|
||||
|
||||
if request.user.groups.count() == 0 and not reverse('account_logout') in request.path:
|
||||
with scopes_disabled():
|
||||
if request.user.userspace_set.count() == 0 and not reverse('account_logout') in request.path:
|
||||
return views.space_overview(request)
|
||||
|
||||
# get the active user space; if for some reason more than one space is active, select the first (group permission checks will then fail, this is not intended at this point)
user_space = request.user.userspace_set.filter(active=True).first()
|
||||
|
||||
if not user_space:
|
||||
return views.space_overview(request)
|
||||
|
||||
if user_space.groups.count() == 0 and not reverse('account_logout') in request.path:
|
||||
return views.no_groups(request)
|
||||
|
||||
request.space = request.user.userpreference.space
|
||||
request.space = user_space.space
|
||||
# with scopes_disabled():
|
||||
with scope(space=request.space):
|
||||
return self.get_response(request)
|
||||
else:
|
||||
if request.path.startswith(prefix + '/api/'):
|
||||
try:
|
||||
if auth := TokenAuthentication().authenticate(request):
|
||||
request.space = auth[0].userpreference.space
|
||||
with scope(space=request.space):
|
||||
return self.get_response(request)
|
||||
if auth := OAuth2Authentication().authenticate(request):
|
||||
user_space = auth[0].userspace_set.filter(active=True).first()
|
||||
if user_space:
|
||||
request.space = user_space.space
|
||||
with scope(space=request.space):
|
||||
return self.get_response(request)
|
||||
except AuthenticationFailed:
|
||||
pass
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
from bs4 import BeautifulSoup
from json import JSONDecodeError

from bs4 import BeautifulSoup
from recipe_scrapers import SCRAPERS, get_host_name
from recipe_scrapers._factory import SchemaScraperFactory
from recipe_scrapers._schemaorg import SchemaOrg
@@ -26,17 +27,17 @@ def text_scraper(text, url=None):
    class TextScraper(scraper_class):
        def __init__(
            self,
            page_data,
            url=None
            html=None,
            url=None,
        ):
            self.wild_mode = False
            self.meta_http_equiv = False
            self.soup = BeautifulSoup(page_data, "html.parser")
            self.soup = BeautifulSoup(html, "html.parser")
            self.url = url
            self.recipe = None
            try:
                self.schema = SchemaOrg(page_data)
                self.schema = SchemaOrg(html)
            except (JSONDecodeError, AttributeError):
                pass

    return TextScraper(text, url)
    return TextScraper(url=url, html=text)
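A short usage sketch of the patched text_scraper(), mirroring how the CookBookApp importer below calls it (assumes `raw_html` holds the page source and `request` is the current Django request):

    scrape = text_scraper(raw_html, url='https://example.org/recipe')
    recipe_json = get_from_scraper(scrape, request)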
@@ -22,10 +22,25 @@ class IngredientObject(object):
|
||||
else:
|
||||
self.amount = f"<scalable-number v-bind:number='{bleach.clean(str(ingredient.amount))}' v-bind:factor='ingredient_factor'></scalable-number>"
|
||||
if ingredient.unit:
|
||||
self.unit = bleach.clean(str(ingredient.unit))
|
||||
if ingredient.unit.plural_name in (None, ""):
|
||||
self.unit = bleach.clean(str(ingredient.unit))
|
||||
else:
|
||||
if ingredient.always_use_plural_unit or ingredient.amount > 1 and not ingredient.no_amount:
|
||||
self.unit = bleach.clean(ingredient.unit.plural_name)
|
||||
else:
|
||||
self.unit = bleach.clean(str(ingredient.unit))
|
||||
else:
|
||||
self.unit = ""
|
||||
self.food = bleach.clean(str(ingredient.food))
|
||||
if ingredient.food:
|
||||
if ingredient.food.plural_name in (None, ""):
|
||||
self.food = bleach.clean(str(ingredient.food))
|
||||
else:
|
||||
if ingredient.always_use_plural_food or ingredient.amount > 1 and not ingredient.no_amount:
|
||||
self.food = bleach.clean(str(ingredient.food.plural_name))
|
||||
else:
|
||||
self.food = bleach.clean(str(ingredient.food))
|
||||
else:
|
||||
self.food = ""
|
||||
self.note = bleach.clean(str(ingredient.note))
|
||||
|
||||
def __str__(self):
|
||||
|
||||
@@ -6,11 +6,13 @@ from gettext import gettext as _
|
||||
from io import BytesIO
|
||||
|
||||
import requests
|
||||
import validators
|
||||
import yaml
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_html_import import get_recipe_from_source
|
||||
from cookbook.helper.recipe_url_import import iso_duration_to_minutes
|
||||
from cookbook.helper.recipe_url_import import (get_from_scraper, get_images_from_soup,
|
||||
iso_duration_to_minutes)
|
||||
from cookbook.helper.scrapers.scrapers import text_scraper
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
@@ -23,7 +25,10 @@ class CookBookApp(Integration):
|
||||
def get_recipe_from_file(self, file):
|
||||
recipe_html = file.getvalue().decode("utf-8")
|
||||
|
||||
recipe_json, recipe_tree, html_data, images = get_recipe_from_source(recipe_html, 'CookBookApp', self.request)
|
||||
# recipe_json, recipe_tree, html_data, images = get_recipe_from_source(recipe_html, 'CookBookApp', self.request)
|
||||
scrape = text_scraper(text=recipe_html)
|
||||
recipe_json = get_from_scraper(scrape, self.request)
|
||||
images = list(dict.fromkeys(get_images_from_soup(scrape.soup, None)))
|
||||
|
||||
recipe = Recipe.objects.create(
|
||||
name=recipe_json['name'].strip(),
|
||||
@@ -41,7 +46,8 @@ class CookBookApp(Integration):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
step = Step.objects.create(instruction=recipe_json['recipeInstructions'], space=self.request.space, )
|
||||
# assuming import files only contain a single step
step = Step.objects.create(instruction=recipe_json['steps'][0]['instruction'], space=self.request.space, )
|
||||
|
||||
if 'nutrition' in recipe_json:
|
||||
step.instruction = step.instruction + '\n\n' + recipe_json['nutrition']
|
||||
@@ -50,17 +56,21 @@ class CookBookApp(Integration):
|
||||
recipe.steps.add(step)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in recipe_json['recipeIngredient']:
|
||||
f = ingredient_parser.get_food(ingredient['ingredient']['text'])
|
||||
u = ingredient_parser.get_unit(ingredient['unit']['text'])
|
||||
for ingredient in recipe_json['steps'][0]['ingredients']:
|
||||
f = ingredient_parser.get_food(ingredient['food']['name'])
|
||||
u = None
|
||||
if unit := ingredient.get('unit', None):
|
||||
u = ingredient_parser.get_unit(unit.get('name', None))
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=ingredient['amount'], note=ingredient['note'], space=self.request.space,
|
||||
food=f, unit=u, amount=ingredient.get('amount', None), note=ingredient.get('note', None), original_text=ingredient.get('original_text', None), space=self.request.space,
|
||||
))
|
||||
|
||||
if len(images) > 0:
|
||||
try:
|
||||
response = requests.get(images[0])
|
||||
self.import_recipe_image(recipe, BytesIO(response.content))
|
||||
url = images[0]
|
||||
if validators.url(url, public=True):
|
||||
response = requests.get(url)
|
||||
self.import_recipe_image(recipe, BytesIO(response.content))
|
||||
except Exception as e:
|
||||
print('failed to import image ', str(e))
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ from io import BytesIO
|
||||
from gettext import gettext as _
|
||||
|
||||
import requests
|
||||
import validators
|
||||
from lxml import etree
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
@@ -28,43 +29,53 @@ class Cookmate(Integration):
|
||||
name=recipe_xml.find('title').text.strip(),
|
||||
created_by=self.request.user, internal=True, space=self.request.space)
|
||||
|
||||
if recipe_xml.find('preptime') is not None:
|
||||
if recipe_xml.find('preptime') is not None and recipe_xml.find('preptime').text is not None:
|
||||
recipe.working_time = parse_time(recipe_xml.find('preptime').text.strip())
|
||||
|
||||
if recipe_xml.find('cooktime') is not None:
|
||||
if recipe_xml.find('cooktime') is not None and recipe_xml.find('cooktime').text is not None:
|
||||
recipe.waiting_time = parse_time(recipe_xml.find('cooktime').text.strip())
|
||||
|
||||
if recipe_xml.find('quantity') is not None:
|
||||
if recipe_xml.find('quantity') is not None and recipe_xml.find('quantity').text is not None:
|
||||
recipe.servings = parse_servings(recipe_xml.find('quantity').text.strip())
|
||||
recipe.servings_text = parse_servings_text(recipe_xml.find('quantity').text.strip())
|
||||
|
||||
if recipe_xml.find('url') is not None:
|
||||
if recipe_xml.find('url') is not None and recipe_xml.find('url').text is not None:
|
||||
recipe.source_url = recipe_xml.find('url').text.strip()
|
||||
|
||||
if recipe_xml.find('description') is not None: # description is a list of <li>'s with text
|
||||
if len(recipe_xml.find('description')) > 0:
|
||||
recipe.description = recipe_xml.find('description')[0].text[:512]
|
||||
|
||||
for step in recipe_xml.find('recipetext').getchildren():
|
||||
step = Step.objects.create(
|
||||
instruction=step.text.strip(), space=self.request.space,
|
||||
)
|
||||
recipe.steps.add(step)
|
||||
if recipe_text := recipe_xml.find('recipetext'):
|
||||
for step in recipe_text.getchildren():
|
||||
if step.text:
|
||||
step = Step.objects.create(
|
||||
instruction=step.text.strip(), space=self.request.space,
|
||||
)
|
||||
recipe.steps.add(step)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
|
||||
for ingredient in recipe_xml.find('ingredient').getchildren():
|
||||
if ingredient.text.strip() != '':
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient.text.strip())
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
recipe.steps.first().ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient.text.strip(), space=self.request.space,
|
||||
))
|
||||
if recipe_ingredients := recipe_xml.find('ingredient'):
|
||||
ingredient_step = recipe.steps.first()
|
||||
if ingredient_step is None:
|
||||
ingredient_step = Step.objects.create(space=self.request.space, instruction='')
|
||||
|
||||
for ingredient in recipe_ingredients.getchildren():
|
||||
if ingredient.text:
|
||||
if ingredient.text.strip() != '':
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient.text.strip())
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
ingredient_step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient.text.strip(), space=self.request.space,
|
||||
))
|
||||
|
||||
if recipe_xml.find('imageurl') is not None:
|
||||
try:
|
||||
response = requests.get(recipe_xml.find('imageurl').text.strip())
|
||||
url = recipe_xml.find('imageurl').text.strip()
|
||||
if validators.url(url, public=True):
|
||||
response = requests.get(url)
|
||||
self.import_recipe_image(recipe, BytesIO(response.content))
|
||||
except Exception as e:
|
||||
print('failed to import image ', str(e))
|
||||
|
||||
@@ -2,11 +2,10 @@ import re
|
||||
from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
from bs4 import BeautifulSoup, Tag
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_html_import import get_recipe_from_source
|
||||
from cookbook.helper.recipe_url_import import iso_duration_to_minutes, parse_servings
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
@@ -22,18 +21,21 @@ class CopyMeThat(Integration):
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
# 'file' comes in as a BeautifulSoup object
recipe = Recipe.objects.create(name=file.find("div", {"id": "name"}).text.strip(), created_by=self.request.user, internal=True, space=self.request.space, )
|
||||
try:
|
||||
source = file.find("a", {"id": "original_link"}).text
|
||||
except AttributeError:
|
||||
source = None
|
||||
|
||||
recipe = Recipe.objects.create(name=file.find("div", {"id": "name"}).text.strip()[:128], source_url=source, created_by=self.request.user, internal=True, space=self.request.space, )
|
||||
|
||||
for category in file.find_all("span", {"class": "recipeCategory"}):
|
||||
keyword, created = Keyword.objects.get_or_create(name=category.text, space=self.request.space)
|
||||
recipe.keywords.add(keyword)
|
||||
|
||||
|
||||
try:
|
||||
recipe.servings = parse_servings(file.find("a", {"id": "recipeYield"}).text.strip())
|
||||
recipe.working_time = iso_duration_to_minutes(file.find("span", {"meta": "prepTime"}).text.strip())
|
||||
recipe.waiting_time = iso_duration_to_minutes(file.find("span", {"meta": "cookTime"}).text.strip())
|
||||
recipe.description = (file.find("div ", {"id": "description"}).text.strip())[:512]
|
||||
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
@@ -43,36 +45,65 @@ class CopyMeThat(Integration):
except AttributeError:
    pass

step = Step.objects.create(instruction='', space=self.request.space, )

ingredient_parser = IngredientParser(self.request, True)
for ingredient in file.find_all("li", {"class": "recipeIngredient"}):
    if ingredient.text == "":
        continue
    amount, unit, food, note = ingredient_parser.parse(ingredient.text.strip())
    f = ingredient_parser.get_food(food)
    u = ingredient_parser.get_unit(unit)
    step.ingredients.add(Ingredient.objects.create(
        food=f, unit=u, amount=amount, note=note, original_text=ingredient.text.strip(), space=self.request.space,
    ))

for s in file.find_all("li", {"class": "instruction"}):
    if s.text == "":
        continue
    step.instruction += s.text.strip() + ' \n\n'

for s in file.find_all("li", {"class": "recipeNote"}):
    if s.text == "":
        continue
    step.instruction += s.text.strip() + ' \n\n'

try:
    if file.find("a", {"id": "original_link"}).text != '':
        step.instruction += "\n\n" + _("Imported from") + ": " + file.find("a", {"id": "original_link"}).text
        step.save()
    if len(file.find("span", {"id": "made_this"}).text.strip()) > 0:
        recipe.keywords.add(Keyword.objects.get_or_create(space=self.request.space, name=_('I made this'))[0])
except AttributeError:
    pass

step = Step.objects.create(instruction='', space=self.request.space, )

ingredient_parser = IngredientParser(self.request, True)

ingredients = file.find("ul", {"id": "recipeIngredients"})
if isinstance(ingredients, Tag):
    for ingredient in ingredients.children:
        if not isinstance(ingredient, Tag) or not ingredient.text.strip() or "recipeIngredient_spacer" in ingredient['class']:
            continue
        if any(x in ingredient['class'] for x in ["recipeIngredient_subheader", "recipeIngredient_note"]):
            step.ingredients.add(Ingredient.objects.create(is_header=True, note=ingredient.text.strip()[:256], original_text=ingredient.text.strip(), space=self.request.space, ))
        else:
            amount, unit, food, note = ingredient_parser.parse(ingredient.text.strip())
            f = ingredient_parser.get_food(food)
            u = ingredient_parser.get_unit(unit)
            step.ingredients.add(Ingredient.objects.create(food=f, unit=u, amount=amount, note=note, original_text=ingredient.text.strip(), space=self.request.space, ))

instructions = file.find("ol", {"id": "recipeInstructions"})
if isinstance(instructions, Tag):
    for instruction in instructions.children:
        if not isinstance(instruction, Tag) or instruction.text == "":
            continue
        if "instruction_subheader" in instruction['class']:
            if step.instruction:
                step.save()
                recipe.steps.add(step)
                step = Step.objects.create(instruction='', space=self.request.space, )

            step.name = instruction.text.strip()[:128]
        else:
            step.instruction += instruction.text.strip() + ' \n\n'

notes = file.find_all("li", {"class": "recipeNote"})
if notes:
    step.instruction += '*Notes:* \n\n'

    for n in notes:
        if n.text == "":
            continue
        step.instruction += '*' + n.text.strip() + '* \n\n'

description = ''
try:
    description = file.find("div", {"id": "description"}).text.strip()
except AttributeError:
    pass
if len(description) <= 512:
    recipe.description = description
else:
    recipe.description = description[:480] + ' ... (full description below)'
    step.instruction += '*Description:* \n\n*' + description + '* \n\n'

step.save()
recipe.steps.add(step)

# import the Primary recipe image that is stored in the Zip
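The new instruction loop above starts a fresh Step whenever a subheader list item is encountered, so subheaders become step names instead of being folded into one long instruction. A simplified standalone sketch of that grouping (plain (class, text) tuples stand in for the BeautifulSoup tags; names are illustrative):

def group_steps(items):
    # items: (css_class, text) pairs; returns a list of {'name', 'instruction'} dicts
    steps, current = [], {'name': '', 'instruction': ''}
    for css_class, text in items:
        if not text.strip():
            continue
        if css_class == 'instruction_subheader':
            if current['instruction']:
                steps.append(current)
                current = {'name': '', 'instruction': ''}
            current['name'] = text.strip()[:128]
        else:
            current['instruction'] += text.strip() + ' \n\n'
    steps.append(current)
    return steps

print(group_steps([('instruction_subheader', 'Dough'), ('instruction', 'Mix flour and water.'),
                   ('instruction_subheader', 'Filling'), ('instruction', 'Cook the onions.')]))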
@@ -1,16 +1,12 @@
import time
import traceback
import datetime
import json
import traceback
import uuid
from io import BytesIO, StringIO
from io import BytesIO
from zipfile import BadZipFile, ZipFile

import lxml
from django.core.cache import cache
import datetime

from bs4 import Tag
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist
from django.core.files import File
from django.db import IntegrityError
@@ -20,8 +16,7 @@ from django.utils.translation import gettext as _
from django_scopes import scope
from lxml import etree

from cookbook.forms import ImportExportBase
from cookbook.helper.image_processing import get_filetype, handle_image
from cookbook.helper.image_processing import handle_image
from cookbook.models import Keyword, Recipe
from recipes.settings import DEBUG
from recipes.settings import EXPORT_FILE_CACHE_DURATION
@@ -43,7 +38,7 @@ class Integration:
        self.export_type = export_type
        self.ignored_recipes = []

        description = f'Imported by {request.user.get_user_name()} at {date_format(datetime.datetime.now(), "DATETIME_FORMAT")}. Type: {export_type}'
        description = f'Imported by {request.user.get_user_display_name()} at {date_format(datetime.datetime.now(), "DATETIME_FORMAT")}. Type: {export_type}'
        icon = '📥'

        try:
@@ -169,7 +164,7 @@ class Integration:

                for z in file_list:
                    try:
                        if not hasattr(z, 'filename'):
                        if not hasattr(z, 'filename') or type(z) == Tag:
                            recipe = self.get_recipe_from_file(z)
                        else:
                            recipe = self.get_recipe_from_file(BytesIO(import_zip.read(z.filename)))
@@ -182,7 +177,7 @@ class Integration:
                        traceback.print_exc()
                        self.handle_exception(e, log=il, message=f'-------------------- \nERROR \n{e}\n--------------------\n')
                import_zip.close()
            elif '.json' in f['name'] or '.txt' in f['name'] or '.mmf' in f['name'] or '.rk' in f['name'] or '.melarecipe' in f['name']:
            elif '.json' in f['name'] or '.xml' in f['name'] or '.txt' in f['name'] or '.mmf' in f['name'] or '.rk' in f['name'] or '.melarecipe' in f['name']:
                data_list = self.split_recipe_file(f['file'])
                il.total_recipes += len(data_list)
                for d in data_list:
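A note on the hasattr/Tag check above: entries produced by split_recipe_file() for plain-text or XML uploads are not zip members, so they carry no filename attribute, and XML splitting can yield parsed markup elements. A minimal standalone sketch of that dispatch, with hypothetical names and bs4 assumed available (isinstance is used here as a rough equivalent of the type check in the diff):

from io import BytesIO
from bs4 import Tag

def open_entry(entry, import_zip=None):
    # Parsed markup elements and other non-zip entries are passed through
    # unchanged; zip members are read into an in-memory buffer first.
    if not hasattr(entry, 'filename') or isinstance(entry, Tag):
        return entry
    return BytesIO(import_zip.read(entry.filename))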
@@ -1,13 +1,14 @@
import json
import re
from io import BytesIO
from io import BytesIO, StringIO
from zipfile import ZipFile
from PIL import Image

from cookbook.helper.image_processing import get_filetype
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.recipe_url_import import iso_duration_to_minutes
from cookbook.integration.integration import Integration
from cookbook.models import Ingredient, Keyword, Recipe, Step
from cookbook.models import Ingredient, Keyword, Recipe, Step, NutritionInformation


class NextcloudCookbook(Integration):
@@ -70,12 +71,21 @@ class NextcloudCookbook(Integration):
            recipe.steps.add(step)

            if 'nutrition' in recipe_json:
                nutrition = {}
                try:
                    recipe.nutrition.calories = recipe_json['nutrition']['calories'].replace(' kcal', '').replace(' ', '')
                    recipe.nutrition.proteins = recipe_json['nutrition']['calories'].replace(' g', '').replace(',', '.').replace(' ', '')
                    recipe.nutrition.fats = recipe_json['nutrition']['calories'].replace(' g', '').replace(',', '.').replace(' ', '')
                    recipe.nutrition.carbohydrates = recipe_json['nutrition']['calories'].replace(' g', '').replace(',', '.').replace(' ', '')
                except Exception:
                    if 'calories' in recipe_json['nutrition']:
                        nutrition['calories'] = int(re.search(r'\d+', recipe_json['nutrition']['calories']).group())
                    if 'proteinContent' in recipe_json['nutrition']:
                        nutrition['proteins'] = int(re.search(r'\d+', recipe_json['nutrition']['proteinContent']).group())
                    if 'fatContent' in recipe_json['nutrition']:
                        nutrition['fats'] = int(re.search(r'\d+', recipe_json['nutrition']['fatContent']).group())
                    if 'carbohydrateContent' in recipe_json['nutrition']:
                        nutrition['carbohydrates'] = int(re.search(r'\d+', recipe_json['nutrition']['carbohydrateContent']).group())

                    if nutrition != {}:
                        recipe.nutrition = NutritionInformation.objects.create(**nutrition, space=self.request.space)
                        recipe.save()
                except Exception as e:
                    pass

            for f in self.files:
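The regex-based parsing above keeps only the first integer found in each nutrition string; a quick standalone illustration of what that extraction yields (sample values are made up):

import re

# '450 kcal' -> 450, '7,5 g' -> 7 (anything after the first integer is dropped)
for raw in ('450 kcal', '7,5 g', '32 g'):
    print(raw, '->', int(re.search(r'\d+', raw).group()))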
@@ -87,5 +97,92 @@ class NextcloudCookbook(Integration):
            return recipe

    def formatTime(self, min):
        h = min//60
        m = min % 60
        return f'PT{h}H{m}M0S'

    def get_file_from_recipe(self, recipe):
        raise NotImplementedError('Method not implemented in storage integration')

        export = {}
        export['name'] = recipe.name
        export['description'] = recipe.description
        export['url'] = recipe.source_url
        export['prepTime'] = self.formatTime(recipe.working_time)
        export['cookTime'] = self.formatTime(recipe.waiting_time)
        export['totalTime'] = self.formatTime(recipe.working_time+recipe.waiting_time)
        export['recipeYield'] = recipe.servings
        export['image'] = f'/Recipes/{recipe.name}/full.jpg'
        export['imageUrl'] = f'/Recipes/{recipe.name}/full.jpg'

        recipeKeyword = []
        for k in recipe.keywords.all():
            recipeKeyword.append(k.name)

        export['keywords'] = recipeKeyword

        recipeInstructions = []
        recipeIngredient = []
        for s in recipe.steps.all():
            recipeInstructions.append(s.instruction)

            for i in s.ingredients.all():
                recipeIngredient.append(f'{float(i.amount)} {i.unit} {i.food}')

        export['recipeIngredient'] = recipeIngredient
        export['recipeInstructions'] = recipeInstructions

        return "recipe.json", json.dumps(export)

    def get_files_from_recipes(self, recipes, el, cookie):
        export_zip_stream = BytesIO()
        export_zip_obj = ZipFile(export_zip_stream, 'w')

        for recipe in recipes:
            if recipe.internal and recipe.space == self.request.space:

                recipe_stream = StringIO()
                filename, data = self.get_file_from_recipe(recipe)
                recipe_stream.write(data)
                export_zip_obj.writestr(f'{recipe.name}/{filename}', recipe_stream.getvalue())
                recipe_stream.close()

                try:
                    imageByte = recipe.image.file.read()
                    export_zip_obj.writestr(f'{recipe.name}/full.jpg', self.getJPEG(imageByte))
                    export_zip_obj.writestr(f'{recipe.name}/thumb.jpg', self.getThumb(171, imageByte))
                    export_zip_obj.writestr(f'{recipe.name}/thumb16.jpg', self.getThumb(16, imageByte))
                except ValueError:
                    pass

                el.exported_recipes += 1
                el.msg += self.get_recipe_processed_msg(recipe)
                el.save()

        export_zip_obj.close()

        return [[ self.get_export_file_name(), export_zip_stream.getvalue() ]]

    def getJPEG(self, imageByte):
        image = Image.open(BytesIO(imageByte))
        image = image.convert('RGB')

        bytes = BytesIO()
        image.save(bytes, "JPEG")
        return bytes.getvalue()

    def getThumb(self, size, imageByte):
        image = Image.open(BytesIO(imageByte))

        w, h = image.size
        m = min(w, h)

        image = image.crop(((w-m)//2, (h-m)//2, (w+m)//2, (h+m)//2))
        image = image.resize([size, size], Image.Resampling.LANCZOS)
        image = image.convert('RGB')

        bytes = BytesIO()
        image.save(bytes, "JPEG")
        return bytes.getvalue()
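getThumb() above center-crops to a square before resizing; the crop-box arithmetic can be sanity-checked on its own without Pillow (numbers below are illustrative):

# For a 400x300 image the square side is 300 and the box is centred
# horizontally: ((400-300)//2, 0, (400+300)//2, 300) == (50, 0, 350, 300).
def crop_box(w, h):
    m = min(w, h)
    return ((w - m) // 2, (h - m) // 2, (w + m) // 2, (h + m) // 2)

print(crop_box(400, 300))  # (50, 0, 350, 300)
print(crop_box(300, 500))  # (0, 100, 300, 400)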
@@ -27,7 +27,7 @@ class Paprika(Integration):
        recipe.description = '' if len(recipe_json['description'].strip()) > 500 else recipe_json['description'].strip()

        try:
            if 'servings' in recipe_json['servings']:
            if 'servings' in recipe_json:
                recipe.servings = parse_servings(recipe_json['servings'])
                recipe.servings_text = parse_servings_text(recipe_json['servings'])
@@ -49,6 +49,7 @@ class Plantoeat(Integration):
        )

        if tags:
            tags = tags.replace('^',',')
            for k in tags.split(','):
                keyword, created = Keyword.objects.get_or_create(name=k.strip(), space=self.request.space)
                recipe.keywords.add(keyword)
@@ -78,7 +79,11 @@ class Plantoeat(Integration):
        current_recipe = ''

        for fl in file.readlines():
            line = fl.decode("windows-1250")
            try:
                line = fl.decode("utf-8")
            except UnicodeDecodeError:
                line = fl.decode("windows-1250")

            if line.startswith('--------------'):
                if current_recipe != '':
                    recipe_list.append(current_recipe)
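The decode change above prefers UTF-8 and only falls back to the legacy Windows-1250 codec when decoding fails; a tiny standalone check of that fallback (sample string is illustrative):

def decode_line(raw: bytes) -> str:
    # Try UTF-8 first, fall back to Windows-1250 for legacy exports.
    try:
        return raw.decode('utf-8')
    except UnicodeDecodeError:
        return raw.decode('windows-1250')

print(decode_line('Zelňačka'.encode('windows-1250')))  # Zelňačka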
@@ -5,6 +5,7 @@ from io import BytesIO
from zipfile import ZipFile

import requests
import validators

from django.utils.translation import gettext as _
from cookbook.helper.image_processing import get_filetype
@@ -60,7 +61,7 @@ class RecetteTek(Integration):
        ingredient_parser = IngredientParser(self.request, True)
        for ingredient in file['ingredients'].split('\n'):
            if len(ingredient.strip()) > 0:
                amount, unit, food, note = ingredient_parser.parse(food)
                amount, unit, food, note = ingredient_parser.parse(ingredient.strip())
                f = ingredient_parser.get_food(ingredient)
                u = ingredient_parser.get_unit(unit)
                step.ingredients.add(Ingredient.objects.create(
@@ -123,11 +124,13 @@ class RecetteTek(Integration):
                    self.import_recipe_image(recipe, BytesIO(import_zip.read(image_file_name)), filetype=get_filetype(image_file_name))
                else:
                    if file['originalPicture'] != '':
                        response = requests.get(file['originalPicture'])
                        if imghdr.what(BytesIO(response.content)) is not None:
                            self.import_recipe_image(recipe, BytesIO(response.content), filetype=get_filetype(file['originalPicture']))
                        else:
                            raise Exception("Original image failed to download.")
                        url = file['originalPicture']
                        if validators.url(url, public=True):
                            response = requests.get(url)
                            if imghdr.what(BytesIO(response.content)) is not None:
                                self.import_recipe_image(recipe, BytesIO(response.content), filetype=get_filetype(file['originalPicture']))
                            else:
                                raise Exception("Original image failed to download.")
            except Exception as e:
                print(recipe.name, ': failed to import image ', str(e))
@@ -41,7 +41,7 @@ class RecipeKeeper(Integration):
        except AttributeError:
            pass

        step = Step.objects.create(instruction='', space=self.request.space,)
        step = Step.objects.create(instruction='', space=self.request.space, )

        ingredient_parser = IngredientParser(self.request, True)
        for ingredient in file.find("div", {"itemprop": "recipeIngredients"}).findChildren("p"):
@@ -51,13 +51,20 @@ class RecipeKeeper(Integration):
            f = ingredient_parser.get_food(food)
            u = ingredient_parser.get_unit(unit)
            step.ingredients.add(Ingredient.objects.create(
                food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
                food=f, unit=u, amount=amount, note=note, original_text=str(ingredient).replace('<p>', '').replace('</p>', ''), space=self.request.space,
            ))

        for s in file.find("div", {"itemprop": "recipeDirections"}).find_all("p"):
            if s.text == "":
                continue
            step.instruction += s.text + ' \n'
        step.save()

        for s in file.find("div", {"itemprop": "recipeNotes"}).find_all("p"):
            if s.text == "":
                continue
            step.instruction += s.text + ' \n'
        step.save()

        if file.find("span", {"itemprop": "recipeSource"}).text != '':
            step.instruction += "\n\n" + _("Imported from") + ": " + file.find("span", {"itemprop": "recipeSource"}).text
@@ -2,6 +2,7 @@ import json
from io import BytesIO

import requests
import validators

from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.integration.integration import Integration
@@ -51,8 +52,10 @@ class RecipeSage(Integration):
        if len(file['image']) > 0:
            try:
                response = requests.get(file['image'][0])
                self.import_recipe_image(recipe, BytesIO(response.content))
                url = file['image'][0]
                if validators.url(url, public=True):
                    response = requests.get(url)
                    self.import_recipe_image(recipe, BytesIO(response.content))
            except Exception as e:
                print('failed to import image ', str(e))
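Both the RecetteTek and RecipeSage importers above now gate remote image downloads behind validators.url(..., public=True), which is truthy only for well-formed URLs and, with public=True, is meant to reject private or reserved hosts (exact behaviour depends on the installed validators version). A quick check of the truthiness the importers rely on:

import validators

# A failed check returns a falsy result rather than raising, so it can be
# used directly in an if-statement as the importers do above.
for candidate in ('https://example.com/pic.jpg', 'not a url'):
    print(candidate, '->', bool(validators.url(candidate, public=True)))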
cookbook/integration/rezeptsuitede.py (new file, 72 lines)
@@ -0,0 +1,72 @@
import base64
from io import BytesIO
from xml import etree

from lxml import etree

from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.recipe_url_import import parse_time, parse_servings, parse_servings_text
from cookbook.integration.integration import Integration
from cookbook.models import Ingredient, Recipe, Step, Keyword


class Rezeptsuitede(Integration):

    def split_recipe_file(self, file):
        return etree.parse(file).getroot().getchildren()

    def get_recipe_from_file(self, file):
        recipe_xml = file

        recipe = Recipe.objects.create(
            name=recipe_xml.find('head').attrib['title'].strip(),
            created_by=self.request.user, internal=True, space=self.request.space)

        if recipe_xml.find('head').attrib['servingtype']:
            recipe.servings = parse_servings(recipe_xml.find('head').attrib['servingtype'].strip())
            recipe.servings_text = parse_servings_text(recipe_xml.find('head').attrib['servingtype'].strip())

        if recipe_xml.find('remark') is not None:  # description is a list of <li>'s with text
            if recipe_xml.find('remark').find('line') is not None:
                recipe.description = recipe_xml.find('remark').find('line').text[:512]

        for prep in recipe_xml.findall('preparation'):
            try:
                if prep.find('step').text:
                    step = Step.objects.create(
                        instruction=prep.find('step').text.strip(), space=self.request.space,
                    )
                    recipe.steps.add(step)
            except Exception:
                pass

        ingredient_parser = IngredientParser(self.request, True)

        if recipe_xml.find('part').find('ingredient') is not None:
            ingredient_step = recipe.steps.first()
            if ingredient_step is None:
                ingredient_step = Step.objects.create(space=self.request.space, instruction='')

            for ingredient in recipe_xml.find('part').findall('ingredient'):
                f = ingredient_parser.get_food(ingredient.attrib['item'])
                u = ingredient_parser.get_unit(ingredient.attrib['unit'])
                amount, unit, note = ingredient_parser.parse_amount(ingredient.attrib['qty'])
                ingredient_step.ingredients.add(Ingredient.objects.create(food=f, unit=u, amount=amount, space=self.request.space, ))

        try:
            k, created = Keyword.objects.get_or_create(name=recipe_xml.find('head').find('cat').text.strip(), space=self.request.space)
            recipe.keywords.add(k)
        except Exception as e:
            pass

        recipe.save()

        try:
            self.import_recipe_image(recipe, BytesIO(base64.b64decode(recipe_xml.find('head').find('picbin').text)), filetype='.jpeg')
        except:
            pass

        return recipe

    def get_file_from_recipe(self, recipe):
        raise NotImplementedError('Method not implemented in storage integration')
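For orientation, the element and attribute names read above (head with title/servingtype, cat, picbin, remark/line, preparation/step, part/ingredient with item/unit/qty) suggest input shaped roughly like the fragment below; this is an assumed minimal example, not an official Rezeptsuite schema:

from lxml import etree

# Hypothetical minimal document matching the find()/attrib calls above.
xml = '''<recipe>
  <head title="Gulasch" servingtype="4 Portionen"><cat>Eintopf</cat></head>
  <remark><line>Deftiger Eintopf.</line></remark>
  <preparation><step>Fleisch anbraten.</step></preparation>
  <part><ingredient item="Rindfleisch" unit="g" qty="500"/></part>
</recipe>'''

root = etree.fromstring(xml)
print(root.find('head').attrib['title'], root.find('part').find('ingredient').attrib['qty'])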
BIN cookbook/locale/ar/LC_MESSAGES/django.mo (new file, binary not shown)
cookbook/locale/ar/LC_MESSAGES/django.po (new file, 2638 lines, diff too large to show)
BIN cookbook/locale/bg/LC_MESSAGES/django.mo (new file, binary not shown)
cookbook/locale/bg/LC_MESSAGES/django.po (new file, 3018 lines, diff too large to show)
(further locale .mo/.po changes: binaries and oversized diffs not shown)
@@ -6,20 +6,22 @@
# Translators:
# Pavel Solař <pavelsolar86@gmail.com>, 2021
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2021-02-09 18:01+0100\n"
"PO-Revision-Date: 2020-06-02 19:28+0000\n"
"Last-Translator: Pavel Solař <pavelsolar86@gmail.com>, 2021\n"
"Language-Team: Czech (https://www.transifex.com/django-recipes/teams/110507/cs/)\n"
"PO-Revision-Date: 2023-01-08 17:55+0000\n"
"Last-Translator: Joachim Weber <joachim.weber@gmx.de>\n"
"Language-Team: Czech <http://translate.tandoor.dev/projects/tandoor/"
"recipes-backend/cs/>\n"
"Language: cs\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: cs\n"
"Plural-Forms: nplurals=4; plural=(n == 1 && n % 1 == 0) ? 0 : (n >= 2 && n <= 4 && n % 1 == 0) ? 1: (n % 1 != 0 ) ? 2 : 3;\n"
"Plural-Forms: nplurals=4; plural=(n == 1 && n % 1 == 0) ? 0 : (n >= 2 && n "
"<= 4 && n % 1 == 0) ? 1: (n % 1 != 0 ) ? 2 : 3;\n"
"X-Generator: Weblate 4.15\n"

#: .\cookbook\filters.py:22 .\cookbook\templates\base.html:87
#: .\cookbook\templates\forms\edit_internal_recipe.html:219
@@ -173,7 +175,7 @@ msgstr "Potravina, která by měla být nahrazena."

#: .\cookbook\forms.py:198
msgid "Add your comment: "
msgstr "Přidat vlastní komentář:"
msgstr "Přidat vlastní komentář: "

#: .\cookbook\forms.py:229
msgid "Leave empty for dropbox and enter app password for nextcloud."
BIN cookbook/locale/da/LC_MESSAGES/django.mo (new file, binary not shown)
cookbook/locale/da/LC_MESSAGES/django.po (new file, 2983 lines, diff too large to show)
BIN cookbook/locale/el/LC_MESSAGES/django.mo (new file, binary not shown)
cookbook/locale/el/LC_MESSAGES/django.po (new file, 2610 lines, diff too large to show)
(further locale .mo/.po changes: binaries and oversized diffs not shown)
@@ -11,8 +11,8 @@ msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2021-02-09 18:01+0100\n"
|
||||
"PO-Revision-Date: 2021-10-13 12:50+0000\n"
|
||||
"Last-Translator: Hrachya Kocharyan <hkocharyan@ctemplar.com>\n"
|
||||
"PO-Revision-Date: 2023-01-08 17:55+0000\n"
|
||||
"Last-Translator: Joachim Weber <joachim.weber@gmx.de>\n"
|
||||
"Language-Team: Armenian <http://translate.tandoor.dev/projects/tandoor/"
|
||||
"recipes-backend/hy/>\n"
|
||||
"Language: hy\n"
|
||||
@@ -20,7 +20,7 @@ msgstr ""
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
|
||||
"X-Generator: Weblate 4.8\n"
|
||||
"X-Generator: Weblate 4.15\n"
|
||||
|
||||
#: .\cookbook\filters.py:22 .\cookbook\templates\base.html:87
|
||||
#: .\cookbook\templates\forms\edit_internal_recipe.html:219
|
||||
@@ -410,7 +410,7 @@ msgstr "Դուրս գալ"
|
||||
|
||||
#: .\cookbook\templates\account\logout.html:11
|
||||
msgid "Are you sure you want to sign out?"
|
||||
msgstr "Համոզվա՞ծ եք, որ ցանկանում եք դուրս գալ:"
|
||||
msgstr "Համոզվա՞ծ եք, որ ցանկանում եք դուրս գալ՞"
|
||||
|
||||
#: .\cookbook\templates\account\password_reset.html:5
|
||||
#: .\cookbook\templates\account\password_reset_done.html:5
|
||||
|
||||
BIN cookbook/locale/id/LC_MESSAGES/django.mo (new file, binary not shown)
cookbook/locale/id/LC_MESSAGES/django.po (new file, 2647 lines, diff too large to show)
(further locale .mo/.po changes: binaries and oversized diffs not shown)
@@ -8,8 +8,8 @@ msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2021-09-13 22:40+0200\n"
"PO-Revision-Date: 2022-04-07 19:32+0000\n"
"Last-Translator: Artem Aksenov <artemmillerr@gmail.com>\n"
"PO-Revision-Date: 2022-11-30 19:09+0000\n"
"Last-Translator: Alex <kovsharoff@gmail.com>\n"
"Language-Team: Russian <http://translate.tandoor.dev/projects/tandoor/"
"recipes-backend/ru/>\n"
"Language: ru\n"
@@ -18,7 +18,7 @@ msgstr ""
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : n%10>=2 && n"
"%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;\n"
"X-Generator: Weblate 4.10.1\n"
"X-Generator: Weblate 4.14.1\n"

#: .\cookbook\filters.py:23 .\cookbook\templates\base.html:125
#: .\cookbook\templates\forms\ingredients.html:34
@@ -396,8 +396,9 @@ msgstr ""
#: .\cookbook\templates\include\log_cooking.html:16
#: .\cookbook\templates\url_import.html:224
#: .\cookbook\templates\url_import.html:455
#, fuzzy
msgid "Servings"
msgstr ""
msgstr "Порции"

#: .\cookbook\integration\safron.py:25
msgid "Waiting time"
@@ -468,7 +469,7 @@ msgstr ""

#: .\cookbook\models.py:198 .\cookbook\templates\base.html:90
msgid "Books"
msgstr ""
msgstr "Книги"

#: .\cookbook\models.py:206
msgid "Small"
(additional binary files and oversized .po diffs not shown)
cookbook/locale/tr/id/LC_MESSAGES/django.po (new file, 2621 lines, diff too large to show)
BIN cookbook/locale/uk/LC_MESSAGES/django.mo (new file, binary not shown)
cookbook/locale/uk/LC_MESSAGES/django.po (new file, 2626 lines, diff too large to show)
cookbook/management/commands/export.py (new file, 8 lines)
@@ -0,0 +1,8 @@
from django.core.management.commands.dumpdata import Command as DumpdataCommand
from django_scopes import scopes_disabled


class Command(DumpdataCommand):
    def handle(self, *args, **options):
        with scopes_disabled():
            return super().handle(*args, **options)
cookbook/management/commands/import.py (new file, 8 lines)
@@ -0,0 +1,8 @@
from django.core.management.commands.loaddata import Command as LoaddataCommand
from django_scopes import scopes_disabled


class Command(LoaddataCommand):
    def handle(self, *args, **options):
        with scopes_disabled():
            return super().handle(*args, **options)
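These two commands are thin wrappers around Django's dumpdata and loaddata with django-scopes disabled, so the usual dumpdata/loaddata arguments should apply. A possible round trip from Python, assuming a configured Django environment (app label and file name are illustrative):

from django.core.management import call_command

# Dump the cookbook app to JSON, then load it back in; both run with
# django-scopes disabled via the wrappers above.
with open('tandoor_export.json', 'w') as fh:
    call_command('export', 'cookbook', indent=2, stdout=fh)

call_command('import', 'tandoor_export.json')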
@@ -4,11 +4,12 @@ from django.db import migrations
def create_default_space(apps, schema_editor):
    Space = apps.get_model('cookbook', 'Space')
    Space.objects.create(
        name='Default',
        message=''
    )
    # Space = apps.get_model('cookbook', 'Space')
    # Space.objects.create(
    #     name='Default',
    #     message=''
    # )
    pass  # Beginning with the multi space tenancy version (~something around 1.3) a default space is no longer needed as the first user can create it after setup


class Migration(migrations.Migration):
@@ -9,8 +9,15 @@ from django.utils import translation
from django_scopes import scopes_disabled

from cookbook.managers import DICTIONARY
from cookbook.models import (Index, PermissionModelMixin, Recipe, Step, allSearchFields,
                             nameSearchField)
from cookbook.models import (Index, PermissionModelMixin, Recipe, Step, SearchFields)


def allSearchFields():
    return list(SearchFields.objects.values_list('id', flat=True))


def nameSearchField():
    return [SearchFields.objects.get(name='Name').id]


def set_default_search_vector(apps, schema_editor):
@@ -2,7 +2,12 @@
from django_scopes import scopes_disabled

from django.db import migrations, models
from cookbook.models import nameSearchField

from cookbook.models import SearchFields


def nameSearchField():
    return [SearchFields.objects.get(name='Name').id]


def add_default_trigram(apps, schema_editor):
cookbook/migrations/0174_alter_food_substitute_userspace.py (new file, 45 lines)
@@ -0,0 +1,45 @@
# Generated by Django 4.0.4 on 2022-05-31 14:10

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
from django_scopes import scopes_disabled


def migrate_space_permissions(apps, schema_editor):
    with scopes_disabled():
        UserPreference = apps.get_model('cookbook', 'UserPreference')
        UserSpace = apps.get_model('cookbook', 'UserSpace')

        for up in UserPreference.objects.exclude(space=None).all():
            us = UserSpace.objects.create(user=up.user, space=up.space, active=True)
            us.groups.set(up.user.groups.all())


class Migration(migrations.Migration):
    dependencies = [
        ('auth', '0012_alter_user_first_name_max_length'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('cookbook', '0173_recipe_source_url'),
    ]

    operations = [
        migrations.AlterField(
            model_name='food',
            name='substitute',
            field=models.ManyToManyField(blank=True, to='cookbook.food'),
        ),
        migrations.CreateModel(
            name='UserSpace',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('active', models.BooleanField(default=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('groups', models.ManyToManyField(to='auth.group')),
                ('space', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cookbook.space')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.RunPython(migrate_space_permissions)
    ]
cookbook/migrations/0175_remove_userpreference_space.py (new file, 17 lines)
@@ -0,0 +1,17 @@
# Generated by Django 4.0.4 on 2022-05-31 14:56

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('cookbook', '0174_alter_food_substitute_userspace'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='userpreference',
            name='space',
        ),
    ]
@@ -0,0 +1,28 @@
# Generated by Django 4.0.4 on 2022-06-14 14:48

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('cookbook', '0175_remove_userpreference_space'),
    ]

    operations = [
        migrations.AlterField(
            model_name='searchpreference',
            name='icontains',
            field=models.ManyToManyField(blank=True, related_name='icontains_fields', to='cookbook.searchfields'),
        ),
        migrations.AlterField(
            model_name='searchpreference',
            name='trigram',
            field=models.ManyToManyField(blank=True, related_name='trigram_fields', to='cookbook.searchfields'),
        ),
        migrations.AlterField(
            model_name='searchpreference',
            name='unaccent',
            field=models.ManyToManyField(blank=True, related_name='unaccent_fields', to='cookbook.searchfields'),
        ),
    ]
cookbook/migrations/0177_recipe_show_ingredient_overview.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Generated by Django 4.0.4 on 2022-06-26 10:26

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('cookbook', '0176_alter_searchpreference_icontains_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='recipe',
            name='show_ingredient_overview',
            field=models.BooleanField(default=True),
        ),
    ]
Some files were not shown because too many files have changed in this diff.