Mirror of https://github.com/TandoorRecipes/recipes.git, synced 2025-12-25 11:19:39 -05:00
Compare commits
957 Commits
@@ -3,6 +3,9 @@
DEBUG=0
SQL_DEBUG=0

# HTTP port to bind to
# TANDOOR_PORT=8080

# hosts the application can run under e.g. recipes.mydomain.com,cooking.mydomain.com,...
ALLOWED_HOSTS=*

@@ -65,6 +68,10 @@ SHOPPING_MIN_AUTOSYNC_INTERVAL=5
# when unset: 1 (true) - this is temporary until an appropriate amount of time has passed for everyone to migrate
GUNICORN_MEDIA=0

# GUNICORN SERVER RELATED SETTINGS (see https://docs.gunicorn.org/en/stable/design.html#how-many-workers for recommended settings)
# GUNICORN_WORKERS=1
# GUNICORN_THREADS=1

# S3 Media settings: store mediafiles in s3 or any compatible storage backend (e.g. minio)
# as long as S3_ACCESS_KEY is not set S3 features are disabled
# S3_ACCESS_KEY=
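
As a worked illustration of the GUNICORN_WORKERS / GUNICORN_THREADS hint above: the linked Gunicorn design docs suggest roughly (2 x CPU cores) + 1 workers as a starting point. A minimal sketch of that rule of thumb (illustrative only, not part of the template or the code base):

```python
# Rule-of-thumb worker count from the Gunicorn design docs: (2 x CPU cores) + 1.
# Purely illustrative; set the value you settle on via GUNICORN_WORKERS (and
# optionally GUNICORN_THREADS) in the env file above.
import multiprocessing

def suggested_workers() -> int:
    return 2 * multiprocessing.cpu_count() + 1

if __name__ == "__main__":
    print(f"GUNICORN_WORKERS={suggested_workers()}")
```
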
@@ -74,6 +81,7 @@ GUNICORN_MEDIA=0
# S3_QUERYSTRING_AUTH=1 # default true, set to 0 to serve media from a public bucket without signed urls
# S3_QUERYSTRING_EXPIRE=3600 # number of seconds querystring are valid for
# S3_ENDPOINT_URL= # when using a custom endpoint like minio
# S3_CUSTOM_DOMAIN= # when using a CDN/proxy to S3 (see https://github.com/TandoorRecipes/recipes/issues/1943)

# Email Settings, see https://docs.djangoproject.com/en/3.2/ref/settings/#email-host
# Required for email confirmation and password reset (automatically activates if host is set)
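
For orientation, here is a generic sketch of how S3_* variables like the ones above are commonly mapped onto django-storages settings; the mapping is an assumption for illustration, not Tandoor's actual settings.py:

```python
# Generic sketch: reading the S3_* env vars above into django-storages style
# settings. Assumed mapping for illustration only, not Tandoor's settings.py.
import os

AWS_ACCESS_KEY_ID = os.getenv("S3_ACCESS_KEY", "")
AWS_S3_ENDPOINT_URL = os.getenv("S3_ENDPOINT_URL") or None    # e.g. a MinIO endpoint
AWS_S3_CUSTOM_DOMAIN = os.getenv("S3_CUSTOM_DOMAIN") or None  # CDN/proxy in front of S3
AWS_QUERYSTRING_AUTH = os.getenv("S3_QUERYSTRING_AUTH", "1") == "1"
AWS_QUERYSTRING_EXPIRE = int(os.getenv("S3_QUERYSTRING_EXPIRE", "3600"))

# "as long as S3_ACCESS_KEY is not set S3 features are disabled"
S3_MEDIA_ENABLED = bool(AWS_ACCESS_KEY_ID)
```
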
@@ -83,8 +91,10 @@ GUNICORN_MEDIA=0
# EMAIL_HOST_PASSWORD=
# EMAIL_USE_TLS=0
# EMAIL_USE_SSL=0
# DEFAULT_FROM_EMAIL= # email sender address (default 'webmaster@localhost')
# ACCOUNT_EMAIL_SUBJECT_PREFIX= # prefix used for account related emails (default "[Tandoor Recipes] ")
# email sender address (default 'webmaster@localhost')
# DEFAULT_FROM_EMAIL=
# prefix used for account related emails (default "[Tandoor Recipes] ")
# ACCOUNT_EMAIL_SUBJECT_PREFIX=

# allow authentication via reverse proxy (e.g. authelia), leave off if you dont know what you are doing
# see docs for more information https://vabene1111.github.io/recipes/features/authentication/

8 .github/ISSUE_TEMPLATE/feature_request.yml (vendored)
@@ -29,11 +29,3 @@ body:
attributes:
label: "Additional context"
description: "Add any other context or screenshots about the feature request here."
- type: checkboxes
attributes:
label: "Contribute"
description: "Are you willing and able to help develop this feature?"
options:
- label: "Yes"
- label: "Partly"
- label: "No"

1 .github/workflows/ci.yml (vendored)
@@ -29,6 +29,7 @@ jobs:
run: yarn build
- name: Install Django dependencies
run: |
sudo apt-get -y update
sudo apt-get install -y libsasl2-dev python-dev libldap2-dev libssl-dev
python -m pip install --upgrade pip
pip install -r requirements.txt

48 .github/workflows/docker-publish-beta-raspi.yml (vendored, new file)
@@ -0,0 +1,48 @@
name: publish beta raspi image docker
on:
push:
branches:
- 'beta'
jobs:
build:
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
# Update Version number
- name: Update version file
uses: DamianReeves/write-file-action@v1.0
with:
path: recipes/version.py
contents: |
VERSION_NUMBER = 'beta'
BUILD_REF = '${{ github.sha }}'
write-mode: overwrite
# Build Vue frontend
- uses: actions/setup-node@v2
with:
node-version: '14'
- name: Install dependencies
working-directory: ./vue
run: yarn install
- name: Build dependencies
working-directory: ./vue
run: yarn build
# Build container
- name: Build and publish image
uses: ilteoood/docker_buildx@master
with:
publish: true
imageName: vabene1111/recipes
tag: beta-raspi
dockerFile: Dockerfile-raspi
platform: linux/arm/v7
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}
# Send discord notification
- name: Discord notification
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_BETA_WEBHOOK }}
uses: Ilshidur/action-discord@0.3.2
with:
args: '🚀 The BETA Image has been updated! 🥳'

5 .github/workflows/docker-publish-beta.yml (vendored)
@@ -35,8 +35,9 @@ jobs:
publish: true
imageName: vabene1111/recipes
tag: beta
dockerHubUser: ${{ secrets.DOCKER_USERNAME }}
dockerHubPassword: ${{ secrets.DOCKER_PASSWORD }}
platform: linux/amd64,linux/arm64
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}
# Send discord notification
- name: Discord notification
env:

45 .github/workflows/docker-publish-latest-raspi.yml (vendored, new file)
@@ -0,0 +1,45 @@
name: publish latest raspi image docker
on:
push:
tags:
- '*'

jobs:
build:
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: Get version number
id: get_version
run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}-raspi
# Update Version number
- name: Update version file
uses: DamianReeves/write-file-action@v1.0
with:
path: recipes/version.py
contents: |
VERSION_NUMBER = '${{ steps.get_version.outputs.VERSION }}-raspi'
BUILD_REF = '${{ github.sha }}'
write-mode: overwrite
# Build Vue frontend
- uses: actions/setup-node@v2
with:
node-version: '14'
- name: Install dependencies
working-directory: ./vue
run: yarn install
- name: Build dependencies
working-directory: ./vue
run: yarn build
# Build container
- name: Build and publish image
uses: ilteoood/docker_buildx@master
with:
publish: true
imageName: vabene1111/recipes
dockerFile: Dockerfile-raspi
platform: linux/arm/v7
tag: latest-raspi
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}

5 .github/workflows/docker-publish-latest.yml (vendored)
@@ -38,6 +38,7 @@ jobs:
with:
publish: true
imageName: vabene1111/recipes
platform: linux/amd64,linux/arm64
tag: latest
dockerHubUser: ${{ secrets.DOCKER_USERNAME }}
dockerHubPassword: ${{ secrets.DOCKER_PASSWORD }}
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}

47 .github/workflows/docker-publish-release-raspi.yml (vendored, new file)
@@ -0,0 +1,47 @@
name: publish tagged raspi release docker

on:
release:
types: [published]

jobs:
build:
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
name: Build image job
steps:
- name: Checkout master
uses: actions/checkout@master
- name: Get version number
id: get_version
run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}
# Update Version number
- name: Update version file
uses: DamianReeves/write-file-action@v1.0
with:
path: recipes/version.py
contents: |
VERSION_NUMBER = '${{ steps.get_version.outputs.VERSION }}'
BUILD_REF = '${{ github.sha }}'
write-mode: overwrite
# Build Vue frontend
- uses: actions/setup-node@v2
with:
node-version: '14'
- name: Install dependencies
working-directory: ./vue
run: yarn install
- name: Build dependencies
working-directory: ./vue
run: yarn build
# Build container
- name: Build and publish image
uses: ilteoood/docker_buildx@master
with:
publish: true
imageName: vabene1111/recipes
dockerFile: Dockerfile-raspi
platform: linux/arm/v7
tag: ${{ steps.get_version.outputs.VERSION }}-raspi
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}

5 .github/workflows/docker-publish-release.yml (vendored)
@@ -40,9 +40,10 @@ jobs:
with:
publish: true
imageName: vabene1111/recipes
platform: linux/amd64,linux/arm64
tag: ${{ steps.get_version.outputs.VERSION }}
dockerHubUser: ${{ secrets.DOCKER_USERNAME }}
dockerHubPassword: ${{ secrets.DOCKER_PASSWORD }}
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}
# Send discord notification
- name: Discord notification
env:

1 .github/workflows/docs.yml (vendored)
@@ -3,7 +3,6 @@ on:
push:
branches:
- master
- develop

jobs:
deploy:

1 .idea/dictionaries/vabene1111_PC.xml (generated)
@@ -6,6 +6,7 @@
<w>csrftoken</w>
<w>gunicorn</w>
<w>ical</w>
<w>invitelink</w>
<w>mealie</w>
<w>pepperplate</w>
<w>safron</w>

@@ -1,58 +1,83 @@
# Contributers

Many thanks to everyone who contributed to this project! If you add something or help out feel free to add yourself
to this list.

## Code/Features

Please have a look at the [list of pull requests](https://github.com/vabene1111/recipes/pulls) for
a complete list of contributions.
Below are some of the larger contributions made yet.


- @tourn provided the serving feature and **several** other improvements!
- @l0c4lh057 provided a much improved ingredient text parser in [#277](https://github.com/vabene1111/recipes/pull/277)
- @sebimarkgraf added nutritional information [#199](https://github.com/vabene1111/recipes/pull/199)
- @cazier added reverse proxy authentication [#88](https://github.com/vabene1111/recipes/pull/88)
- [vabene1111]
- [Kaibu]
- [smilerz]
- [MaxJa4] Docker builds and other improvements
- [tourn] provided the serving feature and **several** other improvements!
- [l0c4lh057] provided a much improved ingredient text parser in [#277](https://github.com/vabene1111/recipes/pull/277)
- [sebimarkgraf] added nutritional information [#199](https://github.com/vabene1111/recipes/pull/199)
- [cazier] added reverse proxy authentication [#88](https://github.com/vabene1111/recipes/pull/88)
- [murphy83] added support for IPv6 #1490
- [TheHaf] added custom serving size component #1411
- [lostlont] added LDAP support #960

## Translations

### Catalan
### Catalan

[Rubenix](https://www.transifex.com/user/profile/rubenix/)

### Dutch
[D0T1X](https://www.transifex.com/user/profile/D0T1X/)
[ikbenfrank](https://www.transifex.com/user/profile/ikbenfrank/)
[kampsj](https://www.transifex.com/user/profile/kampsj/)

[D0T1X](https://www.transifex.com/user/profile/D0T1X/)
[ikbenfrank](https://www.transifex.com/user/profile/ikbenfrank/)
[kampsj](https://www.transifex.com/user/profile/kampsj/)

### French
[jt117](https://www.transifex.com/user/profile/jt117/)
[nerdinator](https://www.transifex.com/user/profile/nerdinator/)
[agaume](https://www.transifex.com/user/profile/agaume/)

[jt117](https://www.transifex.com/user/profile/jt117/)
[nerdinator](https://www.transifex.com/user/profile/nerdinator/)
[agaume](https://www.transifex.com/user/profile/agaume/)

### German

[eTaurus](https://www.transifex.com/user/profile/eTaurus/)
[l0c4lh057](https://www.transifex.com/user/profile/l0c4lh057/)
[hyperbit00](https://github.com/hyperbit00)

### Hungarian

[igazka](https://www.transifex.com/user/profile/igazka/)

### Italian
[SK3LA](https://www.transifex.com/user/profile/SK3LA/)
[auanasgheps](https://www.transifex.com/user/profile/auanasgheps/)

[SK3LA](https://www.transifex.com/user/profile/SK3LA/)
[auanasgheps](https://www.transifex.com/user/profile/auanasgheps/)

### Latvian

[melkypie](https://github.com/melkypie)

### Portuguese

[hds](https://www.transifex.com/user/profile/hds/)
[mlopezifu](https://www.transifex.com/user/profile/mlopezifu/)
[stormsz](https://www.transifex.com/user/profile/stormsz/)
[hds](https://www.transifex.com/user/profile/hds/)
[mlopezifu](https://www.transifex.com/user/profile/mlopezifu/)
[stormsz](https://www.transifex.com/user/profile/stormsz/)

### Russian

[amillerr](https://github.com/amillerr)

### Spanish

[albertocp](https://www.transifex.com/user/profile/albertocp/)
[alfa5](https://www.transifex.com/user/profile/alfa5/)
[mlopezifu](https://www.transifex.com/user/profile/mlopezifu/)
[sergio.laya](https://www.transifex.com/user/profile/sergio.laya/)
[albertocp](https://www.transifex.com/user/profile/albertocp/)
[alfa5](https://www.transifex.com/user/profile/alfa5/)
[mlopezifu](https://www.transifex.com/user/profile/mlopezifu/)
[sergio.laya](https://www.transifex.com/user/profile/sergio.laya/)

### Swedish

[makanz](https://github.com/makanz)

### Turkish

@@ -60,4 +85,4 @@ Below are some of the larger contributions made yet.

### Vietnamese

[vuongtrunghieu](https://www.transifex.com/user/profile/vuongtrunghieu/)
[vuongtrunghieu](https://www.transifex.com/user/profile/vuongtrunghieu/)

10 Dockerfile
@@ -1,7 +1,7 @@
FROM python:3.9-alpine3.12
FROM python:3.10-alpine3.15

#Install all dependencies.
RUN apk add --no-cache postgresql-libs gettext zlib libjpeg libwebp libxml2-dev libxslt-dev py-cryptography
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev py-cryptography openldap

#Print all logs without buffering it.
ENV PYTHONUNBUFFERED 1
@@ -15,10 +15,12 @@ WORKDIR /opt/recipes

COPY requirements.txt ./

RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev libressl-dev libffi-dev cargo openssl-dev openldap-dev && \
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev git && \
echo -n "INPUT ( libldap.so )" > /usr/lib/libldap_r.so && \
python -m venv venv && \
/opt/recipes/venv/bin/python -m pip install --upgrade pip && \
venv/bin/pip install wheel==0.36.2 && \
venv/bin/pip install wheel==0.37.1 && \
venv/bin/pip install setuptools_rust==1.1.2 && \
venv/bin/pip install -r requirements.txt --no-cache-dir &&\
apk --purge del .build-deps

33 Dockerfile-raspi (new file)
@@ -0,0 +1,33 @@
# builds of cryptography for raspberry pi (or better arm v7) fail for some
FROM python:3.9-alpine3.15

#Install all dependencies.
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev py-cryptography openldap gcompat

#Print all logs without buffering it.
ENV PYTHONUNBUFFERED 1

#This port will be used by gunicorn.
EXPOSE 8080

#Create app dir and install requirements.
RUN mkdir /opt/recipes
WORKDIR /opt/recipes

COPY requirements.txt ./
RUN \
if [ `apk --print-arch` = "armv7" ]; then \
printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf ; \
fi
RUN apk add --no-cache --virtual .build-deps gcc musl-dev zlib-dev jpeg-dev libwebp-dev python3-dev git && \
echo -n "INPUT ( libldap.so )" > /usr/lib/libldap_r.so && \
python -m venv venv && \
/opt/recipes/venv/bin/python -m pip install --upgrade pip && \
venv/bin/pip install wheel==0.37.1 && \
venv/bin/pip install -r requirements.txt --no-cache-dir --no-binary=Pillow && \
apk --purge del .build-deps

#Copy project and execute it.
COPY . ./
RUN chmod +x boot.sh
ENTRYPOINT ["/opt/recipes/boot.sh"]

@@ -61,6 +61,15 @@ a public page.

Documentation can be found [here](https://docs.tandoor.dev/).

## Support our work
Tandoor is developed by volunteers in their free time just because its fun. That said earning
some money with the project allows us to spend more time on it and thus make improvements we otherwise couldn't.
Because of that there are several ways you can support us

- **GitHub Sponsors** You can sponsor contributors of this project on GitHub: [vabene1111](https://github.com/sponsors/vabene1111)
- **Host at Hetzner** We have been very happy customers of Hetzner for multiple years for all of our projects. If you want to get into self-hosting or are tired of the expensive big providers, their cloud servers are a great place to get started. When you sign up via our [referral link](https://hetzner.cloud/?ref=ISdlrLmr9kGj) you will get 20€ worth of cloud credits and we get a small kickback too.
- **Let us host for you** We are offering a [hosted version](https://app.tandoor.dev) where all profits support us and the development of tandoor (currently only available in germany).

## Contributing

You can help out with the ongoing development by looking for potential bugs in our code base, or by contributing new features. We are always welcoming new pull requests containing bug fixes, refactors and new features. We have a list of tasks and bugs on our issue tracker on Github. Please comment on issues if you want to contribute with, to avoid duplicating effort.

60 boot.sh
@@ -1,12 +1,68 @@
#!/bin/sh
source venv/bin/activate

echo "Updating database"
TANDOOR_PORT="${TANDOOR_PORT:-8080}"
GUNICORN_WORKERS="${GUNICORN_WORKERS:-3}"
GUNICORN_THREADS="${GUNICORN_THREADS:-2}"
NGINX_CONF_FILE=/opt/recipes/nginx/conf.d/Recipes.conf

display_warning() {
echo "[WARNING]"
echo -e "$1"
}

echo "Checking configuration..."

# Nginx config file must exist if gunicorn is not active
if [ ! -f "$NGINX_CONF_FILE" ] && [ $GUNICORN_MEDIA -eq 0 ]; then
display_warning "Nginx configuration file could not be found at the default location!\nPath: ${NGINX_CONF_FILE}"
fi

# SECRET_KEY must be set in .env file
if [ -z "${SECRET_KEY}" ]; then
display_warning "The environment variable 'SECRET_KEY' is not set but REQUIRED for running Tandoor!"
fi


echo "Waiting for database to be ready..."

attempt=0
max_attempts=20

if [ "${DB_ENGINE}" != 'django.db.backends.sqlite3' ]; then

# POSTGRES_PASSWORD must be set in .env file
if [ -z "${POSTGRES_PASSWORD}" ]; then
display_warning "The environment variable 'POSTGRES_PASSWORD' is not set but REQUIRED for running Tandoor!"
fi

while pg_isready --host=${POSTGRES_HOST} --port=${POSTGRES_PORT} --user=${POSTGRES_USER} -q; status=$?; attempt=$((attempt+1)); [ $status -ne 0 ] && [ $attempt -le $max_attempts ]; do
sleep 5
done
fi

if [ $attempt -gt $max_attempts ]; then
echo -e "\nDatabase not reachable. Maximum attempts exceeded."
echo "Please check logs above - misconfiguration is very likely."
echo "Make sure the DB container is up and POSTGRES_HOST is set properly."
echo "Shutting down container."
exit 1 # exit with error to make the container stop
fi

echo "Database is ready"

echo "Migrating database"


python manage.py migrate

echo "Generating static files"

python manage.py collectstatic_js_reverse
python manage.py collectstatic --noinput

echo "Done"

chmod -R 755 /opt/recipes/mediafiles

exec gunicorn -b :8080 --access-logfile - --error-logfile - --log-level INFO recipes.wsgi
exec gunicorn -b :$TANDOOR_PORT --workers $GUNICORN_WORKERS --threads $GUNICORN_THREADS --access-logfile - --error-logfile - --log-level INFO recipes.wsgi

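The new wait loop in boot.sh retries pg_isready every 5 seconds for up to 20 attempts, so the container gives the database roughly 100 seconds before exiting with an error. A rough, hypothetical Python equivalent of that retry logic (not part of the repository), useful for seeing the control flow at a glance:

```python
# Hypothetical stand-alone equivalent of the boot.sh wait loop: poll the
# database port every 5 seconds and give up after 20 attempts (about 100 s).
# It checks plain TCP reachability rather than pg_isready semantics.
import os
import socket
import sys
import time

def wait_for_db(host: str, port: int, max_attempts: int = 20, delay: int = 5) -> bool:
    for attempt in range(1, max_attempts + 1):
        try:
            with socket.create_connection((host, port), timeout=delay):
                return True
        except OSError:
            print(f"attempt {attempt}/{max_attempts}: database not reachable yet")
            time.sleep(delay)
    return False

if __name__ == "__main__":
    if not wait_for_db(os.getenv("POSTGRES_HOST", "localhost"), int(os.getenv("POSTGRES_PORT", "5432"))):
        print("Database not reachable. Maximum attempts exceeded.")
        sys.exit(1)
```
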
@@ -15,7 +15,7 @@ from .models import (BookmarkletImport, Comment, CookLog, Food, FoodInheritField
Recipe, RecipeBook, RecipeBookEntry, RecipeImport, SearchPreference, ShareLink,
ShoppingList, ShoppingListEntry, ShoppingListRecipe, Space, Step, Storage,
Supermarket, SupermarketCategory, SupermarketCategoryRelation, Sync, SyncLog,
TelegramBot, Unit, UserFile, UserPreference, ViewLog, Automation)
TelegramBot, Unit, UserFile, UserPreference, ViewLog, Automation, UserSpace)


class CustomUserAdmin(UserAdmin):
@@ -32,41 +32,7 @@ admin.site.unregister(Group)
@admin.action(description='Delete all data from a space')
def delete_space_action(modeladmin, request, queryset):
for space in queryset:
CookLog.objects.filter(space=space).delete()
ViewLog.objects.filter(space=space).delete()
ImportLog.objects.filter(space=space).delete()
BookmarkletImport.objects.filter(space=space).delete()

Comment.objects.filter(recipe__space=space).delete()
Keyword.objects.filter(space=space).delete()
Ingredient.objects.filter(space=space).delete()
Food.objects.filter(space=space).delete()
Unit.objects.filter(space=space).delete()
Step.objects.filter(space=space).delete()
NutritionInformation.objects.filter(space=space).delete()
RecipeBookEntry.objects.filter(book__space=space).delete()
RecipeBook.objects.filter(space=space).delete()
MealType.objects.filter(space=space).delete()
MealPlan.objects.filter(space=space).delete()
ShareLink.objects.filter(space=space).delete()
Recipe.objects.filter(space=space).delete()

RecipeImport.objects.filter(space=space).delete()
SyncLog.objects.filter(sync__space=space).delete()
Sync.objects.filter(space=space).delete()
Storage.objects.filter(space=space).delete()

ShoppingListEntry.objects.filter(shoppinglist__space=space).delete()
ShoppingListRecipe.objects.filter(shoppinglist__space=space).delete()
ShoppingList.objects.filter(space=space).delete()

SupermarketCategoryRelation.objects.filter(supermarket__space=space).delete()
SupermarketCategory.objects.filter(space=space).delete()
Supermarket.objects.filter(space=space).delete()

InviteLink.objects.filter(space=space).delete()
UserFile.objects.filter(space=space).delete()
Automation.objects.filter(space=space).delete()
space.save()


class SpaceAdmin(admin.ModelAdmin):
@@ -80,15 +46,23 @@ class SpaceAdmin(admin.ModelAdmin):
admin.site.register(Space, SpaceAdmin)


class UserSpaceAdmin(admin.ModelAdmin):
list_display = ('user', 'space',)
search_fields = ('user__username', 'space__name',)


admin.site.register(UserSpace, UserSpaceAdmin)


class UserPreferenceAdmin(admin.ModelAdmin):
list_display = ('name', 'space', 'theme', 'nav_color', 'default_page', 'search_style',) # TODO add new fields
search_fields = ('user__username', 'space__name')
list_filter = ('theme', 'nav_color', 'default_page', 'search_style')
list_display = ('name', 'theme', 'nav_color', 'default_page',)
search_fields = ('user__username',)
list_filter = ('theme', 'nav_color', 'default_page',)
date_hierarchy = 'created_at'

@staticmethod
def name(obj):
return obj.user.get_user_name()
return obj.user.get_user_display_name()


admin.site.register(UserPreference, UserPreferenceAdmin)
@@ -101,7 +75,7 @@ class SearchPreferenceAdmin(admin.ModelAdmin):

@staticmethod
def name(obj):
return obj.user.get_user_name()
return obj.user.get_user_display_name()


admin.site.register(SearchPreference, SearchPreferenceAdmin)
@@ -203,7 +177,7 @@ class RecipeAdmin(admin.ModelAdmin):

@staticmethod
def created_by(obj):
return obj.created_by.get_user_name()
return obj.created_by.get_user_display_name()

if settings.DATABASES['default']['ENGINE'] in ['django.db.backends.postgresql_psycopg2', 'django.db.backends.postgresql']:
actions = [rebuild_index]
@@ -237,12 +211,12 @@ admin.site.register(Ingredient, IngredientAdmin)

class CommentAdmin(admin.ModelAdmin):
list_display = ('recipe', 'name', 'created_at')
search_fields = ('text', 'user__username')
search_fields = ('text', 'created_by__username')
date_hierarchy = 'created_at'

@staticmethod
def name(obj):
return obj.created_by.get_user_name()
return obj.created_by.get_user_display_name()


admin.site.register(Comment, CommentAdmin)
@@ -261,7 +235,7 @@ class RecipeBookAdmin(admin.ModelAdmin):

@staticmethod
def user_name(obj):
return obj.created_by.get_user_name()
return obj.created_by.get_user_display_name()


admin.site.register(RecipeBook, RecipeBookAdmin)
@@ -279,7 +253,7 @@ class MealPlanAdmin(admin.ModelAdmin):

@staticmethod
def user(obj):
return obj.created_by.get_user_name()
return obj.created_by.get_user_display_name()


admin.site.register(MealPlan, MealPlanAdmin)

@@ -1,81 +0,0 @@
import django_filters
from django.conf import settings
from django.contrib.postgres.search import TrigramSimilarity
from django.db.models import Q
from django.utils.translation import gettext as _
from django_scopes import scopes_disabled

from cookbook.forms import MultiSelectWidget
from cookbook.models import Food, Keyword, Recipe, ShoppingList

with scopes_disabled():
class RecipeFilter(django_filters.FilterSet):
name = django_filters.CharFilter(method='filter_name')
keywords = django_filters.ModelMultipleChoiceFilter(
queryset=Keyword.objects.none(),
widget=MultiSelectWidget,
method='filter_keywords'
)
foods = django_filters.ModelMultipleChoiceFilter(
queryset=Food.objects.none(),
widget=MultiSelectWidget,
method='filter_foods',
label=_('Ingredients')
)

def __init__(self, data=None, *args, **kwargs):
space = kwargs.pop('space')
super().__init__(data, *args, **kwargs)
self.filters['foods'].queryset = Food.objects.filter(space=space).all()
self.filters['keywords'].queryset = Keyword.objects.filter(space=space).all()

@staticmethod
def filter_keywords(queryset, name, value):
if not name == 'keywords':
return queryset
for x in value:
queryset = queryset.filter(keywords=x)
return queryset

@staticmethod
def filter_foods(queryset, name, value):
if not name == 'foods':
return queryset
for x in value:
queryset = queryset.filter(steps__ingredients__food__name=x).distinct()
return queryset

@staticmethod
def filter_name(queryset, name, value):
if not name == 'name':
return queryset
if settings.DATABASES['default']['ENGINE'] in ['django.db.backends.postgresql_psycopg2',
'django.db.backends.postgresql']:
queryset = queryset.annotate(similarity=TrigramSimilarity('name', value), ).filter(
Q(similarity__gt=0.1) | Q(name__unaccent__icontains=value)).order_by('-similarity')
else:
queryset = queryset.filter(name__icontains=value)
return queryset

class Meta:
model = Recipe
fields = ['name', 'keywords', 'foods', 'internal']

# class FoodFilter(django_filters.FilterSet):
# name = django_filters.CharFilter(lookup_expr='icontains')

# class Meta:
# model = Food
# fields = ['name']

class ShoppingListFilter(django_filters.FilterSet):

def __init__(self, data=None, *args, **kwargs):
if data is not None:
data = data.copy()
data.setdefault("finished", False)
super().__init__(data, *args, **kwargs)

class Meta:
model = ShoppingList
fields = ['finished']

@@ -37,19 +37,15 @@ class UserPreferenceForm(forms.ModelForm):
prefix = 'preference'

def __init__(self, *args, **kwargs):
if x := kwargs.get('instance', None):
space = x.space
else:
space = kwargs.pop('space')
space = kwargs.pop('space')
super().__init__(*args, **kwargs)
self.fields['plan_share'].queryset = User.objects.filter(userpreference__space=space).all()
self.fields['plan_share'].queryset = User.objects.filter(userspace__space=space).all()

class Meta:
model = UserPreference
fields = (
'default_unit', 'use_fractions', 'use_kj', 'theme', 'nav_color',
'sticky_navbar', 'default_page', 'show_recent', 'search_style',
'plan_share', 'ingredient_decimals', 'comments',
'sticky_navbar', 'default_page', 'plan_share', 'ingredient_decimals', 'comments', 'left_handed',
)

labels = {
@@ -60,35 +56,33 @@ class UserPreferenceForm(forms.ModelForm):
'nav_color': _('Navbar color'),
'sticky_navbar': _('Sticky navbar'),
'default_page': _('Default page'),
'show_recent': _('Show recent recipes'),
'search_style': _('Search style'),
'plan_share': _('Plan sharing'),
'ingredient_decimals': _('Ingredient decimal places'),
'shopping_auto_sync': _('Shopping list auto sync period'),
'comments': _('Comments')
'comments': _('Comments'),
'left_handed': _('Left-handed mode')
}

help_texts = {
'nav_color': _('Color of the top navigation bar. Not all colors work with all themes, just try them out!'),
# noqa: E501
'default_unit': _('Default Unit to be used when inserting a new ingredient into a recipe.'), # noqa: E501

'default_unit': _('Default Unit to be used when inserting a new ingredient into a recipe.'),
'use_fractions': _(
'Enables support for fractions in ingredient amounts (e.g. convert decimals to fractions automatically)'),
# noqa: E501
'use_kj': _('Display nutritional energy amounts in joules instead of calories'), # noqa: E501

'use_kj': _('Display nutritional energy amounts in joules instead of calories'),
'plan_share': _('Users with whom newly created meal plans should be shared by default.'),
'shopping_share': _('Users with whom to share shopping lists.'),
# noqa: E501
'show_recent': _('Show recently viewed recipes on search page.'), # noqa: E501
'ingredient_decimals': _('Number of decimals to round ingredients.'), # noqa: E501
'comments': _('If you want to be able to create and see comments underneath recipes.'), # noqa: E501
'ingredient_decimals': _('Number of decimals to round ingredients.'),
'comments': _('If you want to be able to create and see comments underneath recipes.'),
'shopping_auto_sync': _(
'Setting to 0 will disable auto sync. When viewing a shopping list the list is updated every set seconds to sync changes someone else might have made. Useful when shopping with multiple people but might use a little bit ' # noqa: E501
'of mobile data. If lower than instance limit it is reset when saving.' # noqa: E501
'Setting to 0 will disable auto sync. When viewing a shopping list the list is updated every set seconds to sync changes someone else might have made. Useful when shopping with multiple people but might use a little bit '
'of mobile data. If lower than instance limit it is reset when saving.'
),
'sticky_navbar': _('Makes the navbar stick to the top of the page.'), # noqa: E501
'sticky_navbar': _('Makes the navbar stick to the top of the page.'),
'mealplan_autoadd_shopping': _('Automatically add meal plan ingredients to shopping list.'),
'mealplan_autoexclude_onhand': _('Exclude ingredients that are on hand.'),
'left_handed': _('Will optimize the UI for use with your left hand.')
}

widgets = {
@@ -153,11 +147,13 @@ class ImportExportBase(forms.Form):
RECIPESAGE = 'RECIPESAGE'
DOMESTICA = 'DOMESTICA'
MEALMASTER = 'MEALMASTER'
MELARECIPES = 'MELARECIPES'
REZKONV = 'REZKONV'
OPENEATS = 'OPENEATS'
PLANTOEAT = 'PLANTOEAT'
COOKBOOKAPP = 'COOKBOOKAPP'
COPYMETHAT = 'COPYMETHAT'
COOKMATE = 'COOKMATE'
PDF = 'PDF'

type = forms.ChoiceField(choices=(
@@ -165,7 +161,8 @@ class ImportExportBase(forms.Form):
(MEALIE, 'Mealie'), (CHOWDOWN, 'Chowdown'), (SAFFRON, 'Saffron'), (CHEFTAP, 'ChefTap'),
(PEPPERPLATE, 'Pepperplate'), (RECETTETEK, 'RecetteTek'), (RECIPESAGE, 'Recipe Sage'), (DOMESTICA, 'Domestica'),
(MEALMASTER, 'MealMaster'), (REZKONV, 'RezKonv'), (OPENEATS, 'Openeats'), (RECIPEKEEPER, 'Recipe Keeper'),
(PLANTOEAT, 'Plantoeat'), (COOKBOOKAPP, 'CookBookApp'), (COPYMETHAT, 'CopyMeThat'), (PDF, 'PDF'),
(PLANTOEAT, 'Plantoeat'), (COOKBOOKAPP, 'CookBookApp'), (COPYMETHAT, 'CopyMeThat'), (PDF, 'PDF'), (MELARECIPES, 'Melarecipes'),
(COOKMATE, 'Cookmate')
))


@@ -179,6 +176,7 @@ class ImportForm(ImportExportBase):
class ExportForm(ImportExportBase):
recipes = forms.ModelMultipleChoiceField(widget=MultiSelectWidget, queryset=Recipe.objects.none(), required=False)
all = forms.BooleanField(required=False)
custom_filter = forms.IntegerField(required=False)

def __init__(self, *args, **kwargs):
space = kwargs.pop('space')
@@ -333,9 +331,9 @@ class MealPlanForm(forms.ModelForm):
)

help_texts = {
'shared': _('You can list default users to share recipes with in the settings.'), # noqa: E501
'shared': _('You can list default users to share recipes with in the settings.'),
'note': _('You can use markdown to format this field. See the <a href="/docs/markdown/">docs here</a>')
# noqa: E501

}

widgets = {
@@ -490,8 +488,8 @@ class ShoppingPreferenceForm(forms.ModelForm):
help_texts = {
'shopping_share': _('Users will see all items you add to your shopping list. They must add you to see items on their list.'),
'shopping_auto_sync': _(
'Setting to 0 will disable auto sync. When viewing a shopping list the list is updated every set seconds to sync changes someone else might have made. Useful when shopping with multiple people but might use a little bit ' # noqa: E501
'of mobile data. If lower than instance limit it is reset when saving.' # noqa: E501
'Setting to 0 will disable auto sync. When viewing a shopping list the list is updated every set seconds to sync changes someone else might have made. Useful when shopping with multiple people but might use a little bit '
'of mobile data. If lower than instance limit it is reset when saving.'
),
'mealplan_autoadd_shopping': _('Automatically add meal plan ingredients to shopping list.'),
'mealplan_autoinclude_related': _('When adding a meal plan to the shopping list (manually or automatically), include all related recipes.'),

@@ -14,7 +14,7 @@ class AllAuthCustomAdapter(DefaultAccountAdapter):

def is_open_for_signup(self, request):
"""
Whether to allow sign ups.
Whether to allow sign-ups.
"""
signup_token = False
if 'signup_token' in request.session and InviteLink.objects.filter(valid_until__gte=datetime.datetime.today(), used_by=None, uuid=request.session['signup_token']).exists():
@@ -31,7 +31,10 @@ class AllAuthCustomAdapter(DefaultAccountAdapter):
default = datetime.datetime.now()
c = caches['default'].get_or_set(email, default, timeout=360)
if c == default:
super(AllAuthCustomAdapter, self).send_mail(template_prefix, email, context)
try:
super(AllAuthCustomAdapter, self).send_mail(template_prefix, email, context)
except Exception: # dont fail signup just because confirmation mail could not be send
pass
else:
messages.add_message(self.request, messages.ERROR, _('In order to prevent spam, the requested email was not send. Please wait a few minutes and try again.'))
else:

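The adapter change above rate-limits account e-mails through Django's cache and swallows delivery errors during signup. A minimal sketch of the same pattern, assuming only Django's cache framework; the helper name and the send_func callable are illustrative, not part of the project:

import datetime

from django.core.cache import caches


def send_throttled(send_func, email, timeout=360):
    # Allow at most one mail per address within `timeout` seconds.
    marker = datetime.datetime.now()
    cached = caches['default'].get_or_set(email, marker, timeout=timeout)
    if cached == marker:  # this request claimed the slot
        try:
            send_func(email)
        except Exception:
            # mirror the adapter: a failed confirmation mail must not break signup
            pass
        return True
    return False  # a mail for this address went out recently; caller should warn instead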
@@ -10,4 +10,5 @@ def context_settings(request):
'TERMS_URL': settings.TERMS_URL,
'PRIVACY_URL': settings.PRIVACY_URL,
'IMPRINT_URL': settings.IMPRINT_URL,
'SHOPPING_MIN_AUTOSYNC_INTERVAL': settings.SHOPPING_MIN_AUTOSYNC_INTERVAL,
}

@@ -38,10 +38,12 @@ def get_filetype(name):

# TODO this whole file needs proper documentation, refactoring, and testing
# TODO also add env variable to define which images sizes should be compressed
def handle_image(request, image_object, filetype='.jpeg'):
# filetype argument can not be optional, otherwise this function will treat all images as if they were a jpeg
# Because it's no longer optional, no reason to return it
def handle_image(request, image_object, filetype):
if (image_object.size / 1000) > 500: # if larger than 500 kb compress
if filetype == '.jpeg' or filetype == '.jpg':
return rescale_image_jpeg(image_object), filetype
return rescale_image_jpeg(image_object)
if filetype == '.png':
return rescale_image_png(image_object), filetype
return image_object, filetype
return rescale_image_png(image_object)
return image_object

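Since handle_image now requires the extension and returns only the image, a call site would look roughly like the sketch below; the module path and the use of get_filetype from the same file are assumptions based on this hunk:

from cookbook.helper.image_processing import get_filetype, handle_image  # module path assumed


def store_upload(request, upload):
    ext = get_filetype(upload.name)            # e.g. '.png'
    return handle_image(request, upload, ext)  # rescaled only when larger than ~500 kB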
@@ -4,7 +4,7 @@ import unicodedata
|
||||
|
||||
from django.core.cache import caches
|
||||
|
||||
from cookbook.models import Unit, Food, Automation
|
||||
from cookbook.models import Unit, Food, Automation, Ingredient
|
||||
|
||||
|
||||
class IngredientParser:
|
||||
@@ -46,7 +46,7 @@ class IngredientParser:
|
||||
|
||||
def apply_food_automation(self, food):
|
||||
"""
|
||||
Apply food alias automations to passed foood
|
||||
Apply food alias automations to passed food
|
||||
:param food: unit as string
|
||||
:return: food as string (possibly changed by automation)
|
||||
"""
|
||||
@@ -124,7 +124,7 @@ class IngredientParser:
|
||||
|
||||
def parse_amount(self, x):
|
||||
amount = 0
|
||||
unit = ''
|
||||
unit = None
|
||||
note = ''
|
||||
|
||||
did_check_frac = False
|
||||
@@ -155,33 +155,36 @@ class IngredientParser:
|
||||
except ValueError:
|
||||
unit = x[end:]
|
||||
|
||||
if unit.startswith('(') or unit.startswith('-'): # i dont know any unit that starts with ( or - so its likely an alternative like 1L (500ml) Water or 2-3
|
||||
unit = ''
|
||||
if unit is not None and unit.strip() == '':
|
||||
unit = None
|
||||
|
||||
if unit is not None and (unit.startswith('(') or unit.startswith('-')): # i dont know any unit that starts with ( or - so its likely an alternative like 1L (500ml) Water or 2-3
|
||||
unit = None
|
||||
note = x
|
||||
return amount, unit, note
|
||||
|
||||
def parse_ingredient_with_comma(self, tokens):
|
||||
ingredient = ''
|
||||
def parse_food_with_comma(self, tokens):
|
||||
food = ''
|
||||
note = ''
|
||||
start = 0
|
||||
# search for first occurrence of an argument ending in a comma
|
||||
while start < len(tokens) and not tokens[start].endswith(','):
|
||||
start += 1
|
||||
if start == len(tokens):
|
||||
# no token ending in a comma found -> use everything as ingredient
|
||||
ingredient = ' '.join(tokens)
|
||||
# no token ending in a comma found -> use everything as food
|
||||
food = ' '.join(tokens)
|
||||
else:
|
||||
ingredient = ' '.join(tokens[:start + 1])[:-1]
|
||||
food = ' '.join(tokens[:start + 1])[:-1]
|
||||
note = ' '.join(tokens[start + 1:])
|
||||
return ingredient, note
|
||||
return food, note
|
||||
|
||||
def parse_ingredient(self, tokens):
|
||||
ingredient = ''
|
||||
def parse_food(self, tokens):
|
||||
food = ''
|
||||
note = ''
|
||||
if tokens[-1].endswith(')'):
|
||||
# Check if the matching opening bracket is in the same token
|
||||
if (not tokens[-1].startswith('(')) and ('(' in tokens[-1]):
|
||||
return self.parse_ingredient_with_comma(tokens)
|
||||
return self.parse_food_with_comma(tokens)
|
||||
# last argument ends with closing bracket -> look for opening bracket
|
||||
start = len(tokens) - 1
|
||||
while not tokens[start].startswith('(') and not start == 0:
|
||||
@@ -191,33 +194,51 @@ class IngredientParser:
|
||||
raise ValueError
|
||||
elif start < 0:
|
||||
# no opening bracket anywhere -> just ignore the last bracket
|
||||
ingredient, note = self.parse_ingredient_with_comma(tokens)
|
||||
food, note = self.parse_food_with_comma(tokens)
|
||||
else:
|
||||
# opening bracket found -> split in ingredient and note, remove brackets from note # noqa: E501
|
||||
# opening bracket found -> split in food and note, remove brackets from note # noqa: E501
|
||||
note = ' '.join(tokens[start:])[1:-1]
|
||||
ingredient = ' '.join(tokens[:start])
|
||||
food = ' '.join(tokens[:start])
|
||||
else:
|
||||
ingredient, note = self.parse_ingredient_with_comma(tokens)
|
||||
return ingredient, note
|
||||
food, note = self.parse_food_with_comma(tokens)
|
||||
return food, note
|
||||
|
||||
def parse(self, x):
|
||||
def parse(self, ingredient):
|
||||
"""
|
||||
Main parsing function, takes an ingredient string (e.g. '1 l Water') and extracts amount, unit, food, ...
|
||||
:param ingredient: string ingredient
|
||||
:return: amount, unit (can be None), food, note (can be empty)
|
||||
"""
|
||||
# initialize default values
|
||||
amount = 0
|
||||
unit = ''
|
||||
ingredient = ''
|
||||
unit = None
|
||||
food = ''
|
||||
note = ''
|
||||
unit_note = ''
|
||||
|
||||
if len(ingredient) == 0:
|
||||
raise ValueError('string to parse cannot be empty')
|
||||
|
||||
# some people/languages put amount and unit at the end of the ingredient string
|
||||
# if something like this is detected move it to the beginning so the parser can handle it
|
||||
if len(ingredient) < 1000 and re.search(r'^([^\W\d_])+(.)*[1-9](\d)*\s*([^\W\d_])+', ingredient):
|
||||
match = re.search(r'[1-9](\d)*\s*([^\W\d_])+', ingredient)
|
||||
print(f'reording from {ingredient} to {ingredient[match.start():match.end()] + " " + ingredient.replace(ingredient[match.start():match.end()], "")}')
|
||||
ingredient = ingredient[match.start():match.end()] + ' ' + ingredient.replace(ingredient[match.start():match.end()], '')
|
||||
|
||||
# if the string contains parenthesis early on remove it and place it at the end
|
||||
# because its likely some kind of note
|
||||
if re.match('(.){1,6}\s\((.[^\(\)])+\)\s', x):
|
||||
match = re.search('\((.[^\(])+\)', x)
|
||||
x = x[:match.start()] + x[match.end():] + ' ' + x[match.start():match.end()]
|
||||
if re.match('(.){1,6}\s\((.[^\(\)])+\)\s', ingredient):
|
||||
match = re.search('\((.[^\(])+\)', ingredient)
|
||||
ingredient = ingredient[:match.start()] + ingredient[match.end():] + ' ' + ingredient[match.start():match.end()]
|
||||
|
||||
tokens = x.split()
|
||||
# leading spaces before commas result in extra tokens, clean them out
|
||||
ingredient = ingredient.replace(' ,', ',')
|
||||
|
||||
tokens = ingredient.split() # split at each space into tokens
|
||||
if len(tokens) == 1:
|
||||
# there only is one argument, that must be the ingredient
|
||||
ingredient = tokens[0]
|
||||
# there only is one argument, that must be the food
|
||||
food = tokens[0]
|
||||
else:
|
||||
try:
|
||||
# try to parse first argument as amount
|
||||
@@ -227,48 +248,65 @@ class IngredientParser:
|
||||
# a fraction for the amount
|
||||
if len(tokens) > 2:
|
||||
try:
|
||||
if not unit == '':
|
||||
if unit is not None:
|
||||
# a unit is already found, no need to try the second argument for a fraction
|
||||
# probably not the best method to do it, but I didn't want to make an if check and paste the exact same thing in the else as already is in the except # noqa: E501
|
||||
# probably not the best method to do it, but I didn't want to make an if check and paste the exact same thing in the else as already is in the except
|
||||
raise ValueError
|
||||
# try to parse second argument as amount and add that, in case of '2 1/2' or '2 ½'
|
||||
amount += self.parse_fraction(tokens[1])
|
||||
# assume that units can't end with a comma
|
||||
if len(tokens) > 3 and not tokens[2].endswith(','):
|
||||
# try to use third argument as unit and everything else as ingredient, use everything as ingredient if it fails # noqa: E501
|
||||
# try to use third argument as unit and everything else as food, use everything as food if it fails
|
||||
try:
|
||||
ingredient, note = self.parse_ingredient(tokens[3:])
|
||||
food, note = self.parse_food(tokens[3:])
|
||||
unit = tokens[2]
|
||||
except ValueError:
|
||||
ingredient, note = self.parse_ingredient(tokens[2:])
|
||||
food, note = self.parse_food(tokens[2:])
|
||||
else:
|
||||
ingredient, note = self.parse_ingredient(tokens[2:])
|
||||
food, note = self.parse_food(tokens[2:])
|
||||
except ValueError:
|
||||
# assume that units can't end with a comma
|
||||
if not tokens[1].endswith(','):
|
||||
# try to use second argument as unit and everything else as ingredient, use everything as ingredient if it fails # noqa: E501
|
||||
# try to use second argument as unit and everything else as food, use everything as food if it fails
|
||||
try:
|
||||
ingredient, note = self.parse_ingredient(tokens[2:])
|
||||
if unit == '':
|
||||
food, note = self.parse_food(tokens[2:])
|
||||
if unit is None:
|
||||
unit = tokens[1]
|
||||
else:
|
||||
note = tokens[1]
|
||||
except ValueError:
|
||||
ingredient, note = self.parse_ingredient(tokens[1:])
|
||||
food, note = self.parse_food(tokens[1:])
|
||||
else:
|
||||
ingredient, note = self.parse_ingredient(tokens[1:])
|
||||
food, note = self.parse_food(tokens[1:])
|
||||
else:
|
||||
# only two arguments, first one is the amount
|
||||
# which means this is the ingredient
|
||||
ingredient = tokens[1]
|
||||
# which means this is the food
|
||||
food = tokens[1]
|
||||
except ValueError:
|
||||
try:
|
||||
# can't parse first argument as amount
|
||||
# -> no unit -> parse everything as ingredient
|
||||
ingredient, note = self.parse_ingredient(tokens)
|
||||
# -> no unit -> parse everything as food
|
||||
food, note = self.parse_food(tokens)
|
||||
except ValueError:
|
||||
ingredient = ' '.join(tokens[1:])
|
||||
food = ' '.join(tokens[1:])
|
||||
|
||||
if unit_note not in note:
|
||||
note += ' ' + unit_note
|
||||
return amount, self.apply_unit_automation(unit.strip()), self.apply_food_automation(ingredient.strip()), note.strip()
|
||||
|
||||
if unit:
|
||||
unit = self.apply_unit_automation(unit.strip())
|
||||
|
||||
food = self.apply_food_automation(food.strip())
|
||||
if len(food) > Food._meta.get_field('name').max_length: # test if food name is to long
|
||||
# try splitting it at a space and taking only the first arg
|
||||
if len(food.split()) > 1 and len(food.split()[0]) < Food._meta.get_field('name').max_length:
|
||||
note = ' '.join(food.split()[1:]) + ' ' + note
|
||||
food = food.split()[0]
|
||||
else:
|
||||
note = food + ' ' + note
|
||||
food = food[:Food._meta.get_field('name').max_length]
|
||||
|
||||
if len(food.strip()) == 0:
|
||||
raise ValueError(f'Error parsing string {ingredient}, food cannot be empty')
|
||||
|
||||
return amount, unit, food, note[:Ingredient._meta.get_field('note').max_length].strip()
|
||||
|
||||
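With the renames above (parse_ingredient to parse_food), the public parse() call returns amount, unit, food, note, where unit may be None. A hedged usage sketch; the boolean constructor flag simply mirrors the call seen later in this diff and is not otherwise documented here:

from cookbook.helper.ingredient_parser import IngredientParser


def split_ingredient_line(request, line):
    parser = IngredientParser(request, True)       # flag as used in get_from_scraper below
    amount, unit, food, note = parser.parse(line)  # e.g. '2 1/2 cups flour, sifted'
    return {'amount': amount, 'unit': unit, 'food': food, 'note': note}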
@@ -73,9 +73,9 @@ class UrlizePattern(markdown.inlinepatterns.Pattern):
|
||||
class UrlizeExtension(markdown.Extension):
|
||||
""" Urlize Extension for Python-Markdown. """
|
||||
|
||||
def extendMarkdown(self, md, md_globals):
|
||||
def extendMarkdown(self, md):
|
||||
""" Replace autolink with UrlizePattern """
|
||||
md.inlinePatterns['autolink'] = UrlizePattern(URLIZE_RE, md)
|
||||
md.inlinePatterns.register(UrlizePattern(URLIZE_RE, md), 'autolink', 120)
|
||||
|
||||
|
||||
def makeExtension(*args, **kwargs):
|
||||
|
||||
@@ -1,18 +1,19 @@
|
||||
"""
|
||||
Source: https://djangosnippets.org/snippets/1703/
|
||||
"""
|
||||
import inspect
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib import messages
|
||||
from django.contrib.auth.decorators import user_passes_test
|
||||
from django.core.cache import caches
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.cache import cache
|
||||
from django.core.exceptions import ValidationError, ObjectDoesNotExist
|
||||
from django.http import HttpResponseRedirect
|
||||
from django.urls import reverse, reverse_lazy
|
||||
from django.utils.translation import gettext as _
|
||||
from oauth2_provider.contrib.rest_framework import TokenHasScope, TokenHasReadWriteScope
|
||||
from oauth2_provider.models import AccessToken
|
||||
from rest_framework import permissions
|
||||
from rest_framework.permissions import SAFE_METHODS
|
||||
|
||||
from cookbook.models import ShareLink
|
||||
from cookbook.models import ShareLink, Recipe, UserSpace
|
||||
|
||||
|
||||
def get_allowed_groups(groups_required):
|
||||
@@ -30,11 +31,12 @@ def get_allowed_groups(groups_required):
|
||||
return groups_allowed
|
||||
|
||||
|
||||
def has_group_permission(user, groups):
|
||||
def has_group_permission(user, groups, no_cache=False):
|
||||
"""
|
||||
Tests if a given user is member of a certain group (or any higher group)
|
||||
Superusers always bypass permission checks.
|
||||
Unauthenticated users can't be member of any group thus always return false.
|
||||
:param no_cache: (optional) do not return cached results, always check agains DB
|
||||
:param user: django auth user object
|
||||
:param groups: list or tuple of groups the user should be checked for
|
||||
:return: True if user is in allowed groups, false otherwise
|
||||
@@ -42,10 +44,24 @@ def has_group_permission(user, groups):
|
||||
if not user.is_authenticated:
|
||||
return False
|
||||
groups_allowed = get_allowed_groups(groups)
|
||||
|
||||
CACHE_KEY = hash((inspect.stack()[0][3], (user.pk, user.username, user.email), groups_allowed))
|
||||
if not no_cache:
|
||||
cached_result = cache.get(CACHE_KEY, default=None)
|
||||
if cached_result is not None:
|
||||
return cached_result
|
||||
|
||||
result = False
|
||||
print('running check', user, groups_allowed)
|
||||
if user.is_authenticated:
|
||||
if bool(user.groups.filter(name__in=groups_allowed)):
|
||||
return True
|
||||
return False
|
||||
if user_space := user.userspace_set.filter(active=True):
|
||||
if len(user_space) != 1:
|
||||
result = False # do not allow any group permission if more than one space is active, needs to be changed when simultaneous multi-space-tenancy is added
|
||||
elif bool(user_space.first().groups.filter(name__in=groups_allowed)):
|
||||
result = True
|
||||
|
||||
cache.set(CACHE_KEY, result, timeout=10)
|
||||
return result
|
||||
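The caching added to has_group_permission keys a short-lived entry on the calling function name plus the user's identity and allowed groups. The same pattern in isolation; the helper name and the compute callable are ours, not the project's:

import inspect

from django.core.cache import cache


def cached_permission(user, groups_allowed, compute, timeout=10):
    key = hash((inspect.stack()[0][3], (user.pk, user.username, user.email), tuple(groups_allowed)))
    result = cache.get(key, default=None)
    if result is None:
        result = compute()  # hit the database only on a cache miss
        cache.set(key, result, timeout=timeout)
    return result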
|
||||
|
||||
def is_object_owner(user, obj):
|
||||
@@ -53,7 +69,6 @@ def is_object_owner(user, obj):
|
||||
Tests if a given user is the owner of a given object
|
||||
test performed by checking user against the objects user
|
||||
and create_by field (if exists)
|
||||
superusers bypass all checks, unauthenticated users cannot own anything
|
||||
:param user django auth user object
|
||||
:param obj any object that should be tested
|
||||
:return: true if user is owner of object, false otherwise
|
||||
@@ -66,11 +81,25 @@ def is_object_owner(user, obj):
|
||||
return False
|
||||
|
||||
|
||||
def is_space_owner(user, obj):
|
||||
"""
|
||||
Tests if a given user is the owner the space of a given object
|
||||
:param user django auth user object
|
||||
:param obj any object that should be tested
|
||||
:return: true if user is owner of the objects space, false otherwise
|
||||
"""
|
||||
if not user.is_authenticated:
|
||||
return False
|
||||
try:
|
||||
return obj.get_space().get_owner() == user
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def is_object_shared(user, obj):
|
||||
"""
|
||||
Tests if a given user is shared for a given object
|
||||
test performed by checking user against the objects shared table
|
||||
superusers bypass all checks, unauthenticated users cannot own anything
|
||||
:param user django auth user object
|
||||
:param obj any object that should be tested
|
||||
:return: true if user is shared for object, false otherwise
|
||||
@@ -91,7 +120,7 @@ def share_link_valid(recipe, share):
|
||||
"""
|
||||
try:
|
||||
CACHE_KEY = f'recipe_share_{recipe.pk}_{share}'
|
||||
if c := caches['default'].get(CACHE_KEY, False):
|
||||
if c := cache.get(CACHE_KEY, False):
|
||||
return c
|
||||
|
||||
if link := ShareLink.objects.filter(recipe=recipe, uuid=share, abuse_blocked=False).first():
|
||||
@@ -99,7 +128,7 @@ def share_link_valid(recipe, share):
|
||||
return False
|
||||
link.request_count += 1
|
||||
link.save()
|
||||
caches['default'].set(CACHE_KEY, True, timeout=3)
|
||||
cache.set(CACHE_KEY, True, timeout=3)
|
||||
return True
|
||||
return False
|
||||
except ValidationError:
|
||||
@@ -166,7 +195,7 @@ class OwnerRequiredMixin(object):
|
||||
|
||||
try:
|
||||
obj = self.get_object()
|
||||
if obj.get_space() != request.space:
|
||||
if not request.user.userspace.filter(space=obj.get_space()).exists():
|
||||
messages.add_message(request, messages.ERROR,
|
||||
_('You do not have the required permissions to view this page!'))
|
||||
return HttpResponseRedirect(reverse_lazy('index'))
|
||||
@@ -184,7 +213,7 @@ class CustomIsOwner(permissions.BasePermission):
|
||||
verifies user has ownership over object
|
||||
(either user or created_by or user is request user)
|
||||
"""
|
||||
message = _('You cannot interact with this object as it is not owned by you!') # noqa: E501
|
||||
message = _('You cannot interact with this object as it is not owned by you!')
|
||||
|
||||
def has_permission(self, request, view):
|
||||
return request.user.is_authenticated
|
||||
@@ -193,6 +222,28 @@ class CustomIsOwner(permissions.BasePermission):
|
||||
return is_object_owner(request.user, obj)
|
||||
|
||||
|
||||
class CustomIsOwnerReadOnly(CustomIsOwner):
|
||||
def has_permission(self, request, view):
|
||||
return super().has_permission(request, view) and request.method in SAFE_METHODS
|
||||
|
||||
def has_object_permission(self, request, view, obj):
|
||||
return super().has_object_permission(request, view) and request.method in SAFE_METHODS
|
||||
|
||||
|
||||
class CustomIsSpaceOwner(permissions.BasePermission):
|
||||
"""
|
||||
Custom permission class for django rest framework views
|
||||
verifies if the user is the owner of the space the object belongs to
|
||||
"""
|
||||
message = _('You cannot interact with this object as it is not owned by you!')
|
||||
|
||||
def has_permission(self, request, view):
|
||||
return request.user.is_authenticated and request.space.created_by == request.user
|
||||
|
||||
def has_object_permission(self, request, view, obj):
|
||||
return is_space_owner(request.user, obj)
|
||||
|
||||
|
||||
# TODO function duplicate/too similar name
|
||||
class CustomIsShared(permissions.BasePermission):
|
||||
"""
|
||||
@@ -262,3 +313,125 @@ class CustomIsShare(permissions.BasePermission):
|
||||
if share:
|
||||
return share_link_valid(obj, share)
|
||||
return False
|
||||
|
||||
|
||||
class CustomRecipePermission(permissions.BasePermission):
|
||||
"""
|
||||
Custom permission class for recipe api endpoint
|
||||
"""
|
||||
message = _('You do not have the required permissions to view this page!')
|
||||
|
||||
def has_permission(self, request, view): # user is either at least a guest or a share link is given and the request is safe
|
||||
share = request.query_params.get('share', None)
|
||||
return has_group_permission(request.user, ['guest']) or (share and request.method in SAFE_METHODS and 'pk' in view.kwargs)
|
||||
|
||||
def has_object_permission(self, request, view, obj):
|
||||
share = request.query_params.get('share', None)
|
||||
if share:
|
||||
return share_link_valid(obj, share)
|
||||
else:
|
||||
if obj.private:
|
||||
return ((obj.created_by == request.user) or (request.user in obj.shared.all())) and obj.space == request.space
|
||||
else:
|
||||
return has_group_permission(request.user, ['guest']) and obj.space == request.space
|
||||
|
||||
|
||||
class CustomUserPermission(permissions.BasePermission):
|
||||
"""
|
||||
Custom permission class for user api endpoint
|
||||
"""
|
||||
message = _('You do not have the required permissions to view this page!')
|
||||
|
||||
def has_permission(self, request, view): # a space filtered user list is visible for everyone
|
||||
return has_group_permission(request.user, ['guest'])
|
||||
|
||||
def has_object_permission(self, request, view, obj): # object write permissions are only available for user
|
||||
if request.method in SAFE_METHODS and 'pk' in view.kwargs and has_group_permission(request.user, ['guest']) and request.space in obj.userspace_set.all():
|
||||
return True
|
||||
elif request.user == obj:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
class CustomTokenHasScope(TokenHasScope):
|
||||
"""
|
||||
Custom implementation of Django OAuth Toolkit TokenHasScope class
|
||||
Only difference: if any other authentication method except OAuth2Authentication is used the scope check is ignored
|
||||
IMPORTANT: do not use this class without any other permission class as it will not check anything besides token scopes
|
||||
"""
|
||||
|
||||
def has_permission(self, request, view):
|
||||
if type(request.auth) == AccessToken:
|
||||
return super().has_permission(request, view)
|
||||
else:
|
||||
return request.user.is_authenticated
|
||||
|
||||
|
||||
class CustomTokenHasReadWriteScope(TokenHasReadWriteScope):
|
||||
"""
|
||||
Custom implementation of Django OAuth Toolkit TokenHasReadWriteScope class
|
||||
Only difference: if any other authentication method except OAuth2Authentication is used the scope check is ignored
|
||||
IMPORTANT: do not use this class without any other permission class as it will not check anything besides token scopes
|
||||
"""
|
||||
|
||||
def has_permission(self, request, view):
|
||||
if type(request.auth) == AccessToken:
|
||||
return super().has_permission(request, view)
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
def above_space_limit(space): # TODO add file storage limit
|
||||
"""
|
||||
Test if the space has reached any limit (e.g. max recipes, users, ..)
|
||||
:param space: Space to test for limits
|
||||
:return: Tuple (True if above or equal any limit else false, message)
|
||||
"""
|
||||
r_limit, r_msg = above_space_recipe_limit(space)
|
||||
u_limit, u_msg = above_space_user_limit(space)
|
||||
return r_limit or u_limit, (r_msg + ' ' + u_msg).strip()
|
||||
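The combined above_space_limit helper above returns a flag plus a user-facing message; a typical call site in a view might look like this sketch (module path and view wiring assumed):

from django.contrib import messages

from cookbook.helper.permission_helper import above_space_limit  # module path assumed


def guard_space_limits(request):
    limit_reached, message = above_space_limit(request.space)
    if limit_reached:
        messages.add_message(request, messages.WARNING, message)
    return not limit_reached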
|
||||
|
||||
def above_space_recipe_limit(space):
|
||||
"""
|
||||
Test if a space has reached its recipe limit
|
||||
:param space: Space to test for limits
|
||||
:return: Tuple (True if above or equal limit else false, message)
|
||||
"""
|
||||
limit = space.max_recipes != 0 and Recipe.objects.filter(space=space).count() >= space.max_recipes
|
||||
if limit:
|
||||
return True, _('You have reached the maximum number of recipes for your space.')
|
||||
return False, ''
|
||||
|
||||
|
||||
def above_space_user_limit(space):
|
||||
"""
|
||||
Test if a space has reached its user limit
|
||||
:param space: Space to test for limits
|
||||
:return: Tuple (True if above or equal limit else false, message)
|
||||
"""
|
||||
limit = space.max_users != 0 and UserSpace.objects.filter(space=space).count() > space.max_users
|
||||
if limit:
|
||||
return True, _('You have more users than allowed in your space.')
|
||||
return False, ''
|
||||
|
||||
|
||||
def switch_user_active_space(user, space):
|
||||
"""
|
||||
Switch the currently active space of a user by setting all spaces to inactive and activating the one passed
|
||||
:param user: user to change active space for
|
||||
:param space: space to activate user for
|
||||
:return user space object or none if not found/no permission
|
||||
"""
|
||||
try:
|
||||
us = UserSpace.objects.get(space=space, user=user)
|
||||
if not us.active:
|
||||
UserSpace.objects.filter(user=user).update(active=False)
|
||||
us.active = True
|
||||
us.save()
|
||||
return us
|
||||
else:
|
||||
return us
|
||||
except ObjectDoesNotExist:
|
||||
return None
|
||||
|
||||
@@ -1,194 +1,191 @@
|
||||
import json
|
||||
import re
|
||||
from json import JSONDecodeError
|
||||
from urllib.parse import unquote
|
||||
# import json
|
||||
# import re
|
||||
# from json import JSONDecodeError
|
||||
# from urllib.parse import unquote
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from bs4.element import Tag
|
||||
from recipe_scrapers._utils import get_host_name, normalize_string
|
||||
# from bs4 import BeautifulSoup
|
||||
# from bs4.element import Tag
|
||||
# from recipe_scrapers import scrape_html, scrape_me
|
||||
# from recipe_scrapers._exceptions import NoSchemaFoundInWildMode
|
||||
# from recipe_scrapers._utils import get_host_name, normalize_string
|
||||
|
||||
from cookbook.helper import recipe_url_import as helper
|
||||
from cookbook.helper.scrapers.scrapers import text_scraper
|
||||
# from cookbook.helper import recipe_url_import as helper
|
||||
# from cookbook.helper.scrapers.scrapers import text_scraper
|
||||
|
||||
|
||||
def get_recipe_from_source(text, url, request):
|
||||
def build_node(k, v):
|
||||
if isinstance(v, dict):
|
||||
node = {
|
||||
'name': k,
|
||||
'value': k,
|
||||
'children': get_children_dict(v)
|
||||
}
|
||||
elif isinstance(v, list):
|
||||
node = {
|
||||
'name': k,
|
||||
'value': k,
|
||||
'children': get_children_list(v)
|
||||
}
|
||||
else:
|
||||
node = {
|
||||
'name': k + ": " + normalize_string(str(v)),
|
||||
'value': normalize_string(str(v))
|
||||
}
|
||||
return node
|
||||
# def get_recipe_from_source(text, url, request):
|
||||
# def build_node(k, v):
|
||||
# if isinstance(v, dict):
|
||||
# node = {
|
||||
# 'name': k,
|
||||
# 'value': k,
|
||||
# 'children': get_children_dict(v)
|
||||
# }
|
||||
# elif isinstance(v, list):
|
||||
# node = {
|
||||
# 'name': k,
|
||||
# 'value': k,
|
||||
# 'children': get_children_list(v)
|
||||
# }
|
||||
# else:
|
||||
# node = {
|
||||
# 'name': k + ": " + normalize_string(str(v)),
|
||||
# 'value': normalize_string(str(v))
|
||||
# }
|
||||
# return node
|
||||
|
||||
def get_children_dict(children):
|
||||
kid_list = []
|
||||
for k, v in children.items():
|
||||
kid_list.append(build_node(k, v))
|
||||
return kid_list
|
||||
# def get_children_dict(children):
|
||||
# kid_list = []
|
||||
# for k, v in children.items():
|
||||
# kid_list.append(build_node(k, v))
|
||||
# return kid_list
|
||||
|
||||
def get_children_list(children):
|
||||
kid_list = []
|
||||
for kid in children:
|
||||
if type(kid) == list:
|
||||
node = {
|
||||
'name': "unknown list",
|
||||
'value': "unknown list",
|
||||
'children': get_children_list(kid)
|
||||
}
|
||||
kid_list.append(node)
|
||||
elif type(kid) == dict:
|
||||
for k, v in kid.items():
|
||||
kid_list.append(build_node(k, v))
|
||||
else:
|
||||
kid_list.append({
|
||||
'name': normalize_string(str(kid)),
|
||||
'value': normalize_string(str(kid))
|
||||
})
|
||||
return kid_list
|
||||
# def get_children_list(children):
|
||||
# kid_list = []
|
||||
# for kid in children:
|
||||
# if type(kid) == list:
|
||||
# node = {
|
||||
# 'name': "unknown list",
|
||||
# 'value': "unknown list",
|
||||
# 'children': get_children_list(kid)
|
||||
# }
|
||||
# kid_list.append(node)
|
||||
# elif type(kid) == dict:
|
||||
# for k, v in kid.items():
|
||||
# kid_list.append(build_node(k, v))
|
||||
# else:
|
||||
# kid_list.append({
|
||||
# 'name': normalize_string(str(kid)),
|
||||
# 'value': normalize_string(str(kid))
|
||||
# })
|
||||
# return kid_list
|
||||
|
||||
recipe_json = {
|
||||
'name': '',
|
||||
'url': '',
|
||||
'description': '',
|
||||
'image': '',
|
||||
'keywords': [],
|
||||
'recipeIngredient': [],
|
||||
'recipeInstructions': '',
|
||||
'servings': '',
|
||||
'prepTime': '',
|
||||
'cookTime': ''
|
||||
}
|
||||
recipe_tree = []
|
||||
parse_list = []
|
||||
html_data = []
|
||||
images = []
|
||||
text = unquote(text)
|
||||
# recipe_tree = []
|
||||
# parse_list = []
|
||||
# soup = BeautifulSoup(text, "html.parser")
|
||||
# html_data = get_from_html(soup)
|
||||
# images = get_images_from_source(soup, url)
|
||||
# text = unquote(text)
|
||||
# scrape = None
|
||||
|
||||
try:
|
||||
parse_list.append(remove_graph(json.loads(text)))
|
||||
if not url and 'url' in parse_list[0]:
|
||||
url = parse_list[0]['url']
|
||||
scrape = text_scraper("<script type='application/ld+json'>" + text + "</script>", url=url)
|
||||
# if url and not text:
|
||||
# try:
|
||||
# scrape = scrape_me(url_path=url, wild_mode=True)
|
||||
# except(NoSchemaFoundInWildMode):
|
||||
# pass
|
||||
|
||||
except JSONDecodeError:
|
||||
soup = BeautifulSoup(text, "html.parser")
|
||||
html_data = get_from_html(soup)
|
||||
images += get_images_from_source(soup, url)
|
||||
for el in soup.find_all('script', type='application/ld+json'):
|
||||
el = remove_graph(el)
|
||||
if not url and 'url' in el:
|
||||
url = el['url']
|
||||
if type(el) == list:
|
||||
for le in el:
|
||||
parse_list.append(le)
|
||||
elif type(el) == dict:
|
||||
parse_list.append(el)
|
||||
for el in soup.find_all(type='application/json'):
|
||||
el = remove_graph(el)
|
||||
if type(el) == list:
|
||||
for le in el:
|
||||
parse_list.append(le)
|
||||
elif type(el) == dict:
|
||||
parse_list.append(el)
|
||||
scrape = text_scraper(text, url=url)
|
||||
# if not scrape:
|
||||
# try:
|
||||
# parse_list.append(remove_graph(json.loads(text)))
|
||||
# if not url and 'url' in parse_list[0]:
|
||||
# url = parse_list[0]['url']
|
||||
# scrape = text_scraper("<script type='application/ld+json'>" + text + "</script>", url=url)
|
||||
|
||||
recipe_json = helper.get_from_scraper(scrape, request)
|
||||
# except JSONDecodeError:
|
||||
# for el in soup.find_all('script', type='application/ld+json'):
|
||||
# el = remove_graph(el)
|
||||
# if not url and 'url' in el:
|
||||
# url = el['url']
|
||||
# if type(el) == list:
|
||||
# for le in el:
|
||||
# parse_list.append(le)
|
||||
# elif type(el) == dict:
|
||||
# parse_list.append(el)
|
||||
# for el in soup.find_all(type='application/json'):
|
||||
# el = remove_graph(el)
|
||||
# if type(el) == list:
|
||||
# for le in el:
|
||||
# parse_list.append(le)
|
||||
# elif type(el) == dict:
|
||||
# parse_list.append(el)
|
||||
# scrape = text_scraper(text, url=url)
|
||||
|
||||
for el in parse_list:
|
||||
temp_tree = []
|
||||
if isinstance(el, Tag):
|
||||
try:
|
||||
el = json.loads(el.string)
|
||||
except TypeError:
|
||||
continue
|
||||
# recipe_json = helper.get_from_scraper(scrape, request)
|
||||
|
||||
for k, v in el.items():
|
||||
if isinstance(v, dict):
|
||||
node = {
|
||||
'name': k,
|
||||
'value': k,
|
||||
'children': get_children_dict(v)
|
||||
}
|
||||
elif isinstance(v, list):
|
||||
node = {
|
||||
'name': k,
|
||||
'value': k,
|
||||
'children': get_children_list(v)
|
||||
}
|
||||
else:
|
||||
node = {
|
||||
'name': k + ": " + normalize_string(str(v)),
|
||||
'value': normalize_string(str(v))
|
||||
}
|
||||
temp_tree.append(node)
|
||||
# # TODO: DEPRECATE recipe_tree & html_data. first validate it isn't used anywhere
|
||||
# for el in parse_list:
|
||||
# temp_tree = []
|
||||
# if isinstance(el, Tag):
|
||||
# try:
|
||||
# el = json.loads(el.string)
|
||||
# except TypeError:
|
||||
# continue
|
||||
|
||||
if '@type' in el and el['@type'] == 'Recipe':
|
||||
recipe_tree += [{'name': 'ld+json', 'children': temp_tree}]
|
||||
else:
|
||||
recipe_tree += [{'name': 'json', 'children': temp_tree}]
|
||||
# for k, v in el.items():
|
||||
# if isinstance(v, dict):
|
||||
# node = {
|
||||
# 'name': k,
|
||||
# 'value': k,
|
||||
# 'children': get_children_dict(v)
|
||||
# }
|
||||
# elif isinstance(v, list):
|
||||
# node = {
|
||||
# 'name': k,
|
||||
# 'value': k,
|
||||
# 'children': get_children_list(v)
|
||||
# }
|
||||
# else:
|
||||
# node = {
|
||||
# 'name': k + ": " + normalize_string(str(v)),
|
||||
# 'value': normalize_string(str(v))
|
||||
# }
|
||||
# temp_tree.append(node)
|
||||
|
||||
return recipe_json, recipe_tree, html_data, images
|
||||
# if '@type' in el and el['@type'] == 'Recipe':
|
||||
# recipe_tree += [{'name': 'ld+json', 'children': temp_tree}]
|
||||
# else:
|
||||
# recipe_tree += [{'name': 'json', 'children': temp_tree}]
|
||||
|
||||
# return recipe_json, recipe_tree, html_data, images
|
||||
|
||||
|
||||
def get_from_html(soup):
|
||||
INVISIBLE_ELEMS = ('style', 'script', 'head', 'title')
|
||||
html = []
|
||||
for s in soup.strings:
|
||||
if ((s.parent.name not in INVISIBLE_ELEMS) and (len(s.strip()) > 0)):
|
||||
html.append(s)
|
||||
return html
|
||||
# def get_from_html(soup):
|
||||
# INVISIBLE_ELEMS = ('style', 'script', 'head', 'title')
|
||||
# html = []
|
||||
# for s in soup.strings:
|
||||
# if ((s.parent.name not in INVISIBLE_ELEMS) and (len(s.strip()) > 0)):
|
||||
# html.append(s)
|
||||
# return html
|
||||
|
||||
|
||||
def get_images_from_source(soup, url):
|
||||
sources = ['src', 'srcset', 'data-src']
|
||||
images = []
|
||||
img_tags = soup.find_all('img')
|
||||
if url:
|
||||
site = get_host_name(url)
|
||||
prot = url.split(':')[0]
|
||||
# def get_images_from_source(soup, url):
|
||||
# sources = ['src', 'srcset', 'data-src']
|
||||
# images = []
|
||||
# img_tags = soup.find_all('img')
|
||||
# if url:
|
||||
# site = get_host_name(url)
|
||||
# prot = url.split(':')[0]
|
||||
|
||||
urls = []
|
||||
for img in img_tags:
|
||||
for src in sources:
|
||||
try:
|
||||
urls.append(img[src])
|
||||
except KeyError:
|
||||
pass
|
||||
# urls = []
|
||||
# for img in img_tags:
|
||||
# for src in sources:
|
||||
# try:
|
||||
# urls.append(img[src])
|
||||
# except KeyError:
|
||||
# pass
|
||||
|
||||
for u in urls:
|
||||
u = u.split('?')[0]
|
||||
filename = re.search(r'/([\w_-]+[.](jpg|jpeg|gif|png))$', u)
|
||||
if filename:
|
||||
if (('http' not in u) and (url)):
|
||||
# sometimes an image source can be relative
|
||||
# if it is provide the base url
|
||||
u = '{}://{}{}'.format(prot, site, u)
|
||||
if 'http' in u:
|
||||
images.append(u)
|
||||
return images
|
||||
# for u in urls:
|
||||
# u = u.split('?')[0]
|
||||
# filename = re.search(r'/([\w_-]+[.](jpg|jpeg|gif|png))$', u)
|
||||
# if filename:
|
||||
# if (('http' not in u) and (url)):
|
||||
# # sometimes an image source can be relative
|
||||
# # if it is provide the base url
|
||||
# u = '{}://{}{}'.format(prot, site, u)
|
||||
# if 'http' in u:
|
||||
# images.append(u)
|
||||
# return images
|
||||
|
||||
|
||||
def remove_graph(el):
|
||||
# recipes type might be wrapped in @graph type
|
||||
if isinstance(el, Tag):
|
||||
try:
|
||||
el = json.loads(el.string)
|
||||
if '@graph' in el:
|
||||
for x in el['@graph']:
|
||||
if '@type' in x and x['@type'] == 'Recipe':
|
||||
el = x
|
||||
except (TypeError, JSONDecodeError):
|
||||
pass
|
||||
return el
|
||||
# def remove_graph(el):
|
||||
# # recipes type might be wrapped in @graph type
|
||||
# if isinstance(el, Tag):
|
||||
# try:
|
||||
# el = json.loads(el.string)
|
||||
# if '@graph' in el:
|
||||
# for x in el['@graph']:
|
||||
# if '@type' in x and x['@type'] == 'Recipe':
|
||||
# el = x
|
||||
# except (TypeError, JSONDecodeError):
|
||||
# pass
|
||||
# return el
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,22 +1,27 @@
|
||||
import random
|
||||
import re
|
||||
from html import unescape
|
||||
from unicodedata import decomposition
|
||||
|
||||
from django.utils.dateparse import parse_duration
|
||||
from django.utils.translation import gettext as _
|
||||
from isodate import parse_duration as iso_parse_duration
|
||||
from isodate.isoerror import ISO8601Error
|
||||
from recipe_scrapers._utils import get_minutes
|
||||
from pytube import YouTube
|
||||
from recipe_scrapers._utils import get_host_name, get_minutes
|
||||
|
||||
from cookbook.helper import recipe_url_import as helper
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.models import Keyword
|
||||
|
||||
# from recipe_scrapers._utils import get_minutes ## temporary until/unless upstream incorporates get_minutes() PR
|
||||
|
||||
|
||||
def get_from_scraper(scrape, request):
|
||||
# converting the scrape_me object to the existing json format based on ld+json
|
||||
recipe_json = {}
|
||||
try:
|
||||
recipe_json['name'] = parse_name(scrape.title() or None)
|
||||
recipe_json['name'] = parse_name(scrape.title()[:128] or None)
|
||||
except Exception:
|
||||
recipe_json['name'] = None
|
||||
if not recipe_json['name']:
|
||||
@@ -26,43 +31,46 @@ def get_from_scraper(scrape, request):
|
||||
recipe_json['name'] = ''
|
||||
|
||||
try:
|
||||
description = scrape.schema.data.get("description") or ''
|
||||
description = scrape.description() or None
|
||||
except Exception:
|
||||
description = ''
|
||||
|
||||
recipe_json['description'] = parse_description(description)
|
||||
|
||||
try:
|
||||
servings = scrape.yields() or None
|
||||
except Exception:
|
||||
servings = None
|
||||
if not servings:
|
||||
description = None
|
||||
if not description:
|
||||
try:
|
||||
servings = scrape.schema.data.get('recipeYield') or 1
|
||||
description = scrape.schema.data.get("description") or ''
|
||||
except Exception:
|
||||
servings = 1
|
||||
if type(servings) != int:
|
||||
description = ''
|
||||
|
||||
recipe_json['internal'] = True
|
||||
|
||||
try:
|
||||
servings = scrape.schema.data.get('recipeYield') or 1 # dont use scrape.yields() as this will always return "x servings" or "x items", should be improved in scrapers directly
|
||||
except Exception:
|
||||
servings = 1
|
||||
|
||||
recipe_json['servings'] = parse_servings(servings)
|
||||
recipe_json['servings_text'] = parse_servings_text(servings)
|
||||
|
||||
try:
|
||||
recipe_json['working_time'] = get_minutes(scrape.prep_time()) or 0
|
||||
except Exception:
|
||||
try:
|
||||
servings = int(re.findall(r'\b\d+\b', servings)[0])
|
||||
recipe_json['working_time'] = get_minutes(scrape.schema.data.get("prepTime")) or 0
|
||||
except Exception:
|
||||
servings = 1
|
||||
recipe_json['servings'] = max(servings, 1)
|
||||
|
||||
recipe_json['working_time'] = 0
|
||||
try:
|
||||
recipe_json['prepTime'] = get_minutes(scrape.schema.data.get("prepTime")) or 0
|
||||
recipe_json['waiting_time'] = get_minutes(scrape.cook_time()) or 0
|
||||
except Exception:
|
||||
recipe_json['prepTime'] = 0
|
||||
try:
|
||||
recipe_json['cookTime'] = get_minutes(scrape.schema.data.get("cookTime")) or 0
|
||||
except Exception:
|
||||
recipe_json['cookTime'] = 0
|
||||
|
||||
if recipe_json['cookTime'] + recipe_json['prepTime'] == 0:
|
||||
try:
|
||||
recipe_json['prepTime'] = get_minutes(scrape.total_time()) or 0
|
||||
recipe_json['waiting_time'] = get_minutes(scrape.schema.data.get("cookTime")) or 0
|
||||
except Exception:
|
||||
recipe_json['waiting_time'] = 0
|
||||
|
||||
if recipe_json['working_time'] + recipe_json['waiting_time'] == 0:
|
||||
try:
|
||||
recipe_json['working_time'] = get_minutes(scrape.total_time()) or 0
|
||||
except Exception:
|
||||
try:
|
||||
get_minutes(scrape.schema.data.get("totalTime")) or 0
|
||||
recipe_json['working_time'] = get_minutes(scrape.schema.data.get("totalTime")) or 0
|
||||
except Exception:
|
||||
pass
|
||||
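Every field in this importer now follows the same shape: try the recipe_scrapers accessor, fall back to the raw schema entry, and never let a missing field abort the import. Condensed into one hedged helper; the name safe_scrape is ours, not the project's:

def safe_scrape(primary, fallback=None, default=None):
    for getter in (primary, fallback):
        if getter is None:
            continue
        try:
            value = getter()
            if value:
                return value
        except Exception:
            continue
    return default

# e.g.: description = safe_scrape(scrape.description, lambda: scrape.schema.data.get('description'), '')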
|
||||
@@ -83,15 +91,38 @@ def get_from_scraper(scrape, request):
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
if scrape.schema.data.get('recipeCategory'):
|
||||
keywords += listify_keywords(scrape.schema.data.get("recipeCategory"))
|
||||
if scrape.category():
|
||||
keywords += listify_keywords(scrape.category())
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
if scrape.schema.data.get('recipeCategory'):
|
||||
keywords += listify_keywords(scrape.schema.data.get("recipeCategory"))
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
if scrape.schema.data.get('recipeCuisine'):
|
||||
keywords += listify_keywords(scrape.schema.data.get("recipeCuisine"))
|
||||
if scrape.cuisine():
|
||||
keywords += listify_keywords(scrape.cuisine())
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
if scrape.schema.data.get('recipeCuisine'):
|
||||
keywords += listify_keywords(scrape.schema.data.get("recipeCuisine"))
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
source_url = scrape.canonical_url()
|
||||
except Exception:
|
||||
try:
|
||||
source_url = scrape.url
|
||||
except Exception:
|
||||
pass
|
||||
if source_url:
|
||||
recipe_json['source_url'] = source_url
|
||||
try:
|
||||
keywords.append(source_url.replace('http://', '').replace('https://', '').split('/')[0])
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
recipe_json['keywords'] = parse_keywords(list(set(map(str.casefold, keywords))), request.space)
|
||||
except AttributeError:
|
||||
@@ -99,55 +130,84 @@ def get_from_scraper(scrape, request):
|
||||
|
||||
ingredient_parser = IngredientParser(request, True)
|
||||
|
||||
ingredients = []
|
||||
recipe_json['steps'] = []
|
||||
try:
|
||||
for i in parse_instructions(scrape.instructions()):
|
||||
recipe_json['steps'].append({'instruction': i, 'ingredients': [], })
|
||||
except Exception:
|
||||
pass
|
||||
if len(recipe_json['steps']) == 0:
|
||||
recipe_json['steps'].append({'instruction': '', 'ingredients': [], })
|
||||
|
||||
if len(parse_description(description)) > 256: # split at 256 as long descriptions dont look good on recipe cards
|
||||
recipe_json['steps'][0]['instruction'] = f'*{parse_description(description)}* \n\n' + recipe_json['steps'][0]['instruction']
|
||||
else:
|
||||
recipe_json['description'] = parse_description(description)[:512]
|
||||
|
||||
try:
|
||||
for x in scrape.ingredients():
|
||||
try:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(x)
|
||||
ingredients.append(
|
||||
{
|
||||
'amount': amount,
|
||||
'unit': {
|
||||
'text': unit,
|
||||
'id': random.randrange(10000, 99999)
|
||||
},
|
||||
'ingredient': {
|
||||
'text': ingredient,
|
||||
'id': random.randrange(10000, 99999)
|
||||
},
|
||||
'note': note,
|
||||
'original': x
|
||||
}
|
||||
)
|
||||
ingredient = {
|
||||
'amount': amount,
|
||||
'food': {
|
||||
'name': ingredient,
|
||||
},
|
||||
'unit': None,
|
||||
'note': note,
|
||||
'original_text': x
|
||||
}
|
||||
if unit:
|
||||
ingredient['unit'] = {'name': unit, }
|
||||
recipe_json['steps'][0]['ingredients'].append(ingredient)
|
||||
except Exception:
|
||||
ingredients.append(
|
||||
recipe_json['steps'][0]['ingredients'].append(
|
||||
{
|
||||
'amount': 0,
|
||||
'unit': {
|
||||
'text': '',
|
||||
'id': random.randrange(10000, 99999)
|
||||
},
|
||||
'ingredient': {
|
||||
'text': x,
|
||||
'id': random.randrange(10000, 99999)
|
||||
'unit': None,
|
||||
'food': {
|
||||
'name': x,
|
||||
},
|
||||
'note': '',
|
||||
'original': x
|
||||
'original_text': x
|
||||
}
|
||||
)
|
||||
recipe_json['recipeIngredient'] = ingredients
|
||||
except Exception:
|
||||
recipe_json['recipeIngredient'] = ingredients
|
||||
pass
|
||||
|
||||
return recipe_json
|
||||
|
||||
|
||||
def get_from_youtube_scraper(url, request):
|
||||
"""A YouTube Information Scraper."""
|
||||
kw, created = Keyword.objects.get_or_create(name='YouTube', space=request.space)
|
||||
default_recipe_json = {
|
||||
'name': '',
|
||||
'internal': True,
|
||||
'description': '',
|
||||
'servings': 1,
|
||||
'working_time': 0,
|
||||
'waiting_time': 0,
|
||||
'image': "",
|
||||
'keywords': [{'name': kw.name, 'label': kw.name, 'id': kw.pk}],
|
||||
'source_url': url,
|
||||
'steps': [
|
||||
{
|
||||
'ingredients': [],
|
||||
'instruction': ''
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
try:
|
||||
recipe_json['recipeInstructions'] = parse_instructions(scrape.instructions())
|
||||
video = YouTube(url=url)
|
||||
default_recipe_json['name'] = video.title
|
||||
default_recipe_json['image'] = video.thumbnail_url
|
||||
default_recipe_json['steps'][0]['instruction'] = video.description
|
||||
except Exception:
|
||||
recipe_json['recipeInstructions'] = ""
|
||||
pass
|
||||
|
||||
if scrape.url:
|
||||
recipe_json['url'] = scrape.url
|
||||
recipe_json['recipeInstructions'] += "\n\nImported from " + scrape.url
|
||||
return recipe_json
|
||||
return default_recipe_json
|
||||
|
||||
|
||||
def parse_name(name):
|
||||
@@ -159,102 +219,46 @@ def parse_name(name):
|
||||
return normalize_string(name)
|
||||
|
||||
|
||||
def parse_ingredients(ingredients):
|
||||
# some pages have comma separated ingredients in a single array entry
|
||||
try:
|
||||
if type(ingredients[0]) == dict:
|
||||
return ingredients
|
||||
except (KeyError, IndexError):
|
||||
pass
|
||||
|
||||
if (len(ingredients) == 1 and type(ingredients) == list):
|
||||
ingredients = ingredients[0].split(',')
|
||||
elif type(ingredients) == str:
|
||||
ingredients = ingredients.split(',')
|
||||
|
||||
for x in ingredients:
|
||||
if '\n' in x:
|
||||
ingredients.remove(x)
|
||||
for i in x.split('\n'):
|
||||
ingredients.insert(0, i)
|
||||
|
||||
ingredient_list = []
|
||||
|
||||
for x in ingredients:
|
||||
if x.replace(' ', '') != '':
|
||||
x = x.replace('½', "0.5").replace('¼', "0.25").replace('¾', "0.75")
|
||||
try:
|
||||
amount, unit, ingredient, note = parse_single_ingredient(x)
|
||||
if ingredient:
|
||||
ingredient_list.append(
|
||||
{
|
||||
'amount': amount,
|
||||
'unit': {
|
||||
'text': unit,
|
||||
'id': random.randrange(10000, 99999)
|
||||
},
|
||||
'ingredient': {
|
||||
'text': ingredient,
|
||||
'id': random.randrange(10000, 99999)
|
||||
},
|
||||
'note': note,
|
||||
'original': x
|
||||
}
|
||||
)
|
||||
except Exception:
|
||||
ingredient_list.append(
|
||||
{
|
||||
'amount': 0,
|
||||
'unit': {
|
||||
'text': '',
|
||||
'id': random.randrange(10000, 99999)
|
||||
},
|
||||
'ingredient': {
|
||||
'text': x,
|
||||
'id': random.randrange(10000, 99999)
|
||||
},
|
||||
'note': '',
|
||||
'original': x
|
||||
}
|
||||
)
|
||||
|
||||
ingredients = ingredient_list
|
||||
else:
|
||||
ingredients = []
|
||||
return ingredients
|
||||
|
||||
|
||||
def parse_description(description):
|
||||
return normalize_string(description)
|
||||
|
||||
|
||||
def parse_instructions(instructions):
|
||||
instruction_text = ''
|
||||
|
||||
# flatten instructions if they are in a list
|
||||
if type(instructions) == list:
|
||||
for i in instructions:
|
||||
if type(i) == str:
|
||||
instruction_text += i
|
||||
else:
|
||||
if 'text' in i:
|
||||
instruction_text += i['text'] + '\n\n'
|
||||
elif 'itemListElement' in i:
|
||||
for ile in i['itemListElement']:
|
||||
if type(ile) == str:
|
||||
instruction_text += ile + '\n\n'
|
||||
elif 'text' in ile:
|
||||
instruction_text += ile['text'] + '\n\n'
|
||||
else:
|
||||
instruction_text += str(i)
|
||||
instructions = instruction_text
|
||||
|
||||
normalized_string = normalize_string(instructions)
|
||||
def clean_instruction_string(instruction):
|
||||
normalized_string = normalize_string(instruction)
|
||||
normalized_string = normalized_string.replace('\n', ' \n')
|
||||
normalized_string = normalized_string.replace(' \n \n', '\n\n')
|
||||
return normalized_string
|
||||
|
||||
|
||||
def parse_instructions(instructions):
|
||||
"""
|
||||
Convert arbitrary instructions object from website import and turn it into a flat list of strings
|
||||
:param instructions: any instructions object from import
|
||||
:return: list of strings (from one to many elements depending on website)
|
||||
"""
|
||||
instruction_list = []
|
||||
|
||||
if type(instructions) == list:
|
||||
for i in instructions:
|
||||
if type(i) == str:
|
||||
instruction_list.append(clean_instruction_string(i))
|
||||
else:
|
||||
if 'text' in i:
|
||||
instruction_list.append(clean_instruction_string(i['text']))
|
||||
elif 'itemListElement' in i:
|
||||
for ile in i['itemListElement']:
|
||||
if type(ile) == str:
|
||||
instruction_list.append(clean_instruction_string(ile))
|
||||
elif 'text' in ile:
|
||||
instruction_list.append(clean_instruction_string(ile['text']))
|
||||
else:
|
||||
instruction_list.append(clean_instruction_string(str(i)))
|
||||
else:
|
||||
instruction_list.append(clean_instruction_string(instructions))
|
||||
|
||||
return instruction_list
|
||||
|
||||
|
||||
def parse_image(image):
|
||||
# check if list of images is returned, take first if so
|
||||
if not image:
|
||||
@@ -289,40 +293,31 @@ def parse_servings(servings):
|
||||
return servings
|
||||
|
||||
|
||||
def parse_cooktime(cooktime):
|
||||
if type(cooktime) not in [int, float]:
|
||||
def parse_servings_text(servings):
|
||||
if type(servings) == str:
|
||||
try:
|
||||
cooktime = float(re.search(r'\d+', cooktime).group())
|
||||
servings = re.sub("\d+", '', servings).strip()
|
||||
except Exception:
|
||||
servings = ''
|
||||
return str(servings)[:32]
|
||||
|
||||
|
||||
def parse_time(recipe_time):
|
||||
if type(recipe_time) not in [int, float]:
|
||||
try:
|
||||
recipe_time = float(re.search(r'\d+', recipe_time).group())
|
||||
except (ValueError, AttributeError):
|
||||
try:
|
||||
cooktime = round(iso_parse_duration(cooktime).seconds / 60)
|
||||
recipe_time = round(iso_parse_duration(recipe_time).seconds / 60)
|
||||
except ISO8601Error:
|
||||
try:
|
||||
if (type(cooktime) == list and len(cooktime) > 0):
|
||||
cooktime = cooktime[0]
|
||||
cooktime = round(parse_duration(cooktime).seconds / 60)
|
||||
if (type(recipe_time) == list and len(recipe_time) > 0):
|
||||
recipe_time = recipe_time[0]
|
||||
recipe_time = round(parse_duration(recipe_time).seconds / 60)
|
||||
except AttributeError:
|
||||
cooktime = 0
|
||||
recipe_time = 0
|
||||
|
||||
return cooktime
|
||||
|
||||
|
||||
def parse_preptime(preptime):
|
||||
if type(preptime) not in [int, float]:
|
||||
try:
|
||||
preptime = float(re.search(r'\d+', preptime).group())
|
||||
except ValueError:
|
||||
try:
|
||||
preptime = round(iso_parse_duration(preptime).seconds / 60)
|
||||
except ISO8601Error:
|
||||
try:
|
||||
if (type(preptime) == list and len(preptime) > 0):
|
||||
preptime = preptime[0]
|
||||
preptime = round(parse_duration(preptime).seconds / 60)
|
||||
except AttributeError:
|
||||
preptime = 0
|
||||
|
||||
return preptime
|
||||
return recipe_time
|
||||
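parse_cooktime and parse_preptime collapse into a single parse_time that accepts numbers, digit-bearing strings, ISO 8601 durations and list-wrapped values, always returning whole minutes. Its ISO 8601 fallback relies on isodate, as in this small check:

from isodate import parse_duration as iso_parse_duration

minutes = round(iso_parse_duration('PT1H30M').seconds / 60)  # -> 90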
|
||||
|
||||
def parse_keywords(keyword_json, space):
|
||||
@@ -332,9 +327,9 @@ def parse_keywords(keyword_json, space):
|
||||
kw = normalize_string(kw)
|
||||
if len(kw) != 0:
|
||||
if k := Keyword.objects.filter(name=kw, space=space).first():
|
||||
keywords.append({'id': str(k.id), 'text': str(k)})
|
||||
keywords.append({'label': str(k), 'name': k.name, 'id': k.id})
|
||||
else:
|
||||
keywords.append({'id': random.randrange(1111111, 9999999, 1), 'text': kw})
|
||||
keywords.append({'label': kw, 'name': kw})
|
||||
|
||||
return keywords
|
||||
|
||||
@@ -372,3 +367,32 @@ def iso_duration_to_minutes(string):
|
||||
string
|
||||
).groupdict()
|
||||
return int(match['days'] or 0) * 24 * 60 + int(match['hours'] or 0) * 60 + int(match['minutes'] or 0)
|
||||
|
||||
|
||||
def get_images_from_soup(soup, url):
|
||||
sources = ['src', 'srcset', 'data-src']
|
||||
images = []
|
||||
img_tags = soup.find_all('img')
|
||||
if url:
|
||||
site = get_host_name(url)
|
||||
prot = url.split(':')[0]
|
||||
|
||||
urls = []
|
||||
for img in img_tags:
|
||||
for src in sources:
|
||||
try:
|
||||
urls.append(img[src])
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
for u in urls:
|
||||
u = u.split('?')[0]
|
||||
filename = re.search(r'/([\w_-]+[.](jpg|jpeg|gif|png))$', u)
|
||||
if filename:
|
||||
if (('http' not in u) and (url)):
|
||||
# sometimes an image source can be relative
|
||||
# if it is provide the base url
|
||||
u = '{}://{}{}'.format(prot, site, u)
|
||||
if 'http' in u:
|
||||
images.append(u)
|
||||
return images
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from django.urls import reverse
|
||||
from django_scopes import scope, scopes_disabled
|
||||
from oauth2_provider.contrib.rest_framework import OAuth2Authentication
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.authtoken.models import Token
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
@@ -14,6 +15,12 @@ class ScopeMiddleware:
|
||||
|
||||
def __call__(self, request):
|
||||
prefix = settings.JS_REVERSE_SCRIPT_PREFIX or ''
|
||||
|
||||
# need to disable scopes for writing requests into userpref and enable for loading ?
|
||||
if request.path.startswith(prefix + '/api/user-preference/'):
|
||||
with scopes_disabled():
|
||||
return self.get_response(request)
|
||||
|
||||
if request.user.is_authenticated:
|
||||
|
||||
if request.path.startswith(prefix + '/admin/'):
|
||||
@@ -26,24 +33,35 @@ class ScopeMiddleware:
|
||||
if request.path.startswith(prefix + '/accounts/'):
|
||||
return self.get_response(request)
|
||||
|
||||
with scopes_disabled():
|
||||
if request.user.userpreference.space is None and not reverse('account_logout') in request.path:
|
||||
return views.no_space(request)
|
||||
if request.path.startswith(prefix + '/switch-space/'):
|
||||
return self.get_response(request)
|
||||
|
||||
if request.user.groups.count() == 0 and not reverse('account_logout') in request.path:
|
||||
with scopes_disabled():
|
||||
if request.user.userspace_set.count() == 0 and not reverse('account_logout') in request.path:
|
||||
return views.space_overview(request)
|
||||
|
||||
# get active user space, if for some reason more than one space is active select first (group permission checks will fail, this is not intended at this point)
|
||||
user_space = request.user.userspace_set.filter(active=True).first()
|
||||
|
||||
if not user_space:
|
||||
return views.space_overview(request)
|
||||
|
||||
if user_space.groups.count() == 0 and not reverse('account_logout') in request.path:
|
||||
return views.no_groups(request)
|
||||
|
||||
request.space = request.user.userpreference.space
|
||||
request.space = user_space.space
|
||||
# with scopes_disabled():
|
||||
with scope(space=request.space):
|
||||
return self.get_response(request)
|
||||
else:
|
||||
if request.path.startswith(prefix + '/api/'):
|
||||
try:
|
||||
if auth := TokenAuthentication().authenticate(request):
|
||||
request.space = auth[0].userpreference.space
|
||||
with scope(space=request.space):
|
||||
return self.get_response(request)
|
||||
if auth := OAuth2Authentication().authenticate(request):
|
||||
user_space = auth[0].userspace_set.filter(active=True).first()
|
||||
if user_space:
|
||||
request.space = user_space.space
|
||||
with scope(space=request.space):
|
||||
return self.get_response(request)
|
||||
except AuthenticationFailed:
|
||||
pass
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
from bs4 import BeautifulSoup
from json import JSONDecodeError

from bs4 import BeautifulSoup
from recipe_scrapers import SCRAPERS, get_host_name
from recipe_scrapers._factory import SchemaScraperFactory
from recipe_scrapers._schemaorg import SchemaOrg
@@ -26,17 +27,17 @@ def text_scraper(text, url=None):
class TextScraper(scraper_class):
def __init__(
self,
page_data,
url=None
html=None,
url=None,
):
self.wild_mode = False
self.meta_http_equiv = False
self.soup = BeautifulSoup(page_data, "html.parser")
self.soup = BeautifulSoup(html, "html.parser")
self.url = url
self.recipe = None
try:
self.schema = SchemaOrg(page_data)
self.schema = SchemaOrg(html)
except (JSONDecodeError, AttributeError):
pass

return TextScraper(text, url)
return TextScraper(url=url, html=text)
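text_scraper now builds the scraper from keyword arguments (html= and url=). A hedged call-site sketch; schema may be absent when the page carries no parsable ld+json:

from cookbook.helper.scrapers.scrapers import text_scraper


def scrape_pasted_html(html, url=None):
    scrape = text_scraper(html, url=url)
    schema = getattr(scrape, 'schema', None)  # SchemaOrg() can fail and leave the attribute unset
    return schema.data if schema else {}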
|
||||
@@ -35,7 +35,7 @@ def shopping_helper(qs, request):
qs = qs.filter(Q(checked=False) | Q(completed_at__gte=week_ago))
supermarket_order = ['checked'] + supermarket_order

return qs.order_by(*supermarket_order).select_related('unit', 'food', 'ingredient', 'created_by', 'list_recipe', 'list_recipe__mealplan', 'list_recipe__recipe')
return qs.distinct().order_by(*supermarket_order).select_related('unit', 'food', 'ingredient', 'created_by', 'list_recipe', 'list_recipe__mealplan', 'list_recipe__recipe')


class RecipeShoppingEditor():
@@ -65,9 +65,13 @@ class RecipeShoppingEditor():
except (ValueError, TypeError):
self.servings = getattr(self._shopping_list_recipe, 'servings', None) or getattr(self.mealplan, 'servings', None) or getattr(self.recipe, 'servings', None)

@property
def _recipe_servings(self):
return getattr(self.recipe, 'servings', None) or getattr(getattr(self.mealplan, 'recipe', None), 'servings', None) or getattr(getattr(self._shopping_list_recipe, 'recipe', None), 'servings', None)

@property
def _servings_factor(self):
return self.servings / self.recipe.servings
return Decimal(self.servings)/Decimal(self._recipe_servings)

@property
def _shared_users(self):
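Switching the servings factor to Decimal matters because ingredient amounts come out of Django DecimalFields as decimal.Decimal, and multiplying a Decimal by a float raises TypeError; building the factor from Decimal values keeps the scaling exact. A small illustration with made-up numbers:

from decimal import Decimal

servings_wanted = 3
recipe_servings = 4
factor = Decimal(servings_wanted) / Decimal(recipe_servings)  # Decimal('0.75')
amount = Decimal('250')           # e.g. grams, as returned by a DecimalField
print(amount * factor)            # 187.50, exact, and no Decimal/float TypeError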
@@ -1,11 +1,14 @@
from gettext import gettext as _

import bleach
import markdown as md
from bleach_allowlist import markdown_attrs, markdown_tags
from jinja2 import Template, TemplateSyntaxError, UndefinedError
from markdown.extensions.tables import TableExtension

from cookbook.helper.mdx_attributes import MarkdownFormatExtension
from cookbook.helper.mdx_urlize import UrlizeExtension
from jinja2 import Template, TemplateSyntaxError, UndefinedError
from gettext import gettext as _
from markdown.extensions.tables import TableExtension


class IngredientObject(object):
amount = ""
@@ -36,7 +39,7 @@ def render_instructions(step):  # TODO deduplicate markdown cleanup code
instructions = step.instruction

tags = markdown_tags + [
'pre', 'table', 'td', 'tr', 'th', 'tbody', 'style', 'thead'
'pre', 'table', 'td', 'tr', 'th', 'tbody', 'style', 'thead', 'img'
]
parsed_md = md.markdown(
instructions,
@@ -45,7 +48,7 @@ def render_instructions(step):  # TODO deduplicate markdown cleanup code
UrlizeExtension(), MarkdownFormatExtension()
]
)
markdown_attrs['*'] = markdown_attrs['*'] + ['class']
markdown_attrs['*'] = markdown_attrs['*'] + ['class', 'width', 'height']

instructions = bleach.clean(parsed_md, tags, markdown_attrs)
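Adding 'img' to the allowed tags and 'width'/'height' to the wildcard attributes is what lets rendered step instructions keep sized images while bleach still strips everything else. A standalone illustration of the effect (not the project's code; the image URL is made up):

import bleach
from bleach_allowlist import markdown_attrs, markdown_tags

html = '<img src="https://example.org/pic.jpg" width="200" onclick="alert(1)">'
tags = markdown_tags + ['pre', 'table', 'td', 'tr', 'th', 'tbody', 'style', 'thead', 'img']
attrs = dict(markdown_attrs)
attrs['*'] = attrs['*'] + ['class', 'width', 'height']
print(bleach.clean(html, tags=tags, attributes=attrs))
# the img tag and its src/width survive, the onclick handler is stripped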
@@ -2,14 +2,14 @@ import re
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient
|
||||
from cookbook.models import Ingredient, Recipe, Step
|
||||
|
||||
|
||||
class ChefTap(Integration):
|
||||
|
||||
def import_file_name_filter(self, zip_info_object):
|
||||
print("testing", zip_info_object.filename)
|
||||
return re.match(r'^cheftap_export/([A-Za-z\d\w\s-])+.txt$', zip_info_object.filename) or re.match(r'^([A-Za-z\d\w\s-])+.txt$', zip_info_object.filename)
|
||||
return re.match(r'^cheftap_export/([A-Za-z\d\s\-_()\[\]\u00C0-\u017F])+.txt$', zip_info_object.filename) or re.match(r'^([A-Za-z\d\s\-_()\[\]\u00C0-\u017F])+.txt$', zip_info_object.filename)
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
source_url = ''
|
||||
@@ -45,11 +45,11 @@ class ChefTap(Integration):
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
recipe.steps.add(step)
|
||||
|
||||
|
||||
@@ -5,14 +5,14 @@ from zipfile import ZipFile
|
||||
from cookbook.helper.image_processing import get_filetype
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
|
||||
class Chowdown(Integration):
|
||||
|
||||
def import_file_name_filter(self, zip_info_object):
|
||||
print("testing", zip_info_object.filename)
|
||||
return re.match(r'^(_)*recipes/([A-Za-z\d\s-])+.md$', zip_info_object.filename)
|
||||
return re.match(r'^(_)*recipes/([A-Za-z\d\s\-_()\[\]\u00C0-\u017F])+.md$', zip_info_object.filename)
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
ingredient_mode = False
|
||||
@@ -60,12 +60,13 @@ class Chowdown(Integration):
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
))
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
recipe.steps.add(step)
|
||||
|
||||
for f in self.files:
|
||||
|
||||
@@ -2,17 +2,19 @@ import base64
|
||||
import gzip
|
||||
import json
|
||||
import re
|
||||
from gettext import gettext as _
|
||||
from io import BytesIO
|
||||
|
||||
import requests
|
||||
import validators
|
||||
import yaml
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_html_import import get_recipe_from_source
|
||||
from cookbook.helper.recipe_url_import import iso_duration_to_minutes
|
||||
from cookbook.helper.recipe_url_import import (get_from_scraper, get_images_from_soup,
|
||||
iso_duration_to_minutes)
|
||||
from cookbook.helper.scrapers.scrapers import text_scraper
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
from gettext import gettext as _
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
|
||||
class CookBookApp(Integration):
|
||||
@@ -23,7 +25,10 @@ class CookBookApp(Integration):
|
||||
def get_recipe_from_file(self, file):
|
||||
recipe_html = file.getvalue().decode("utf-8")
|
||||
|
||||
recipe_json, recipe_tree, html_data, images = get_recipe_from_source(recipe_html, 'CookBookApp', self.request)
|
||||
# recipe_json, recipe_tree, html_data, images = get_recipe_from_source(recipe_html, 'CookBookApp', self.request)
|
||||
scrape = text_scraper(text=recipe_html)
|
||||
recipe_json = get_from_scraper(scrape, self.request)
|
||||
images = list(dict.fromkeys(get_images_from_soup(scrape.soup, None)))
|
||||
|
||||
recipe = Recipe.objects.create(
|
||||
name=recipe_json['name'].strip(),
|
||||
@@ -41,7 +46,8 @@ class CookBookApp(Integration):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
step = Step.objects.create(instruction=recipe_json['recipeInstructions'], space=self.request.space, )
|
||||
# assuming import files only contain single step
|
||||
step = Step.objects.create(instruction=recipe_json['steps'][0]['instruction'], space=self.request.space, )
|
||||
|
||||
if 'nutrition' in recipe_json:
|
||||
step.instruction = step.instruction + '\n\n' + recipe_json['nutrition']
|
||||
@@ -50,17 +56,21 @@ class CookBookApp(Integration):
|
||||
recipe.steps.add(step)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in recipe_json['recipeIngredient']:
|
||||
f = ingredient_parser.get_food(ingredient['ingredient']['text'])
|
||||
u = ingredient_parser.get_unit(ingredient['unit']['text'])
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=ingredient['amount'], note=ingredient['note'], space=self.request.space,
|
||||
))
|
||||
for ingredient in recipe_json['steps'][0]['ingredients']:
|
||||
f = ingredient_parser.get_food(ingredient['food']['name'])
|
||||
u = None
|
||||
if unit := ingredient.get('unit', None):
|
||||
u = ingredient_parser.get_unit(unit.get('name', None))
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=ingredient.get('amount', None), note=ingredient.get('note', None), original_text=ingredient.get('original_text', None), space=self.request.space,
|
||||
))
|
||||
|
||||
if len(images) > 0:
|
||||
try:
|
||||
response = requests.get(images[0])
|
||||
self.import_recipe_image(recipe, BytesIO(response.content))
|
||||
url = images[0]
|
||||
if validators.url(url, public=True):
|
||||
response = requests.get(url)
|
||||
self.import_recipe_image(recipe, BytesIO(response.content))
|
||||
except Exception as e:
|
||||
print('failed to import image ', str(e))
|
||||
|
||||
|
||||
cookbook/integration/cookmate.py (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
import base64
|
||||
import json
|
||||
from io import BytesIO
|
||||
|
||||
from gettext import gettext as _
|
||||
|
||||
import requests
|
||||
import validators
|
||||
from lxml import etree
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_url_import import parse_servings, parse_time, parse_servings_text
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
|
||||
class Cookmate(Integration):
|
||||
|
||||
def import_file_name_filter(self, zip_info_object):
|
||||
return zip_info_object.filename.endswith('.xml')
|
||||
|
||||
def get_files_from_recipes(self, recipes, el, cookie):
|
||||
raise NotImplementedError('Method not implemented in storage integration')
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
recipe_xml = file
|
||||
|
||||
recipe = Recipe.objects.create(
|
||||
name=recipe_xml.find('title').text.strip(),
|
||||
created_by=self.request.user, internal=True, space=self.request.space)
|
||||
|
||||
if recipe_xml.find('preptime') is not None and recipe_xml.find('preptime').text is not None:
|
||||
recipe.working_time = parse_time(recipe_xml.find('preptime').text.strip())
|
||||
|
||||
if recipe_xml.find('cooktime') is not None and recipe_xml.find('cooktime').text is not None:
|
||||
recipe.waiting_time = parse_time(recipe_xml.find('cooktime').text.strip())
|
||||
|
||||
if recipe_xml.find('quantity') is not None and recipe_xml.find('quantity').text is not None:
|
||||
recipe.servings = parse_servings(recipe_xml.find('quantity').text.strip())
|
||||
recipe.servings_text = parse_servings_text(recipe_xml.find('quantity').text.strip())
|
||||
|
||||
if recipe_xml.find('url') is not None and recipe_xml.find('url').text is not None:
|
||||
recipe.source_url = recipe_xml.find('url').text.strip()
|
||||
|
||||
if recipe_xml.find('description') is not None: # description is a list of <li>'s with text
|
||||
if len(recipe_xml.find('description')) > 0:
|
||||
recipe.description = recipe_xml.find('description')[0].text[:512]
|
||||
|
||||
if recipe_text := recipe_xml.find('recipetext'):
|
||||
for step in recipe_text.getchildren():
|
||||
if step.text:
|
||||
step = Step.objects.create(
|
||||
instruction=step.text.strip(), space=self.request.space,
|
||||
)
|
||||
recipe.steps.add(step)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
|
||||
if recipe_ingredients := recipe_xml.find('ingredient'):
|
||||
ingredient_step = recipe.steps.first()
|
||||
if ingredient_step is None:
|
||||
ingredient_step = Step.objects.create(space=self.request.space, instruction='')
|
||||
|
||||
for ingredient in recipe_ingredients.getchildren():
|
||||
if ingredient.text:
|
||||
if ingredient.text.strip() != '':
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient.text.strip())
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
ingredient_step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient.text.strip(), space=self.request.space,
|
||||
))
|
||||
|
||||
if recipe_xml.find('imageurl') is not None:
|
||||
try:
|
||||
url = recipe_xml.find('imageurl').text.strip()
|
||||
if validators.url(url, public=True):
|
||||
response = requests.get(url)
|
||||
self.import_recipe_image(recipe, BytesIO(response.content))
|
||||
except Exception as e:
|
||||
print('failed to import image ', str(e))
|
||||
|
||||
recipe.save()
|
||||
|
||||
return recipe
|
||||
|
||||
def get_file_from_recipe(self, recipe):
|
||||
raise NotImplementedError('Method not implemented in storage integration')
|
||||
@@ -2,13 +2,13 @@ import re
|
||||
from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from bs4 import BeautifulSoup, Tag
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_html_import import get_recipe_from_source
|
||||
from cookbook.helper.recipe_url_import import iso_duration_to_minutes, parse_servings
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
from recipes.settings import DEBUG
|
||||
|
||||
|
||||
@@ -21,50 +21,89 @@ class CopyMeThat(Integration):
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
# 'file' comes is as a beautifulsoup object
|
||||
recipe = Recipe.objects.create(name=file.find("div", {"id": "name"}).text.strip(), created_by=self.request.user, internal=True, space=self.request.space, )
|
||||
try:
|
||||
source = file.find("a", {"id": "original_link"}).text
|
||||
except AttributeError:
|
||||
source = None
|
||||
|
||||
recipe = Recipe.objects.create(name=file.find("div", {"id": "name"}).text.strip()[:128], source_url=source, created_by=self.request.user, internal=True, space=self.request.space, )
|
||||
|
||||
for category in file.find_all("span", {"class": "recipeCategory"}):
|
||||
keyword, created = Keyword.objects.get_or_create(name=category.text, space=self.request.space)
|
||||
recipe.keywords.add(keyword)
|
||||
|
||||
|
||||
try:
|
||||
recipe.servings = parse_servings(file.find("a", {"id": "recipeYield"}).text.strip())
|
||||
recipe.working_time = iso_duration_to_minutes(file.find("span", {"meta": "prepTime"}).text.strip())
|
||||
recipe.waiting_time = iso_duration_to_minutes(file.find("span", {"meta": "cookTime"}).text.strip())
|
||||
recipe.save()
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
try:
|
||||
if len(file.find("span", {"id": "starred"}).text.strip()) > 0:
|
||||
recipe.keywords.add(Keyword.objects.get_or_create(space=self.request.space, name=_('Favorite'))[0])
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
try:
|
||||
if len(file.find("span", {"id": "made_this"}).text.strip()) > 0:
|
||||
recipe.keywords.add(Keyword.objects.get_or_create(space=self.request.space, name=_('I made this'))[0])
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
step = Step.objects.create(instruction='', space=self.request.space, )
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in file.find_all("li", {"class": "recipeIngredient"}):
|
||||
if ingredient.text == "":
|
||||
continue
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient.text.strip())
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
))
|
||||
|
||||
for s in file.find_all("li", {"class": "instruction"}):
|
||||
if s.text == "":
|
||||
continue
|
||||
step.instruction += s.text.strip() + ' \n\n'
|
||||
ingredients = file.find("ul", {"id": "recipeIngredients"})
|
||||
if isinstance(ingredients, Tag):
|
||||
for ingredient in ingredients.children:
|
||||
if not isinstance(ingredient, Tag) or not ingredient.text.strip() or "recipeIngredient_spacer" in ingredient['class']:
|
||||
continue
|
||||
if any(x in ingredient['class'] for x in ["recipeIngredient_subheader", "recipeIngredient_note"]):
|
||||
step.ingredients.add(Ingredient.objects.create(is_header=True, note=ingredient.text.strip()[:256], original_text=ingredient.text.strip(), space=self.request.space, ))
|
||||
else:
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient.text.strip())
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(food=f, unit=u, amount=amount, note=note, original_text=ingredient.text.strip(), space=self.request.space, ))
|
||||
|
||||
for s in file.find_all("li", {"class": "recipeNote"}):
|
||||
if s.text == "":
|
||||
continue
|
||||
step.instruction += s.text.strip() + ' \n\n'
|
||||
instructions = file.find("ol", {"id": "recipeInstructions"})
|
||||
if isinstance(instructions, Tag):
|
||||
for instruction in instructions.children:
|
||||
if not isinstance(instruction, Tag) or instruction.text == "":
|
||||
continue
|
||||
if "instruction_subheader" in instruction['class']:
|
||||
if step.instruction:
|
||||
step.save()
|
||||
recipe.steps.add(step)
|
||||
step = Step.objects.create(instruction='', space=self.request.space, )
|
||||
|
||||
step.name = instruction.text.strip()[:128]
|
||||
else:
|
||||
step.instruction += instruction.text.strip() + ' \n\n'
|
||||
|
||||
notes = file.find_all("li", {"class": "recipeNote"})
|
||||
if notes:
|
||||
step.instruction += '*Notes:* \n\n'
|
||||
|
||||
for n in notes:
|
||||
if n.text == "":
|
||||
continue
|
||||
step.instruction += '*' + n.text.strip() + '* \n\n'
|
||||
|
||||
description = ''
|
||||
try:
|
||||
if file.find("a", {"id": "original_link"}).text != '':
|
||||
step.instruction += "\n\nImported from: " + file.find("a", {"id": "original_link"}).text
|
||||
step.save()
|
||||
description = file.find("div", {"id": "description"}).text.strip()
|
||||
except AttributeError:
|
||||
pass
|
||||
if len(description) <= 512:
|
||||
recipe.description = description
|
||||
else:
|
||||
recipe.description = description[:480] + ' ... (full description below)'
|
||||
step.instruction += '*Description:* \n\n*' + description + '* \n\n'
|
||||
|
||||
step.save()
|
||||
recipe.steps.add(step)
|
||||
|
||||
# import the Primary recipe image that is stored in the Zip
|
||||
|
||||
@@ -4,7 +4,7 @@ from io import BytesIO
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient
|
||||
from cookbook.models import Ingredient, Recipe, Step
|
||||
|
||||
|
||||
class Domestica(Integration):
|
||||
@@ -37,11 +37,11 @@ class Domestica(Integration):
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in file['ingredients'].split('\n'):
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
recipe.steps.add(step)
|
||||
|
||||
|
||||
@@ -5,6 +5,8 @@ import traceback
|
||||
import uuid
|
||||
from io import BytesIO, StringIO
|
||||
from zipfile import BadZipFile, ZipFile
|
||||
|
||||
import lxml
|
||||
from django.core.cache import cache
|
||||
import datetime
|
||||
|
||||
@@ -16,6 +18,7 @@ from django.http import HttpResponse
|
||||
from django.utils.formats import date_format
|
||||
from django.utils.translation import gettext as _
|
||||
from django_scopes import scope
|
||||
from lxml import etree
|
||||
|
||||
from cookbook.forms import ImportExportBase
|
||||
from cookbook.helper.image_processing import get_filetype, handle_image
|
||||
@@ -40,7 +43,7 @@ class Integration:
|
||||
self.export_type = export_type
|
||||
self.ignored_recipes = []
|
||||
|
||||
description = f'Imported by {request.user.get_user_name()} at {date_format(datetime.datetime.now(), "DATETIME_FORMAT")}. Type: {export_type}'
|
||||
description = f'Imported by {request.user.get_user_display_name()} at {date_format(datetime.datetime.now(), "DATETIME_FORMAT")}. Type: {export_type}'
|
||||
icon = '📥'
|
||||
|
||||
try:
|
||||
@@ -144,7 +147,7 @@ class Integration:
|
||||
il.imported_recipes += 1
|
||||
il.save()
|
||||
import_zip.close()
|
||||
elif '.zip' in f['name'] or '.paprikarecipes' in f['name']:
|
||||
elif '.zip' in f['name'] or '.paprikarecipes' in f['name'] or '.mcb' in f['name']:
|
||||
import_zip = ZipFile(f['file'])
|
||||
file_list = []
|
||||
for z in import_zip.filelist:
|
||||
@@ -157,9 +160,16 @@ class Integration:
|
||||
file_list = self.split_recipe_file(BytesIO(import_zip.read('recipes.html')))
|
||||
il.total_recipes += len(file_list)
|
||||
|
||||
if isinstance(self, cookbook.integration.cookmate.Cookmate):
|
||||
new_file_list = []
|
||||
for file in file_list:
|
||||
new_file_list += etree.parse(BytesIO(import_zip.read(file.filename))).getroot().getchildren()
|
||||
il.total_recipes = len(new_file_list)
|
||||
file_list = new_file_list
|
||||
|
||||
for z in file_list:
|
||||
try:
|
||||
if isinstance(z, Tag):
|
||||
if not hasattr(z, 'filename') or type(z) == Tag:
|
||||
recipe = self.get_recipe_from_file(z)
|
||||
else:
|
||||
recipe = self.get_recipe_from_file(BytesIO(import_zip.read(z.filename)))
|
||||
@@ -172,7 +182,7 @@ class Integration:
|
||||
traceback.print_exc()
|
||||
self.handle_exception(e, log=il, message=f'-------------------- \nERROR \n{e}\n--------------------\n')
|
||||
import_zip.close()
|
||||
elif '.json' in f['name'] or '.txt' in f['name'] or '.mmf' in f['name']:
|
||||
elif '.json' in f['name'] or '.txt' in f['name'] or '.mmf' in f['name'] or '.rk' in f['name'] or '.melarecipe' in f['name']:
|
||||
data_list = self.split_recipe_file(f['file'])
|
||||
il.total_recipes += len(data_list)
|
||||
for d in data_list:
|
||||
@@ -243,7 +253,7 @@ class Integration:
|
||||
:param image_file: ByteIO stream containing the image
|
||||
:param filetype: type of file to write bytes to, default to .jpeg if unknown
|
||||
"""
|
||||
recipe.image = File(handle_image(self.request, File(image_file, name='image'), filetype=filetype)[0], name=f'{uuid.uuid4()}_{recipe.pk}{filetype}')
|
||||
recipe.image = File(handle_image(self.request, File(image_file, name='image'), filetype=filetype), name=f'{uuid.uuid4()}_{recipe.pk}{filetype}')
|
||||
recipe.save()
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
|
||||
@@ -6,13 +6,13 @@ from zipfile import ZipFile
|
||||
from cookbook.helper.image_processing import get_filetype
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient
|
||||
from cookbook.models import Ingredient, Recipe, Step
|
||||
|
||||
|
||||
class Mealie(Integration):
|
||||
|
||||
def import_file_name_filter(self, zip_info_object):
|
||||
return re.match(r'^recipes/([A-Za-z\d-])+/([A-Za-z\d-])+.json$', zip_info_object.filename)
|
||||
return re.match(r'^recipes/([A-Za-z\d\s\-_()\[\]\u00C0-\u017F])+/([A-Za-z\d\s\-_()\[\]\u00C0-\u017F])+.json$', zip_info_object.filename)
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
recipe_json = json.loads(file.getvalue().decode("utf-8"))
|
||||
@@ -45,12 +45,14 @@ class Mealie(Integration):
|
||||
u = ingredient_parser.get_unit(ingredient['unit'])
|
||||
amount = ingredient['quantity']
|
||||
note = ingredient['note']
|
||||
original_text = None
|
||||
else:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient['note'])
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient['note'])
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
original_text = ingredient['note']
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
food=f, unit=u, amount=amount, note=note, original_text=original_text, space=self.request.space,
|
||||
))
|
||||
except Exception:
|
||||
pass
|
||||
@@ -60,7 +62,8 @@ class Mealie(Integration):
|
||||
if '.zip' in f['name']:
|
||||
import_zip = ZipFile(f['file'])
|
||||
try:
|
||||
self.import_recipe_image(recipe, BytesIO(import_zip.read(f'recipes/{recipe_json["slug"]}/images/min-original.webp')), filetype=get_filetype(f'recipes/{recipe_json["slug"]}/images/original'))
|
||||
self.import_recipe_image(recipe, BytesIO(import_zip.read(f'recipes/{recipe_json["slug"]}/images/min-original.webp')),
|
||||
filetype=get_filetype(f'recipes/{recipe_json["slug"]}/images/original'))
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ import re
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
|
||||
class MealMaster(Integration):
|
||||
@@ -45,11 +45,11 @@ class MealMaster(Integration):
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
recipe.steps.add(step)
|
||||
|
||||
|
||||
cookbook/integration/melarecipes.py (new file, 83 lines)
@@ -0,0 +1,83 @@
|
||||
import base64
|
||||
import json
|
||||
from io import BytesIO
|
||||
|
||||
from gettext import gettext as _
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_url_import import parse_servings, parse_time
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
|
||||
class MelaRecipes(Integration):
|
||||
|
||||
def split_recipe_file(self, file):
|
||||
return [json.loads(file.getvalue().decode("utf-8"))]
|
||||
|
||||
def get_files_from_recipes(self, recipes, el, cookie):
|
||||
raise NotImplementedError('Method not implemented in storage integration')
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
recipe_json = file
|
||||
|
||||
recipe = Recipe.objects.create(
|
||||
name=recipe_json['title'].strip(),
|
||||
created_by=self.request.user, internal=True, space=self.request.space)
|
||||
|
||||
if 'yield' in recipe_json:
|
||||
recipe.servings = parse_servings(recipe_json['yield'])
|
||||
|
||||
if 'cookTime' in recipe_json:
|
||||
recipe.waiting_time = parse_time(recipe_json['cookTime'])
|
||||
|
||||
if 'prepTime' in recipe_json:
|
||||
recipe.working_time = parse_time(recipe_json['prepTime'])
|
||||
|
||||
if 'favorite' in recipe_json and recipe_json['favorite']:
|
||||
recipe.keywords.add(Keyword.objects.get_or_create(space=self.request.space, name=_('Favorite'))[0])
|
||||
|
||||
if 'categories' in recipe_json:
|
||||
try:
|
||||
for x in recipe_json['categories']:
|
||||
recipe.keywords.add(Keyword.objects.get_or_create(space=self.request.space, name=x)[0])
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
instruction = ''
|
||||
if 'text' in recipe_json:
|
||||
instruction += f'*{recipe_json["text"].strip()}* \n'
|
||||
|
||||
if 'instructions' in recipe_json:
|
||||
instruction += recipe_json["instructions"].strip() + ' \n'
|
||||
|
||||
if 'notes' in recipe_json:
|
||||
instruction += recipe_json["notes"].strip() + ' \n'
|
||||
|
||||
if 'link' in recipe_json:
|
||||
recipe.source_url = recipe_json['link']
|
||||
|
||||
step = Step.objects.create(
|
||||
instruction=instruction, space=self.request.space,
|
||||
)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in recipe_json['ingredients'].split('\n'):
|
||||
if ingredient.strip() != '':
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
recipe.steps.add(step)
|
||||
|
||||
if recipe_json.get("images", None):
|
||||
try:
|
||||
self.import_recipe_image(recipe, BytesIO(base64.b64decode(recipe_json['images'][0])), filetype='.jpeg')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return recipe
|
||||
|
||||
def get_file_from_recipe(self, recipe):
|
||||
raise NotImplementedError('Method not implemented in storage integration')
|
||||
@@ -7,7 +7,7 @@ from cookbook.helper.image_processing import get_filetype
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_url_import import iso_duration_to_minutes
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
|
||||
class NextcloudCookbook(Integration):
|
||||
@@ -31,6 +31,9 @@ class NextcloudCookbook(Integration):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if 'url' in recipe_json:
|
||||
recipe.source_url = recipe_json['url'].strip()
|
||||
|
||||
if 'recipeCategory' in recipe_json:
|
||||
try:
|
||||
recipe.keywords.add(Keyword.objects.get_or_create(space=self.request.space, name=recipe_json['recipeCategory'])[0])
|
||||
@@ -40,7 +43,8 @@ class NextcloudCookbook(Integration):
|
||||
if 'keywords' in recipe_json:
|
||||
try:
|
||||
for x in recipe_json['keywords'].split(','):
|
||||
recipe.keywords.add(Keyword.objects.get_or_create(space=self.request.space, name=x)[0])
|
||||
if x.strip() != '':
|
||||
recipe.keywords.add(Keyword.objects.get_or_create(space=self.request.space, name=x)[0])
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -57,11 +61,11 @@ class NextcloudCookbook(Integration):
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in recipe_json['recipeIngredient']:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
recipe.steps.add(step)
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ import json

from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.integration.integration import Integration
from cookbook.models import Recipe, Step, Ingredient
from cookbook.models import Ingredient, Recipe, Step


class OpenEats(Integration):
@@ -2,12 +2,13 @@ import base64
|
||||
import gzip
|
||||
import json
|
||||
import re
|
||||
from gettext import gettext as _
|
||||
from io import BytesIO
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
from gettext import gettext as _
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
|
||||
class Paprika(Integration):
|
||||
@@ -26,10 +27,9 @@ class Paprika(Integration):
|
||||
recipe.description = '' if len(recipe_json['description'].strip()) > 500 else recipe_json['description'].strip()
|
||||
|
||||
try:
|
||||
if re.match(r'([0-9])+\s(.)*', recipe_json['servings']):
|
||||
s = recipe_json['servings'].split(' ')
|
||||
recipe.servings = s[0]
|
||||
recipe.servings_text = s[1]
|
||||
if 'servings' in recipe_json:
|
||||
recipe.servings = parse_servings(recipe_json['servings'])
|
||||
recipe.servings_text = parse_servings_text(recipe_json['servings'])
|
||||
|
||||
if len(recipe_json['cook_time'].strip()) > 0:
|
||||
recipe.waiting_time = re.findall(r'\d+', recipe_json['cook_time'])[0]
|
||||
@@ -70,11 +70,11 @@ class Paprika(Integration):
|
||||
try:
|
||||
for ingredient in recipe_json['ingredients'].split('\n'):
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient
|
||||
from cookbook.models import Ingredient, Recipe, Step
|
||||
|
||||
|
||||
class Pepperplate(Integration):
|
||||
@@ -41,11 +41,11 @@ class Pepperplate(Integration):
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
recipe.steps.add(step)
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import requests
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
|
||||
class Plantoeat(Integration):
|
||||
@@ -49,6 +49,7 @@ class Plantoeat(Integration):
|
||||
)
|
||||
|
||||
if tags:
|
||||
tags = tags.replace('^',',')
|
||||
for k in tags.split(','):
|
||||
keyword, created = Keyword.objects.get_or_create(name=k.strip(), space=self.request.space)
|
||||
recipe.keywords.add(keyword)
|
||||
@@ -56,11 +57,11 @@ class Plantoeat(Integration):
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
recipe.steps.add(step)
|
||||
|
||||
@@ -78,7 +79,11 @@ class Plantoeat(Integration):
|
||||
current_recipe = ''
|
||||
|
||||
for fl in file.readlines():
|
||||
line = fl.decode("windows-1250")
|
||||
try:
|
||||
line = fl.decode("utf-8")
|
||||
except UnicodeDecodeError:
|
||||
line = fl.decode("windows-1250")
|
||||
|
||||
if line.startswith('--------------'):
|
||||
if current_recipe != '':
|
||||
recipe_list.append(current_recipe)
|
||||
|
||||
@@ -1,14 +1,17 @@
|
||||
import re
|
||||
import imghdr
|
||||
import json
|
||||
import requests
|
||||
import re
|
||||
from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
import imghdr
|
||||
|
||||
import requests
|
||||
import validators
|
||||
|
||||
from django.utils.translation import gettext as _
|
||||
from cookbook.helper.image_processing import get_filetype
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
|
||||
class RecetteTek(Integration):
|
||||
@@ -48,7 +51,7 @@ class RecetteTek(Integration):
|
||||
# Append the original import url to the step (if it exists)
|
||||
try:
|
||||
if file['url'] != '':
|
||||
step.instruction += '\n\nImported from: ' + file['url']
|
||||
step.instruction += '\n\n' + _('Imported from') + ': ' + file['url']
|
||||
step.save()
|
||||
except Exception as e:
|
||||
print(recipe.name, ': failed to import source url ', str(e))
|
||||
@@ -58,11 +61,11 @@ class RecetteTek(Integration):
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in file['ingredients'].split('\n'):
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
amount, unit, food, note = ingredient_parser.parse(food)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
except Exception as e:
|
||||
print(recipe.name, ': failed to parse recipe ingredients ', str(e))
|
||||
@@ -121,11 +124,13 @@ class RecetteTek(Integration):
|
||||
self.import_recipe_image(recipe, BytesIO(import_zip.read(image_file_name)), filetype=get_filetype(image_file_name))
|
||||
else:
|
||||
if file['originalPicture'] != '':
|
||||
response = requests.get(file['originalPicture'])
|
||||
if imghdr.what(BytesIO(response.content)) is not None:
|
||||
self.import_recipe_image(recipe, BytesIO(response.content), filetype=get_filetype(file['originalPicture']))
|
||||
else:
|
||||
raise Exception("Original image failed to download.")
|
||||
url = file['originalPicture']
|
||||
if validators.url(url, public=True):
|
||||
response = requests.get(url)
|
||||
if imghdr.what(BytesIO(response.content)) is not None:
|
||||
self.import_recipe_image(recipe, BytesIO(response.content), filetype=get_filetype(file['originalPicture']))
|
||||
else:
|
||||
raise Exception("Original image failed to download.")
|
||||
except Exception as e:
|
||||
print(recipe.name, ': failed to import image ', str(e))
|
||||
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
import re
|
||||
from bs4 import BeautifulSoup
|
||||
from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
from django.utils.translation import gettext as _
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_url_import import parse_servings, iso_duration_to_minutes
|
||||
from cookbook.helper.recipe_url_import import iso_duration_to_minutes, parse_servings
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
|
||||
class RecipeKeeper(Integration):
|
||||
@@ -45,11 +47,11 @@ class RecipeKeeper(Integration):
|
||||
for ingredient in file.find("div", {"itemprop": "recipeIngredients"}).findChildren("p"):
|
||||
if ingredient.text == "":
|
||||
continue
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient.text.strip())
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient.text.strip())
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
|
||||
for s in file.find("div", {"itemprop": "recipeDirections"}).find_all("p"):
|
||||
@@ -58,7 +60,7 @@ class RecipeKeeper(Integration):
|
||||
step.instruction += s.text + ' \n'
|
||||
|
||||
if file.find("span", {"itemprop": "recipeSource"}).text != '':
|
||||
step.instruction += "\n\nImported from: " + file.find("span", {"itemprop": "recipeSource"}).text
|
||||
step.instruction += "\n\n" + _("Imported from") + ": " + file.find("span", {"itemprop": "recipeSource"}).text
|
||||
step.save()
|
||||
|
||||
recipe.steps.add(step)
|
||||
|
||||
@@ -2,10 +2,11 @@ import json
|
||||
from io import BytesIO
|
||||
|
||||
import requests
|
||||
import validators
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient
|
||||
from cookbook.models import Ingredient, Recipe, Step
|
||||
|
||||
|
||||
class RecipeSage(Integration):
|
||||
@@ -31,7 +32,7 @@ class RecipeSage(Integration):
|
||||
except Exception as e:
|
||||
print('failed to parse yield or time ', str(e))
|
||||
|
||||
ingredient_parser = IngredientParser(self.request,True)
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
ingredients_added = False
|
||||
for s in file['recipeInstructions']:
|
||||
step = Step.objects.create(
|
||||
@@ -41,18 +42,20 @@ class RecipeSage(Integration):
|
||||
ingredients_added = True
|
||||
|
||||
for ingredient in file['recipeIngredient']:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
recipe.steps.add(step)
|
||||
|
||||
if len(file['image']) > 0:
|
||||
try:
|
||||
response = requests.get(file['image'][0])
|
||||
self.import_recipe_image(recipe, BytesIO(response.content))
|
||||
url = file['image'][0]
|
||||
if validators.url(url, public=True):
|
||||
response = requests.get(url)
|
||||
self.import_recipe_image(recipe, BytesIO(response.content))
|
||||
except Exception as e:
|
||||
print('failed to import image ', str(e))
|
||||
|
||||
@@ -77,14 +80,13 @@ class RecipeSage(Integration):
|
||||
}
|
||||
|
||||
for s in recipe.steps.all():
|
||||
if s.type != Step.TIME:
|
||||
data['recipeInstructions'].append({
|
||||
'@type': 'HowToStep',
|
||||
'text': s.instruction
|
||||
})
|
||||
data['recipeInstructions'].append({
|
||||
'@type': 'HowToStep',
|
||||
'text': s.instruction
|
||||
})
|
||||
|
||||
for i in s.ingredients.all():
|
||||
data['recipeIngredient'].append(f'{float(i.amount)} {i.unit} {i.food}')
|
||||
for i in s.ingredients.all():
|
||||
data['recipeIngredient'].append(f'{float(i.amount)} {i.unit} {i.food}')
|
||||
|
||||
return data
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient, Keyword
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step
|
||||
|
||||
|
||||
class RezKonv(Integration):
|
||||
@@ -12,43 +12,43 @@ class RezKonv(Integration):
|
||||
|
||||
ingredients = []
|
||||
directions = []
|
||||
for line in file.replace('\r', '').split('\n'):
|
||||
for line in file.replace('\r', '').replace('\n\n', '\n').split('\n'):
|
||||
if 'Titel:' in line:
|
||||
title = line.replace('Titel:', '').strip()
|
||||
if 'Kategorien:' in line:
|
||||
tags = line.replace('Kategorien:', '').strip()
|
||||
if ingredient_mode and ('quelle' in line.lower() or 'source' in line.lower()):
|
||||
if ingredient_mode and (
|
||||
'quelle' in line.lower() or 'source' in line.lower() or (line == '' and len(ingredients) > 0)):
|
||||
ingredient_mode = False
|
||||
direction_mode = True
|
||||
if ingredient_mode:
|
||||
if line != '' and '===' not in line and 'Zubereitung' not in line:
|
||||
ingredients.append(line.strip())
|
||||
if direction_mode:
|
||||
if line.strip() != '' and line.strip() != '=====':
|
||||
directions.append(line.strip())
|
||||
if 'Zutaten:' in line:
|
||||
if 'Zutaten:' in line or 'Ingredients' in line or 'Menge:' in line:
|
||||
ingredient_mode = True
|
||||
if 'Zubereitung:' in line:
|
||||
ingredient_mode = False
|
||||
direction_mode = True
|
||||
|
||||
recipe = Recipe.objects.create(name=title, created_by=self.request.user, internal=True, space=self.request.space)
|
||||
recipe = Recipe.objects.create(name=title, created_by=self.request.user, internal=True,
|
||||
space=self.request.space)
|
||||
|
||||
for k in tags.split(','):
|
||||
keyword, created = Keyword.objects.get_or_create(name=k.strip(), space=self.request.space)
|
||||
recipe.keywords.add(keyword)
|
||||
|
||||
step = Step.objects.create(
|
||||
instruction='\n'.join(directions) + '\n\n', space=self.request.space,
|
||||
instruction=' \n'.join(directions) + '\n\n', space=self.request.space,
|
||||
)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
if len(ingredient.strip()) > 0:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
recipe.steps.add(step)
|
||||
|
||||
@@ -60,9 +60,15 @@ class RezKonv(Integration):
def split_recipe_file(self, file):
recipe_list = []
current_recipe = ''

encoding_list = ['windows-1250',
'latin-1']  # TODO build algorithm to try trough encodings and fail if none work, use for all importers
encoding = 'windows-1250'
for fl in file.readlines():
line = fl.decode("windows-1250")
try:
line = fl.decode(encoding)
except UnicodeDecodeError:
encoding = 'latin-1'
line = fl.decode(encoding)
if line.startswith('=====') and 'rezkonv' in line.lower():
if current_recipe != '':
recipe_list.append(current_recipe)
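The TODO in this hunk asks for a reusable way to walk through candidate encodings instead of hard-coding a windows-1250/latin-1 pair. One possible shape for such a helper, sketched under the assumption that latin-1 stays the final fallback (it can decode any byte sequence); this is not code from the repository:

def decode_line(raw: bytes, encodings=('utf-8', 'windows-1250', 'latin-1')) -> str:
    # hypothetical helper matching the TODO above: try each encoding in order
    for enc in encodings:
        try:
            return raw.decode(enc)
        except UnicodeDecodeError:
            continue
    # unreachable while latin-1 is last, kept for safety if the tuple is overridden
    raise ValueError(f'could not decode line with any of {encodings}')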
@@ -2,7 +2,7 @@ from django.utils.translation import gettext as _
|
||||
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Recipe, Step, Ingredient
|
||||
from cookbook.models import Ingredient, Recipe, Step
|
||||
|
||||
|
||||
class Saffron(Integration):
|
||||
@@ -47,11 +47,11 @@ class Saffron(Integration):
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
for ingredient in ingredients:
|
||||
amount, unit, ingredient, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(ingredient)
|
||||
amount, unit, food, note = ingredient_parser.parse(ingredient)
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
step.ingredients.add(Ingredient.objects.create(
|
||||
food=f, unit=u, amount=amount, note=note, space=self.request.space,
|
||||
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
|
||||
))
|
||||
recipe.steps.add(step)
|
||||
|
||||
@@ -71,12 +71,11 @@ class Saffron(Integration):
|
||||
recipeInstructions = []
|
||||
recipeIngredient = []
|
||||
for s in recipe.steps.all():
|
||||
if s.type != Step.TIME:
|
||||
recipeInstructions.append(s.instruction)
|
||||
recipeInstructions.append(s.instruction)
|
||||
|
||||
for i in s.ingredients.all():
|
||||
recipeIngredient.append(f'{float(i.amount)} {i.unit} {i.food}')
|
||||
|
||||
for i in s.ingredients.all():
|
||||
recipeIngredient.append(f'{float(i.amount)} {i.unit} {i.food}')
|
||||
|
||||
data += "Ingredients: \n"
|
||||
for ingredient in recipeIngredient:
|
||||
data += ingredient+"\n"
|
||||
@@ -91,10 +90,10 @@ class Saffron(Integration):
|
||||
files = []
|
||||
for r in recipes:
|
||||
filename, data = self.get_file_from_recipe(r)
|
||||
files.append([ filename, data ])
|
||||
files.append([filename, data])
|
||||
|
||||
el.exported_recipes += 1
|
||||
el.msg += self.get_recipe_processed_msg(r)
|
||||
el.save()
|
||||
|
||||
return files
|
||||
|
||||
return files
|
||||
|
||||
cookbook/locale/ar/LC_MESSAGES/django.mo (new binary file, not shown)
cookbook/locale/ar/LC_MESSAGES/django.po (new file, 2638 lines; diff too large to show)
cookbook/locale/bg/LC_MESSAGES/django.mo (new binary file, not shown)
cookbook/locale/bg/LC_MESSAGES/django.po (new file, 3018 lines; diff too large to show)
cookbook/locale/da/LC_MESSAGES/django.mo (new binary file, not shown)
cookbook/locale/da/LC_MESSAGES/django.po (new file, 2983 lines; diff too large to show)
Several other locale files changed (binary .mo files not shown; large .po diffs suppressed).
cookbook/locale/fi/LC_MESSAGES/django.po (Finnish translation update):
@@ -8,8 +8,8 @@ msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2021-11-04 12:31+0100\n"
"PO-Revision-Date: 2021-11-06 14:06+0000\n"
"Last-Translator: Nicklas Yli-Länttä <admin@timanttikuutio.eu>\n"
"PO-Revision-Date: 2022-03-18 16:31+0000\n"
"Last-Translator: Stefan Werner <werner@iki.fi>\n"
"Language-Team: Finnish <http://translate.tandoor.dev/projects/tandoor/"
"recipes-backend/fi/>\n"
"Language: fi\n"
@@ -17,7 +17,7 @@ msgstr ""
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=n != 1;\n"
"X-Generator: Weblate 4.8\n"
"X-Generator: Weblate 4.10.1\n"

#: .\cookbook\filters.py:23 .\cookbook\templates\base.html:125
#: .\cookbook\templates\forms\ingredients.html:34
@@ -31,10 +31,12 @@ msgid ""
"Color of the top navigation bar. Not all colors work with all themes, just "
"try them out!"
msgstr ""
"Ylänavigointipalkin väri. Ei kaikki värit toimi kaikkien teemojen kanssa; "
"kokeile!"

#: .\cookbook\forms.py:55
msgid "Default Unit to be used when inserting a new ingredient into a recipe."
msgstr ""
msgstr "Oletusmittayksikkö uuden aineksen lisäämisessä."

#: .\cookbook\forms.py:57
msgid ""
@@ -2435,7 +2437,7 @@ msgid ""
msgstr ""

#: .\cookbook\views\new.py:225
msgid "Email to user could not be send, please share link manually."
msgid "Email could not be sent to user. Please share the link manually."
msgstr ""

#: .\cookbook\views\views.py:127
Several more locale .mo/.po files changed (binary files not shown; large diffs suppressed).
cookbook/locale/pt_BR/LC_MESSAGES/django.mo (new binary file, not shown)
cookbook/locale/pt_BR/LC_MESSAGES/django.po (new file, 2808 lines; diff too large to show)
@@ -2493,7 +2493,7 @@ msgid ""
msgstr ""

#: .\cookbook\views\new.py:229
msgid "Email to user could not be send, please share link manually."
msgid "Email could not be sent to user. Please share the link manually."
msgstr ""

#: .\cookbook\views\views.py:127
cookbook/locale/ru/LC_MESSAGES/django.po (Russian translation update):
@@ -8,8 +8,8 @@ msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2021-09-13 22:40+0200\n"
"PO-Revision-Date: 2021-10-23 09:06+0000\n"
"Last-Translator: rustam <uzbekr@gmail.com>\n"
"PO-Revision-Date: 2022-04-07 19:32+0000\n"
"Last-Translator: Artem Aksenov <artemmillerr@gmail.com>\n"
"Language-Team: Russian <http://translate.tandoor.dev/projects/tandoor/"
"recipes-backend/ru/>\n"
"Language: ru\n"
@@ -18,14 +18,14 @@ msgstr ""
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : n%10>=2 && n"
"%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;\n"
"X-Generator: Weblate 4.8\n"
"X-Generator: Weblate 4.10.1\n"

#: .\cookbook\filters.py:23 .\cookbook\templates\base.html:125
#: .\cookbook\templates\forms\ingredients.html:34
#: .\cookbook\templates\space.html:43 .\cookbook\templates\stats.html:28
#: .\cookbook\templates\url_import.html:270
msgid "Ingredients"
msgstr "ингредиенты"
msgstr "Ингредиенты"

#: .\cookbook\forms.py:50
msgid ""
@@ -95,14 +95,14 @@ msgstr ""
#: .\cookbook\forms.py:103 .\cookbook\forms.py:334
#: .\cookbook\templates\url_import.html:154
msgid "Name"
msgstr "Имя"
msgstr "Название"

#: .\cookbook\forms.py:104 .\cookbook\forms.py:335
#: .\cookbook\templates\space.html:39 .\cookbook\templates\stats.html:24
#: .\cookbook\templates\url_import.html:188
#: .\cookbook\templates\url_import.html:573 .\cookbook\views\lists.py:112
msgid "Keywords"
msgstr "Ключевые поля"
msgstr "Ключевые слова"

#: .\cookbook\forms.py:105
msgid "Preparation time in minutes"
@@ -2501,7 +2501,7 @@ msgid ""
msgstr ""

#: .\cookbook\views\new.py:245
msgid "Email to user could not be send, please share link manually."
msgid "Email could not be sent to user. Please share the link manually."
msgstr ""

#: .\cookbook\views\views.py:128
@@ -2530,7 +2530,7 @@ msgid ""
msgstr ""

#: .\cookbook\views\new.py:229
msgid "Email to user could not be send, please share link manually."
msgid "Email could not be sent to user. Please share the link manually."
msgstr ""

#: .\cookbook\views\views.py:127
cookbook/locale/uk/LC_MESSAGES/django.mo (new binary file, not shown)
cookbook/locale/uk/LC_MESSAGES/django.po (new file, 2624 lines; diff too large to show)
Further locale files changed (binary files not shown; large diffs suppressed).
Some files were not shown because too many files have changed in this diff.