Compare commits

...

169 Commits
2.1.2 ... 2.2.7

Author SHA1 Message Date
vabene1111
8572f338ad fixed ingredient insert focus error 2025-09-25 21:03:35 +02:00
vabene1111
920ec8e74b fixed missing pg extensions 2025-09-25 20:56:53 +02:00
vabene1111
2328bf2342 fixed mealie edgecases 2025-09-25 20:48:20 +02:00
vabene1111
85620a1431 Merge branch 'master' into develop 2025-09-25 12:33:43 +02:00
vabene1111
0037858885 fixed step editor layout 2025-09-25 12:33:39 +02:00
vabene1111
9df3ff0028 Merge branch 'develop' 2025-09-25 07:39:38 +02:00
vabene1111
0a43272126 fixed property editor page updating with 0 values 2025-09-25 07:39:30 +02:00
vabene1111
ff96eb194f auto grow comment textarea 2025-09-25 07:37:40 +02:00
vabene1111
6b69c4184b fixed ingredients without unit in steps overview 2025-09-25 07:31:45 +02:00
vabene1111
e90e21181c fixed sizing of ingredient input in recipe editor 2025-09-25 07:28:01 +02:00
vabene1111
5237228673 added patreon link 2025-09-24 19:54:36 +02:00
vabene1111
ecb3172085 increased storage token length 2025-09-24 19:24:33 +02:00
vabene1111
b4f4e9fd2b Merge branch 'develop' 2025-09-24 19:11:01 +02:00
vabene1111
6d0f3b99c8 deactivate for now 2025-09-24 18:56:45 +02:00
vabene1111
cdb94ae628 more nginx testing 2025-09-24 18:49:29 +02:00
vabene1111
0d589444fd fixed order 2025-09-24 18:34:53 +02:00
vabene1111
95fa420c3a updated nginx config 2025-09-24 18:31:02 +02:00
vabene1111
dd4dc1083f add duplicate to recipe 2025-09-24 18:24:58 +02:00
vabene1111
04f889b742 fixed search updated at filter 2025-09-24 17:36:37 +02:00
vabene1111
67d374c071 always clear staticfiles directory on collect 2025-09-24 17:33:49 +02:00
vabene1111
8d749e351d django vite manifest path 2025-09-24 15:22:01 +02:00
vabene1111
417ffcab5d improved recipe editor layout 2025-09-24 07:46:15 +02:00
vabene1111
0c0012aab8 WIP recipe editor improvements 2025-09-23 07:58:38 +02:00
vabene1111
e562883da3 return steps in importer 2025-09-23 07:41:11 +02:00
vabene1111
a81bc335cc added AI properties import 2025-09-22 21:59:34 +02:00
vabene1111
ebee1ccd4b Merge branch 'develop' of https://github.com/TandoorRecipes/recipes into develop 2025-09-22 21:08:59 +02:00
vabene1111
b1104b4581 import button in shared recipes 2025-09-22 21:08:55 +02:00
vabene1111
f5e952d88c Merge pull request #4060 from Nailik/patch-1
Synology documentation update Tandoor 2
2025-09-22 20:29:54 +02:00
vabene1111
968fcc3936 fixed docs 2025-09-22 20:28:36 +02:00
vabene1111
73d3d87217 fixed open data import 2025-09-22 20:21:59 +02:00
vabene1111
9050f648f9 added special symbols and updated translations 2025-09-22 20:17:55 +02:00
vabene1111
a4a9e104b5 invite link button on UserSpace model list page 2025-09-22 20:02:56 +02:00
Nailik
3ed85ea0c4 Update synology.md 2025-09-22 14:16:53 +02:00
Nailik
da8ceb7abe Update synology.md 2025-09-22 14:06:25 +02:00
Nailik
5ff3a6bb2e Update synology.md 2025-09-22 14:02:42 +02:00
Vincenzo Reale
3ed750b330 Translated using Weblate (Italian)
Currently translated at 100.0% (488 of 488 strings)

Translation: Tandoor/Recipes Backend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-backend/it/
2025-09-22 10:09:35 +00:00
TC Kuo
0315911802 Translated using Weblate (Chinese (Traditional Han script))
Currently translated at 92.0% (795 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/zh_Hant/
2025-09-22 10:02:22 +00:00
TC Kuo
1fe96f2b3d Translated using Weblate (Chinese (Simplified Han script))
Currently translated at 65.7% (568 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/zh_Hans/
2025-09-22 10:02:22 +00:00
TC Kuo
11761c0b15 Translated using Weblate (Ukrainian)
Currently translated at 35.9% (311 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/uk/
2025-09-22 10:02:22 +00:00
TC Kuo
a7b0a1ab30 Translated using Weblate (Turkish)
Currently translated at 65.7% (568 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/tr/
2025-09-22 10:02:22 +00:00
TC Kuo
e4fcae3b00 Translated using Weblate (Swedish)
Currently translated at 70.0% (605 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/sv/
2025-09-22 10:02:22 +00:00
TC Kuo
b0401639f1 Translated using Weblate (Slovenian)
Currently translated at 92.0% (795 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/sl/
2025-09-22 10:02:22 +00:00
TC Kuo
5f8770f502 Translated using Weblate (Russian)
Currently translated at 91.6% (792 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/ru/
2025-09-22 10:02:22 +00:00
TC Kuo
aaa627d3b6 Translated using Weblate (Romanian)
Currently translated at 55.3% (478 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/ro/
2025-09-22 10:02:22 +00:00
TC Kuo
e77734f696 Translated using Weblate (Portuguese (Brazil))
Currently translated at 75.9% (656 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/pt_BR/
2025-09-22 10:02:22 +00:00
TC Kuo
73df6bb961 Translated using Weblate (Polish)
Currently translated at 68.7% (594 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/pl/
2025-09-22 10:02:22 +00:00
Justin Straver
7d187b638e Translated using Weblate (Dutch)
Currently translated at 92.2% (797 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/nl/
2025-09-22 10:02:22 +00:00
TC Kuo
68bb750f8c Translated using Weblate (Dutch)
Currently translated at 92.2% (797 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/nl/
2025-09-22 10:02:22 +00:00
TC Kuo
1e35035540 Translated using Weblate (Norwegian Bokmål)
Currently translated at 46.4% (401 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/nb_NO/
2025-09-22 10:02:22 +00:00
Vincenzo Reale
561ba2f1da Translated using Weblate (Italian)
Currently translated at 100.0% (864 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/it/
2025-09-22 10:02:22 +00:00
TC Kuo
bd600301f9 Translated using Weblate (Italian)
Currently translated at 100.0% (864 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/it/
2025-09-22 10:02:22 +00:00
TC Kuo
cf5483a4d9 Translated using Weblate (Indonesian)
Currently translated at 16.5% (143 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/id/
2025-09-22 10:02:22 +00:00
TC Kuo
ba417c49dd Translated using Weblate (Hungarian)
Currently translated at 53.7% (464 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/hu/
2025-09-22 10:02:22 +00:00
TC Kuo
0259b1dc08 Translated using Weblate (Croatian)
Currently translated at 65.7% (568 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/hr/
2025-09-22 10:02:22 +00:00
TC Kuo
34553dadd7 Translated using Weblate (Hebrew)
Currently translated at 65.7% (568 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/he/
2025-09-22 10:02:22 +00:00
TC Kuo
535b88c8db Translated using Weblate (French)
Currently translated at 89.2% (771 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/fr/
2025-09-22 10:02:22 +00:00
TC Kuo
71eb8818b5 Translated using Weblate (Finnish)
Currently translated at 59.6% (515 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/fi/
2025-09-22 10:02:21 +00:00
TC Kuo
0dc94a817f Translated using Weblate (Spanish)
Currently translated at 88.0% (761 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/es/
2025-09-22 10:02:21 +00:00
TC Kuo
4df862c7f3 Translated using Weblate (Greek)
Currently translated at 65.7% (568 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/el/
2025-09-22 10:02:21 +00:00
TC Kuo
69f013c980 Translated using Weblate (Danish)
Currently translated at 65.7% (568 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/da/
2025-09-22 10:02:21 +00:00
TC Kuo
23c420dda8 Translated using Weblate (Czech)
Currently translated at 64.6% (559 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/cs/
2025-09-22 10:02:21 +00:00
TC Kuo
63daf1e958 Translated using Weblate (Catalan)
Currently translated at 65.7% (568 of 864 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/ca/
2025-09-22 10:02:21 +00:00
Vincenzo Reale
52b44eacdd Translated using Weblate (Italian)
Currently translated at 100.0% (488 of 488 strings)

Translation: Tandoor/Recipes Backend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-backend/it/
2025-09-22 10:02:21 +00:00
vabene1111
fa7cc12b99 added AI step sorter 2025-09-22 08:21:26 +02:00
vabene1111
64d2108ef6 locales 2025-09-21 13:16:16 +02:00
vabene1111
dccfdcc11c Merge branch 'develop' of http://translate.tandoor.dev/git/tandoor/recipes-backend into develop
# Conflicts:
#	vue3/src/locales/pt.json
2025-09-21 12:47:20 +02:00
vabene1111
974e72631d improved modeldeletepage, hide when not needed 2025-09-21 10:23:04 +02:00
vabene1111
70f31b8553 fixed api food test: foods can be deleted even when part of a recipe 2025-09-21 10:22:54 +02:00
vabene1111
3cb980c0e7 model delete page done 2025-09-21 09:01:07 +02:00
vabene1111
b8a403b7c1 playing with delete confirm view 2025-09-20 19:04:51 +02:00
vabene1111
b037d90220 proper function for delete relation views 2025-09-20 12:24:25 +02:00
vabene1111
ad32e457fa basics of delete collector logic 2025-09-20 12:05:29 +02:00
vabene1111
8e2726caeb improved step sorting 2025-09-20 10:53:44 +02:00
vabene1111
e693737c57 fixed AI ordering and VNumberInput decimal separator 2025-09-20 10:42:39 +02:00
vabene1111
9f239c06d3 fixed unglobal of AI provider 2025-09-20 10:12:31 +02:00
vabene1111
0f551c5f88 improved german translation for keyword 2025-09-20 10:03:29 +02:00
vabene1111
eb224a769d Merge branch 'develop' 2025-09-19 17:02:10 +02:00
vabene1111
4515eba9d7 fixed ai provider admin and prevent accidental update 2025-09-19 17:02:01 +02:00
vabene1111
30b37bf0b6 fixed ai credit system 2025-09-19 16:49:30 +02:00
vabene1111
f17207e56e protect endpoint WIP 2025-09-19 16:22:20 +02:00
vabene1111
2cba0e18af Merge branch 'develop' 2025-09-19 16:20:56 +02:00
vabene1111
ec6e81316a fixed unwanted redirect to start page 2025-09-19 16:20:48 +02:00
vabene1111
b72897b222 Merge branch 'develop' 2025-09-18 18:17:58 +02:00
vabene1111
bca1ebbf99 various fixes 2025-09-18 18:10:58 +02:00
vabene1111
f0342d4568 fixed some tests 2025-09-17 18:04:02 +02:00
vabene1111
81f62de500 Merge pull request #4050 from TandoorRecipes/revert-4040-ipv6
Revert "feat: make nginx respect ipv6 disable fixes #3996"
2025-09-17 16:20:13 +02:00
vabene1111
f783949a61 Revert "feat: make nginx respect ipv6 disable fixes #3996" 2025-09-17 16:20:03 +02:00
vabene1111
820fad1b5c Merge pull request #4040 from wilmardo/ipv6
feat: make nginx respect ipv6 disable fixes #3996
2025-09-17 16:19:44 +02:00
vabene1111
1169abd942 mealie docs update 2025-09-17 07:55:29 +02:00
vabene1111
48e175f58f mealie importer working with settings 2025-09-17 07:50:56 +02:00
wilmardo
5450e18342 feat: make nginx respect ipv6 disable fixes #3996
Signed-off-by: wilmardo <info@wilmardenouden.nl>
2025-09-16 15:01:54 +02:00
vabene1111
ea590f8e49 mealie importer options 2025-09-16 08:00:22 +02:00
vabene1111
13626ca11b mealie importer improvements 2025-09-16 07:48:58 +02:00
vabene1111
f53fe1e3c4 import comments 2025-09-15 22:12:52 +02:00
vabene1111
d177316b47 mealie 1.0 importer WIP 2025-09-15 22:05:15 +02:00
vabene1111
338db1fac2 fixed default properties view 2025-09-15 21:26:51 +02:00
vabene1111
377619473c small fixes 2025-09-15 07:49:17 +02:00
vabene1111
000962c5bb moved create space to its own file 2025-09-14 12:05:09 +02:00
vabene1111
9228c1d59f fixed ai import layout 2025-09-14 11:55:58 +02:00
vabene1111
27007de7a0 improved start page with little recipes 2025-09-14 11:12:46 +02:00
vabene1111
29c99b66a1 fixed thermomix special symbol parser 2025-09-14 11:11:22 +02:00
vabene1111
bc179f430d Merge branch 'develop' of https://github.com/TandoorRecipes/recipes into develop 2025-09-14 11:03:53 +02:00
vabene1111
58c412ad95 space and user space api updates 2025-09-14 09:57:57 +02:00
vabene1111
4f248afe76 overhauled space management and settings system 2025-09-14 08:48:49 +02:00
vabene1111
f722d24eaa wip space editor 2025-09-14 07:37:31 +02:00
vabene1111
723b74509f moved space stuff to database and reworked invite link backend logic 2025-09-11 21:44:40 +02:00
vabene1111
ad4b1393dd various improvements 2025-09-11 18:58:44 +02:00
vabene1111
04bab7072c WIP stepper and language select component 2025-09-11 07:55:06 +02:00
vabene1111
6391cee9eb Merge pull request #4025 from TandoorRecipes/dependabot/npm_and_yarn/vue3/vite-7.1.5
Bump vite from 7.1.4 to 7.1.5 in /vue3
2025-09-11 07:07:50 +02:00
vabene1111
14884fc0d4 Merge pull request #3985 from TandoorRecipes/dependabot/github_actions/awalsh128/cache-apt-pkgs-action-1.5.3
Bump awalsh128/cache-apt-pkgs-action from 1.5.1 to 1.5.3
2025-09-11 07:07:44 +02:00
vabene1111
f2191f79dd auto space creation and redirect to welcome page 2025-09-10 22:18:09 +02:00
vabene1111
c2533d9ea2 add migration shortcut 2025-09-10 21:28:10 +02:00
dependabot[bot]
db72fdb1bb Bump vite from 7.1.4 to 7.1.5 in /vue3
Bumps [vite](https://github.com/vitejs/vite/tree/HEAD/packages/vite) from 7.1.4 to 7.1.5.
- [Release notes](https://github.com/vitejs/vite/releases)
- [Changelog](https://github.com/vitejs/vite/blob/main/packages/vite/CHANGELOG.md)
- [Commits](https://github.com/vitejs/vite/commits/v7.1.5/packages/vite)

---
updated-dependencies:
- dependency-name: vite
  dependency-version: 7.1.5
  dependency-type: direct:development
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-10 19:03:01 +00:00
dependabot[bot]
78252662cb Bump awalsh128/cache-apt-pkgs-action from 1.5.1 to 1.5.3
Bumps [awalsh128/cache-apt-pkgs-action](https://github.com/awalsh128/cache-apt-pkgs-action) from 1.5.1 to 1.5.3.
- [Release notes](https://github.com/awalsh128/cache-apt-pkgs-action/releases)
- [Commits](https://github.com/awalsh128/cache-apt-pkgs-action/compare/v1.5.1...v1.5.3)

---
updated-dependencies:
- dependency-name: awalsh128/cache-apt-pkgs-action
  dependency-version: 1.5.3
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-10 18:59:37 +00:00
vabene1111
4e078bf477 updated to django 5 2025-09-10 20:56:47 +02:00
vabene1111
2e9e226fe0 Merge pull request #3994 from TandoorRecipes/dependabot/npm_and_yarn/vue3/vite-plugin-pwa-1.0.3
Bump vite-plugin-pwa from 1.0.2 to 1.0.3 in /vue3
2025-09-10 20:44:54 +02:00
dependabot[bot]
18cfbd80ab Bump vite-plugin-pwa from 1.0.2 to 1.0.3 in /vue3
Bumps [vite-plugin-pwa](https://github.com/vite-pwa/vite-plugin-pwa) from 1.0.2 to 1.0.3.
- [Release notes](https://github.com/vite-pwa/vite-plugin-pwa/releases)
- [Commits](https://github.com/vite-pwa/vite-plugin-pwa/compare/v1.0.2...v1.0.3)

---
updated-dependencies:
- dependency-name: vite-plugin-pwa
  dependency-version: 1.0.3
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-10 18:42:09 +00:00
vabene1111
4d284b4fff Merge pull request #3984 from TandoorRecipes/dependabot/github_actions/actions/checkout-5
Bump actions/checkout from 4 to 5
2025-09-10 20:42:06 +02:00
vabene1111
b1128dd134 Merge pull request #3986 from TandoorRecipes/dependabot/pip/django-storages-1.14.6
Bump django-storages from 1.14.2 to 1.14.6
2025-09-10 20:41:46 +02:00
vabene1111
3aebf58406 Merge pull request #3987 from TandoorRecipes/dependabot/pip/djangorestframework-3.16.1
Bump djangorestframework from 3.15.2 to 3.16.1
2025-09-10 20:41:39 +02:00
vabene1111
f3816a77df Merge pull request #3988 from TandoorRecipes/dependabot/pip/django-prometheus-2.4.1
Bump django-prometheus from 2.3.1 to 2.4.1
2025-09-10 20:41:31 +02:00
vabene1111
e4183d79ab Merge pull request #3989 from TandoorRecipes/dependabot/pip/drf-spectacular-sidecar-2025.8.1
Bump drf-spectacular-sidecar from 2025.7.1 to 2025.8.1
2025-09-10 20:41:25 +02:00
vabene1111
f4aa1a083f Merge pull request #3990 from TandoorRecipes/dependabot/pip/python-dotenv-1.1.1
Bump python-dotenv from 1.0.0 to 1.1.1
2025-09-10 20:41:18 +02:00
vabene1111
ed5508b576 Merge pull request #3991 from TandoorRecipes/dependabot/npm_and_yarn/vue3/vue-i18n-11.1.11
Bump vue-i18n from 11.1.10 to 11.1.11 in /vue3
2025-09-10 20:40:57 +02:00
vabene1111
040e247487 Merge pull request #3992 from TandoorRecipes/dependabot/npm_and_yarn/vue3/vue-tsc-3.0.6
Bump vue-tsc from 2.2.10 to 3.0.6 in /vue3
2025-09-10 20:40:50 +02:00
vabene1111
5d28c7b17d Merge pull request #3995 from TandoorRecipes/dependabot/npm_and_yarn/vue3/vuetify-3.9.6
Bump vuetify from 3.9.3 to 3.9.6 in /vue3
2025-09-10 20:40:39 +02:00
vabene1111
15b2df07f2 Merge pull request #3993 from TandoorRecipes/dependabot/npm_and_yarn/vue3/vite-7.1.3
Bump vite from 6.3.5 to 7.1.3 in /vue3
2025-09-10 20:40:26 +02:00
vabene1111
ed8f97e9e0 Merge branch 'develop'
# Conflicts:
#	vue3/src/locales/de.json
2025-09-10 20:36:33 +02:00
vabene1111
034f68fc28 Merge branch 'develop' of https://github.com/TandoorRecipes/recipes into develop 2025-09-10 20:34:55 +02:00
vabene1111
0158087a0b fixed ai test 2025-09-10 20:34:51 +02:00
vabene1111
cb6bfd741d Merge pull request #4023 from TandoorRecipes/dependabot/pip/django-4.2.24
Bump django from 4.2.22 to 4.2.24
2025-09-10 20:34:21 +02:00
vabene1111
afeee5f7cb fixed link for subpath setups 2025-09-10 20:16:22 +02:00
vabene1111
b43d6e08d4 food batch editor implementation 2025-09-10 16:52:35 +02:00
vabene1111
1188624376 food batch update dialog and first api functions 2025-09-10 07:54:42 +02:00
dependabot[bot]
9ac837c969 Bump django from 4.2.22 to 4.2.24
Bumps [django](https://github.com/django/django) from 4.2.22 to 4.2.24.
- [Commits](https://github.com/django/django/compare/4.2.22...4.2.24)

---
updated-dependencies:
- dependency-name: django
  dependency-version: 4.2.24
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-10 02:19:20 +00:00
vabene1111
fc4b017d30 food batch update endpoint 2025-09-09 16:54:29 +02:00
vabene1111
4636ac28f9 ai system improvements 2025-09-09 16:30:54 +02:00
vabene1111
397912e87f WIP AI system 2025-09-09 14:58:32 +02:00
vabene1111
d0b860e623 shorter create message for model select 2025-09-09 13:36:05 +02:00
vabene1111
8a90ed1274 fixed ingredient parser error 2025-09-09 13:33:18 +02:00
vabene1111
286d707347 lots of AI provider stuff 2025-09-09 07:54:59 +02:00
vabene1111
98d308aee9 fixed space overview 2025-09-09 07:54:45 +02:00
vabene1111
a7c5240227 ai provider system 2025-09-08 22:15:57 +02:00
vabene1111
75fcff8e70 don't show sponsor link on hosted edition 2025-09-08 21:15:20 +02:00
vabene1111
2f27cf4deb fixed meal plan loading 2025-09-08 21:13:33 +02:00
vabene1111
686b595f45 fixed demo auto login 2025-09-08 20:55:20 +02:00
vabene1111
0f9f9e8f7c Merge pull request #4013 from tomtjes/fix-logo-font
Make logo consistent across end user systems
2025-09-08 20:15:19 +02:00
dependabot[bot]
7be7c5b954 Bump vite from 6.3.5 to 7.1.3 in /vue3
Bumps [vite](https://github.com/vitejs/vite/tree/HEAD/packages/vite) from 6.3.5 to 7.1.3.
- [Release notes](https://github.com/vitejs/vite/releases)
- [Changelog](https://github.com/vitejs/vite/blob/main/packages/vite/CHANGELOG.md)
- [Commits](https://github.com/vitejs/vite/commits/v7.1.3/packages/vite)

---
updated-dependencies:
- dependency-name: vite
  dependency-version: 7.1.3
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-08 06:53:43 +00:00
dependabot[bot]
0853a9ec64 Bump vuetify from 3.9.3 to 3.9.6 in /vue3
Bumps [vuetify](https://github.com/vuetifyjs/vuetify/tree/HEAD/packages/vuetify) from 3.9.3 to 3.9.6.
- [Release notes](https://github.com/vuetifyjs/vuetify/releases)
- [Commits](https://github.com/vuetifyjs/vuetify/commits/v3.9.6/packages/vuetify)

---
updated-dependencies:
- dependency-name: vuetify
  dependency-version: 3.9.6
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-08 06:51:27 +00:00
dependabot[bot]
fa3daee965 Bump vue-tsc from 2.2.10 to 3.0.6 in /vue3
Bumps [vue-tsc](https://github.com/vuejs/language-tools/tree/HEAD/packages/tsc) from 2.2.10 to 3.0.6.
- [Release notes](https://github.com/vuejs/language-tools/releases)
- [Changelog](https://github.com/vuejs/language-tools/blob/master/CHANGELOG.md)
- [Commits](https://github.com/vuejs/language-tools/commits/v3.0.6/packages/tsc)

---
updated-dependencies:
- dependency-name: vue-tsc
  dependency-version: 3.0.6
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-08 06:42:43 +00:00
vabene1111
e6abdf8cd4 fixed vite config 2025-09-08 08:38:46 +02:00
vabene1111
741e9eb370 plugin and hosted fixes 2025-09-08 08:31:01 +02:00
tomtjes
7db523d8c4 convert text to path 2025-09-07 19:34:11 +00:00
vabene1111
41f0060c43 basics of AI provider system 2025-09-05 21:36:43 +02:00
vabene1111
5572833f64 don't display 0 amount in ingredient table 2025-09-05 18:02:09 +02:00
vabene1111
780e441a3b fixed migration tree 2025-09-05 17:50:02 +02:00
vabene1111
c4fd2d0b4e fixed timer localization 2025-09-05 17:46:18 +02:00
vabene1111
1c6618f452 Merge pull request #3999 from icedieler/patch-1
Update nginx configuration for manual setup
2025-09-05 17:30:00 +02:00
vabene1111
8c96a75a1e basics of ai database 2025-09-05 14:50:10 +02:00
Matthias Lange
f099e2e5d3 Update nginx configuration for manual setup
Since v2, Tandoor has added user session tracking, which requires the reverse proxy to set an extra header.

This change adds the `X-Forwarded-For` header to the example nginx configuration. This header fixes the issue described in #3943.
2025-09-03 10:22:49 +02:00
dependabot[bot]
774c05e76f Bump vue-i18n from 11.1.10 to 11.1.11 in /vue3
Bumps [vue-i18n](https://github.com/intlify/vue-i18n/tree/HEAD/packages/vue-i18n) from 11.1.10 to 11.1.11.
- [Release notes](https://github.com/intlify/vue-i18n/releases)
- [Changelog](https://github.com/intlify/vue-i18n/blob/master/CHANGELOG.md)
- [Commits](https://github.com/intlify/vue-i18n/commits/v11.1.11/packages/vue-i18n)

---
updated-dependencies:
- dependency-name: vue-i18n
  dependency-version: 11.1.11
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-01 01:07:24 +00:00
dependabot[bot]
b08c39e284 Bump python-dotenv from 1.0.0 to 1.1.1
Bumps [python-dotenv](https://github.com/theskumar/python-dotenv) from 1.0.0 to 1.1.1.
- [Release notes](https://github.com/theskumar/python-dotenv/releases)
- [Changelog](https://github.com/theskumar/python-dotenv/blob/main/CHANGELOG.md)
- [Commits](https://github.com/theskumar/python-dotenv/compare/v1.0.0...v1.1.1)

---
updated-dependencies:
- dependency-name: python-dotenv
  dependency-version: 1.1.1
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-01 00:05:39 +00:00
dependabot[bot]
ae036cfa9a Bump drf-spectacular-sidecar from 2025.7.1 to 2025.8.1
Bumps [drf-spectacular-sidecar](https://github.com/tfranzel/drf-spectacular-sidecar) from 2025.7.1 to 2025.8.1.
- [Commits](https://github.com/tfranzel/drf-spectacular-sidecar/compare/2025.7.1...2025.8.1)

---
updated-dependencies:
- dependency-name: drf-spectacular-sidecar
  dependency-version: 2025.8.1
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-01 00:05:35 +00:00
dependabot[bot]
37628c1735 Bump django-prometheus from 2.3.1 to 2.4.1
Bumps [django-prometheus](https://github.com/korfuri/django-prometheus) from 2.3.1 to 2.4.1.
- [Release notes](https://github.com/korfuri/django-prometheus/releases)
- [Changelog](https://github.com/django-commons/django-prometheus/blob/master/CHANGELOG.md)
- [Commits](https://github.com/korfuri/django-prometheus/compare/v2.3.1...v2.4.1)

---
updated-dependencies:
- dependency-name: django-prometheus
  dependency-version: 2.4.1
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-01 00:05:31 +00:00
dependabot[bot]
530a6db35c Bump djangorestframework from 3.15.2 to 3.16.1
Bumps [djangorestframework](https://github.com/encode/django-rest-framework) from 3.15.2 to 3.16.1.
- [Release notes](https://github.com/encode/django-rest-framework/releases)
- [Commits](https://github.com/encode/django-rest-framework/compare/3.15.2...3.16.1)

---
updated-dependencies:
- dependency-name: djangorestframework
  dependency-version: 3.16.1
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-01 00:05:28 +00:00
dependabot[bot]
2930093da0 Bump django-storages from 1.14.2 to 1.14.6
Bumps [django-storages](https://github.com/jschneier/django-storages) from 1.14.2 to 1.14.6.
- [Changelog](https://github.com/jschneier/django-storages/blob/master/CHANGELOG.rst)
- [Commits](https://github.com/jschneier/django-storages/compare/1.14.2...1.14.6)

---
updated-dependencies:
- dependency-name: django-storages
  dependency-version: 1.14.6
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-01 00:05:24 +00:00
dependabot[bot]
b7e63a466b Bump actions/checkout from 4 to 5
Bumps [actions/checkout](https://github.com/actions/checkout) from 4 to 5.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v4...v5)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: '5'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-01 00:02:38 +00:00
Whalysonramos
a35c92439c Translated using Weblate (Portuguese)
Currently translated at 43.9% (350 of 797 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/pt/
2025-08-20 19:54:18 +00:00
Whalysonramos
eed09a7891 Translated using Weblate (Portuguese)
Currently translated at 43.7% (349 of 797 strings)

Translation: Tandoor/Recipes Frontend
Translate-URL: http://translate.tandoor.dev/projects/tandoor/recipes-frontend/pt/
2025-08-20 19:54:17 +00:00
270 changed files with 45173 additions and 39061 deletions

View File

@@ -21,7 +21,7 @@ jobs:
suffix: ""
continue-on-error: false
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Get version number
id: get_version

View File

@@ -12,8 +12,8 @@ jobs:
python-version: ["3.12"]
node-version: ["22"]
steps:
- uses: actions/checkout@v4
- uses: awalsh128/cache-apt-pkgs-action@v1.5.1
- uses: actions/checkout@v5
- uses: awalsh128/cache-apt-pkgs-action@v1.5.3
with:
packages: libsasl2-dev python3-dev libxml2-dev libxmlsec1-dev libxslt-dev libxmlsec1-openssl libxslt-dev libldap2-dev libssl-dev gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev xmlsec-dev xmlsec build-base g++ curl
version: 1.0

View File

@@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head.

View File

@@ -12,7 +12,7 @@ jobs:
if: github.repository_owner == 'TandoorRecipes' && ${{ github.event.workflow_run.conclusion == 'success' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: actions/setup-python@v5
with:
python-version: 3.x

View File

@@ -15,14 +15,14 @@
<a href="https://discord.gg/RhzBrfWgtp" target="_blank" rel="noopener noreferrer"><img src="https://badgen.net/badge/icon/discord?icon=discord&label" ></a>
<a href="https://hub.docker.com/r/vabene1111/recipes" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/docker/pulls/vabene1111/recipes" ></a>
<a href="https://github.com/vabene1111/recipes/releases/latest" rel="noopener noreferrer"><img src="https://img.shields.io/github/v/release/vabene1111/recipes" ></a>
<a href="https://app.tandoor.dev/accounts/login/?demo" rel="noopener noreferrer"><img src="https://img.shields.io/badge/demo-available-success" ></a>
<a href="https://app.tandoor.dev/e/demo-auto-login/" rel="noopener noreferrer"><img src="https://img.shields.io/badge/demo-available-success" ></a>
</p>
<p align="center">
<a href="https://tandoor.dev" target="_blank" rel="noopener noreferrer">Website</a> •
<a href="https://docs.tandoor.dev/install/docker/" target="_blank" rel="noopener noreferrer">Installation</a> •
<a href="https://docs.tandoor.dev/" target="_blank" rel="noopener noreferrer">Docs</a> •
<a href="https://app.tandoor.dev/accounts/login/?demo" target="_blank" rel="noopener noreferrer">Demo</a> •
<a href="https://app.tandoor.dev/e/demo-auto-login/" target="_blank" rel="noopener noreferrer">Demo</a> •
<a href="https://community.tandoor.dev" target="_blank" rel="noopener noreferrer">Community</a> •
<a href="https://discord.gg/RhzBrfWgtp" target="_blank" rel="noopener noreferrer">Discord</a>
</p>
@@ -30,9 +30,11 @@
![Preview](docs/preview.png)
## Core Features
- 🥗 **Manage your recipes** - Manage your ever-growing recipe collection
- 📆 **Plan** - multiple meals for each day
- 🛒 **Shopping lists** - via the meal plan or straight from recipes
- 🪄 **Use AI** to recognize images, sort recipe steps, find nutrition facts and more
- 📚 **Cookbooks** - collect recipes into books
- 👪 **Share and collaborate** on recipes with friends and family
@@ -62,12 +64,13 @@ a public page.
Documentation can be found [here](https://docs.tandoor.dev/).
## Support our work
## ❤️ Support our work ❤️
Tandoor is developed by volunteers in their free time just because it's fun. That said, earning
some money with the project allows us to spend more time on it and thus make improvements we otherwise couldn't.
Because of that, there are several ways you can support us:
- **GitHub Sponsors** You can sponsor contributors of this project on GitHub: [vabene1111](https://github.com/sponsors/vabene1111)
- **Patreon** You can sponsor contributors of this project on Patreon: [vabene1111](https://www.patreon.com/cw/vabene1111)
- **Host at Hetzner** We have been very happy customers of Hetzner for multiple years for all of our projects. If you want to get into self-hosting or are tired of the expensive big providers, their cloud servers are a great place to get started. When you sign up via our [referral link](https://hetzner.cloud/?ref=ISdlrLmr9kGj) you will get 20€ worth of cloud credits and we get a small kickback too.
- **Let us host for you** We are offering a [hosted version](https://app.tandoor.dev) where all profits support us and the development of Tandoor (currently only available in Germany).

boot.sh
View File

@@ -22,6 +22,14 @@ display_warning() {
echo -e "$1"
}
# prepare nginx config
envsubst '$MEDIA_ROOT $STATIC_ROOT $TANDOOR_PORT' < /opt/recipes/http.d/Recipes.conf.template > /opt/recipes/http.d/Recipes.conf
# start nginx early to display error pages
echo "Starting nginx"
nginx
echo "Checking configuration..."
# SECRET_KEY (or a valid file at SECRET_KEY_FILE) must be set in .env file
@@ -93,7 +101,7 @@ fi
echo "Collecting static files, this may take a while..."
python manage.py collectstatic --noinput
python manage.py collectstatic --noinput --clear
echo "Done"
@@ -101,13 +109,6 @@ chmod -R 755 ${MEDIA_ROOT:-/opt/recipes/mediafiles}
ipv6_disable=$(cat /sys/module/ipv6/parameters/disable)
# prepare nginx config
envsubst '$MEDIA_ROOT $STATIC_ROOT $TANDOOR_PORT' < /opt/recipes/http.d/Recipes.conf.template > /opt/recipes/http.d/Recipes.conf
# start nginx
echo "Starting nginx"
nginx
echo "Starting gunicorn"
# Check if IPv6 is enabled, only then run gunicorn with ipv6 support
if [ "$ipv6_disable" -eq 0 ]; then

View File

@@ -17,7 +17,7 @@ from .models import (BookmarkletImport, Comment, CookLog, CustomFilter, Food, Im
ShoppingListEntry, ShoppingListRecipe, Space, Step, Storage,
Supermarket, SupermarketCategory, SupermarketCategoryRelation, Sync, SyncLog,
TelegramBot, Unit, UnitConversion, UserFile, UserPreference, UserSpace,
ViewLog, ConnectorConfig)
ViewLog, ConnectorConfig, AiProvider, AiLog)
admin.site.login = secure_admin_login(admin.site.login)
@@ -90,6 +90,20 @@ class SearchPreferenceAdmin(admin.ModelAdmin):
admin.site.register(SearchPreference, SearchPreferenceAdmin)
class AiProviderAdmin(admin.ModelAdmin):
list_display = ('name', 'space', 'model_name',)
search_fields = ('name', 'space', 'model_name',)
admin.site.register(AiProvider, AiProviderAdmin)
class AiLogAdmin(admin.ModelAdmin):
list_display = ('ai_provider', 'function', 'credit_cost', 'created_by', 'created_at',)
admin.site.register(AiLog, AiLogAdmin)
class StorageAdmin(admin.ModelAdmin):
list_display = ('name', 'method')
search_fields = ('name',)

View File

@@ -26,6 +26,7 @@ class ImportExportBase(forms.Form):
PAPRIKA = 'PAPRIKA'
NEXTCLOUD = 'NEXTCLOUD'
MEALIE = 'MEALIE'
MEALIE1 = 'MEALIE1'
CHOWDOWN = 'CHOWDOWN'
SAFFRON = 'SAFFRON'
CHEFTAP = 'CHEFTAP'
@@ -46,7 +47,7 @@ class ImportExportBase(forms.Form):
PDF = 'PDF'
GOURMET = 'GOURMET'
type = forms.ChoiceField(choices=((DEFAULT, _('Default')), (PAPRIKA, 'Paprika'), (NEXTCLOUD, 'Nextcloud Cookbook'), (MEALIE, 'Mealie'), (CHOWDOWN, 'Chowdown'),
type = forms.ChoiceField(choices=((DEFAULT, _('Default')), (PAPRIKA, 'Paprika'), (NEXTCLOUD, 'Nextcloud Cookbook'), (MEALIE, 'Mealie'), (MEALIE1, 'Mealie1'), (CHOWDOWN, 'Chowdown'),
(SAFFRON, 'Saffron'), (CHEFTAP, 'ChefTap'), (PEPPERPLATE, 'Pepperplate'), (RECETTETEK, 'RecetteTek'), (RECIPESAGE, 'Recipe Sage'),
(DOMESTICA, 'Domestica'), (MEALMASTER, 'MealMaster'), (REZKONV, 'RezKonv'), (OPENEATS, 'Openeats'), (RECIPEKEEPER, 'Recipe Keeper'),
(PLANTOEAT, 'Plantoeat'), (COOKBOOKAPP, 'CookBookApp'), (COPYMETHAT, 'CopyMeThat'), (PDF, 'PDF'), (MELARECIPES, 'Melarecipes'),
@@ -75,6 +76,11 @@ class ImportForm(ImportExportBase):
files = MultipleFileField(required=True)
duplicates = forms.BooleanField(help_text=_('To prevent duplicates recipes with the same name as existing ones are ignored. Check this box to import everything.'),
required=False)
meal_plans = forms.BooleanField(required=False)
shopping_lists = forms.BooleanField(required=False)
nutrition_per_serving = forms.BooleanField(required=False) # some managers (e.g. mealie) do not specify what the nutrition values relate to, so we let the user choose
class ExportForm(ImportExportBase):
recipes = forms.ModelMultipleChoiceField(widget=MultiSelectWidget, queryset=Recipe.objects.none(), required=False)
all = forms.BooleanField(required=False)

View File

@@ -0,0 +1,85 @@
from decimal import Decimal
from django.utils import timezone
from django.db.models import Sum
from litellm import CustomLogger
from cookbook.models import AiLog
from recipes import settings
def get_monthly_token_usage(space):
"""
returns the number of credits the space has used in the current month
"""
token_usage = AiLog.objects.filter(space=space, credits_from_balance=False, created_at__month=timezone.now().month).aggregate(Sum('credit_cost'))['credit_cost__sum']
if token_usage is None:
token_usage = 0
return token_usage
def has_monthly_token(space):
"""
checks if the monthly credit limit has been exceeded
"""
return get_monthly_token_usage(space) < space.ai_credits_monthly
def can_perform_ai_request(space):
return (has_monthly_token(space) or space.ai_credits_balance > 0) and space.ai_enabled
class AiCallbackHandler(CustomLogger):
space = None
user = None
ai_provider = None
function = None
def __init__(self, space, user, ai_provider, function):
super().__init__()
self.space = space
self.user = user
self.ai_provider = ai_provider
self.function = function
def log_pre_api_call(self, model, messages, kwargs):
pass
def log_post_api_call(self, kwargs, response_obj, start_time, end_time):
pass
def log_success_event(self, kwargs, response_obj, start_time, end_time):
self.create_ai_log(kwargs, response_obj, start_time, end_time)
def log_failure_event(self, kwargs, response_obj, start_time, end_time):
self.create_ai_log(kwargs, response_obj, start_time, end_time)
def create_ai_log(self, kwargs, response_obj, start_time, end_time):
credit_cost = 0
credits_from_balance = False
if self.ai_provider.log_credit_cost:
credit_cost = kwargs.get("response_cost", 0) * 100
if (not has_monthly_token(self.space)) and self.space.ai_credits_balance > 0:
remaining_balance = self.space.ai_credits_balance - Decimal(str(credit_cost))
if remaining_balance < 0:
remaining_balance = 0
if settings.HOSTED and self.space.ai_credits_monthly == 0:
self.space.ai_enabled = False
self.space.ai_credits_balance = remaining_balance
credits_from_balance = True
self.space.save()
AiLog.objects.create(
created_by=self.user,
space=self.space,
ai_provider=self.ai_provider,
start_time=start_time,
end_time=end_time,
input_tokens=response_obj['usage']['prompt_tokens'],
output_tokens=response_obj['usage']['completion_tokens'],
function=self.function,
credit_cost=credit_cost,
credits_from_balance=credits_from_balance,
)
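
For context, a minimal sketch (not part of this diff) of how a CustomLogger subclass like the AiCallbackHandler above is typically registered with litellm before a completion call; the module path, wrapper function and message payload are illustrative assumptions.

```python
import litellm

from cookbook.helper.ai_helper import AiCallbackHandler, can_perform_ai_request  # module path is an assumption


def run_ai_request(space, user, ai_provider, function, messages):
    # gate the request on the space's credit state (helper defined above)
    if not can_perform_ai_request(space):
        raise PermissionError('AI is disabled for this space or no credits are left')

    # register the handler so log_success_event / log_failure_event create AiLog rows
    litellm.callbacks = [AiCallbackHandler(space, user, ai_provider, function)]

    # model name comes from the AiProvider record; API key handling is omitted here
    return litellm.completion(model=ai_provider.model_name, messages=messages)
```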

View File

@@ -0,0 +1,22 @@
def add_to_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids):
"""
given a model, the base and related field and the base and related ids, bulk create relation objects
"""
relation_objects = []
for b in base_ids:
for r in related_ids:
relation_objects.append(relation_model(**{base_field_name: b, related_field_name: r}))
relation_model.objects.bulk_create(relation_objects, ignore_conflicts=True, unique_fields=(base_field_name, related_field_name,))
def remove_from_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids):
relation_model.objects.filter(**{f'{base_field_name}__in': base_ids, f'{related_field_name}__in': related_ids}).delete()
def remove_all_from_relation(relation_model, base_field_name, base_ids):
relation_model.objects.filter(**{f'{base_field_name}__in': base_ids}).delete()
def set_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids):
remove_all_from_relation(relation_model, base_field_name, base_ids)
add_to_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids)
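
A brief usage sketch for the relation helpers above, assuming the through-model field naming used elsewhere in this diff (recipe_id / keyword_id); the ids are illustrative.

```python
from cookbook.models import Recipe

# bulk-tag recipes 1 and 2 with keywords 5 and 7, silently skipping pairs that already exist
add_to_relation(Recipe.keywords.through, 'recipe_id', [1, 2], 'keyword_id', [5, 7])

# remove only the listed recipe/keyword pairs
remove_from_relation(Recipe.keywords.through, 'recipe_id', [1], 'keyword_id', [5])

# replace every keyword relation of recipe 1 with keyword 9
set_relation(Recipe.keywords.through, 'recipe_id', [1], 'keyword_id', [9])
```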

View File

@@ -51,10 +51,10 @@ class OpenDataImporter:
for field in field_list:
if isinstance(getattr(obj, field), float) or isinstance(getattr(obj, field), Decimal):
if abs(float(getattr(obj, field)) - float(existing_obj[field])) > 0.001: # convert both to float and check if basically equal
print(f'comparing FLOAT {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
#print(f'comparing FLOAT {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
return False
elif getattr(obj, field) != existing_obj[field]:
print(f'comparing {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
#print(f'comparing {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
return False
return True
@@ -342,7 +342,7 @@ class OpenDataImporter:
'name': self.data[datatype][k]['name'],
'plural_name': self.data[datatype][k]['plural_name'] if self.data[datatype][k]['plural_name'] != '' else None,
'supermarket_category_id': self.slug_id_cache['category'][self.data[datatype][k]['store_category']] if self.data[datatype][k]['store_category'] in self.slug_id_cache['category'] else None,
'fdc_id': re.sub(r'\D', '', self.data[datatype][k]['fdc_id']) if self.data[datatype][k]['fdc_id'] != '' else None,
'fdc_id': re.sub(r'\D', '', str(self.data[datatype][k]['fdc_id'])) if self.data[datatype][k]['fdc_id'] != '' else None,
'open_data_slug': k,
'properties_food_unit_id': None,
'space_id': self.request.space.id,
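
A quick illustration of why the added str() cast in the fdc_id line matters; the id value is made up.

```python
import re

fdc_id = 171705                   # open data entries sometimes deliver the fdc_id as a number
re.sub(r'\D', '', str(fdc_id))    # '171705' – works with the patched line
# re.sub(r'\D', '', fdc_id)       # TypeError: expected string or bytes-like object
```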

View File

@@ -3,17 +3,19 @@ import inspect
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import user_passes_test
from django.contrib.auth.models import Group
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.http import HttpResponseRedirect
from django.urls import reverse, reverse_lazy
from django.utils.translation import gettext as _
from django_scopes import scopes_disabled
from oauth2_provider.contrib.rest_framework import TokenHasReadWriteScope, TokenHasScope
from oauth2_provider.models import AccessToken
from rest_framework import permissions
from rest_framework.permissions import SAFE_METHODS
from cookbook.models import Recipe, ShareLink, UserSpace
import random
from cookbook.models import Recipe, ShareLink, UserSpace, Space
def get_allowed_groups(groups_required):
@@ -331,6 +333,25 @@ class CustomRecipePermission(permissions.BasePermission):
or has_group_permission(request.user, ['user'])) and obj.space == request.space
class CustomAiProviderPermission(permissions.BasePermission):
"""
Custom permission class for the AiProvider api endpoint
users: can read all
admins: can read and write
superusers: can read and write + write providers without a space
"""
message = _('You do not have the required permissions to view this page!')
def has_permission(self, request, view): # user is either at least a user and the request is safe
return (has_group_permission(request.user, ['user']) and request.method in SAFE_METHODS) or (has_group_permission(request.user, ['admin']) or request.user.is_superuser)
# editing of global providers allowed for superusers, space providers by admins and users can read only access
def has_object_permission(self, request, view, obj):
return ((obj.space is None and request.user.is_superuser)
or (obj.space == request.space and has_group_permission(request.user, ['admin']))
or (obj.space == request.space and has_group_permission(request.user, ['user']) and request.method in SAFE_METHODS))
class CustomUserPermission(permissions.BasePermission):
"""
Custom permission class for user api endpoint
@@ -437,3 +458,36 @@ class IsReadOnlyDRF(permissions.BasePermission):
def has_permission(self, request, view):
return request.method in SAFE_METHODS
class IsCreateDRF(permissions.BasePermission):
message = 'You cannot interact with this object, you can only create'
def has_permission(self, request, view):
return request.method == 'POST'
def create_space_for_user(user, name=None):
with scopes_disabled():
if not name:
name = f"{user.username}'s Space"
if Space.objects.filter(name=name).exists():
name = f'{name} #{random.randrange(1, 10 ** 5)}'
created_space = Space(name=name,
created_by=user,
max_file_storage_mb=settings.SPACE_DEFAULT_MAX_FILES,
max_recipes=settings.SPACE_DEFAULT_MAX_RECIPES,
max_users=settings.SPACE_DEFAULT_MAX_USERS,
allow_sharing=settings.SPACE_DEFAULT_ALLOW_SHARING,
ai_enabled=settings.SPACE_AI_ENABLED,
ai_credits_monthly=settings.SPACE_AI_CREDITS_MONTHLY,
space_setup_completed=False, )
created_space.save()
UserSpace.objects.filter(user=user).update(active=False)
user_space = UserSpace.objects.create(space=created_space, user=user, active=True)
user_space.groups.add(Group.objects.filter(name='admin').get())
return user_space
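
A hedged sketch of how the new permission class and space factory above might be wired up; the viewset and serializer are illustrative, only CustomAiProviderPermission and create_space_for_user come from this diff.

```python
from rest_framework import viewsets

from cookbook.models import AiProvider


class AiProviderViewSet(viewsets.ModelViewSet):
    # the real AiProvider viewset/serializer are not part of this excerpt;
    # this only illustrates attaching the new permission class
    queryset = AiProvider.objects.all()
    serializer_class = AiProviderSerializer             # assumed serializer name
    permission_classes = [CustomAiProviderPermission]   # users: read, space admins: write, superusers: global providers


# elsewhere (e.g. after signup) a space can be bootstrapped with:
#   user_space = create_space_for_user(request.user)
```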

View File

@@ -288,7 +288,7 @@ class RecipeSearch():
def _updated_on_filter(self):
if self._updatedon:
self._queryset = self._queryset.filter(updated_at__date__date=self._updatedon)
self._queryset = self._queryset.filter(updated_at__date=self._updatedon)
elif self._updatedon_lte:
self._queryset = self._queryset.filter(updated_at__date__lte=self._updatedon_lte)
elif self._updatedon_gte:

View File

@@ -155,7 +155,7 @@ def get_from_scraper(scrape, request):
# assign steps
try:
for i in parse_instructions(scrape.instructions()):
for i in parse_instructions(scrape.instructions_list()):
recipe_json['steps'].append({
'instruction': i,
'ingredients': [],
@@ -177,11 +177,11 @@ def get_from_scraper(scrape, request):
for x in scrape.ingredients():
if x.strip() != '':
try:
amount, unit, ingredient, note = ingredient_parser.parse(x)
amount, unit, food, note = ingredient_parser.parse(x)
ingredient = {
'amount': amount,
'food': {
'name': ingredient,
'name': food,
},
'unit': None,
'note': note,
@@ -315,14 +315,29 @@ def clean_instruction_string(instruction):
# handle unsupported, special UTF8 character in Thermomix-specific instructions,
# that happen in nearly every recipe on Cookidoo, Zaubertopf Club, Rezeptwelt
# and in Thermomix-specific recipes on many other sites
return normalized_string \
.replace("", _('reverse rotation')) \
.replace("", _('careful rotation')) \
.replace("", _('knead')) \
.replace("Andicken ", _('thicken')) \
.replace("Erwärmen ", _('warm up')) \
.replace("Fermentieren ", _('ferment')) \
.replace("Sous-vide ", _("sous-vide"))
normalized_string = normalized_string \
.replace(u"\uE003", _('reverse rotation')) \
.replace(u"\uE002", _('careful rotation')) \
.replace(u"\uE001", _('knead')) \
.replace(u"\uE031", _('thicken')) \
.replace(u"\uE019", _('warm up')) \
.replace(u"\uE02E", _('ferment')) \
.replace(u"\uE018", _('slow cook')) \
.replace(u"\uE033", _('egg boiler')) \
.replace(u"\uE016", _('kettle')) \
.replace(u"\uE01E", _('blend')) \
.replace(u"\uE011", _('pre-clean')) \
.replace(u"\uE026", _('high temperature')) \
.replace(u"\uE00D", _('rice cooker')) \
.replace(u"\uE00C", _('caramelize')) \
.replace(u"\uE038", _('peeler')) \
.replace(u"\uE037", _('slicer')) \
.replace(u"\uE036", _('grater')) \
.replace(u"\uE04C", _('spiralizer')) \
.replace(u"\uE02D", _("sous-vide"))
return normalized_string
def parse_instructions(instructions):
@@ -403,6 +418,8 @@ def parse_servings_text(servings):
def parse_time(recipe_time):
if not recipe_time:
return 0
if type(recipe_time) not in [int, float]:
try:
recipe_time = float(re.search(r'\d+', recipe_time).group())

View File

@@ -1,8 +1,15 @@
from django.contrib.auth.models import Group
from django.http import HttpResponseRedirect
from django.urls import reverse
from django_scopes import scope, scopes_disabled
from oauth2_provider.contrib.rest_framework import OAuth2Authentication
from psycopg2.errors import UniqueViolation
from rest_framework.exceptions import AuthenticationFailed
import random
from cookbook.helper.permission_helper import create_space_for_user
from cookbook.models import Space, UserSpace
from cookbook.views import views
from recipes import settings
@@ -34,16 +41,28 @@ class ScopeMiddleware:
if request.path.startswith(prefix + '/switch-space/'):
return self.get_response(request)
with scopes_disabled():
if request.user.userspace_set.count() == 0 and not reverse('account_logout') in request.path:
return views.space_overview(request)
if request.path.startswith(prefix + '/invite/'):
return self.get_response(request)
# get active user space, if for some reason more than one space is active select first (group permission checks will fail, this is not intended at this point)
user_space = request.user.userspace_set.filter(active=True).first()
if not user_space:
return views.space_overview(request)
if not user_space and request.user.userspace_set.count() > 0:
# if the user has a userspace but nothing is active, activate the first one
user_space = request.user.userspace_set.first()
if user_space:
user_space.active = True
user_space.save()
if not user_space:
if 'signup_token' in request.session:
# if user is authenticated, has no space but a signup token (InviteLink) is present, redirect to invite link logic
return HttpResponseRedirect(reverse('view_invite', args=[request.session.pop('signup_token', '')]))
else:
# if the user does not yet have a space, create one for them
user_space = create_space_for_user(request.user)
# TODO remove the need for this view
if user_space.groups.count() == 0 and not reverse('account_logout') in request.path:
return views.no_groups(request)

View File

@@ -26,6 +26,12 @@ class Integration:
files = None
export_type = None
ignored_recipes = []
import_log = None
import_duplicates = False
import_meal_plans = True
import_shopping_lists = True
nutrition_per_serving = False
def __init__(self, request, export_type):
"""
@@ -102,7 +108,7 @@ class Integration:
"""
return True
def do_import(self, files, il, import_duplicates):
def do_import(self, files, il, import_duplicates, meal_plans=True, shopping_lists=True, nutrition_per_serving=False):
"""
Imports given files
:param import_duplicates: if true duplicates are imported as well
@@ -111,6 +117,12 @@ class Integration:
:return: HttpResponseRedirect to the recipe search showing all imported recipes
"""
with scope(space=self.request.space):
self.import_log = il
self.import_duplicates = import_duplicates
self.import_meal_plans = meal_plans
self.import_shopping_lists = shopping_lists
self.nutrition_per_serving = nutrition_per_serving
try:
self.files = files
@@ -166,20 +178,24 @@ class Integration:
il.total_recipes = len(new_file_list)
file_list = new_file_list
for z in file_list:
try:
if not hasattr(z, 'filename') or isinstance(z, Tag):
recipe = self.get_recipe_from_file(z)
else:
recipe = self.get_recipe_from_file(BytesIO(import_zip.read(z.filename)))
recipe.keywords.add(self.keyword)
il.msg += self.get_recipe_processed_msg(recipe)
self.handle_duplicates(recipe, import_duplicates)
il.imported_recipes += 1
il.save()
except Exception as e:
traceback.print_exc()
self.handle_exception(e, log=il, message=f'-------------------- \nERROR \n{e}\n--------------------\n')
if isinstance(self, cookbook.integration.mealie1.Mealie1):
# since the mealie 1.0 export is a backup and not a classic recipe export we treat it a bit differently
recipes = self.get_recipe_from_file(import_zip)
else:
for z in file_list:
try:
if not hasattr(z, 'filename') or isinstance(z, Tag):
recipe = self.get_recipe_from_file(z)
else:
recipe = self.get_recipe_from_file(BytesIO(import_zip.read(z.filename)))
recipe.keywords.add(self.keyword)
il.msg += self.get_recipe_processed_msg(recipe)
self.handle_duplicates(recipe, import_duplicates)
il.imported_recipes += 1
il.save()
except Exception as e:
traceback.print_exc()
self.handle_exception(e, log=il, message=f'-------------------- \nERROR \n{e}\n--------------------\n')
import_zip.close()
elif '.json' in f['name'] or '.xml' in f['name'] or '.txt' in f['name'] or '.mmf' in f['name'] or '.rk' in f['name'] or '.melarecipe' in f['name']:
data_list = self.split_recipe_file(f['file'])
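
A hedged sketch of how the extended do_import() signature and the new ImportForm fields from this diff fit together; the surrounding view plumbing (form, files, import_log) is illustrative, and the forms module path is an assumption.

```python
from cookbook.forms import ImportForm                 # module path is an assumption
from cookbook.integration.mealie1 import Mealie1      # path as referenced in do_import above

integration = Mealie1(request, ImportForm.MEALIE1)
integration.do_import(
    files,
    import_log,
    form.cleaned_data['duplicates'],
    meal_plans=form.cleaned_data['meal_plans'],
    shopping_lists=form.cleaned_data['shopping_lists'],
    nutrition_per_serving=form.cleaned_data['nutrition_per_serving'],
)
```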

View File

@@ -0,0 +1,352 @@
import json
import re
import traceback
import uuid
from decimal import Decimal
from io import BytesIO
from zipfile import ZipFile
from gettext import gettext as _
from django.db import transaction
from cookbook.helper import ingredient_parser
from cookbook.helper.image_processing import get_filetype
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text, parse_time
from cookbook.integration.integration import Integration
from cookbook.models import Ingredient, Keyword, Recipe, Step, Food, Unit, SupermarketCategory, PropertyType, Property, MealType, MealPlan, CookLog, ShoppingListEntry
class Mealie1(Integration):
"""
integration for Mealie version 1.0 and later
"""
def get_recipe_from_file(self, file):
mealie_database = json.loads(BytesIO(file.read('database.json')).getvalue().decode("utf-8"))
self.import_log.total_recipes = len(mealie_database['recipes'])
self.import_log.msg += f"Importing {len(mealie_database["categories"]) + len(mealie_database["tags"])} tags and categories as keywords...\n"
self.import_log.save()
keywords_categories_dict = {}
for c in mealie_database['categories']:
if keyword := Keyword.objects.filter(name=c['name'], space=self.request.space).first():
keywords_categories_dict[c['id']] = keyword.pk
else:
keyword = Keyword.objects.create(name=c['name'], space=self.request.space)
keywords_categories_dict[c['id']] = keyword.pk
keywords_tags_dict = {}
for t in mealie_database['tags']:
if keyword := Keyword.objects.filter(name=t['name'], space=self.request.space).first():
keywords_tags_dict[t['id']] = keyword.pk
else:
keyword = Keyword.objects.create(name=t['name'], space=self.request.space)
keywords_tags_dict[t['id']] = keyword.pk
self.import_log.msg += f"Importing {len(mealie_database["multi_purpose_labels"])} multi purpose labels as supermarket categories...\n"
self.import_log.save()
supermarket_categories_dict = {}
for m in mealie_database['multi_purpose_labels']:
if supermarket_category := SupermarketCategory.objects.filter(name=m['name'], space=self.request.space).first():
supermarket_categories_dict[m['id']] = supermarket_category.pk
else:
supermarket_category = SupermarketCategory.objects.create(name=m['name'], space=self.request.space)
supermarket_categories_dict[m['id']] = supermarket_category.pk
self.import_log.msg += f"Importing {len(mealie_database["ingredient_foods"])} foods...\n"
self.import_log.save()
foods_dict = {}
for f in mealie_database['ingredient_foods']:
if food := Food.objects.filter(name=f['name'], space=self.request.space).first():
foods_dict[f['id']] = food.pk
else:
food = {'name': f['name'],
'plural_name': f['plural_name'],
'description': f['description'],
'space': self.request.space}
if f['label_id'] and f['label_id'] in supermarket_categories_dict:
food['supermarket_category_id'] = supermarket_categories_dict[f['label_id']]
food = Food.objects.create(**food)
if f['on_hand']:
food.onhand_users.add(self.request.user)
foods_dict[f['id']] = food.pk
self.import_log.msg += f"Importing {len(mealie_database["ingredient_units"])} units...\n"
self.import_log.save()
units_dict = {}
for u in mealie_database['ingredient_units']:
if unit := Unit.objects.filter(name=u['name'], space=self.request.space).first():
units_dict[u['id']] = unit.pk
else:
unit = Unit.objects.create(name=u['name'], plural_name=u['plural_name'], description=u['description'], space=self.request.space)
units_dict[u['id']] = unit.pk
recipes_dict = {}
recipe_property_factor_dict = {}
recipes = []
recipe_keyword_relation = []
for r in mealie_database['recipes']:
if Recipe.objects.filter(space=self.request.space, name=r['name']).exists() and not self.import_duplicates:
self.import_log.msg += f"Ignoring {r['name']} because a recipe with this name already exists.\n"
self.import_log.save()
else:
recipe = Recipe.objects.create(
waiting_time=parse_time(r['perform_time']),
working_time=parse_time(r['prep_time']),
description=r['description'][:512],
name=r['name'],
source_url=r['org_url'],
servings=r['recipe_servings'] if r['recipe_servings'] and r['recipe_servings'] != 0 else 1,
servings_text=r['recipe_yield'].strip() if r['recipe_yield'] else "",
internal=True,
created_at=r['created_at'],
space=self.request.space,
created_by=self.request.user,
)
if not self.nutrition_per_serving:
recipe_property_factor_dict[r['id']] = recipe.servings
self.import_log.msg += self.get_recipe_processed_msg(recipe)
self.import_log.imported_recipes += 1
self.import_log.save()
recipes.append(recipe)
recipes_dict[r['id']] = recipe.pk
recipe_keyword_relation.append(Recipe.keywords.through(recipe_id=recipe.pk, keyword_id=self.keyword.pk))
Recipe.keywords.through.objects.bulk_create(recipe_keyword_relation, ignore_conflicts=True)
self.import_log.msg += f"Importing {len(mealie_database["recipe_instructions"])} instructions...\n"
self.import_log.save()
steps_relation = []
first_step_of_recipe_dict = {}
for s in mealie_database['recipe_instructions']:
if s['recipe_id'] in recipes_dict:
step = Step.objects.create(instruction=(s['text'] if s['text'] else "") + (f" \n {s['summary']}" if s['summary'] else ""),
order=s['position'],
name=s['title'],
space=self.request.space)
steps_relation.append(Recipe.steps.through(recipe_id=recipes_dict[s['recipe_id']], step_id=step.pk))
if s['recipe_id'] not in first_step_of_recipe_dict:
first_step_of_recipe_dict[s['recipe_id']] = step.pk
# a recipe can have ingredients but no steps; in that case create an empty step to attach them to later
for r in recipes_dict.keys():
if r not in first_step_of_recipe_dict:
step = Step.objects.create(instruction='',
order=0,
name='',
space=self.request.space)
steps_relation.append(Recipe.steps.through(recipe_id=recipes_dict[r], step_id=step.pk))
first_step_of_recipe_dict[r] = step.pk
for n in mealie_database['notes']:
if n['recipe_id'] in recipes_dict:
step = Step.objects.create(instruction=n['text'],
name=n['title'],
order=100,
space=self.request.space)
steps_relation.append(Recipe.steps.through(recipe_id=recipes_dict[n['recipe_id']], step_id=step.pk))
Recipe.steps.through.objects.bulk_create(steps_relation)
ingredient_parser = IngredientParser(self.request, True)
self.import_log.msg += f"Importing {len(mealie_database["recipes_ingredients"])} ingredients...\n"
self.import_log.save()
ingredients_relation = []
for i in mealie_database['recipes_ingredients']:
if i['recipe_id'] in recipes_dict:
if i['title']:
title_ingredient = Ingredient.objects.create(
note=i['title'],
is_header=True,
space=self.request.space,
)
ingredients_relation.append(Step.ingredients.through(step_id=first_step_of_recipe_dict[i['recipe_id']], ingredient_id=title_ingredient.pk))
if i['food_id']:
ingredient = Ingredient.objects.create(
food_id=foods_dict[i['food_id']] if i['food_id'] in foods_dict else None,
unit_id=units_dict[i['unit_id']] if i['unit_id'] in units_dict else None,
original_text=i['original_text'],
order=i['position'],
amount=i['quantity'] if i['quantity'] else 0,
note=i['note'],
space=self.request.space,
)
ingredients_relation.append(Step.ingredients.through(step_id=first_step_of_recipe_dict[i['recipe_id']], ingredient_id=ingredient.pk))
elif i['note'].strip():
amount, unit, food, note = ingredient_parser.parse(i['note'].strip())
f = ingredient_parser.get_food(food)
u = ingredient_parser.get_unit(unit)
ingredient = Ingredient.objects.create(
food=f,
unit=u,
amount=amount,
note=note,
original_text=i['original_text'],
space=self.request.space,
)
ingredients_relation.append(Step.ingredients.through(step_id=first_step_of_recipe_dict[i['recipe_id']], ingredient_id=ingredient.pk))
Step.ingredients.through.objects.bulk_create(ingredients_relation)
self.import_log.msg += f"Importing {len(mealie_database["recipes_to_categories"]) + len(mealie_database["recipes_to_tags"])} category and keyword relations...\n"
self.import_log.save()
recipe_keyword_relation = []
for rC in mealie_database['recipes_to_categories']:
if rC['recipe_id'] in recipes_dict:
recipe_keyword_relation.append(Recipe.keywords.through(recipe_id=recipes_dict[rC['recipe_id']], keyword_id=keywords_categories_dict[rC['category_id']]))
for rT in mealie_database['recipes_to_tags']:
if rT['recipe_id'] in recipes_dict:
recipe_keyword_relation.append(Recipe.keywords.through(recipe_id=recipes_dict[rT['recipe_id']], keyword_id=keywords_tags_dict[rT['tag_id']]))
Recipe.keywords.through.objects.bulk_create(recipe_keyword_relation, ignore_conflicts=True)
self.import_log.msg += f"Importing {len(mealie_database["recipe_nutrition"])} properties...\n"
self.import_log.save()
property_types_dict = {
'calories': PropertyType.objects.get_or_create(name=_('Calories'), space=self.request.space, defaults={'unit': 'kcal', 'fdc_id': 1008})[0],
'carbohydrate_content': PropertyType.objects.get_or_create(name=_('Carbohydrates'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1005})[0],
'cholesterol_content': PropertyType.objects.get_or_create(name=_('Cholesterol'), space=self.request.space, defaults={'unit': 'mg', 'fdc_id': 1253})[0],
'fat_content': PropertyType.objects.get_or_create(name=_('Fat'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1004})[0],
'fiber_content': PropertyType.objects.get_or_create(name=_('Fiber'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1079})[0],
'protein_content': PropertyType.objects.get_or_create(name=_('Protein'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1003})[0],
'saturated_fat_content': PropertyType.objects.get_or_create(name=_('Saturated Fat'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1258})[0],
'sodium_content': PropertyType.objects.get_or_create(name=_('Sodium'), space=self.request.space, defaults={'unit': 'mg', 'fdc_id': 1093})[0],
'sugar_content': PropertyType.objects.get_or_create(name=_('Sugar'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1063})[0],
'trans_fat_content': PropertyType.objects.get_or_create(name=_('Trans Fat'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1257})[0],
'unsaturated_fat_content': PropertyType.objects.get_or_create(name=_('Unsaturated Fat'), space=self.request.space, defaults={'unit': 'g'})[0],
}
with transaction.atomic():
recipe_properties_relation = []
properties_relation = []
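# the Mealie recipe id is temporarily stashed in open_data_food_slug so the bulk-created properties can be matched back to their recipes below; the field is cleared again afterwards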
for r in mealie_database['recipe_nutrition']:
if r['recipe_id'] in recipes_dict:
for key in property_types_dict:
if r[key]:
properties_relation.append(
Property(property_type_id=property_types_dict[key].pk,
property_amount=Decimal(str(r[key])) / (
Decimal(str(recipe_property_factor_dict[r['recipe_id']])) if r['recipe_id'] in recipe_property_factor_dict else 1),
open_data_food_slug=r['recipe_id'],
space=self.request.space))
properties = Property.objects.bulk_create(properties_relation)
property_ids = []
for p in properties:
recipe_properties_relation.append(Recipe.properties.through(recipe_id=recipes_dict[p.open_data_food_slug], property_id=p.pk))
property_ids.append(p.pk)
Recipe.properties.through.objects.bulk_create(recipe_properties_relation, ignore_conflicts=True)
Property.objects.filter(id__in=property_ids).update(open_data_food_slug=None)
# delete unused property types
for pT in property_types_dict:
try:
property_types_dict[pT].delete()
except Exception:
pass
self.import_log.msg += f"Importing {len(mealie_database["recipe_comments"]) + len(mealie_database["recipe_timeline_events"])} comments and cook logs...\n"
self.import_log.save()
cook_log_list = []
for c in mealie_database['recipe_comments']:
if c['recipe_id'] in recipes_dict:
cook_log_list.append(CookLog(
recipe_id=recipes_dict[c['recipe_id']],
comment=c['text'],
created_at=c['created_at'],
created_by=self.request.user,
space=self.request.space,
))
for c in mealie_database['recipe_timeline_events']:
if c['recipe_id'] in recipes_dict:
if c['event_type'] == 'comment':
cook_log_list.append(CookLog(
recipe_id=recipes_dict[c['recipe_id']],
comment=c['message'],
created_at=c['created_at'],
created_by=self.request.user,
space=self.request.space,
))
CookLog.objects.bulk_create(cook_log_list)
if self.import_meal_plans:
self.import_log.msg += f"Importing {len(mealie_database["group_meal_plans"])} meal plans...\n"
self.import_log.save()
meal_types_dict = {}
meal_plans = []
for m in mealie_database['group_meal_plans']:
if m['recipe_id'] in recipes_dict:
if not m['entry_type'] in meal_types_dict:
meal_type = MealType.objects.get_or_create(name=m['entry_type'], created_by=self.request.user, space=self.request.space)[0]
meal_types_dict[m['entry_type']] = meal_type.pk
meal_plans.append(MealPlan(
recipe_id=recipes_dict[m['recipe_id']] if m['recipe_id'] else None,
title=m['title'] if m['title'] else "",
note=m['text'] if m['text'] else "",
from_date=m['date'],
to_date=m['date'],
meal_type_id=meal_types_dict[m['entry_type']],
created_by=self.request.user,
space=self.request.space,
))
MealPlan.objects.bulk_create(meal_plans)
if self.import_shopping_lists:
self.import_log.msg += f"Importing {len(mealie_database["shopping_list_items"])} shopping list items...\n"
self.import_log.save()
shopping_list_items = []
for sli in mealie_database['shopping_list_items']:
if not sli['checked']:
if sli['food_id']:
shopping_list_items.append(ShoppingListEntry(
amount=sli['quantity'],
unit_id=units_dict[sli['unit_id']] if sli['unit_id'] else None,
food_id=foods_dict[sli['food_id']] if sli['food_id'] else None,
created_by=self.request.user,
space=self.request.space,
))
elif not sli['food_id'] and sli['note'].strip():
amount, unit, food, note = ingredient_parser.parse(sli['note'].strip())
f = ingredient_parser.get_food(food)
u = ingredient_parser.get_unit(unit)
shopping_list_items.append(ShoppingListEntry(
amount=amount,
unit=u,
food=f,
created_by=self.request.user,
space=self.request.space,
))
ShoppingListEntry.objects.bulk_create(shopping_list_items)
self.import_log.msg += f"Importing Images. This might take some time ...\n"
self.import_log.save()
for r in mealie_database['recipes']:
try:
if recipe := Recipe.objects.filter(pk=recipes_dict[r['id']]).first():
self.import_recipe_image(recipe, BytesIO(file.read(f'data/recipes/{str(uuid.UUID(str(r["id"])))}/images/original.webp')), filetype='.webp')
except Exception:
pass
return recipes
def get_file_from_recipe(self, recipe):
raise NotImplementedError('Method not implemented in storage integration')
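Throughout the importer above, many-to-many relations are attached in batches by instantiating the auto-generated through models and bulk-creating them. A minimal sketch of that pattern, using a hypothetical iterable of recipe/keyword pairs rather than the Mealie tables:

# build the join rows in memory, then write them with a single query
relations = [
    Recipe.keywords.through(recipe_id=recipe.pk, keyword_id=keyword.pk)
    for recipe, keyword in pairs  # hypothetical iterable of (Recipe, Keyword) tuples
]
# ignore_conflicts skips rows that already exist instead of raising an IntegrityError
Recipe.keywords.through.objects.bulk_create(relations, ignore_conflicts=True)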

34 file diffs suppressed because they are too large; 1 binary file not shown.

View File

@@ -0,0 +1,60 @@
# Generated by Django 4.2.22 on 2025-09-05 06:51
import cookbook.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('cookbook', '0223_auto_20250831_1111'),
]
operations = [
migrations.AddField(
model_name='space',
name='ai_credits_balance',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='space',
name='ai_credits_monthly',
field=models.IntegerField(default=100),
),
migrations.CreateModel(
name='AiProvider',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=128)),
('description', models.TextField(blank=True)),
('api_key', models.CharField(max_length=2048)),
('model_name', models.CharField(max_length=256)),
('url', models.CharField(blank=True, max_length=2048, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('space', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='cookbook.space')),
],
),
migrations.CreateModel(
name='AiLog',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('function', models.CharField(max_length=64)),
('credit_cost', models.DecimalField(decimal_places=4, max_digits=16)),
('credits_from_balance', models.BooleanField(default=False)),
('input_tokens', models.IntegerField(default=0)),
('output_tokens', models.IntegerField(default=0)),
('start_time', models.DateTimeField(null=True)),
('end_time', models.DateTimeField(null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('ai_provider', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='cookbook.aiprovider')),
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
('space', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cookbook.space')),
],
bases=(models.Model, cookbook.models.PermissionModelMixin),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 4.2.22 on 2025-09-08 19:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0224_space_ai_credits_balance_space_ai_credits_monthly_and_more'),
]
operations = [
migrations.AddField(
model_name='space',
name='ai_enabled',
field=models.BooleanField(default=True),
),
]

View File

@@ -0,0 +1,23 @@
# Generated by Django 4.2.22 on 2025-09-08 20:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0225_space_ai_enabled'),
]
operations = [
migrations.AddField(
model_name='aiprovider',
name='log_credit_cost',
field=models.BooleanField(default=True),
),
migrations.AlterField(
model_name='space',
name='ai_credits_monthly',
field=models.IntegerField(default=10000),
),
]

View File

@@ -0,0 +1,24 @@
# Generated by Django 4.2.22 on 2025-09-09 11:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0226_aiprovider_log_credit_cost_and_more'),
]
operations = [
migrations.AddField(
model_name='space',
name='ai_default_provider',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='space_ai_default_provider', to='cookbook.aiprovider'),
),
migrations.AlterField(
model_name='space',
name='ai_credits_balance',
field=models.DecimalField(decimal_places=4, default=0, max_digits=16),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 5.2.6 on 2025-09-10 20:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0001_squashed_0227_space_ai_default_provider_and_more'),
]
operations = [
migrations.AddField(
model_name='space',
name='space_setup_completed',
field=models.BooleanField(default=True),
),
]

View File

@@ -0,0 +1,26 @@
# Generated by Django 5.2.6 on 2025-09-24 17:20
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0228_space_space_setup_completed'),
]
operations = [
migrations.AlterModelOptions(
name='ailog',
options={'ordering': ('-created_at',)},
),
migrations.AlterModelOptions(
name='aiprovider',
options={'ordering': ('id',)},
),
migrations.AlterField(
model_name='storage',
name='token',
field=models.CharField(blank=True, max_length=4098, null=True),
),
]

View File

@@ -0,0 +1,15 @@
# Generated by Django 5.2.6 on 2025-09-25 18:56
from django.db import migrations
from django.contrib.postgres.operations import TrigramExtension, UnaccentExtension
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0229_alter_ailog_options_alter_aiprovider_options_and_more'),
]
operations = [
TrigramExtension(),
UnaccentExtension(),
]
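
This migration installs the pg_trgm and unaccent PostgreSQL extensions, which back trigram (fuzzy) and accent-insensitive search. A minimal query sketch, assuming the cookbook Recipe model and django.contrib.postgres; it is illustrative and not part of the migration:

from django.contrib.postgres.search import TrigramSimilarity
from cookbook.models import Recipe

# rank recipes by fuzzy name similarity; requires the pg_trgm extension installed above
matches = Recipe.objects.annotate(similarity=TrigramSimilarity('name', 'tomato soup')).filter(similarity__gt=0.1).order_by('-similarity')
# the unaccent extension additionally enables accent-insensitive lookups such as name__unaccent__icontains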

View File

@@ -329,6 +329,13 @@ class Space(ExportModelOperationsMixin('space'), models.Model):
demo = models.BooleanField(default=False)
food_inherit = models.ManyToManyField(FoodInheritField, blank=True)
space_setup_completed = models.BooleanField(default=True)
ai_enabled = models.BooleanField(default=True)
ai_credits_monthly = models.IntegerField(default=100)
ai_credits_balance = models.DecimalField(default=0, max_digits=16, decimal_places=4)
ai_default_provider = models.ForeignKey("AiProvider", on_delete=models.SET_NULL, null=True, blank=True, related_name='space_ai_default_provider')
internal_note = models.TextField(blank=True, null=True)
def safe_delete(self):
@@ -341,6 +348,9 @@ class Space(ExportModelOperationsMixin('space'), models.Model):
BookmarkletImport.objects.filter(space=self).delete()
CustomFilter.objects.filter(space=self).delete()
AiLog.objects.filter(space=self).delete()
AiProvider.objects.filter(space=self).delete()
Property.objects.filter(space=self).delete()
PropertyType.objects.filter(space=self).delete()
@@ -393,6 +403,55 @@ class Space(ExportModelOperationsMixin('space'), models.Model):
return self.name
class AiProvider(models.Model):
name = models.CharField(max_length=128)
description = models.TextField(blank=True)
# AiProviders can be global, so space=null is allowed (configurable by superusers)
space = models.ForeignKey(Space, on_delete=models.CASCADE, null=True)
api_key = models.CharField(max_length=2048)
model_name = models.CharField(max_length=256)
url = models.CharField(max_length=2048, blank=True, null=True)
log_credit_cost = models.BooleanField(default=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return self.name
class Meta:
ordering = ('id',)
class AiLog(models.Model, PermissionModelMixin):
F_FILE_IMPORT = 'FILE_IMPORT'
F_STEP_SORT = 'STEP_SORT'
F_FOOD_PROPERTIES = 'FOOD_PROPERTIES'
ai_provider = models.ForeignKey(AiProvider, on_delete=models.SET_NULL, null=True)
function = models.CharField(max_length=64)
credit_cost = models.DecimalField(max_digits=16, decimal_places=4)
# True if credits were drawn from the balance, otherwise they count against the monthly quota
credits_from_balance = models.BooleanField(default=False)
input_tokens = models.IntegerField(default=0)
output_tokens = models.IntegerField(default=0)
start_time = models.DateTimeField(null=True)
end_time = models.DateTimeField(null=True)
space = models.ForeignKey(Space, on_delete=models.CASCADE)
created_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return f"{self.function} {self.ai_provider.name} {self.created_at}"
class Meta:
ordering = ('-created_at',)
class ConnectorConfig(models.Model, PermissionModelMixin):
HOMEASSISTANT = 'HomeAssistant'
CONNECTER_TYPE = ((HOMEASSISTANT, 'HomeAssistant'),)
@@ -533,7 +592,7 @@ class Storage(models.Model, PermissionModelMixin):
)
username = models.CharField(max_length=128, blank=True, null=True)
password = models.CharField(max_length=128, blank=True, null=True)
token = models.CharField(max_length=512, blank=True, null=True)
token = models.CharField(max_length=4098, blank=True, null=True)
url = models.URLField(blank=True, null=True)
path = models.CharField(blank=True, default='', max_length=256)
created_by = models.ForeignKey(User, on_delete=models.PROTECT)
@@ -748,14 +807,7 @@ class Food(ExportModelOperationsMixin('food'), TreeModel, PermissionModelMixin):
self.delete()
return target
def delete(self):
if self.ingredient_set.all().exclude(step=None).count() > 0:
raise ProtectedError(self.name + _(" is part of a recipe step and cannot be deleted"), self.ingredient_set.all().exclude(step=None))
else:
return super().delete()
# MP_Tree move uses raw SQL to execute the move, so override it to force a save that triggers the post_save signal
def move(self, *args, **kwargs):
super().move(*args, **kwargs)
# treebeard bypasses the ORM, so an explicit save is needed to trigger post_save signals; retrieve the object again to avoid writing the previous state back to disk

View File

@@ -24,8 +24,9 @@ from rest_framework.fields import IntegerField
from cookbook.helper.CustomStorageClass import CachedS3Boto3Storage
from cookbook.helper.HelperFunctions import str2bool
from cookbook.helper.ai_helper import get_monthly_token_usage
from cookbook.helper.image_processing import is_file_type_allowed
from cookbook.helper.permission_helper import above_space_limit
from cookbook.helper.permission_helper import above_space_limit, create_space_for_user
from cookbook.helper.property_helper import FoodPropertyHelper
from cookbook.helper.shopping_helper import RecipeShoppingEditor
from cookbook.helper.unit_conversion_helper import UnitConversionHelper
@@ -36,7 +37,7 @@ from cookbook.models import (Automation, BookmarkletImport, Comment, CookLog, Cu
ShareLink, ShoppingListEntry, ShoppingListRecipe, Space,
Step, Storage, Supermarket, SupermarketCategory,
SupermarketCategoryRelation, Sync, SyncLog, Unit, UnitConversion,
UserFile, UserPreference, UserSpace, ViewLog, ConnectorConfig, SearchPreference, SearchFields)
UserFile, UserPreference, UserSpace, ViewLog, ConnectorConfig, SearchPreference, SearchFields, AiLog, AiProvider)
from cookbook.templatetags.custom_tags import markdown
from recipes.settings import AWS_ENABLED, MEDIA_URL, EMAIL_HOST
@@ -150,19 +151,22 @@ class CustomOnHandField(serializers.Field):
return instance
def to_representation(self, obj):
if not self.context["request"].user.is_authenticated:
try:
if not self.context["request"].user.is_authenticated:
return []
shared_users = []
if c := caches['default'].get(f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', None):
shared_users = c
else:
try:
shared_users = [x.id for x in list(self.context['request'].user.get_shopping_share())] + [self.context['request'].user.id]
caches['default'].set(f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', shared_users, timeout=5 * 60)
# TODO ugly hack that improves API performance significantly, should be done properly
except AttributeError: # Anonymous users (using share links) don't have shared users
pass
return obj.onhand_users.filter(id__in=shared_users).exists()
except AttributeError:
return []
shared_users = []
if c := caches['default'].get(f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', None):
shared_users = c
else:
try:
shared_users = [x.id for x in list(self.context['request'].user.get_shopping_share())] + [self.context['request'].user.id]
caches['default'].set(f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', shared_users, timeout=5 * 60)
# TODO ugly hack that improves API performance significantly, should be done properly
except AttributeError: # Anonymous users (using share links) don't have shared users
pass
return obj.onhand_users.filter(id__in=shared_users).exists()
def to_internal_value(self, data):
return data
@@ -325,12 +329,62 @@ class UserFileViewSerializer(serializers.ModelSerializer):
read_only_fields = ('id', 'file', 'file_download', 'file_size_kb', 'preview', 'created_by', 'created_at')
class AiProviderSerializer(serializers.ModelSerializer):
api_key = serializers.CharField(required=False, write_only=True)
def create(self, validated_data):
validated_data = self.handle_global_space_logic(validated_data)
return super().create(validated_data)
def update(self, instance, validated_data):
validated_data = self.handle_global_space_logic(validated_data, instance=instance)
return super().update(instance, validated_data)
def handle_global_space_logic(self, validated_data, instance=None):
"""
Allow superusers to create AI providers without a space; everyone else is restricted to their own space.
"""
if self.context['request'].user.is_superuser:
if ('space' not in validated_data or not validated_data['space']):
validated_data['space'] = None
else:
validated_data['space'] = self.context['request'].space
else:
if instance:
validated_data['space'] = instance.space
else:
validated_data['space'] = self.context['request'].space
if 'log_credit_cost' in validated_data and not self.context['request'].user.is_superuser:
del validated_data['log_credit_cost']
return validated_data
class Meta:
model = AiProvider
fields = ('id', 'name', 'description', 'api_key', 'model_name', 'url', 'log_credit_cost', 'space', 'created_at', 'updated_at')
read_only_fields = ('created_at', 'updated_at',)
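# Illustrative summary of handle_global_space_logic above:
#   superuser, no space in the payload    -> space=None, i.e. a global provider
#   superuser, space given in the payload -> the requesting user's space
#   regular user, create                  -> the requesting user's space is forced
#   regular user, update                  -> the instance keeps its current space
#   log_credit_cost is dropped from the payload for non-superusers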
class AiLogSerializer(serializers.ModelSerializer):
ai_provider = AiProviderSerializer(read_only=True)
class Meta:
model = AiLog
fields = ('id', 'ai_provider', 'function', 'credit_cost', 'credits_from_balance', 'input_tokens', 'output_tokens', 'start_time', 'end_time', 'created_by', 'created_at',
'updated_at')
read_only_fields = ('__all__',)
class SpaceSerializer(WritableNestedModelSerializer):
created_by = UserSerializer(read_only=True)
user_count = serializers.SerializerMethodField('get_user_count')
recipe_count = serializers.SerializerMethodField('get_recipe_count')
file_size_mb = serializers.SerializerMethodField('get_file_size_mb')
food_inherit = FoodInheritFieldSerializer(many=True)
user_count = serializers.SerializerMethodField('get_user_count', read_only=True)
recipe_count = serializers.SerializerMethodField('get_recipe_count', read_only=True)
file_size_mb = serializers.SerializerMethodField('get_file_size_mb', read_only=True)
ai_monthly_credits_used = serializers.SerializerMethodField('get_ai_monthly_credits_used', read_only=True)
ai_default_provider = AiProviderSerializer(required=False, allow_null=True)
food_inherit = FoodInheritFieldSerializer(many=True, required=False)
image = UserFileViewSerializer(required=False, many=False, allow_null=True)
nav_logo = UserFileViewSerializer(required=False, many=False, allow_null=True)
custom_space_theme = UserFileViewSerializer(required=False, many=False, allow_null=True)
@@ -350,6 +404,10 @@ class SpaceSerializer(WritableNestedModelSerializer):
def get_recipe_count(self, obj):
return Recipe.objects.filter(space=obj).count()
@extend_schema_field(int)
def get_ai_monthly_credits_used(self, obj):
return get_monthly_token_usage(obj)
@extend_schema_field(float)
def get_file_size_mb(self, obj):
try:
@@ -358,7 +416,36 @@ class SpaceSerializer(WritableNestedModelSerializer):
return 0
def create(self, validated_data):
raise ValidationError('Cannot create using this endpoint')
if Space.objects.filter(created_by=self.context['request'].user).count() >= self.context['request'].user.userpreference.max_owned_spaces:
raise serializers.ValidationError(
_('You have reached the maximum amount of spaces that can be owned by you.') + f' ({self.context["request"].user.userpreference.max_owned_spaces})')
name = None
if 'name' in validated_data:
name = validated_data['name']
user_space = create_space_for_user(self.context['request'].user, name)
return user_space.space
def update(self, instance, validated_data):
validated_data = self.filter_superuser_parameters(validated_data)
if 'name' in validated_data:
if Space.objects.filter(Q(name=validated_data['name']), ~Q(pk=instance.pk)).exists():
raise ValidationError(_('Space Name must be unique.'))
return super().update(instance, validated_data)
def filter_superuser_parameters(self, validated_data):
if 'ai_enabled' in validated_data and not self.context['request'].user.is_superuser:
del validated_data['ai_enabled']
if 'ai_credits_monthly' in validated_data and not self.context['request'].user.is_superuser:
del validated_data['ai_credits_monthly']
if 'ai_credits_balance' in validated_data and not self.context['request'].user.is_superuser:
del validated_data['ai_credits_balance']
return validated_data
class Meta:
model = Space
@@ -366,10 +453,11 @@ class SpaceSerializer(WritableNestedModelSerializer):
'id', 'name', 'created_by', 'created_at', 'message', 'max_recipes', 'max_file_storage_mb', 'max_users',
'allow_sharing', 'demo', 'food_inherit', 'user_count', 'recipe_count', 'file_size_mb',
'image', 'nav_logo', 'space_theme', 'custom_space_theme', 'nav_bg_color', 'nav_text_color',
'logo_color_32', 'logo_color_128', 'logo_color_144', 'logo_color_180', 'logo_color_192', 'logo_color_512', 'logo_color_svg',)
'logo_color_32', 'logo_color_128', 'logo_color_144', 'logo_color_180', 'logo_color_192', 'logo_color_512', 'logo_color_svg', 'ai_credits_monthly',
'ai_credits_balance', 'ai_monthly_credits_used', 'ai_enabled', 'ai_default_provider', 'space_setup_completed')
read_only_fields = (
'id', 'created_by', 'created_at', 'max_recipes', 'max_file_storage_mb', 'max_users', 'allow_sharing',
'demo',)
'demo', 'ai_monthly_credits_used')
class UserSpaceSerializer(WritableNestedModelSerializer):
@@ -758,28 +846,31 @@ class FoodSerializer(UniqueFieldsMixin, WritableNestedModelSerializer, ExtendedR
@extend_schema_field(bool)
def get_substitute_onhand(self, obj):
if not self.context["request"].user.is_authenticated:
try:
if not self.context["request"].user.is_authenticated:
return []
shared_users = []
if c := caches['default'].get(
f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', None):
shared_users = c
else:
try:
shared_users = [x.id for x in list(self.context['request'].user.get_shopping_share())] + [
self.context['request'].user.id]
caches['default'].set(
f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}',
shared_users, timeout=5 * 60)
# TODO ugly hack that improves API performance significantly, should be done properly
except AttributeError: # Anonymous users (using share links) don't have shared users
pass
filter = Q(id__in=obj.substitute.all())
if obj.substitute_siblings:
filter |= Q(path__startswith=obj.path[:Food.steplen * (obj.depth - 1)], depth=obj.depth)
if obj.substitute_children:
filter |= Q(path__startswith=obj.path, depth__gt=obj.depth)
return Food.objects.filter(filter).filter(onhand_users__id__in=shared_users).exists()
except AttributeError:
return []
shared_users = []
if c := caches['default'].get(
f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', None):
shared_users = c
else:
try:
shared_users = [x.id for x in list(self.context['request'].user.get_shopping_share())] + [
self.context['request'].user.id]
caches['default'].set(
f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}',
shared_users, timeout=5 * 60)
# TODO ugly hack that improves API performance significantly, should be done properly
except AttributeError: # Anonymous users (using share links) don't have shared users
pass
filter = Q(id__in=obj.substitute.all())
if obj.substitute_siblings:
filter |= Q(path__startswith=obj.path[:Food.steplen * (obj.depth - 1)], depth=obj.depth)
if obj.substitute_children:
filter |= Q(path__startswith=obj.path, depth__gt=obj.depth)
return Food.objects.filter(filter).filter(onhand_users__id__in=shared_users).exists()
def create(self, validated_data):
name = validated_data['name'].strip()
@@ -1038,7 +1129,7 @@ class RecipeOverviewSerializer(RecipeBaseSerializer):
fields = (
'id', 'name', 'description', 'image', 'keywords', 'working_time',
'waiting_time', 'created_by', 'created_at', 'updated_at',
'internal', 'private','servings', 'servings_text', 'rating', 'last_cooked', 'new', 'recent'
'internal', 'private', 'servings', 'servings_text', 'rating', 'last_cooked', 'new', 'recent'
)
# TODO making these fields read-only stops the generated API client ("RecipeOverview.ts") from emitting the second else block in RecipeOverviewToJSON, which leads to errors when using the API
# TODO find a solution (custom schema?) to keep these fields read-only (to save performance) while still generating a proper client (two serializers would probably do the trick)
@@ -1134,6 +1225,35 @@ class RecipeBatchUpdateSerializer(serializers.Serializer):
clear_description = serializers.BooleanField(required=False, allow_null=True)
class FoodBatchUpdateSerializer(serializers.Serializer):
foods = serializers.ListField(child=serializers.IntegerField())
category = serializers.IntegerField(required=False, allow_null=True)
substitute_add = serializers.ListField(child=serializers.IntegerField())
substitute_remove = serializers.ListField(child=serializers.IntegerField())
substitute_set = serializers.ListField(child=serializers.IntegerField())
substitute_remove_all = serializers.BooleanField(default=False)
inherit_fields_add = serializers.ListField(child=serializers.IntegerField())
inherit_fields_remove = serializers.ListField(child=serializers.IntegerField())
inherit_fields_set = serializers.ListField(child=serializers.IntegerField())
inherit_fields_remove_all = serializers.BooleanField(default=False)
child_inherit_fields_add = serializers.ListField(child=serializers.IntegerField())
child_inherit_fields_remove = serializers.ListField(child=serializers.IntegerField())
child_inherit_fields_set = serializers.ListField(child=serializers.IntegerField())
child_inherit_fields_remove_all = serializers.BooleanField(default=False)
substitute_children = serializers.BooleanField(required=False, allow_null=True)
substitute_siblings = serializers.BooleanField(required=False, allow_null=True)
ignore_shopping = serializers.BooleanField(required=False, allow_null=True)
on_hand = serializers.BooleanField(required=False, allow_null=True)
parent_remove = serializers.BooleanField(required=False, allow_null=True)
parent_set = serializers.IntegerField(required=False, allow_null=True)
class CustomFilterSerializer(SpacedModelSerializer, WritableNestedModelSerializer):
shared = UserSerializer(many=True, required=False)
@@ -1564,7 +1684,6 @@ class ServerSettingsSerializer(serializers.Serializer):
# TODO add all other relevant settings including path/url related ones?
shopping_min_autosync_interval = serializers.CharField()
enable_pdf_export = serializers.BooleanField()
enable_ai_import = serializers.BooleanField()
disable_external_connectors = serializers.BooleanField()
terms_url = serializers.CharField()
privacy_url = serializers.CharField()
@@ -1605,6 +1724,11 @@ class FdcQuerySerializer(serializers.Serializer):
foods = FdcQueryFoodsSerializer(many=True)
class GenericModelReferenceSerializer(serializers.Serializer):
id = serializers.IntegerField()
model = serializers.CharField()
name = serializers.CharField()
# Export/Import Serializers
class KeywordExportSerializer(KeywordSerializer):
@@ -1788,6 +1912,7 @@ class RecipeFromSourceResponseSerializer(serializers.Serializer):
class AiImportSerializer(serializers.Serializer):
ai_provider_id = serializers.IntegerField()
file = serializers.FileField(allow_null=True)
text = serializers.CharField(allow_null=True, allow_blank=True)
recipe_id = serializers.CharField(allow_null=True, allow_blank=True)
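AiImportSerializer describes the payload of the AI import endpoint that is wired up as api/ai-import/ further below. A hedged client sketch using requests; the host, token and file name are placeholders and the Bearer auth header is an assumption:

import requests

# multipart POST mirroring the serializer fields: ai_provider_id, file, text, recipe_id
response = requests.post(
    'https://tandoor.example/api/ai-import/',
    headers={'Authorization': 'Bearer <api-token>'},
    data={'ai_provider_id': 1, 'text': '', 'recipe_id': ''},
    files={'file': open('recipe_scan.jpg', 'rb')},
)
print(response.status_code, response.json())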

View File

@@ -0,0 +1,168 @@
import json
import pytest
from django.urls import reverse
from django_scopes import scopes_disabled
from cookbook.models import MealType, PropertyType, AiProvider
LIST_URL = 'api:aiprovider-list'
DETAIL_URL = 'api:aiprovider-detail'
@pytest.fixture()
def obj_1(space_1, a1_s1):
return AiProvider.objects.get_or_create(name='test_1', space=space_1)[0]
@pytest.fixture
def obj_2(space_1, a1_s1):
return AiProvider.objects.get_or_create(name='test_2', space=None)[0]
@pytest.mark.parametrize("arg", [
['a_u', 403],
['g1_s1', 403],
['u1_s1', 200],
['a1_s1', 200],
])
def test_list_permission(arg, request):
c = request.getfixturevalue(arg[0])
assert c.get(reverse(LIST_URL)).status_code == arg[1]
def test_list_space(obj_1, obj_2, u1_s1, u1_s2, space_2):
assert json.loads(u1_s1.get(reverse(LIST_URL)).content)['count'] == 2
assert json.loads(u1_s2.get(reverse(LIST_URL)).content)['count'] == 1
obj_1.space = space_2
obj_1.save()
assert json.loads(u1_s1.get(reverse(LIST_URL)).content)['count'] == 1
assert json.loads(u1_s2.get(reverse(LIST_URL)).content)['count'] == 2
obj_1.space = None
obj_1.save()
assert json.loads(u1_s1.get(reverse(LIST_URL)).content)['count'] == 2
assert json.loads(u1_s2.get(reverse(LIST_URL)).content)['count'] == 2
@pytest.mark.parametrize("arg", [
['a_u', 403],
['g1_s1', 403],
['u1_s1', 403],
['a1_s1', 200],
['g1_s2', 403],
['u1_s2', 403],
['a1_s2', 404],
])
def test_update(arg, request, obj_1):
c = request.getfixturevalue(arg[0])
r = c.patch(
reverse(
DETAIL_URL,
args={obj_1.id}
),
{'name': 'new'},
content_type='application/json'
)
response = json.loads(r.content)
assert r.status_code == arg[1]
if r.status_code == 200:
assert response['name'] == 'new'
@pytest.mark.parametrize("arg", [
['a_u', 403],
['g1_s1', 403],
['u1_s1', 403],
['a1_s1', 403],
['g1_s2', 403],
['u1_s2', 403],
['a1_s2', 403],
['s1_s1', 200],
])
def test_update_global(arg, request, obj_2):
c = request.getfixturevalue(arg[0])
r = c.patch(
reverse(
DETAIL_URL,
args={obj_2.id}
),
{'name': 'new'},
content_type='application/json'
)
response = json.loads(r.content)
assert r.status_code == arg[1]
if r.status_code == 200:
assert response['name'] == 'new'
@pytest.mark.parametrize("arg", [
['a_u', 403],
['g1_s1', 403],
['u1_s1', 403],
['a1_s1', 201],
])
def test_add(arg, request, u1_s2):
c = request.getfixturevalue(arg[0])
r = c.post(
reverse(LIST_URL),
{'name': 'test', 'api_key': 'test', 'model_name': 'test'},
content_type='application/json'
)
response = json.loads(r.content)
assert r.status_code == arg[1]
if r.status_code == 201:
assert response['name'] == 'test'
r = c.get(reverse(DETAIL_URL, args={response['id']}))
assert r.status_code == 200
r = u1_s2.get(reverse(DETAIL_URL, args={response['id']}))
assert r.status_code == 404
def test_delete(a1_s1, a1_s2, obj_1):
# admins cannot delete foreign space providers
r = a1_s2.delete(
reverse(
DETAIL_URL,
args={obj_1.id}
)
)
assert r.status_code == 404
# admins can delete their space providers
r = a1_s1.delete(
reverse(
DETAIL_URL,
args={obj_1.id}
)
)
assert r.status_code == 204
with scopes_disabled():
assert AiProvider.objects.count() == 0
def test_delete_global(a1_s1, s1_s1, obj_2):
# admins cannot delete global providers
r = a1_s1.delete(
reverse(
DETAIL_URL,
args={obj_2.id}
)
)
assert r.status_code == 403
# superusers can delete global providers
r = s1_s1.delete(
reverse(
DETAIL_URL,
args={obj_2.id}
)
)
assert r.status_code == 204
with scopes_disabled():
assert AiProvider.objects.count() == 0

View File

@@ -236,42 +236,6 @@ def test_delete(u1_s1, u1_s2, obj_1, obj_tree_1):
assert Food.find_problems() == ([], [], [], [], [])
def test_integrity(u1_s1, recipe_1_s1):
with scopes_disabled():
assert Food.objects.count() == 10
assert Ingredient.objects.count() == 10
f_1 = Food.objects.first()
# deleting food will fail because food is part of recipe
r = u1_s1.delete(
reverse(
DETAIL_URL,
args={f_1.id}
)
)
assert r.status_code == 403
with scopes_disabled():
i_1 = f_1.ingredient_set.first()
# remove Ingredient that references Food from recipe step
i_1.step_set.first().ingredients.remove(i_1)
assert Food.objects.count() == 10
assert Ingredient.objects.count() == 10
# deleting food will succeed because its not part of recipe and delete will cascade to Ingredient
r = u1_s1.delete(
reverse(
DETAIL_URL,
args={f_1.id}
)
)
assert r.status_code == 204
with scopes_disabled():
assert Food.objects.count() == 9
assert Ingredient.objects.count() == 9
def test_move(u1_s1, obj_tree_1, obj_2, obj_3, space_1):
with scope(space=space_1):
# for some reason the 'path' attribute changes between the factory and the test when using both obj_tree and obj

View File

@@ -7,6 +7,7 @@ from django.urls import reverse
from django_scopes import scopes_disabled
from cookbook.models import UserSpace
from recipes import settings
LIST_URL = 'api:space-list'
DETAIL_URL = 'api:space-detail'
@@ -45,7 +46,6 @@ def test_list_multiple(u1_s1, space_1, space_2):
assert u1_response['id'] == space_1.id
@pytest.mark.parametrize("arg", [
['a_u', 403],
['g1_s1', 403],
@@ -70,9 +70,9 @@ def test_update(arg, request, space_1, a1_s1):
@pytest.mark.parametrize("arg", [
['a_u', 403],
['g1_s1', 403],
['u1_s1', 403],
['a1_s1', 405],
['g1_s1', 201],
['u1_s1', 201],
['a1_s1', 201],
])
def test_add(arg, request, u1_s2):
c = request.getfixturevalue(arg[0])
@@ -90,3 +90,59 @@ def test_delete(u1_s1, u1_s2, a1_s1, space_1):
# even the space owner cannot delete their space via the API (this might change later, but for now it's only available in the UI)
r = a1_s1.delete(reverse(DETAIL_URL, args={space_1.id}))
assert r.status_code == 405
def test_superuser_parameters(space_1, a1_s1, s1_s1):
# ------- test as normal user -------
response = a1_s1.post(reverse(LIST_URL), {'name': 'test', 'ai_enabled': not settings.SPACE_AI_ENABLED, 'ai_credits_monthly': settings.SPACE_AI_CREDITS_MONTHLY + 100, 'ai_credits_balance': 100},
content_type='application/json')
assert response.status_code == 201
response = json.loads(response.content)
assert response['ai_enabled'] == settings.SPACE_AI_ENABLED
assert response['ai_credits_monthly'] == settings.SPACE_AI_CREDITS_MONTHLY
assert response['ai_credits_balance'] == 0
space_1.created_by = auth.get_user(a1_s1)
space_1.ai_enabled = False
space_1.ai_credits_monthly = 0
space_1.ai_credits_balance = 0
space_1.save()
response = a1_s1.patch(reverse(DETAIL_URL, args={space_1.id}), {'ai_enabled': True, 'ai_credits_monthly': 100, 'ai_credits_balance': 100},
content_type='application/json')
assert response.status_code == 200
space_1.refresh_from_db()
assert space_1.ai_enabled == False
assert space_1.ai_credits_monthly == 0
assert space_1.ai_credits_balance == 0
# ------- test as superuser -------
response = s1_s1.post(reverse(LIST_URL),
{'name': 'test', 'ai_enabled': not settings.SPACE_AI_ENABLED, 'ai_credits_monthly': settings.SPACE_AI_CREDITS_MONTHLY + 100, 'ai_credits_balance': 100},
content_type='application/json')
assert response.status_code == 201
response = json.loads(response.content)
assert response['ai_enabled'] == settings.SPACE_AI_ENABLED
assert response['ai_credits_monthly'] == settings.SPACE_AI_CREDITS_MONTHLY
assert response['ai_credits_balance'] == 0
space_1.created_by = auth.get_user(s1_s1)
space_1.ai_enabled = False
space_1.ai_credits_monthly = 0
space_1.ai_credits_balance = 0
space_1.save()
response = s1_s1.patch(reverse(DETAIL_URL, args={space_1.id}), {'ai_enabled': True, 'ai_credits_monthly': 100, 'ai_credits_balance': 100},
content_type='application/json')
assert response.status_code == 200
space_1.refresh_from_db()
assert space_1.ai_enabled == True
assert space_1.ai_credits_monthly == 100
assert space_1.ai_credits_balance == 100

View File

@@ -5,6 +5,8 @@ from django.contrib import auth
from django.urls import reverse
from django_scopes import scopes_disabled
from cookbook.models import UserSpace
LIST_URL = 'api:userspace-list'
DETAIL_URL = 'api:userspace-detail'
@@ -13,10 +15,10 @@ DETAIL_URL = 'api:userspace-detail'
['a_u', 403, 0],
['g1_s1', 200, 1], # sees only own user space
['u1_s1', 200, 1],
['a1_s1', 200, 3], # sees user space of all users in space
['a2_s1', 200, 1],
['a1_s1', 200, 4], # admins can see all other members
['a2_s1', 200, 4],
])
def test_list_permission(arg, request, space_1, g1_s1, u1_s1, a1_s1):
def test_list_permission(arg, request, space_1, g1_s1, u1_s1, a1_s1, a2_s1):
space_1.created_by = auth.get_user(a1_s1)
space_1.save()
@@ -27,6 +29,18 @@ def test_list_permission(arg, request, space_1, g1_s1, u1_s1, a1_s1):
assert len(json.loads(result.content)['results']) == arg[2]
def test_list_all_personal(space_2, u1_s1):
result = u1_s1.get(reverse('api:userspace-all-personal'))
assert result.status_code == 200
assert len(json.loads(result.content)) == 1
UserSpace.objects.create(user=auth.get_user(u1_s1), space=space_2)
result = u1_s1.get(reverse('api:userspace-all-personal'))
assert result.status_code == 200
assert len(json.loads(result.content)) == 2
@pytest.mark.parametrize("arg", [
['a_u', 403],
['g1_s1', 403],

View File

@@ -298,3 +298,11 @@ def a1_s2(client, space_2):
@pytest.fixture()
def a2_s2(client, space_2):
return create_user(client, space_2, group='admin')
@pytest.fixture()
def s1_s1(client, space_1):
client = create_user(client, space_1, group='admin')
user = auth.get_user(client)
user.is_superuser = True
user.save()
return client

View File

@@ -61,6 +61,8 @@ router.register(r'search-preference', api.SearchPreferenceViewSet)
router.register(r'user-space', api.UserSpaceViewSet)
router.register(r'view-log', api.ViewLogViewSet)
router.register(r'access-token', api.AccessTokenViewSet)
router.register(r'ai-provider', api.AiProviderViewSet)
router.register(r'ai-log', api.AiLogViewSet)
router.register(r'localization', api.LocalizationViewSet, basename='localization')
router.register(r'server-settings', api.ServerSettingsViewSet, basename='server-settings')
@@ -76,10 +78,11 @@ urlpatterns = [
path('setup/', views.setup, name='view_setup'),
path('no-group/', views.no_groups, name='view_no_group'),
path('space-overview/', views.space_overview, name='view_space_overview'),
path('switch-space/<int:space_id>', views.switch_space, name='view_switch_space'),
path('no-perm/', views.no_perm, name='view_no_perm'),
#path('space-overview/', views.space_overview, name='view_space_overview'),
#path('switch-space/<int:space_id>', views.switch_space, name='view_switch_space'),
#path('no-perm/', views.no_perm, name='view_no_perm'),
path('invite/<slug:token>', views.invite_link, name='view_invite'),
path('invite/<slug:token>/', views.invite_link, name='view_invite'),
path('system/', views.system, name='view_system'),
path('plugin/update/', views.plugin_update, name='view_plugin_update'),
@@ -101,6 +104,7 @@ urlpatterns = [
path('api/sync_all/', api.sync_all, name='api_sync'),
path('api/recipe-from-source/', api.RecipeUrlImportView.as_view(), name='api_recipe_from_source'),
path('api/ai-import/', api.AiImportView.as_view(), name='api_ai_import'),
path('api/ai-step-sort/', api.AiStepSortView.as_view(), name='api_ai_step_sort'),
path('api/import-open-data/', api.ImportOpenData.as_view(), name='api_import_open_data'),
path('api/ingredient-from-string/', api.ingredient_from_string, name='api_ingredient_from_string'),
path('api/fdc-search/', api.FdcSearchView.as_view(), name='api_fdc_search'),

View File

@@ -9,6 +9,7 @@ import threading
import traceback
import uuid
from collections import OrderedDict
from functools import wraps
from json import JSONDecodeError
from urllib.parse import unquote
from zipfile import ZipFile
@@ -18,15 +19,16 @@ import litellm
import redis
import requests
from PIL import UnidentifiedImageError
from PIL.ImImagePlugin import number
from PIL.features import check
from django.contrib import messages
from django.contrib.admin.utils import get_deleted_objects, NestedObjects
from django.contrib.auth.models import Group, User
from django.contrib.postgres.search import TrigramSimilarity
from django.core.cache import caches
from django.core.exceptions import FieldError, ValidationError
from django.core.files import File
from django.db.models import Case, Count, Exists, OuterRef, ProtectedError, Q, Subquery, Value, When
from django.db import DEFAULT_DB_ALIAS
from django.db.models import Case, Count, Exists, OuterRef, ProtectedError, Q, Subquery, Value, When, QuerySet
from django.db.models.deletion import Collector
from django.db.models.fields.related import ForeignObjectRel
from django.db.models.functions import Coalesce, Lower
from django.db.models.signals import post_save
@@ -35,7 +37,6 @@ from django.shortcuts import get_object_or_404, redirect
from django.urls import reverse
from django.utils import timezone
from django.utils.dateparse import parse_datetime
from django.utils.datetime_safe import date
from django.utils.translation import gettext as _
from django_scopes import scopes_disabled
from drf_spectacular.types import OpenApiTypes
@@ -65,6 +66,8 @@ from cookbook.connectors.connector_manager import ConnectorManager, ActionType
from cookbook.forms import ImportForm, ImportExportBase
from cookbook.helper import recipe_url_import as helper
from cookbook.helper.HelperFunctions import str2bool, validate_import_url
from cookbook.helper.ai_helper import has_monthly_token, can_perform_ai_request, AiCallbackHandler
from cookbook.helper.batch_edit_helper import add_to_relation, remove_from_relation, remove_all_from_relation, set_relation
from cookbook.helper.image_processing import handle_image
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.open_data_importer import OpenDataImporter
@@ -74,7 +77,7 @@ from cookbook.helper.permission_helper import (CustomIsAdmin, CustomIsOwner, Cus
CustomTokenHasScope, CustomUserPermission, IsReadOnlyDRF,
above_space_limit,
group_required, has_group_permission, is_space_owner,
switch_user_active_space
switch_user_active_space, CustomAiProviderPermission, IsCreateDRF
)
from cookbook.helper.recipe_search import RecipeSearch
from cookbook.helper.recipe_url_import import clean_dict, get_from_youtube_scraper, get_images_from_soup
@@ -85,7 +88,7 @@ from cookbook.models import (Automation, BookmarkletImport, ConnectorConfig, Coo
RecipeBookEntry, ShareLink, ShoppingListEntry,
ShoppingListRecipe, Space, Step, Storage, Supermarket, SupermarketCategory,
SupermarketCategoryRelation, Sync, SyncLog, Unit, UnitConversion,
UserFile, UserPreference, UserSpace, ViewLog, RecipeImport, SearchPreference, SearchFields
UserFile, UserPreference, UserSpace, ViewLog, RecipeImport, SearchPreference, SearchFields, AiLog, AiProvider
)
from cookbook.provider.dropbox import Dropbox
from cookbook.provider.local import Local
@@ -110,12 +113,13 @@ from cookbook.serializer import (AccessTokenSerializer, AutomationSerializer, Au
UserSerializer, UserSpaceSerializer, ViewLogSerializer,
LocalizationSerializer, ServerSettingsSerializer, RecipeFromSourceResponseSerializer, ShoppingListEntryBulkCreateSerializer, FdcQuerySerializer,
AiImportSerializer, ImportOpenDataSerializer, ImportOpenDataMetaDataSerializer, ImportOpenDataResponseSerializer, ExportRequestSerializer,
RecipeImportSerializer, ConnectorConfigSerializer, SearchPreferenceSerializer, SearchFieldsSerializer, RecipeBatchUpdateSerializer
RecipeImportSerializer, ConnectorConfigSerializer, SearchPreferenceSerializer, SearchFieldsSerializer, RecipeBatchUpdateSerializer,
AiProviderSerializer, AiLogSerializer, FoodBatchUpdateSerializer, GenericModelReferenceSerializer
)
from cookbook.version_info import TANDOOR_VERSION
from cookbook.views.import_export import get_integration
from recipes import settings
from recipes.settings import DRF_THROTTLE_RECIPE_URL_IMPORT, FDC_API_KEY, AI_RATELIMIT, AI_API_KEY, AI_MODEL_NAME
from recipes.settings import DRF_THROTTLE_RECIPE_URL_IMPORT, FDC_API_KEY, AI_RATELIMIT
DateExample = OpenApiExample('Date Format', value='1972-12-05', request_only=True)
BeforeDateExample = OpenApiExample('Before Date Format', value='-1972-12-05', request_only=True)
@@ -131,7 +135,7 @@ class LoggingMixin(object):
if settings.REDIS_HOST:
try:
d = date.today().isoformat()
d = timezone.now().isoformat()
space = request.space
endpoint = request.resolver_match.url_name
@@ -179,7 +183,10 @@ class StandardFilterModelViewSet(viewsets.ModelViewSet):
queryset = self.queryset
query = self.request.query_params.get('query', None)
if query is not None:
queryset = queryset.filter(name__icontains=query)
try:
queryset = queryset.filter(name__icontains=query)
except FieldError:
pass
updated_at = self.request.query_params.get('updated_at', None)
if updated_at is not None:
@@ -508,6 +515,143 @@ class TreeMixin(MergeMixin, FuzzyFilterMixin):
return Response(content, status=status.HTTP_400_BAD_REQUEST)
def paginate(func):
"""
pagination decorator for custom ViewSet actions
"""
@wraps(func)
def inner(self, *args, **kwargs):
queryset = func(self, *args, **kwargs)
assert isinstance(queryset, (list, QuerySet))
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(queryset, many=True)
return Response(serializer.data)
return inner
class DeleteRelationMixing:
"""
Mixin that adds custom API actions for checking the delete dependencies of a model instance.
"""
@staticmethod
def collect(obj):
# collector.nested() seems to not include protected objects but does include cascading ones
# collector.protected: objects that raise a Protected or Restricted error when deleting the object
# collector.field_updates: fields that get updated when deleting the object
# collector.model_objs: the objects that would be deleted together with the selected object
collector = NestedObjects(using=DEFAULT_DB_ALIAS)
collector.collect([obj])
return collector
@extend_schema(responses=GenericModelReferenceSerializer(many=True), parameters=[
OpenApiParameter(name='cache', description='If results can be cached or not', type=bool, default=True),
])
@decorators.action(detail=True, methods=['GET'], serializer_class=GenericModelReferenceSerializer)
@paginate
def protecting(self, request, pk):
"""
get a paginated list of objects that protect the selected object from being deleted
"""
obj = self.queryset.filter(pk=pk, space=request.space).first()
if obj:
CACHE_KEY = f'DELETE_COLLECTOR_{request.space.pk}_PROTECTING_{obj.__class__.__name__}_{obj.pk}'
cache = self.request.query_params.get('cache', "true") == "true"
if caches['default'].has_key(CACHE_KEY) and cache:
return caches['default'].get(CACHE_KEY)
collector = self.collect(obj)
protected_objects = []
for o in collector.protected:
protected_objects.append({
'id': o.pk,
'model': o.__class__.__name__,
'name': str(o),
})
caches['default'].set(CACHE_KEY, protected_objects, 60)
return protected_objects
else:
return []
@extend_schema(responses=GenericModelReferenceSerializer(many=True), parameters=[
OpenApiParameter(name='cache', description='If results can be cached or not', type=bool, default=True),
])
@decorators.action(detail=True, methods=['GET'], serializer_class=GenericModelReferenceSerializer)
@paginate
def cascading(self, request, pk):
"""
get a paginated list of objects that will be cascaded (deleted) when deleting the selected object
"""
obj = self.queryset.filter(pk=pk, space=request.space).first()
if obj:
CACHE_KEY = f'DELETE_COLLECTOR_{request.space.pk}_CASCADING_{obj.__class__.__name__}_{obj.pk}'
cache = self.request.query_params.get('cache', "true") == "true"
if caches['default'].has_key(CACHE_KEY) and cache:
return caches['default'].get(CACHE_KEY)
collector = self.collect(obj)
cascading_objects = []
for model, objs in collector.model_objs.items():
for o in objs:
if o.pk != pk and o.__class__.__name__ != obj.__class__.__name__:
cascading_objects.append({
'id': o.pk,
'model': o.__class__.__name__,
'name': str(o),
})
caches['default'].set(CACHE_KEY, cascading_objects, 60)
return cascading_objects
else:
return []
@extend_schema(responses=GenericModelReferenceSerializer(many=True), parameters=[
OpenApiParameter(name='cache', description='If results can be cached or not', type=bool, default=True),
])
@decorators.action(detail=True, methods=['GET'], serializer_class=GenericModelReferenceSerializer)
@paginate
def nulling(self, request, pk):
"""
get a paginated list of objects from which the selected object will be removed (set to null) when it is deleted
"""
obj = self.queryset.filter(pk=pk, space=request.space).first()
if obj:
CACHE_KEY = f'DELETE_COLLECTOR_{request.space.pk}_NULLING_{obj.__class__.__name__}_{obj.pk}'
cache = self.request.query_params.get('cache', "true") == "true"
if caches['default'].has_key(CACHE_KEY) and cache:
return caches['default'].get(CACHE_KEY)
collector = self.collect(obj)
nulling_objects = []
# field_updates is a dict of relations that will be updated and querysets of items affected
for key, value in collector.field_updates.items():
# iterate over each queryset for the relation
for qs in value:
# iterate over each object in the queryset of the relation
for o in qs:
nulling_objects.append(
{
'id': o.pk,
'model': o.__class__.__name__,
'name': str(o),
}
)
caches['default'].set(CACHE_KEY, nulling_objects, 60)
return nulling_objects
else:
return []
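# Illustrative usage of the actions above, assuming default DRF action routing
# (paths are guesses based on the router registrations, e.g. 'ai-provider'):
#   GET /api/ai-provider/<id>/protecting/  -> objects blocking deletion
#   GET /api/ai-provider/<id>/cascading/   -> objects that would be deleted along with it
#   GET /api/ai-provider/<id>/nulling/     -> objects whose reference to it would be set to null
# append ?cache=false to bypass the 60 second result cache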
@extend_schema_view(list=extend_schema(parameters=[
OpenApiParameter(name='filter_list', description='User IDs, repeat for multiple', type=str, many=True),
]))
@@ -541,9 +685,9 @@ class GroupViewSet(LoggingMixin, viewsets.ModelViewSet):
class SpaceViewSet(LoggingMixin, viewsets.ModelViewSet):
queryset = Space.objects
serializer_class = SpaceSerializer
permission_classes = [IsReadOnlyDRF & CustomIsGuest | CustomIsOwner & CustomIsAdmin & CustomTokenHasReadWriteScope]
permission_classes = [((IsReadOnlyDRF | IsCreateDRF) & CustomIsGuest) | CustomIsOwner & CustomIsAdmin & CustomTokenHasReadWriteScope]
pagination_class = DefaultPagination
http_method_names = ['get', 'patch']
http_method_names = ['get', 'post', 'put', 'patch']
def get_queryset(self):
return self.queryset.filter(
@@ -562,7 +706,7 @@ class SpaceViewSet(LoggingMixin, viewsets.ModelViewSet):
class UserSpaceViewSet(LoggingMixin, viewsets.ModelViewSet):
queryset = UserSpace.objects
serializer_class = UserSpaceSerializer
permission_classes = [(CustomIsSpaceOwner | CustomIsOwnerReadOnly) & CustomTokenHasReadWriteScope]
permission_classes = [(CustomIsSpaceOwner | (IsReadOnlyDRF & CustomIsUser) | CustomIsOwnerReadOnly) & CustomTokenHasReadWriteScope]
http_method_names = ['get', 'put', 'patch', 'delete']
pagination_class = DefaultPagination
@@ -576,10 +720,23 @@ class UserSpaceViewSet(LoggingMixin, viewsets.ModelViewSet):
if internal_note is not None:
self.queryset = self.queryset.filter(internal_note=internal_note)
if is_space_owner(self.request.user, self.request.space):
# >= admins can see all users, guest/user can only see themselves
if has_group_permission(self.request.user, ['admin']):
return self.queryset.filter(space=self.request.space)
else:
return self.queryset.filter(user=self.request.user, space=self.request.space)
return self.queryset.filter(space=self.request.space, user=self.request.user)
@extend_schema(responses=UserSpaceSerializer(many=True))
@decorators.action(detail=False, pagination_class=DefaultPagination, methods=['GET'], serializer_class=UserSpaceSerializer, )
def all_personal(self, request):
"""
return all UserSpace objects of the user requesting the endpoint, across all spaces
"""
with scopes_disabled():
self.queryset = self.queryset.filter(user=self.request.user)
return Response(self.serializer_class(self.queryset.all(), many=True, context={'request': self.request}).data)
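A hedged usage sketch for the all_personal action above; the host, URL path and token are placeholders, the real route depends on how the viewset is registered in the router:
import requests

# hypothetical host, route and token - adjust to the actual router registration
resp = requests.get(
    'https://tandoor.example.com/api/user-space/all_personal/',
    headers={'Authorization': 'Bearer <api-token>'},
)
for user_space in resp.json():
    print(user_space)  # one entry per space the authenticated user belongs to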
class UserPreferenceViewSet(LoggingMixin, viewsets.ModelViewSet):
@@ -617,7 +774,30 @@ class SearchPreferenceViewSet(LoggingMixin, viewsets.ModelViewSet):
return self.queryset.filter(user=self.request.user)
class StorageViewSet(LoggingMixin, viewsets.ModelViewSet):
class AiProviderViewSet(LoggingMixin, viewsets.ModelViewSet, DeleteRelationMixing):
queryset = AiProvider.objects
serializer_class = AiProviderSerializer
permission_classes = [CustomAiProviderPermission & CustomTokenHasReadWriteScope]
pagination_class = DefaultPagination
def get_queryset(self):
# read-only access to the space's own and the global (space-less) AiProviders
with scopes_disabled():
return self.queryset.filter(Q(space=self.request.space) | Q(space__isnull=True))
class AiLogViewSet(LoggingMixin, viewsets.ModelViewSet):
queryset = AiLog.objects
serializer_class = AiLogSerializer
permission_classes = [CustomIsUser & CustomTokenHasReadWriteScope]
http_method_names = ['get']
pagination_class = DefaultPagination
def get_queryset(self):
return self.queryset.filter(space=self.request.space)
class StorageViewSet(LoggingMixin, viewsets.ModelViewSet, DeleteRelationMixing):
# TODO handle delete protect error and adjust test
queryset = Storage.objects
serializer_class = StorageSerializer
@@ -628,7 +808,7 @@ class StorageViewSet(LoggingMixin, viewsets.ModelViewSet):
return self.queryset.filter(space=self.request.space)
class SyncViewSet(LoggingMixin, viewsets.ModelViewSet):
class SyncViewSet(LoggingMixin, viewsets.ModelViewSet, DeleteRelationMixing):
queryset = Sync.objects
serializer_class = SyncSerializer
permission_classes = [CustomIsAdmin & CustomTokenHasReadWriteScope]
@@ -689,7 +869,7 @@ class RecipeImportViewSet(LoggingMixin, viewsets.ModelViewSet):
return Response({'msg': 'ok'}, status=status.HTTP_200_OK)
class ConnectorConfigViewSet(LoggingMixin, viewsets.ModelViewSet):
class ConnectorConfigViewSet(LoggingMixin, viewsets.ModelViewSet, DeleteRelationMixing):
queryset = ConnectorConfig.objects
serializer_class = ConnectorConfigSerializer
permission_classes = [CustomIsAdmin & CustomTokenHasReadWriteScope]
@@ -699,7 +879,7 @@ class ConnectorConfigViewSet(LoggingMixin, viewsets.ModelViewSet):
return self.queryset.filter(space=self.request.space)
class SupermarketViewSet(LoggingMixin, StandardFilterModelViewSet):
class SupermarketViewSet(LoggingMixin, StandardFilterModelViewSet, DeleteRelationMixing):
queryset = Supermarket.objects
serializer_class = SupermarketSerializer
permission_classes = [CustomIsUser & CustomTokenHasReadWriteScope]
@@ -711,7 +891,7 @@ class SupermarketViewSet(LoggingMixin, StandardFilterModelViewSet):
# TODO does supermarket category have settings to support fuzzy filtering and/or merge?
class SupermarketCategoryViewSet(LoggingMixin, FuzzyFilterMixin, MergeMixin):
class SupermarketCategoryViewSet(LoggingMixin, FuzzyFilterMixin, MergeMixin, DeleteRelationMixing):
queryset = SupermarketCategory.objects
model = SupermarketCategory
serializer_class = SupermarketCategorySerializer
@@ -734,7 +914,7 @@ class SupermarketCategoryRelationViewSet(LoggingMixin, StandardFilterModelViewSe
return super().get_queryset()
class KeywordViewSet(LoggingMixin, TreeMixin):
class KeywordViewSet(LoggingMixin, TreeMixin, DeleteRelationMixing):
queryset = Keyword.objects
model = Keyword
serializer_class = KeywordSerializer
@@ -742,7 +922,7 @@ class KeywordViewSet(LoggingMixin, TreeMixin):
pagination_class = DefaultPagination
class UnitViewSet(LoggingMixin, MergeMixin, FuzzyFilterMixin):
class UnitViewSet(LoggingMixin, MergeMixin, FuzzyFilterMixin, DeleteRelationMixing):
queryset = Unit.objects
model = Unit
serializer_class = UnitSerializer
@@ -762,7 +942,7 @@ class FoodInheritFieldViewSet(LoggingMixin, viewsets.ReadOnlyModelViewSet):
return super().get_queryset()
class FoodViewSet(LoggingMixin, TreeMixin):
class FoodViewSet(LoggingMixin, TreeMixin, DeleteRelationMixing):
queryset = Food.objects
model = Food
serializer_class = FoodSerializer
@@ -908,6 +1088,82 @@ class FoodViewSet(LoggingMixin, TreeMixin):
return JsonResponse({'msg': 'there was an error parsing the FDC data, please check the server logs'},
status=500, json_dumps_params={'indent': 4})
@extend_schema(
parameters=[
OpenApiParameter(name='provider', description='ID of the AI provider that should be used for this AI request', type=int),
]
)
@decorators.action(detail=True, methods=['POST'], )
def aiproperties(self, request, pk):
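"""
use the selected AI provider to fill the food's property values for all property types defined in the space
"""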
serializer = RecipeSerializer(data=request.data, partial=True, context={'request': request})
if serializer.is_valid():
if not request.query_params.get('provider', None) or not re.match(r'^(\d)+$', request.query_params.get('provider', None)):
response = {
'error': True,
'msg': _('You must select an AI provider to perform your request.'),
}
return Response(response, status=status.HTTP_400_BAD_REQUEST)
if not can_perform_ai_request(request.space):
response = {
'error': True,
'msg': _("You don't have any credits remaining to use AI or AI features are not enabled for your space."),
}
return Response(response, status=status.HTTP_400_BAD_REQUEST)
ai_provider = AiProvider.objects.filter(pk=request.query_params.get('provider')).filter(Q(space=request.space) | Q(space__isnull=True)).first()
litellm.callbacks = [AiCallbackHandler(request.space, request.user, ai_provider, AiLog.F_FOOD_PROPERTIES)]
property_type_list = list(PropertyType.objects.filter(space=request.space).values('id', 'name', 'description', 'unit', 'category', 'fdc_id'))
messages = [
{
"role": "user",
"content": [
{
"type": "text",
"text": "Given the following food and the following different types of properties please update the food so that the properties attribute contains a list with all property types in the following format [{property_amount: <the property value>, property_type: {id: <the ID of the property type>, name: <the name of the property type>}}]."
"The property values should be in the unit given in the property type and for the amount specified in the properties_food_amount attribute of the food, which is given in the properties_food_unit."
"property_amount is a decimal number. Please try to keep a percision of two decimal places if given in your source data."
"Do not make up any data. If there is no data available for the given property type that is ok, just return null as a property_amount for that property type. Do not change anything else!"
"Most property types are likely going to be nutritional values. Please do not make up any values, only return values you can find in the sources available to you."
"Only return values if you are sure they are meant for the food given. Under no circumstance are you allowed to change any other value of the given food or change the structure in any way or form."
},
{
"type": "text",
"text": json.dumps(request.data)
},
{
"type": "text",
"text": json.dumps(property_type_list)
},
]
},
]
try:
ai_request = {
'api_key': ai_provider.api_key,
'model': ai_provider.model_name,
'response_format': {"type": "json_object"},
'messages': messages,
}
if ai_provider.url:
ai_request['api_base'] = ai_provider.url
ai_response = completion(**ai_request)
response_text = ai_response.choices[0].message.content
return Response(json.loads(response_text), status=status.HTTP_200_OK)
except BadRequestError as err:
response = {
'error': True,
'msg': 'The AI could not process your request. \n\n' + err.message,
}
return Response(response, status=status.HTTP_400_BAD_REQUEST)
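The prompt above pins down the shape of the properties list; for illustration, a response following that format could look roughly like this (the food fields and numbers are invented, only the property_amount/property_type structure is taken from the prompt):
illustrative_ai_response = {
    'name': 'Butter',                       # unchanged food fields
    'properties_food_amount': 100,
    'properties_food_unit': {'name': 'g'},
    'properties': [
        {
            'property_amount': 81.11,       # two decimal places, or None if unknown
            'property_type': {'id': 1, 'name': 'Fats'},
        },
    ],
}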
def destroy(self, *args, **kwargs):
try:
return super().destroy(*args, **kwargs)
@@ -915,6 +1171,94 @@ class FoodViewSet(LoggingMixin, TreeMixin):
content = {'error': True, 'msg': e.args[0]}
return Response(content, status=status.HTTP_403_FORBIDDEN)
@decorators.action(detail=False, methods=['PUT'], serializer_class=FoodBatchUpdateSerializer)
def batch_update(self, request):
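"""
batch update common fields and relations (category, shopping flags, on hand, substitutes, inherit fields, parent) for a list of foods
"""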
serializer = self.serializer_class(data=request.data, partial=True)
if serializer.is_valid():
foods = Food.objects.filter(id__in=serializer.validated_data['foods'], space=self.request.space)
safe_food_ids = Food.objects.filter(id__in=serializer.validated_data['foods'], space=self.request.space).values_list('id', flat=True)
if 'category' in serializer.validated_data:
foods.update(supermarket_category_id=serializer.validated_data['category'])
if 'ignore_shopping' in serializer.validated_data and serializer.validated_data['ignore_shopping'] is not None:
foods.update(ignore_shopping=serializer.validated_data['ignore_shopping'])
if 'on_hand' in serializer.validated_data and serializer.validated_data['on_hand'] is not None:
if serializer.validated_data['on_hand']:
user_relation = []
for f in safe_food_ids:
user_relation.append(Food.onhand_users.through(food_id=f, user_id=request.user.id))
Food.onhand_users.through.objects.bulk_create(user_relation, ignore_conflicts=True, unique_fields=('food_id', 'user_id',))
else:
Food.onhand_users.through.objects.filter(food_id__in=safe_food_ids, user_id=request.user.id).delete()
if 'substitute_children' in serializer.validated_data and serializer.validated_data['substitute_children'] is not None:
foods.update(substitute_children=serializer.validated_data['substitute_children'])
if 'substitute_siblings' in serializer.validated_data and serializer.validated_data['substitute_siblings'] is not None:
foods.update(substitute_siblings=serializer.validated_data['substitute_siblings'])
# ---------- substitutes -------------
if 'substitute_add' in serializer.validated_data:
add_to_relation(Food.substitute.through, 'from_food_id', safe_food_ids, 'to_food_id', serializer.validated_data['substitute_add'])
if 'substitute_remove' in serializer.validated_data:
remove_from_relation(Food.substitute.through, 'from_food_id', safe_food_ids, 'to_food_id', serializer.validated_data['substitute_remove'])
if 'substitute_set' in serializer.validated_data and len(serializer.validated_data['substitute_set']) > 0:
set_relation(Food.substitute.through, 'from_food_id', safe_food_ids, 'to_food_id', serializer.validated_data['substitute_set'])
if 'substitute_remove_all' in serializer.validated_data and serializer.validated_data['substitute_remove_all']:
remove_all_from_relation(Food.substitute.through, 'from_food_id', safe_food_ids)
# ---------- inherit fields -------------
if 'inherit_fields_add' in serializer.validated_data:
add_to_relation(Food.inherit_fields.through, 'food_id', safe_food_ids, 'foodinheritfield_id', serializer.validated_data['inherit_fields_add'])
if 'inherit_fields_remove' in serializer.validated_data:
remove_from_relation(Food.inherit_fields.through, 'food_id', safe_food_ids, 'foodinheritfield_id', serializer.validated_data['inherit_fields_remove'])
if 'inherit_fields_set' in serializer.validated_data and len(serializer.validated_data['inherit_fields_set']) > 0:
set_relation(Food.inherit_fields.through, 'food_id', safe_food_ids, 'foodinheritfield_id', serializer.validated_data['inherit_fields_set'])
if 'inherit_fields_remove_all' in serializer.validated_data and serializer.validated_data['inherit_fields_remove_all']:
remove_all_from_relation(Food.inherit_fields.through, 'food_id', safe_food_ids)
# ---------- child inherit fields -------------
if 'child_inherit_fields_add' in serializer.validated_data:
add_to_relation(Food.child_inherit_fields.through, 'food_id', safe_food_ids, 'foodinheritfield_id', serializer.validated_data['child_inherit_fields_add'])
if 'child_inherit_fields_remove' in serializer.validated_data:
remove_from_relation(Food.child_inherit_fields.through, 'food_id', safe_food_ids, 'foodinheritfield_id', serializer.validated_data['child_inherit_fields_remove'])
if 'child_inherit_fields_set' in serializer.validated_data and len(serializer.validated_data['child_inherit_fields_set']) > 0:
set_relation(Food.child_inherit_fields.through, 'food_id', safe_food_ids, 'foodinheritfield_id', serializer.validated_data['child_inherit_fields_set'])
if 'child_inherit_fields_remove_all' in serializer.validated_data and serializer.validated_data['child_inherit_fields_remove_all']:
remove_all_from_relation(Food.child_inherit_fields.through, 'food_id', safe_food_ids)
# ------- parent --------
if self.model.node_order_by:
node_location = 'sorted'
else:
node_location = 'last'
if 'parent_remove' in serializer.validated_data and serializer.validated_data['parent_remove']:
for f in foods:
f.move(Food.get_first_root_node(), f'{node_location}-sibling')
if 'parent_set' in serializer.validated_data:
parent_food = Food.objects.filter(space=request.space, id=serializer.validated_data['parent_set']).first()
if parent_food:
for f in foods:
f.move(parent_food, f'{node_location}-child')
return Response({}, 200)
return Response(serializer.errors, 400)
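batch_update delegates the through-table bookkeeping to add_to_relation, remove_from_relation, set_relation and remove_all_from_relation; a hedged sketch of helpers matching that call signature, not the repository's actual implementation:
def add_to_relation(through, source_field, source_ids, target_field, target_ids):
    # create any missing (source, target) rows in the m2m through table
    rows = [through(**{source_field: s, target_field: t}) for s in source_ids for t in target_ids]
    through.objects.bulk_create(rows, ignore_conflicts=True)

def remove_from_relation(through, source_field, source_ids, target_field, target_ids):
    # delete only the given (source, target) combinations
    through.objects.filter(**{f'{source_field}__in': source_ids, f'{target_field}__in': target_ids}).delete()

def remove_all_from_relation(through, source_field, source_ids):
    # clear the relation completely for the given source objects
    through.objects.filter(**{f'{source_field}__in': source_ids}).delete()

def set_relation(through, source_field, source_ids, target_field, target_ids):
    # replace whatever was related before with exactly the given targets
    remove_all_from_relation(through, source_field, source_ids)
    add_to_relation(through, source_field, source_ids, target_field, target_ids)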
@extend_schema_view(list=extend_schema(parameters=[
OpenApiParameter(name='order_field', description='Field to order recipe books on', type=str,
@@ -922,7 +1266,7 @@ class FoodViewSet(LoggingMixin, TreeMixin):
OpenApiParameter(name='order_direction', description='Order ascending or descending', type=str,
enum=['asc', 'desc']),
]))
class RecipeBookViewSet(LoggingMixin, StandardFilterModelViewSet):
class RecipeBookViewSet(LoggingMixin, StandardFilterModelViewSet, DeleteRelationMixing):
queryset = RecipeBook.objects
serializer_class = RecipeBookSerializer
permission_classes = [(CustomIsOwner | CustomIsShared) & CustomTokenHasReadWriteScope]
@@ -1076,7 +1420,7 @@ class AutoPlanViewSet(LoggingMixin, mixins.CreateModelMixin, viewsets.GenericVie
return Response(serializer.errors, 400)
class MealTypeViewSet(LoggingMixin, viewsets.ModelViewSet):
class MealTypeViewSet(LoggingMixin, viewsets.ModelViewSet, DeleteRelationMixing):
"""
returns list of meal types created by the
requesting user ordered by the order field.
@@ -1108,7 +1452,19 @@ class IngredientViewSet(LoggingMixin, viewsets.ModelViewSet):
return self.serializer_class
def get_queryset(self):
queryset = self.queryset.filter(step__recipe__space=self.request.space)
queryset = self.queryset.prefetch_related('food',
'food__properties',
'food__properties__property_type',
'food__inherit_fields',
'food__supermarket_category',
'food__onhand_users',
'food__substitute',
'food__child_inherit_fields',
'unit',
'unit__unit_conversion_base_relation',
'unit__unit_conversion_base_relation__base_unit',
'unit__unit_conversion_converted_relation',
'unit__unit_conversion_converted_relation__converted_unit', ).filter(step__recipe__space=self.request.space)
food = self.request.query_params.get('food', None)
if food and re.match(r'^(\d)+$', food):
queryset = queryset.filter(food_id=food)
@@ -1214,7 +1570,7 @@ class RecipePagination(PageNumberPagination):
OpenApiParameter(name='filter', description=_('ID of a custom filter. Returns all recipes matched by that filter.'), type=int),
OpenApiParameter(name='makenow', description=_('Filter recipes that can be made with OnHand food. [''true''/''<b>false</b>'']'), type=bool),
]))
class RecipeViewSet(LoggingMixin, viewsets.ModelViewSet):
class RecipeViewSet(LoggingMixin, viewsets.ModelViewSet, DeleteRelationMixing):
queryset = Recipe.objects
serializer_class = RecipeSerializer
# TODO split read and write permission for meal plan guest
@@ -1496,7 +1852,7 @@ class UnitConversionViewSet(LoggingMixin, viewsets.ModelViewSet):
enum=[m[0] for m in PropertyType.CHOICES])
]
))
class PropertyTypeViewSet(LoggingMixin, viewsets.ModelViewSet):
class PropertyTypeViewSet(LoggingMixin, viewsets.ModelViewSet, DeleteRelationMixing):
queryset = PropertyType.objects
serializer_class = PropertyTypeSerializer
permission_classes = [CustomIsUser & CustomTokenHasReadWriteScope]
@@ -1727,7 +2083,7 @@ class BookmarkletImportViewSet(LoggingMixin, viewsets.ModelViewSet):
return self.queryset.filter(space=self.request.space).all()
class UserFileViewSet(LoggingMixin, StandardFilterModelViewSet):
class UserFileViewSet(LoggingMixin, StandardFilterModelViewSet, DeleteRelationMixing):
queryset = UserFile.objects
serializer_class = UserFileSerializer
permission_classes = [CustomIsUser & CustomTokenHasReadWriteScope]
@@ -1779,8 +2135,8 @@ class InviteLinkViewSet(LoggingMixin, StandardFilterModelViewSet):
if internal_note is not None:
self.queryset = self.queryset.filter(internal_note=internal_note)
unused = self.request.query_params.get('unused', False)
if unused:
used = self.request.query_params.get('used', False)
if not used:
self.queryset = self.queryset.filter(used_by=None)
if is_space_owner(self.request.user, self.request.space):
@@ -2000,6 +2356,24 @@ class AiImportView(APIView):
if serializer.is_valid():
# TODO max file size check
if 'ai_provider_id' not in serializer.validated_data:
response = {
'error': True,
'msg': _('You must select an AI provider to perform your request.'),
}
return Response(RecipeFromSourceResponseSerializer(context={'request': request}).to_representation(response), status=status.HTTP_400_BAD_REQUEST)
if not can_perform_ai_request(request.space):
response = {
'error': True,
'msg': _("You don't have any credits remaining to use AI or AI features are not enabled for your space."),
}
return Response(RecipeFromSourceResponseSerializer(context={'request': request}).to_representation(response), status=status.HTTP_400_BAD_REQUEST)
ai_provider = AiProvider.objects.filter(pk=serializer.validated_data['ai_provider_id']).filter(Q(space=request.space) | Q(space__isnull=True)).first()
litellm.callbacks = [AiCallbackHandler(request.space, request.user, ai_provider, AiLog.F_FILE_IMPORT)]
messages = []
uploaded_file = serializer.validated_data['file']
@@ -2068,7 +2442,15 @@ class AiImportView(APIView):
return Response(RecipeFromSourceResponseSerializer(context={'request': request}).to_representation(response), status=status.HTTP_400_BAD_REQUEST)
try:
ai_response = completion(api_key=AI_API_KEY, model=AI_MODEL_NAME, response_format={"type": "json_object"}, messages=messages, )
ai_request = {
'api_key': ai_provider.api_key,
'model': ai_provider.model_name,
'response_format': {"type": "json_object"},
'messages': messages,
}
if ai_provider.url:
ai_request['api_base'] = ai_provider.url
ai_response = completion(**ai_request)
except BadRequestError as err:
response = {
'error': True,
@@ -2107,6 +2489,80 @@ class AiImportView(APIView):
return Response(RecipeFromSourceResponseSerializer(context={'request': request}).to_representation(response), status=status.HTTP_400_BAD_REQUEST)
class AiStepSortView(APIView):
throttle_classes = [AiEndpointThrottle]
permission_classes = [CustomIsUser & CustomTokenHasReadWriteScope]
@extend_schema(request=RecipeSerializer(many=False), responses=RecipeSerializer(many=False),
parameters=[
OpenApiParameter(name='provider', description='ID of the AI provider that should be used for this AI request', type=int),
])
def post(self, request, *args, **kwargs):
"""
given a recipe as JSON, use AI to split its instructions into multiple coherent steps and to sort the ingredients into the steps where they are needed, without changing any field values
"""
serializer = RecipeSerializer(data=request.data, partial=True, context={'request': request})
if serializer.is_valid():
if not request.query_params.get('provider', None) or not re.match(r'^(\d)+$', request.query_params.get('provider', None)):
response = {
'error': True,
'msg': _('You must select an AI provider to perform your request.'),
}
return Response(response, status=status.HTTP_400_BAD_REQUEST)
if not can_perform_ai_request(request.space):
response = {
'error': True,
'msg': _("You don't have any credits remaining to use AI or AI features are not enabled for your space."),
}
return Response(response, status=status.HTTP_400_BAD_REQUEST)
ai_provider = AiProvider.objects.filter(pk=request.query_params.get('provider')).filter(Q(space=request.space) | Q(space__isnull=True)).first()
litellm.callbacks = [AiCallbackHandler(request.space, request.user, ai_provider, AiLog.F_STEP_SORT)]
messages = [
{
"role": "user",
"content": [
{
"type": "text",
"text": "You are given data for a recipe formatted as json. You cannot under any circumstance change the value of any of the fields. You are only allowed to split the instructions into multiple steps and to sort the ingredients to their appropriate step. Your goal is to properly structure the recipe by splitting large instructions into multiple coherent steps and putting the ingredients that belong to this step into the ingredients list. Generally an ingredient of a cooking recipe should occur in the first step where its needed. Please sort the ingredients to the appropriate steps without changing any of the actual field values. Return the recipe in the same format you were given as json. Do not change any field value like strings or numbers, or change the sorting, also do not change the language."
},
{
"type": "text",
"text": json.dumps(request.data)
},
]
},
]
try:
ai_request = {
'api_key': ai_provider.api_key,
'model': ai_provider.model_name,
'response_format': {"type": "json_object"},
'messages': messages,
}
if ai_provider.url:
ai_request['api_base'] = ai_provider.url
ai_response = completion(**ai_request)
response_text = ai_response.choices[0].message.content
# TODO validate by loading/dumping using serializer ?
return Response(json.loads(response_text), status=status.HTTP_200_OK)
except BadRequestError as err:
response = {
'error': True,
'msg': 'The AI could not process your request. \n\n' + err.message,
}
return Response(response, status=status.HTTP_400_BAD_REQUEST)
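A hedged call sketch for this endpoint: the recipe JSON is sent as the request body and the AI provider is chosen via the provider query parameter declared in the schema above; the URL path is a placeholder, the real route depends on the urlconf:
import requests

recipe_json = {}  # placeholder: use a full recipe payload fetched from the recipe API
# hypothetical host, route and token
resp = requests.post(
    'https://tandoor.example.com/api/ai-step-sort/',
    params={'provider': 1},
    json=recipe_json,
    headers={'Authorization': 'Bearer <api-token>'},
)
restructured = resp.json()  # same field values, instructions split into sorted steps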
class AppImportView(APIView):
parser_classes = [MultiPartParser]
throttle_classes = [RecipeImportThrottle]
@@ -2127,7 +2583,13 @@ class AppImportView(APIView):
files = []
for f in request.FILES.getlist('files'):
files.append({'file': io.BytesIO(f.read()), 'name': f.name})
t = threading.Thread(target=integration.do_import, args=[files, il, form.cleaned_data['duplicates']])
t = threading.Thread(target=integration.do_import,
args=[files, il, form.cleaned_data['duplicates']],
kwargs={'meal_plans': form.cleaned_data['meal_plans'],
'shopping_lists': form.cleaned_data['shopping_lists'],
'nutrition_per_serving': form.cleaned_data['nutrition_per_serving']
}
)
t.daemon = True
t.start()
@@ -2373,7 +2835,6 @@ class ServerSettingsViewSet(viewsets.GenericViewSet):
# Attention: No login required, do not return sensitive data
s['shopping_min_autosync_interval'] = settings.SHOPPING_MIN_AUTOSYNC_INTERVAL
s['enable_pdf_export'] = settings.ENABLE_PDF_EXPORT
s['enable_ai_import'] = settings.AI_API_KEY != ''
s['disable_external_connectors'] = settings.DISABLE_EXTERNAL_CONNECTORS
s['terms_url'] = settings.TERMS_URL
s['privacy_url'] = settings.PRIVACY_URL
@@ -2546,10 +3007,9 @@ def ingredient_from_string(request):
if unit:
if unit_obj := Unit.objects.filter(space=request.space).filter(Q(name=unit) | Q(plural_name=unit)).first():
ingredient['food'] = {'name': unit_obj.name, 'id': unit_obj.id}
ingredient['unit'] = {'name': unit_obj.name, 'id': unit_obj.id}
else:
unit_obj = Unit.objects.create(space=request.space, name=unit)
ingredient['food'] = {'name': unit_obj.name, 'id': unit_obj.id}
ingredient['unit'] = {'name': unit.name, 'id': unit.id}
ingredient['unit'] = {'name': unit_obj.name, 'id': unit_obj.id}
return JsonResponse(ingredient, status=200)
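For orientation, the returned ingredient dict would look roughly like this after the fix above; the amount and note keys and all values are assumptions based on the usual parser output, only the food and unit nesting is visible in this hunk:
parsed_ingredient = {
    'amount': 2.0,
    'unit': {'name': 'tbsp', 'id': 5},
    'food': {'name': 'sugar', 'id': 42},
    'note': '',
}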

View File

@@ -17,6 +17,7 @@ from cookbook.integration.copymethat import CopyMeThat
from cookbook.integration.default import Default
from cookbook.integration.domestica import Domestica
from cookbook.integration.mealie import Mealie
from cookbook.integration.mealie1 import Mealie1
from cookbook.integration.mealmaster import MealMaster
from cookbook.integration.melarecipes import MelaRecipes
from cookbook.integration.nextcloud_cookbook import NextcloudCookbook
@@ -45,6 +46,8 @@ def get_integration(request, export_type):
return NextcloudCookbook(request, export_type)
if export_type == ImportExportBase.MEALIE:
return Mealie(request, export_type)
if export_type == ImportExportBase.MEALIE1:
return Mealie1(request, export_type)
if export_type == ImportExportBase.CHOWDOWN:
return Chowdown(request, export_type)
if export_type == ImportExportBase.SAFFRON:

View File

@@ -54,7 +54,7 @@ def hook(request, token):
f = ingredient_parser.get_food(food)
u = ingredient_parser.get_unit(unit)
ShoppingListEntry.objects.create(food=f, unit=u, amount=amount, created_by=request.user, space=request.space)
ShoppingListEntry.objects.create(food=f, unit=u, amount=max(1, amount), created_by=request.user, space=request.space)
return JsonResponse({'data': data['message']['text']})
except Exception:
