Mirror of https://github.com/TandoorRecipes/recipes.git (synced 2025-12-25 11:19:39 -05:00)

Compare commits (293 commits)
Commit SHA1s (the author, date, and message columns were not captured in this mirror view):

d26faf14b1, d5d5c2c52b, 7ffabfe711, 49e0b5b962, a05f1ece24, 748b91bb8a, bd2e9cc3d9, c40bb20a7a,
b377d2cd35, dc0e91d0f9, 5f12907544, 889ddac7dc, b369e2618a, 5a4e0204c9, bfc2e96b54, f065ef80aa,
61c14b8b05, 35d5d64809, 63c711d18c, 59e3ea70d1, 6771662a9f, 9b792a1393, 862957c121, bdcbafd52f,
5e454a5212, 20bea63997, 8a265772c0, 6febb4e3e8, 04f9167fd8, 8f29e01daf, e810363b22, b5a2120bdf,
643fcbad9b, 4a3b834463, 003149133a, a43de0ca4d, e05aaed75c, 4984e3e31b, 11dce4c6ad, 8d0d338ea2,
d09e629415, 53ef2ef99f, d7b26d1b29, 673d12d233, 6359245925, a7c4822322, e94419f320, 01f46483ff,
d6da5688af, 680ae39201, 2472ee9c26, 4428b06d4a, e9c38d7d5e, 6f28d58807, 88db611f0a, f3302b4014,
d4bb161275, 32f1538938, 029baea4c7, 38d1b7cef5, 85821bcc94, 2345af8fd6, 51107c64ee, 81983c5ae2,
f7713a43a7, ffd951a7f4, 319ac8e191, e292b72e34, 4e795ecf55, e3c2a66723, eec3e97f97, 3f481d6922,
0810ab7210, abd621145c, 7d218aa93d, 1b41bd9115, d456fcf0f2, aea247b4a3, e2843bb02f, e3aa3e1137,
da1187b03a, f9ed79978c, 920a3ed4a3, 2077eae142, b1ef35e415, 0a687d840c, 6a3034b966, 3d7afbfe4f,
02e43730bd, 6adf077ee5, d73ffa46ff, 8572f338ad, 920ec8e74b, 2328bf2342, 85620a1431, 0037858885,
9df3ff0028, 0a43272126, ff96eb194f, 6b69c4184b, e90e21181c, 5237228673, ecb3172085, b4f4e9fd2b,
6d0f3b99c8, cdb94ae628, 0d589444fd, 95fa420c3a, dd4dc1083f, 04f889b742, 67d374c071, 8d749e351d,
417ffcab5d, d4f654554b, c8115545b8, 6dbf0871ec, f1c5c8bc43, 22e0108992, e2e05c8d1d, b02b36812d,
0c0012aab8, e562883da3, 7f6025c99c, a81bc335cc, ebee1ccd4b, b1104b4581, f5e952d88c, 968fcc3936,
73d3d87217, 9050f648f9, a4a9e104b5, 3ed85ea0c4, da8ceb7abe, 5ff3a6bb2e, 3ed750b330, 0315911802,
1fe96f2b3d, 11761c0b15, a7b0a1ab30, e4fcae3b00, b0401639f1, 5f8770f502, aaa627d3b6, e77734f696,
73df6bb961, 7d187b638e, 68bb750f8c, 1e35035540, 561ba2f1da, bd600301f9, cf5483a4d9, ba417c49dd,
0259b1dc08, 34553dadd7, 535b88c8db, 71eb8818b5, 0dc94a817f, 4df862c7f3, 69f013c980, 23c420dda8,
63daf1e958, 52b44eacdd, fa7cc12b99, 64d2108ef6, dccfdcc11c, 974e72631d, 70f31b8553, 3cb980c0e7,
b8a403b7c1, b037d90220, ad32e457fa, 8e2726caeb, e693737c57, 9f239c06d3, 0f551c5f88, eb224a769d,
4515eba9d7, 30b37bf0b6, f17207e56e, 2cba0e18af, ec6e81316a, b72897b222, bca1ebbf99, f0342d4568,
81f62de500, f783949a61, 820fad1b5c, 1169abd942, 48e175f58f, 5450e18342, ea590f8e49, 13626ca11b,
f53fe1e3c4, d177316b47, 338db1fac2, 377619473c, 000962c5bb, 9228c1d59f, 27007de7a0, 29c99b66a1,
bc179f430d, 58c412ad95, 4f248afe76, f722d24eaa, 723b74509f, ad4b1393dd, 04bab7072c, 6391cee9eb,
14884fc0d4, f2191f79dd, c2533d9ea2, db72fdb1bb, 78252662cb, 4e078bf477, 2e9e226fe0, 18cfbd80ab,
4d284b4fff, b1128dd134, 3aebf58406, f3816a77df, e4183d79ab, f4aa1a083f, ed5508b576, 040e247487,
5d28c7b17d, 15b2df07f2, ed8f97e9e0, 034f68fc28, 0158087a0b, cb6bfd741d, afeee5f7cb, b43d6e08d4,
1188624376, 9ac837c969, fc4b017d30, 4636ac28f9, 397912e87f, d0b860e623, 8a90ed1274, 163c2a53b6,
b97e04ead8, 286d707347, 98d308aee9, fc236c97b4, a7c5240227, 75fcff8e70, 2f27cf4deb, 686b595f45,
0f9f9e8f7c, 7be7c5b954, 0853a9ec64, fa3daee965, aba45657c3, e6abdf8cd4, 6cedde7b2d, 741e9eb370,
7db523d8c4, 41f0060c43, 5572833f64, 780e441a3b, c4fd2d0b4e, 1c6618f452, 8c96a75a1e, f099e2e5d3,
774c05e76f, b08c39e284, ae036cfa9a, 37628c1735, 530a6db35c, 2930093da0, b7e63a466b, 5653aca056,
fdb05c5a9e, a35c92439c, eed09a7891, 2dffde4091, cdd700d2e6, ad6fe5fa4d, ac31c112f3, 0104b600cc,
7baad85112, 4b0bfa9a85, 5e7c75ef68, 954a35bea2, 88347d44c8, 2c13e76fbb, 362f634828, 2fb968cfd3,
4d3dab6edd, 8f1b593ad1, 1002f0d61f, 20cb218688, bba44b0c1e
.github/workflows/build-docker.yml (vendored, 4 changes)

@@ -21,7 +21,7 @@ jobs:
          suffix: ""
        continue-on-error: false
    steps:
-     - uses: actions/checkout@v4
+     - uses: actions/checkout@v5

      - name: Get version number
        id: get_version
@@ -35,7 +35,7 @@ jobs:
        fi

      # Build Vue 3 frontend
-     - uses: actions/setup-node@v4
+     - uses: actions/setup-node@v6
        with:
          node-version: '22'
          cache: yarn
.github/workflows/ci.yml (vendored, 8 changes)

@@ -12,15 +12,15 @@ jobs:
        python-version: ["3.12"]
        node-version: ["22"]
    steps:
-     - uses: actions/checkout@v4
-     - uses: awalsh128/cache-apt-pkgs-action@v1.5.1
+     - uses: actions/checkout@v5
+     - uses: awalsh128/cache-apt-pkgs-action@v1.6.0
        with:
          packages: libsasl2-dev python3-dev libxml2-dev libxmlsec1-dev libxslt-dev libxmlsec1-openssl libxslt-dev libldap2-dev libssl-dev gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev xmlsec-dev xmlsec build-base g++ curl
          version: 1.0

      # Setup python & dependencies
      - name: Set up Python ${{ matrix.python-version }}
-       uses: actions/setup-python@v5
+       uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
@@ -43,7 +43,7 @@ jobs:
      # Build Vue frontend & Dependencies
      - name: Set up Node ${{ matrix.node-version }}
        if: steps.django_cache.outputs.cache-hit != 'true'
-       uses: actions/setup-node@v4
+       uses: actions/setup-node@v6
        with:
          node-version: ${{ matrix.node-version }}
          cache: "yarn"
.github/workflows/codeql-analysis.yml (vendored, 6 changes)

@@ -12,7 +12,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
-       uses: actions/checkout@v4
+       uses: actions/checkout@v5
        with:
          # We must fetch at least the immediate parents so that if this is
          # a pull request then we can checkout the head.
@@ -25,7 +25,7 @@ jobs:

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
-       uses: github/codeql-action/init@v3
+       uses: github/codeql-action/init@v4
        # Override language selection by uncommenting this and choosing your languages
        with:
          languages: python, javascript
@@ -47,6 +47,6 @@ jobs:
          # make release

      - name: Perform CodeQL Analysis
-       uses: github/codeql-action/analyze@v3
+       uses: github/codeql-action/analyze@v4
        with:
          languages: javascript, python
.github/workflows/docs.yml (vendored, 4 changes)

@@ -12,8 +12,8 @@ jobs:
    if: github.repository_owner == 'TandoorRecipes' && ${{ github.event.workflow_run.conclusion == 'success' }}
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@v4
-     - uses: actions/setup-python@v5
+     - uses: actions/checkout@v5
+     - uses: actions/setup-python@v6
        with:
          python-version: 3.x
      - run: pip install mkdocs-material mkdocs-include-markdown-plugin
@@ -15,14 +15,14 @@
  <a href="https://discord.gg/RhzBrfWgtp" target="_blank" rel="noopener noreferrer"><img src="https://badgen.net/badge/icon/discord?icon=discord&label" ></a>
  <a href="https://hub.docker.com/r/vabene1111/recipes" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/docker/pulls/vabene1111/recipes" ></a>
  <a href="https://github.com/vabene1111/recipes/releases/latest" rel="noopener noreferrer"><img src="https://img.shields.io/github/v/release/vabene1111/recipes" ></a>
- <a href="https://app.tandoor.dev/accounts/login/?demo" rel="noopener noreferrer"><img src="https://img.shields.io/badge/demo-available-success" ></a>
+ <a href="https://app.tandoor.dev/e/demo-auto-login/" rel="noopener noreferrer"><img src="https://img.shields.io/badge/demo-available-success" ></a>
  </p>

  <p align="center">
  <a href="https://tandoor.dev" target="_blank" rel="noopener noreferrer">Website</a> •
  <a href="https://docs.tandoor.dev/install/docker/" target="_blank" rel="noopener noreferrer">Installation</a> •
  <a href="https://docs.tandoor.dev/" target="_blank" rel="noopener noreferrer">Docs</a> •
- <a href="https://app.tandoor.dev/accounts/login/?demo" target="_blank" rel="noopener noreferrer">Demo</a> •
+ <a href="https://app.tandoor.dev/e/demo-auto-login/" target="_blank" rel="noopener noreferrer">Demo</a> •
  <a href="https://community.tandoor.dev" target="_blank" rel="noopener noreferrer">Community</a> •
  <a href="https://discord.gg/RhzBrfWgtp" target="_blank" rel="noopener noreferrer">Discord</a>
  </p>
@@ -30,9 +30,11 @@
  

  ## Core Features

  - 🥗 **Manage your recipes** - Manage your ever-growing recipe collection
  - 📆 **Plan** - multiple meals for each day
  - 🛒 **Shopping lists** - via the meal plan or straight from recipes
  - 🪄 **Use AI** to recognize images, sort recipe steps, find nutrition facts and more
  - 📚 **Cookbooks** - collect recipes into books
  - 👪 **Share and collaborate** on recipes with friends and family

@@ -62,12 +64,13 @@ a public page.

  Documentation can be found [here](https://docs.tandoor.dev/).

- ## Support our work
+ ## ❤️ Support our work ❤️
  Tandoor is developed by volunteers in their free time just because it's fun. That said, earning
  some money with the project allows us to spend more time on it and thus make improvements we otherwise couldn't.
  Because of that, there are several ways you can support us:

  - **GitHub Sponsors** You can sponsor contributors of this project on GitHub: [vabene1111](https://github.com/sponsors/vabene1111)
  - **Patreon** You can sponsor contributors of this project on Patreon: [vabene1111](https://www.patreon.com/cw/vabene1111)
  - **Host at Hetzner** We have been very happy customers of Hetzner for multiple years for all of our projects. If you want to get into self-hosting or are tired of the expensive big providers, their cloud servers are a great place to get started. When you sign up via our [referral link](https://hetzner.cloud/?ref=ISdlrLmr9kGj) you will get 20€ worth of cloud credits and we get a small kickback too.
  - **Let us host for you** We are offering a [hosted version](https://app.tandoor.dev) where all profits support us and the development of Tandoor (currently only available in Germany).
boot.sh (31 changes)

@@ -2,7 +2,7 @@
  source venv/bin/activate

  # these are envsubst in the nginx config, make sure they default to something sensible when unset
- export TANDOOR_PORT="${TANDOOR_PORT:-8080}"
+ export TANDOOR_PORT="${TANDOOR_PORT:-80}"
  export MEDIA_ROOT=${MEDIA_ROOT:-/opt/recipes/mediafiles};
  export STATIC_ROOT=${STATIC_ROOT:-/opt/recipes/staticfiles};

@@ -12,16 +12,18 @@ GUNICORN_LOG_LEVEL="${GUNICORN_LOG_LEVEL:-'info'}"

  PLUGINS_BUILD="${PLUGINS_BUILD:-0}"

  if [ "${TANDOOR_PORT}" -eq 80 ]; then
      echo "TANDOOR_PORT set to 8080 because 80 is now taken by the integrated nginx"
      TANDOOR_PORT=8080
  fi

  display_warning() {
      echo "[WARNING]"
      echo -e "$1"
  }

  # prepare nginx config
  envsubst '$MEDIA_ROOT $STATIC_ROOT $TANDOOR_PORT' < /opt/recipes/http.d/Recipes.conf.template > /opt/recipes/http.d/Recipes.conf

  # start nginx early to display error pages
  echo "Starting nginx"
  nginx

  echo "Checking configuration..."

  # SECRET_KEY (or a valid file at SECRET_KEY_FILE) must be set in .env file
@@ -93,7 +95,7 @@ fi

  echo "Collecting static files, this may take a while..."

- python manage.py collectstatic --noinput
+ python manage.py collectstatic --noinput --clear

  echo "Done"

@@ -101,17 +103,6 @@ chmod -R 755 ${MEDIA_ROOT:-/opt/recipes/mediafiles}

  ipv6_disable=$(cat /sys/module/ipv6/parameters/disable)

- # prepare nginx config
- envsubst '$MEDIA_ROOT $STATIC_ROOT $TANDOOR_PORT' < /opt/recipes/http.d/Recipes.conf.template > /opt/recipes/http.d/Recipes.conf

- # start nginx
- echo "Starting nginx"
- nginx

  echo "Starting gunicorn"
- # Check if IPv6 is enabled, only then run gunicorn with ipv6 support
- if [ "$ipv6_disable" -eq 0 ]; then
-     exec gunicorn -b "[::]:$TANDOOR_PORT" --workers $GUNICORN_WORKERS --threads $GUNICORN_THREADS --access-logfile - --error-logfile - --log-level $GUNICORN_LOG_LEVEL recipes.wsgi
- else
-     exec gunicorn -b ":$TANDOOR_PORT" --workers $GUNICORN_WORKERS --threads $GUNICORN_THREADS --access-logfile - --error-logfile - --log-level $GUNICORN_LOG_LEVEL recipes.wsgi
- fi
+ exec gunicorn --bind unix:/run/tandoor.sock --workers $GUNICORN_WORKERS --threads $GUNICORN_THREADS --timeout ${GUNICORN_TIMEOUT:-30} --access-logfile - --error-logfile - --log-level $GUNICORN_LOG_LEVEL recipes.wsgi
@@ -17,7 +17,7 @@ from .models import (BookmarkletImport, Comment, CookLog, CustomFilter, Food, Im
                      ShoppingListEntry, ShoppingListRecipe, Space, Step, Storage,
                      Supermarket, SupermarketCategory, SupermarketCategoryRelation, Sync, SyncLog,
                      TelegramBot, Unit, UnitConversion, UserFile, UserPreference, UserSpace,
-                     ViewLog, ConnectorConfig)
+                     ViewLog, ConnectorConfig, AiProvider, AiLog)

admin.site.login = secure_admin_login(admin.site.login)

@@ -90,6 +90,20 @@ class SearchPreferenceAdmin(admin.ModelAdmin):
admin.site.register(SearchPreference, SearchPreferenceAdmin)


class AiProviderAdmin(admin.ModelAdmin):
    list_display = ('name', 'space', 'model_name',)
    search_fields = ('name', 'space', 'model_name',)


admin.site.register(AiProvider, AiProviderAdmin)


class AiLogAdmin(admin.ModelAdmin):
    list_display = ('ai_provider', 'function', 'credit_cost', 'created_by', 'created_at',)


admin.site.register(AiLog, AiLogAdmin)


class StorageAdmin(admin.ModelAdmin):
    list_display = ('name', 'method')
    search_fields = ('name',)
@@ -26,6 +26,7 @@ class ImportExportBase(forms.Form):
    PAPRIKA = 'PAPRIKA'
    NEXTCLOUD = 'NEXTCLOUD'
    MEALIE = 'MEALIE'
+   MEALIE1 = 'MEALIE1'
    CHOWDOWN = 'CHOWDOWN'
    SAFFRON = 'SAFFRON'
    CHEFTAP = 'CHEFTAP'
@@ -46,7 +47,7 @@ class ImportExportBase(forms.Form):
    PDF = 'PDF'
    GOURMET = 'GOURMET'

-   type = forms.ChoiceField(choices=((DEFAULT, _('Default')), (PAPRIKA, 'Paprika'), (NEXTCLOUD, 'Nextcloud Cookbook'), (MEALIE, 'Mealie'), (CHOWDOWN, 'Chowdown'),
+   type = forms.ChoiceField(choices=((DEFAULT, _('Default')), (PAPRIKA, 'Paprika'), (NEXTCLOUD, 'Nextcloud Cookbook'), (MEALIE, 'Mealie'), (MEALIE1, 'Mealie1'), (CHOWDOWN, 'Chowdown'),
                                      (SAFFRON, 'Saffron'), (CHEFTAP, 'ChefTap'), (PEPPERPLATE, 'Pepperplate'), (RECETTETEK, 'RecetteTek'), (RECIPESAGE, 'Recipe Sage'),
                                      (DOMESTICA, 'Domestica'), (MEALMASTER, 'MealMaster'), (REZKONV, 'RezKonv'), (OPENEATS, 'Openeats'), (RECIPEKEEPER, 'Recipe Keeper'),
                                      (PLANTOEAT, 'Plantoeat'), (COOKBOOKAPP, 'CookBookApp'), (COPYMETHAT, 'CopyMeThat'), (PDF, 'PDF'), (MELARECIPES, 'Melarecipes'),
@@ -75,6 +76,11 @@ class ImportForm(ImportExportBase):
    files = MultipleFileField(required=True)
    duplicates = forms.BooleanField(help_text=_('To prevent duplicates recipes with the same name as existing ones are ignored. Check this box to import everything.'),
                                    required=False)
+   meal_plans = forms.BooleanField(required=False)
+   shopping_lists = forms.BooleanField(required=False)
+   nutrition_per_serving = forms.BooleanField(required=False)  # some managers (e.g. mealie) do not specify what the nutrition's relate to so we let the user choose


class ExportForm(ImportExportBase):
    recipes = forms.ModelMultipleChoiceField(widget=MultiSelectWidget, queryset=Recipe.objects.none(), required=False)
    all = forms.BooleanField(required=False)
cookbook/helper/ai_helper.py (new file, 85 lines)

@@ -0,0 +1,85 @@
from decimal import Decimal

from django.utils import timezone
from django.db.models import Sum
from litellm import CustomLogger

from cookbook.models import AiLog
from recipes import settings


def get_monthly_token_usage(space):
    """
    returns the number of credits the space has used in the current month
    """
    token_usage = AiLog.objects.filter(space=space, credits_from_balance=False, created_at__month=timezone.now().month).aggregate(Sum('credit_cost'))['credit_cost__sum']
    if token_usage is None:
        token_usage = 0
    return token_usage


def has_monthly_token(space):
    """
    checks if the monthly credit limit has been exceeded
    """
    return get_monthly_token_usage(space) < space.ai_credits_monthly


def can_perform_ai_request(space):
    return (has_monthly_token(space) or space.ai_credits_balance > 0) and space.ai_enabled


class AiCallbackHandler(CustomLogger):
    space = None
    user = None
    ai_provider = None
    function = None

    def __init__(self, space, user, ai_provider, function):
        super().__init__()
        self.space = space
        self.user = user
        self.ai_provider = ai_provider
        self.function = function

    def log_pre_api_call(self, model, messages, kwargs):
        pass

    def log_post_api_call(self, kwargs, response_obj, start_time, end_time):
        pass

    def log_success_event(self, kwargs, response_obj, start_time, end_time):
        self.create_ai_log(kwargs, response_obj, start_time, end_time)

    def log_failure_event(self, kwargs, response_obj, start_time, end_time):
        self.create_ai_log(kwargs, response_obj, start_time, end_time)

    def create_ai_log(self, kwargs, response_obj, start_time, end_time):
        credit_cost = 0
        credits_from_balance = False
        if self.ai_provider.log_credit_cost:
            credit_cost = kwargs.get("response_cost", 0) * 100

        if (not has_monthly_token(self.space)) and self.space.ai_credits_balance > 0:
            remaining_balance = self.space.ai_credits_balance - Decimal(str(credit_cost))
            if remaining_balance < 0:
                remaining_balance = 0
                if settings.HOSTED and self.space.ai_credits_monthly == 0:
                    self.space.ai_enabled = False

            self.space.ai_credits_balance = remaining_balance
            credits_from_balance = True
            self.space.save()

        AiLog.objects.create(
            created_by=self.user,
            space=self.space,
            ai_provider=self.ai_provider,
            start_time=start_time,
            end_time=end_time,
            input_tokens=response_obj['usage']['prompt_tokens'],
            output_tokens=response_obj['usage']['completion_tokens'],
            function=self.function,
            credit_cost=credit_cost,
            credits_from_balance=credits_from_balance,
        )
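A usage sketch for these helpers follows. Only `can_perform_ai_request` and `AiCallbackHandler` come from the file above; the endpoint function, the `AiProvider` field names and the exact litellm registration mechanism (`litellm.callbacks`) are assumptions.

```python
# Minimal sketch, assuming a litellm-based completion call inside a Django view or service.
import litellm

from cookbook.helper.ai_helper import AiCallbackHandler, can_perform_ai_request


def describe_image(request, ai_provider, prompt):
    # refuse the request when the space has neither monthly credits nor a balance, or AI is disabled
    if not can_perform_ai_request(request.space):
        raise PermissionError('AI is disabled or the credit limit for this space is exhausted')

    # the handler writes an AiLog entry (tokens, cost, duration) on success and on failure
    handler = AiCallbackHandler(request.space, request.user, ai_provider, function='describe_image')
    litellm.callbacks = [handler]  # assumed registration mechanism for CustomLogger instances

    return litellm.completion(
        model=ai_provider.model_name,  # assumed field name on AiProvider
        messages=[{'role': 'user', 'content': prompt}],
    )
```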
cookbook/helper/batch_edit_helper.py (new file, 22 lines)

@@ -0,0 +1,22 @@
def add_to_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids):
    """
    given a model, the base and related field and the base and related ids, bulk create relation objects
    """
    relation_objects = []
    for b in base_ids:
        for r in related_ids:
            relation_objects.append(relation_model(**{base_field_name: b, related_field_name: r}))
    relation_model.objects.bulk_create(relation_objects, ignore_conflicts=True, unique_fields=(base_field_name, related_field_name,))


def remove_from_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids):
    relation_model.objects.filter(**{f'{base_field_name}__in': base_ids, f'{related_field_name}__in': related_ids}).delete()


def remove_all_from_relation(relation_model, base_field_name, base_ids):
    relation_model.objects.filter(**{f'{base_field_name}__in': base_ids}).delete()


def set_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids):
    remove_all_from_relation(relation_model, base_field_name, base_ids)
    add_to_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids)
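These helpers work directly on the through model of a many-to-many field, which is also how the Mealie importer further down bulk-creates its recipe-keyword links. A small usage sketch, with placeholder id lists:

```python
# Usage sketch; the id lists are placeholders and Recipe.keywords.through is the
# auto-generated through model with recipe_id / keyword_id columns.
from cookbook.helper.batch_edit_helper import add_to_relation, remove_from_relation, set_relation
from cookbook.models import Recipe

recipe_ids = [1, 2, 3]   # placeholder recipe ids
keyword_ids = [10, 11]   # placeholder keyword ids

# add the keywords to every recipe, ignoring pairs that already exist
add_to_relation(Recipe.keywords.through, 'recipe_id', recipe_ids, 'keyword_id', keyword_ids)

# remove just those keywords again
remove_from_relation(Recipe.keywords.through, 'recipe_id', recipe_ids, 'keyword_id', keyword_ids)

# or wipe whatever was there and set exactly these keywords
set_relation(Recipe.keywords.through, 'recipe_id', recipe_ids, 'keyword_id', keyword_ids)
```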
@@ -51,10 +51,10 @@ class OpenDataImporter:
        for field in field_list:
            if isinstance(getattr(obj, field), float) or isinstance(getattr(obj, field), Decimal):
                if abs(float(getattr(obj, field)) - float(existing_obj[field])) > 0.001:  # convert both to float and check if basically equal
-                   print(f'comparing FLOAT {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
+                   #print(f'comparing FLOAT {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
                    return False
            elif getattr(obj, field) != existing_obj[field]:
-               print(f'comparing {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
+               #print(f'comparing {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
                return False
        return True

@@ -342,7 +342,7 @@ class OpenDataImporter:
            'name': self.data[datatype][k]['name'],
            'plural_name': self.data[datatype][k]['plural_name'] if self.data[datatype][k]['plural_name'] != '' else None,
            'supermarket_category_id': self.slug_id_cache['category'][self.data[datatype][k]['store_category']] if self.data[datatype][k]['store_category'] in self.slug_id_cache['category'] else None,
-           'fdc_id': re.sub(r'\D', '', self.data[datatype][k]['fdc_id']) if self.data[datatype][k]['fdc_id'] != '' else None,
+           'fdc_id': re.sub(r'\D', '', str(self.data[datatype][k]['fdc_id'])) if self.data[datatype][k]['fdc_id'] != '' else None,
            'open_data_slug': k,
            'properties_food_unit_id': None,
            'space_id': self.request.space.id,
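The comparison these silenced debug prints sit in treats two numeric fields as equal when they differ by less than 0.001 after converting both sides to float. A tiny standalone illustration of why the conversion and tolerance matter when one side is a Decimal:

```python
# Standalone illustration of the float/Decimal tolerance check used above.
from decimal import Decimal

stored = Decimal('4.1')   # value on the existing database object
incoming = 4.1            # value parsed from the open data JSON

# a strict equality check on mixed Decimal/float values can report a difference purely
# because of binary floating-point representation; converting both to float and allowing
# a small tolerance treats them as "basically equal"
print(abs(float(stored) - float(incoming)) > 0.001)  # -> False, i.e. considered equal
```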
@@ -3,17 +3,19 @@ import inspect
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import user_passes_test
from django.contrib.auth.models import Group
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.http import HttpResponseRedirect
from django.urls import reverse, reverse_lazy
from django.utils.translation import gettext as _
from django_scopes import scopes_disabled
from oauth2_provider.contrib.rest_framework import TokenHasReadWriteScope, TokenHasScope
from oauth2_provider.models import AccessToken
from rest_framework import permissions
from rest_framework.permissions import SAFE_METHODS

- from cookbook.models import Recipe, ShareLink, UserSpace
+ import random
+ from cookbook.models import Recipe, ShareLink, UserSpace, Space


def get_allowed_groups(groups_required):
@@ -331,6 +333,25 @@ class CustomRecipePermission(permissions.BasePermission):
                or has_group_permission(request.user, ['user'])) and obj.space == request.space


class CustomAiProviderPermission(permissions.BasePermission):
    """
    Custom permission class for the AiProvider api endpoint
    users: can read all
    admins: can read and write
    superusers: can read and write + write providers without a space
    """
    message = _('You do not have the required permissions to view this page!')

    def has_permission(self, request, view):  # user is either at least a user and the request is safe
        return (has_group_permission(request.user, ['user']) and request.method in SAFE_METHODS) or (has_group_permission(request.user, ['admin']) or request.user.is_superuser)

    # editing of global providers allowed for superusers, space providers by admins and users can read only access
    def has_object_permission(self, request, view, obj):
        return ((obj.space is None and request.user.is_superuser)
                or (obj.space == request.space and has_group_permission(request.user, ['admin']))
                or (obj.space == request.space and has_group_permission(request.user, ['user']) and request.method in SAFE_METHODS))


class CustomUserPermission(permissions.BasePermission):
    """
    Custom permission class for user api endpoint
@@ -437,3 +458,36 @@ class IsReadOnlyDRF(permissions.BasePermission):

    def has_permission(self, request, view):
        return request.method in SAFE_METHODS


class IsCreateDRF(permissions.BasePermission):
    message = 'You cannot interact with this object, you can only create'

    def has_permission(self, request, view):
        return request.method == 'POST'


def create_space_for_user(user, name=None):
    with scopes_disabled():
        if not name:
            name = f"{user.username}'s Space"

        if Space.objects.filter(name=name).exists():
            name = f'{name} #{random.randrange(1, 10 ** 5)}'

        created_space = Space(name=name,
                              created_by=user,
                              max_file_storage_mb=settings.SPACE_DEFAULT_MAX_FILES,
                              max_recipes=settings.SPACE_DEFAULT_MAX_RECIPES,
                              max_users=settings.SPACE_DEFAULT_MAX_USERS,
                              allow_sharing=settings.SPACE_DEFAULT_ALLOW_SHARING,
                              ai_enabled=settings.SPACE_AI_ENABLED,
                              ai_credits_monthly=settings.SPACE_AI_CREDITS_MONTHLY,
                              space_setup_completed=False, )
        created_space.save()

        UserSpace.objects.filter(user=user).update(active=False)
        user_space = UserSpace.objects.create(space=created_space, user=user, active=True)
        user_space.groups.add(Group.objects.filter(name='admin').get())

        return user_space
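A sketch of how `CustomAiProviderPermission` would typically be attached to a DRF endpoint. The viewset and the minimal serializer below are illustrative stand-ins; the real registration in the project's API module is not part of this excerpt.

```python
# Illustrative wiring only; names other than CustomAiProviderPermission and AiProvider are assumed.
from rest_framework import serializers, viewsets

from cookbook.helper.permission_helper import CustomAiProviderPermission
from cookbook.models import AiProvider


class AiProviderSerializer(serializers.ModelSerializer):  # minimal stand-in serializer
    class Meta:
        model = AiProvider
        fields = '__all__'


class AiProviderViewSet(viewsets.ModelViewSet):
    # users get read-only access, space admins can edit their space's providers,
    # superusers can additionally edit global providers (space is None)
    queryset = AiProvider.objects.all()
    serializer_class = AiProviderSerializer
    permission_classes = [CustomAiProviderPermission]
```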
@@ -48,7 +48,7 @@ class FoodPropertyHelper:
                found_property = False
                # if food has a value for the given property type (no matter if conversion is possible)
                has_property_value = False
-               if i.food.properties_food_amount == 0 or i.food.properties_food_unit is None and not (i.amount == 0 or i.no_amount):  # if food is configured incorrectly
+               if (i.food.properties_food_amount == 0 or i.food.properties_food_unit is None) and not (i.amount == 0 or i.no_amount):  # if food is configured incorrectly
                    computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': {'id': i.food.id, 'name': i.food.name}, 'value': None}
                    computed_properties[pt.id]['missing_value'] = True
                else:
@@ -63,8 +63,9 @@ class FoodPropertyHelper:
                                computed_properties[p.property_type.id]['food_values'], c.food.id, (c.amount / i.food.properties_food_amount) * p.property_amount, c.food)
                    if not found_property:
                        # if no amount and food does not exist yet add it but don't count as missing
-                       if i.amount == 0 or i.no_amount and i.food.id not in computed_properties[pt.id]['food_values']:
-                           computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': {'id': i.food.id, 'name': i.food.name}, 'value': 0}
+                       if i.amount == 0 or i.no_amount:
+                           if i.food.id not in computed_properties[pt.id]['food_values']:
+                               computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': {'id': i.food.id, 'name': i.food.name}, 'value': 0}
                        # if amount is present but unit is missing indicate it in the result
                        elif i.unit is None:
                            if i.food.id not in computed_properties[pt.id]['food_values']:
@@ -72,7 +73,8 @@ class FoodPropertyHelper:
                                computed_properties[pt.id]['food_values'][i.food.id]['missing_unit'] = True
                        else:
                            computed_properties[pt.id]['missing_value'] = True
-                           computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': {'id': i.food.id, 'name': i.food.name}, 'value': None}
+                           if i.food.id not in computed_properties[pt.id]['food_values']:
+                               computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': {'id': i.food.id, 'name': i.food.name}, 'value': None}
                        if has_property_value and i.unit is not None:
                            computed_properties[pt.id]['food_values'][i.food.id]['missing_conversion'] = {'base_unit': {'id': i.unit.id, 'name': i.unit.name}, 'converted_unit': {'id': i.food.properties_food_unit.id, 'name': i.food.properties_food_unit.name}}

@@ -82,8 +84,12 @@ class FoodPropertyHelper:
    # TODO move to central helper ? --> use defaultdict
    @staticmethod
    def add_or_create(d, key, value, food):
-       if key in d and d[key]['value']:
-           d[key]['value'] += value
+       if key in d:
+           # value can be None if a previous instance of the same food was missing a conversion
+           if d[key]['value']:
+               d[key]['value'] += value
+           else:
+               d[key]['value'] = value
        else:
            d[key] = {'id': food.id, 'food': {'id': food.id, 'name': food.name}, 'value': value}
        return d
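The reworked `add_or_create` distinguishes "key present with a None value" from "key absent": a None left behind by an earlier occurrence of the same food that had no usable conversion is now overwritten, and the existing entry with its flags is kept instead of being rebuilt. A tiny standalone illustration, using plain dicts instead of model instances:

```python
# Standalone illustration of the new accumulation rule; 'food' is a plain dict here,
# whereas the real helper receives a Food model instance.
def add_or_create(d, key, value, food):
    if key in d:
        if d[key]['value']:
            d[key]['value'] += value
        else:
            d[key]['value'] = value  # replaces a None from an earlier, unconvertible occurrence
    else:
        d[key] = {'id': food['id'], 'food': food, 'value': value}
    return d


food = {'id': 1, 'name': 'flour'}
values = {1: {'id': 1, 'food': food, 'value': None, 'missing_unit': True}}
add_or_create(values, 1, 12.5, food)
print(values[1])  # value is now 12.5 and the missing_unit flag is preserved
```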
@@ -288,7 +288,7 @@ class RecipeSearch():

    def _updated_on_filter(self):
        if self._updatedon:
-           self._queryset = self._queryset.filter(updated_at__date__date=self._updatedon)
+           self._queryset = self._queryset.filter(updated_at__date=self._updatedon)
        elif self._updatedon_lte:
            self._queryset = self._queryset.filter(updated_at__date__lte=self._updatedon_lte)
        elif self._updatedon_gte:
@@ -326,7 +326,7 @@ class RecipeSearch():
    def _favorite_recipes(self):
        if self._sort_includes('favorite') or self._timescooked or self._timescooked_gte or self._timescooked_lte:
            less_than = self._timescooked_lte and not self._sort_includes('-favorite')
-           if less_than:
+           if less_than or self._timescooked == 0:
                default = 1000
            else:
                default = 0
@@ -339,7 +339,7 @@ class RecipeSearch():
            self._queryset = self._queryset.annotate(favorite=Coalesce(Subquery(favorite_recipes), default))

            if self._timescooked:
-               self._queryset = self._queryset.filter(favorite=0)
+               self._queryset = self._queryset.filter(favorite=self._timescooked)
            elif self._timescooked_lte:
                self._queryset = self._queryset.filter(favorite__lte=int(self._timescooked_lte)).exclude(favorite=0)
            elif self._timescooked_gte:
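The first fix replaces the doubled `__date__date` lookup with a single `__date` transform on the DateTimeField. A minimal sketch of the three corrected date filters, with `Recipe.objects` standing in for the search queryset:

```python
# Minimal sketch of the corrected date filters; Recipe.updated_at is a DateTimeField.
import datetime

from cookbook.models import Recipe

day = datetime.date(2025, 1, 15)  # example date

updated_on = Recipe.objects.filter(updated_at__date=day)          # exactly that day
updated_on_or_before = Recipe.objects.filter(updated_at__date__lte=day)
updated_on_or_after = Recipe.objects.filter(updated_at__date__gte=day)
```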
@@ -69,15 +69,8 @@ def get_from_scraper(scrape, request):
        recipe_json['description'] = parse_description(description)
    recipe_json['description'] = automation_engine.apply_regex_replace_automation(recipe_json['description'], Automation.DESCRIPTION_REPLACE)

-   # assign servings attributes
-   try:
-       # dont use scrape.yields() as this will always return "x servings" or "x items", should be improved in scrapers directly
-       servings = scrape.schema.data.get('recipeYield') or 1
-   except Exception:
-       servings = 1
-
-   recipe_json['servings'] = parse_servings(servings)
-   recipe_json['servings_text'] = parse_servings_text(servings)
+   recipe_json['servings'] = parse_servings(scrape.schema.data.get('recipeYield'))
+   recipe_json['servings_text'] = parse_servings_text(scrape.schema.data.get('recipeYield'))

    # assign time attributes
    try:
@@ -155,7 +148,7 @@ def get_from_scraper(scrape, request):

    # assign steps
    try:
-       for i in parse_instructions(scrape.instructions()):
+       for i in parse_instructions(scrape.instructions_list()):
            recipe_json['steps'].append({
                'instruction': i,
                'ingredients': [],
@@ -177,11 +170,11 @@ def get_from_scraper(scrape, request):
        for x in scrape.ingredients():
            if x.strip() != '':
                try:
-                   amount, unit, ingredient, note = ingredient_parser.parse(x)
+                   amount, unit, food, note = ingredient_parser.parse(x)
                    ingredient = {
                        'amount': amount,
                        'food': {
-                           'name': ingredient,
+                           'name': food,
                        },
                        'unit': None,
                        'note': note,
@@ -315,14 +308,29 @@ def clean_instruction_string(instruction):
    # handle unsupported, special UTF8 character in Thermomix-specific instructions,
    # that happen in nearly every recipe on Cookidoo, Zaubertopf Club, Rezeptwelt
    # and in Thermomix-specific recipes on many other sites
-   return normalized_string \
-       .replace("", _('reverse rotation')) \
-       .replace("", _('careful rotation')) \
-       .replace("", _('knead')) \
-       .replace("Andicken ", _('thicken')) \
-       .replace("Erwärmen ", _('warm up')) \
-       .replace("Fermentieren ", _('ferment')) \
-       .replace("Sous-vide ", _("sous-vide"))
+   normalized_string = normalized_string \
+       .replace(u"\uE003", _('reverse rotation')) \
+       .replace(u"\uE002", _('careful rotation')) \
+       .replace(u"\uE001", _('knead')) \
+       .replace(u"\uE031", _('thicken')) \
+       .replace(u"\uE019", _('warm up')) \
+       .replace(u"\uE02E", _('ferment')) \
+       .replace(u"\uE018", _('slow cook')) \
+       .replace(u"\uE033", _('egg boiler')) \
+       .replace(u"\uE016", _('kettle')) \
+       .replace(u"\uE01E", _('blend')) \
+       .replace(u"\uE011", _('pre-clean')) \
+       .replace(u"\uE026", _('high temperature')) \
+       .replace(u"\uE00D", _('rice cooker')) \
+       .replace(u"\uE00C", _('caramelize')) \
+       .replace(u"\uE038", _('peeler')) \
+       .replace(u"\uE037", _('slicer')) \
+       .replace(u"\uE036", _('grater')) \
+       .replace(u"\uE04C", _('spiralizer')) \
+       .replace(u"\uE02D", _("sous-vide"))

+   return normalized_string


def parse_instructions(instructions):
@@ -391,7 +399,7 @@ def parse_servings(servings):
def parse_servings_text(servings):
    if isinstance(servings, str):
        try:
-           servings = re.sub("\\d+", '', servings).strip()
+           servings = re.sub("\\d+", '', servings, 1).strip()
        except Exception:
            servings = ''
    if isinstance(servings, list):
@@ -403,6 +411,8 @@ def parse_servings_text(servings):


def parse_time(recipe_time):
+   if not recipe_time:
+       return 0
    if type(recipe_time) not in [int, float]:
        try:
            recipe_time = float(re.search(r'\d+', recipe_time).group())
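The `parse_servings_text` change passes a count of 1 to `re.sub`, so only the leading serving number is stripped and any further digits in the yield text survive. A small standalone comparison of the two behaviours:

```python
# Standalone comparison of the old and new regex in parse_servings_text.
import re

text = '2 loaves (12 slices)'

old = re.sub('\\d+', '', text).strip()     # removes every number   -> 'loaves ( slices)'
new = re.sub('\\d+', '', text, 1).strip()  # removes only the first -> 'loaves (12 slices)'
print(old, '|', new)
```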
@@ -1,8 +1,15 @@
from django.contrib.auth.models import Group
from django.http import HttpResponseRedirect
from django.urls import reverse
from django_scopes import scope, scopes_disabled
from oauth2_provider.contrib.rest_framework import OAuth2Authentication
from psycopg2.errors import UniqueViolation
from rest_framework.exceptions import AuthenticationFailed

import random

from cookbook.helper.permission_helper import create_space_for_user
from cookbook.models import Space, UserSpace
from cookbook.views import views
from recipes import settings

@@ -34,16 +41,28 @@ class ScopeMiddleware:
            if request.path.startswith(prefix + '/switch-space/'):
                return self.get_response(request)

            with scopes_disabled():
-               if request.user.userspace_set.count() == 0 and not reverse('account_logout') in request.path:
-                   return views.space_overview(request)
+               if request.path.startswith(prefix + '/invite/'):
+                   return self.get_response(request)

                # get active user space, if for some reason more than one space is active select first (group permission checks will fail, this is not intended at this point)
                user_space = request.user.userspace_set.filter(active=True).first()

-               if not user_space:
-                   return views.space_overview(request)
+               if not user_space and request.user.userspace_set.count() > 0:
+                   # if the users has a userspace but nothing is active, activate the first one
+                   user_space = request.user.userspace_set.first()
+                   if user_space:
+                       user_space.active = True
+                       user_space.save()
+
+               if not user_space:
+                   if 'signup_token' in request.session:
+                       # if user is authenticated, has no space but a signup token (InviteLink) is present, redirect to invite link logic
+                       return HttpResponseRedirect(reverse('view_invite', args=[request.session.pop('signup_token', '')]))
+                   else:
+                       # if user does not yet have a space create one for him
+                       user_space = create_space_for_user(request.user)

                # TODO remove the need for this view
                if user_space.groups.count() == 0 and not reverse('account_logout') in request.path:
                    return views.no_groups(request)
@@ -135,8 +135,9 @@ class UnitConversionHelper:
        :param food: base food
        :return: converted ingredient object from base amount/unit/food
        """
-       if uc.food is None or uc.food == food:
+       if (uc.food is None or uc.food == food) and uc.converted_amount > 0 and uc.base_amount > 0:
            if unit == uc.base_unit:
                return Ingredient(amount=amount * (uc.converted_amount / uc.base_amount), unit=uc.converted_unit, food=food, space=self.space)
            else:
                return Ingredient(amount=amount * (uc.base_amount / uc.converted_amount), unit=uc.base_unit, food=food, space=self.space)
        return None
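The added guard matters because the conversion scales the ingredient amount by a ratio of the two stored amounts, so a zero on either side would divide by zero or silently produce zero-amount results. The arithmetic, reduced to plain invented numbers:

```python
# Plain-number sketch of the scaling used by the conversion helper; the stored
# conversion "100 g == 0.1 kg" and the 250 g ingredient are invented example values.
base_amount, converted_amount = 100, 0.1   # base unit: g, converted unit: kg
amount = 250                               # ingredient amount, given in the base unit

to_kg = amount * (converted_amount / base_amount)      # 250 * (0.1 / 100) = 0.25 kg
back_to_g = to_kg * (base_amount / converted_amount)   # 0.25 * (100 / 0.1) = 250 g

# the new guard: conversion rows with a zero amount on either side are skipped
# instead of raising ZeroDivisionError or producing meaningless zero amounts
assert base_amount > 0 and converted_amount > 0
```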
@@ -26,6 +26,12 @@ class Integration:
    files = None
    export_type = None
    ignored_recipes = []
    import_log = None
    import_duplicates = False

+   import_meal_plans = True
+   import_shopping_lists = True
+   nutrition_per_serving = False

    def __init__(self, request, export_type):
        """
@@ -102,7 +108,7 @@ class Integration:
        """
        return True

-   def do_import(self, files, il, import_duplicates):
+   def do_import(self, files, il, import_duplicates, meal_plans=True, shopping_lists=True, nutrition_per_serving=False):
        """
        Imports given files
        :param import_duplicates: if true duplicates are imported as well
@@ -111,6 +117,12 @@ class Integration:
        :return: HttpResponseRedirect to the recipe search showing all imported recipes
        """
        with scope(space=self.request.space):
            self.import_log = il
            self.import_duplicates = import_duplicates

+           self.import_meal_plans = meal_plans
+           self.import_shopping_lists = shopping_lists
+           self.nutrition_per_serving = nutrition_per_serving

            try:
                self.files = files
@@ -166,20 +178,24 @@ class Integration:
                        il.total_recipes = len(new_file_list)
                        file_list = new_file_list

-                       for z in file_list:
-                           try:
-                               if not hasattr(z, 'filename') or isinstance(z, Tag):
-                                   recipe = self.get_recipe_from_file(z)
-                               else:
-                                   recipe = self.get_recipe_from_file(BytesIO(import_zip.read(z.filename)))
-                               recipe.keywords.add(self.keyword)
-                               il.msg += self.get_recipe_processed_msg(recipe)
-                               self.handle_duplicates(recipe, import_duplicates)
-                               il.imported_recipes += 1
-                               il.save()
-                           except Exception as e:
-                               traceback.print_exc()
-                               self.handle_exception(e, log=il, message=f'-------------------- \nERROR \n{e}\n--------------------\n')
+                       if isinstance(self, cookbook.integration.mealie1.Mealie1):
+                           # since the mealie 1.0 export is a backup and not a classic recipe export we treat it a bit differently
+                           recipes = self.get_recipe_from_file(import_zip)
+                       else:
+                           for z in file_list:
+                               try:
+                                   if not hasattr(z, 'filename') or isinstance(z, Tag):
+                                       recipe = self.get_recipe_from_file(z)
+                                   else:
+                                       recipe = self.get_recipe_from_file(BytesIO(import_zip.read(z.filename)))
+                                   recipe.keywords.add(self.keyword)
+                                   il.msg += self.get_recipe_processed_msg(recipe)
+                                   self.handle_duplicates(recipe, import_duplicates)
+                                   il.imported_recipes += 1
+                                   il.save()
+                               except Exception as e:
+                                   traceback.print_exc()
+                                   self.handle_exception(e, log=il, message=f'-------------------- \nERROR \n{e}\n--------------------\n')
                        import_zip.close()
                    elif '.json' in f['name'] or '.xml' in f['name'] or '.txt' in f['name'] or '.mmf' in f['name'] or '.rk' in f['name'] or '.melarecipe' in f['name']:
                        data_list = self.split_recipe_file(f['file'])
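The new `do_import` keyword arguments are what carry the ImportForm checkboxes into the integrations. A sketch of the call site, assuming `ImportForm` lives in `cookbook.forms` and ignoring the surrounding view plumbing; only the `do_import` keywords themselves come from this changeset.

```python
# Illustrative call site only; the view function and form handling are simplified stand-ins.
from cookbook.forms import ImportForm


def start_import(request, integration, import_log):
    form = ImportForm(request.POST, request.FILES)
    if form.is_valid():
        return integration.do_import(
            request.FILES.getlist('files'),
            import_log,
            form.cleaned_data['duplicates'],
            meal_plans=form.cleaned_data['meal_plans'],
            shopping_lists=form.cleaned_data['shopping_lists'],
            nutrition_per_serving=form.cleaned_data['nutrition_per_serving'],
        )
```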
366
cookbook/integration/mealie1.py
Normal file
366
cookbook/integration/mealie1.py
Normal file
@@ -0,0 +1,366 @@
|
||||
import json
|
||||
import re
|
||||
import traceback
|
||||
import uuid
|
||||
from decimal import Decimal
|
||||
from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
from gettext import gettext as _
|
||||
|
||||
from django.db import transaction
|
||||
|
||||
from cookbook.helper import ingredient_parser
|
||||
from cookbook.helper.image_processing import get_filetype
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text, parse_time
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step, Food, Unit, SupermarketCategory, PropertyType, Property, MealType, MealPlan, CookLog, ShoppingListEntry
|
||||
|
||||
|
||||
class Mealie1(Integration):
|
||||
"""
|
||||
integration for mealie past version 1.0
|
||||
"""
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
mealie_database = json.loads(BytesIO(file.read('database.json')).getvalue().decode("utf-8"))
|
||||
self.import_log.total_recipes = len(mealie_database['recipes'])
|
||||
self.import_log.msg += f"Importing {len(mealie_database["categories"]) + len(mealie_database["tags"])} tags and categories as keywords...\n"
|
||||
self.import_log.save()
|
||||
|
||||
keywords_categories_dict = {}
|
||||
for c in mealie_database['categories']:
|
||||
if keyword := Keyword.objects.filter(name=c['name'], space=self.request.space).first():
|
||||
keywords_categories_dict[c['id']] = keyword.pk
|
||||
else:
|
||||
keyword = Keyword.objects.create(name=c['name'], space=self.request.space)
|
||||
keywords_categories_dict[c['id']] = keyword.pk
|
||||
|
||||
keywords_tags_dict = {}
|
||||
for t in mealie_database['tags']:
|
||||
if keyword := Keyword.objects.filter(name=t['name'], space=self.request.space).first():
|
||||
keywords_tags_dict[t['id']] = keyword.pk
|
||||
else:
|
||||
keyword = Keyword.objects.create(name=t['name'], space=self.request.space)
|
||||
keywords_tags_dict[t['id']] = keyword.pk
|
||||
|
||||
self.import_log.msg += f"Importing {len(mealie_database["multi_purpose_labels"])} multi purpose labels as supermarket categories...\n"
|
||||
self.import_log.save()
|
||||
|
||||
supermarket_categories_dict = {}
|
||||
for m in mealie_database['multi_purpose_labels']:
|
||||
if supermarket_category := SupermarketCategory.objects.filter(name=m['name'], space=self.request.space).first():
|
||||
supermarket_categories_dict[m['id']] = supermarket_category.pk
|
||||
else:
|
||||
supermarket_category = SupermarketCategory.objects.create(name=m['name'], space=self.request.space)
|
||||
supermarket_categories_dict[m['id']] = supermarket_category.pk
|
||||
|
||||
self.import_log.msg += f"Importing {len(mealie_database["ingredient_foods"])} foods...\n"
|
||||
self.import_log.save()
|
||||
|
||||
foods_dict = {}
|
||||
for f in mealie_database['ingredient_foods']:
|
||||
if food := Food.objects.filter(name=f['name'], space=self.request.space).first():
|
||||
foods_dict[f['id']] = food.pk
|
||||
else:
|
||||
food = {'name': f['name'],
|
||||
'plural_name': f['plural_name'],
|
||||
'description': f['description'],
|
||||
'space': self.request.space}
|
||||
|
||||
if f['label_id'] and f['label_id'] in supermarket_categories_dict:
|
||||
food['supermarket_category_id'] = supermarket_categories_dict[f['label_id']]
|
||||
|
||||
food = Food.objects.create(**food)
|
||||
if f['on_hand']:
|
||||
food.onhand_users.add(self.request.user)
|
||||
foods_dict[f['id']] = food.pk
|
||||
|
||||
self.import_log.msg += f"Importing {len(mealie_database["ingredient_units"])} units...\n"
|
||||
self.import_log.save()
|
||||
|
||||
units_dict = {}
|
||||
for u in mealie_database['ingredient_units']:
|
||||
if unit := Unit.objects.filter(name=u['name'], space=self.request.space).first():
|
||||
units_dict[u['id']] = unit.pk
|
||||
else:
|
||||
unit = Unit.objects.create(name=u['name'], plural_name=u['plural_name'], description=u['description'], space=self.request.space)
|
||||
units_dict[u['id']] = unit.pk
|
||||
|
||||
recipes_dict = {}
|
||||
recipe_property_factor_dict = {}
|
||||
recipes = []
|
||||
recipe_keyword_relation = []
|
||||
for r in mealie_database['recipes']:
|
||||
if Recipe.objects.filter(space=self.request.space, name=r['name']).exists() and not self.import_duplicates:
|
||||
self.import_log.msg += f"Ignoring {r['name']} because a recipe with this name already exists.\n"
|
||||
self.import_log.save()
|
||||
else:
|
||||
recipe = Recipe.objects.create(
|
||||
waiting_time=parse_time(r['perform_time']),
|
||||
working_time=parse_time(r['prep_time']),
|
||||
description=r['description'][:512],
|
||||
name=r['name'],
|
||||
source_url=r['org_url'],
|
||||
servings=r['recipe_servings'] if r['recipe_servings'] and r['recipe_servings'] != 0 else 1,
|
||||
servings_text=r['recipe_yield'].strip() if r['recipe_yield'] else "",
|
||||
internal=True,
|
||||
created_at=r['created_at'],
|
||||
space=self.request.space,
|
||||
created_by=self.request.user,
|
||||
)
|
||||
|
||||
if not self.nutrition_per_serving:
|
||||
recipe_property_factor_dict[r['id']] = recipe.servings
|
||||
|
||||
self.import_log.msg += self.get_recipe_processed_msg(recipe)
|
||||
self.import_log.imported_recipes += 1
|
||||
self.import_log.save()
|
||||
|
||||
recipes.append(recipe)
|
||||
recipes_dict[r['id']] = recipe.pk
|
||||
recipe_keyword_relation.append(Recipe.keywords.through(recipe_id=recipe.pk, keyword_id=self.keyword.pk))
|
||||
|
||||
Recipe.keywords.through.objects.bulk_create(recipe_keyword_relation, ignore_conflicts=True)
|
||||
|
||||
self.import_log.msg += f"Importing {len(mealie_database["recipe_instructions"])} instructions...\n"
|
||||
self.import_log.save()
|
||||
|
||||
steps_relation = []
|
||||
first_step_of_recipe_dict = {}
|
||||
step_id_dict = {}
|
||||
for s in mealie_database['recipe_instructions']:
|
||||
if s['recipe_id'] in recipes_dict:
|
||||
step = Step.objects.create(instruction=(s['text'] if s['text'] else "") + (f" \n {s['summary']}" if s['summary'] else ""),
|
||||
order=s['position'],
|
||||
name=s['title'],
|
||||
space=self.request.space)
|
||||
steps_relation.append(Recipe.steps.through(recipe_id=recipes_dict[s['recipe_id']], step_id=step.pk))
|
||||
step_id_dict[s["id"]] = step.pk
|
||||
if s['recipe_id'] not in first_step_of_recipe_dict:
|
||||
first_step_of_recipe_dict[s['recipe_id']] = step.pk
|
||||
|
||||
# it is possible for a recipe to not have steps but have ingredients, in that case create an empty step to add them to later
|
||||
for r in recipes_dict.keys():
|
||||
if r not in first_step_of_recipe_dict:
|
||||
step = Step.objects.create(instruction='',
|
||||
order=0,
|
||||
name='',
|
||||
space=self.request.space)
|
||||
steps_relation.append(Recipe.steps.through(recipe_id=recipes_dict[r], step_id=step.pk))
|
||||
first_step_of_recipe_dict[r] = step.pk
|
||||
|
||||
for n in mealie_database['notes']:
|
||||
if n['recipe_id'] in recipes_dict:
|
||||
step = Step.objects.create(instruction=n['text'],
|
||||
name=n['title'],
|
||||
order=100,
|
||||
space=self.request.space)
|
||||
steps_relation.append(Recipe.steps.through(recipe_id=recipes_dict[n['recipe_id']], step_id=step.pk))
|
||||
|
||||
Recipe.steps.through.objects.bulk_create(steps_relation)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
|
||||
self.import_log.msg += f"Importing {len(mealie_database["recipes_ingredients"])} ingredients...\n"
|
||||
self.import_log.save()
|
||||
|
||||
# mealie stores the reference to a step (instruction) from an ingredient (reference) in the recipe_ingredient_ref_link table
|
||||
recipe_ingredient_ref_link_dict = {}
|
||||
for ref in mealie_database['recipe_ingredient_ref_link']:
|
||||
recipe_ingredient_ref_link_dict[ref["reference_id"]] = ref["instruction_id"]
|
||||
|
||||
ingredients_relation = []
|
||||
for i in mealie_database['recipes_ingredients']:
|
||||
if i['recipe_id'] in recipes_dict:
|
||||
if i['title']:
|
||||
title_ingredient = Ingredient.objects.create(
|
||||
note=i['title'],
|
||||
is_header=True,
|
||||
space=self.request.space,
|
||||
)
|
||||
ingredients_relation.append(Step.ingredients.through(step_id=get_step_id(i, first_step_of_recipe_dict, step_id_dict,recipe_ingredient_ref_link_dict), ingredient_id=title_ingredient.pk))
|
||||
if i['food_id']:
|
||||
ingredient = Ingredient.objects.create(
|
||||
food_id=foods_dict[i['food_id']] if i['food_id'] in foods_dict else None,
|
||||
unit_id=units_dict[i['unit_id']] if i['unit_id'] in units_dict else None,
|
||||
original_text=i['original_text'],
|
||||
order=i['position'],
|
||||
amount=i['quantity'] if i['quantity'] else 0,
|
||||
note=i['note'],
|
||||
space=self.request.space,
|
||||
)
|
||||
ingredients_relation.append(Step.ingredients.through(step_id=get_step_id(i, first_step_of_recipe_dict, step_id_dict,recipe_ingredient_ref_link_dict), ingredient_id=ingredient.pk))
|
||||
elif i['note'].strip():
|
||||
amount, unit, food, note = ingredient_parser.parse(i['note'].strip())
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
ingredient = Ingredient.objects.create(
|
||||
food=f,
|
||||
unit=u,
|
||||
amount=amount,
|
||||
note=note,
|
||||
original_text=i['original_text'],
|
||||
space=self.request.space,
|
||||
)
|
||||
ingredients_relation.append(Step.ingredients.through(step_id=get_step_id(i, first_step_of_recipe_dict, step_id_dict,recipe_ingredient_ref_link_dict), ingredient_id=ingredient.pk))
|
||||
Step.ingredients.through.objects.bulk_create(ingredients_relation)
|
||||
|
||||
self.import_log.msg += f"Importing {len(mealie_database["recipes_to_categories"]) + len(mealie_database["recipes_to_tags"])} category and keyword relations...\n"
|
||||
self.import_log.save()
|
||||
|
||||
recipe_keyword_relation = []
|
||||
for rC in mealie_database['recipes_to_categories']:
|
||||
if rC['recipe_id'] in recipes_dict:
|
||||
recipe_keyword_relation.append(Recipe.keywords.through(recipe_id=recipes_dict[rC['recipe_id']], keyword_id=keywords_categories_dict[rC['category_id']]))
|
||||
|
||||
for rT in mealie_database['recipes_to_tags']:
|
||||
if rT['recipe_id'] in recipes_dict:
|
||||
recipe_keyword_relation.append(Recipe.keywords.through(recipe_id=recipes_dict[rT['recipe_id']], keyword_id=keywords_tags_dict[rT['tag_id']]))
|
||||
|
||||
Recipe.keywords.through.objects.bulk_create(recipe_keyword_relation, ignore_conflicts=True)
|
||||
|
||||
self.import_log.msg += f"Importing {len(mealie_database["recipe_nutrition"])} properties...\n"
|
||||
self.import_log.save()
|
||||
|
||||
property_types_dict = {
|
||||
'calories': PropertyType.objects.get_or_create(name=_('Calories'), space=self.request.space, defaults={'unit': 'kcal', 'fdc_id': 1008})[0],
|
||||
'carbohydrate_content': PropertyType.objects.get_or_create(name=_('Carbohydrates'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1005})[0],
|
||||
'cholesterol_content': PropertyType.objects.get_or_create(name=_('Cholesterol'), space=self.request.space, defaults={'unit': 'mg', 'fdc_id': 1253})[0],
|
||||
'fat_content': PropertyType.objects.get_or_create(name=_('Fat'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1004})[0],
|
||||
'fiber_content': PropertyType.objects.get_or_create(name=_('Fiber'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1079})[0],
|
||||
'protein_content': PropertyType.objects.get_or_create(name=_('Protein'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1003})[0],
|
||||
'saturated_fat_content': PropertyType.objects.get_or_create(name=_('Saturated Fat'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1258})[0],
|
||||
'sodium_content': PropertyType.objects.get_or_create(name=_('Sodium'), space=self.request.space, defaults={'unit': 'mg', 'fdc_id': 1093})[0],
|
||||
'sugar_content': PropertyType.objects.get_or_create(name=_('Sugar'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1063})[0],
|
||||
'trans_fat_content': PropertyType.objects.get_or_create(name=_('Trans Fat'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1257})[0],
|
||||
'unsaturated_fat_content': PropertyType.objects.get_or_create(name=_('Unsaturated Fat'), space=self.request.space, defaults={'unit': 'g'})[0],
|
||||
}
|
||||
|
||||
        with transaction.atomic():
            recipe_properties_relation = []
            properties_relation = []
            for r in mealie_database['recipe_nutrition']:
                if r['recipe_id'] in recipes_dict:
                    for key in property_types_dict:
                        if r[key]:
                            properties_relation.append(
                                Property(property_type_id=property_types_dict[key].pk,
                                         property_amount=Decimal(str(r[key])) / (
                                             Decimal(str(recipe_property_factor_dict[r['recipe_id']])) if r['recipe_id'] in recipe_property_factor_dict else 1),
                                         open_data_food_slug=r['recipe_id'],
                                         space=self.request.space))
            properties = Property.objects.bulk_create(properties_relation)
            property_ids = []
            for p in properties:
                recipe_properties_relation.append(Recipe.properties.through(recipe_id=recipes_dict[p.open_data_food_slug], property_id=p.pk))
                property_ids.append(p.pk)
            Recipe.properties.through.objects.bulk_create(recipe_properties_relation, ignore_conflicts=True)
            Property.objects.filter(id__in=property_ids).update(open_data_food_slug=None)
        # delete unused property types
        for pT in property_types_dict:
            try:
                property_types_dict[pT].delete()
            except Exception:
                pass  # property type is still referenced by an imported property, keep it
        self.import_log.msg += f"Importing {len(mealie_database['recipe_comments']) + len(mealie_database['recipe_timeline_events'])} comments and cook logs...\n"
        self.import_log.save()

        cook_log_list = []
        for c in mealie_database['recipe_comments']:
            if c['recipe_id'] in recipes_dict:
                cook_log_list.append(CookLog(
                    recipe_id=recipes_dict[c['recipe_id']],
                    comment=c['text'],
                    created_at=c['created_at'],
                    created_by=self.request.user,
                    space=self.request.space,
                ))

        for c in mealie_database['recipe_timeline_events']:
            if c['recipe_id'] in recipes_dict:
                if c['event_type'] == 'comment':
                    cook_log_list.append(CookLog(
                        recipe_id=recipes_dict[c['recipe_id']],
                        comment=c['message'],
                        created_at=c['created_at'],
                        created_by=self.request.user,
                        space=self.request.space,
                    ))

        CookLog.objects.bulk_create(cook_log_list)
        if self.import_meal_plans:
            self.import_log.msg += f"Importing {len(mealie_database['group_meal_plans'])} meal plans...\n"
            self.import_log.save()

            meal_types_dict = {}
            meal_plans = []
            for m in mealie_database['group_meal_plans']:
                if m['recipe_id'] in recipes_dict:
                    if m['entry_type'] not in meal_types_dict:
                        meal_type = MealType.objects.get_or_create(name=m['entry_type'], created_by=self.request.user, space=self.request.space)[0]
                        meal_types_dict[m['entry_type']] = meal_type.pk
                    meal_plans.append(MealPlan(
                        recipe_id=recipes_dict[m['recipe_id']] if m['recipe_id'] else None,
                        title=m['title'] if m['title'] else "",
                        note=m['text'] if m['text'] else "",
                        from_date=m['date'],
                        to_date=m['date'],
                        meal_type_id=meal_types_dict[m['entry_type']],
                        created_by=self.request.user,
                        space=self.request.space,
                    ))

            MealPlan.objects.bulk_create(meal_plans)
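        # Shopping list entries that still reference a Mealie food/unit are mapped directly through
        # foods_dict / units_dict; free-text entries are pushed through ingredient_parser (a parser
        # instance created earlier in the import, not shown in this hunk) to split the note into
        # amount, unit and food before an entry is created.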
        if self.import_shopping_lists:
            self.import_log.msg += f"Importing {len(mealie_database['shopping_list_items'])} shopping list items...\n"
            self.import_log.save()

            shopping_list_items = []
            for sli in mealie_database['shopping_list_items']:
                if not sli['checked']:
                    if sli['food_id']:
                        shopping_list_items.append(ShoppingListEntry(
                            amount=sli['quantity'],
                            unit_id=units_dict[sli['unit_id']] if sli['unit_id'] else None,
                            food_id=foods_dict[sli['food_id']] if sli['food_id'] else None,
                            created_by=self.request.user,
                            space=self.request.space,
                        ))
                    elif not sli['food_id'] and sli['note'].strip():
                        amount, unit, food, note = ingredient_parser.parse(sli['note'].strip())
                        f = ingredient_parser.get_food(food)
                        u = ingredient_parser.get_unit(unit)
                        shopping_list_items.append(ShoppingListEntry(
                            amount=amount,
                            unit=u,
                            food=f,
                            created_by=self.request.user,
                            space=self.request.space,
                        ))
            ShoppingListEntry.objects.bulk_create(shopping_list_items)
        self.import_log.msg += "Importing images. This might take some time...\n"
        self.import_log.save()
        for r in mealie_database['recipes']:
            try:
                if recipe := Recipe.objects.filter(pk=recipes_dict[r['id']]).first():
                    self.import_recipe_image(recipe, BytesIO(file.read(f"data/recipes/{uuid.UUID(str(r['id']))}/images/original.webp")), filetype='.webp')
            except Exception:
                pass  # recipe has no image in the export or the file could not be read

        return recipes
    def get_file_from_recipe(self, recipe):
        raise NotImplementedError('Method not implemented in storage integration')

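# get_step_id resolves which imported step an ingredient belongs to: it follows the ingredient's
# reference_id through recipe_ingredient_ref_link_dict into step_id_dict and falls back to the
# recipe's first step when no reference is present.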
def get_step_id(i, first_step_of_recipe_dict, step_id_dict, recipe_ingredient_ref_link_dict):
    try:
        return step_id_dict[recipe_ingredient_ref_link_dict[i['reference_id']]]
    except KeyError:
        return first_step_of_recipe_dict[i['recipe_id']]
[Diffs for the compiled locale catalogues (binary .mo files) and several oversized .po files are suppressed by the diff viewer. New files in this range include cookbook/locale/hr/LC_MESSAGES/django.mo and cookbook/locale/ko/LC_MESSAGES/django.po (2453 lines).]
@@ -13,7 +13,7 @@ msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2024-08-01 15:04+0200\n"
|
||||
"PO-Revision-Date: 2025-07-31 19:14+0000\n"
|
||||
"PO-Revision-Date: 2025-09-23 19:45+0000\n"
|
||||
"Last-Translator: Justin Straver <justin.straver@gmail.com>\n"
|
||||
"Language-Team: Dutch <http://translate.tandoor.dev/projects/tandoor/"
|
||||
"recipes-backend/nl/>\n"
|
||||
@@ -22,14 +22,14 @@ msgstr ""
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"Plural-Forms: nplurals=2; plural=n != 1;\n"
|
||||
"X-Generator: Weblate 5.8.4\n"
|
||||
"X-Generator: Weblate 5.13.1\n"
|
||||
|
||||
#: .\cookbook\forms.py:45
|
||||
msgid ""
|
||||
"Both fields are optional. If none are given the username will be displayed "
|
||||
"instead"
|
||||
msgstr ""
|
||||
"Beide velden zijn optioneel. Indien niks is opgegeven wordt de "
|
||||
"Beide velden zijn optioneel. Indien niets is opgegeven wordt de "
|
||||
"gebruikersnaam weergegeven"
|
||||
|
||||
#: .\cookbook\forms.py:62 .\cookbook\forms.py:246
|
||||
@@ -771,7 +771,7 @@ msgstr ""
|
||||
#: .\cookbook\templates\account\email_confirm.html:22
|
||||
#: .\cookbook\templates\generic\delete_template.html:72
|
||||
msgid "Confirm"
|
||||
msgstr "Bevestigen"
|
||||
msgstr "Bevestig"
|
||||
|
||||
#: .\cookbook\templates\account\email_confirm.html:29
|
||||
#, python-format
|
||||
@@ -1052,7 +1052,7 @@ msgstr "Beheer"
|
||||
#: .\cookbook\templates\base.html:351
|
||||
#: .\cookbook\templates\space_overview.html:25
|
||||
msgid "Your Spaces"
|
||||
msgstr "Jouw Spaces"
|
||||
msgstr "Jouw Ruimtes"
|
||||
|
||||
#: .\cookbook\templates\base.html:362
|
||||
#: .\cookbook\templates\space_overview.html:6
|
||||
@@ -1996,12 +1996,12 @@ msgstr "Eigenaar"
|
||||
|
||||
#: .\cookbook\templates\space_overview.html:57
|
||||
msgid "Leave Space"
|
||||
msgstr "Verlaat Space"
|
||||
msgstr "Verlaat Ruimte"
|
||||
|
||||
#: .\cookbook\templates\space_overview.html:78
|
||||
#: .\cookbook\templates\space_overview.html:88
|
||||
msgid "Join Space"
|
||||
msgstr "Sluit aan bij ruimte"
|
||||
msgstr "Sluit aan bij Ruimte"
|
||||
|
||||
#: .\cookbook\templates\space_overview.html:81
|
||||
msgid "Join an existing space."
|
||||
@@ -2237,7 +2237,7 @@ msgstr "Er bestaat geen {self.basename} met id {target}"
|
||||
|
||||
#: .\cookbook\views\api.py:250 .\cookbook\views\api.py:251
|
||||
msgid "Cannot merge with child object!"
|
||||
msgstr "Kan niet met kindobject samenvoegen!"
|
||||
msgstr "Kan niet met sub object samenvoegen!"
|
||||
|
||||
#: .\cookbook\views\api.py:288 .\cookbook\views\api.py:289
|
||||
#, python-brace-format
|
||||
@@ -2592,7 +2592,7 @@ msgstr "Uitnodigingslink"
|
||||
|
||||
#: .\cookbook\views\delete.py:168
|
||||
msgid "Space Membership"
|
||||
msgstr "Space Lidmaatschap"
|
||||
msgstr "Ruimte Lidmaatschap"
|
||||
|
||||
#: .\cookbook\views\edit.py:84
|
||||
msgid "You cannot edit this storage!"
|
||||
|
||||
[Further locale diffs suppressed (binary .mo files and oversized .po files).]
@@ -8,8 +8,8 @@ msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2024-08-01 15:04+0200\n"
|
||||
"PO-Revision-Date: 2025-01-16 18:58+0000\n"
|
||||
"Last-Translator: Anton Shevtsov <ashevtsovs@gmail.com>\n"
|
||||
"PO-Revision-Date: 2025-09-23 19:45+0000\n"
|
||||
"Last-Translator: S <snoblomma@protonmail.com>\n"
|
||||
"Language-Team: Ukrainian <http://translate.tandoor.dev/projects/tandoor/"
|
||||
"recipes-backend/uk/>\n"
|
||||
"Language: uk\n"
|
||||
@@ -18,17 +18,19 @@ msgstr ""
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : n%10>=2 && "
|
||||
"n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;\n"
|
||||
"X-Generator: Weblate 5.8.4\n"
|
||||
"X-Generator: Weblate 5.13.1\n"
|
||||
|
||||
#: .\cookbook\forms.py:45
|
||||
msgid ""
|
||||
"Both fields are optional. If none are given the username will be displayed "
|
||||
"instead"
|
||||
msgstr ""
|
||||
"Обидва поля необов'язкові. Якщо нічого не вказано, замість цього буде "
|
||||
"відображено ім'я користувача"
|
||||
|
||||
#: .\cookbook\forms.py:62 .\cookbook\forms.py:246
|
||||
msgid "Name"
|
||||
msgstr ""
|
||||
msgstr "Назва"
|
||||
|
||||
#: .\cookbook\forms.py:62 .\cookbook\forms.py:246 .\cookbook\views\lists.py:103
|
||||
msgid "Keywords"
|
||||
@@ -87,22 +89,25 @@ msgid ""
|
||||
"<a href=\"https://www.home-assistant.io/docs/authentication/#your-account-"
|
||||
"profile\">Long Lived Access Token</a> for your HomeAssistant instance"
|
||||
msgstr ""
|
||||
"<a href="
|
||||
"\"https://www.home-assistant.io/docs/authentication/#your-account-profile\""
|
||||
">Довговічний токен доступу</a> для вашої інстанції HomeAssistant"
|
||||
|
||||
#: .\cookbook\forms.py:193
|
||||
msgid "Something like http://homeassistant.local:8123/api"
|
||||
msgstr ""
|
||||
msgstr "Щось на кшталт http://homeassistant.local:8123/api"
|
||||
|
||||
#: .\cookbook\forms.py:205
|
||||
msgid "http://homeassistant.local:8123/api for example"
|
||||
msgstr ""
|
||||
msgstr "http://homeassistant.local:8123/api наприклад"
|
||||
|
||||
#: .\cookbook\forms.py:222 .\cookbook\views\edit.py:117
|
||||
msgid "Storage"
|
||||
msgstr ""
|
||||
msgstr "Сховище"
|
||||
|
||||
#: .\cookbook\forms.py:222
|
||||
msgid "Active"
|
||||
msgstr ""
|
||||
msgstr "Активний"
|
||||
|
||||
#: .\cookbook\forms.py:226
|
||||
msgid "Search String"
|
||||
@@ -110,89 +115,109 @@ msgstr ""
|
||||
|
||||
#: .\cookbook\forms.py:246
|
||||
msgid "File ID"
|
||||
msgstr ""
|
||||
msgstr "ID файлу"
|
||||
|
||||
#: .\cookbook\forms.py:262
|
||||
msgid "Maximum number of users for this space reached."
|
||||
msgstr ""
|
||||
msgstr "Досягнута максимальна кількість користувачів для цього простору."
|
||||
|
||||
#: .\cookbook\forms.py:268
|
||||
msgid "Email address already taken!"
|
||||
msgstr ""
|
||||
msgstr "Ця адреса електронної пошти вже зайнята!"
|
||||
|
||||
#: .\cookbook\forms.py:275
|
||||
msgid ""
|
||||
"An email address is not required but if present the invite link will be sent "
|
||||
"to the user."
|
||||
msgstr ""
|
||||
"Адреса електронної пошти не обов'язкова, але якщо вона вказана, "
|
||||
"користувачеві буде надіслано посилання для запрошення."
|
||||
|
||||
#: .\cookbook\forms.py:287
|
||||
msgid "Name already taken."
|
||||
msgstr ""
|
||||
msgstr "Ім'я вже зайняте."
|
||||
|
||||
#: .\cookbook\forms.py:298
|
||||
msgid "Accept Terms and Privacy"
|
||||
msgstr ""
|
||||
msgstr "Прийняти умови використування та конфіденційності"
|
||||
|
||||
#: .\cookbook\forms.py:332
|
||||
msgid ""
|
||||
"Determines how fuzzy a search is if it uses trigram similarity matching (e."
|
||||
"g. low values mean more typos are ignored)."
|
||||
msgstr ""
|
||||
"Визначає, наскільки нечітким є пошук, якщо він використовує зіставлення за "
|
||||
"схожістю триграм (наприклад, низькі значення означають, що більше "
|
||||
"друкарських помилок ігнорується)."
|
||||
|
||||
#: .\cookbook\forms.py:340
|
||||
msgid ""
|
||||
"Select type method of search. Click <a href=\"/docs/search/\">here</a> for "
|
||||
"full description of choices."
|
||||
msgstr ""
|
||||
"Виберіть метод пошуку. Натисніть <a href=\"/docs/search/\">тут</a> для "
|
||||
"повного опису опцій."
|
||||
|
||||
#: .\cookbook\forms.py:341
|
||||
msgid ""
|
||||
"Use fuzzy matching on units, keywords and ingredients when editing and "
|
||||
"importing recipes."
|
||||
msgstr ""
|
||||
"Використовуйте нечітке зіставлення одиниць вимірювання, ключових слів та "
|
||||
"інгредієнтів під час редагування та імпорту рецептів."
|
||||
|
||||
#: .\cookbook\forms.py:342
|
||||
msgid ""
|
||||
"Fields to search ignoring accents. Selecting this option can improve or "
|
||||
"degrade search quality depending on language"
|
||||
msgstr ""
|
||||
"Поля для пошуку без урахування діакритичних знаків. Вибір цієї опції може "
|
||||
"покращити або погіршити якість пошуку залежно від мови"
|
||||
|
||||
#: .\cookbook\forms.py:343
|
||||
msgid ""
|
||||
"Fields to search for partial matches. (e.g. searching for 'Pie' will return "
|
||||
"'pie' and 'piece' and 'soapie')"
|
||||
msgstr ""
|
||||
"Поля для пошуку часткових збігів. (наприклад, пошук за запитом «Pie» поверне "
|
||||
"«pie», «piece» та «soapie»)"
|
||||
|
||||
#: .\cookbook\forms.py:344
|
||||
msgid ""
|
||||
"Fields to search for beginning of word matches. (e.g. searching for 'sa' "
|
||||
"will return 'salad' and 'sandwich')"
|
||||
msgstr ""
|
||||
"Поля для пошуку збігів на початку слів. (наприклад, пошук за запитом «sa» "
|
||||
"поверне «salad» та «sandwich»)"
|
||||
|
||||
#: .\cookbook\forms.py:345
|
||||
msgid ""
|
||||
"Fields to 'fuzzy' search. (e.g. searching for 'recpie' will find 'recipe'.) "
|
||||
"Note: this option will conflict with 'web' and 'raw' methods of search."
|
||||
msgstr ""
|
||||
"Поля для «нечіткого» пошуку. (наприклад, пошук за запитом «recpie» знайде "
|
||||
"«recipe»). Примітка: цей параметр конфліктуватиме з методами пошуку «web» та "
|
||||
"«raw»."
|
||||
|
||||
#: .\cookbook\forms.py:346
|
||||
msgid ""
|
||||
"Fields to full text search. Note: 'web', 'phrase', and 'raw' search methods "
|
||||
"only function with fulltext fields."
|
||||
msgstr ""
|
||||
"Поля для повнотекстового пошуку. Примітка: методи пошуку «web», «phrase» та "
|
||||
"«raw» працюють лише з повнотекстовими полями."
|
||||
|
||||
#: .\cookbook\forms.py:350
|
||||
msgid "Search Method"
|
||||
msgstr ""
|
||||
msgstr "Метод пошуку"
|
||||
|
||||
#: .\cookbook\forms.py:350
|
||||
msgid "Fuzzy Lookups"
|
||||
msgstr ""
|
||||
msgstr "Нечіткі пошуки"
|
||||
|
||||
#: .\cookbook\forms.py:350
|
||||
msgid "Ignore Accent"
|
||||
msgstr ""
|
||||
msgstr "Ігнорувати акцент"
|
||||
|
||||
#: .\cookbook\forms.py:350
|
||||
msgid "Partial Match"
|
||||
@@ -200,26 +225,28 @@ msgstr ""
|
||||
|
||||
#: .\cookbook\forms.py:350
|
||||
msgid "Starts With"
|
||||
msgstr ""
|
||||
msgstr "Починається з"
|
||||
|
||||
#: .\cookbook\forms.py:351
|
||||
msgid "Fuzzy Search"
|
||||
msgstr ""
|
||||
msgstr "Нечіткий пошук"
|
||||
|
||||
#: .\cookbook\forms.py:351
|
||||
msgid "Full Text"
|
||||
msgstr ""
|
||||
msgstr "Повний текст"
|
||||
|
||||
#: .\cookbook\helper\AllAuthCustomAdapter.py:41
|
||||
msgid ""
|
||||
"In order to prevent spam, the requested email was not send. Please wait a "
|
||||
"few minutes and try again."
|
||||
msgstr ""
|
||||
"Щоб запобігти спаму, запитуваний електронний лист не було надіслано. Будь "
|
||||
"ласка, зачекайте кілька хвилин і спробуйте ще раз."
|
||||
|
||||
#: .\cookbook\helper\permission_helper.py:164
|
||||
#: .\cookbook\helper\permission_helper.py:187 .\cookbook\views\views.py:117
|
||||
msgid "You are not logged in and therefore cannot view this page!"
|
||||
msgstr ""
|
||||
msgstr "Ви не ввійшли в систему, тому не можете переглянути цю сторінку!"
|
||||
|
||||
#: .\cookbook\helper\permission_helper.py:168
|
||||
#: .\cookbook\helper\permission_helper.py:174
|
||||
@@ -232,22 +259,22 @@ msgstr ""
|
||||
#: .\cookbook\helper\permission_helper.py:341 .\cookbook\views\data.py:35
|
||||
#: .\cookbook\views\views.py:127 .\cookbook\views\views.py:131
|
||||
msgid "You do not have the required permissions to view this page!"
|
||||
msgstr ""
|
||||
msgstr "У вас немає необхідних дозволів для перегляду цієї сторінки!"
|
||||
|
||||
#: .\cookbook\helper\permission_helper.py:192
|
||||
#: .\cookbook\helper\permission_helper.py:215
|
||||
#: .\cookbook\helper\permission_helper.py:237
|
||||
#: .\cookbook\helper\permission_helper.py:252
|
||||
msgid "You cannot interact with this object as it is not owned by you!"
|
||||
msgstr ""
|
||||
msgstr "Ви не можете взаємодіяти з цим об'єктом, оскільки він вам не належить!"
|
||||
|
||||
#: .\cookbook\helper\permission_helper.py:402
|
||||
msgid "You have reached the maximum number of recipes for your space."
|
||||
msgstr ""
|
||||
msgstr "Ви досягли максимальної кількості рецептів для вашого простору."
|
||||
|
||||
#: .\cookbook\helper\permission_helper.py:414
|
||||
msgid "You have more users than allowed in your space."
|
||||
msgstr ""
|
||||
msgstr "У вашому просторі більше користувачів, ніж дозволено."
|
||||
|
||||
#: .\cookbook\helper\recipe_url_import.py:310
|
||||
msgid "reverse rotation"
|
||||
@@ -259,19 +286,19 @@ msgstr ""
|
||||
|
||||
#: .\cookbook\helper\recipe_url_import.py:312
|
||||
msgid "knead"
|
||||
msgstr ""
|
||||
msgstr "замісити"
|
||||
|
||||
#: .\cookbook\helper\recipe_url_import.py:313
|
||||
msgid "thicken"
|
||||
msgstr ""
|
||||
msgstr "згустити"
|
||||
|
||||
#: .\cookbook\helper\recipe_url_import.py:314
|
||||
msgid "warm up"
|
||||
msgstr ""
|
||||
msgstr "розігріти"
|
||||
|
||||
#: .\cookbook\helper\recipe_url_import.py:315
|
||||
msgid "ferment"
|
||||
msgstr ""
|
||||
msgstr "ферментувати"
|
||||
|
||||
#: .\cookbook\helper\recipe_url_import.py:316
|
||||
msgid "sous-vide"
|
||||
@@ -279,27 +306,29 @@ msgstr ""
|
||||
|
||||
#: .\cookbook\helper\shopping_helper.py:150
|
||||
msgid "You must supply a servings size"
|
||||
msgstr ""
|
||||
msgstr "Ви повинні вказати розмір порції"
|
||||
|
||||
#: .\cookbook\helper\template_helper.py:95
|
||||
#: .\cookbook\helper\template_helper.py:97
|
||||
msgid "Could not parse template code."
|
||||
msgstr ""
|
||||
msgstr "Не вдалося проаналізувати код шаблону."
|
||||
|
||||
#: .\cookbook\integration\copymethat.py:44
|
||||
#: .\cookbook\integration\melarecipes.py:37
|
||||
msgid "Favorite"
|
||||
msgstr ""
|
||||
msgstr "Улюблене"
|
||||
|
||||
#: .\cookbook\integration\copymethat.py:50
|
||||
msgid "I made this"
|
||||
msgstr ""
|
||||
msgstr "Я зробив це"
|
||||
|
||||
#: .\cookbook\integration\integration.py:209
|
||||
msgid ""
|
||||
"Importer expected a .zip file. Did you choose the correct importer type for "
|
||||
"your data ?"
|
||||
msgstr ""
|
||||
"Імпортер очікував ZIP-файл. Чи ви обрали правильний тип імпортера для своїх "
|
||||
"даних?"
|
||||
|
||||
#: .\cookbook\integration\integration.py:212
|
||||
msgid ""
|
||||
@@ -309,24 +338,24 @@ msgstr ""
|
||||
|
||||
#: .\cookbook\integration\integration.py:217
|
||||
msgid "The following recipes were ignored because they already existed:"
|
||||
msgstr ""
|
||||
msgstr "Наступні рецепти були проігноровані, оскільки вони вже існували:"
|
||||
|
||||
#: .\cookbook\integration\integration.py:221
|
||||
#, python-format
|
||||
msgid "Imported %s recipes."
|
||||
msgstr ""
|
||||
msgstr "Імпортовано %s рецептів."
|
||||
|
||||
#: .\cookbook\integration\openeats.py:28
|
||||
msgid "Recipe source:"
|
||||
msgstr ""
|
||||
msgstr "Джерело рецепту:"
|
||||
|
||||
#: .\cookbook\integration\paprika.py:49
|
||||
msgid "Notes"
|
||||
msgstr ""
|
||||
msgstr "Нотатки"
|
||||
|
||||
#: .\cookbook\integration\paprika.py:52
|
||||
msgid "Nutritional Information"
|
||||
msgstr ""
|
||||
msgstr "Харчова цінність"
|
||||
|
||||
#: .\cookbook\integration\paprika.py:56
|
||||
msgid "Source"
|
||||
@@ -339,7 +368,7 @@ msgstr ""
|
||||
|
||||
#: .\cookbook\integration\saffron.py:23
|
||||
msgid "Servings"
|
||||
msgstr ""
|
||||
msgstr "Порції"
|
||||
|
||||
#: .\cookbook\integration\saffron.py:25
|
||||
msgid "Waiting time"
|
||||
@@ -383,41 +412,41 @@ msgstr ""
|
||||
|
||||
#: .\cookbook\migrations\0047_auto_20200602_1133.py:19
|
||||
msgid "Lunch"
|
||||
msgstr ""
|
||||
msgstr "Обід"
|
||||
|
||||
#: .\cookbook\migrations\0047_auto_20200602_1133.py:24
|
||||
msgid "Dinner"
|
||||
msgstr ""
|
||||
msgstr "Вечеря"
|
||||
|
||||
#: .\cookbook\migrations\0047_auto_20200602_1133.py:29 .\cookbook\models.py:919
|
||||
msgid "Other"
|
||||
msgstr ""
|
||||
msgstr "Інше"
|
||||
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:17
|
||||
msgid "Fat"
|
||||
msgstr ""
|
||||
msgstr "Жир"
|
||||
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:17
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:18
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:19
|
||||
msgid "g"
|
||||
msgstr ""
|
||||
msgstr "г"
|
||||
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:18
|
||||
msgid "Carbohydrates"
|
||||
msgstr ""
|
||||
msgstr "Вуглеводи"
|
||||
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:19
|
||||
msgid "Proteins"
|
||||
msgstr ""
|
||||
msgstr "Білки"
|
||||
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:20
|
||||
msgid "Calories"
|
||||
msgstr ""
|
||||
msgstr "Калорії"
|
||||
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:20
|
||||
msgid "kcal"
|
||||
msgstr ""
|
||||
msgstr "ккал"
|
||||
|
||||
#: .\cookbook\models.py:325
|
||||
msgid ""
|
||||
@@ -428,54 +457,54 @@ msgstr ""
|
||||
#: .\cookbook\models.py:454 .\cookbook\templates\search.html:7
|
||||
#: .\cookbook\templates\settings.html:18
|
||||
msgid "Search"
|
||||
msgstr ""
|
||||
msgstr "Пошук"
|
||||
|
||||
#: .\cookbook\models.py:455 .\cookbook\templates\base.html:114
|
||||
#: .\cookbook\templates\meal_plan.html:7
|
||||
msgid "Meal-Plan"
|
||||
msgstr ""
|
||||
msgstr "План харчування"
|
||||
|
||||
#: .\cookbook\models.py:456 .\cookbook\templates\base.html:122
|
||||
#: .\cookbook\views\views.py:459
|
||||
msgid "Books"
|
||||
msgstr ""
|
||||
msgstr "Книги"
|
||||
|
||||
#: .\cookbook\models.py:457 .\cookbook\templates\base.html:118
|
||||
#: .\cookbook\views\views.py:460
|
||||
msgid "Shopping"
|
||||
msgstr ""
|
||||
msgstr "Покупки"
|
||||
|
||||
#: .\cookbook\models.py:752
|
||||
msgid " is part of a recipe step and cannot be deleted"
|
||||
msgstr ""
|
||||
msgstr " є частиною кроку рецепта та не може бути видалений"
|
||||
|
||||
#: .\cookbook\models.py:918
|
||||
msgid "Nutrition"
|
||||
msgstr ""
|
||||
msgstr "Харчова цінніцть"
|
||||
|
||||
#: .\cookbook\models.py:918
|
||||
msgid "Allergen"
|
||||
msgstr ""
|
||||
msgstr "Алерген"
|
||||
|
||||
#: .\cookbook\models.py:919
|
||||
msgid "Price"
|
||||
msgstr ""
|
||||
msgstr "Ціна"
|
||||
|
||||
#: .\cookbook\models.py:919
|
||||
msgid "Goal"
|
||||
msgstr ""
|
||||
msgstr "Мета"
|
||||
|
||||
#: .\cookbook\models.py:1408 .\cookbook\templates\search_info.html:28
|
||||
msgid "Simple"
|
||||
msgstr ""
|
||||
msgstr "Простий"
|
||||
|
||||
#: .\cookbook\models.py:1409 .\cookbook\templates\search_info.html:33
|
||||
msgid "Phrase"
|
||||
msgstr ""
|
||||
msgstr "Фраза"
|
||||
|
||||
#: .\cookbook\models.py:1410 .\cookbook\templates\search_info.html:38
|
||||
msgid "Web"
|
||||
msgstr ""
|
||||
msgstr "Веб"
|
||||
|
||||
#: .\cookbook\models.py:1411 .\cookbook\templates\search_info.html:47
|
||||
msgid "Raw"
|
||||
@@ -483,15 +512,15 @@ msgstr ""
|
||||
|
||||
#: .\cookbook\models.py:1467
|
||||
msgid "Food Alias"
|
||||
msgstr ""
|
||||
msgstr "Псевдонім продукту"
|
||||
|
||||
#: .\cookbook\models.py:1468
|
||||
msgid "Unit Alias"
|
||||
msgstr ""
|
||||
msgstr "Псевдонім одиниці вимірювання"
|
||||
|
||||
#: .\cookbook\models.py:1469
|
||||
msgid "Keyword Alias"
|
||||
msgstr ""
|
||||
msgstr "Псевдонім ключового слова"
|
||||
|
||||
#: .\cookbook\models.py:1470
|
||||
msgid "Description Replace"
|
||||
@@ -528,7 +557,7 @@ msgstr ""
|
||||
|
||||
#: .\cookbook\models.py:1504
|
||||
msgid "Food"
|
||||
msgstr ""
|
||||
msgstr "Їжа"
|
||||
|
||||
#: .\cookbook\models.py:1505 .\cookbook\templates\base.html:149
|
||||
msgid "Keyword"
|
||||
@@ -643,7 +672,7 @@ msgstr ""
|
||||
#: .\cookbook\templates\socialaccount\connections.html:10
|
||||
#: .\cookbook\templates\user_settings.html:8
|
||||
msgid "Settings"
|
||||
msgstr ""
|
||||
msgstr "Налаштування"
|
||||
|
||||
#: .\cookbook\templates\account\email.html:13
|
||||
msgid "Email"
|
||||
@@ -871,7 +900,7 @@ msgstr ""
|
||||
#: .\cookbook\templates\account\signup.html:48
|
||||
#: .\cookbook\templates\socialaccount\signup.html:39
|
||||
msgid "and"
|
||||
msgstr ""
|
||||
msgstr "і"
|
||||
|
||||
#: .\cookbook\templates\account\signup.html:52
|
||||
#: .\cookbook\templates\socialaccount\signup.html:43
|
||||
@@ -951,7 +980,7 @@ msgstr "Експорт"
|
||||
|
||||
#: .\cookbook\templates\base.html:287
|
||||
msgid "Properties"
|
||||
msgstr ""
|
||||
msgstr "Властивості"
|
||||
|
||||
#: .\cookbook\templates\base.html:301 .\cookbook\views\lists.py:255
|
||||
msgid "Unit Conversions"
|
||||
@@ -1827,7 +1856,7 @@ msgstr ""
|
||||
#: .\cookbook\templates\system.html:75 .\cookbook\templates\system.html:88
|
||||
#: .\cookbook\templates\system.html:102 .\cookbook\templates\system.html:113
|
||||
msgid "Warning"
|
||||
msgstr ""
|
||||
msgstr "Застереження"
|
||||
|
||||
#: .\cookbook\templates\system.html:47 .\cookbook\templates\system.html:61
|
||||
#: .\cookbook\templates\system.html:75 .\cookbook\templates\system.html:88
|
||||
|
||||
[Diffs for the remaining locale files suppressed (binary .mo files and oversized .po files).]
@@ -0,0 +1,60 @@
|
||||
# Generated by Django 4.2.22 on 2025-09-05 06:51
|
||||
|
||||
import cookbook.models
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('cookbook', '0223_auto_20250831_1111'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='space',
|
||||
name='ai_credits_balance',
|
||||
field=models.IntegerField(default=0),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='space',
|
||||
name='ai_credits_monthly',
|
||||
field=models.IntegerField(default=100),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='AiProvider',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(max_length=128)),
|
||||
('description', models.TextField(blank=True)),
|
||||
('api_key', models.CharField(max_length=2048)),
|
||||
('model_name', models.CharField(max_length=256)),
|
||||
('url', models.CharField(blank=True, max_length=2048, null=True)),
|
||||
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||
('updated_at', models.DateTimeField(auto_now=True)),
|
||||
('space', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='cookbook.space')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='AiLog',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('function', models.CharField(max_length=64)),
|
||||
('credit_cost', models.DecimalField(decimal_places=4, max_digits=16)),
|
||||
('credits_from_balance', models.BooleanField(default=False)),
|
||||
('input_tokens', models.IntegerField(default=0)),
|
||||
('output_tokens', models.IntegerField(default=0)),
|
||||
('start_time', models.DateTimeField(null=True)),
|
||||
('end_time', models.DateTimeField(null=True)),
|
||||
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||
('updated_at', models.DateTimeField(auto_now=True)),
|
||||
('ai_provider', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='cookbook.aiprovider')),
|
||||
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
|
||||
('space', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cookbook.space')),
|
||||
],
|
||||
bases=(models.Model, cookbook.models.PermissionModelMixin),
|
||||
),
|
||||
]
|
||||
18
cookbook/migrations/0225_space_ai_enabled.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.22 on 2025-09-08 19:21
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0224_space_ai_credits_balance_space_ai_credits_monthly_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='space',
|
||||
name='ai_enabled',
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 4.2.22 on 2025-09-08 20:00
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0225_space_ai_enabled'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='aiprovider',
|
||||
name='log_credit_cost',
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='space',
|
||||
name='ai_credits_monthly',
|
||||
field=models.IntegerField(default=10000),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 4.2.22 on 2025-09-09 11:40
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0226_aiprovider_log_credit_cost_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='space',
|
||||
name='ai_default_provider',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='space_ai_default_provider', to='cookbook.aiprovider'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='space',
|
||||
name='ai_credits_balance',
|
||||
field=models.DecimalField(decimal_places=4, default=0, max_digits=16),
|
||||
),
|
||||
]
|
||||
18
cookbook/migrations/0228_space_space_setup_completed.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 5.2.6 on 2025-09-10 20:11
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0001_squashed_0227_space_ai_default_provider_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='space',
|
||||
name='space_setup_completed',
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,26 @@
|
||||
# Generated by Django 5.2.6 on 2025-09-24 17:20
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0228_space_space_setup_completed'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='ailog',
|
||||
options={'ordering': ('-created_at',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='aiprovider',
|
||||
options={'ordering': ('id',)},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='storage',
|
||||
name='token',
|
||||
field=models.CharField(blank=True, max_length=4098, null=True),
|
||||
),
|
||||
]
|
||||
15
cookbook/migrations/0230_auto_20250925_2056.py
Normal file
@@ -0,0 +1,15 @@
|
||||
# Generated by Django 5.2.6 on 2025-09-25 18:56
|
||||
|
||||
from django.db import migrations
|
||||
from django.contrib.postgres.operations import TrigramExtension, UnaccentExtension
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0229_alter_ailog_options_alter_aiprovider_options_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
TrigramExtension(),
|
||||
UnaccentExtension(),
|
||||
]
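# TrigramExtension and UnaccentExtension install the PostgreSQL pg_trgm and unaccent extensions
# that back the trigram-similarity and accent-insensitive search options referenced elsewhere in
# this release.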
|
||||
@@ -0,0 +1,141 @@
|
||||
# Generated by Django 5.2.6 on 2025-09-30 18:47
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0230_auto_20250925_2056'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='aiprovider',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='automation',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='bookmarkletimport',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='comment',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='connectorconfig',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='cooklog',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='customfilter',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='exportlog',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='food',
|
||||
options={'ordering': ('name',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='importlog',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='invitelink',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='keyword',
|
||||
options={'ordering': ('name',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='mealplan',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='mealtype',
|
||||
options={'ordering': ('name',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='recipe',
|
||||
options={'ordering': ('name',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='recipebook',
|
||||
options={'ordering': ('name',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='recipeimport',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='sharelink',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='shoppinglistentry',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='shoppinglistrecipe',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='space',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='storage',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='supermarket',
|
||||
options={'ordering': ('name',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='supermarketcategory',
|
||||
options={'ordering': ('name',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='sync',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='synclog',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='telegrambot',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='unit',
|
||||
options={'ordering': ('name',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='unitconversion',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='userfile',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='userspace',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='viewlog',
|
||||
options={'ordering': ('pk',)},
|
||||
),
|
||||
]
|
||||
@@ -329,6 +329,13 @@ class Space(ExportModelOperationsMixin('space'), models.Model):
|
||||
demo = models.BooleanField(default=False)
|
||||
food_inherit = models.ManyToManyField(FoodInheritField, blank=True)
|
||||
|
||||
space_setup_completed = models.BooleanField(default=True)
|
||||
|
||||
ai_enabled = models.BooleanField(default=True)
|
||||
ai_credits_monthly = models.IntegerField(default=100)
|
||||
ai_credits_balance = models.DecimalField(default=0, max_digits=16, decimal_places=4)
|
||||
ai_default_provider = models.ForeignKey("AiProvider", on_delete=models.SET_NULL, null=True, blank=True, related_name='space_ai_default_provider')
|
||||
|
||||
internal_note = models.TextField(blank=True, null=True)
|
||||
|
||||
def safe_delete(self):
|
||||
@@ -341,6 +348,9 @@ class Space(ExportModelOperationsMixin('space'), models.Model):
|
||||
BookmarkletImport.objects.filter(space=self).delete()
|
||||
CustomFilter.objects.filter(space=self).delete()
|
||||
|
||||
AiLog.objects.filter(space=self).delete()
|
||||
AiProvider.objects.filter(space=self).delete()
|
||||
|
||||
Property.objects.filter(space=self).delete()
|
||||
PropertyType.objects.filter(space=self).delete()
|
||||
|
||||
@@ -392,6 +402,59 @@ class Space(ExportModelOperationsMixin('space'), models.Model):
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class AiProvider(models.Model):
|
||||
name = models.CharField(max_length=128)
|
||||
description = models.TextField(blank=True)
|
||||
# AiProviders can be global, so space=null is allowed (configurable by superusers)
|
||||
space = models.ForeignKey(Space, on_delete=models.CASCADE, null=True)
|
||||
|
||||
api_key = models.CharField(max_length=2048)
|
||||
model_name = models.CharField(max_length=256)
|
||||
url = models.CharField(max_length=2048, blank=True, null=True)
|
||||
log_credit_cost = models.BooleanField(default=True)
|
||||
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class AiLog(models.Model, PermissionModelMixin):
|
||||
F_FILE_IMPORT = 'FILE_IMPORT'
|
||||
F_STEP_SORT = 'STEP_SORT'
|
||||
F_FOOD_PROPERTIES = 'FOOD_PROPERTIES'
|
||||
F_RECIPE_PROPERTIES = 'RECIPE_PROPERTIES'
|
||||
|
||||
ai_provider = models.ForeignKey(AiProvider, on_delete=models.SET_NULL, null=True)
|
||||
function = models.CharField(max_length=64)
|
||||
credit_cost = models.DecimalField(max_digits=16, decimal_places=4)
|
||||
# True if credits were deducted from the balance, otherwise they came out of the monthly quota
|
||||
credits_from_balance = models.BooleanField(default=False)
|
||||
|
||||
input_tokens = models.IntegerField(default=0)
|
||||
output_tokens = models.IntegerField(default=0)
|
||||
start_time = models.DateTimeField(null=True)
|
||||
end_time = models.DateTimeField(null=True)
|
||||
|
||||
space = models.ForeignKey(Space, on_delete=models.CASCADE)
|
||||
created_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.function} {self.ai_provider.name} {self.created_at}"
|
||||
|
||||
class Meta:
|
||||
ordering = ('-created_at',)
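# Each AiLog row records one AI call: which provider and function were used, the token counts and
# the credit cost, with credits_from_balance distinguishing spend from the credit balance from
# spend against the monthly quota (see Space.ai_credits_balance / ai_credits_monthly above).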
|
||||
|
||||
|
||||
class ConnectorConfig(models.Model, PermissionModelMixin):
|
||||
HOMEASSISTANT = 'HomeAssistant'
|
||||
@@ -417,6 +480,9 @@ class ConnectorConfig(models.Model, PermissionModelMixin):
|
||||
space = models.ForeignKey(Space, on_delete=models.CASCADE)
|
||||
objects = ScopedManager(space='space')
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class UserPreference(models.Model, PermissionModelMixin):
|
||||
# Themes
|
||||
@@ -520,6 +586,9 @@ class UserSpace(models.Model, PermissionModelMixin):
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class Storage(models.Model, PermissionModelMixin):
|
||||
DROPBOX = 'DB'
|
||||
@@ -533,7 +602,7 @@ class Storage(models.Model, PermissionModelMixin):
|
||||
)
|
||||
username = models.CharField(max_length=128, blank=True, null=True)
|
||||
password = models.CharField(max_length=128, blank=True, null=True)
|
||||
token = models.CharField(max_length=512, blank=True, null=True)
|
||||
token = models.CharField(max_length=4098, blank=True, null=True)
|
||||
url = models.URLField(blank=True, null=True)
|
||||
path = models.CharField(blank=True, default='', max_length=256)
|
||||
created_by = models.ForeignKey(User, on_delete=models.PROTECT)
|
||||
@@ -544,6 +613,9 @@ class Storage(models.Model, PermissionModelMixin):
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class Sync(models.Model, PermissionModelMixin):
|
||||
storage = models.ForeignKey(Storage, on_delete=models.PROTECT)
|
||||
@@ -559,6 +631,9 @@ class Sync(models.Model, PermissionModelMixin):
|
||||
def __str__(self):
|
||||
return self.path
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class SupermarketCategory(models.Model, PermissionModelMixin, MergeModelMixin):
|
||||
name = models.CharField(max_length=128, validators=[MinLengthValidator(1)])
|
||||
@@ -584,6 +659,7 @@ class SupermarketCategory(models.Model, PermissionModelMixin, MergeModelMixin):
|
||||
models.UniqueConstraint(fields=['space', 'name'], name='smc_unique_name_per_space'),
|
||||
models.UniqueConstraint(fields=['space', 'open_data_slug'], name='supermarket_category_unique_open_data_slug_per_space')
|
||||
]
|
||||
ordering = ('name',)
|
||||
|
||||
|
||||
class Supermarket(models.Model, PermissionModelMixin):
|
||||
@@ -603,6 +679,7 @@ class Supermarket(models.Model, PermissionModelMixin):
|
||||
models.UniqueConstraint(fields=['space', 'name'], name='sm_unique_name_per_space'),
|
||||
models.UniqueConstraint(fields=['space', 'open_data_slug'], name='supermarket_unique_open_data_slug_per_space')
|
||||
]
|
||||
ordering = ('name',)
|
||||
|
||||
|
||||
class SupermarketCategoryRelation(models.Model, PermissionModelMixin):
|
||||
@@ -634,6 +711,9 @@ class SyncLog(models.Model, PermissionModelMixin):
|
||||
def __str__(self):
|
||||
return f"{self.created_at}:{self.sync} - {self.status}"
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class Keyword(ExportModelOperationsMixin('keyword'), TreeModel, PermissionModelMixin):
|
||||
if SORT_TREE_BY_NAME:
|
||||
@@ -651,6 +731,7 @@ class Keyword(ExportModelOperationsMixin('keyword'), TreeModel, PermissionModelM
|
||||
models.UniqueConstraint(fields=['space', 'name'], name='kw_unique_name_per_space')
|
||||
]
|
||||
indexes = (Index(fields=['id', 'name']),)
|
||||
ordering = ('name',)
|
||||
|
||||
|
||||
class Unit(ExportModelOperationsMixin('unit'), models.Model, PermissionModelMixin, MergeModelMixin):
|
||||
@@ -682,6 +763,7 @@ class Unit(ExportModelOperationsMixin('unit'), models.Model, PermissionModelMixi
|
||||
models.UniqueConstraint(fields=['space', 'name'], name='u_unique_name_per_space'),
|
||||
models.UniqueConstraint(fields=['space', 'open_data_slug'], name='unit_unique_open_data_slug_per_space')
|
||||
]
|
||||
ordering = ('name',)
|
||||
|
||||
|
||||
class Food(ExportModelOperationsMixin('food'), TreeModel, PermissionModelMixin):
|
||||
@@ -748,14 +830,7 @@ class Food(ExportModelOperationsMixin('food'), TreeModel, PermissionModelMixin):
|
||||
self.delete()
|
||||
return target
|
||||
|
||||
def delete(self):
|
||||
if self.ingredient_set.all().exclude(step=None).count() > 0:
|
||||
raise ProtectedError(self.name + _(" is part of a recipe step and cannot be deleted"), self.ingredient_set.all().exclude(step=None))
|
||||
else:
|
||||
return super().delete()
|
||||
|
||||
# MP_Tree move() uses raw SQL to execute the move, so override it to force a save that triggers the post_save signal
|
||||
|
||||
def move(self, *args, **kwargs):
|
||||
super().move(*args, **kwargs)
|
||||
# treebeard bypasses the ORM, so explicitly save to trigger post_save signals; retrieve the object again to avoid writing the previous state back to disk
|
||||
@@ -822,6 +897,7 @@ class Food(ExportModelOperationsMixin('food'), TreeModel, PermissionModelMixin):
|
||||
Index(fields=['id']),
|
||||
Index(fields=['name']),
|
||||
)
|
||||
ordering = ('name',)
|
||||
|
||||
|
||||
class UnitConversion(ExportModelOperationsMixin('unit_conversion'), models.Model, PermissionModelMixin):
|
||||
@@ -848,6 +924,7 @@ class UnitConversion(ExportModelOperationsMixin('unit_conversion'), models.Model
|
||||
models.UniqueConstraint(fields=['space', 'base_unit', 'converted_unit', 'food'], name='f_unique_conversion_per_space'),
|
||||
models.UniqueConstraint(fields=['space', 'open_data_slug'], name='unit_conversion_unique_open_data_slug_per_space')
|
||||
]
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class Ingredient(ExportModelOperationsMixin('ingredient'), models.Model, PermissionModelMixin):
|
||||
@@ -1052,13 +1129,14 @@ class Recipe(ExportModelOperationsMixin('recipe'), models.Model, PermissionModel
|
||||
sub_food_recipes = Q(id__in=Food.objects.filter(ingredient__step__recipe__in=related_recipes).exclude(recipe=None).values_list('recipe'))
|
||||
return Recipe.objects.filter(Q(id__in=related_recipes.values_list('id')) | sub_step_recipes | sub_food_recipes)
|
||||
|
||||
class Meta():
|
||||
class Meta:
|
||||
indexes = (
|
||||
GinIndex(fields=["name_search_vector"]),
|
||||
GinIndex(fields=["desc_search_vector"]),
|
||||
Index(fields=['id']),
|
||||
Index(fields=['name']),
|
||||
)
|
||||
ordering = ('name',)
|
||||
|
||||
|
||||
class Comment(ExportModelOperationsMixin('comment'), models.Model, PermissionModelMixin):
|
||||
@@ -1079,6 +1157,9 @@ class Comment(ExportModelOperationsMixin('comment'), models.Model, PermissionMod
|
||||
|
||||
def __str__(self):
|
||||
return self.text
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class RecipeImport(models.Model, PermissionModelMixin):
|
||||
@@ -1107,6 +1188,9 @@ class RecipeImport(models.Model, PermissionModelMixin):
|
||||
self.delete()
|
||||
return recipe
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class RecipeBook(ExportModelOperationsMixin('book'), models.Model, PermissionModelMixin):
|
||||
name = models.CharField(max_length=128)
|
||||
@@ -1124,6 +1208,7 @@ class RecipeBook(ExportModelOperationsMixin('book'), models.Model, PermissionMod
|
||||
|
||||
class Meta():
|
||||
indexes = (Index(fields=['name']),)
|
||||
ordering = ('name',)
|
||||
|
||||
|
||||
class RecipeBookEntry(ExportModelOperationsMixin('book_entry'), models.Model, PermissionModelMixin):
|
||||
@@ -1169,6 +1254,7 @@ class MealType(models.Model, PermissionModelMixin):
|
||||
constraints = [
|
||||
models.UniqueConstraint(fields=['space', 'name', 'created_by'], name='mt_unique_name_per_space'),
|
||||
]
|
||||
ordering = ('name',)
|
||||
|
||||
|
||||
class MealPlan(ExportModelOperationsMixin('meal_plan'), models.Model, PermissionModelMixin):
|
||||
@@ -1196,6 +1282,9 @@ class MealPlan(ExportModelOperationsMixin('meal_plan'), models.Model, Permission
|
||||
def __str__(self):
|
||||
return f'{self.get_label()} - {self.from_date} - {self.meal_type.name}'
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class ShoppingListRecipe(ExportModelOperationsMixin('shopping_list_recipe'), models.Model, PermissionModelMixin):
|
||||
name = models.CharField(max_length=32, blank=True, default='')
|
||||
@@ -1211,6 +1300,9 @@ class ShoppingListRecipe(ExportModelOperationsMixin('shopping_list_recipe'), mod
|
||||
def __str__(self):
|
||||
return f'Shopping list recipe {self.id} - {self.recipe}'
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class ShoppingListEntry(ExportModelOperationsMixin('shopping_list_entry'), models.Model, PermissionModelMixin):
|
||||
list_recipe = models.ForeignKey(ShoppingListRecipe, on_delete=models.CASCADE, null=True, blank=True, related_name='entries')
|
||||
@@ -1242,6 +1334,9 @@ class ShoppingListEntry(ExportModelOperationsMixin('shopping_list_entry'), model
|
||||
except AttributeError:
|
||||
return None
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class ShareLink(ExportModelOperationsMixin('share_link'), models.Model, PermissionModelMixin):
|
||||
recipe = models.ForeignKey(Recipe, on_delete=models.CASCADE)
|
||||
@@ -1257,6 +1352,9 @@ class ShareLink(ExportModelOperationsMixin('share_link'), models.Model, Permissi
|
||||
def __str__(self):
|
||||
return f'{self.recipe} - {self.uuid}'
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
def default_valid_until():
|
||||
return date.today() + timedelta(days=14)
|
||||
@@ -1280,6 +1378,9 @@ class InviteLink(ExportModelOperationsMixin('invite_link'), models.Model, Permis
|
||||
def __str__(self):
|
||||
return f'{self.uuid}'
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class TelegramBot(models.Model, PermissionModelMixin):
|
||||
token = models.CharField(max_length=256)
|
||||
@@ -1294,6 +1395,9 @@ class TelegramBot(models.Model, PermissionModelMixin):
|
||||
def __str__(self):
|
||||
return f"{self.name}"
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class CookLog(ExportModelOperationsMixin('cook_log'), models.Model, PermissionModelMixin):
|
||||
recipe = models.ForeignKey(Recipe, on_delete=models.CASCADE)
|
||||
@@ -1311,7 +1415,7 @@ class CookLog(ExportModelOperationsMixin('cook_log'), models.Model, PermissionMo
|
||||
def __str__(self):
|
||||
return self.recipe.name
|
||||
|
||||
class Meta():
|
||||
class Meta:
|
||||
indexes = (
|
||||
Index(fields=['id']),
|
||||
Index(fields=['recipe']),
|
||||
@@ -1320,6 +1424,7 @@ class CookLog(ExportModelOperationsMixin('cook_log'), models.Model, PermissionMo
|
||||
Index(fields=['created_by']),
|
||||
Index(fields=['created_by', 'rating']),
|
||||
)
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class ViewLog(ExportModelOperationsMixin('view_log'), models.Model, PermissionModelMixin):
|
||||
@@ -1333,13 +1438,14 @@ class ViewLog(ExportModelOperationsMixin('view_log'), models.Model, PermissionMo
|
||||
def __str__(self):
|
||||
return self.recipe.name
|
||||
|
||||
class Meta():
|
||||
class Meta:
|
||||
indexes = (
|
||||
Index(fields=['recipe']),
|
||||
Index(fields=['-created_at']),
|
||||
Index(fields=['created_by']),
|
||||
Index(fields=['recipe', '-created_at', 'created_by']),
|
||||
)
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class ImportLog(models.Model, PermissionModelMixin):
|
||||
@@ -1360,6 +1466,9 @@ class ImportLog(models.Model, PermissionModelMixin):
|
||||
def __str__(self):
|
||||
return f"{self.created_at}:{self.type}"
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class ExportLog(models.Model, PermissionModelMixin):
|
||||
type = models.CharField(max_length=32)
|
||||
@@ -1380,6 +1489,9 @@ class ExportLog(models.Model, PermissionModelMixin):
|
||||
def __str__(self):
|
||||
return f"{self.created_at}:{self.type}"
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class BookmarkletImport(ExportModelOperationsMixin('bookmarklet_import'), models.Model, PermissionModelMixin):
|
||||
html = models.TextField()
|
||||
@@ -1390,6 +1502,9 @@ class BookmarkletImport(ExportModelOperationsMixin('bookmarklet_import'), models
|
||||
objects = ScopedManager(space='space')
|
||||
space = models.ForeignKey(Space, on_delete=models.CASCADE)
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
# field names used to configure search behavior - all data populated during data migration
|
||||
# other option is to use a MultiSelectField from https://github.com/goinnn/django-multiselectfield
|
||||
@@ -1457,6 +1572,9 @@ class UserFile(ExportModelOperationsMixin('user_files'), models.Model, Permissio
|
||||
def __str__(self):
|
||||
return f'{self.name} (#{self.id})'
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class Automation(ExportModelOperationsMixin('automations'), models.Model, PermissionModelMixin):
|
||||
FOOD_ALIAS = 'FOOD_ALIAS'
|
||||
@@ -1503,6 +1621,9 @@ class Automation(ExportModelOperationsMixin('automations'), models.Model, Permis
|
||||
objects = ScopedManager(space='space')
|
||||
space = models.ForeignKey(Space, on_delete=models.CASCADE)
|
||||
|
||||
class Meta:
|
||||
ordering = ('pk',)
|
||||
|
||||
|
||||
class CustomFilter(models.Model, PermissionModelMixin):
|
||||
RECIPE = 'RECIPE'
|
||||
@@ -1533,3 +1654,4 @@ class CustomFilter(models.Model, PermissionModelMixin):
|
||||
constraints = [
|
||||
models.UniqueConstraint(fields=['space', 'name'], name='cf_unique_name_per_space')
|
||||
]
|
||||
ordering = ('pk',)
|
||||
|
||||
@@ -24,8 +24,9 @@ from rest_framework.fields import IntegerField
|
||||
|
||||
from cookbook.helper.CustomStorageClass import CachedS3Boto3Storage
|
||||
from cookbook.helper.HelperFunctions import str2bool
|
||||
from cookbook.helper.ai_helper import get_monthly_token_usage
|
||||
from cookbook.helper.image_processing import is_file_type_allowed
|
||||
from cookbook.helper.permission_helper import above_space_limit
|
||||
from cookbook.helper.permission_helper import above_space_limit, create_space_for_user
|
||||
from cookbook.helper.property_helper import FoodPropertyHelper
|
||||
from cookbook.helper.shopping_helper import RecipeShoppingEditor
|
||||
from cookbook.helper.unit_conversion_helper import UnitConversionHelper
|
||||
@@ -36,7 +37,7 @@ from cookbook.models import (Automation, BookmarkletImport, Comment, CookLog, Cu
|
||||
ShareLink, ShoppingListEntry, ShoppingListRecipe, Space,
|
||||
Step, Storage, Supermarket, SupermarketCategory,
|
||||
SupermarketCategoryRelation, Sync, SyncLog, Unit, UnitConversion,
|
||||
UserFile, UserPreference, UserSpace, ViewLog, ConnectorConfig, SearchPreference, SearchFields)
|
||||
UserFile, UserPreference, UserSpace, ViewLog, ConnectorConfig, SearchPreference, SearchFields, AiLog, AiProvider)
|
||||
from cookbook.templatetags.custom_tags import markdown
|
||||
from recipes.settings import AWS_ENABLED, MEDIA_URL, EMAIL_HOST
|
||||
|
||||
@@ -150,19 +151,22 @@ class CustomOnHandField(serializers.Field):
        return instance

    def to_representation(self, obj):
        if not self.context["request"].user.is_authenticated:
        try:
            if not self.context["request"].user.is_authenticated:
                return []
            shared_users = []
            if c := caches['default'].get(f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', None):
                shared_users = c
            else:
                try:
                    shared_users = [x.id for x in list(self.context['request'].user.get_shopping_share())] + [self.context['request'].user.id]
                    caches['default'].set(f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', shared_users, timeout=5 * 60)
                    # TODO ugly hack that improves API performance significantly, should be done properly
                except AttributeError:  # Anonymous users (using share links) don't have shared users
                    pass
            return obj.onhand_users.filter(id__in=shared_users).exists()
        except AttributeError:
            return []
        shared_users = []
        if c := caches['default'].get(f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', None):
            shared_users = c
        else:
            try:
                shared_users = [x.id for x in list(self.context['request'].user.get_shopping_share())] + [self.context['request'].user.id]
                caches['default'].set(f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', shared_users, timeout=5 * 60)
                # TODO ugly hack that improves API performance significantly, should be done properly
            except AttributeError:  # Anonymous users (using share links) don't have shared users
                pass
        return obj.onhand_users.filter(id__in=shared_users).exists()

    def to_internal_value(self, data):
        return data

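Note on the hunk above: the refactor wraps the whole shared-user lookup in a single try/except AttributeError, so requests without a full user object (anonymous share links) fall back to an empty result instead of raising once request.space or get_shopping_share() is missing. A framework-free sketch of the same cache-or-compute pattern, assuming a plain dict in place of Django's caches['default'] and an illustrative RequestContext container:

    _cache = {}  # stand-in for caches['default']; the real code caches with timeout=5 * 60


    class RequestContext:
        """Hypothetical container bundling what the serializer reads off the request."""

        def __init__(self, space_id, user):
            self.space_id = space_id
            self.user = user  # anonymous users have no id and no get_shopping_share()


    def shared_user_ids(ctx):
        """Cached list of user ids sharing a shopping list, or [] for anonymous users."""
        try:
            key = f'shopping_shared_users_{ctx.space_id}_{ctx.user.id}'
            if (cached := _cache.get(key)) is not None:
                return cached
            ids = [u.id for u in ctx.user.get_shopping_share()] + [ctx.user.id]
            _cache[key] = ids
            return ids
        except AttributeError:
            # anonymous users (share links) behave as if nothing is shared or on hand
            return []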
@@ -325,12 +329,62 @@ class UserFileViewSerializer(serializers.ModelSerializer):
        read_only_fields = ('id', 'file', 'file_download', 'file_size_kb', 'preview', 'created_by', 'created_at')


class AiProviderSerializer(serializers.ModelSerializer):
    api_key = serializers.CharField(required=False, write_only=True)

    def create(self, validated_data):
        validated_data = self.handle_global_space_logic(validated_data)

        return super().create(validated_data)

    def update(self, instance, validated_data):
        validated_data = self.handle_global_space_logic(validated_data, instance=instance)
        return super().update(instance, validated_data)

    def handle_global_space_logic(self, validated_data, instance=None):
        """
        allow superusers to create AI providers without a space but make sure everyone else only uses their own space
        """
        if self.context['request'].user.is_superuser:
            if ('space' not in validated_data or not validated_data['space']):
                validated_data['space'] = None
            else:
                validated_data['space'] = self.context['request'].space
        else:
            if instance:
                validated_data['space'] = instance.space
            else:
                validated_data['space'] = self.context['request'].space

        if 'log_credit_cost' in validated_data and not self.context['request'].user.is_superuser:
            del validated_data['log_credit_cost']

        return validated_data

    class Meta:
        model = AiProvider
        fields = ('id', 'name', 'description', 'api_key', 'model_name', 'url', 'log_credit_cost', 'space', 'created_at', 'updated_at')
        read_only_fields = ('created_at', 'updated_at',)


class AiLogSerializer(serializers.ModelSerializer):
    ai_provider = AiProviderSerializer(read_only=True)

    class Meta:
        model = AiLog
        fields = ('id', 'ai_provider', 'function', 'credit_cost', 'credits_from_balance', 'input_tokens', 'output_tokens', 'start_time', 'end_time', 'created_by', 'created_at',
                  'updated_at')
        read_only_fields = ('__all__',)

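The handle_global_space_logic method above is what makes "global" providers (space=None) a superuser-only concept: regular users are always pinned to their own space, and updates never move a provider into another space. A condensed, framework-free sketch of that decision logic; the function name and the string space ids are illustrative only:

    def resolve_provider_space(is_superuser, requested_space, request_space, instance_space=None):
        """Mirror of the space assignment rules in handle_global_space_logic."""
        if is_superuser:
            # an empty/missing space means a global provider; anything else is pinned to the active space
            return request_space if requested_space else None
        if instance_space is not None:
            # updates by regular users keep the provider in its existing space
            return instance_space
        # creates by regular users always land in the requester's space
        return request_space


    assert resolve_provider_space(True, None, 'space_1') is None
    assert resolve_provider_space(True, 'space_2', 'space_1') == 'space_1'
    assert resolve_provider_space(False, 'space_2', 'space_1') == 'space_1'
    assert resolve_provider_space(False, None, 'space_1', instance_space='space_1') == 'space_1'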
class SpaceSerializer(WritableNestedModelSerializer):
    created_by = UserSerializer(read_only=True)
    user_count = serializers.SerializerMethodField('get_user_count')
    recipe_count = serializers.SerializerMethodField('get_recipe_count')
    file_size_mb = serializers.SerializerMethodField('get_file_size_mb')
    food_inherit = FoodInheritFieldSerializer(many=True)
    user_count = serializers.SerializerMethodField('get_user_count', read_only=True)
    recipe_count = serializers.SerializerMethodField('get_recipe_count', read_only=True)
    file_size_mb = serializers.SerializerMethodField('get_file_size_mb', read_only=True)
    ai_monthly_credits_used = serializers.SerializerMethodField('get_ai_monthly_credits_used', read_only=True)
    ai_default_provider = AiProviderSerializer(required=False, allow_null=True)
    food_inherit = FoodInheritFieldSerializer(many=True, required=False)
    image = UserFileViewSerializer(required=False, many=False, allow_null=True)
    nav_logo = UserFileViewSerializer(required=False, many=False, allow_null=True)
    custom_space_theme = UserFileViewSerializer(required=False, many=False, allow_null=True)
@@ -350,6 +404,10 @@ class SpaceSerializer(WritableNestedModelSerializer):
    def get_recipe_count(self, obj):
        return Recipe.objects.filter(space=obj).count()

    @extend_schema_field(int)
    def get_ai_monthly_credits_used(self, obj):
        return get_monthly_token_usage(obj)

    @extend_schema_field(float)
    def get_file_size_mb(self, obj):
        try:
@@ -358,7 +416,36 @@ class SpaceSerializer(WritableNestedModelSerializer):
            return 0

    def create(self, validated_data):
        raise ValidationError('Cannot create using this endpoint')
        if Space.objects.filter(created_by=self.context['request'].user).count() >= self.context['request'].user.userpreference.max_owned_spaces:
            raise serializers.ValidationError(
                _('You have reached the maximum amount of spaces that can be owned by you.') + f' ({self.context["request"].user.userpreference.max_owned_spaces})')

        name = None
        if 'name' in validated_data:
            name = validated_data['name']
        user_space = create_space_for_user(self.context['request'].user, name)
        return user_space.space

    def update(self, instance, validated_data):
        validated_data = self.filter_superuser_parameters(validated_data)

        if 'name' in validated_data:
            if Space.objects.filter(Q(name=validated_data['name']), ~Q(pk=instance.pk)).exists():
                raise ValidationError(_('Space Name must be unique.'))

        return super().update(instance, validated_data)

    def filter_superuser_parameters(self, validated_data):
        if 'ai_enabled' in validated_data and not self.context['request'].user.is_superuser:
            del validated_data['ai_enabled']

        if 'ai_credits_monthly' in validated_data and not self.context['request'].user.is_superuser:
            del validated_data['ai_credits_monthly']

        if 'ai_credits_balance' in validated_data and not self.context['request'].user.is_superuser:
            del validated_data['ai_credits_balance']

        return validated_data

    class Meta:
        model = Space
@@ -366,10 +453,11 @@ class SpaceSerializer(WritableNestedModelSerializer):
            'id', 'name', 'created_by', 'created_at', 'message', 'max_recipes', 'max_file_storage_mb', 'max_users',
            'allow_sharing', 'demo', 'food_inherit', 'user_count', 'recipe_count', 'file_size_mb',
            'image', 'nav_logo', 'space_theme', 'custom_space_theme', 'nav_bg_color', 'nav_text_color',
            'logo_color_32', 'logo_color_128', 'logo_color_144', 'logo_color_180', 'logo_color_192', 'logo_color_512', 'logo_color_svg',)
            'logo_color_32', 'logo_color_128', 'logo_color_144', 'logo_color_180', 'logo_color_192', 'logo_color_512', 'logo_color_svg', 'ai_credits_monthly',
            'ai_credits_balance', 'ai_monthly_credits_used', 'ai_enabled', 'ai_default_provider', 'space_setup_completed')
        read_only_fields = (
            'id', 'created_by', 'created_at', 'max_recipes', 'max_file_storage_mb', 'max_users', 'allow_sharing',
            'demo',)
            'demo', 'ai_monthly_credits_used')


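filter_superuser_parameters above silently drops the AI-related fields from the payload whenever the caller is not a superuser, so a regular admin can PATCH the space without changing its AI quota. A framework-free sketch of the same idea; the field list matches the serializer, everything else is illustrative:

    SUPERUSER_ONLY_FIELDS = ('ai_enabled', 'ai_credits_monthly', 'ai_credits_balance')


    def strip_superuser_fields(validated_data, is_superuser):
        """Return the payload without privileged fields unless the caller is a superuser."""
        if is_superuser:
            return dict(validated_data)
        return {k: v for k, v in validated_data.items() if k not in SUPERUSER_ONLY_FIELDS}


    assert strip_superuser_fields({'name': 'x', 'ai_enabled': True}, is_superuser=False) == {'name': 'x'}
    assert strip_superuser_fields({'ai_credits_monthly': 50}, is_superuser=True) == {'ai_credits_monthly': 50}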
class UserSpaceSerializer(WritableNestedModelSerializer):
@@ -758,28 +846,31 @@ class FoodSerializer(UniqueFieldsMixin, WritableNestedModelSerializer, ExtendedR

    @extend_schema_field(bool)
    def get_substitute_onhand(self, obj):
        if not self.context["request"].user.is_authenticated:
        try:
            if not self.context["request"].user.is_authenticated:
                return []
            shared_users = []
            if c := caches['default'].get(
                    f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', None):
                shared_users = c
            else:
                try:
                    shared_users = [x.id for x in list(self.context['request'].user.get_shopping_share())] + [
                        self.context['request'].user.id]
                    caches['default'].set(
                        f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}',
                        shared_users, timeout=5 * 60)
                    # TODO ugly hack that improves API performance significantly, should be done properly
                except AttributeError:  # Anonymous users (using share links) don't have shared users
                    pass
            filter = Q(id__in=obj.substitute.all())
            if obj.substitute_siblings:
                filter |= Q(path__startswith=obj.path[:Food.steplen * (obj.depth - 1)], depth=obj.depth)
            if obj.substitute_children:
                filter |= Q(path__startswith=obj.path, depth__gt=obj.depth)
            return Food.objects.filter(filter).filter(onhand_users__id__in=shared_users).exists()
        except AttributeError:
            return []
        shared_users = []
        if c := caches['default'].get(
                f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', None):
            shared_users = c
        else:
            try:
                shared_users = [x.id for x in list(self.context['request'].user.get_shopping_share())] + [
                    self.context['request'].user.id]
                caches['default'].set(
                    f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}',
                    shared_users, timeout=5 * 60)
                # TODO ugly hack that improves API performance significantly, should be done properly
            except AttributeError:  # Anonymous users (using share links) don't have shared users
                pass
        filter = Q(id__in=obj.substitute.all())
        if obj.substitute_siblings:
            filter |= Q(path__startswith=obj.path[:Food.steplen * (obj.depth - 1)], depth=obj.depth)
        if obj.substitute_children:
            filter |= Q(path__startswith=obj.path, depth__gt=obj.depth)
        return Food.objects.filter(filter).filter(onhand_users__id__in=shared_users).exists()

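The substitute_siblings / substitute_children branches above lean on django-treebeard style materialized paths: siblings share the parent's fixed-width path prefix at the same depth, descendants extend the node's own path at a greater depth. A small standalone illustration of those two prefix checks; STEPLEN and the example paths are made up, the real segment width comes from Food.steplen:

    STEPLEN = 4  # assumed fixed-width length of one path segment


    def is_sibling(node_path, node_depth, other_path, other_depth):
        """Siblings share everything except the last path segment and sit at the same depth."""
        parent_prefix = node_path[:STEPLEN * (node_depth - 1)]
        return other_depth == node_depth and other_path.startswith(parent_prefix)


    def is_descendant(node_path, node_depth, other_path, other_depth):
        """Descendants extend the node's own path and sit deeper in the tree."""
        return other_depth > node_depth and other_path.startswith(node_path)


    assert is_sibling('00010001', 2, '00010002', 2)
    assert not is_sibling('00010001', 2, '00020001', 2)
    assert is_descendant('0001', 1, '00010001', 2)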
    def create(self, validated_data):
        name = validated_data['name'].strip()
@@ -1038,7 +1129,7 @@ class RecipeOverviewSerializer(RecipeBaseSerializer):
        fields = (
            'id', 'name', 'description', 'image', 'keywords', 'working_time',
            'waiting_time', 'created_by', 'created_at', 'updated_at',
            'internal', 'private','servings', 'servings_text', 'rating', 'last_cooked', 'new', 'recent'
            'internal', 'private', 'servings', 'servings_text', 'rating', 'last_cooked', 'new', 'recent'
        )
        # TODO having these readonly fields makes "RecipeOverview.ts" (API Client) not generate the RecipeOverviewToJSON second else block which leads to errors when using the api
        # TODO find a solution (custom schema?) to have these fields readonly (to save performance) and generate a proper client (two serializers would probably do the trick)

@@ -1134,6 +1225,35 @@ class RecipeBatchUpdateSerializer(serializers.Serializer):
    clear_description = serializers.BooleanField(required=False, allow_null=True)


class FoodBatchUpdateSerializer(serializers.Serializer):
    foods = serializers.ListField(child=serializers.IntegerField())

    category = serializers.IntegerField(required=False, allow_null=True)

    substitute_add = serializers.ListField(child=serializers.IntegerField())
    substitute_remove = serializers.ListField(child=serializers.IntegerField())
    substitute_set = serializers.ListField(child=serializers.IntegerField())
    substitute_remove_all = serializers.BooleanField(default=False)

    inherit_fields_add = serializers.ListField(child=serializers.IntegerField())
    inherit_fields_remove = serializers.ListField(child=serializers.IntegerField())
    inherit_fields_set = serializers.ListField(child=serializers.IntegerField())
    inherit_fields_remove_all = serializers.BooleanField(default=False)

    child_inherit_fields_add = serializers.ListField(child=serializers.IntegerField())
    child_inherit_fields_remove = serializers.ListField(child=serializers.IntegerField())
    child_inherit_fields_set = serializers.ListField(child=serializers.IntegerField())
    child_inherit_fields_remove_all = serializers.BooleanField(default=False)

    substitute_children = serializers.BooleanField(required=False, allow_null=True)
    substitute_siblings = serializers.BooleanField(required=False, allow_null=True)
    ignore_shopping = serializers.BooleanField(required=False, allow_null=True)
    on_hand = serializers.BooleanField(required=False, allow_null=True)

    parent_remove = serializers.BooleanField(required=False, allow_null=True)
    parent_set = serializers.IntegerField(required=False, allow_null=True)

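FoodBatchUpdateSerializer above is a pure input serializer for the bulk food endpoint: the ids of the foods to touch, plus add/remove/set/remove_all variants for the relation fields and optional boolean and parent switches. A hypothetical request body for illustration; the ids are made up, and which field combinations the view actually honours is decided in the view, not shown in this hunk:

    payload = {
        'foods': [1, 2, 3],                    # foods to modify
        'category': 7,                         # supermarket category to assign
        'substitute_add': [4],
        'substitute_remove': [],
        'substitute_set': [],
        'substitute_remove_all': False,
        'inherit_fields_add': [],
        'inherit_fields_remove': [],
        'inherit_fields_set': [5],
        'inherit_fields_remove_all': False,
        'child_inherit_fields_add': [],
        'child_inherit_fields_remove': [],
        'child_inherit_fields_set': [],
        'child_inherit_fields_remove_all': True,
        'substitute_children': False,
        'substitute_siblings': False,
        'ignore_shopping': False,
        'on_hand': True,
        'parent_remove': False,
        'parent_set': 9,
    }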
class CustomFilterSerializer(SpacedModelSerializer, WritableNestedModelSerializer):
    shared = UserSerializer(many=True, required=False)

@@ -1564,7 +1684,6 @@ class ServerSettingsSerializer(serializers.Serializer):
    # TODO add all other relevant settings including path/url related ones?
    shopping_min_autosync_interval = serializers.CharField()
    enable_pdf_export = serializers.BooleanField()
    enable_ai_import = serializers.BooleanField()
    disable_external_connectors = serializers.BooleanField()
    terms_url = serializers.CharField()
    privacy_url = serializers.CharField()
@@ -1605,6 +1724,11 @@ class FdcQuerySerializer(serializers.Serializer):
    foods = FdcQueryFoodsSerializer(many=True)


class GenericModelReferenceSerializer(serializers.Serializer):
    id = serializers.IntegerField()
    model = serializers.CharField()
    name = serializers.CharField()


# Export/Import Serializers

class KeywordExportSerializer(KeywordSerializer):
@@ -1788,6 +1912,7 @@ class RecipeFromSourceResponseSerializer(serializers.Serializer):


class AiImportSerializer(serializers.Serializer):
    ai_provider_id = serializers.IntegerField()
    file = serializers.FileField(allow_null=True)
    text = serializers.CharField(allow_null=True, allow_blank=True)
    recipe_id = serializers.CharField(allow_null=True, allow_blank=True)

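AiImportSerializer above only validates the request shape for the AI import endpoint; provider selection and the actual import happen elsewhere. A hypothetical payload, here assuming the import source is passed as plain text with the other fields left empty:

    payload = {
        'ai_provider_id': 1,                  # pk of an AiProvider visible to the space
        'file': None,                         # alternatively an uploaded file
        'text': '1 cup flour\n2 eggs\n...',   # raw recipe text to hand to the provider
        'recipe_id': '',                      # optional, interpreted by the import view
    }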
@@ -51,11 +51,6 @@
            {# {% endif %}#}
            <p class="card-text"><small
                    class="text-muted">{% trans 'Owner' %}: {{ us.space.created_by }}</small>
            {% if us.space.created_by != us.user %}
                <p class="card-text"><small
                        class="text-muted"><a
                        href="{% url 'delete_user_space' us.pk %}">{% trans 'Leave Space' %}</a></small>
            {% endif %}
            <!--TODO add direct link to management page -->
            </p>
        </div>

cookbook/tests/api/test_api_ai_provider.py (new file, 168 lines)
@@ -0,0 +1,168 @@
import json

import pytest
from django.urls import reverse
from django_scopes import scopes_disabled

from cookbook.models import MealType, PropertyType, AiProvider

LIST_URL = 'api:aiprovider-list'
DETAIL_URL = 'api:aiprovider-detail'


@pytest.fixture()
def obj_1(space_1, a1_s1):
    return AiProvider.objects.get_or_create(name='test_1', space=space_1)[0]


@pytest.fixture
def obj_2(space_1, a1_s1):
    return AiProvider.objects.get_or_create(name='test_2', space=None)[0]


@pytest.mark.parametrize("arg", [
    ['a_u', 403],
    ['g1_s1', 403],
    ['u1_s1', 200],
    ['a1_s1', 200],
])
def test_list_permission(arg, request):
    c = request.getfixturevalue(arg[0])
    assert c.get(reverse(LIST_URL)).status_code == arg[1]


def test_list_space(obj_1, obj_2, u1_s1, u1_s2, space_2):
    assert json.loads(u1_s1.get(reverse(LIST_URL)).content)['count'] == 2
    assert json.loads(u1_s2.get(reverse(LIST_URL)).content)['count'] == 1

    obj_1.space = space_2
    obj_1.save()

    assert json.loads(u1_s1.get(reverse(LIST_URL)).content)['count'] == 1
    assert json.loads(u1_s2.get(reverse(LIST_URL)).content)['count'] == 2

    obj_1.space = None
    obj_1.save()

    assert json.loads(u1_s1.get(reverse(LIST_URL)).content)['count'] == 2
    assert json.loads(u1_s2.get(reverse(LIST_URL)).content)['count'] == 2


@pytest.mark.parametrize("arg", [
    ['a_u', 403],
    ['g1_s1', 403],
    ['u1_s1', 403],
    ['a1_s1', 200],
    ['g1_s2', 403],
    ['u1_s2', 403],
    ['a1_s2', 404],
])
def test_update(arg, request, obj_1):
    c = request.getfixturevalue(arg[0])
    r = c.patch(
        reverse(
            DETAIL_URL,
            args={obj_1.id}
        ),
        {'name': 'new'},
        content_type='application/json'
    )
    response = json.loads(r.content)
    assert r.status_code == arg[1]
    if r.status_code == 200:
        assert response['name'] == 'new'


@pytest.mark.parametrize("arg", [
    ['a_u', 403],
    ['g1_s1', 403],
    ['u1_s1', 403],
    ['a1_s1', 403],
    ['g1_s2', 403],
    ['u1_s2', 403],
    ['a1_s2', 403],
    ['s1_s1', 200],
])
def test_update_global(arg, request, obj_2):
    c = request.getfixturevalue(arg[0])
    r = c.patch(
        reverse(
            DETAIL_URL,
            args={obj_2.id}
        ),
        {'name': 'new'},
        content_type='application/json'
    )
    response = json.loads(r.content)
    assert r.status_code == arg[1]
    if r.status_code == 200:
        assert response['name'] == 'new'


@pytest.mark.parametrize("arg", [
    ['a_u', 403],
    ['g1_s1', 403],
    ['u1_s1', 403],
    ['a1_s1', 201],
])
def test_add(arg, request, u1_s2):
    c = request.getfixturevalue(arg[0])
    r = c.post(
        reverse(LIST_URL),
        {'name': 'test', 'api_key': 'test', 'model_name': 'test'},
        content_type='application/json'
    )
    response = json.loads(r.content)
    assert r.status_code == arg[1]
    if r.status_code == 201:
        assert response['name'] == 'test'
        r = c.get(reverse(DETAIL_URL, args={response['id']}))
        assert r.status_code == 200
        r = u1_s2.get(reverse(DETAIL_URL, args={response['id']}))
        assert r.status_code == 404


def test_delete(a1_s1, a1_s2, obj_1):
    # admins cannot delete foreign space providers
    r = a1_s2.delete(
        reverse(
            DETAIL_URL,
            args={obj_1.id}
        )
    )
    assert r.status_code == 404

    # admins can delete their space providers
    r = a1_s1.delete(
        reverse(
            DETAIL_URL,
            args={obj_1.id}
        )
    )

    assert r.status_code == 204
    with scopes_disabled():
        assert AiProvider.objects.count() == 0

def test_delete_global(a1_s1, s1_s1, obj_2):
    # admins can't delete global providers
    r = a1_s1.delete(
        reverse(
            DETAIL_URL,
            args={obj_2.id}
        )
    )
    assert r.status_code == 403

    # superusers can delete global providers
    r = s1_s1.delete(
        reverse(
            DETAIL_URL,
            args={obj_2.id}
        )
    )

    assert r.status_code == 204
    with scopes_disabled():
        assert AiProvider.objects.count() == 0

@@ -236,42 +236,6 @@ def test_delete(u1_s1, u1_s2, obj_1, obj_tree_1):
    assert Food.find_problems() == ([], [], [], [], [])


def test_integrity(u1_s1, recipe_1_s1):
    with scopes_disabled():
        assert Food.objects.count() == 10
        assert Ingredient.objects.count() == 10
        f_1 = Food.objects.first()

    # deleting food will fail because food is part of recipe
    r = u1_s1.delete(
        reverse(
            DETAIL_URL,
            args={f_1.id}
        )
    )
    assert r.status_code == 403

    with scopes_disabled():
        i_1 = f_1.ingredient_set.first()
        # remove Ingredient that references Food from recipe step
        i_1.step_set.first().ingredients.remove(i_1)
        assert Food.objects.count() == 10
        assert Ingredient.objects.count() == 10

    # deleting food will succeed because its not part of recipe and delete will cascade to Ingredient
    r = u1_s1.delete(
        reverse(
            DETAIL_URL,
            args={f_1.id}
        )
    )
    assert r.status_code == 204

    with scopes_disabled():
        assert Food.objects.count() == 9
        assert Ingredient.objects.count() == 9


def test_move(u1_s1, obj_tree_1, obj_2, obj_3, space_1):
    with scope(space=space_1):
        # for some reason the 'path' attribute changes between the factory and the test when using both obj_tree and obj

@@ -99,19 +99,19 @@ def test_list_filter(obj_1, u1_s1):

    response = json.loads(
        u1_s1.get(
            f'{reverse(LIST_URL)}?from_date={(timezone.now() + timedelta(days=2)).strftime("%Y-%m-%d")}'
            f'{reverse(LIST_URL)}?from_date={(timezone.localtime(timezone.now()) + timedelta(days=1)).strftime("%Y-%m-%d")}'
        ).content)['results']
    assert len(response) == 0

    response = json.loads(
        u1_s1.get(
            f'{reverse(LIST_URL)}?to_date={(timezone.now() - timedelta(days=2)).strftime("%Y-%m-%d")}'
            f'{reverse(LIST_URL)}?to_date={(timezone.localtime(timezone.now()) - timedelta(days=1)).strftime("%Y-%m-%d")}'
        ).content)['results']
    assert len(response) == 0

    response = json.loads(
        u1_s1.get(
            f'{reverse(LIST_URL)}?from_date={(timezone.now() - timedelta(days=2)).strftime("%Y-%m-%d")}&to_date={(timezone.now() + timedelta(days=2)).strftime("%Y-%m-%d")}'
            f'{reverse(LIST_URL)}?from_date={(timezone.localtime(timezone.now()) - timedelta(days=1)).strftime("%Y-%m-%d")}&to_date={(timezone.localtime(timezone.now()) + timedelta(days=1)).strftime("%Y-%m-%d")}'
        ).content)['results']
    assert len(response) == 1

@@ -153,8 +153,8 @@ def test_add(arg, request, u1_s2, recipe_1_s1, meal_type):
            'id': meal_type.id,
            'name': meal_type.name
        },
        'from_date': (timezone.now()).strftime("%Y-%m-%d"),
        'to_date': (timezone.now()).strftime("%Y-%m-%d"),
        'from_date': (timezone.localtime(timezone.now())).strftime("%Y-%m-%d"),
        'to_date': (timezone.localtime(timezone.now())).strftime("%Y-%m-%d"),
        'servings': 1,
        'title': 'test',
        'shared': []
@@ -196,8 +196,8 @@ def test_add_with_shopping(u1_s1, meal_type):
            'id': meal_type.id,
            'name': meal_type.name
        },
        'from_date': (timezone.now()).strftime("%Y-%m-%d"),
        'to_date': (timezone.now()).strftime("%Y-%m-%d"),
        'from_date': (timezone.localtime(timezone.now())).strftime("%Y-%m-%d"),
        'to_date': (timezone.localtime(timezone.now())).strftime("%Y-%m-%d"),
        'servings': 1,
        'title': 'test',
        'shared': [],
@@ -212,13 +212,13 @@ def test_add_with_shopping(u1_s1, meal_type):

@pytest.mark.parametrize("arg", [
    ['', 2],
    [f'?from_date={timezone.now().strftime("%Y-%m-%d")}', 1],
    [f'?from_date={timezone.localtime(timezone.now()).strftime("%Y-%m-%d")}', 1],
    [
        f'?to_date={(timezone.now() - timedelta(days=1)).strftime("%Y-%m-%d")}',
        f'?to_date={(timezone.localtime(timezone.now()) - timedelta(days=1)).strftime("%Y-%m-%d")}',
        1
    ],
    [
        f'?from_date={(timezone.now() + timedelta(days=2)).strftime("%Y-%m-%d")}&to_date={(timezone.now() + timedelta(days=2)).strftime("%Y-%m-%d")}',
        f'?from_date={(timezone.localtime(timezone.now()) + timedelta(days=1)).strftime("%Y-%m-%d")}&to_date={(timezone.localtime(timezone.now()) + timedelta(days=1)).strftime("%Y-%m-%d")}',
        0
    ],
])

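The change running through these hunks swaps timezone.now() (an aware UTC datetime) for timezone.localtime(timezone.now()) before formatting dates, and narrows the offsets from two days to one. Presumably this is because close to midnight the UTC calendar date can differ from the server's local date, so a window built from the UTC date can miss a meal plan entry created on the local "today". A self-contained illustration of that date mismatch:

    from datetime import datetime, timezone
    from zoneinfo import ZoneInfo

    utc_now = datetime(2025, 1, 1, 23, 30, tzinfo=timezone.utc)    # 23:30 UTC
    local_now = utc_now.astimezone(ZoneInfo('Europe/Berlin'))      # already Jan 2nd locally

    assert utc_now.strftime('%Y-%m-%d') == '2025-01-01'
    assert local_now.strftime('%Y-%m-%d') == '2025-01-02'          # different calendar day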
@@ -7,6 +7,7 @@ from django.urls import reverse
from django_scopes import scopes_disabled

from cookbook.models import UserSpace
from recipes import settings

LIST_URL = 'api:space-list'
DETAIL_URL = 'api:space-detail'
@@ -45,7 +46,6 @@ def test_list_multiple(u1_s1, space_1, space_2):
    assert u1_response['id'] == space_1.id


@pytest.mark.parametrize("arg", [
    ['a_u', 403],
    ['g1_s1', 403],
@@ -70,9 +70,9 @@ def test_update(arg, request, space_1, a1_s1):

@pytest.mark.parametrize("arg", [
    ['a_u', 403],
    ['g1_s1', 403],
    ['u1_s1', 403],
    ['a1_s1', 405],
    ['g1_s1', 201],
    ['u1_s1', 201],
    ['a1_s1', 201],
])
def test_add(arg, request, u1_s2):
    c = request.getfixturevalue(arg[0])

@@ -90,3 +90,59 @@ def test_delete(u1_s1, u1_s2, a1_s1, space_1):
    # even the space owner cannot delete his space over the api (this might change later but for now it's only available in the UI)
    r = a1_s1.delete(reverse(DETAIL_URL, args={space_1.id}))
    assert r.status_code == 405


def test_superuser_parameters(space_1, a1_s1, s1_s1):
    # ------- test as normal user -------
    response = a1_s1.post(reverse(LIST_URL), {'name': 'test', 'ai_enabled': not settings.SPACE_AI_ENABLED, 'ai_credits_monthly': settings.SPACE_AI_CREDITS_MONTHLY + 100, 'ai_credits_balance': 100},
                          content_type='application/json')

    assert response.status_code == 201
    response = json.loads(response.content)
    assert response['ai_enabled'] == settings.SPACE_AI_ENABLED
    assert response['ai_credits_monthly'] == settings.SPACE_AI_CREDITS_MONTHLY
    assert response['ai_credits_balance'] == 0

    space_1.created_by = auth.get_user(a1_s1)
    space_1.ai_enabled = False
    space_1.ai_credits_monthly = 0
    space_1.ai_credits_balance = 0
    space_1.save()

    response = a1_s1.patch(reverse(DETAIL_URL, args={space_1.id}), {'ai_enabled': True, 'ai_credits_monthly': 100, 'ai_credits_balance': 100},
                           content_type='application/json')

    assert response.status_code == 200

    space_1.refresh_from_db()
    assert space_1.ai_enabled == False
    assert space_1.ai_credits_monthly == 0
    assert space_1.ai_credits_balance == 0

    # ------- test as superuser -------

    response = s1_s1.post(reverse(LIST_URL),
                          {'name': 'test', 'ai_enabled': not settings.SPACE_AI_ENABLED, 'ai_credits_monthly': settings.SPACE_AI_CREDITS_MONTHLY + 100, 'ai_credits_balance': 100},
                          content_type='application/json')

    assert response.status_code == 201
    response = json.loads(response.content)
    assert response['ai_enabled'] == settings.SPACE_AI_ENABLED
    assert response['ai_credits_monthly'] == settings.SPACE_AI_CREDITS_MONTHLY
    assert response['ai_credits_balance'] == 0

    space_1.created_by = auth.get_user(s1_s1)
    space_1.ai_enabled = False
    space_1.ai_credits_monthly = 0
    space_1.ai_credits_balance = 0
    space_1.save()

    response = s1_s1.patch(reverse(DETAIL_URL, args={space_1.id}), {'ai_enabled': True, 'ai_credits_monthly': 100, 'ai_credits_balance': 100},
                           content_type='application/json')

    assert response.status_code == 200

    space_1.refresh_from_db()
    assert space_1.ai_enabled == True
    assert space_1.ai_credits_monthly == 100
    assert space_1.ai_credits_balance == 100
