Compare commits

..

17 Commits

Author SHA1 Message Date
vabene1111 cdd700d2e6 Merge branch 'develop' into beta 2025-07-29 17:37:48 +02:00
vabene1111 ad6fe5fa4d Merge branch 'develop' into beta 2025-07-18 15:48:02 +02:00
vabene1111 ac31c112f3 Merge branch 'develop' into beta 2025-07-11 21:59:18 +02:00
vabene1111 0104b600cc Merge branch 'develop' into beta 2025-07-07 18:28:50 +02:00
vabene1111 7baad85112 Merge branch 'develop' into beta 2025-06-22 10:35:01 +02:00
vabene1111 4b0bfa9a85 Merge branch 'master' into beta 2025-06-22 10:29:43 +02:00
vabene1111 5e7c75ef68 Merge branch 'develop' into beta 2025-01-18 09:24:08 +01:00
vabene1111 954a35bea2 Merge branch 'develop' into beta 2025-01-01 08:17:32 +01:00
vabene1111 88347d44c8 Merge branch 'beta' of https://github.com/TandoorRecipes/recipes into beta 2024-11-23 21:56:13 +01:00
vabene1111 2c13e76fbb Merge branch 'develop' into beta 2024-03-05 08:54:58 +01:00
vabene1111 362f634828 Merge branch 'develop' into beta 2024-03-02 07:41:28 +01:00
vabene1111 2fb968cfd3 Merge branch 'develop' into beta 2024-03-01 07:42:28 +01:00
vabene1111 4d3dab6edd Merge branch 'develop' into beta 2024-02-28 17:21:22 +01:00
vabene1111 8f1b593ad1 Merge branch 'develop' into beta 2024-02-28 17:19:15 +01:00
vabene1111 1002f0d61f Merge branch 'develop' into beta 2024-02-28 17:12:35 +01:00
vabene1111 20cb218688 Merge branch 'develop' into beta 2024-02-26 16:29:16 +01:00
vabene1111 bba44b0c1e Merge branch 'develop' into beta 2024-02-20 07:54:28 +01:00
346 changed files with 41203 additions and 53372 deletions

View File

@@ -1,7 +1,12 @@
FROM python:3.13-alpine3.22
FROM python:3.10-alpine3.18
#Install all dependencies.
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev openldap git yarn libgcc libstdc++ nginx tini envsubst nodejs npm
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev openldap git yarn
# Fix libxml error from xmlsec https://github.com/xmlsec/python-xmlsec/issues/257#issuecomment-1738620862
RUN echo "https://dl-cdn.alpinelinux.org/alpine/v3.15/community/" | tee -a /etc/apk/repositories
RUN echo "https://dl-cdn.alpinelinux.org/alpine/v3.15/main" | tee -a /etc/apk/repositories
RUN apk add --no-cache libxml2-dev=2.9.14-r2 xmlsec-dev=1.2.33-r0
#Print all logs without buffering it.
ENV PYTHONUNBUFFERED 1
@@ -19,10 +24,8 @@ RUN \
if [ `apk --print-arch` = "armv7" ]; then \
printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf ; \
fi
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev xmlsec-dev xmlsec build-base g++ curl rust && \
python -m pip install --upgrade pip && \
pip debug -v && \
pip install wheel==0.45.1 && \
pip install setuptools_rust==1.10.2 && \
pip install -r /tmp/pip-tmp/requirements.txt --no-cache-dir &&\
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev && \
echo -n "INPUT ( libldap.so )" > /usr/lib/libldap_r.so && \
pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt && \
rm -rf /tmp/pip-tmp && \
apk --purge del .build-deps

View File

@@ -21,7 +21,7 @@ jobs:
suffix: ""
continue-on-error: false
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
- name: Get version number
id: get_version
@@ -74,8 +74,9 @@ jobs:
flavor: |
latest=false
suffix=${{ matrix.suffix }}
# disable latest for tagged releases while in beta
# type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') }}
tags: |
type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') }}
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
@@ -93,34 +94,34 @@ jobs:
cache-from: type=gha
cache-to: type=gha,mode=max
notify-stable:
name: Notify Stable
runs-on: ubuntu-latest
needs: build-container
if: startsWith(github.ref, 'refs/tags/')
steps:
- name: Set tag name
run: |
# Strip "refs/tags/" prefix
echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
# Send stable discord notification
- name: Discord notification
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_RELEASE_WEBHOOK }}
uses: Ilshidur/action-discord@0.4.0
with:
args: '🚀 Version {{ VERSION }} of tandoor has been released 🥳 Check it out https://github.com/vabene1111/recipes/releases/tag/{{ VERSION }}'
# notify-stable:
# name: Notify Stable
# runs-on: ubuntu-latest
# needs: build-container
# if: startsWith(github.ref, 'refs/tags/')
# steps:
# - name: Set tag name
# run: |
# # Strip "refs/tags/" prefix
# echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
# # Send stable discord notification
# - name: Discord notification
# env:
# DISCORD_WEBHOOK: ${{ secrets.DISCORD_RELEASE_WEBHOOK }}
# uses: Ilshidur/action-discord@0.3.2
# with:
# args: '🚀 Version {{ VERSION }} of tandoor has been released 🥳 Check it out https://github.com/vabene1111/recipes/releases/tag/{{ VERSION }}'
notify-beta:
name: Notify Beta
runs-on: ubuntu-latest
needs: build-container
if: github.ref == 'refs/heads/beta'
if: startsWith(github.ref, 'refs/tags/')
steps:
# Send beta discord notification
- name: Discord notification
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_BETA_WEBHOOK }}
uses: Ilshidur/action-discord@0.4.0
uses: Ilshidur/action-discord@0.3.2
with:
args: '🚀 The Tandoor 2 Image has been updated! 🥳'

View File

@@ -12,8 +12,8 @@ jobs:
python-version: ["3.12"]
node-version: ["22"]
steps:
- uses: actions/checkout@v5
- uses: awalsh128/cache-apt-pkgs-action@v1.5.3
- uses: actions/checkout@v4
- uses: awalsh128/cache-apt-pkgs-action@v1.4.3
with:
packages: libsasl2-dev python3-dev libxml2-dev libxmlsec1-dev libxslt-dev libxmlsec1-openssl libxslt-dev libldap2-dev libssl-dev gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev xmlsec-dev xmlsec build-base g++ curl
version: 1.0

View File

@@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v4
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head.

View File

@@ -12,7 +12,7 @@ jobs:
if: github.repository_owner == 'TandoorRecipes' && ${{ github.event.workflow_run.conclusion == 'success' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: 3.x

1 .gitignore vendored
View File

@@ -91,4 +91,3 @@ cookbook/static/vue3
vue3/node_modules
cookbook/tests/other/docs/reports/tests/tests.html
cookbook/tests/other/docs/reports/tests/pytest.xml
vue3/src/plugins

62 .vscode/tasks.json vendored
View File

@@ -14,16 +14,28 @@
},
{
"label": "Setup Dev Server",
"dependsOn": ["Run Migrations"]
"dependsOn": ["Run Migrations", "Yarn Build"]
},
{
"label": "Run Dev Server",
"type": "shell",
"type": "shell",
"dependsOn": ["Setup Dev Server"],
"command": "DEBUG=1 python3 manage.py runserver"
"command": "python3 manage.py runserver"
},
{
"label": "Yarn Install",
"dependsOn": ["Yarn Install - Vue", "Yarn Install - Vue3"]
},
{
"label": "Yarn Install - Vue",
"type": "shell",
"command": "yarn install --force",
"options": {
"cwd": "${workspaceFolder}/vue"
}
},
{
"label": "Yarn Install - Vue3",
"type": "shell",
"command": "yarn install --force",
"options": {
@@ -32,6 +44,18 @@
},
{
"label": "Generate API",
"dependsOn": ["Generate API - Vue", "Generate API - Vue3"]
},
{
"label": "Generate API - Vue",
"type": "shell",
"command": "openapi-generator-cli generate -g typescript-axios -i http://127.0.0.1:8000/openapi/",
"options": {
"cwd": "${workspaceFolder}/vue/src/utils/openapi"
}
},
{
"label": "Generate API - Vue3",
"type": "shell",
"command": "openapi-generator-cli generate -g typescript-fetch -i http://127.0.0.1:8000/openapi/",
"options": {
@@ -39,19 +63,43 @@
}
},
{
"label": "Yarn Dev",
"label": "Yarn Serve",
"type": "shell",
"command": "yarn dev",
"dependsOn": ["Yarn Install"],
"command": "yarn serve",
"dependsOn": ["Yarn Install - Vue"],
"options": {
"cwd": "${workspaceFolder}/vue"
}
},
{
"label": "Vite Serve",
"type": "shell",
"command": "vite",
"dependsOn": ["Yarn Install - Vue3"],
"options": {
"cwd": "${workspaceFolder}/vue3"
}
},
{
"label": "Yarn Build",
"dependsOn": ["Yarn Build - Vue", "Vite Build - Vue3"],
"group": "build"
},
{
"label": "Yarn Build - Vue",
"type": "shell",
"command": "yarn build",
"dependsOn": ["Yarn Install"],
"dependsOn": ["Yarn Install - Vue"],
"options": {
"cwd": "${workspaceFolder}/vue"
},
"group": "build"
},
{
"label": "Vite Build - Vue3",
"type": "shell",
"command": "vite build",
"dependsOn": ["Yarn Install - Vue3"],
"options": {
"cwd": "${workspaceFolder}/vue3"
},

View File

@@ -1,11 +1,12 @@
FROM python:3.13-alpine3.22
FROM python:3.13-alpine3.21
#Install all dependencies.
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev openldap git libgcc libstdc++ nginx tini envsubst nodejs npm
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev openldap git libgcc libstdc++ nginx
#Print all logs without buffering it.
ENV PYTHONUNBUFFERED=1 \
DOCKER=true
ENV PYTHONUNBUFFERED 1
ENV DOCKER true
#This port will be used by gunicorn.
EXPOSE 80 8080
@@ -16,14 +17,23 @@ WORKDIR /opt/recipes
COPY requirements.txt ./
RUN \
if [ `apk --print-arch` = "armv7" ]; then \
printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf ; \
fi
# remove Development dependencies from requirements.txt
RUN sed -i '/# Development/,$d' requirements.txt
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev xmlsec-dev xmlsec build-base g++ curl rust && \
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev xmlsec-dev xmlsec build-base g++ curl && \
echo -n "INPUT ( libldap.so )" > /usr/lib/libldap_r.so && \
python -m venv venv && \
/opt/recipes/venv/bin/python -m pip install --upgrade pip && \
venv/bin/pip debug -v && \
venv/bin/pip install wheel==0.45.1 && \
venv/bin/pip install setuptools_rust==1.10.2 && \
if [ `apk --print-arch` = "aarch64" ]; then \
curl https://sh.rustup.rs -sSf | sh -s -- -y; \
fi &&\
venv/bin/pip install -r requirements.txt --no-cache-dir &&\
apk --purge del .build-deps
@@ -31,11 +41,8 @@ RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-de
COPY . ./
# delete default nginx config and link it to tandoor's config
# create symlinks to access and error log to show them on stdout
RUN rm -rf /etc/nginx/http.d && \
ln -s /opt/recipes/http.d /etc/nginx/http.d && \
ln -sf /dev/stdout /var/log/nginx/access.log && \
ln -sf /dev/stderr /var/log/nginx/error.log
RUN rm -rf /etc/nginx/http.d
RUN ln -s /opt/recipes/http.d /etc/nginx/http.d
# commented for now https://github.com/TandoorRecipes/recipes/issues/3478
#HEALTHCHECK --interval=30s \
@@ -50,4 +57,4 @@ RUN /opt/recipes/venv/bin/python version.py
RUN find . -type d -name ".git" | xargs rm -rf
RUN chmod +x boot.sh
ENTRYPOINT ["/sbin/tini", "--", "/opt/recipes/boot.sh"]
ENTRYPOINT ["/opt/recipes/boot.sh"]

View File

@@ -15,26 +15,23 @@
<a href="https://discord.gg/RhzBrfWgtp" target="_blank" rel="noopener noreferrer"><img src="https://badgen.net/badge/icon/discord?icon=discord&label" ></a>
<a href="https://hub.docker.com/r/vabene1111/recipes" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/docker/pulls/vabene1111/recipes" ></a>
<a href="https://github.com/vabene1111/recipes/releases/latest" rel="noopener noreferrer"><img src="https://img.shields.io/github/v/release/vabene1111/recipes" ></a>
<a href="https://app.tandoor.dev/e/demo-auto-login/" rel="noopener noreferrer"><img src="https://img.shields.io/badge/demo-available-success" ></a>
<a href="https://app.tandoor.dev/accounts/login/?demo" rel="noopener noreferrer"><img src="https://img.shields.io/badge/demo-available-success" ></a>
</p>
<p align="center">
<a href="https://tandoor.dev" target="_blank" rel="noopener noreferrer">Website</a> •
<a href="https://docs.tandoor.dev/install/docker/" target="_blank" rel="noopener noreferrer">Installation</a> •
<a href="https://docs.tandoor.dev/" target="_blank" rel="noopener noreferrer">Docs</a> •
<a href="https://app.tandoor.dev/e/demo-auto-login/" target="_blank" rel="noopener noreferrer">Demo</a> •
<a href="https://community.tandoor.dev" target="_blank" rel="noopener noreferrer">Community</a> •
<a href="https://app.tandoor.dev/accounts/login/?demo" target="_blank" rel="noopener noreferrer">Demo</a> •
<a href="https://discord.gg/RhzBrfWgtp" target="_blank" rel="noopener noreferrer">Discord</a>
</p>
![Preview](docs/preview.png)
## Core Features
- 🥗 **Manage your recipes** - Manage your ever growing recipe collection
- 📆 **Plan** - multiple meals for each day
- 🛒 **Shopping lists** - via the meal plan or straight from recipes
- 🪄 **use AI** to recognize images, sort recipe steps, find nutrition facts and more
- 📚 **Cookbooks** - collect recipes into books
- 👪 **Share and collaborate** on recipes with friends and family
@@ -64,13 +61,12 @@ a public page.
Documentation can be found [here](https://docs.tandoor.dev/).
## ❤️ Support our work ❤️
## Support our work
Tandoor is developed by volunteers in their free time just because it's fun. That said, earning
some money with the project allows us to spend more time on it and thus make improvements we otherwise couldn't.
Because of that, there are several ways you can support us:
- **GitHub Sponsors** You can sponsor contributors of this project on GitHub: [vabene1111](https://github.com/sponsors/vabene1111)
- **Patreon** You can sponsor contributors of this project on Patreon: [vabene1111](https://www.patreon.com/cw/vabene1111)
- **Host at Hetzner** We have been very happy customers of Hetzner for multiple years for all of our projects. If you want to get into self-hosting or are tired of the expensive big providers, their cloud servers are a great place to get started. When you sign up via our [referral link](https://hetzner.cloud/?ref=ISdlrLmr9kGj) you will get 20€ worth of cloud credits and we get a small kickback too.
- **Let us host for you** We are offering a [hosted version](https://app.tandoor.dev) where all profits support us and the development of Tandoor (currently only available in Germany).
@@ -85,13 +81,13 @@ Share some information on how you use Tandoor to help me improve the application
<table>
<tr>
<td><a href="https://community.tandoor.dev">Community</a></td>
<td>Get support, share best practices, discuss feature ideas, and meet other Tandoor users.</td>
<td><a href="https://discord.gg/RhzBrfWgtp">Discord</a></td>
<td>We have a public Discord server that anyone can join. This is where all our developers and contributors hang out and where we make announcements</td>
</tr>
<tr>
<td><a href="https://discord.gg/RhzBrfWgtp">Discord</a></td>
<td>We have a public Discord server that anyone can join. This is where all our developers and contributors hang out and where we make announcements</td>
<td><a href="https://twitter.com/TandoorRecipes">Twitter</a></td>
<td>You can follow our Twitter account to get updates on new features or releases</td>
</tr>
</table>

37 boot.sh Executable file → Normal file
View File

@@ -1,29 +1,16 @@
#!/bin/sh
source venv/bin/activate
# these are envsubst in the nginx config, make sure they default to something sensible when unset
export TANDOOR_PORT="${TANDOOR_PORT:-80}"
export MEDIA_ROOT=${MEDIA_ROOT:-/opt/recipes/mediafiles};
export STATIC_ROOT=${STATIC_ROOT:-/opt/recipes/staticfiles};
TANDOOR_PORT="${TANDOOR_PORT:-8080}"
GUNICORN_WORKERS="${GUNICORN_WORKERS:-3}"
GUNICORN_THREADS="${GUNICORN_THREADS:-2}"
GUNICORN_LOG_LEVEL="${GUNICORN_LOG_LEVEL:-'info'}"
PLUGINS_BUILD="${PLUGINS_BUILD:-0}"
display_warning() {
echo "[WARNING]"
echo -e "$1"
}
# prepare nginx config
envsubst '$MEDIA_ROOT $STATIC_ROOT $TANDOOR_PORT' < /opt/recipes/http.d/Recipes.conf.template > /opt/recipes/http.d/Recipes.conf
# start nginx early to display error pages
echo "Starting nginx"
nginx
echo "Checking configuration..."
# SECRET_KEY (or a valid file at SECRET_KEY_FILE) must be set in .env file
@@ -86,23 +73,27 @@ echo "Database is ready"
echo "Migrating database"
python manage.py migrate
if [ "${PLUGINS_BUILD}" -eq 1 ]; then
echo "Running yarn build at startup because PLUGINS_BUILD is enabled"
python plugin.py
fi
python manage.py migrate
echo "Collecting static files, this may take a while..."
python manage.py collectstatic --noinput --clear
python manage.py collectstatic --noinput
echo "Done"
chmod -R 755 ${MEDIA_ROOT:-/opt/recipes/mediafiles}
chmod -R 755 /opt/recipes/mediafiles
ipv6_disable=$(cat /sys/module/ipv6/parameters/disable)
echo "Starting gunicorn"
exec gunicorn --bind unix:/run/tandoor.sock --workers $GUNICORN_WORKERS --threads $GUNICORN_THREADS --access-logfile - --error-logfile - --log-level $GUNICORN_LOG_LEVEL recipes.wsgi
# start nginx
echo "Starting nginx"
nginx
echo "Starting gunicorn"
# Check if IPv6 is enabled, only then run gunicorn with ipv6 support
if [ "$ipv6_disable" -eq 0 ]; then
exec gunicorn -b "[::]:$TANDOOR_PORT" --workers $GUNICORN_WORKERS --threads $GUNICORN_THREADS --access-logfile - --error-logfile - --log-level $GUNICORN_LOG_LEVEL recipes.wsgi
else
exec gunicorn -b ":$TANDOOR_PORT" --workers $GUNICORN_WORKERS --threads $GUNICORN_THREADS --access-logfile - --error-logfile - --log-level $GUNICORN_LOG_LEVEL recipes.wsgi
fi

View File

@@ -17,7 +17,7 @@ from .models import (BookmarkletImport, Comment, CookLog, CustomFilter, Food, Im
ShoppingListEntry, ShoppingListRecipe, Space, Step, Storage,
Supermarket, SupermarketCategory, SupermarketCategoryRelation, Sync, SyncLog,
TelegramBot, Unit, UnitConversion, UserFile, UserPreference, UserSpace,
ViewLog, ConnectorConfig, AiProvider, AiLog)
ViewLog, ConnectorConfig)
admin.site.login = secure_admin_login(admin.site.login)
@@ -90,20 +90,6 @@ class SearchPreferenceAdmin(admin.ModelAdmin):
admin.site.register(SearchPreference, SearchPreferenceAdmin)
class AiProviderAdmin(admin.ModelAdmin):
list_display = ('name', 'space', 'model_name',)
search_fields = ('name', 'space', 'model_name',)
admin.site.register(AiProvider, AiProviderAdmin)
class AiLogAdmin(admin.ModelAdmin):
list_display = ('ai_provider', 'function', 'credit_cost', 'created_by', 'created_at',)
admin.site.register(AiLog, AiLogAdmin)
class StorageAdmin(admin.ModelAdmin):
list_display = ('name', 'method')
search_fields = ('name',)

View File

@@ -26,7 +26,6 @@ class ImportExportBase(forms.Form):
PAPRIKA = 'PAPRIKA'
NEXTCLOUD = 'NEXTCLOUD'
MEALIE = 'MEALIE'
MEALIE1 = 'MEALIE1'
CHOWDOWN = 'CHOWDOWN'
SAFFRON = 'SAFFRON'
CHEFTAP = 'CHEFTAP'
@@ -47,7 +46,7 @@ class ImportExportBase(forms.Form):
PDF = 'PDF'
GOURMET = 'GOURMET'
type = forms.ChoiceField(choices=((DEFAULT, _('Default')), (PAPRIKA, 'Paprika'), (NEXTCLOUD, 'Nextcloud Cookbook'), (MEALIE, 'Mealie'), (MEALIE1, 'Mealie1'), (CHOWDOWN, 'Chowdown'),
type = forms.ChoiceField(choices=((DEFAULT, _('Default')), (PAPRIKA, 'Paprika'), (NEXTCLOUD, 'Nextcloud Cookbook'), (MEALIE, 'Mealie'), (CHOWDOWN, 'Chowdown'),
(SAFFRON, 'Saffron'), (CHEFTAP, 'ChefTap'), (PEPPERPLATE, 'Pepperplate'), (RECETTETEK, 'RecetteTek'), (RECIPESAGE, 'Recipe Sage'),
(DOMESTICA, 'Domestica'), (MEALMASTER, 'MealMaster'), (REZKONV, 'RezKonv'), (OPENEATS, 'Openeats'), (RECIPEKEEPER, 'Recipe Keeper'),
(PLANTOEAT, 'Plantoeat'), (COOKBOOKAPP, 'CookBookApp'), (COPYMETHAT, 'CopyMeThat'), (PDF, 'PDF'), (MELARECIPES, 'Melarecipes'),
@@ -76,11 +75,6 @@ class ImportForm(ImportExportBase):
files = MultipleFileField(required=True)
duplicates = forms.BooleanField(help_text=_('To prevent duplicates recipes with the same name as existing ones are ignored. Check this box to import everything.'),
required=False)
meal_plans = forms.BooleanField(required=False)
shopping_lists = forms.BooleanField(required=False)
nutrition_per_serving = forms.BooleanField(required=False) # some managers (e.g. mealie) do not specify what the nutrition values relate to, so we let the user choose
class ExportForm(ImportExportBase):
recipes = forms.ModelMultipleChoiceField(widget=MultiSelectWidget, queryset=Recipe.objects.none(), required=False)
all = forms.BooleanField(required=False)

View File

@@ -1,85 +0,0 @@
from decimal import Decimal
from django.utils import timezone
from django.db.models import Sum
from litellm import CustomLogger
from cookbook.models import AiLog
from recipes import settings
def get_monthly_token_usage(space):
"""
returns the number of credits the space has used in the current month
"""
token_usage = AiLog.objects.filter(space=space, credits_from_balance=False, created_at__month=timezone.now().month).aggregate(Sum('credit_cost'))['credit_cost__sum']
if token_usage is None:
token_usage = 0
return token_usage
def has_monthly_token(space):
"""
checks if the monthly credit limit has been exceeded
"""
return get_monthly_token_usage(space) < space.ai_credits_monthly
def can_perform_ai_request(space):
return (has_monthly_token(space) or space.ai_credits_balance > 0) and space.ai_enabled
class AiCallbackHandler(CustomLogger):
space = None
user = None
ai_provider = None
function = None
def __init__(self, space, user, ai_provider, function):
super().__init__()
self.space = space
self.user = user
self.ai_provider = ai_provider
self.function = function
def log_pre_api_call(self, model, messages, kwargs):
pass
def log_post_api_call(self, kwargs, response_obj, start_time, end_time):
pass
def log_success_event(self, kwargs, response_obj, start_time, end_time):
self.create_ai_log(kwargs, response_obj, start_time, end_time)
def log_failure_event(self, kwargs, response_obj, start_time, end_time):
self.create_ai_log(kwargs, response_obj, start_time, end_time)
def create_ai_log(self, kwargs, response_obj, start_time, end_time):
credit_cost = 0
credits_from_balance = False
if self.ai_provider.log_credit_cost:
credit_cost = kwargs.get("response_cost", 0) * 100
if (not has_monthly_token(self.space)) and self.space.ai_credits_balance > 0:
remaining_balance = self.space.ai_credits_balance - Decimal(str(credit_cost))
if remaining_balance < 0:
remaining_balance = 0
if settings.HOSTED and self.space.ai_credits_monthly == 0:
self.space.ai_enabled = False
self.space.ai_credits_balance = remaining_balance
credits_from_balance = True
self.space.save()
AiLog.objects.create(
created_by=self.user,
space=self.space,
ai_provider=self.ai_provider,
start_time=start_time,
end_time=end_time,
input_tokens=response_obj['usage']['prompt_tokens'],
output_tokens=response_obj['usage']['completion_tokens'],
function=self.function,
credit_cost=credit_cost,
credits_from_balance=credits_from_balance,
)

View File

@@ -1,22 +0,0 @@
def add_to_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids):
"""
given a model, the base and related field and the base and related ids, bulk create relation objects
"""
relation_objects = []
for b in base_ids:
for r in related_ids:
relation_objects.append(relation_model(**{base_field_name: b, related_field_name: r}))
relation_model.objects.bulk_create(relation_objects, ignore_conflicts=True, unique_fields=(base_field_name, related_field_name,))
def remove_from_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids):
relation_model.objects.filter(**{f'{base_field_name}__in': base_ids, f'{related_field_name}__in': related_ids}).delete()
def remove_all_from_relation(relation_model, base_field_name, base_ids):
relation_model.objects.filter(**{f'{base_field_name}__in': base_ids}).delete()
def set_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids):
remove_all_from_relation(relation_model, base_field_name, base_ids)
add_to_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids)

View File

@@ -37,7 +37,7 @@ def get_filetype(name):
def is_file_type_allowed(filename, image_only=False):
is_file_allowed = False
allowed_file_types = ['.pdf', '.docx', '.xlsx', '.css', '.mp4', '.mov']
allowed_file_types = ['.pdf', '.docx', '.xlsx', '.css']
allowed_image_types = ['.png', '.jpg', '.jpeg', '.gif', '.webp']
check_list = allowed_image_types
if not image_only:
@@ -77,8 +77,6 @@ def handle_image(request, image_object, filetype):
file_format = 'JPEG'
if filetype == '.png':
file_format = 'PNG'
if filetype == '.webp':
file_format = 'WEBP'
if (image_object.size / 1000) > 500: # if larger than 500 kb compress
if filetype == '.jpeg' or filetype == '.jpg':

View File

@@ -176,6 +176,7 @@ class IngredientParser:
# if something like this is detected move it to the beginning so the parser can handle it
if len(ingredient) < 1000 and re.search(r'^([^\W\d_])+(.)*[1-9](\d)*\s*([^\W\d_])+', ingredient):
match = re.search(r'[1-9](\d)*\s*([^\W\d_])+', ingredient)
print(f'reordering from {ingredient} to {ingredient[match.start():match.end()] + " " + ingredient.replace(ingredient[match.start():match.end()], "")}')
ingredient = ingredient[match.start():match.end()] + ' ' + ingredient.replace(ingredient[match.start():match.end()], '')
# if the string contains parenthesis early on remove it and place it at the end
@@ -283,4 +284,6 @@ class IngredientParser:
if len(food.strip()) == 0:
raise ValueError(f'Error parsing string {ingredient}, food cannot be empty')
print(f'parsed {ingredient} to {amount} - {unit} - {food} - {note}')
return amount, unit, food, note[:Ingredient._meta.get_field('note').max_length].strip()

View File

@@ -51,10 +51,10 @@ class OpenDataImporter:
for field in field_list:
if isinstance(getattr(obj, field), float) or isinstance(getattr(obj, field), Decimal):
if abs(float(getattr(obj, field)) - float(existing_obj[field])) > 0.001: # convert both to float and check if basically equal
#print(f'comparing FLOAT {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
print(f'comparing FLOAT {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
return False
elif getattr(obj, field) != existing_obj[field]:
#print(f'comparing {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
print(f'comparing {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
return False
return True
@@ -342,7 +342,7 @@ class OpenDataImporter:
'name': self.data[datatype][k]['name'],
'plural_name': self.data[datatype][k]['plural_name'] if self.data[datatype][k]['plural_name'] != '' else None,
'supermarket_category_id': self.slug_id_cache['category'][self.data[datatype][k]['store_category']] if self.data[datatype][k]['store_category'] in self.slug_id_cache['category'] else None,
'fdc_id': re.sub(r'\D', '', str(self.data[datatype][k]['fdc_id'])) if self.data[datatype][k]['fdc_id'] != '' else None,
'fdc_id': re.sub(r'\D', '', self.data[datatype][k]['fdc_id']) if self.data[datatype][k]['fdc_id'] != '' else None,
'open_data_slug': k,
'properties_food_unit_id': None,
'space_id': self.request.space.id,

View File

@@ -3,19 +3,17 @@ import inspect
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import user_passes_test
from django.contrib.auth.models import Group
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.http import HttpResponseRedirect
from django.urls import reverse, reverse_lazy
from django.utils.translation import gettext as _
from django_scopes import scopes_disabled
from oauth2_provider.contrib.rest_framework import TokenHasReadWriteScope, TokenHasScope
from oauth2_provider.models import AccessToken
from rest_framework import permissions
from rest_framework.permissions import SAFE_METHODS
import random
from cookbook.models import Recipe, ShareLink, UserSpace, Space
from cookbook.models import Recipe, ShareLink, UserSpace
def get_allowed_groups(groups_required):
@@ -333,25 +331,6 @@ class CustomRecipePermission(permissions.BasePermission):
or has_group_permission(request.user, ['user'])) and obj.space == request.space
class CustomAiProviderPermission(permissions.BasePermission):
"""
Custom permission class for the AiProvider api endpoint
users: can read all
admins: can read and write
superusers: can read and write + write providers without a space
"""
message = _('You do not have the required permissions to view this page!')
def has_permission(self, request, view): # user is either at least a user and the request is safe
return (has_group_permission(request.user, ['user']) and request.method in SAFE_METHODS) or (has_group_permission(request.user, ['admin']) or request.user.is_superuser)
# editing of global providers allowed for superusers, space providers by admins and users can read only access
def has_object_permission(self, request, view, obj):
return ((obj.space is None and request.user.is_superuser)
or (obj.space == request.space and has_group_permission(request.user, ['admin']))
or (obj.space == request.space and has_group_permission(request.user, ['user']) and request.method in SAFE_METHODS))
class CustomUserPermission(permissions.BasePermission):
"""
Custom permission class for user api endpoint
@@ -458,36 +437,3 @@ class IsReadOnlyDRF(permissions.BasePermission):
def has_permission(self, request, view):
return request.method in SAFE_METHODS
class IsCreateDRF(permissions.BasePermission):
message = 'You cannot interact with this object, you can only create'
def has_permission(self, request, view):
return request.method == 'POST'
def create_space_for_user(user, name=None):
with scopes_disabled():
if not name:
name = f"{user.username}'s Space"
if Space.objects.filter(name=name).exists():
name = f'{name} #{random.randrange(1, 10 ** 5)}'
created_space = Space(name=name,
created_by=user,
max_file_storage_mb=settings.SPACE_DEFAULT_MAX_FILES,
max_recipes=settings.SPACE_DEFAULT_MAX_RECIPES,
max_users=settings.SPACE_DEFAULT_MAX_USERS,
allow_sharing=settings.SPACE_DEFAULT_ALLOW_SHARING,
ai_enabled=settings.SPACE_AI_ENABLED,
ai_credits_monthly=settings.SPACE_AI_CREDITS_MONTHLY,
space_setup_completed=False, )
created_space.save()
UserSpace.objects.filter(user=user).update(active=False)
user_space = UserSpace.objects.create(space=created_space, user=user, active=True)
user_space.groups.add(Group.objects.filter(name='admin').get())
return user_space

View File

@@ -288,7 +288,7 @@ class RecipeSearch():
def _updated_on_filter(self):
if self._updatedon:
self._queryset = self._queryset.filter(updated_at__date=self._updatedon)
self._queryset = self._queryset.filter(updated_at__date__date=self._updatedon)
elif self._updatedon_lte:
self._queryset = self._queryset.filter(updated_at__date__lte=self._updatedon_lte)
elif self._updatedon_gte:
@@ -326,7 +326,7 @@ class RecipeSearch():
def _favorite_recipes(self):
if self._sort_includes('favorite') or self._timescooked or self._timescooked_gte or self._timescooked_lte:
less_than = self._timescooked_lte and not self._sort_includes('-favorite')
if less_than or self._timescooked == 0:
if less_than:
default = 1000
else:
default = 0
@@ -339,7 +339,7 @@ class RecipeSearch():
self._queryset = self._queryset.annotate(favorite=Coalesce(Subquery(favorite_recipes), default))
if self._timescooked:
self._queryset = self._queryset.filter(favorite=self._timescooked)
self._queryset = self._queryset.filter(favorite=0)
elif self._timescooked_lte:
self._queryset = self._queryset.filter(favorite__lte=int(self._timescooked_lte)).exclude(favorite=0)
elif self._timescooked_gte:

View File

@@ -155,7 +155,7 @@ def get_from_scraper(scrape, request):
# assign steps
try:
for i in parse_instructions(scrape.instructions_list()):
for i in parse_instructions(scrape.instructions()):
recipe_json['steps'].append({
'instruction': i,
'ingredients': [],
@@ -177,11 +177,11 @@ def get_from_scraper(scrape, request):
for x in scrape.ingredients():
if x.strip() != '':
try:
amount, unit, food, note = ingredient_parser.parse(x)
amount, unit, ingredient, note = ingredient_parser.parse(x)
ingredient = {
'amount': amount,
'food': {
'name': food,
'name': ingredient,
},
'unit': None,
'note': note,
@@ -315,29 +315,14 @@ def clean_instruction_string(instruction):
# handle unsupported, special UTF8 character in Thermomix-specific instructions,
# that happen in nearly every recipe on Cookidoo, Zaubertopf Club, Rezeptwelt
# and in Thermomix-specific recipes on many other sites
normalized_string = normalized_string \
.replace(u"\uE003", _('reverse rotation')) \
.replace(u"\uE002", _('careful rotation')) \
.replace(u"\uE001", _('knead')) \
.replace(u"\uE031", _('thicken')) \
.replace(u"\uE019", _('warm up')) \
.replace(u"\uE02E", _('ferment')) \
.replace(u"\uE018", _('slow cook')) \
.replace(u"\uE033", _('egg boiler')) \
.replace(u"\uE016", _('kettle')) \
.replace(u"\uE01E", _('blend')) \
.replace(u"\uE011", _('pre-clean')) \
.replace(u"\uE026", _('high temperature')) \
.replace(u"\uE00D", _('rice cooker')) \
.replace(u"\uE00C", _('caramelize')) \
.replace(u"\uE038", _('peeler')) \
.replace(u"\uE037", _('slicer')) \
.replace(u"\uE036", _('grater')) \
.replace(u"\uE04C", _('spiralizer')) \
.replace(u"\uE02D", _("sous-vide"))
return normalized_string
return normalized_string \
.replace("", _('reverse rotation')) \
.replace("", _('careful rotation')) \
.replace("", _('knead')) \
.replace("Andicken ", _('thicken')) \
.replace("Erwärmen ", _('warm up')) \
.replace("Fermentieren ", _('ferment')) \
.replace("Sous-vide ", _("sous-vide"))
def parse_instructions(instructions):
@@ -418,8 +403,6 @@ def parse_servings_text(servings):
def parse_time(recipe_time):
if not recipe_time:
return 0
if type(recipe_time) not in [int, float]:
try:
recipe_time = float(re.search(r'\d+', recipe_time).group())

View File

@@ -1,15 +1,8 @@
from django.contrib.auth.models import Group
from django.http import HttpResponseRedirect
from django.urls import reverse
from django_scopes import scope, scopes_disabled
from oauth2_provider.contrib.rest_framework import OAuth2Authentication
from psycopg2.errors import UniqueViolation
from rest_framework.exceptions import AuthenticationFailed
import random
from cookbook.helper.permission_helper import create_space_for_user
from cookbook.models import Space, UserSpace
from cookbook.views import views
from recipes import settings
@@ -41,28 +34,16 @@ class ScopeMiddleware:
if request.path.startswith(prefix + '/switch-space/'):
return self.get_response(request)
if request.path.startswith(prefix + '/invite/'):
return self.get_response(request)
with scopes_disabled():
if request.user.userspace_set.count() == 0 and not reverse('account_logout') in request.path:
return views.space_overview(request)
# get active user space, if for some reason more than one space is active select first (group permission checks will fail, this is not intended at this point)
user_space = request.user.userspace_set.filter(active=True).first()
if not user_space and request.user.userspace_set.count() > 0:
# if the user has a userspace but none is active, activate the first one
user_space = request.user.userspace_set.first()
if user_space:
user_space.active = True
user_space.save()
if not user_space:
if 'signup_token' in request.session:
# if user is authenticated, has no space but a signup token (InviteLink) is present, redirect to invite link logic
return HttpResponseRedirect(reverse('view_invite', args=[request.session.pop('signup_token', '')]))
else:
# if user does not yet have a space create one for him
user_space = create_space_for_user(request.user)
return views.space_overview(request)
# TODO remove the need for this view
if user_space.groups.count() == 0 and not reverse('account_logout') in request.path:
return views.no_groups(request)

View File

@@ -135,9 +135,8 @@ class UnitConversionHelper:
:param food: base food
:return: converted ingredient object from base amount/unit/food
"""
if (uc.food is None or uc.food == food) and uc.converted_amount > 0 and uc.base_amount > 0:
if uc.food is None or uc.food == food:
if unit == uc.base_unit:
return Ingredient(amount=amount * (uc.converted_amount / uc.base_amount), unit=uc.converted_unit, food=food, space=self.space)
else:
return Ingredient(amount=amount * (uc.base_amount / uc.converted_amount), unit=uc.base_unit, food=food, space=self.space)
return None

View File

@@ -26,12 +26,6 @@ class Integration:
files = None
export_type = None
ignored_recipes = []
import_log = None
import_duplicates = False
import_meal_plans = True
import_shopping_lists = True
nutrition_per_serving = False
def __init__(self, request, export_type):
"""
@@ -108,7 +102,7 @@ class Integration:
"""
return True
def do_import(self, files, il, import_duplicates, meal_plans=True, shopping_lists=True, nutrition_per_serving=False):
def do_import(self, files, il, import_duplicates):
"""
Imports given files
:param import_duplicates: if true duplicates are imported as well
@@ -117,12 +111,6 @@ class Integration:
:return: HttpResponseRedirect to the recipe search showing all imported recipes
"""
with scope(space=self.request.space):
self.import_log = il
self.import_duplicates = import_duplicates
self.import_meal_plans = meal_plans
self.import_shopping_lists = shopping_lists
self.nutrition_per_serving = nutrition_per_serving
try:
self.files = files
@@ -178,24 +166,20 @@ class Integration:
il.total_recipes = len(new_file_list)
file_list = new_file_list
if isinstance(self, cookbook.integration.mealie1.Mealie1):
# since the mealie 1.0 export is a backup and not a classic recipe export we treat it a bit differently
recipes = self.get_recipe_from_file(import_zip)
else:
for z in file_list:
try:
if not hasattr(z, 'filename') or isinstance(z, Tag):
recipe = self.get_recipe_from_file(z)
else:
recipe = self.get_recipe_from_file(BytesIO(import_zip.read(z.filename)))
recipe.keywords.add(self.keyword)
il.msg += self.get_recipe_processed_msg(recipe)
self.handle_duplicates(recipe, import_duplicates)
il.imported_recipes += 1
il.save()
except Exception as e:
traceback.print_exc()
self.handle_exception(e, log=il, message=f'-------------------- \nERROR \n{e}\n--------------------\n')
for z in file_list:
try:
if not hasattr(z, 'filename') or isinstance(z, Tag):
recipe = self.get_recipe_from_file(z)
else:
recipe = self.get_recipe_from_file(BytesIO(import_zip.read(z.filename)))
recipe.keywords.add(self.keyword)
il.msg += self.get_recipe_processed_msg(recipe)
self.handle_duplicates(recipe, import_duplicates)
il.imported_recipes += 1
il.save()
except Exception as e:
traceback.print_exc()
self.handle_exception(e, log=il, message=f'-------------------- \nERROR \n{e}\n--------------------\n')
import_zip.close()
elif '.json' in f['name'] or '.xml' in f['name'] or '.txt' in f['name'] or '.mmf' in f['name'] or '.rk' in f['name'] or '.melarecipe' in f['name']:
data_list = self.split_recipe_file(f['file'])

View File

@@ -1,366 +0,0 @@
import json
import re
import traceback
import uuid
from decimal import Decimal
from io import BytesIO
from zipfile import ZipFile
from gettext import gettext as _
from django.db import transaction
from cookbook.helper import ingredient_parser
from cookbook.helper.image_processing import get_filetype
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text, parse_time
from cookbook.integration.integration import Integration
from cookbook.models import Ingredient, Keyword, Recipe, Step, Food, Unit, SupermarketCategory, PropertyType, Property, MealType, MealPlan, CookLog, ShoppingListEntry
class Mealie1(Integration):
"""
integration for mealie past version 1.0
"""
def get_recipe_from_file(self, file):
mealie_database = json.loads(BytesIO(file.read('database.json')).getvalue().decode("utf-8"))
self.import_log.total_recipes = len(mealie_database['recipes'])
self.import_log.msg += f"Importing {len(mealie_database["categories"]) + len(mealie_database["tags"])} tags and categories as keywords...\n"
self.import_log.save()
keywords_categories_dict = {}
for c in mealie_database['categories']:
if keyword := Keyword.objects.filter(name=c['name'], space=self.request.space).first():
keywords_categories_dict[c['id']] = keyword.pk
else:
keyword = Keyword.objects.create(name=c['name'], space=self.request.space)
keywords_categories_dict[c['id']] = keyword.pk
keywords_tags_dict = {}
for t in mealie_database['tags']:
if keyword := Keyword.objects.filter(name=t['name'], space=self.request.space).first():
keywords_tags_dict[t['id']] = keyword.pk
else:
keyword = Keyword.objects.create(name=t['name'], space=self.request.space)
keywords_tags_dict[t['id']] = keyword.pk
self.import_log.msg += f"Importing {len(mealie_database["multi_purpose_labels"])} multi purpose labels as supermarket categories...\n"
self.import_log.save()
supermarket_categories_dict = {}
for m in mealie_database['multi_purpose_labels']:
if supermarket_category := SupermarketCategory.objects.filter(name=m['name'], space=self.request.space).first():
supermarket_categories_dict[m['id']] = supermarket_category.pk
else:
supermarket_category = SupermarketCategory.objects.create(name=m['name'], space=self.request.space)
supermarket_categories_dict[m['id']] = supermarket_category.pk
self.import_log.msg += f"Importing {len(mealie_database["ingredient_foods"])} foods...\n"
self.import_log.save()
foods_dict = {}
for f in mealie_database['ingredient_foods']:
if food := Food.objects.filter(name=f['name'], space=self.request.space).first():
foods_dict[f['id']] = food.pk
else:
food = {'name': f['name'],
'plural_name': f['plural_name'],
'description': f['description'],
'space': self.request.space}
if f['label_id'] and f['label_id'] in supermarket_categories_dict:
food['supermarket_category_id'] = supermarket_categories_dict[f['label_id']]
food = Food.objects.create(**food)
if f['on_hand']:
food.onhand_users.add(self.request.user)
foods_dict[f['id']] = food.pk
self.import_log.msg += f"Importing {len(mealie_database["ingredient_units"])} units...\n"
self.import_log.save()
units_dict = {}
for u in mealie_database['ingredient_units']:
if unit := Unit.objects.filter(name=u['name'], space=self.request.space).first():
units_dict[u['id']] = unit.pk
else:
unit = Unit.objects.create(name=u['name'], plural_name=u['plural_name'], description=u['description'], space=self.request.space)
units_dict[u['id']] = unit.pk
recipes_dict = {}
recipe_property_factor_dict = {}
recipes = []
recipe_keyword_relation = []
for r in mealie_database['recipes']:
if Recipe.objects.filter(space=self.request.space, name=r['name']).exists() and not self.import_duplicates:
self.import_log.msg += f"Ignoring {r['name']} because a recipe with this name already exists.\n"
self.import_log.save()
else:
recipe = Recipe.objects.create(
waiting_time=parse_time(r['perform_time']),
working_time=parse_time(r['prep_time']),
description=r['description'][:512],
name=r['name'],
source_url=r['org_url'],
servings=r['recipe_servings'] if r['recipe_servings'] and r['recipe_servings'] != 0 else 1,
servings_text=r['recipe_yield'].strip() if r['recipe_yield'] else "",
internal=True,
created_at=r['created_at'],
space=self.request.space,
created_by=self.request.user,
)
if not self.nutrition_per_serving:
recipe_property_factor_dict[r['id']] = recipe.servings
self.import_log.msg += self.get_recipe_processed_msg(recipe)
self.import_log.imported_recipes += 1
self.import_log.save()
recipes.append(recipe)
recipes_dict[r['id']] = recipe.pk
recipe_keyword_relation.append(Recipe.keywords.through(recipe_id=recipe.pk, keyword_id=self.keyword.pk))
Recipe.keywords.through.objects.bulk_create(recipe_keyword_relation, ignore_conflicts=True)
self.import_log.msg += f"Importing {len(mealie_database["recipe_instructions"])} instructions...\n"
self.import_log.save()
steps_relation = []
first_step_of_recipe_dict = {}
step_id_dict = {}
for s in mealie_database['recipe_instructions']:
if s['recipe_id'] in recipes_dict:
step = Step.objects.create(instruction=(s['text'] if s['text'] else "") + (f" \n {s['summary']}" if s['summary'] else ""),
order=s['position'],
name=s['title'],
space=self.request.space)
steps_relation.append(Recipe.steps.through(recipe_id=recipes_dict[s['recipe_id']], step_id=step.pk))
step_id_dict[s["id"]] = step.pk
if s['recipe_id'] not in first_step_of_recipe_dict:
first_step_of_recipe_dict[s['recipe_id']] = step.pk
# it is possible for a recipe to not have steps but have ingredients, in that case create an empty step to add them to later
for r in recipes_dict.keys():
if r not in first_step_of_recipe_dict:
step = Step.objects.create(instruction='',
order=0,
name='',
space=self.request.space)
steps_relation.append(Recipe.steps.through(recipe_id=recipes_dict[r], step_id=step.pk))
first_step_of_recipe_dict[r] = step.pk
for n in mealie_database['notes']:
if n['recipe_id'] in recipes_dict:
step = Step.objects.create(instruction=n['text'],
name=n['title'],
order=100,
space=self.request.space)
steps_relation.append(Recipe.steps.through(recipe_id=recipes_dict[n['recipe_id']], step_id=step.pk))
Recipe.steps.through.objects.bulk_create(steps_relation)
ingredient_parser = IngredientParser(self.request, True)
self.import_log.msg += f"Importing {len(mealie_database["recipes_ingredients"])} ingredients...\n"
self.import_log.save()
# mealie stores the reference to a step (instruction) from an ingredient (reference) in the recipe_ingredient_ref_link table
recipe_ingredient_ref_link_dict = {}
for ref in mealie_database['recipe_ingredient_ref_link']:
recipe_ingredient_ref_link_dict[ref["reference_id"]] = ref["instruction_id"]
ingredients_relation = []
for i in mealie_database['recipes_ingredients']:
if i['recipe_id'] in recipes_dict:
if i['title']:
title_ingredient = Ingredient.objects.create(
note=i['title'],
is_header=True,
space=self.request.space,
)
ingredients_relation.append(Step.ingredients.through(step_id=get_step_id(i, first_step_of_recipe_dict, step_id_dict,recipe_ingredient_ref_link_dict), ingredient_id=title_ingredient.pk))
if i['food_id']:
ingredient = Ingredient.objects.create(
food_id=foods_dict[i['food_id']] if i['food_id'] in foods_dict else None,
unit_id=units_dict[i['unit_id']] if i['unit_id'] in units_dict else None,
original_text=i['original_text'],
order=i['position'],
amount=i['quantity'] if i['quantity'] else 0,
note=i['note'],
space=self.request.space,
)
ingredients_relation.append(Step.ingredients.through(step_id=get_step_id(i, first_step_of_recipe_dict, step_id_dict,recipe_ingredient_ref_link_dict), ingredient_id=ingredient.pk))
elif i['note'].strip():
amount, unit, food, note = ingredient_parser.parse(i['note'].strip())
f = ingredient_parser.get_food(food)
u = ingredient_parser.get_unit(unit)
ingredient = Ingredient.objects.create(
food=f,
unit=u,
amount=amount,
note=note,
original_text=i['original_text'],
space=self.request.space,
)
ingredients_relation.append(Step.ingredients.through(step_id=get_step_id(i, first_step_of_recipe_dict, step_id_dict,recipe_ingredient_ref_link_dict), ingredient_id=ingredient.pk))
Step.ingredients.through.objects.bulk_create(ingredients_relation)
self.import_log.msg += f"Importing {len(mealie_database["recipes_to_categories"]) + len(mealie_database["recipes_to_tags"])} category and keyword relations...\n"
self.import_log.save()
recipe_keyword_relation = []
for rC in mealie_database['recipes_to_categories']:
if rC['recipe_id'] in recipes_dict:
recipe_keyword_relation.append(Recipe.keywords.through(recipe_id=recipes_dict[rC['recipe_id']], keyword_id=keywords_categories_dict[rC['category_id']]))
for rT in mealie_database['recipes_to_tags']:
if rT['recipe_id'] in recipes_dict:
recipe_keyword_relation.append(Recipe.keywords.through(recipe_id=recipes_dict[rT['recipe_id']], keyword_id=keywords_tags_dict[rT['tag_id']]))
Recipe.keywords.through.objects.bulk_create(recipe_keyword_relation, ignore_conflicts=True)
self.import_log.msg += f"Importing {len(mealie_database["recipe_nutrition"])} properties...\n"
self.import_log.save()
property_types_dict = {
'calories': PropertyType.objects.get_or_create(name=_('Calories'), space=self.request.space, defaults={'unit': 'kcal', 'fdc_id': 1008})[0],
'carbohydrate_content': PropertyType.objects.get_or_create(name=_('Carbohydrates'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1005})[0],
'cholesterol_content': PropertyType.objects.get_or_create(name=_('Cholesterol'), space=self.request.space, defaults={'unit': 'mg', 'fdc_id': 1253})[0],
'fat_content': PropertyType.objects.get_or_create(name=_('Fat'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1004})[0],
'fiber_content': PropertyType.objects.get_or_create(name=_('Fiber'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1079})[0],
'protein_content': PropertyType.objects.get_or_create(name=_('Protein'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1003})[0],
'saturated_fat_content': PropertyType.objects.get_or_create(name=_('Saturated Fat'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1258})[0],
'sodium_content': PropertyType.objects.get_or_create(name=_('Sodium'), space=self.request.space, defaults={'unit': 'mg', 'fdc_id': 1093})[0],
'sugar_content': PropertyType.objects.get_or_create(name=_('Sugar'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1063})[0],
'trans_fat_content': PropertyType.objects.get_or_create(name=_('Trans Fat'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1257})[0],
'unsaturated_fat_content': PropertyType.objects.get_or_create(name=_('Unsaturated Fat'), space=self.request.space, defaults={'unit': 'g'})[0],
}
with transaction.atomic():
recipe_properties_relation = []
properties_relation = []
for r in mealie_database['recipe_nutrition']:
if r['recipe_id'] in recipes_dict:
for key in property_types_dict:
if r[key]:
properties_relation.append(
Property(property_type_id=property_types_dict[key].pk,
property_amount=Decimal(str(r[key])) / (
Decimal(str(recipe_property_factor_dict[r['recipe_id']])) if r['recipe_id'] in recipe_property_factor_dict else 1),
open_data_food_slug=r['recipe_id'],
space=self.request.space))
properties = Property.objects.bulk_create(properties_relation)
property_ids = []
for p in properties:
recipe_properties_relation.append(Recipe.properties.through(recipe_id=recipes_dict[p.open_data_food_slug], property_id=p.pk))
property_ids.append(p.pk)
Recipe.properties.through.objects.bulk_create(recipe_properties_relation, ignore_conflicts=True)
Property.objects.filter(id__in=property_ids).update(open_data_food_slug=None)
# delete unused property types
for pT in property_types_dict:
try:
property_types_dict[pT].delete()
except:
pass
self.import_log.msg += f"Importing {len(mealie_database["recipe_comments"]) + len(mealie_database["recipe_timeline_events"])} comments and cook logs...\n"
self.import_log.save()
cook_log_list = []
for c in mealie_database['recipe_comments']:
if c['recipe_id'] in recipes_dict:
cook_log_list.append(CookLog(
recipe_id=recipes_dict[c['recipe_id']],
comment=c['text'],
created_at=c['created_at'],
created_by=self.request.user,
space=self.request.space,
))
for c in mealie_database['recipe_timeline_events']:
if c['recipe_id'] in recipes_dict:
if c['event_type'] == 'comment':
cook_log_list.append(CookLog(
recipe_id=recipes_dict[c['recipe_id']],
comment=c['message'],
created_at=c['created_at'],
created_by=self.request.user,
space=self.request.space,
))
CookLog.objects.bulk_create(cook_log_list)
if self.import_meal_plans:
self.import_log.msg += f"Importing {len(mealie_database["group_meal_plans"])} meal plans...\n"
self.import_log.save()
meal_types_dict = {}
meal_plans = []
for m in mealie_database['group_meal_plans']:
if m['recipe_id'] in recipes_dict:
if not m['entry_type'] in meal_types_dict:
meal_type = MealType.objects.get_or_create(name=m['entry_type'], created_by=self.request.user, space=self.request.space)[0]
meal_types_dict[m['entry_type']] = meal_type.pk
meal_plans.append(MealPlan(
recipe_id=recipes_dict[m['recipe_id']] if m['recipe_id'] else None,
title=m['title'] if m['title'] else "",
note=m['text'] if m['text'] else "",
from_date=m['date'],
to_date=m['date'],
meal_type_id=meal_types_dict[m['entry_type']],
created_by=self.request.user,
space=self.request.space,
))
MealPlan.objects.bulk_create(meal_plans)
if self.import_shopping_lists:
self.import_log.msg += f"Importing {len(mealie_database["shopping_list_items"])} shopping list items...\n"
self.import_log.save()
shopping_list_items = []
for sli in mealie_database['shopping_list_items']:
if not sli['checked']:
if sli['food_id']:
shopping_list_items.append(ShoppingListEntry(
amount=sli['quantity'],
unit_id=units_dict[sli['unit_id']] if sli['unit_id'] else None,
food_id=foods_dict[sli['food_id']] if sli['food_id'] else None,
created_by=self.request.user,
space=self.request.space,
))
elif not sli['food_id'] and sli['note'].strip():
amount, unit, food, note = ingredient_parser.parse(sli['note'].strip())
f = ingredient_parser.get_food(food)
u = ingredient_parser.get_unit(unit)
shopping_list_items.append(ShoppingListEntry(
amount=amount,
unit=u,
food=f,
created_by=self.request.user,
space=self.request.space,
))
ShoppingListEntry.objects.bulk_create(shopping_list_items)
self.import_log.msg += f"Importing Images. This might take some time ...\n"
self.import_log.save()
for r in mealie_database['recipes']:
try:
if recipe := Recipe.objects.filter(pk=recipes_dict[r['id']]).first():
self.import_recipe_image(recipe, BytesIO(file.read(f'data/recipes/{str(uuid.UUID(str(r['id'])))}/images/original.webp')), filetype='.webp')
except Exception:
pass
return recipes
def get_file_from_recipe(self, recipe):
raise NotImplementedError('Method not implemented in storage integration')
def get_step_id(i, first_step_of_recipe_dict, step_id_dict, recipe_ingredient_ref_link_dict):
try:
return step_id_dict[recipe_ingredient_ref_link_dict[i['reference_id']]]
except KeyError:
return first_step_of_recipe_dict[i['recipe_id']]

33 file diffs suppressed because they are too large

View File

@@ -1,34 +0,0 @@
# Generated by Django 4.2.22 on 2025-08-31 09:11
from django.db import migrations
from django_scopes import scopes_disabled
def migrate_comments(apps, schema_editor):
with scopes_disabled():
Comment = apps.get_model('cookbook', 'Comment')
CookLog = apps.get_model('cookbook', 'CookLog')
cook_logs = []
for c in Comment.objects.all():
cook_logs.append(CookLog(
recipe=c.recipe,
created_by=c.created_by,
created_at=c.created_at,
comment=c.text,
space=c.recipe.space,
))
CookLog.objects.bulk_create(cook_logs, unique_fields=('recipe', 'comment', 'created_at', 'created_by'))
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0222_alter_shoppinglistrecipe_created_by_and_more'),
]
operations = [
migrations.RunPython(migrate_comments),
]

View File

@@ -1,60 +0,0 @@
# Generated by Django 4.2.22 on 2025-09-05 06:51
import cookbook.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('cookbook', '0223_auto_20250831_1111'),
]
operations = [
migrations.AddField(
model_name='space',
name='ai_credits_balance',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='space',
name='ai_credits_monthly',
field=models.IntegerField(default=100),
),
migrations.CreateModel(
name='AiProvider',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=128)),
('description', models.TextField(blank=True)),
('api_key', models.CharField(max_length=2048)),
('model_name', models.CharField(max_length=256)),
('url', models.CharField(blank=True, max_length=2048, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('space', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='cookbook.space')),
],
),
migrations.CreateModel(
name='AiLog',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('function', models.CharField(max_length=64)),
('credit_cost', models.DecimalField(decimal_places=4, max_digits=16)),
('credits_from_balance', models.BooleanField(default=False)),
('input_tokens', models.IntegerField(default=0)),
('output_tokens', models.IntegerField(default=0)),
('start_time', models.DateTimeField(null=True)),
('end_time', models.DateTimeField(null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('ai_provider', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='cookbook.aiprovider')),
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
('space', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cookbook.space')),
],
bases=(models.Model, cookbook.models.PermissionModelMixin),
),
]
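
Further down in this diff, serializers.py imports get_monthly_token_usage from cookbook.helper.ai_helper and exposes it as ai_monthly_credits_used on the space. The helper's implementation is not part of this diff; a plausible reading, aggregating the AiLog table created here, is sketched below as an assumption (no ScopedManager is visible on AiLog in this diff, so the default manager is used):

from datetime import datetime, timezone

from django.db.models import Sum

from cookbook.models import AiLog

def get_monthly_token_usage(space):
    # Assumption: sum the credit cost of all AI calls in the current calendar month
    # that were billed against the monthly quota (credits_from_balance=False).
    month_start = datetime.now(timezone.utc).replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    total = AiLog.objects.filter(
        space=space,
        credits_from_balance=False,
        created_at__gte=month_start,
    ).aggregate(total=Sum('credit_cost'))['total']
    return total or 0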

View File

@@ -1,18 +0,0 @@
# Generated by Django 4.2.22 on 2025-09-08 19:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0224_space_ai_credits_balance_space_ai_credits_monthly_and_more'),
]
operations = [
migrations.AddField(
model_name='space',
name='ai_enabled',
field=models.BooleanField(default=True),
),
]

View File

@@ -1,23 +0,0 @@
# Generated by Django 4.2.22 on 2025-09-08 20:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0225_space_ai_enabled'),
]
operations = [
migrations.AddField(
model_name='aiprovider',
name='log_credit_cost',
field=models.BooleanField(default=True),
),
migrations.AlterField(
model_name='space',
name='ai_credits_monthly',
field=models.IntegerField(default=10000),
),
]

View File

@@ -1,24 +0,0 @@
# Generated by Django 4.2.22 on 2025-09-09 11:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0226_aiprovider_log_credit_cost_and_more'),
]
operations = [
migrations.AddField(
model_name='space',
name='ai_default_provider',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='space_ai_default_provider', to='cookbook.aiprovider'),
),
migrations.AlterField(
model_name='space',
name='ai_credits_balance',
field=models.DecimalField(decimal_places=4, default=0, max_digits=16),
),
]
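
Taken together, ai_credits_monthly (integer quota), ai_credits_balance (decimal top-up) and AiLog.credits_from_balance suggest a two-tier accounting model: spend from the monthly quota first, then fall back to the balance. How Tandoor actually charges requests is not visible in this diff; the sketch below only illustrates that reading, reusing get_monthly_token_usage as imported in serializers.py.

from decimal import Decimal

from cookbook.helper.ai_helper import get_monthly_token_usage

def charge_ai_credits(space, cost: Decimal):
    # Illustration only: decide whether a request is billed to the quota or the balance.
    if get_monthly_token_usage(space) + cost <= space.ai_credits_monthly:
        return {'credit_cost': cost, 'credits_from_balance': False}
    if space.ai_credits_balance >= cost:
        space.ai_credits_balance -= cost
        space.save(update_fields=['ai_credits_balance'])
        return {'credit_cost': cost, 'credits_from_balance': True}
    raise ValueError('AI credit quota and balance exhausted')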

View File

@@ -1,18 +0,0 @@
# Generated by Django 5.2.6 on 2025-09-10 20:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0001_squashed_0227_space_ai_default_provider_and_more'),
]
operations = [
migrations.AddField(
model_name='space',
name='space_setup_completed',
field=models.BooleanField(default=True),
),
]

View File

@@ -1,26 +0,0 @@
# Generated by Django 5.2.6 on 2025-09-24 17:20
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0228_space_space_setup_completed'),
]
operations = [
migrations.AlterModelOptions(
name='ailog',
options={'ordering': ('-created_at',)},
),
migrations.AlterModelOptions(
name='aiprovider',
options={'ordering': ('id',)},
),
migrations.AlterField(
model_name='storage',
name='token',
field=models.CharField(blank=True, max_length=4098, null=True),
),
]

View File

@@ -1,15 +0,0 @@
# Generated by Django 5.2.6 on 2025-09-25 18:56
from django.db import migrations
from django.contrib.postgres.operations import TrigramExtension, UnaccentExtension
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0229_alter_ailog_options_alter_aiprovider_options_and_more'),
]
operations = [
TrigramExtension(),
UnaccentExtension(),
]
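
TrigramExtension and UnaccentExtension enable PostgreSQL's pg_trgm and unaccent extensions, which Django's postgres contrib module builds on for fuzzy and accent-insensitive matching. A minimal sketch of the kind of query they make possible (the threshold is arbitrary; the function itself is illustrative, not project code):

from django.contrib.postgres.search import TrigramSimilarity
from django_scopes import scope

from cookbook.models import Recipe

def fuzzy_recipe_search(space, term, threshold=0.3):
    # pg_trgm ranks recipes by trigram similarity instead of exact matches;
    # Recipe uses a ScopedManager, so the query runs inside a space scope.
    with scope(space=space):
        return list(
            Recipe.objects
            .annotate(similarity=TrigramSimilarity('name', term))
            .filter(similarity__gt=threshold)
            .order_by('-similarity')
        )

# unaccent is exposed as a lookup, e.g. Recipe.objects.filter(name__unaccent__icontains='creme').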

View File

@@ -1,141 +0,0 @@
# Generated by Django 5.2.6 on 2025-09-30 18:47
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0230_auto_20250925_2056'),
]
operations = [
migrations.AlterModelOptions(
name='aiprovider',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='automation',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='bookmarkletimport',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='comment',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='connectorconfig',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='cooklog',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='customfilter',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='exportlog',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='food',
options={'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='importlog',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='invitelink',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='keyword',
options={'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='mealplan',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='mealtype',
options={'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='recipe',
options={'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='recipebook',
options={'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='recipeimport',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='sharelink',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='shoppinglistentry',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='shoppinglistrecipe',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='space',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='storage',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='supermarket',
options={'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='supermarketcategory',
options={'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='sync',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='synclog',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='telegrambot',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='unit',
options={'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='unitconversion',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='userfile',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='userspace',
options={'ordering': ('pk',)},
),
migrations.AlterModelOptions(
name='viewlog',
options={'ordering': ('pk',)},
),
]
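
This migration gives nearly every model a default Meta ordering (pk or name). The practical effect shows up in paginated API responses: without a default or explicit ordering, result order is database-dependent and Django emits UnorderedObjectListWarning when paginating. A small illustration, assuming an active space scope via django-scopes:

from django.core.paginator import Paginator
from django_scopes import scope

from cookbook.models import CookLog

def first_page_of_cook_logs(space, per_page=25):
    with scope(space=space):
        # With no Meta.ordering in place, order explicitly so page contents stay deterministic.
        qs = CookLog.objects.order_by('-created_at')
        return list(Paginator(qs, per_page).page(1))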

View File

@@ -329,13 +329,6 @@ class Space(ExportModelOperationsMixin('space'), models.Model):
demo = models.BooleanField(default=False)
food_inherit = models.ManyToManyField(FoodInheritField, blank=True)
space_setup_completed = models.BooleanField(default=True)
ai_enabled = models.BooleanField(default=True)
ai_credits_monthly = models.IntegerField(default=100)
ai_credits_balance = models.DecimalField(default=0, max_digits=16, decimal_places=4)
ai_default_provider = models.ForeignKey("AiProvider", on_delete=models.SET_NULL, null=True, blank=True, related_name='space_ai_default_provider')
internal_note = models.TextField(blank=True, null=True)
def safe_delete(self):
@@ -348,9 +341,6 @@ class Space(ExportModelOperationsMixin('space'), models.Model):
BookmarkletImport.objects.filter(space=self).delete()
CustomFilter.objects.filter(space=self).delete()
AiLog.objects.filter(space=self).delete()
AiProvider.objects.filter(space=self).delete()
Property.objects.filter(space=self).delete()
PropertyType.objects.filter(space=self).delete()
@@ -402,59 +392,6 @@ class Space(ExportModelOperationsMixin('space'), models.Model):
def __str__(self):
return self.name
class Meta:
ordering = ('pk',)
class AiProvider(models.Model):
name = models.CharField(max_length=128)
description = models.TextField(blank=True)
# AiProviders can be global, so space=null is allowed (configurable by superusers)
space = models.ForeignKey(Space, on_delete=models.CASCADE, null=True)
api_key = models.CharField(max_length=2048)
model_name = models.CharField(max_length=256)
url = models.CharField(max_length=2048, blank=True, null=True)
log_credit_cost = models.BooleanField(default=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return self.name
class Meta:
ordering = ('pk',)
class AiLog(models.Model, PermissionModelMixin):
F_FILE_IMPORT = 'FILE_IMPORT'
F_STEP_SORT = 'STEP_SORT'
F_FOOD_PROPERTIES = 'FOOD_PROPERTIES'
F_RECIPE_PROPERTIES = 'RECIPE_PROPERTIES'
ai_provider = models.ForeignKey(AiProvider, on_delete=models.SET_NULL, null=True)
function = models.CharField(max_length=64)
credit_cost = models.DecimalField(max_digits=16, decimal_places=4)
# if credits from balance were used, else it's from the monthly quota
credits_from_balance = models.BooleanField(default=False)
input_tokens = models.IntegerField(default=0)
output_tokens = models.IntegerField(default=0)
start_time = models.DateTimeField(null=True)
end_time = models.DateTimeField(null=True)
space = models.ForeignKey(Space, on_delete=models.CASCADE)
created_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return f"{self.function} {self.ai_provider.name} {self.created_at}"
class Meta:
ordering = ('-created_at',)
class ConnectorConfig(models.Model, PermissionModelMixin):
HOMEASSISTANT = 'HomeAssistant'
@@ -480,9 +417,6 @@ class ConnectorConfig(models.Model, PermissionModelMixin):
space = models.ForeignKey(Space, on_delete=models.CASCADE)
objects = ScopedManager(space='space')
class Meta:
ordering = ('pk',)
class UserPreference(models.Model, PermissionModelMixin):
# Themes
@@ -586,9 +520,6 @@ class UserSpace(models.Model, PermissionModelMixin):
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
ordering = ('pk',)
class Storage(models.Model, PermissionModelMixin):
DROPBOX = 'DB'
@@ -602,7 +533,7 @@ class Storage(models.Model, PermissionModelMixin):
)
username = models.CharField(max_length=128, blank=True, null=True)
password = models.CharField(max_length=128, blank=True, null=True)
token = models.CharField(max_length=4098, blank=True, null=True)
token = models.CharField(max_length=512, blank=True, null=True)
url = models.URLField(blank=True, null=True)
path = models.CharField(blank=True, default='', max_length=256)
created_by = models.ForeignKey(User, on_delete=models.PROTECT)
@@ -613,9 +544,6 @@ class Storage(models.Model, PermissionModelMixin):
def __str__(self):
return self.name
class Meta:
ordering = ('pk',)
class Sync(models.Model, PermissionModelMixin):
storage = models.ForeignKey(Storage, on_delete=models.PROTECT)
@@ -631,9 +559,6 @@ class Sync(models.Model, PermissionModelMixin):
def __str__(self):
return self.path
class Meta:
ordering = ('pk',)
class SupermarketCategory(models.Model, PermissionModelMixin, MergeModelMixin):
name = models.CharField(max_length=128, validators=[MinLengthValidator(1)])
@@ -659,7 +584,6 @@ class SupermarketCategory(models.Model, PermissionModelMixin, MergeModelMixin):
models.UniqueConstraint(fields=['space', 'name'], name='smc_unique_name_per_space'),
models.UniqueConstraint(fields=['space', 'open_data_slug'], name='supermarket_category_unique_open_data_slug_per_space')
]
ordering = ('name',)
class Supermarket(models.Model, PermissionModelMixin):
@@ -679,7 +603,6 @@ class Supermarket(models.Model, PermissionModelMixin):
models.UniqueConstraint(fields=['space', 'name'], name='sm_unique_name_per_space'),
models.UniqueConstraint(fields=['space', 'open_data_slug'], name='supermarket_unique_open_data_slug_per_space')
]
ordering = ('name',)
class SupermarketCategoryRelation(models.Model, PermissionModelMixin):
@@ -711,9 +634,6 @@ class SyncLog(models.Model, PermissionModelMixin):
def __str__(self):
return f"{self.created_at}:{self.sync} - {self.status}"
class Meta:
ordering = ('pk',)
class Keyword(ExportModelOperationsMixin('keyword'), TreeModel, PermissionModelMixin):
if SORT_TREE_BY_NAME:
@@ -731,7 +651,6 @@ class Keyword(ExportModelOperationsMixin('keyword'), TreeModel, PermissionModelM
models.UniqueConstraint(fields=['space', 'name'], name='kw_unique_name_per_space')
]
indexes = (Index(fields=['id', 'name']),)
ordering = ('name',)
class Unit(ExportModelOperationsMixin('unit'), models.Model, PermissionModelMixin, MergeModelMixin):
@@ -763,7 +682,6 @@ class Unit(ExportModelOperationsMixin('unit'), models.Model, PermissionModelMixi
models.UniqueConstraint(fields=['space', 'name'], name='u_unique_name_per_space'),
models.UniqueConstraint(fields=['space', 'open_data_slug'], name='unit_unique_open_data_slug_per_space')
]
ordering = ('name',)
class Food(ExportModelOperationsMixin('food'), TreeModel, PermissionModelMixin):
@@ -830,7 +748,14 @@ class Food(ExportModelOperationsMixin('food'), TreeModel, PermissionModelMixin):
self.delete()
return target
def delete(self):
if self.ingredient_set.all().exclude(step=None).count() > 0:
raise ProtectedError(self.name + _(" is part of a recipe step and cannot be deleted"), self.ingredient_set.all().exclude(step=None))
else:
return super().delete()
# MP_Tree move uses raw SQL to execute move, override behavior to force a save triggering post_save signal
def move(self, *args, **kwargs):
super().move(*args, **kwargs)
# treebeard bypasses the ORM, so explicitly save to trigger post_save signals; retrieve the object again to avoid writing the previous state back to disk
@@ -897,7 +822,6 @@ class Food(ExportModelOperationsMixin('food'), TreeModel, PermissionModelMixin):
Index(fields=['id']),
Index(fields=['name']),
)
ordering = ('name',)
class UnitConversion(ExportModelOperationsMixin('unit_conversion'), models.Model, PermissionModelMixin):
@@ -924,7 +848,6 @@ class UnitConversion(ExportModelOperationsMixin('unit_conversion'), models.Model
models.UniqueConstraint(fields=['space', 'base_unit', 'converted_unit', 'food'], name='f_unique_conversion_per_space'),
models.UniqueConstraint(fields=['space', 'open_data_slug'], name='unit_conversion_unique_open_data_slug_per_space')
]
ordering = ('pk',)
class Ingredient(ExportModelOperationsMixin('ingredient'), models.Model, PermissionModelMixin):
@@ -1129,14 +1052,13 @@ class Recipe(ExportModelOperationsMixin('recipe'), models.Model, PermissionModel
sub_food_recipes = Q(id__in=Food.objects.filter(ingredient__step__recipe__in=related_recipes).exclude(recipe=None).values_list('recipe'))
return Recipe.objects.filter(Q(id__in=related_recipes.values_list('id')) | sub_step_recipes | sub_food_recipes)
class Meta:
class Meta():
indexes = (
GinIndex(fields=["name_search_vector"]),
GinIndex(fields=["desc_search_vector"]),
Index(fields=['id']),
Index(fields=['name']),
)
ordering = ('name',)
class Comment(ExportModelOperationsMixin('comment'), models.Model, PermissionModelMixin):
@@ -1157,9 +1079,6 @@ class Comment(ExportModelOperationsMixin('comment'), models.Model, PermissionMod
def __str__(self):
return self.text
class Meta:
ordering = ('pk',)
class RecipeImport(models.Model, PermissionModelMixin):
@@ -1188,9 +1107,6 @@ class RecipeImport(models.Model, PermissionModelMixin):
self.delete()
return recipe
class Meta:
ordering = ('pk',)
class RecipeBook(ExportModelOperationsMixin('book'), models.Model, PermissionModelMixin):
name = models.CharField(max_length=128)
@@ -1208,7 +1124,6 @@ class RecipeBook(ExportModelOperationsMixin('book'), models.Model, PermissionMod
class Meta():
indexes = (Index(fields=['name']),)
ordering = ('name',)
class RecipeBookEntry(ExportModelOperationsMixin('book_entry'), models.Model, PermissionModelMixin):
@@ -1254,7 +1169,6 @@ class MealType(models.Model, PermissionModelMixin):
constraints = [
models.UniqueConstraint(fields=['space', 'name', 'created_by'], name='mt_unique_name_per_space'),
]
ordering = ('name',)
class MealPlan(ExportModelOperationsMixin('meal_plan'), models.Model, PermissionModelMixin):
@@ -1282,9 +1196,6 @@ class MealPlan(ExportModelOperationsMixin('meal_plan'), models.Model, Permission
def __str__(self):
return f'{self.get_label()} - {self.from_date} - {self.meal_type.name}'
class Meta:
ordering = ('pk',)
class ShoppingListRecipe(ExportModelOperationsMixin('shopping_list_recipe'), models.Model, PermissionModelMixin):
name = models.CharField(max_length=32, blank=True, default='')
@@ -1300,9 +1211,6 @@ class ShoppingListRecipe(ExportModelOperationsMixin('shopping_list_recipe'), mod
def __str__(self):
return f'Shopping list recipe {self.id} - {self.recipe}'
class Meta:
ordering = ('pk',)
class ShoppingListEntry(ExportModelOperationsMixin('shopping_list_entry'), models.Model, PermissionModelMixin):
list_recipe = models.ForeignKey(ShoppingListRecipe, on_delete=models.CASCADE, null=True, blank=True, related_name='entries')
@@ -1334,9 +1242,6 @@ class ShoppingListEntry(ExportModelOperationsMixin('shopping_list_entry'), model
except AttributeError:
return None
class Meta:
ordering = ('pk',)
class ShareLink(ExportModelOperationsMixin('share_link'), models.Model, PermissionModelMixin):
recipe = models.ForeignKey(Recipe, on_delete=models.CASCADE)
@@ -1352,9 +1257,6 @@ class ShareLink(ExportModelOperationsMixin('share_link'), models.Model, Permissi
def __str__(self):
return f'{self.recipe} - {self.uuid}'
class Meta:
ordering = ('pk',)
def default_valid_until():
return date.today() + timedelta(days=14)
@@ -1378,9 +1280,6 @@ class InviteLink(ExportModelOperationsMixin('invite_link'), models.Model, Permis
def __str__(self):
return f'{self.uuid}'
class Meta:
ordering = ('pk',)
class TelegramBot(models.Model, PermissionModelMixin):
token = models.CharField(max_length=256)
@@ -1395,9 +1294,6 @@ class TelegramBot(models.Model, PermissionModelMixin):
def __str__(self):
return f"{self.name}"
class Meta:
ordering = ('pk',)
class CookLog(ExportModelOperationsMixin('cook_log'), models.Model, PermissionModelMixin):
recipe = models.ForeignKey(Recipe, on_delete=models.CASCADE)
@@ -1415,7 +1311,7 @@ class CookLog(ExportModelOperationsMixin('cook_log'), models.Model, PermissionMo
def __str__(self):
return self.recipe.name
class Meta:
class Meta():
indexes = (
Index(fields=['id']),
Index(fields=['recipe']),
@@ -1424,7 +1320,6 @@ class CookLog(ExportModelOperationsMixin('cook_log'), models.Model, PermissionMo
Index(fields=['created_by']),
Index(fields=['created_by', 'rating']),
)
ordering = ('pk',)
class ViewLog(ExportModelOperationsMixin('view_log'), models.Model, PermissionModelMixin):
@@ -1438,14 +1333,13 @@ class ViewLog(ExportModelOperationsMixin('view_log'), models.Model, PermissionMo
def __str__(self):
return self.recipe.name
class Meta:
class Meta():
indexes = (
Index(fields=['recipe']),
Index(fields=['-created_at']),
Index(fields=['created_by']),
Index(fields=['recipe', '-created_at', 'created_by']),
)
ordering = ('pk',)
class ImportLog(models.Model, PermissionModelMixin):
@@ -1466,9 +1360,6 @@ class ImportLog(models.Model, PermissionModelMixin):
def __str__(self):
return f"{self.created_at}:{self.type}"
class Meta:
ordering = ('pk',)
class ExportLog(models.Model, PermissionModelMixin):
type = models.CharField(max_length=32)
@@ -1489,9 +1380,6 @@ class ExportLog(models.Model, PermissionModelMixin):
def __str__(self):
return f"{self.created_at}:{self.type}"
class Meta:
ordering = ('pk',)
class BookmarkletImport(ExportModelOperationsMixin('bookmarklet_import'), models.Model, PermissionModelMixin):
html = models.TextField()
@@ -1502,9 +1390,6 @@ class BookmarkletImport(ExportModelOperationsMixin('bookmarklet_import'), models
objects = ScopedManager(space='space')
space = models.ForeignKey(Space, on_delete=models.CASCADE)
class Meta:
ordering = ('pk',)
# field names used to configure search behavior - all data populated during data migration
# other option is to use a MultiSelectField from https://github.com/goinnn/django-multiselectfield
@@ -1572,9 +1457,6 @@ class UserFile(ExportModelOperationsMixin('user_files'), models.Model, Permissio
def __str__(self):
return f'{self.name} (#{self.id})'
class Meta:
ordering = ('pk',)
class Automation(ExportModelOperationsMixin('automations'), models.Model, PermissionModelMixin):
FOOD_ALIAS = 'FOOD_ALIAS'
@@ -1621,9 +1503,6 @@ class Automation(ExportModelOperationsMixin('automations'), models.Model, Permis
objects = ScopedManager(space='space')
space = models.ForeignKey(Space, on_delete=models.CASCADE)
class Meta:
ordering = ('pk',)
class CustomFilter(models.Model, PermissionModelMixin):
RECIPE = 'RECIPE'
@@ -1654,4 +1533,3 @@ class CustomFilter(models.Model, PermissionModelMixin):
constraints = [
models.UniqueConstraint(fields=['space', 'name'], name='cf_unique_name_per_space')
]
ordering = ('pk',)
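
The Food.delete override earlier in this file refuses to delete a food that is still referenced by a recipe step and raises ProtectedError instead. Callers have to handle that exception; the view below is a hedged sketch of how an API endpoint might surface it (the response shape is an assumption, not Tandoor's actual handler):

from django.db.models import ProtectedError
from rest_framework import status
from rest_framework.response import Response

def delete_food(food):
    try:
        food.delete()
    except ProtectedError as e:
        # e.protected_objects holds the ingredients that still reference the food.
        return Response(
            {'error': str(e), 'protected': [i.pk for i in e.protected_objects]},
            status=status.HTTP_403_FORBIDDEN,
        )
    return Response(status=status.HTTP_204_NO_CONTENT)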

View File

@@ -24,9 +24,8 @@ from rest_framework.fields import IntegerField
from cookbook.helper.CustomStorageClass import CachedS3Boto3Storage
from cookbook.helper.HelperFunctions import str2bool
from cookbook.helper.ai_helper import get_monthly_token_usage
from cookbook.helper.image_processing import is_file_type_allowed
from cookbook.helper.permission_helper import above_space_limit, create_space_for_user
from cookbook.helper.permission_helper import above_space_limit
from cookbook.helper.property_helper import FoodPropertyHelper
from cookbook.helper.shopping_helper import RecipeShoppingEditor
from cookbook.helper.unit_conversion_helper import UnitConversionHelper
@@ -37,7 +36,7 @@ from cookbook.models import (Automation, BookmarkletImport, Comment, CookLog, Cu
ShareLink, ShoppingListEntry, ShoppingListRecipe, Space,
Step, Storage, Supermarket, SupermarketCategory,
SupermarketCategoryRelation, Sync, SyncLog, Unit, UnitConversion,
UserFile, UserPreference, UserSpace, ViewLog, ConnectorConfig, SearchPreference, SearchFields, AiLog, AiProvider)
UserFile, UserPreference, UserSpace, ViewLog, ConnectorConfig, SearchPreference, SearchFields)
from cookbook.templatetags.custom_tags import markdown
from recipes.settings import AWS_ENABLED, MEDIA_URL, EMAIL_HOST
@@ -151,22 +150,19 @@ class CustomOnHandField(serializers.Field):
return instance
def to_representation(self, obj):
try:
if not self.context["request"].user.is_authenticated:
return []
shared_users = []
if c := caches['default'].get(f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', None):
shared_users = c
else:
try:
shared_users = [x.id for x in list(self.context['request'].user.get_shopping_share())] + [self.context['request'].user.id]
caches['default'].set(f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', shared_users, timeout=5 * 60)
# TODO ugly hack that improves API performance significantly, should be done properly
except AttributeError: # Anonymous users (using share links) don't have shared users
pass
return obj.onhand_users.filter(id__in=shared_users).exists()
except AttributeError:
if not self.context["request"].user.is_authenticated:
return []
shared_users = []
if c := caches['default'].get(f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', None):
shared_users = c
else:
try:
shared_users = [x.id for x in list(self.context['request'].user.get_shopping_share())] + [self.context['request'].user.id]
caches['default'].set(f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', shared_users, timeout=5 * 60)
# TODO ugly hack that improves API performance significantly, should be done properly
except AttributeError: # Anonymous users (using share links) don't have shared users
pass
return obj.onhand_users.filter(id__in=shared_users).exists()
def to_internal_value(self, data):
return data
@@ -329,62 +325,12 @@ class UserFileViewSerializer(serializers.ModelSerializer):
read_only_fields = ('id', 'file', 'file_download', 'file_size_kb', 'preview', 'created_by', 'created_at')
class AiProviderSerializer(serializers.ModelSerializer):
api_key = serializers.CharField(required=False, write_only=True)
def create(self, validated_data):
validated_data = self.handle_global_space_logic(validated_data)
return super().create(validated_data)
def update(self, instance, validated_data):
validated_data = self.handle_global_space_logic(validated_data, instance=instance)
return super().update(instance, validated_data)
def handle_global_space_logic(self, validated_data, instance=None):
"""
allow superusers to create AI providers without a space but make sure everyone else only uses their own space
"""
if self.context['request'].user.is_superuser:
if ('space' not in validated_data or not validated_data['space']):
validated_data['space'] = None
else:
validated_data['space'] = self.context['request'].space
else:
if instance:
validated_data['space'] = instance.space
else:
validated_data['space'] = self.context['request'].space
if 'log_credit_cost' in validated_data and not self.context['request'].user.is_superuser:
del validated_data['log_credit_cost']
return validated_data
class Meta:
model = AiProvider
fields = ('id', 'name', 'description', 'api_key', 'model_name', 'url', 'log_credit_cost', 'space', 'created_at', 'updated_at')
read_only_fields = ('created_at', 'updated_at',)
class AiLogSerializer(serializers.ModelSerializer):
ai_provider = AiProviderSerializer(read_only=True)
class Meta:
model = AiLog
fields = ('id', 'ai_provider', 'function', 'credit_cost', 'credits_from_balance', 'input_tokens', 'output_tokens', 'start_time', 'end_time', 'created_by', 'created_at',
'updated_at')
read_only_fields = ('__all__',)
class SpaceSerializer(WritableNestedModelSerializer):
created_by = UserSerializer(read_only=True)
user_count = serializers.SerializerMethodField('get_user_count', read_only=True)
recipe_count = serializers.SerializerMethodField('get_recipe_count', read_only=True)
file_size_mb = serializers.SerializerMethodField('get_file_size_mb', read_only=True)
ai_monthly_credits_used = serializers.SerializerMethodField('get_ai_monthly_credits_used', read_only=True)
ai_default_provider = AiProviderSerializer(required=False, allow_null=True)
food_inherit = FoodInheritFieldSerializer(many=True, required=False)
user_count = serializers.SerializerMethodField('get_user_count')
recipe_count = serializers.SerializerMethodField('get_recipe_count')
file_size_mb = serializers.SerializerMethodField('get_file_size_mb')
food_inherit = FoodInheritFieldSerializer(many=True)
image = UserFileViewSerializer(required=False, many=False, allow_null=True)
nav_logo = UserFileViewSerializer(required=False, many=False, allow_null=True)
custom_space_theme = UserFileViewSerializer(required=False, many=False, allow_null=True)
@@ -404,10 +350,6 @@ class SpaceSerializer(WritableNestedModelSerializer):
def get_recipe_count(self, obj):
return Recipe.objects.filter(space=obj).count()
@extend_schema_field(int)
def get_ai_monthly_credits_used(self, obj):
return get_monthly_token_usage(obj)
@extend_schema_field(float)
def get_file_size_mb(self, obj):
try:
@@ -416,36 +358,7 @@ class SpaceSerializer(WritableNestedModelSerializer):
return 0
def create(self, validated_data):
if Space.objects.filter(created_by=self.context['request'].user).count() >= self.context['request'].user.userpreference.max_owned_spaces:
raise serializers.ValidationError(
_('You have reached the maximum amount of spaces that can be owned by you.') + f' ({self.context['request'].user.userpreference.max_owned_spaces})')
name = None
if 'name' in validated_data:
name = validated_data['name']
user_space = create_space_for_user(self.context['request'].user, name)
return user_space.space
def update(self, instance, validated_data):
validated_data = self.filter_superuser_parameters(validated_data)
if 'name' in validated_data:
if Space.objects.filter(Q(name=validated_data['name']), ~Q(pk=instance.pk)).exists():
raise ValidationError(_('Space Name must be unique.'))
return super().update(instance, validated_data)
def filter_superuser_parameters(self, validated_data):
if 'ai_enabled' in validated_data and not self.context['request'].user.is_superuser:
del validated_data['ai_enabled']
if 'ai_credits_monthly' in validated_data and not self.context['request'].user.is_superuser:
del validated_data['ai_credits_monthly']
if 'ai_credits_balance' in validated_data and not self.context['request'].user.is_superuser:
del validated_data['ai_credits_balance']
return validated_data
raise ValidationError('Cannot create using this endpoint')
class Meta:
model = Space
@@ -453,11 +366,10 @@ class SpaceSerializer(WritableNestedModelSerializer):
'id', 'name', 'created_by', 'created_at', 'message', 'max_recipes', 'max_file_storage_mb', 'max_users',
'allow_sharing', 'demo', 'food_inherit', 'user_count', 'recipe_count', 'file_size_mb',
'image', 'nav_logo', 'space_theme', 'custom_space_theme', 'nav_bg_color', 'nav_text_color',
'logo_color_32', 'logo_color_128', 'logo_color_144', 'logo_color_180', 'logo_color_192', 'logo_color_512', 'logo_color_svg', 'ai_credits_monthly',
'ai_credits_balance', 'ai_monthly_credits_used', 'ai_enabled', 'ai_default_provider', 'space_setup_completed')
'logo_color_32', 'logo_color_128', 'logo_color_144', 'logo_color_180', 'logo_color_192', 'logo_color_512', 'logo_color_svg',)
read_only_fields = (
'id', 'created_by', 'created_at', 'max_recipes', 'max_file_storage_mb', 'max_users', 'allow_sharing',
'demo', 'ai_monthly_credits_used')
'demo',)
class UserSpaceSerializer(WritableNestedModelSerializer):
@@ -680,7 +592,7 @@ class KeywordSerializer(UniqueFieldsMixin, ExtendedRecipeMixin):
fields = (
'id', 'name', 'label', 'description', 'image', 'parent', 'numchild', 'numrecipe', 'created_at',
'updated_at', 'full_name')
read_only_fields = ('id', 'label', 'numchild', 'numrecipe', 'parent', 'image')
read_only_fields = ('id', 'label', 'numchild', 'parent', 'image')
class UnitSerializer(UniqueFieldsMixin, ExtendedRecipeMixin, OpenDataModelMixin):
@@ -846,31 +758,28 @@ class FoodSerializer(UniqueFieldsMixin, WritableNestedModelSerializer, ExtendedR
@extend_schema_field(bool)
def get_substitute_onhand(self, obj):
try:
if not self.context["request"].user.is_authenticated:
return []
shared_users = []
if c := caches['default'].get(
f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', None):
shared_users = c
else:
try:
shared_users = [x.id for x in list(self.context['request'].user.get_shopping_share())] + [
self.context['request'].user.id]
caches['default'].set(
f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}',
shared_users, timeout=5 * 60)
# TODO ugly hack that improves API performance significantly, should be done properly
except AttributeError: # Anonymous users (using share links) don't have shared users
pass
filter = Q(id__in=obj.substitute.all())
if obj.substitute_siblings:
filter |= Q(path__startswith=obj.path[:Food.steplen * (obj.depth - 1)], depth=obj.depth)
if obj.substitute_children:
filter |= Q(path__startswith=obj.path, depth__gt=obj.depth)
return Food.objects.filter(filter).filter(onhand_users__id__in=shared_users).exists()
except AttributeError:
if not self.context["request"].user.is_authenticated:
return []
shared_users = []
if c := caches['default'].get(
f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', None):
shared_users = c
else:
try:
shared_users = [x.id for x in list(self.context['request'].user.get_shopping_share())] + [
self.context['request'].user.id]
caches['default'].set(
f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}',
shared_users, timeout=5 * 60)
# TODO ugly hack that improves API performance significantly, should be done properly
except AttributeError: # Anonymous users (using share links) don't have shared users
pass
filter = Q(id__in=obj.substitute.all())
if obj.substitute_siblings:
filter |= Q(path__startswith=obj.path[:Food.steplen * (obj.depth - 1)], depth=obj.depth)
if obj.substitute_children:
filter |= Q(path__startswith=obj.path, depth__gt=obj.depth)
return Food.objects.filter(filter).filter(onhand_users__id__in=shared_users).exists()
def create(self, validated_data):
name = validated_data['name'].strip()
@@ -878,7 +787,7 @@ class FoodSerializer(UniqueFieldsMixin, WritableNestedModelSerializer, ExtendedR
if plural_name := validated_data.pop('plural_name', None):
plural_name = plural_name.strip()
if food := Food.objects.filter(Q(name__iexact=name) | Q(plural_name__iexact=name)).first():
if food := Food.objects.filter(Q(name=name) | Q(plural_name=name)).first():
return food
space = validated_data.pop('space', self.context['request'].space)
@@ -1129,7 +1038,7 @@ class RecipeOverviewSerializer(RecipeBaseSerializer):
fields = (
'id', 'name', 'description', 'image', 'keywords', 'working_time',
'waiting_time', 'created_by', 'created_at', 'updated_at',
'internal', 'private', 'servings', 'servings_text', 'rating', 'last_cooked', 'new', 'recent'
'internal', 'servings', 'servings_text', 'rating', 'last_cooked', 'new', 'recent'
)
# TODO having these readonly fields makes "RecipeOverview.ts" (API Client) not generate the RecipeOverviewToJSON second else block which leads to errors when using the api
# TODO find a solution (custom schema?) to have these fields readonly (to save performance) and generate a proper client (two serializers would probably do the trick)
@@ -1203,57 +1112,6 @@ class RecipeImportSerializer(SpacedModelSerializer):
fields = '__all__'
class RecipeBatchUpdateSerializer(serializers.Serializer):
recipes = serializers.ListField(child=serializers.IntegerField())
keywords_add = serializers.ListField(child=serializers.IntegerField())
keywords_remove = serializers.ListField(child=serializers.IntegerField())
keywords_set = serializers.ListField(child=serializers.IntegerField())
keywords_remove_all = serializers.BooleanField(default=False)
working_time = serializers.IntegerField(required=False, allow_null=True)
waiting_time = serializers.IntegerField(required=False, allow_null=True)
servings = serializers.IntegerField(required=False, allow_null=True)
servings_text = serializers.CharField(required=False, allow_null=True, allow_blank=True)
private = serializers.BooleanField(required=False, allow_null=True)
shared_add = serializers.ListField(child=serializers.IntegerField())
shared_remove = serializers.ListField(child=serializers.IntegerField())
shared_set = serializers.ListField(child=serializers.IntegerField())
shared_remove_all = serializers.BooleanField(default=False)
show_ingredient_overview = serializers.BooleanField(required=False, allow_null=True)
clear_description = serializers.BooleanField(required=False, allow_null=True)
class FoodBatchUpdateSerializer(serializers.Serializer):
foods = serializers.ListField(child=serializers.IntegerField())
category = serializers.IntegerField(required=False, allow_null=True)
substitute_add = serializers.ListField(child=serializers.IntegerField())
substitute_remove = serializers.ListField(child=serializers.IntegerField())
substitute_set = serializers.ListField(child=serializers.IntegerField())
substitute_remove_all = serializers.BooleanField(default=False)
inherit_fields_add = serializers.ListField(child=serializers.IntegerField())
inherit_fields_remove = serializers.ListField(child=serializers.IntegerField())
inherit_fields_set = serializers.ListField(child=serializers.IntegerField())
inherit_fields_remove_all = serializers.BooleanField(default=False)
child_inherit_fields_add = serializers.ListField(child=serializers.IntegerField())
child_inherit_fields_remove = serializers.ListField(child=serializers.IntegerField())
child_inherit_fields_set = serializers.ListField(child=serializers.IntegerField())
child_inherit_fields_remove_all = serializers.BooleanField(default=False)
substitute_children = serializers.BooleanField(required=False, allow_null=True)
substitute_siblings = serializers.BooleanField(required=False, allow_null=True)
ignore_shopping = serializers.BooleanField(required=False, allow_null=True)
on_hand = serializers.BooleanField(required=False, allow_null=True)
parent_remove = serializers.BooleanField(required=False, allow_null=True)
parent_set = serializers.IntegerField(required=False, allow_null=True)
class CustomFilterSerializer(SpacedModelSerializer, WritableNestedModelSerializer):
shared = UserSerializer(many=True, required=False)
@@ -1365,8 +1223,8 @@ class MealPlanSerializer(SpacedModelSerializer, WritableNestedModelSerializer):
class AutoMealPlanSerializer(serializers.Serializer):
start_date = serializers.DateTimeField()
end_date = serializers.DateTimeField()
start_date = serializers.DateField()
end_date = serializers.DateField()
meal_type_id = serializers.IntegerField()
keyword_ids = serializers.ListField()
servings = CustomDecimalField()
@@ -1622,7 +1480,7 @@ class InviteLinkSerializer(WritableNestedModelSerializer):
fields = (
'id', 'uuid', 'email', 'group', 'valid_until', 'used_by', 'reusable', 'internal_note', 'created_by',
'created_at',)
read_only_fields = ('id', 'uuid', 'used_by', 'created_by', 'created_at',)
read_only_fields = ('id', 'uuid', 'created_by', 'created_at',)
# CORS, REST and Scopes aren't currently working
@@ -1684,6 +1542,7 @@ class ServerSettingsSerializer(serializers.Serializer):
# TODO add all other relevant settings including path/url related ones?
shopping_min_autosync_interval = serializers.CharField()
enable_pdf_export = serializers.BooleanField()
enable_ai_import = serializers.BooleanField()
disable_external_connectors = serializers.BooleanField()
terms_url = serializers.CharField()
privacy_url = serializers.CharField()
@@ -1724,11 +1583,6 @@ class FdcQuerySerializer(serializers.Serializer):
foods = FdcQueryFoodsSerializer(many=True)
class GenericModelReferenceSerializer(serializers.Serializer):
id = serializers.IntegerField()
model = serializers.CharField()
name = serializers.CharField()
# Export/Import Serializers
class KeywordExportSerializer(KeywordSerializer):
@@ -1912,10 +1766,8 @@ class RecipeFromSourceResponseSerializer(serializers.Serializer):
class AiImportSerializer(serializers.Serializer):
ai_provider_id = serializers.IntegerField()
file = serializers.FileField(allow_null=True)
text = serializers.CharField(allow_null=True, allow_blank=True)
recipe_id = serializers.CharField(allow_null=True, allow_blank=True)
class ExportRequestSerializer(serializers.Serializer):

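
The AiProviderSerializer in this diff lets superusers persist providers with space=None, i.e. global providers available to every space, while regular users are pinned to their own space. A view listing providers would then presumably combine the space's own providers with the global ones; the queryset below is a sketch of that assumption, not the actual view code:

from django.db.models import Q

from cookbook.models import AiProvider

def providers_for_space(space):
    # Space-specific providers plus global ones (space=None), as suggested by the
    # "AiProviders can be global" comment in models.py; AiProvider has no ScopedManager in this diff.
    return AiProvider.objects.filter(Q(space=space) | Q(space=None)).order_by('pk')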
Some files were not shown because too many files have changed in this diff.