mirror of
https://github.com/TandoorRecipes/recipes.git
synced 2025-12-25 03:13:13 -05:00
Compare commits
14 Commits
2.2.1
...
2.0.0-beta
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0104b600cc | ||
|
|
7baad85112 | ||
|
|
4b0bfa9a85 | ||
|
|
5e7c75ef68 | ||
|
|
954a35bea2 | ||
|
|
88347d44c8 | ||
|
|
2c13e76fbb | ||
|
|
362f634828 | ||
|
|
2fb968cfd3 | ||
|
|
4d3dab6edd | ||
|
|
8f1b593ad1 | ||
|
|
1002f0d61f | ||
|
|
20cb218688 | ||
|
|
bba44b0c1e |
@@ -1,7 +1,7 @@
|
||||
FROM python:3.13-alpine3.22
|
||||
FROM python:3.10-alpine3.18
|
||||
|
||||
#Install all dependencies.
|
||||
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev openldap git yarn libgcc libstdc++ nginx tini envsubst nodejs npm
|
||||
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev openldap git yarn
|
||||
|
||||
#Print all logs without buffering it.
|
||||
ENV PYTHONUNBUFFERED 1
|
||||
@@ -19,10 +19,8 @@ RUN \
|
||||
if [ `apk --print-arch` = "armv7" ]; then \
|
||||
printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf ; \
|
||||
fi
|
||||
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev xmlsec-dev xmlsec build-base g++ curl rust && \
|
||||
python -m pip install --upgrade pip && \
|
||||
pip debug -v && \
|
||||
pip install wheel==0.45.1 && \
|
||||
pip install setuptools_rust==1.10.2 && \
|
||||
pip install -r /tmp/pip-tmp/requirements.txt --no-cache-dir &&\
|
||||
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev && \
|
||||
echo -n "INPUT ( libldap.so )" > /usr/lib/libldap_r.so && \
|
||||
pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt && \
|
||||
rm -rf /tmp/pip-tmp && \
|
||||
apk --purge del .build-deps
|
||||
@@ -29,4 +29,3 @@ vue/babel.config*
|
||||
vue/package.json
|
||||
vue/tsconfig.json
|
||||
vue/src/utils/openapi
|
||||
venv
|
||||
43
.github/workflows/build-docker.yml
vendored
43
.github/workflows/build-docker.yml
vendored
@@ -21,7 +21,7 @@ jobs:
|
||||
suffix: ""
|
||||
continue-on-error: false
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Get version number
|
||||
id: get_version
|
||||
@@ -74,8 +74,9 @@ jobs:
|
||||
flavor: |
|
||||
latest=false
|
||||
suffix=${{ matrix.suffix }}
|
||||
# disable latest for tagged releases while in beta
|
||||
# type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') }}
|
||||
tags: |
|
||||
type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') }}
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{major}}
|
||||
@@ -93,34 +94,34 @@ jobs:
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
notify-stable:
|
||||
name: Notify Stable
|
||||
runs-on: ubuntu-latest
|
||||
needs: build-container
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
steps:
|
||||
- name: Set tag name
|
||||
run: |
|
||||
# Strip "refs/tags/" prefix
|
||||
echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
|
||||
# Send stable discord notification
|
||||
- name: Discord notification
|
||||
env:
|
||||
DISCORD_WEBHOOK: ${{ secrets.DISCORD_RELEASE_WEBHOOK }}
|
||||
uses: Ilshidur/action-discord@0.4.0
|
||||
with:
|
||||
args: '🚀 Version {{ VERSION }} of tandoor has been released 🥳 Check it out https://github.com/vabene1111/recipes/releases/tag/{{ VERSION }}'
|
||||
# notify-stable:
|
||||
# name: Notify Stable
|
||||
# runs-on: ubuntu-latest
|
||||
# needs: build-container
|
||||
# if: startsWith(github.ref, 'refs/tags/')
|
||||
# steps:
|
||||
# - name: Set tag name
|
||||
# run: |
|
||||
# # Strip "refs/tags/" prefix
|
||||
# echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
|
||||
# # Send stable discord notification
|
||||
# - name: Discord notification
|
||||
# env:
|
||||
# DISCORD_WEBHOOK: ${{ secrets.DISCORD_RELEASE_WEBHOOK }}
|
||||
# uses: Ilshidur/action-discord@0.3.2
|
||||
# with:
|
||||
# args: '🚀 Version {{ VERSION }} of tandoor has been released 🥳 Check it out https://github.com/vabene1111/recipes/releases/tag/{{ VERSION }}'
|
||||
|
||||
notify-beta:
|
||||
name: Notify Beta
|
||||
runs-on: ubuntu-latest
|
||||
needs: build-container
|
||||
if: github.ref == 'refs/heads/beta'
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
steps:
|
||||
# Send beta discord notification
|
||||
- name: Discord notification
|
||||
env:
|
||||
DISCORD_WEBHOOK: ${{ secrets.DISCORD_BETA_WEBHOOK }}
|
||||
uses: Ilshidur/action-discord@0.4.0
|
||||
uses: Ilshidur/action-discord@0.3.2
|
||||
with:
|
||||
args: '🚀 The Tandoor 2 Image has been updated! 🥳'
|
||||
|
||||
6
.github/workflows/ci.yml
vendored
6
.github/workflows/ci.yml
vendored
@@ -12,8 +12,8 @@ jobs:
|
||||
python-version: ["3.12"]
|
||||
node-version: ["22"]
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
- uses: actions/checkout@v4
|
||||
- uses: awalsh128/cache-apt-pkgs-action@v1.4.3
|
||||
with:
|
||||
packages: libsasl2-dev python3-dev libxml2-dev libxmlsec1-dev libxslt-dev libxmlsec1-openssl libxslt-dev libldap2-dev libssl-dev gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev xmlsec-dev xmlsec build-base g++ curl
|
||||
version: 1.0
|
||||
@@ -38,7 +38,7 @@ jobs:
|
||||
./cookbook/static
|
||||
./staticfiles
|
||||
key: |
|
||||
${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.node-version }}-collectstatic-${{ hashFiles('**/*.css', '**/*.js', 'vue3/src/*') }}
|
||||
${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.node-version }}-collectstatic-${{ hashFiles('**/*.css', '**/*.js', 'vue/src/*') }}
|
||||
|
||||
# Build Vue frontend & Dependencies
|
||||
- name: Set up Node ${{ matrix.node-version }}
|
||||
|
||||
2
.github/workflows/codeql-analysis.yml
vendored
2
.github/workflows/codeql-analysis.yml
vendored
@@ -12,7 +12,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
# We must fetch at least the immediate parents so that if this is
|
||||
# a pull request then we can checkout the head.
|
||||
|
||||
2
.github/workflows/docs.yml
vendored
2
.github/workflows/docs.yml
vendored
@@ -12,7 +12,7 @@ jobs:
|
||||
if: github.repository_owner == 'TandoorRecipes' && ${{ github.event.workflow_run.conclusion == 'success' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.x
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -89,6 +89,3 @@ venv/
|
||||
.idea/easy-i18n.xml
|
||||
cookbook/static/vue3
|
||||
vue3/node_modules
|
||||
cookbook/tests/other/docs/reports/tests/tests.html
|
||||
cookbook/tests/other/docs/reports/tests/pytest.xml
|
||||
vue3/src/plugins
|
||||
|
||||
62
.vscode/tasks.json
vendored
62
.vscode/tasks.json
vendored
@@ -14,16 +14,28 @@
|
||||
},
|
||||
{
|
||||
"label": "Setup Dev Server",
|
||||
"dependsOn": ["Run Migrations"]
|
||||
"dependsOn": ["Run Migrations", "Yarn Build"]
|
||||
},
|
||||
{
|
||||
"label": "Run Dev Server",
|
||||
"type": "shell",
|
||||
"type": "shell",
|
||||
"dependsOn": ["Setup Dev Server"],
|
||||
"command": "DEBUG=1 python3 manage.py runserver"
|
||||
"command": "python3 manage.py runserver"
|
||||
},
|
||||
{
|
||||
"label": "Yarn Install",
|
||||
"dependsOn": ["Yarn Install - Vue", "Yarn Install - Vue3"]
|
||||
},
|
||||
{
|
||||
"label": "Yarn Install - Vue",
|
||||
"type": "shell",
|
||||
"command": "yarn install --force",
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Yarn Install - Vue3",
|
||||
"type": "shell",
|
||||
"command": "yarn install --force",
|
||||
"options": {
|
||||
@@ -32,6 +44,18 @@
|
||||
},
|
||||
{
|
||||
"label": "Generate API",
|
||||
"dependsOn": ["Generate API - Vue", "Generate API - Vue3"]
|
||||
},
|
||||
{
|
||||
"label": "Generate API - Vue",
|
||||
"type": "shell",
|
||||
"command": "openapi-generator-cli generate -g typescript-axios -i http://127.0.0.1:8000/openapi/",
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue/src/utils/openapi"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Generate API - Vue3",
|
||||
"type": "shell",
|
||||
"command": "openapi-generator-cli generate -g typescript-fetch -i http://127.0.0.1:8000/openapi/",
|
||||
"options": {
|
||||
@@ -39,19 +63,43 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Yarn Dev",
|
||||
"label": "Yarn Serve",
|
||||
"type": "shell",
|
||||
"command": "yarn dev",
|
||||
"dependsOn": ["Yarn Install"],
|
||||
"command": "yarn serve",
|
||||
"dependsOn": ["Yarn Install - Vue"],
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Vite Serve",
|
||||
"type": "shell",
|
||||
"command": "vite",
|
||||
"dependsOn": ["Yarn Install - Vue3"],
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Yarn Build",
|
||||
"dependsOn": ["Yarn Build - Vue", "Vite Build - Vue3"],
|
||||
"group": "build"
|
||||
},
|
||||
{
|
||||
"label": "Yarn Build - Vue",
|
||||
"type": "shell",
|
||||
"command": "yarn build",
|
||||
"dependsOn": ["Yarn Install"],
|
||||
"dependsOn": ["Yarn Install - Vue"],
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue"
|
||||
},
|
||||
"group": "build"
|
||||
},
|
||||
{
|
||||
"label": "Vite Build - Vue3",
|
||||
"type": "shell",
|
||||
"command": "vite build",
|
||||
"dependsOn": ["Yarn Install - Vue3"],
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue3"
|
||||
},
|
||||
|
||||
31
Dockerfile
31
Dockerfile
@@ -1,14 +1,15 @@
|
||||
FROM python:3.13-alpine3.22
|
||||
FROM python:3.13-alpine3.21
|
||||
|
||||
#Install all dependencies.
|
||||
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev openldap git libgcc libstdc++ nginx tini envsubst nodejs npm
|
||||
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev openldap git libgcc libstdc++
|
||||
|
||||
#Print all logs without buffering it.
|
||||
ENV PYTHONUNBUFFERED=1 \
|
||||
DOCKER=true
|
||||
ENV PYTHONUNBUFFERED 1
|
||||
|
||||
ENV DOCKER true
|
||||
|
||||
#This port will be used by gunicorn.
|
||||
EXPOSE 80 8080
|
||||
EXPOSE 8080
|
||||
|
||||
#Create app dir and install requirements.
|
||||
RUN mkdir /opt/recipes
|
||||
@@ -16,27 +17,29 @@ WORKDIR /opt/recipes
|
||||
|
||||
COPY requirements.txt ./
|
||||
|
||||
RUN \
|
||||
if [ `apk --print-arch` = "armv7" ]; then \
|
||||
printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf ; \
|
||||
fi
|
||||
|
||||
# remove Development dependencies from requirements.txt
|
||||
RUN sed -i '/# Development/,$d' requirements.txt
|
||||
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev xmlsec-dev xmlsec build-base g++ curl rust && \
|
||||
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev xmlsec-dev xmlsec build-base g++ curl && \
|
||||
echo -n "INPUT ( libldap.so )" > /usr/lib/libldap_r.so && \
|
||||
python -m venv venv && \
|
||||
/opt/recipes/venv/bin/python -m pip install --upgrade pip && \
|
||||
venv/bin/pip debug -v && \
|
||||
venv/bin/pip install wheel==0.45.1 && \
|
||||
venv/bin/pip install setuptools_rust==1.10.2 && \
|
||||
if [ `apk --print-arch` = "aarch64" ]; then \
|
||||
curl https://sh.rustup.rs -sSf | sh -s -- -y; \
|
||||
fi &&\
|
||||
venv/bin/pip install -r requirements.txt --no-cache-dir &&\
|
||||
apk --purge del .build-deps
|
||||
|
||||
#Copy project and execute it.
|
||||
COPY . ./
|
||||
|
||||
# delete default nginx config and link it to tandoors config
|
||||
# create symlinks to access and error log to show them on stdout
|
||||
RUN rm -rf /etc/nginx/http.d && \
|
||||
ln -s /opt/recipes/http.d /etc/nginx/http.d && \
|
||||
ln -sf /dev/stdout /var/log/nginx/access.log && \
|
||||
ln -sf /dev/stderr /var/log/nginx/error.log
|
||||
|
||||
# commented for now https://github.com/TandoorRecipes/recipes/issues/3478
|
||||
#HEALTHCHECK --interval=30s \
|
||||
# --timeout=5s \
|
||||
@@ -50,4 +53,4 @@ RUN /opt/recipes/venv/bin/python version.py
|
||||
RUN find . -type d -name ".git" | xargs rm -rf
|
||||
|
||||
RUN chmod +x boot.sh
|
||||
ENTRYPOINT ["/sbin/tini", "--", "/opt/recipes/boot.sh"]
|
||||
ENTRYPOINT ["/opt/recipes/boot.sh"]
|
||||
|
||||
13
README.md
13
README.md
@@ -15,15 +15,14 @@
|
||||
<a href="https://discord.gg/RhzBrfWgtp" target="_blank" rel="noopener noreferrer"><img src="https://badgen.net/badge/icon/discord?icon=discord&label" ></a>
|
||||
<a href="https://hub.docker.com/r/vabene1111/recipes" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/docker/pulls/vabene1111/recipes" ></a>
|
||||
<a href="https://github.com/vabene1111/recipes/releases/latest" rel="noopener noreferrer"><img src="https://img.shields.io/github/v/release/vabene1111/recipes" ></a>
|
||||
<a href="https://app.tandoor.dev/e/demo-auto-login/" rel="noopener noreferrer"><img src="https://img.shields.io/badge/demo-available-success" ></a>
|
||||
<a href="https://app.tandoor.dev/accounts/login/?demo" rel="noopener noreferrer"><img src="https://img.shields.io/badge/demo-available-success" ></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://tandoor.dev" target="_blank" rel="noopener noreferrer">Website</a> •
|
||||
<a href="https://docs.tandoor.dev/install/docker/" target="_blank" rel="noopener noreferrer">Installation</a> •
|
||||
<a href="https://docs.tandoor.dev/" target="_blank" rel="noopener noreferrer">Docs</a> •
|
||||
<a href="https://app.tandoor.dev/e/demo-auto-login/" target="_blank" rel="noopener noreferrer">Demo</a> •
|
||||
<a href="https://community.tandoor.dev" target="_blank" rel="noopener noreferrer">Community</a> •
|
||||
<a href="https://app.tandoor.dev/accounts/login/?demo" target="_blank" rel="noopener noreferrer">Demo</a> •
|
||||
<a href="https://discord.gg/RhzBrfWgtp" target="_blank" rel="noopener noreferrer">Discord</a>
|
||||
</p>
|
||||
|
||||
@@ -82,13 +81,13 @@ Share some information on how you use Tandoor to help me improve the application
|
||||
|
||||
<table>
|
||||
<tr>
|
||||
<td><a href="https://community.tandoor.dev">Community</a></td>
|
||||
<td>Get support, share best practices, discuss feature ideas, and meet other Tandoor users.</td>
|
||||
<td><a href="https://discord.gg/RhzBrfWgtp">Discord</a></td>
|
||||
<td>We have a public Discord server that anyone can join. This is where all our developers and contributors hang out and where we make announcements</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td><a href="https://discord.gg/RhzBrfWgtp">Discord</a></td>
|
||||
<td>We have a public Discord server that anyone can join. This is where all our developers and contributors hang out and where we make announcements</td>
|
||||
<td><a href="https://twitter.com/TandoorRecipes">Twitter</a></td>
|
||||
<td>You can follow our Twitter account to get updates on new features or releases</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
|
||||
36
boot.sh
Executable file → Normal file
36
boot.sh
Executable file → Normal file
@@ -1,21 +1,11 @@
|
||||
#!/bin/sh
|
||||
source venv/bin/activate
|
||||
|
||||
# these are envsubst in the nginx config, make sure they default to something sensible when unset
|
||||
export TANDOOR_PORT="${TANDOOR_PORT:-8080}"
|
||||
export MEDIA_ROOT=${MEDIA_ROOT:-/opt/recipes/mediafiles};
|
||||
export STATIC_ROOT=${STATIC_ROOT:-/opt/recipes/staticfiles};
|
||||
|
||||
TANDOOR_PORT="${TANDOOR_PORT:-8080}"
|
||||
GUNICORN_WORKERS="${GUNICORN_WORKERS:-3}"
|
||||
GUNICORN_THREADS="${GUNICORN_THREADS:-2}"
|
||||
GUNICORN_LOG_LEVEL="${GUNICORN_LOG_LEVEL:-'info'}"
|
||||
|
||||
PLUGINS_BUILD="${PLUGINS_BUILD:-0}"
|
||||
|
||||
if [ "${TANDOOR_PORT}" -eq 80 ]; then
|
||||
echo "TANDOOR_PORT set to 8080 because 80 is now taken by the integrated nginx"
|
||||
TANDOOR_PORT=8080
|
||||
fi
|
||||
NGINX_CONF_FILE=/opt/recipes/nginx/conf.d/Recipes.conf
|
||||
|
||||
display_warning() {
|
||||
echo "[WARNING]"
|
||||
@@ -24,6 +14,11 @@ display_warning() {
|
||||
|
||||
echo "Checking configuration..."
|
||||
|
||||
# Nginx config file must exist if gunicorn is not active
|
||||
if [ ! -f "$NGINX_CONF_FILE" ] && [ $GUNICORN_MEDIA -eq 0 ]; then
|
||||
display_warning "Nginx configuration file could not be found at the default location!\nPath: ${NGINX_CONF_FILE}"
|
||||
fi
|
||||
|
||||
# SECRET_KEY (or a valid file at SECRET_KEY_FILE) must be set in .env file
|
||||
|
||||
if [ -f "${SECRET_KEY_FILE}" ]; then
|
||||
@@ -84,31 +79,20 @@ echo "Database is ready"
|
||||
|
||||
echo "Migrating database"
|
||||
|
||||
python manage.py migrate
|
||||
|
||||
if [ "${PLUGINS_BUILD}" -eq 1 ]; then
|
||||
echo "Running yarn build at startup because PLUGINS_BUILD is enabled"
|
||||
python plugin.py
|
||||
fi
|
||||
python manage.py migrate
|
||||
|
||||
echo "Collecting static files, this may take a while..."
|
||||
|
||||
python manage.py collectstatic_js_reverse
|
||||
python manage.py collectstatic --noinput
|
||||
|
||||
echo "Done"
|
||||
|
||||
chmod -R 755 ${MEDIA_ROOT:-/opt/recipes/mediafiles}
|
||||
chmod -R 755 /opt/recipes/mediafiles
|
||||
|
||||
ipv6_disable=$(cat /sys/module/ipv6/parameters/disable)
|
||||
|
||||
# prepare nginx config
|
||||
envsubst '$MEDIA_ROOT $STATIC_ROOT $TANDOOR_PORT' < /opt/recipes/http.d/Recipes.conf.template > /opt/recipes/http.d/Recipes.conf
|
||||
|
||||
# start nginx
|
||||
echo "Starting nginx"
|
||||
nginx
|
||||
|
||||
echo "Starting gunicorn"
|
||||
# Check if IPv6 is enabled, only then run gunicorn with ipv6 support
|
||||
if [ "$ipv6_disable" -eq 0 ]; then
|
||||
exec gunicorn -b "[::]:$TANDOOR_PORT" --workers $GUNICORN_WORKERS --threads $GUNICORN_THREADS --access-logfile - --error-logfile - --log-level $GUNICORN_LOG_LEVEL recipes.wsgi
|
||||
|
||||
@@ -17,7 +17,7 @@ from .models import (BookmarkletImport, Comment, CookLog, CustomFilter, Food, Im
|
||||
ShoppingListEntry, ShoppingListRecipe, Space, Step, Storage,
|
||||
Supermarket, SupermarketCategory, SupermarketCategoryRelation, Sync, SyncLog,
|
||||
TelegramBot, Unit, UnitConversion, UserFile, UserPreference, UserSpace,
|
||||
ViewLog, ConnectorConfig, AiProvider, AiLog)
|
||||
ViewLog, ConnectorConfig)
|
||||
|
||||
admin.site.login = secure_admin_login(admin.site.login)
|
||||
|
||||
@@ -90,20 +90,6 @@ class SearchPreferenceAdmin(admin.ModelAdmin):
|
||||
admin.site.register(SearchPreference, SearchPreferenceAdmin)
|
||||
|
||||
|
||||
class AiProviderAdmin(admin.ModelAdmin):
|
||||
list_display = ('name', 'space', 'model',)
|
||||
search_fields = ('name', 'space', 'model',)
|
||||
|
||||
|
||||
admin.site.register(AiProvider, AiProviderAdmin)
|
||||
|
||||
|
||||
class AiLogAdmin(admin.ModelAdmin):
|
||||
list_display = ('ai_provider', 'function', 'credit_cost', 'created_by', 'created_at',)
|
||||
|
||||
admin.site.register(AiLog, AiLogAdmin)
|
||||
|
||||
|
||||
class StorageAdmin(admin.ModelAdmin):
|
||||
list_display = ('name', 'method')
|
||||
search_fields = ('name',)
|
||||
|
||||
@@ -26,7 +26,6 @@ class ImportExportBase(forms.Form):
|
||||
PAPRIKA = 'PAPRIKA'
|
||||
NEXTCLOUD = 'NEXTCLOUD'
|
||||
MEALIE = 'MEALIE'
|
||||
MEALIE1 = 'MEALIE1'
|
||||
CHOWDOWN = 'CHOWDOWN'
|
||||
SAFFRON = 'SAFFRON'
|
||||
CHEFTAP = 'CHEFTAP'
|
||||
@@ -47,7 +46,7 @@ class ImportExportBase(forms.Form):
|
||||
PDF = 'PDF'
|
||||
GOURMET = 'GOURMET'
|
||||
|
||||
type = forms.ChoiceField(choices=((DEFAULT, _('Default')), (PAPRIKA, 'Paprika'), (NEXTCLOUD, 'Nextcloud Cookbook'), (MEALIE, 'Mealie'), (MEALIE1, 'Mealie1'), (CHOWDOWN, 'Chowdown'),
|
||||
type = forms.ChoiceField(choices=((DEFAULT, _('Default')), (PAPRIKA, 'Paprika'), (NEXTCLOUD, 'Nextcloud Cookbook'), (MEALIE, 'Mealie'), (CHOWDOWN, 'Chowdown'),
|
||||
(SAFFRON, 'Saffron'), (CHEFTAP, 'ChefTap'), (PEPPERPLATE, 'Pepperplate'), (RECETTETEK, 'RecetteTek'), (RECIPESAGE, 'Recipe Sage'),
|
||||
(DOMESTICA, 'Domestica'), (MEALMASTER, 'MealMaster'), (REZKONV, 'RezKonv'), (OPENEATS, 'Openeats'), (RECIPEKEEPER, 'Recipe Keeper'),
|
||||
(PLANTOEAT, 'Plantoeat'), (COOKBOOKAPP, 'CookBookApp'), (COPYMETHAT, 'CopyMeThat'), (PDF, 'PDF'), (MELARECIPES, 'Melarecipes'),
|
||||
@@ -76,11 +75,6 @@ class ImportForm(ImportExportBase):
|
||||
files = MultipleFileField(required=True)
|
||||
duplicates = forms.BooleanField(help_text=_('To prevent duplicates recipes with the same name as existing ones are ignored. Check this box to import everything.'),
|
||||
required=False)
|
||||
meal_plans = forms.BooleanField(required=False)
|
||||
shopping_lists = forms.BooleanField(required=False)
|
||||
nutrition_per_serving = forms.BooleanField(required=False) # some managers (e.g. mealie) do not specify what the nutrition's relate to so we let the user choose
|
||||
|
||||
|
||||
class ExportForm(ImportExportBase):
|
||||
recipes = forms.ModelMultipleChoiceField(widget=MultiSelectWidget, queryset=Recipe.objects.none(), required=False)
|
||||
all = forms.BooleanField(required=False)
|
||||
|
||||
@@ -1,83 +0,0 @@
|
||||
from decimal import Decimal
|
||||
|
||||
from django.utils import timezone
|
||||
from django.db.models import Sum
|
||||
from litellm import CustomLogger
|
||||
|
||||
from cookbook.models import AiLog
|
||||
from recipes import settings
|
||||
|
||||
|
||||
def get_monthly_token_usage(space):
|
||||
"""
|
||||
returns the number of credits the space has used in the current month
|
||||
"""
|
||||
token_usage = AiLog.objects.filter(space=space, credits_from_balance=False, created_at__month=timezone.now().month).aggregate(Sum('credit_cost'))['credit_cost__sum']
|
||||
if token_usage is None:
|
||||
token_usage = 0
|
||||
return token_usage
|
||||
|
||||
|
||||
def has_monthly_token(space):
|
||||
"""
|
||||
checks if the monthly credit limit has been exceeded
|
||||
"""
|
||||
return get_monthly_token_usage(space) < space.ai_credits_monthly
|
||||
|
||||
|
||||
def can_perform_ai_request(space):
|
||||
return (has_monthly_token(space) or space.ai_credits_balance > 0) and space.ai_enabled
|
||||
|
||||
|
||||
class AiCallbackHandler(CustomLogger):
|
||||
space = None
|
||||
user = None
|
||||
ai_provider = None
|
||||
|
||||
def __init__(self, space, user, ai_provider):
|
||||
super().__init__()
|
||||
self.space = space
|
||||
self.user = user
|
||||
self.ai_provider = ai_provider
|
||||
|
||||
def log_pre_api_call(self, model, messages, kwargs):
|
||||
pass
|
||||
|
||||
def log_post_api_call(self, kwargs, response_obj, start_time, end_time):
|
||||
pass
|
||||
|
||||
def log_success_event(self, kwargs, response_obj, start_time, end_time):
|
||||
self.create_ai_log(kwargs, response_obj, start_time, end_time)
|
||||
|
||||
def log_failure_event(self, kwargs, response_obj, start_time, end_time):
|
||||
self.create_ai_log(kwargs, response_obj, start_time, end_time)
|
||||
|
||||
def create_ai_log(self, kwargs, response_obj, start_time, end_time):
|
||||
credit_cost = 0
|
||||
credits_from_balance = False
|
||||
if self.ai_provider.log_credit_cost:
|
||||
credit_cost = kwargs.get("response_cost", 0) * 100
|
||||
|
||||
if (not has_monthly_token(self.space)) and self.space.ai_credits_balance > 0:
|
||||
remaining_balance = self.space.ai_credits_balance - Decimal(str(credit_cost))
|
||||
if remaining_balance < 0:
|
||||
remaining_balance = 0
|
||||
if settings.HOSTED:
|
||||
self.space.ai_enabled = False
|
||||
|
||||
self.space.ai_credits_balance = remaining_balance
|
||||
credits_from_balance = True
|
||||
self.space.save()
|
||||
|
||||
AiLog.objects.create(
|
||||
created_by=self.user,
|
||||
space=self.space,
|
||||
ai_provider=self.ai_provider,
|
||||
start_time=start_time,
|
||||
end_time=end_time,
|
||||
input_tokens=response_obj['usage']['prompt_tokens'],
|
||||
output_tokens=response_obj['usage']['completion_tokens'],
|
||||
function=AiLog.F_FILE_IMPORT,
|
||||
credit_cost=credit_cost,
|
||||
credits_from_balance=credits_from_balance,
|
||||
)
|
||||
@@ -109,7 +109,7 @@ class AutomationEngine:
|
||||
Moves a string that should never be treated as a unit to next token and optionally replaced with default unit
|
||||
e.g. NEVER_UNIT: param1: egg, param2: None would modify ['1', 'egg', 'white'] to ['1', '', 'egg', 'white']
|
||||
or NEVER_UNIT: param1: egg, param2: pcs would modify ['1', 'egg', 'yolk'] to ['1', 'pcs', 'egg', 'yolk']
|
||||
:param1 tokens: string that should never be considered a unit, will be moved to token[2]
|
||||
:param1 string: string that should never be considered a unit, will be moved to token[2]
|
||||
:param2 (optional) unit as string: will insert unit string into token[1]
|
||||
:return: unit as string (possibly changed by automation)
|
||||
"""
|
||||
@@ -135,7 +135,7 @@ class AutomationEngine:
|
||||
new_unit = self.never_unit[tokens[1].lower()]
|
||||
never_unit = True
|
||||
except KeyError:
|
||||
return tokens, never_unit
|
||||
return tokens
|
||||
else:
|
||||
if a := Automation.objects.annotate(param_1_lower=Lower('param_1')).filter(space=self.request.space, type=Automation.NEVER_UNIT, param_1_lower__in=[
|
||||
tokens[1].lower(), alt_unit.lower()], disabled=False).order_by('order').first():
|
||||
@@ -144,7 +144,7 @@ class AutomationEngine:
|
||||
|
||||
if never_unit:
|
||||
tokens.insert(1, new_unit)
|
||||
return tokens, never_unit
|
||||
return tokens
|
||||
|
||||
def apply_transpose_automation(self, string):
|
||||
"""
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
def add_to_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids):
|
||||
"""
|
||||
given a model, the base and related field and the base and related ids, bulk create relation objects
|
||||
"""
|
||||
relation_objects = []
|
||||
for b in base_ids:
|
||||
for r in related_ids:
|
||||
relation_objects.append(relation_model(**{base_field_name: b, related_field_name: r}))
|
||||
relation_model.objects.bulk_create(relation_objects, ignore_conflicts=True, unique_fields=(base_field_name, related_field_name,))
|
||||
|
||||
|
||||
def remove_from_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids):
|
||||
relation_model.objects.filter(**{f'{base_field_name}__in': base_ids, f'{related_field_name}__in': related_ids}).delete()
|
||||
|
||||
|
||||
def remove_all_from_relation(relation_model, base_field_name, base_ids):
|
||||
relation_model.objects.filter(**{f'{base_field_name}__in': base_ids}).delete()
|
||||
|
||||
|
||||
def set_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids):
|
||||
remove_all_from_relation(relation_model, base_field_name, base_ids)
|
||||
add_to_relation(relation_model, base_field_name, base_ids, related_field_name, related_ids)
|
||||
@@ -37,7 +37,7 @@ def get_filetype(name):
|
||||
|
||||
def is_file_type_allowed(filename, image_only=False):
|
||||
is_file_allowed = False
|
||||
allowed_file_types = ['.pdf', '.docx', '.xlsx', '.css', '.mp4', '.mov']
|
||||
allowed_file_types = ['.pdf', '.docx', '.xlsx', '.css']
|
||||
allowed_image_types = ['.png', '.jpg', '.jpeg', '.gif', '.webp']
|
||||
check_list = allowed_image_types
|
||||
if not image_only:
|
||||
@@ -77,8 +77,6 @@ def handle_image(request, image_object, filetype):
|
||||
file_format = 'JPEG'
|
||||
if filetype == '.png':
|
||||
file_format = 'PNG'
|
||||
if filetype == '.webp':
|
||||
file_format = 'WEBP'
|
||||
|
||||
if (image_object.size / 1000) > 500: # if larger than 500 kb compress
|
||||
if filetype == '.jpeg' or filetype == '.jpg':
|
||||
@@ -86,6 +84,7 @@ def handle_image(request, image_object, filetype):
|
||||
if filetype == '.png':
|
||||
return rescale_image_png(image_object)
|
||||
else:
|
||||
print('STripping image')
|
||||
return strip_image_meta(image_object, file_format)
|
||||
|
||||
# TODO webp and gifs bypass the scaling and metadata checks, fix
|
||||
|
||||
@@ -176,6 +176,7 @@ class IngredientParser:
|
||||
# if something like this is detected move it to the beginning so the parser can handle it
|
||||
if len(ingredient) < 1000 and re.search(r'^([^\W\d_])+(.)*[1-9](\d)*\s*([^\W\d_])+', ingredient):
|
||||
match = re.search(r'[1-9](\d)*\s*([^\W\d_])+', ingredient)
|
||||
print(f'reording from {ingredient} to {ingredient[match.start():match.end()] + " " + ingredient.replace(ingredient[match.start():match.end()], "")}')
|
||||
ingredient = ingredient[match.start():match.end()] + ' ' + ingredient.replace(ingredient[match.start():match.end()], '')
|
||||
|
||||
# if the string contains parenthesis early on remove it and place it at the end
|
||||
@@ -210,46 +211,39 @@ class IngredientParser:
|
||||
# three arguments if it already has a unit there can't be
|
||||
# a fraction for the amount
|
||||
if len(tokens) > 2:
|
||||
never_unit_applied = False
|
||||
if not self.ignore_rules:
|
||||
tokens, never_unit_applied = self.automation.apply_never_unit_automation(tokens)
|
||||
|
||||
if never_unit_applied:
|
||||
unit = tokens[1]
|
||||
food, note = self.parse_food(tokens[2:])
|
||||
else:
|
||||
try:
|
||||
if unit is not None:
|
||||
# a unit is already found, no need to try the second argument for a fraction
|
||||
# probably not the best method to do it, but I didn't want to make an if check and paste the exact same thing in the else as already is in the except
|
||||
raise ValueError
|
||||
# try to parse second argument as amount and add that, in case of '2 1/2' or '2 ½'
|
||||
if tokens[1]:
|
||||
amount += self.parse_fraction(tokens[1])
|
||||
# assume that units can't end with a comma
|
||||
if len(tokens) > 3 and not tokens[2].endswith(','):
|
||||
# try to use third argument as unit and everything else as food, use everything as food if it fails
|
||||
try:
|
||||
food, note = self.parse_food(tokens[3:])
|
||||
unit = tokens[2]
|
||||
except ValueError:
|
||||
food, note = self.parse_food(tokens[2:])
|
||||
else:
|
||||
tokens = self.automation.apply_never_unit_automation(tokens)
|
||||
try:
|
||||
if unit is not None:
|
||||
# a unit is already found, no need to try the second argument for a fraction
|
||||
# probably not the best method to do it, but I didn't want to make an if check and paste the exact same thing in the else as already is in the except
|
||||
raise ValueError
|
||||
# try to parse second argument as amount and add that, in case of '2 1/2' or '2 ½'
|
||||
amount += self.parse_fraction(tokens[1])
|
||||
# assume that units can't end with a comma
|
||||
if len(tokens) > 3 and not tokens[2].endswith(','):
|
||||
# try to use third argument as unit and everything else as food, use everything as food if it fails
|
||||
try:
|
||||
food, note = self.parse_food(tokens[3:])
|
||||
unit = tokens[2]
|
||||
except ValueError:
|
||||
food, note = self.parse_food(tokens[2:])
|
||||
except ValueError:
|
||||
# assume that units can't end with a comma
|
||||
if not tokens[1].endswith(','):
|
||||
# try to use second argument as unit and everything else as food, use everything as food if it fails
|
||||
try:
|
||||
food, note = self.parse_food(tokens[2:])
|
||||
if unit is None:
|
||||
unit = tokens[1]
|
||||
else:
|
||||
note = tokens[1]
|
||||
except ValueError:
|
||||
food, note = self.parse_food(tokens[1:])
|
||||
else:
|
||||
else:
|
||||
food, note = self.parse_food(tokens[2:])
|
||||
except ValueError:
|
||||
# assume that units can't end with a comma
|
||||
if not tokens[1].endswith(','):
|
||||
# try to use second argument as unit and everything else as food, use everything as food if it fails
|
||||
try:
|
||||
food, note = self.parse_food(tokens[2:])
|
||||
if unit is None:
|
||||
unit = tokens[1]
|
||||
else:
|
||||
note = tokens[1]
|
||||
except ValueError:
|
||||
food, note = self.parse_food(tokens[1:])
|
||||
else:
|
||||
food, note = self.parse_food(tokens[1:])
|
||||
else:
|
||||
# only two arguments, first one is the amount
|
||||
# which means this is the food
|
||||
@@ -270,7 +264,6 @@ class IngredientParser:
|
||||
|
||||
if food and not self.ignore_rules:
|
||||
food = self.automation.apply_food_automation(food)
|
||||
|
||||
if len(food) > Food._meta.get_field('name').max_length: # test if food name is to long
|
||||
# try splitting it at a space and taking only the first arg
|
||||
if len(food.split()) > 1 and len(food.split()[0]) < Food._meta.get_field('name').max_length:
|
||||
|
||||
@@ -3,19 +3,17 @@ import inspect
|
||||
from django.conf import settings
|
||||
from django.contrib import messages
|
||||
from django.contrib.auth.decorators import user_passes_test
|
||||
from django.contrib.auth.models import Group
|
||||
from django.core.cache import cache
|
||||
from django.core.exceptions import ObjectDoesNotExist, ValidationError
|
||||
from django.http import HttpResponseRedirect
|
||||
from django.urls import reverse, reverse_lazy
|
||||
from django.utils.translation import gettext as _
|
||||
from django_scopes import scopes_disabled
|
||||
from oauth2_provider.contrib.rest_framework import TokenHasReadWriteScope, TokenHasScope
|
||||
from oauth2_provider.models import AccessToken
|
||||
from rest_framework import permissions
|
||||
from rest_framework.permissions import SAFE_METHODS
|
||||
import random
|
||||
from cookbook.models import Recipe, ShareLink, UserSpace, Space
|
||||
|
||||
from cookbook.models import Recipe, ShareLink, UserSpace
|
||||
|
||||
|
||||
def get_allowed_groups(groups_required):
|
||||
@@ -333,25 +331,6 @@ class CustomRecipePermission(permissions.BasePermission):
|
||||
or has_group_permission(request.user, ['user'])) and obj.space == request.space
|
||||
|
||||
|
||||
class CustomAiProviderPermission(permissions.BasePermission):
|
||||
"""
|
||||
Custom permission class for the AiProvider api endpoint
|
||||
users: can read all
|
||||
admins: can read and write
|
||||
superusers: can read and write + write providers without a space
|
||||
"""
|
||||
message = _('You do not have the required permissions to view this page!')
|
||||
|
||||
def has_permission(self, request, view): # user is either at least a user and the request is safe
|
||||
return (has_group_permission(request.user, ['user']) and request.method in SAFE_METHODS) or (has_group_permission(request.user, ['admin']) or request.user.is_superuser)
|
||||
|
||||
# editing of global providers allowed for superusers, space providers by admins and users can read only access
|
||||
def has_object_permission(self, request, view, obj):
|
||||
return ((obj.space is None and request.user.is_superuser)
|
||||
or (obj.space == request.space and has_group_permission(request.user, ['admin']))
|
||||
or (obj.space == request.space and has_group_permission(request.user, ['user']) and request.method in SAFE_METHODS))
|
||||
|
||||
|
||||
class CustomUserPermission(permissions.BasePermission):
|
||||
"""
|
||||
Custom permission class for user api endpoint
|
||||
@@ -458,36 +437,3 @@ class IsReadOnlyDRF(permissions.BasePermission):
|
||||
|
||||
def has_permission(self, request, view):
|
||||
return request.method in SAFE_METHODS
|
||||
|
||||
|
||||
class IsCreateDRF(permissions.BasePermission):
|
||||
message = 'You cannot interact with this object, you can only create'
|
||||
|
||||
def has_permission(self, request, view):
|
||||
return request.method == 'POST'
|
||||
|
||||
|
||||
def create_space_for_user(user, name=None):
|
||||
with scopes_disabled():
|
||||
if not name:
|
||||
name = f"{user.username}'s Space"
|
||||
|
||||
if Space.objects.filter(name=name).exists():
|
||||
name = f'{name} #{random.randrange(1, 10 ** 5)}'
|
||||
|
||||
created_space = Space(name=name,
|
||||
created_by=user,
|
||||
max_file_storage_mb=settings.SPACE_DEFAULT_MAX_FILES,
|
||||
max_recipes=settings.SPACE_DEFAULT_MAX_RECIPES,
|
||||
max_users=settings.SPACE_DEFAULT_MAX_USERS,
|
||||
allow_sharing=settings.SPACE_DEFAULT_ALLOW_SHARING,
|
||||
ai_enabled=settings.SPACE_AI_ENABLED,
|
||||
ai_credits_monthly=settings.SPACE_AI_CREDITS_MONTHLY,
|
||||
space_setup_completed=False, )
|
||||
created_space.save()
|
||||
|
||||
UserSpace.objects.filter(user=user).update(active=False)
|
||||
user_space = UserSpace.objects.create(space=created_space, user=user, active=True)
|
||||
user_space.groups.add(Group.objects.filter(name='admin').get())
|
||||
|
||||
return user_space
|
||||
|
||||
@@ -62,14 +62,9 @@ class FoodPropertyHelper:
|
||||
computed_properties[pt.id]['food_values'] = self.add_or_create(
|
||||
computed_properties[p.property_type.id]['food_values'], c.food.id, (c.amount / i.food.properties_food_amount) * p.property_amount, c.food)
|
||||
if not found_property:
|
||||
# if no amount and food does not exist yet add it but don't count as missing
|
||||
if i.amount == 0 or i.no_amount and i.food.id not in computed_properties[pt.id]['food_values']:
|
||||
if i.amount == 0 or i.no_amount: # don't count ingredients without an amount as missing
|
||||
computed_properties[pt.id]['missing_value'] = computed_properties[pt.id]['missing_value'] or False # don't override if another food was already missing
|
||||
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': {'id': i.food.id, 'name': i.food.name}, 'value': 0}
|
||||
# if amount is present but unit is missing indicate it in the result
|
||||
elif i.unit is None:
|
||||
if i.food.id not in computed_properties[pt.id]['food_values']:
|
||||
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': {'id': i.food.id, 'name': i.food.name}, 'value': 0}
|
||||
computed_properties[pt.id]['food_values'][i.food.id]['missing_unit'] = True
|
||||
else:
|
||||
computed_properties[pt.id]['missing_value'] = True
|
||||
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': {'id': i.food.id, 'name': i.food.name}, 'value': None}
|
||||
|
||||
@@ -319,10 +319,10 @@ def clean_instruction_string(instruction):
|
||||
.replace("", _('reverse rotation')) \
|
||||
.replace("", _('careful rotation')) \
|
||||
.replace("", _('knead')) \
|
||||
.replace("", _('thicken')) \
|
||||
.replace("", _('warm up')) \
|
||||
.replace("", _('ferment')) \
|
||||
.replace("", _("sous-vide"))
|
||||
.replace("Andicken ", _('thicken')) \
|
||||
.replace("Erwärmen ", _('warm up')) \
|
||||
.replace("Fermentieren ", _('ferment')) \
|
||||
.replace("Sous-vide ", _("sous-vide"))
|
||||
|
||||
|
||||
def parse_instructions(instructions):
|
||||
@@ -403,8 +403,6 @@ def parse_servings_text(servings):
|
||||
|
||||
|
||||
def parse_time(recipe_time):
|
||||
if not recipe_time:
|
||||
return 0
|
||||
if type(recipe_time) not in [int, float]:
|
||||
try:
|
||||
recipe_time = float(re.search(r'\d+', recipe_time).group())
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
from django.contrib.auth.models import Group
|
||||
from django.http import HttpResponseRedirect
|
||||
from django.urls import reverse
|
||||
from django_scopes import scope, scopes_disabled
|
||||
from oauth2_provider.contrib.rest_framework import OAuth2Authentication
|
||||
from psycopg2.errors import UniqueViolation
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
|
||||
import random
|
||||
|
||||
from cookbook.helper.permission_helper import create_space_for_user
|
||||
from cookbook.models import Space, UserSpace
|
||||
from cookbook.views import views
|
||||
from recipes import settings
|
||||
|
||||
@@ -19,7 +12,7 @@ class ScopeMiddleware:
|
||||
self.get_response = get_response
|
||||
|
||||
def __call__(self, request):
|
||||
prefix = settings.SCRIPT_NAME or ''
|
||||
prefix = settings.JS_REVERSE_SCRIPT_PREFIX or ''
|
||||
|
||||
# need to disable scopes for writing requests into userpref and enable for loading ?
|
||||
if request.path.startswith(prefix + '/api/user-preference/'):
|
||||
@@ -41,28 +34,16 @@ class ScopeMiddleware:
|
||||
if request.path.startswith(prefix + '/switch-space/'):
|
||||
return self.get_response(request)
|
||||
|
||||
if request.path.startswith(prefix + '/invite/'):
|
||||
return self.get_response(request)
|
||||
with scopes_disabled():
|
||||
if request.user.userspace_set.count() == 0 and not reverse('account_logout') in request.path:
|
||||
return views.space_overview(request)
|
||||
|
||||
# get active user space, if for some reason more than one space is active select first (group permission checks will fail, this is not intended at this point)
|
||||
user_space = request.user.userspace_set.filter(active=True).first()
|
||||
|
||||
if not user_space and request.user.userspace_set.count() > 0:
|
||||
# if the users has a userspace but nothing is active, activate the first one
|
||||
user_space = request.user.userspace_set.first()
|
||||
if user_space:
|
||||
user_space.active = True
|
||||
user_space.save()
|
||||
|
||||
if not user_space:
|
||||
if 'signup_token' in request.session:
|
||||
# if user is authenticated, has no space but a signup token (InviteLink) is present, redirect to invite link logic
|
||||
return HttpResponseRedirect(reverse('view_invite', args=[request.session.pop('signup_token', '')]))
|
||||
else:
|
||||
# if user does not yet have a space create one for him
|
||||
user_space = create_space_for_user(request.user)
|
||||
return views.space_overview(request)
|
||||
|
||||
# TODO remove the need for this view
|
||||
if user_space.groups.count() == 0 and not reverse('account_logout') in request.path:
|
||||
return views.no_groups(request)
|
||||
|
||||
|
||||
@@ -60,15 +60,14 @@ class CookBookApp(Integration):
|
||||
food=f, unit=u, amount=ingredient.get('amount', None), note=ingredient.get('note', None), original_text=ingredient.get('original_text', None), space=self.request.space,
|
||||
))
|
||||
|
||||
try:
|
||||
for url in images:
|
||||
# import the first valid image which is not cookbookapp branding
|
||||
if validate_import_url(url) and not url.startswith("https://media.cookbookmanager.com/brand/"):
|
||||
if len(images) > 0:
|
||||
try:
|
||||
url = images[0]
|
||||
if validate_import_url(url):
|
||||
response = requests.get(url)
|
||||
self.import_recipe_image(recipe, BytesIO(response.content))
|
||||
break
|
||||
except Exception as e:
|
||||
print('failed to import image ', str(e))
|
||||
except Exception as e:
|
||||
print('failed to import image ', str(e))
|
||||
|
||||
recipe.save()
|
||||
return recipe
|
||||
|
||||
@@ -26,12 +26,6 @@ class Integration:
|
||||
files = None
|
||||
export_type = None
|
||||
ignored_recipes = []
|
||||
import_log = None
|
||||
import_duplicates = False
|
||||
|
||||
import_meal_plans = True
|
||||
import_shopping_lists = True
|
||||
nutrition_per_serving = False
|
||||
|
||||
def __init__(self, request, export_type):
|
||||
"""
|
||||
@@ -108,7 +102,7 @@ class Integration:
|
||||
"""
|
||||
return True
|
||||
|
||||
def do_import(self, files, il, import_duplicates, meal_plans=True, shopping_lists=True, nutrition_per_serving=False):
|
||||
def do_import(self, files, il, import_duplicates):
|
||||
"""
|
||||
Imports given files
|
||||
:param import_duplicates: if true duplicates are imported as well
|
||||
@@ -117,12 +111,6 @@ class Integration:
|
||||
:return: HttpResponseRedirect to the recipe search showing all imported recipes
|
||||
"""
|
||||
with scope(space=self.request.space):
|
||||
self.import_log = il
|
||||
self.import_duplicates = import_duplicates
|
||||
|
||||
self.import_meal_plans = meal_plans
|
||||
self.import_shopping_lists = shopping_lists
|
||||
self.nutrition_per_serving = nutrition_per_serving
|
||||
|
||||
try:
|
||||
self.files = files
|
||||
@@ -178,24 +166,20 @@ class Integration:
|
||||
il.total_recipes = len(new_file_list)
|
||||
file_list = new_file_list
|
||||
|
||||
if isinstance(self, cookbook.integration.mealie1.Mealie1):
|
||||
# since the mealie 1.0 export is a backup and not a classic recipe export we treat it a bit differently
|
||||
recipes = self.get_recipe_from_file(import_zip)
|
||||
else:
|
||||
for z in file_list:
|
||||
try:
|
||||
if not hasattr(z, 'filename') or isinstance(z, Tag):
|
||||
recipe = self.get_recipe_from_file(z)
|
||||
else:
|
||||
recipe = self.get_recipe_from_file(BytesIO(import_zip.read(z.filename)))
|
||||
recipe.keywords.add(self.keyword)
|
||||
il.msg += self.get_recipe_processed_msg(recipe)
|
||||
self.handle_duplicates(recipe, import_duplicates)
|
||||
il.imported_recipes += 1
|
||||
il.save()
|
||||
except Exception as e:
|
||||
traceback.print_exc()
|
||||
self.handle_exception(e, log=il, message=f'-------------------- \nERROR \n{e}\n--------------------\n')
|
||||
for z in file_list:
|
||||
try:
|
||||
if not hasattr(z, 'filename') or isinstance(z, Tag):
|
||||
recipe = self.get_recipe_from_file(z)
|
||||
else:
|
||||
recipe = self.get_recipe_from_file(BytesIO(import_zip.read(z.filename)))
|
||||
recipe.keywords.add(self.keyword)
|
||||
il.msg += self.get_recipe_processed_msg(recipe)
|
||||
self.handle_duplicates(recipe, import_duplicates)
|
||||
il.imported_recipes += 1
|
||||
il.save()
|
||||
except Exception as e:
|
||||
traceback.print_exc()
|
||||
self.handle_exception(e, log=il, message=f'-------------------- \nERROR \n{e}\n--------------------\n')
|
||||
import_zip.close()
|
||||
elif '.json' in f['name'] or '.xml' in f['name'] or '.txt' in f['name'] or '.mmf' in f['name'] or '.rk' in f['name'] or '.melarecipe' in f['name']:
|
||||
data_list = self.split_recipe_file(f['file'])
|
||||
|
||||
@@ -1,342 +0,0 @@
|
||||
import json
|
||||
import re
|
||||
import traceback
|
||||
import uuid
|
||||
from decimal import Decimal
|
||||
from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
from gettext import gettext as _
|
||||
|
||||
from django.db import transaction
|
||||
|
||||
from cookbook.helper import ingredient_parser
|
||||
from cookbook.helper.image_processing import get_filetype
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text, parse_time
|
||||
from cookbook.integration.integration import Integration
|
||||
from cookbook.models import Ingredient, Keyword, Recipe, Step, Food, Unit, SupermarketCategory, PropertyType, Property, MealType, MealPlan, CookLog, ShoppingListEntry
|
||||
|
||||
|
||||
class Mealie1(Integration):
|
||||
"""
|
||||
integration for mealie past version 1.0
|
||||
"""
|
||||
|
||||
def get_recipe_from_file(self, file):
|
||||
mealie_database = json.loads(BytesIO(file.read('database.json')).getvalue().decode("utf-8"))
|
||||
self.import_log.total_recipes = len(mealie_database['recipes'])
|
||||
self.import_log.msg += f"Importing {len(mealie_database["categories"]) + len(mealie_database["tags"])} tags and categories as keywords...\n"
|
||||
self.import_log.save()
|
||||
|
||||
keywords_categories_dict = {}
|
||||
for c in mealie_database['categories']:
|
||||
if keyword := Keyword.objects.filter(name=c['name'], space=self.request.space).first():
|
||||
keywords_categories_dict[c['id']] = keyword.pk
|
||||
else:
|
||||
keyword = Keyword.objects.create(name=c['name'], space=self.request.space)
|
||||
keywords_categories_dict[c['id']] = keyword.pk
|
||||
|
||||
keywords_tags_dict = {}
|
||||
for t in mealie_database['tags']:
|
||||
if keyword := Keyword.objects.filter(name=t['name'], space=self.request.space).first():
|
||||
keywords_tags_dict[t['id']] = keyword.pk
|
||||
else:
|
||||
keyword = Keyword.objects.create(name=t['name'], space=self.request.space)
|
||||
keywords_tags_dict[t['id']] = keyword.pk
|
||||
|
||||
self.import_log.msg += f"Importing {len(mealie_database["multi_purpose_labels"])} multi purpose labels as supermarket categories...\n"
|
||||
self.import_log.save()
|
||||
|
||||
supermarket_categories_dict = {}
|
||||
for m in mealie_database['multi_purpose_labels']:
|
||||
if supermarket_category := SupermarketCategory.objects.filter(name=m['name'], space=self.request.space).first():
|
||||
supermarket_categories_dict[m['id']] = supermarket_category.pk
|
||||
else:
|
||||
supermarket_category = SupermarketCategory.objects.create(name=m['name'], space=self.request.space)
|
||||
supermarket_categories_dict[m['id']] = supermarket_category.pk
|
||||
|
||||
self.import_log.msg += f"Importing {len(mealie_database["ingredient_foods"])} foods...\n"
|
||||
self.import_log.save()
|
||||
|
||||
foods_dict = {}
|
||||
for f in mealie_database['ingredient_foods']:
|
||||
if food := Food.objects.filter(name=f['name'], space=self.request.space).first():
|
||||
foods_dict[f['id']] = food.pk
|
||||
else:
|
||||
food = {'name': f['name'],
|
||||
'plural_name': f['plural_name'],
|
||||
'description': f['description'],
|
||||
'space': self.request.space}
|
||||
|
||||
if f['label_id'] and f['label_id'] in supermarket_categories_dict:
|
||||
food['supermarket_category_id'] = supermarket_categories_dict[f['label_id']]
|
||||
|
||||
food = Food.objects.create(**food)
|
||||
if f['on_hand']:
|
||||
food.onhand_users.add(self.request.user)
|
||||
foods_dict[f['id']] = food.pk
|
||||
|
||||
self.import_log.msg += f"Importing {len(mealie_database["ingredient_units"])} units...\n"
|
||||
self.import_log.save()
|
||||
|
||||
units_dict = {}
|
||||
for u in mealie_database['ingredient_units']:
|
||||
if unit := Unit.objects.filter(name=u['name'], space=self.request.space).first():
|
||||
units_dict[u['id']] = unit.pk
|
||||
else:
|
||||
unit = Unit.objects.create(name=u['name'], plural_name=u['plural_name'], description=u['description'], space=self.request.space)
|
||||
units_dict[u['id']] = unit.pk
|
||||
|
||||
recipes_dict = {}
|
||||
recipe_property_factor_dict = {}
|
||||
recipes = []
|
||||
recipe_keyword_relation = []
|
||||
for r in mealie_database['recipes']:
|
||||
if Recipe.objects.filter(space=self.request.space, name=r['name']).exists() and not self.import_duplicates:
|
||||
self.import_log.msg += f"Ignoring {r['name']} because a recipe with this name already exists.\n"
|
||||
self.import_log.save()
|
||||
else:
|
||||
recipe = Recipe.objects.create(
|
||||
waiting_time=parse_time(r['perform_time']),
|
||||
working_time=parse_time(r['prep_time']),
|
||||
description=r['description'][:512],
|
||||
name=r['name'],
|
||||
source_url=r['org_url'],
|
||||
servings=r['recipe_servings'] if r['recipe_servings'] and r['recipe_servings'] != 0 else 1,
|
||||
servings_text=r['recipe_yield'].strip() if r['recipe_yield'] else "",
|
||||
internal=True,
|
||||
created_at=r['created_at'],
|
||||
space=self.request.space,
|
||||
created_by=self.request.user,
|
||||
)
|
||||
|
||||
if not self.nutrition_per_serving:
|
||||
recipe_property_factor_dict[r['id']] = recipe.servings
|
||||
|
||||
self.import_log.msg += self.get_recipe_processed_msg(recipe)
|
||||
self.import_log.imported_recipes += 1
|
||||
self.import_log.save()
|
||||
|
||||
recipes.append(recipe)
|
||||
recipes_dict[r['id']] = recipe.pk
|
||||
recipe_keyword_relation.append(Recipe.keywords.through(recipe_id=recipe.pk, keyword_id=self.keyword.pk))
|
||||
|
||||
Recipe.keywords.through.objects.bulk_create(recipe_keyword_relation, ignore_conflicts=True)
|
||||
|
||||
self.import_log.msg += f"Importing {len(mealie_database["recipe_instructions"])} instructions...\n"
|
||||
self.import_log.save()
|
||||
|
||||
steps_relation = []
|
||||
first_step_of_recipe_dict = {}
|
||||
for s in mealie_database['recipe_instructions']:
|
||||
if s['recipe_id'] in recipes_dict:
|
||||
step = Step.objects.create(instruction=(s['text'] if s['text'] else "") + (f" \n {s['summary']}" if s['summary'] else ""),
|
||||
order=s['position'],
|
||||
name=s['title'],
|
||||
space=self.request.space)
|
||||
steps_relation.append(Recipe.steps.through(recipe_id=recipes_dict[s['recipe_id']], step_id=step.pk))
|
||||
if s['recipe_id'] not in first_step_of_recipe_dict:
|
||||
first_step_of_recipe_dict[s['recipe_id']] = step.pk
|
||||
|
||||
for n in mealie_database['notes']:
|
||||
if n['recipe_id'] in recipes_dict:
|
||||
step = Step.objects.create(instruction=n['text'],
|
||||
name=n['title'],
|
||||
order=100,
|
||||
space=self.request.space)
|
||||
steps_relation.append(Recipe.steps.through(recipe_id=recipes_dict[n['recipe_id']], step_id=step.pk))
|
||||
|
||||
Recipe.steps.through.objects.bulk_create(steps_relation)
|
||||
|
||||
ingredient_parser = IngredientParser(self.request, True)
|
||||
|
||||
self.import_log.msg += f"Importing {len(mealie_database["recipes_ingredients"])} ingredients...\n"
|
||||
self.import_log.save()
|
||||
|
||||
ingredients_relation = []
|
||||
for i in mealie_database['recipes_ingredients']:
|
||||
if i['recipe_id'] in recipes_dict:
|
||||
if i['title']:
|
||||
title_ingredient = Ingredient.objects.create(
|
||||
note=i['title'],
|
||||
is_header=True,
|
||||
space=self.request.space,
|
||||
)
|
||||
ingredients_relation.append(Step.ingredients.through(step_id=first_step_of_recipe_dict[i['recipe_id']], ingredient_id=title_ingredient.pk))
|
||||
if i['food_id']:
|
||||
ingredient = Ingredient.objects.create(
|
||||
food_id=foods_dict[i['food_id']] if i['food_id'] in foods_dict else None,
|
||||
unit_id=units_dict[i['unit_id']] if i['unit_id'] in units_dict else None,
|
||||
original_text=i['original_text'],
|
||||
order=i['position'],
|
||||
amount=i['quantity'],
|
||||
note=i['note'],
|
||||
space=self.request.space,
|
||||
)
|
||||
ingredients_relation.append(Step.ingredients.through(step_id=first_step_of_recipe_dict[i['recipe_id']], ingredient_id=ingredient.pk))
|
||||
elif i['note'].strip():
|
||||
amount, unit, food, note = ingredient_parser.parse(i['note'].strip())
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
ingredient = Ingredient.objects.create(
|
||||
food=f,
|
||||
unit=u,
|
||||
amount=amount,
|
||||
note=note,
|
||||
original_text=i['original_text'],
|
||||
space=self.request.space,
|
||||
)
|
||||
ingredients_relation.append(Step.ingredients.through(step_id=first_step_of_recipe_dict[i['recipe_id']], ingredient_id=ingredient.pk))
|
||||
Step.ingredients.through.objects.bulk_create(ingredients_relation)
|
||||
|
||||
self.import_log.msg += f"Importing {len(mealie_database["recipes_to_categories"]) + len(mealie_database["recipes_to_tags"])} category and keyword relations...\n"
|
||||
self.import_log.save()
|
||||
|
||||
recipe_keyword_relation = []
|
||||
for rC in mealie_database['recipes_to_categories']:
|
||||
if rC['recipe_id'] in recipes_dict:
|
||||
recipe_keyword_relation.append(Recipe.keywords.through(recipe_id=recipes_dict[rC['recipe_id']], keyword_id=keywords_categories_dict[rC['category_id']]))
|
||||
|
||||
for rT in mealie_database['recipes_to_tags']:
|
||||
if rT['recipe_id'] in recipes_dict:
|
||||
recipe_keyword_relation.append(Recipe.keywords.through(recipe_id=recipes_dict[rT['recipe_id']], keyword_id=keywords_tags_dict[rT['tag_id']]))
|
||||
|
||||
Recipe.keywords.through.objects.bulk_create(recipe_keyword_relation, ignore_conflicts=True)
|
||||
|
||||
self.import_log.msg += f"Importing {len(mealie_database["recipe_nutrition"])} properties...\n"
|
||||
self.import_log.save()
|
||||
|
||||
property_types_dict = {
|
||||
'calories': PropertyType.objects.get_or_create(name=_('Calories'), space=self.request.space, defaults={'unit': 'kcal', 'fdc_id': 1008})[0],
|
||||
'carbohydrate_content': PropertyType.objects.get_or_create(name=_('Carbohydrates'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1005})[0],
|
||||
'cholesterol_content': PropertyType.objects.get_or_create(name=_('Cholesterol'), space=self.request.space, defaults={'unit': 'mg', 'fdc_id': 1253})[0],
|
||||
'fat_content': PropertyType.objects.get_or_create(name=_('Fat'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1004})[0],
|
||||
'fiber_content': PropertyType.objects.get_or_create(name=_('Fiber'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1079})[0],
|
||||
'protein_content': PropertyType.objects.get_or_create(name=_('Protein'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1003})[0],
|
||||
'saturated_fat_content': PropertyType.objects.get_or_create(name=_('Saturated Fat'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1258})[0],
|
||||
'sodium_content': PropertyType.objects.get_or_create(name=_('Sodium'), space=self.request.space, defaults={'unit': 'mg', 'fdc_id': 1093})[0],
|
||||
'sugar_content': PropertyType.objects.get_or_create(name=_('Sugar'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1063})[0],
|
||||
'trans_fat_content': PropertyType.objects.get_or_create(name=_('Trans Fat'), space=self.request.space, defaults={'unit': 'g', 'fdc_id': 1257})[0],
|
||||
'unsaturated_fat_content': PropertyType.objects.get_or_create(name=_('Unsaturated Fat'), space=self.request.space, defaults={'unit': 'g'})[0],
|
||||
}
|
||||
|
||||
with transaction.atomic():
|
||||
recipe_properties_relation = []
|
||||
properties_relation = []
|
||||
for r in mealie_database['recipe_nutrition']:
|
||||
if r['recipe_id'] in recipes_dict:
|
||||
for key in property_types_dict:
|
||||
if r[key]:
|
||||
properties_relation.append(
|
||||
Property(property_type_id=property_types_dict[key].pk,
|
||||
property_amount=Decimal(str(r[key])) / (
|
||||
Decimal(str(recipe_property_factor_dict[r['recipe_id']])) if r['recipe_id'] in recipe_property_factor_dict else 1),
|
||||
open_data_food_slug=r['recipe_id'],
|
||||
space=self.request.space))
|
||||
properties = Property.objects.bulk_create(properties_relation)
|
||||
property_ids = []
|
||||
for p in properties:
|
||||
recipe_properties_relation.append(Recipe.properties.through(recipe_id=recipes_dict[p.open_data_food_slug], property_id=p.pk))
|
||||
property_ids.append(p.pk)
|
||||
Recipe.properties.through.objects.bulk_create(recipe_properties_relation, ignore_conflicts=True)
|
||||
Property.objects.filter(id__in=property_ids).update(open_data_food_slug=None)
|
||||
|
||||
# delete unused property types
|
||||
for pT in property_types_dict:
|
||||
try:
|
||||
property_types_dict[pT].delete()
|
||||
except:
|
||||
pass
|
||||
|
||||
self.import_log.msg += f"Importing {len(mealie_database["recipe_comments"]) + len(mealie_database["recipe_timeline_events"])} comments and cook logs...\n"
|
||||
self.import_log.save()
|
||||
|
||||
cook_log_list = []
|
||||
for c in mealie_database['recipe_comments']:
|
||||
if c['recipe_id'] in recipes_dict:
|
||||
cook_log_list.append(CookLog(
|
||||
recipe_id=recipes_dict[c['recipe_id']],
|
||||
comment=c['text'],
|
||||
created_at=c['created_at'],
|
||||
created_by=self.request.user,
|
||||
space=self.request.space,
|
||||
))
|
||||
|
||||
for c in mealie_database['recipe_timeline_events']:
|
||||
if c['recipe_id'] in recipes_dict:
|
||||
if c['event_type'] == 'comment':
|
||||
cook_log_list.append(CookLog(
|
||||
recipe_id=recipes_dict[c['recipe_id']],
|
||||
comment=c['message'],
|
||||
created_at=c['created_at'],
|
||||
created_by=self.request.user,
|
||||
space=self.request.space,
|
||||
))
|
||||
|
||||
CookLog.objects.bulk_create(cook_log_list)
|
||||
|
||||
if self.import_meal_plans:
|
||||
self.import_log.msg += f"Importing {len(mealie_database["group_meal_plans"])} meal plans...\n"
|
||||
self.import_log.save()
|
||||
|
||||
meal_types_dict = {}
|
||||
meal_plans = []
|
||||
for m in mealie_database['group_meal_plans']:
|
||||
if m['recipe_id'] in recipes_dict:
|
||||
if not m['entry_type'] in meal_types_dict:
|
||||
meal_type = MealType.objects.get_or_create(name=m['entry_type'], created_by=self.request.user, space=self.request.space)[0]
|
||||
meal_types_dict[m['entry_type']] = meal_type.pk
|
||||
meal_plans.append(MealPlan(
|
||||
recipe_id=recipes_dict[m['recipe_id']] if m['recipe_id'] else None,
|
||||
title=m['title'] if m['title'] else "",
|
||||
note=m['text'] if m['text'] else "",
|
||||
from_date=m['date'],
|
||||
to_date=m['date'],
|
||||
meal_type_id=meal_types_dict[m['entry_type']],
|
||||
created_by=self.request.user,
|
||||
space=self.request.space,
|
||||
))
|
||||
|
||||
MealPlan.objects.bulk_create(meal_plans)
|
||||
|
||||
if self.import_shopping_lists:
|
||||
self.import_log.msg += f"Importing {len(mealie_database["shopping_list_items"])} shopping list items...\n"
|
||||
self.import_log.save()
|
||||
|
||||
shopping_list_items = []
|
||||
for sli in mealie_database['shopping_list_items']:
|
||||
if not sli['checked']:
|
||||
if sli['food_id']:
|
||||
shopping_list_items.append(ShoppingListEntry(
|
||||
amount=sli['quantity'],
|
||||
unit_id=units_dict[sli['unit_id']] if sli['unit_id'] else None,
|
||||
food_id=foods_dict[sli['food_id']] if sli['food_id'] else None,
|
||||
created_by=self.request.user,
|
||||
space=self.request.space,
|
||||
))
|
||||
elif not sli['food_id'] and sli['note'].strip():
|
||||
amount, unit, food, note = ingredient_parser.parse(sli['note'].strip())
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
shopping_list_items.append(ShoppingListEntry(
|
||||
amount=amount,
|
||||
unit=u,
|
||||
food=f,
|
||||
created_by=self.request.user,
|
||||
space=self.request.space,
|
||||
))
|
||||
ShoppingListEntry.objects.bulk_create(shopping_list_items)
|
||||
|
||||
self.import_log.msg += f"Importing Images. This might take some time ...\n"
|
||||
self.import_log.save()
|
||||
for r in mealie_database['recipes']:
|
||||
try:
|
||||
if recipe := Recipe.objects.filter(pk=recipes_dict[r['id']]).first():
|
||||
self.import_recipe_image(recipe, BytesIO(file.read(f'data/recipes/{str(uuid.UUID(str(r['id'])))}/images/original.webp')), filetype='.webp')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return recipes
|
||||
|
||||
def get_file_from_recipe(self, recipe):
|
||||
raise NotImplementedError('Method not implemented in storage integration')
|
||||
@@ -14,7 +14,7 @@ msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2024-08-01 15:04+0200\n"
|
||||
"PO-Revision-Date: 2025-06-23 08:28+0000\n"
|
||||
"PO-Revision-Date: 2025-01-29 13:44+0000\n"
|
||||
"Last-Translator: Ángel <1024mb@users.noreply.translate.tandoor.dev>\n"
|
||||
"Language-Team: Spanish <http://translate.tandoor.dev/projects/tandoor/"
|
||||
"recipes-backend/es/>\n"
|
||||
@@ -284,12 +284,14 @@ msgid "You have more users than allowed in your space."
|
||||
msgstr "Tenés mas usuarios que los permitidos en tu espacio"
|
||||
|
||||
#: .\cookbook\helper\recipe_url_import.py:310
|
||||
#, fuzzy
|
||||
#| msgid "Use fractions"
|
||||
msgid "reverse rotation"
|
||||
msgstr "rotación inversa"
|
||||
msgstr "Usar fracciones"
|
||||
|
||||
#: .\cookbook\helper\recipe_url_import.py:311
|
||||
msgid "careful rotation"
|
||||
msgstr "rotación cuidadosa"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\helper\recipe_url_import.py:312
|
||||
msgid "knead"
|
||||
@@ -396,9 +398,8 @@ msgid "Section"
|
||||
msgstr "Sección"
|
||||
|
||||
#: .\cookbook\management\commands\fix_duplicate_properties.py:15
|
||||
#, fuzzy
|
||||
msgid "Fixes foods with "
|
||||
msgstr "Corrige alimentos con "
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\management\commands\rebuildindex.py:14
|
||||
msgid "Rebuilds full text search index on Recipe"
|
||||
@@ -435,14 +436,16 @@ msgid "Other"
|
||||
msgstr "Otro"
|
||||
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:17
|
||||
#, fuzzy
|
||||
#| msgid "Fats"
|
||||
msgid "Fat"
|
||||
msgstr "Grasa"
|
||||
msgstr "Grasas"
|
||||
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:17
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:18
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:19
|
||||
msgid "g"
|
||||
msgstr "gr."
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:18
|
||||
msgid "Carbohydrates"
|
||||
@@ -465,8 +468,6 @@ msgid ""
|
||||
"Maximum file storage for space in MB. 0 for unlimited, -1 to disable file "
|
||||
"upload."
|
||||
msgstr ""
|
||||
"Almacenamiento máximo de archivos para el espacio en MB. 0 para ilimitado, -"
|
||||
"1 para desactivar la carga de archivos."
|
||||
|
||||
#: .\cookbook\models.py:454 .\cookbook\templates\search.html:7
|
||||
#: .\cookbook\templates\settings.html:18
|
||||
@@ -497,16 +498,18 @@ msgid "Nutrition"
|
||||
msgstr "Información Nutricional"
|
||||
|
||||
#: .\cookbook\models.py:918
|
||||
#, fuzzy
|
||||
#| msgid "Merge"
|
||||
msgid "Allergen"
|
||||
msgstr "Alérgeno"
|
||||
msgstr "Combinar"
|
||||
|
||||
#: .\cookbook\models.py:919
|
||||
msgid "Price"
|
||||
msgstr "Precio"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\models.py:919
|
||||
msgid "Goal"
|
||||
msgstr "Objetivo"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\models.py:1408 .\cookbook\templates\search_info.html:28
|
||||
msgid "Simple"
|
||||
@@ -529,40 +532,54 @@ msgid "Food Alias"
|
||||
msgstr "Alias de la Comida"
|
||||
|
||||
#: .\cookbook\models.py:1468
|
||||
#, fuzzy
|
||||
#| msgid "Units"
|
||||
msgid "Unit Alias"
|
||||
msgstr "Alias de unidad"
|
||||
msgstr "Unidades"
|
||||
|
||||
#: .\cookbook\models.py:1469
|
||||
#, fuzzy
|
||||
#| msgid "Keywords"
|
||||
msgid "Keyword Alias"
|
||||
msgstr "Alias de palabra clave"
|
||||
msgstr "Palabras clave"
|
||||
|
||||
#: .\cookbook\models.py:1470
|
||||
#, fuzzy
|
||||
#| msgid "Description"
|
||||
msgid "Description Replace"
|
||||
msgstr "Reemplazo de descripción"
|
||||
msgstr "Descripción"
|
||||
|
||||
#: .\cookbook\models.py:1471
|
||||
#, fuzzy
|
||||
#| msgid "Instructions"
|
||||
msgid "Instruction Replace"
|
||||
msgstr "Reemplazo de instrucciones"
|
||||
msgstr "Instrucciones"
|
||||
|
||||
#: .\cookbook\models.py:1472
|
||||
#, fuzzy
|
||||
#| msgid "New Unit"
|
||||
msgid "Never Unit"
|
||||
msgstr "Unidad prohibida"
|
||||
msgstr "Nueva Unidad"
|
||||
|
||||
#: .\cookbook\models.py:1473
|
||||
msgid "Transpose Words"
|
||||
msgstr "Transponer palabras"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\models.py:1474
|
||||
#, fuzzy
|
||||
#| msgid "Food Alias"
|
||||
msgid "Food Replace"
|
||||
msgstr "Reemplazo de alimento"
|
||||
msgstr "Alias de la Comida"
|
||||
|
||||
#: .\cookbook\models.py:1475
|
||||
#, fuzzy
|
||||
#| msgid "Description"
|
||||
msgid "Unit Replace"
|
||||
msgstr "Reemplazo de unidad"
|
||||
msgstr "Descripción"
|
||||
|
||||
#: .\cookbook\models.py:1476
|
||||
msgid "Name Replace"
|
||||
msgstr "Reemplazo de nombre"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\models.py:1503 .\cookbook\views\delete.py:40
|
||||
#: .\cookbook\views\edit.py:210 .\cookbook\views\new.py:39
|
||||
@@ -570,8 +587,10 @@ msgid "Recipe"
|
||||
msgstr "Receta"
|
||||
|
||||
#: .\cookbook\models.py:1504
|
||||
#, fuzzy
|
||||
#| msgid "Food"
|
||||
msgid "Food"
|
||||
msgstr "Alimento"
|
||||
msgstr "Comida"
|
||||
|
||||
#: .\cookbook\models.py:1505 .\cookbook\templates\base.html:149
|
||||
msgid "Keyword"
|
||||
@@ -629,26 +648,22 @@ msgstr "Invitación para Tandoor Recipes"
|
||||
|
||||
#: .\cookbook\serializer.py:1426
|
||||
msgid "Existing shopping list to update"
|
||||
msgstr "Lista de compras existente para actualizar"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\serializer.py:1428
|
||||
msgid ""
|
||||
"List of ingredient IDs from the recipe to add, if not provided all "
|
||||
"ingredients will be added."
|
||||
msgstr ""
|
||||
"Lista de IDs de ingredientes de la receta para agregar; si no se "
|
||||
"proporciona, se agregarán todos los ingredientes."
|
||||
|
||||
#: .\cookbook\serializer.py:1430
|
||||
msgid ""
|
||||
"Providing a list_recipe ID and servings of 0 will delete that shopping list."
|
||||
msgstr ""
|
||||
"Proporcionar un ID list_recipe y porciones igual a 0 eliminará esa lista de "
|
||||
"compras."
|
||||
|
||||
#: .\cookbook\serializer.py:1439
|
||||
msgid "Amount of food to add to the shopping list"
|
||||
msgstr "Cantidad de alimento a agregar a la lista de compras"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\serializer.py:1441
|
||||
msgid "ID of unit to use for the shopping list"
|
||||
|
||||
@@ -14,8 +14,8 @@ msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2024-08-01 15:04+0200\n"
|
||||
"PO-Revision-Date: 2025-08-10 11:36+0000\n"
|
||||
"Last-Translator: Enzo La Rafale <enzo.chaussivert@gmail.com>\n"
|
||||
"PO-Revision-Date: 2025-02-16 14:58+0000\n"
|
||||
"Last-Translator: Elvis Gosselin <elvis.gosselin@tutanota.com>\n"
|
||||
"Language-Team: French <http://translate.tandoor.dev/projects/tandoor/"
|
||||
"recipes-backend/fr/>\n"
|
||||
"Language: fr\n"
|
||||
@@ -405,7 +405,7 @@ msgstr "Rubrique"
|
||||
|
||||
#: .\cookbook\management\commands\fix_duplicate_properties.py:15
|
||||
msgid "Fixes foods with "
|
||||
msgstr "Corriger les aliments avec "
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\management\commands\rebuildindex.py:14
|
||||
msgid "Rebuilds full text search index on Recipe"
|
||||
@@ -442,6 +442,8 @@ msgid "Other"
|
||||
msgstr "Autre"
|
||||
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:17
|
||||
#, fuzzy
|
||||
#| msgid "Fats"
|
||||
msgid "Fat"
|
||||
msgstr "Matières grasses"
|
||||
|
||||
@@ -449,7 +451,7 @@ msgstr "Matières grasses"
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:18
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:19
|
||||
msgid "g"
|
||||
msgstr "g"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:18
|
||||
msgid "Carbohydrates"
|
||||
@@ -465,7 +467,7 @@ msgstr "Calories"
|
||||
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:20
|
||||
msgid "kcal"
|
||||
msgstr "kcal"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\models.py:325
|
||||
msgid ""
|
||||
@@ -552,24 +554,30 @@ msgid "Instruction Replace"
|
||||
msgstr "Remplacer l'instruction"
|
||||
|
||||
#: .\cookbook\models.py:1472
|
||||
#, fuzzy
|
||||
#| msgid "New Unit"
|
||||
msgid "Never Unit"
|
||||
msgstr "Aucune unité"
|
||||
msgstr "Nouvelle unité"
|
||||
|
||||
#: .\cookbook\models.py:1473
|
||||
msgid "Transpose Words"
|
||||
msgstr "Transposer les mots"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\models.py:1474
|
||||
#, fuzzy
|
||||
#| msgid "Food Alias"
|
||||
msgid "Food Replace"
|
||||
msgstr "Aliment alternatif"
|
||||
msgstr "Aliment équivalent"
|
||||
|
||||
#: .\cookbook\models.py:1475
|
||||
#, fuzzy
|
||||
#| msgid "Description Replace"
|
||||
msgid "Unit Replace"
|
||||
msgstr "Remplacer l'unité"
|
||||
msgstr "Remplacer la Description"
|
||||
|
||||
#: .\cookbook\models.py:1476
|
||||
msgid "Name Replace"
|
||||
msgstr "Remplacer le nom"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\models.py:1503 .\cookbook\views\delete.py:40
|
||||
#: .\cookbook\views\edit.py:210 .\cookbook\views\new.py:39
|
||||
@@ -1032,8 +1040,10 @@ msgid "Properties"
|
||||
msgstr "Propriétés"
|
||||
|
||||
#: .\cookbook\templates\base.html:301 .\cookbook\views\lists.py:255
|
||||
#, fuzzy
|
||||
#| msgid "Account Connections"
|
||||
msgid "Unit Conversions"
|
||||
msgstr "Conversions d'unités"
|
||||
msgstr "Comptes connectés"
|
||||
|
||||
#: .\cookbook\templates\base.html:318 .\cookbook\templates\index.html:47
|
||||
msgid "Import Recipe"
|
||||
@@ -1053,8 +1063,10 @@ msgid "Space Settings"
|
||||
msgstr "Paramètres de groupe"
|
||||
|
||||
#: .\cookbook\templates\base.html:340
|
||||
#, fuzzy
|
||||
#| msgid "External Recipes"
|
||||
msgid "External Connectors"
|
||||
msgstr "Connecteurs externes"
|
||||
msgstr "Recettes externes"
|
||||
|
||||
#: .\cookbook\templates\base.html:345 .\cookbook\templates\system.html:13
|
||||
msgid "System"
|
||||
@@ -1518,8 +1530,10 @@ msgid "Back"
|
||||
msgstr "Retour"
|
||||
|
||||
#: .\cookbook\templates\property_editor.html:7
|
||||
#, fuzzy
|
||||
#| msgid "Ingredient Editor"
|
||||
msgid "Property Editor"
|
||||
msgstr "Éditeur de propriété"
|
||||
msgstr "Éditeur d’ingrédients"
|
||||
|
||||
#: .\cookbook\templates\recipe_view.html:36
|
||||
msgid "Comments"
|
||||
@@ -1997,8 +2011,10 @@ msgid "Sign in using"
|
||||
msgstr "Se connecter avec"
|
||||
|
||||
#: .\cookbook\templates\space_manage.html:7
|
||||
#, fuzzy
|
||||
#| msgid "Space Membership"
|
||||
msgid "Space Management"
|
||||
msgstr "Gestion de l'espace"
|
||||
msgstr "Adhésion à l'espace"
|
||||
|
||||
#: .\cookbook\templates\space_manage.html:26
|
||||
msgid "Space:"
|
||||
@@ -2211,8 +2227,10 @@ msgid "Info"
|
||||
msgstr "Info"
|
||||
|
||||
#: .\cookbook\templates\system.html:110 .\cookbook\templates\system.html:127
|
||||
#, fuzzy
|
||||
#| msgid "Use fractions"
|
||||
msgid "Migrations"
|
||||
msgstr "Migrations"
|
||||
msgstr "Utiliser les fractions"
|
||||
|
||||
#: .\cookbook\templates\system.html:116
|
||||
msgid ""
|
||||
@@ -2247,8 +2265,10 @@ msgid "Hide"
|
||||
msgstr "Cacher"
|
||||
|
||||
#: .\cookbook\templates\system.html:210
|
||||
#, fuzzy
|
||||
#| msgid "Show Log"
|
||||
msgid "Show"
|
||||
msgstr "Afficher"
|
||||
msgstr "Afficher le journal"
|
||||
|
||||
#: .\cookbook\templates\url_import.html:8
|
||||
msgid "URL Import"
|
||||
@@ -2333,9 +2353,11 @@ msgstr ""
|
||||
"MM-DD."
|
||||
|
||||
#: .\cookbook\views\api.py:744
|
||||
#, fuzzy
|
||||
#| msgid "ID of recipe a step is part of. For multiple repeat parameter."
|
||||
msgid "Filter meal plans with MealType ID. For multiple repeat parameter."
|
||||
msgstr ""
|
||||
"Filtrer le planning des repas avec l'identifiant MealType. Pour plusieurs "
|
||||
"Identifiant de la recette dont fait partie une étape. Pour plusieurs "
|
||||
"paramètres de répétition."
|
||||
|
||||
#: .\cookbook\views\api.py:872
|
||||
@@ -2438,27 +2460,18 @@ msgstr ""
|
||||
#: .\cookbook\views\api.py:922
|
||||
msgid "ID of book a recipe should be in. For multiple repeat parameter."
|
||||
msgstr ""
|
||||
"ID du livre dans lequel une recette doit se trouver. Pour plusieurs "
|
||||
"paramètres de répétition."
|
||||
|
||||
#: .\cookbook\views\api.py:923
|
||||
msgid "Book IDs, repeat for multiple. Return recipes with any of the books"
|
||||
msgstr ""
|
||||
"IDs de livre, répéter pour plusieurs livres. Renvoie les recettes dans "
|
||||
"n'importe quel livre."
|
||||
|
||||
#: .\cookbook\views\api.py:924
|
||||
msgid "Book IDs, repeat for multiple. Return recipes with all of the books."
|
||||
msgstr ""
|
||||
"IDs de livre, répéter pour plusieurs livres. Renvoie les recettes dans tous "
|
||||
"les livre."
|
||||
|
||||
#: .\cookbook\views\api.py:925
|
||||
#, fuzzy
|
||||
msgid "Book IDs, repeat for multiple. Exclude recipes with any of the books."
|
||||
msgstr ""
|
||||
"Identifiants de livres : répéter pour plusieurs. Exclure les recettes de "
|
||||
"l'un des livres."
|
||||
|
||||
#: .\cookbook\views\api.py:926
|
||||
msgid "Book IDs, repeat for multiple. Exclude recipes with all of the books."
|
||||
|
||||
@@ -11,7 +11,7 @@ msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2024-08-01 15:04+0200\n"
|
||||
"PO-Revision-Date: 2025-07-21 09:43+0000\n"
|
||||
"PO-Revision-Date: 2024-11-05 10:58+0000\n"
|
||||
"Last-Translator: Aija Kozlovska <kozlovska.aija@gmail.com>\n"
|
||||
"Language-Team: Latvian <http://translate.tandoor.dev/projects/tandoor/"
|
||||
"recipes-backend/lv/>\n"
|
||||
@@ -20,7 +20,7 @@ msgstr ""
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : n != 0 ? 1 : 2;\n"
|
||||
"X-Generator: Weblate 5.8.4\n"
|
||||
"X-Generator: Weblate 5.6.2\n"
|
||||
|
||||
#: .\cookbook\forms.py:45
|
||||
msgid ""
|
||||
@@ -83,8 +83,8 @@ msgid ""
|
||||
"Leave empty for dropbox and enter only base url for nextcloud (<code>/remote."
|
||||
"php/webdav/</code> is added automatically)"
|
||||
msgstr ""
|
||||
"Atstājiet tukšu Dropbox un ievadiet tikai Nextcloud bāzes URL (<code> /remote"
|
||||
".php/webdav/ </code> tiek pievienots automātiski)"
|
||||
"Atstājiet tukšu Dropbox un ievadiet tikai Nextcloud bāzes URL (<kods> /"
|
||||
"remote.php/webdav/ </code> tiek pievienots automātiski)"
|
||||
|
||||
#: .\cookbook\forms.py:188
|
||||
msgid ""
|
||||
@@ -147,65 +147,48 @@ msgid ""
|
||||
"Determines how fuzzy a search is if it uses trigram similarity matching (e."
|
||||
"g. low values mean more typos are ignored)."
|
||||
msgstr ""
|
||||
"Nosaka cik precīza ir meklēšana gadījumā, ja tiek izmantota trigram līdzība ("
|
||||
"jo zemāka vērtība, jo vairāk rakstīšanas kļūdas tiek ignorētas)."
|
||||
|
||||
#: .\cookbook\forms.py:340
|
||||
msgid ""
|
||||
"Select type method of search. Click <a href=\"/docs/search/\">here</a> for "
|
||||
"full description of choices."
|
||||
msgstr ""
|
||||
"Izvēlies meklēšanas veidu. Spied <a href=\"/docs/search/\">šeit</a>, lai "
|
||||
"apskatītu visas iespējas."
|
||||
|
||||
#: .\cookbook\forms.py:341
|
||||
msgid ""
|
||||
"Use fuzzy matching on units, keywords and ingredients when editing and "
|
||||
"importing recipes."
|
||||
msgstr ""
|
||||
"Izmanto aptuveno meklēšanu vienībām, atslēgas vārdiem un sastāvdaļām "
|
||||
"importējot un labojot receptes."
|
||||
|
||||
#: .\cookbook\forms.py:342
|
||||
msgid ""
|
||||
"Fields to search ignoring accents. Selecting this option can improve or "
|
||||
"degrade search quality depending on language"
|
||||
msgstr ""
|
||||
"Lauki, kurus meklējot ignorēt akcentus. Šī varianta izvēlēšanās var uzlabot "
|
||||
"vai pasliktināt meklēšanas kvalitāti atkarībā no valodas"
|
||||
|
||||
#: .\cookbook\forms.py:343
|
||||
msgid ""
|
||||
"Fields to search for partial matches. (e.g. searching for 'Pie' will return "
|
||||
"'pie' and 'piece' and 'soapie')"
|
||||
msgstr ""
|
||||
"Lauki, kuros meklēt aptuveno līdzību. (piem. meklējot vārdu 'Kūka' tiks "
|
||||
"atrasts arī 'kūka' un 'ābolkūka')"
|
||||
|
||||
#: .\cookbook\forms.py:344
|
||||
msgid ""
|
||||
"Fields to search for beginning of word matches. (e.g. searching for 'sa' "
|
||||
"will return 'salad' and 'sandwich')"
|
||||
msgstr ""
|
||||
"Lauki, kuros meklēt vārdu līdzības sākumu. (piem meklējot 'la' atradīt "
|
||||
"'lapas' un 'laims')"
|
||||
|
||||
#: .\cookbook\forms.py:345
|
||||
msgid ""
|
||||
"Fields to 'fuzzy' search. (e.g. searching for 'recpie' will find 'recipe'.) "
|
||||
"Note: this option will conflict with 'web' and 'raw' methods of search."
|
||||
msgstr ""
|
||||
"Lauki, kuriem izmantot aptuveno meklēšanu. (piem. meklējot 'recpte' tiks "
|
||||
"atrasts 'recepte'.) Piezīme: šis variants konfliktēs ar 'web' un 'raw' "
|
||||
"meklēšanas metodēm."
|
||||
|
||||
#: .\cookbook\forms.py:346
|
||||
msgid ""
|
||||
"Fields to full text search. Note: 'web', 'phrase', and 'raw' search methods "
|
||||
"only function with fulltext fields."
|
||||
msgstr ""
|
||||
"Lauki priekš pilnās teksta meklēšanas. Piezīme: 'web', 'phrase' un 'raw' "
|
||||
"meklēšanas metodes darbojās tikai ar pilno teksta meklēšanu."
|
||||
|
||||
#: .\cookbook\forms.py:350
|
||||
#, fuzzy
|
||||
@@ -215,11 +198,11 @@ msgstr "Meklēt"
|
||||
|
||||
#: .\cookbook\forms.py:350
|
||||
msgid "Fuzzy Lookups"
|
||||
msgstr "Aptuvenā meklēšana"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\forms.py:350
|
||||
msgid "Ignore Accent"
|
||||
msgstr "Ignorēt akcentus"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\forms.py:350
|
||||
msgid "Partial Match"
|
||||
@@ -762,8 +745,8 @@ msgid ""
|
||||
" ."
|
||||
msgstr ""
|
||||
"Lūdzu apstipriniet, ka\n"
|
||||
" <a href=\"mailto:%(email)s\">%(email)s</a> ir e-pasta adrese "
|
||||
"lietotājam %(user_display)\n"
|
||||
" <a href=\"mailto:%(email)s\">%(email)s</a> ir lietotāja "
|
||||
"%(user_display) e-pasta adrese\n"
|
||||
" ."
|
||||
|
||||
#: .\cookbook\templates\account\email_confirm.html:22
|
||||
@@ -1348,7 +1331,7 @@ msgstr ""
|
||||
#: .\cookbook\templates\markdown_info.html:57
|
||||
#: .\cookbook\templates\markdown_info.html:73
|
||||
msgid "or by leaving a blank line in between."
|
||||
msgstr "vai atstājot tukšu rindu starp."
|
||||
msgstr "vai atstājot tukšu rindu starp ."
|
||||
|
||||
#: .\cookbook\templates\markdown_info.html:59
|
||||
#: .\cookbook\templates\markdown_info.html:74
|
||||
@@ -1460,7 +1443,7 @@ msgstr "Nav Tiesību"
|
||||
|
||||
#: .\cookbook\templates\no_groups_info.html:17
|
||||
msgid "You do not have any groups and therefor cannot use this application."
|
||||
msgstr "Jūs neesat nevienā grupā un tādēļ nevarat izmantot šo lietotni."
|
||||
msgstr "Jūs neesat nevienā grupā un tādēļ nevarat izmantot šo lietotni!"
|
||||
|
||||
#: .\cookbook\templates\no_groups_info.html:18
|
||||
#: .\cookbook\templates\no_perm_info.html:15
|
||||
@@ -1477,7 +1460,7 @@ msgid ""
|
||||
"You do not have the required permissions to view this page or perform this "
|
||||
"action."
|
||||
msgstr ""
|
||||
"Jums nav nepieciešamo atļauju, lai skatītu šo vietni vai veiktu šo darbību."
|
||||
"Jums nav nepieciešamo atļauju, lai skatītu šo vietni vai veiktu šo darbību!"
|
||||
|
||||
#: .\cookbook\templates\offline.html:6
|
||||
msgid "Offline"
|
||||
@@ -1565,16 +1548,6 @@ msgid ""
|
||||
"html#TEXTSEARCH-PARSING-QUERIES>Postgresql's website.</a>\n"
|
||||
" "
|
||||
msgstr ""
|
||||
" \n"
|
||||
" Pilnā teksta meklēšanas mēģinājums vienkāršot dotos vārdus, lai "
|
||||
"tie sakristu ar tipiskajiem variantiem. Piemēram: 'griezt', 'griezšana', "
|
||||
"'griezums' tiks vienkāršots uz 'griez'.\n"
|
||||
" Lai kontrolētu meklētāja darbību ievadot vairākus meklējamos "
|
||||
"vārdus, ir pieejamas vairākas zemāk aprakstītās metodes.\n"
|
||||
" Pilno tehnisko informāciju par tām var apskatīt <a "
|
||||
"href=https://www.postgresql.org/docs/current/textsearch-controls.html"
|
||||
"#TEXTSEARCH-PARSING-QUERIES>Postgresql mājas lapā.</a>\n"
|
||||
" "
|
||||
|
||||
#: .\cookbook\templates\search_info.html:29
|
||||
msgid ""
|
||||
@@ -2574,12 +2547,22 @@ msgid "Unable to determine PostgreSQL version."
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\views\views.py:317
|
||||
#, fuzzy
|
||||
#| msgid ""
|
||||
#| "\n"
|
||||
#| " This application is not running with a Postgres database "
|
||||
#| "backend. This is ok but not recommended as some\n"
|
||||
#| " features only work with postgres databases.\n"
|
||||
#| " "
|
||||
msgid ""
|
||||
"This application is not running with a Postgres database backend. This is ok "
|
||||
"but not recommended as some features only work with postgres databases."
|
||||
msgstr ""
|
||||
"Šī lietojumprogramma nedarbojas, izmantojot Postgres datubāzi. Tas ir labi, "
|
||||
"bet nav ieteicams, jo dažas funkcijas darbojas tikai ar Postgres datu bāzēm."
|
||||
"\n"
|
||||
" Šī lietojumprogramma nedarbojas, izmantojot Postgres datubāzi. "
|
||||
"Tas ir labi, bet nav ieteicams, jo dažas\n"
|
||||
" funkcijas darbojas tikai ar Postgres datu bāzēm.\n"
|
||||
" "
|
||||
|
||||
#: .\cookbook\views\views.py:360
|
||||
#, fuzzy
|
||||
|
||||
@@ -13,8 +13,8 @@ msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2024-08-01 15:04+0200\n"
|
||||
"PO-Revision-Date: 2025-07-31 19:14+0000\n"
|
||||
"Last-Translator: Justin Straver <justin.straver@gmail.com>\n"
|
||||
"PO-Revision-Date: 2025-02-16 14:58+0000\n"
|
||||
"Last-Translator: Cots Partier <cots.pastier.34@icloud.com>\n"
|
||||
"Language-Team: Dutch <http://translate.tandoor.dev/projects/tandoor/"
|
||||
"recipes-backend/nl/>\n"
|
||||
"Language: nl\n"
|
||||
@@ -46,7 +46,7 @@ msgstr "Voorbereidingstijd in minuten"
|
||||
|
||||
#: .\cookbook\forms.py:62
|
||||
msgid "Waiting time (cooking/baking) in minutes"
|
||||
msgstr "Wachttijd in minuten (koken en bakken)"
|
||||
msgstr "Wacht tijd in minuten (koken en bakken)"
|
||||
|
||||
#: .\cookbook\forms.py:63 .\cookbook\forms.py:222 .\cookbook\forms.py:246
|
||||
msgid "Path"
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -8,8 +8,8 @@ msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2024-08-01 15:04+0200\n"
|
||||
"PO-Revision-Date: 2025-08-10 11:36+0000\n"
|
||||
"Last-Translator: Elias Sjögreen <eliassjogreen1@gmail.com>\n"
|
||||
"PO-Revision-Date: 2025-02-07 08:58+0000\n"
|
||||
"Last-Translator: Mattias G <mattias.granlund@gmail.com>\n"
|
||||
"Language-Team: Swedish <http://translate.tandoor.dev/projects/tandoor/"
|
||||
"recipes-backend/sv/>\n"
|
||||
"Language: sv\n"
|
||||
@@ -641,8 +641,6 @@ msgstr "ID eller enhet att använda för inköpslistan"
|
||||
#: .\cookbook\serializer.py:1443
|
||||
msgid "When set to true will delete all food from active shopping lists."
|
||||
msgstr ""
|
||||
"Om det här alternativet är aktiverat kommer alla matvaror att raderas från "
|
||||
"de aktiva inköpslistorna."
|
||||
|
||||
#: .\cookbook\tables.py:69 .\cookbook\tables.py:83
|
||||
#: .\cookbook\templates\generic\delete_template.html:7
|
||||
@@ -725,13 +723,10 @@ msgid ""
|
||||
"You currently do not have any e-mail address set up. You should really add "
|
||||
"an e-mail address so you can receive notifications, reset your password, etc."
|
||||
msgstr ""
|
||||
"Just nu har du inga e-post adresser konfigurerade. Du borde verkligen lägga "
|
||||
"till en e-post adress så att du kan får notiser, återställa ditt lösenord, "
|
||||
"mm."
|
||||
|
||||
#: .\cookbook\templates\account\email.html:64
|
||||
msgid "Add E-mail Address"
|
||||
msgstr "Lägg till en e-post adress"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\templates\account\email.html:69
|
||||
msgid "Add E-mail"
|
||||
@@ -739,12 +734,12 @@ msgstr "Lägg till email"
|
||||
|
||||
#: .\cookbook\templates\account\email.html:79
|
||||
msgid "Do you really want to remove the selected e-mail address?"
|
||||
msgstr "Vill du verkligen ta bort den valda e-postadressen?"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\templates\account\email_confirm.html:6
|
||||
#: .\cookbook\templates\account\email_confirm.html:10
|
||||
msgid "Confirm E-mail Address"
|
||||
msgstr "Bekräfta e-postadress"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\templates\account\email_confirm.html:16
|
||||
#, python-format
|
||||
@@ -754,10 +749,6 @@ msgid ""
|
||||
"for user %(user_display)s\n"
|
||||
" ."
|
||||
msgstr ""
|
||||
"Vänligen bekräfra att\n"
|
||||
" <a href=\"mailto:%(email)s\">%(email)s</a> är e-postadressen för "
|
||||
"användaren %(user_display)s\n"
|
||||
" ."
|
||||
|
||||
#: .\cookbook\templates\account\email_confirm.html:22
|
||||
#: .\cookbook\templates\generic\delete_template.html:72
|
||||
@@ -771,9 +762,6 @@ msgid ""
|
||||
" <a href=\"%(email_url)s\">issue a new e-mail confirmation "
|
||||
"request</a>."
|
||||
msgstr ""
|
||||
"Denna e-post bekräftelselänk är utgången eller felaktig. Vänligen\n"
|
||||
" <a href=\"%(email_url)s\">skapa en ny "
|
||||
"e-postbekräftelsebegäran</a>."
|
||||
|
||||
#: .\cookbook\templates\account\login.html:8 .\cookbook\templates\base.html:388
|
||||
#: .\cookbook\templates\openid\login.html:8
|
||||
@@ -797,17 +785,19 @@ msgstr "Logga in"
|
||||
#: .\cookbook\templates\account\password_reset_done.html:33
|
||||
#: .\cookbook\templates\socialaccount\signup.html:8
|
||||
#: .\cookbook\templates\socialaccount\signup.html:57
|
||||
#, fuzzy
|
||||
#| msgid "Sign In"
|
||||
msgid "Sign Up"
|
||||
msgstr "Registrera dig"
|
||||
msgstr "Logga in"
|
||||
|
||||
#: .\cookbook\templates\account\login.html:38
|
||||
msgid "Lost your password?"
|
||||
msgstr "Glömt ditt lösenord?"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\templates\account\login.html:39
|
||||
#: .\cookbook\templates\account\password_reset.html:29
|
||||
msgid "Reset My Password"
|
||||
msgstr "Återställ mitt lösenord"
|
||||
msgstr ""
|
||||
|
||||
#: .\cookbook\templates\account\login.html:50
|
||||
msgid "Social Login"
|
||||
@@ -834,8 +824,10 @@ msgstr "Är du säker på att du vill logga ut?"
|
||||
#: .\cookbook\templates\account\password_reset_from_key.html:13
|
||||
#: .\cookbook\templates\account\password_reset_from_key_done.html:7
|
||||
#: .\cookbook\templates\account\password_reset_from_key_done.html:13
|
||||
#, fuzzy
|
||||
#| msgid "Changes saved!"
|
||||
msgid "Change Password"
|
||||
msgstr "Ändra lösenord"
|
||||
msgstr "Ändringar sparade!"
|
||||
|
||||
#: .\cookbook\templates\account\password_change.html:12
|
||||
#: .\cookbook\templates\account\password_set.html:12
|
||||
@@ -858,20 +850,18 @@ msgid ""
|
||||
"Forgotten your password? Enter your e-mail address below, and we'll send you "
|
||||
"an e-mail allowing you to reset it."
|
||||
msgstr ""
|
||||
"Glömt ditt lösenord? Ange din e-postadress nedanför så skickar vi ett "
|
||||
"återställningmail."
|
||||
|
||||
#: .\cookbook\templates\account\password_reset.html:32
|
||||
#, fuzzy
|
||||
#| msgid "Password reset is not implemented for the time being!"
|
||||
msgid "Password reset is disabled on this instance."
|
||||
msgstr "Återställning av lösenord är avaktiverat på denna instans."
|
||||
msgstr "Återställning av lösenord har ännu inte lagts till!"
|
||||
|
||||
#: .\cookbook\templates\account\password_reset_done.html:25
|
||||
msgid ""
|
||||
"We have sent you an e-mail. Please contact us if you do not receive it "
|
||||
"within a few minutes."
|
||||
msgstr ""
|
||||
"Vi har skickat ett e-postmeddelande till dig. Om du inte har fått det inom "
|
||||
"några minuter, vänligen kontakta oss."
|
||||
|
||||
#: .\cookbook\templates\account\password_reset_from_key.html:13
|
||||
#, fuzzy
|
||||
|
||||
@@ -8,22 +8,22 @@ msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2024-08-01 15:04+0200\n"
|
||||
"PO-Revision-Date: 2025-08-02 07:49+0000\n"
|
||||
"Last-Translator: TC Kuo <tckuo7@gmail.com>\n"
|
||||
"Language-Team: Chinese (Traditional Han script) <http://translate.tandoor."
|
||||
"dev/projects/tandoor/recipes-backend/zh_Hant/>\n"
|
||||
"PO-Revision-Date: 2024-11-04 10:29+0000\n"
|
||||
"Last-Translator: Johnny Ip <ip.iohnny@gmail.com>\n"
|
||||
"Language-Team: Chinese (Traditional) <http://translate.tandoor.dev/projects/"
|
||||
"tandoor/recipes-backend/zh_Hant/>\n"
|
||||
"Language: zh_Hant\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"Plural-Forms: nplurals=1; plural=0;\n"
|
||||
"X-Generator: Weblate 5.8.4\n"
|
||||
"X-Generator: Weblate 5.6.2\n"
|
||||
|
||||
#: .\cookbook\forms.py:45
|
||||
msgid ""
|
||||
"Both fields are optional. If none are given the username will be displayed "
|
||||
"instead"
|
||||
msgstr "這兩個欄位都是可選的。如果沒有輸入,將顯示用戶名"
|
||||
msgstr "這兩個字段都是可選的。如果沒有輸入,將顯示用戶名"
|
||||
|
||||
#: .\cookbook\forms.py:62 .\cookbook\forms.py:246
|
||||
msgid "Name"
|
||||
@@ -31,7 +31,7 @@ msgstr "名字"
|
||||
|
||||
#: .\cookbook\forms.py:62 .\cookbook\forms.py:246 .\cookbook\views\lists.py:103
|
||||
msgid "Keywords"
|
||||
msgstr "關鍵字"
|
||||
msgstr "關鍵詞"
|
||||
|
||||
#: .\cookbook\forms.py:62
|
||||
msgid "Preparation time in minutes"
|
||||
@@ -51,13 +51,14 @@ msgstr "存儲ID"
|
||||
|
||||
#: .\cookbook\forms.py:93
|
||||
msgid "Default"
|
||||
msgstr "預設"
|
||||
msgstr "默認"
|
||||
|
||||
#: .\cookbook\forms.py:121
|
||||
msgid ""
|
||||
"To prevent duplicates recipes with the same name as existing ones are "
|
||||
"ignored. Check this box to import everything."
|
||||
msgstr "為防止重複,忽略與現有同名的食譜。選中此框可導入所有內容(包括同名食譜)。"
|
||||
msgstr ""
|
||||
"為防止重復,忽略與現有同名的菜譜。選中此框可導入所有內容(包括同名菜譜)。"
|
||||
|
||||
#: .\cookbook\forms.py:143
|
||||
msgid "Add your comment: "
|
||||
@@ -240,7 +241,7 @@ msgstr "你沒有必要的權限來查看這個頁面!"
|
||||
#: .\cookbook\helper\permission_helper.py:237
|
||||
#: .\cookbook\helper\permission_helper.py:252
|
||||
msgid "You cannot interact with this object as it is not owned by you!"
|
||||
msgstr "你不能與此對象互動,因為它不屬於你!"
|
||||
msgstr "你不能與此對象交互,因為它不屬於你!"
|
||||
|
||||
#: .\cookbook\helper\permission_helper.py:402
|
||||
msgid "You have reached the maximum number of recipes for your space."
|
||||
@@ -310,16 +311,16 @@ msgstr "在導入過程中發生了一個意外的錯誤。請確認你上傳的
|
||||
|
||||
#: .\cookbook\integration\integration.py:217
|
||||
msgid "The following recipes were ignored because they already existed:"
|
||||
msgstr "以下食譜被忽略了,因為它們已經存在了:"
|
||||
msgstr "以下菜譜被忽略了,因為它們已經存在了:"
|
||||
|
||||
#: .\cookbook\integration\integration.py:221
|
||||
#, python-format
|
||||
msgid "Imported %s recipes."
|
||||
msgstr "導入了%s食譜。"
|
||||
msgstr "導入了%s菜譜。"
|
||||
|
||||
#: .\cookbook\integration\openeats.py:28
|
||||
msgid "Recipe source:"
|
||||
msgstr "食譜來源:"
|
||||
msgstr "菜譜來源:"
|
||||
|
||||
#: .\cookbook\integration\paprika.py:49
|
||||
msgid "Notes"
|
||||
@@ -327,7 +328,7 @@ msgstr "說明"
|
||||
|
||||
#: .\cookbook\integration\paprika.py:52
|
||||
msgid "Nutritional Information"
|
||||
msgstr "營養資訊"
|
||||
msgstr "營養信息"
|
||||
|
||||
#: .\cookbook\integration\paprika.py:56
|
||||
msgid "Source"
|
||||
@@ -644,7 +645,7 @@ msgstr "電子郵件地址"
|
||||
#: .\cookbook\templates\socialaccount\connections.html:10
|
||||
#: .\cookbook\templates\user_settings.html:8
|
||||
msgid "Settings"
|
||||
msgstr "設定"
|
||||
msgstr "設置"
|
||||
|
||||
#: .\cookbook\templates\account\email.html:13
|
||||
msgid "Email"
|
||||
@@ -1877,7 +1878,7 @@ msgstr "你可以被邀請加入現有空間或創建自己的空間。"
|
||||
|
||||
#: .\cookbook\templates\space_overview.html:53
|
||||
msgid "Owner"
|
||||
msgstr "擁有者"
|
||||
msgstr "所有者"
|
||||
|
||||
#: .\cookbook\templates\space_overview.html:57
|
||||
msgid "Leave Space"
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,34 +0,0 @@
|
||||
# Generated by Django 4.2.22 on 2025-08-31 09:11
|
||||
|
||||
from django.db import migrations
|
||||
from django_scopes import scopes_disabled
|
||||
|
||||
|
||||
def migrate_comments(apps, schema_editor):
|
||||
with scopes_disabled():
|
||||
Comment = apps.get_model('cookbook', 'Comment')
|
||||
CookLog = apps.get_model('cookbook', 'CookLog')
|
||||
|
||||
cook_logs = []
|
||||
|
||||
for c in Comment.objects.all():
|
||||
cook_logs.append(CookLog(
|
||||
recipe=c.recipe,
|
||||
created_by=c.created_by,
|
||||
created_at=c.created_at,
|
||||
comment=c.text,
|
||||
space=c.recipe.space,
|
||||
))
|
||||
|
||||
CookLog.objects.bulk_create(cook_logs, unique_fields=('recipe', 'comment', 'created_at', 'created_by'))
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0222_alter_shoppinglistrecipe_created_by_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(migrate_comments),
|
||||
]
|
||||
@@ -1,60 +0,0 @@
|
||||
# Generated by Django 4.2.22 on 2025-09-05 06:51
|
||||
|
||||
import cookbook.models
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('cookbook', '0223_auto_20250831_1111'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='space',
|
||||
name='ai_credits_balance',
|
||||
field=models.IntegerField(default=0),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='space',
|
||||
name='ai_credits_monthly',
|
||||
field=models.IntegerField(default=100),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='AiProvider',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(max_length=128)),
|
||||
('description', models.TextField(blank=True)),
|
||||
('api_key', models.CharField(max_length=2048)),
|
||||
('model_name', models.CharField(max_length=256)),
|
||||
('url', models.CharField(blank=True, max_length=2048, null=True)),
|
||||
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||
('updated_at', models.DateTimeField(auto_now=True)),
|
||||
('space', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='cookbook.space')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='AiLog',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('function', models.CharField(max_length=64)),
|
||||
('credit_cost', models.DecimalField(decimal_places=4, max_digits=16)),
|
||||
('credits_from_balance', models.BooleanField(default=False)),
|
||||
('input_tokens', models.IntegerField(default=0)),
|
||||
('output_tokens', models.IntegerField(default=0)),
|
||||
('start_time', models.DateTimeField(null=True)),
|
||||
('end_time', models.DateTimeField(null=True)),
|
||||
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||
('updated_at', models.DateTimeField(auto_now=True)),
|
||||
('ai_provider', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='cookbook.aiprovider')),
|
||||
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
|
||||
('space', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cookbook.space')),
|
||||
],
|
||||
bases=(models.Model, cookbook.models.PermissionModelMixin),
|
||||
),
|
||||
]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 4.2.22 on 2025-09-08 19:21
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0224_space_ai_credits_balance_space_ai_credits_monthly_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='space',
|
||||
name='ai_enabled',
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
@@ -1,23 +0,0 @@
|
||||
# Generated by Django 4.2.22 on 2025-09-08 20:00
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0225_space_ai_enabled'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='aiprovider',
|
||||
name='log_credit_cost',
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='space',
|
||||
name='ai_credits_monthly',
|
||||
field=models.IntegerField(default=10000),
|
||||
),
|
||||
]
|
||||
@@ -1,24 +0,0 @@
|
||||
# Generated by Django 4.2.22 on 2025-09-09 11:40
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0226_aiprovider_log_credit_cost_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='space',
|
||||
name='ai_default_provider',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='space_ai_default_provider', to='cookbook.aiprovider'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='space',
|
||||
name='ai_credits_balance',
|
||||
field=models.DecimalField(decimal_places=4, default=0, max_digits=16),
|
||||
),
|
||||
]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 5.2.6 on 2025-09-10 20:11
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('cookbook', '0001_squashed_0227_space_ai_default_provider_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='space',
|
||||
name='space_setup_completed',
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
@@ -329,13 +329,6 @@ class Space(ExportModelOperationsMixin('space'), models.Model):
|
||||
demo = models.BooleanField(default=False)
|
||||
food_inherit = models.ManyToManyField(FoodInheritField, blank=True)
|
||||
|
||||
space_setup_completed = models.BooleanField(default=True)
|
||||
|
||||
ai_enabled = models.BooleanField(default=True)
|
||||
ai_credits_monthly = models.IntegerField(default=100)
|
||||
ai_credits_balance = models.DecimalField(default=0, max_digits=16, decimal_places=4)
|
||||
ai_default_provider = models.ForeignKey("AiProvider", on_delete=models.SET_NULL, null=True, blank=True, related_name='space_ai_default_provider')
|
||||
|
||||
internal_note = models.TextField(blank=True, null=True)
|
||||
|
||||
def safe_delete(self):
|
||||
@@ -348,9 +341,6 @@ class Space(ExportModelOperationsMixin('space'), models.Model):
|
||||
BookmarkletImport.objects.filter(space=self).delete()
|
||||
CustomFilter.objects.filter(space=self).delete()
|
||||
|
||||
AiLog.objects.filter(space=self).delete()
|
||||
AiProvider.objects.filter(space=self).delete()
|
||||
|
||||
Property.objects.filter(space=self).delete()
|
||||
PropertyType.objects.filter(space=self).delete()
|
||||
|
||||
@@ -403,41 +393,6 @@ class Space(ExportModelOperationsMixin('space'), models.Model):
|
||||
return self.name
|
||||
|
||||
|
||||
class AiProvider(models.Model):
|
||||
name = models.CharField(max_length=128)
|
||||
description = models.TextField(blank=True)
|
||||
# AiProviders can be global, so space=null is allowed (configurable by superusers)
|
||||
space = models.ForeignKey(Space, on_delete=models.CASCADE, null=True)
|
||||
|
||||
api_key = models.CharField(max_length=2048)
|
||||
model_name = models.CharField(max_length=256)
|
||||
url = models.CharField(max_length=2048, blank=True, null=True)
|
||||
log_credit_cost = models.BooleanField(default=True)
|
||||
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
|
||||
class AiLog(models.Model, PermissionModelMixin):
|
||||
F_FILE_IMPORT = 'FILE_IMPORT'
|
||||
|
||||
ai_provider = models.ForeignKey(AiProvider, on_delete=models.SET_NULL, null=True)
|
||||
function = models.CharField(max_length=64)
|
||||
credit_cost = models.DecimalField(max_digits=16, decimal_places=4)
|
||||
# if credits from balance were used, else its from monthly quota
|
||||
credits_from_balance = models.BooleanField(default=False)
|
||||
|
||||
input_tokens = models.IntegerField(default=0)
|
||||
output_tokens = models.IntegerField(default=0)
|
||||
start_time = models.DateTimeField(null=True)
|
||||
end_time = models.DateTimeField(null=True)
|
||||
|
||||
space = models.ForeignKey(Space, on_delete=models.CASCADE)
|
||||
created_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
|
||||
class ConnectorConfig(models.Model, PermissionModelMixin):
|
||||
HOMEASSISTANT = 'HomeAssistant'
|
||||
CONNECTER_TYPE = ((HOMEASSISTANT, 'HomeAssistant'),)
|
||||
|
||||
@@ -24,9 +24,8 @@ from rest_framework.fields import IntegerField
|
||||
|
||||
from cookbook.helper.CustomStorageClass import CachedS3Boto3Storage
|
||||
from cookbook.helper.HelperFunctions import str2bool
|
||||
from cookbook.helper.ai_helper import get_monthly_token_usage
|
||||
from cookbook.helper.image_processing import is_file_type_allowed
|
||||
from cookbook.helper.permission_helper import above_space_limit, create_space_for_user
|
||||
from cookbook.helper.permission_helper import above_space_limit
|
||||
from cookbook.helper.property_helper import FoodPropertyHelper
|
||||
from cookbook.helper.shopping_helper import RecipeShoppingEditor
|
||||
from cookbook.helper.unit_conversion_helper import UnitConversionHelper
|
||||
@@ -37,7 +36,7 @@ from cookbook.models import (Automation, BookmarkletImport, Comment, CookLog, Cu
|
||||
ShareLink, ShoppingListEntry, ShoppingListRecipe, Space,
|
||||
Step, Storage, Supermarket, SupermarketCategory,
|
||||
SupermarketCategoryRelation, Sync, SyncLog, Unit, UnitConversion,
|
||||
UserFile, UserPreference, UserSpace, ViewLog, ConnectorConfig, SearchPreference, SearchFields, AiLog, AiProvider)
|
||||
UserFile, UserPreference, UserSpace, ViewLog, ConnectorConfig, SearchPreference, SearchFields)
|
||||
from cookbook.templatetags.custom_tags import markdown
|
||||
from recipes.settings import AWS_ENABLED, MEDIA_URL, EMAIL_HOST
|
||||
|
||||
@@ -326,53 +325,12 @@ class UserFileViewSerializer(serializers.ModelSerializer):
|
||||
read_only_fields = ('id', 'file', 'file_download', 'file_size_kb', 'preview', 'created_by', 'created_at')
|
||||
|
||||
|
||||
class AiProviderSerializer(serializers.ModelSerializer):
|
||||
api_key = serializers.CharField(required=False, write_only=True)
|
||||
|
||||
def create(self, validated_data):
|
||||
validated_data = self.handle_global_space_logic(validated_data)
|
||||
|
||||
return super().create(validated_data)
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
validated_data = self.handle_global_space_logic(validated_data)
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
def handle_global_space_logic(self, validated_data):
|
||||
"""
|
||||
allow superusers to create AI providers without a space but make sure everyone else only uses their own space
|
||||
"""
|
||||
if ('space' not in validated_data or not validated_data['space']) and self.context['request'].user.is_superuser:
|
||||
validated_data['space'] = None
|
||||
else:
|
||||
validated_data['space'] = self.context['request'].space
|
||||
|
||||
return validated_data
|
||||
|
||||
class Meta:
|
||||
model = AiProvider
|
||||
fields = ('id', 'name', 'description', 'api_key', 'model_name', 'url', 'log_credit_cost', 'space', 'created_at', 'updated_at')
|
||||
read_only_fields = ('created_at', 'updated_at',)
|
||||
|
||||
|
||||
class AiLogSerializer(serializers.ModelSerializer):
|
||||
ai_provider = AiProviderSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = AiLog
|
||||
fields = ('id', 'ai_provider', 'function', 'credit_cost', 'credits_from_balance', 'input_tokens', 'output_tokens', 'start_time', 'end_time', 'created_by', 'created_at',
|
||||
'updated_at')
|
||||
read_only_fields = ('__all__',)
|
||||
|
||||
|
||||
class SpaceSerializer(WritableNestedModelSerializer):
|
||||
created_by = UserSerializer(read_only=True)
|
||||
user_count = serializers.SerializerMethodField('get_user_count', read_only=True)
|
||||
recipe_count = serializers.SerializerMethodField('get_recipe_count', read_only=True)
|
||||
file_size_mb = serializers.SerializerMethodField('get_file_size_mb', read_only=True)
|
||||
ai_monthly_credits_used = serializers.SerializerMethodField('get_ai_monthly_credits_used', read_only=True)
|
||||
ai_default_provider = AiProviderSerializer(required=False, allow_null=True)
|
||||
food_inherit = FoodInheritFieldSerializer(many=True, required=False)
|
||||
user_count = serializers.SerializerMethodField('get_user_count')
|
||||
recipe_count = serializers.SerializerMethodField('get_recipe_count')
|
||||
file_size_mb = serializers.SerializerMethodField('get_file_size_mb')
|
||||
food_inherit = FoodInheritFieldSerializer(many=True)
|
||||
image = UserFileViewSerializer(required=False, many=False, allow_null=True)
|
||||
nav_logo = UserFileViewSerializer(required=False, many=False, allow_null=True)
|
||||
custom_space_theme = UserFileViewSerializer(required=False, many=False, allow_null=True)
|
||||
@@ -392,10 +350,6 @@ class SpaceSerializer(WritableNestedModelSerializer):
|
||||
def get_recipe_count(self, obj):
|
||||
return Recipe.objects.filter(space=obj).count()
|
||||
|
||||
@extend_schema_field(int)
|
||||
def get_ai_monthly_credits_used(self, obj):
|
||||
return get_monthly_token_usage(obj)
|
||||
|
||||
@extend_schema_field(float)
|
||||
def get_file_size_mb(self, obj):
|
||||
try:
|
||||
@@ -404,36 +358,7 @@ class SpaceSerializer(WritableNestedModelSerializer):
|
||||
return 0
|
||||
|
||||
def create(self, validated_data):
|
||||
if Space.objects.filter(created_by=self.context['request'].user).count() >= self.context['request'].user.userpreference.max_owned_spaces:
|
||||
raise serializers.ValidationError(
|
||||
_('You have the reached the maximum amount of spaces that can be owned by you.') + f' ({self.context['request'].user.userpreference.max_owned_spaces})')
|
||||
|
||||
name = None
|
||||
if 'name' in validated_data:
|
||||
name = validated_data['name']
|
||||
user_space = create_space_for_user(self.context['request'].user, name)
|
||||
return user_space.space
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
validated_data = self.filter_superuser_parameters(validated_data)
|
||||
|
||||
if 'name' in validated_data:
|
||||
if Space.objects.filter(Q(name=validated_data['name']), ~Q(pk=instance.pk)).exists():
|
||||
raise ValidationError(_('Space Name must be unique.'))
|
||||
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
def filter_superuser_parameters(self, validated_data):
|
||||
if 'ai_enabled' in validated_data and not self.context['request'].user.is_superuser:
|
||||
del validated_data['ai_enabled']
|
||||
|
||||
if 'ai_credits_monthly' in validated_data and not self.context['request'].user.is_superuser:
|
||||
del validated_data['ai_credits_monthly']
|
||||
|
||||
if 'ai_credits_balance' in validated_data and not self.context['request'].user.is_superuser:
|
||||
del validated_data['ai_credits_balance']
|
||||
|
||||
return validated_data
|
||||
raise ValidationError('Cannot create using this endpoint')
|
||||
|
||||
class Meta:
|
||||
model = Space
|
||||
@@ -441,11 +366,10 @@ class SpaceSerializer(WritableNestedModelSerializer):
|
||||
'id', 'name', 'created_by', 'created_at', 'message', 'max_recipes', 'max_file_storage_mb', 'max_users',
|
||||
'allow_sharing', 'demo', 'food_inherit', 'user_count', 'recipe_count', 'file_size_mb',
|
||||
'image', 'nav_logo', 'space_theme', 'custom_space_theme', 'nav_bg_color', 'nav_text_color',
|
||||
'logo_color_32', 'logo_color_128', 'logo_color_144', 'logo_color_180', 'logo_color_192', 'logo_color_512', 'logo_color_svg', 'ai_credits_monthly',
|
||||
'ai_credits_balance', 'ai_monthly_credits_used', 'ai_enabled', 'ai_default_provider', 'space_setup_completed')
|
||||
'logo_color_32', 'logo_color_128', 'logo_color_144', 'logo_color_180', 'logo_color_192', 'logo_color_512', 'logo_color_svg',)
|
||||
read_only_fields = (
|
||||
'id', 'created_by', 'created_at', 'max_recipes', 'max_file_storage_mb', 'max_users', 'allow_sharing',
|
||||
'demo', 'ai_monthly_credits_used')
|
||||
'demo',)
|
||||
|
||||
|
||||
class UserSpaceSerializer(WritableNestedModelSerializer):
|
||||
@@ -668,7 +592,7 @@ class KeywordSerializer(UniqueFieldsMixin, ExtendedRecipeMixin):
|
||||
fields = (
|
||||
'id', 'name', 'label', 'description', 'image', 'parent', 'numchild', 'numrecipe', 'created_at',
|
||||
'updated_at', 'full_name')
|
||||
read_only_fields = ('id', 'label', 'numchild', 'numrecipe', 'parent', 'image')
|
||||
read_only_fields = ('id', 'label', 'numchild', 'parent', 'image')
|
||||
|
||||
|
||||
class UnitSerializer(UniqueFieldsMixin, ExtendedRecipeMixin, OpenDataModelMixin):
|
||||
@@ -863,7 +787,7 @@ class FoodSerializer(UniqueFieldsMixin, WritableNestedModelSerializer, ExtendedR
|
||||
if plural_name := validated_data.pop('plural_name', None):
|
||||
plural_name = plural_name.strip()
|
||||
|
||||
if food := Food.objects.filter(Q(name__iexact=name) | Q(plural_name__iexact=name)).first():
|
||||
if food := Food.objects.filter(Q(name=name) | Q(plural_name=name)).first():
|
||||
return food
|
||||
|
||||
space = validated_data.pop('space', self.context['request'].space)
|
||||
@@ -1114,7 +1038,7 @@ class RecipeOverviewSerializer(RecipeBaseSerializer):
|
||||
fields = (
|
||||
'id', 'name', 'description', 'image', 'keywords', 'working_time',
|
||||
'waiting_time', 'created_by', 'created_at', 'updated_at',
|
||||
'internal', 'private', 'servings', 'servings_text', 'rating', 'last_cooked', 'new', 'recent'
|
||||
'internal', 'servings', 'servings_text', 'rating', 'last_cooked', 'new', 'recent'
|
||||
)
|
||||
# TODO having these readonly fields makes "RecipeOverview.ts" (API Client) not generate the RecipeOverviewToJSON second else block which leads to errors when using the api
|
||||
# TODO find a solution (custom schema?) to have these fields readonly (to save performance) and generate a proper client (two serializers would probably do the trick)
|
||||
@@ -1188,57 +1112,6 @@ class RecipeImportSerializer(SpacedModelSerializer):
|
||||
fields = '__all__'
|
||||
|
||||
|
||||
class RecipeBatchUpdateSerializer(serializers.Serializer):
|
||||
recipes = serializers.ListField(child=serializers.IntegerField())
|
||||
keywords_add = serializers.ListField(child=serializers.IntegerField())
|
||||
keywords_remove = serializers.ListField(child=serializers.IntegerField())
|
||||
keywords_set = serializers.ListField(child=serializers.IntegerField())
|
||||
keywords_remove_all = serializers.BooleanField(default=False)
|
||||
|
||||
working_time = serializers.IntegerField(required=False, allow_null=True)
|
||||
waiting_time = serializers.IntegerField(required=False, allow_null=True)
|
||||
servings = serializers.IntegerField(required=False, allow_null=True)
|
||||
servings_text = serializers.CharField(required=False, allow_null=True, allow_blank=True)
|
||||
|
||||
private = serializers.BooleanField(required=False, allow_null=True)
|
||||
shared_add = serializers.ListField(child=serializers.IntegerField())
|
||||
shared_remove = serializers.ListField(child=serializers.IntegerField())
|
||||
shared_set = serializers.ListField(child=serializers.IntegerField())
|
||||
shared_remove_all = serializers.BooleanField(default=False)
|
||||
|
||||
show_ingredient_overview = serializers.BooleanField(required=False, allow_null=True)
|
||||
clear_description = serializers.BooleanField(required=False, allow_null=True)
|
||||
|
||||
|
||||
class FoodBatchUpdateSerializer(serializers.Serializer):
|
||||
foods = serializers.ListField(child=serializers.IntegerField())
|
||||
|
||||
category = serializers.IntegerField(required=False, allow_null=True)
|
||||
|
||||
substitute_add = serializers.ListField(child=serializers.IntegerField())
|
||||
substitute_remove = serializers.ListField(child=serializers.IntegerField())
|
||||
substitute_set = serializers.ListField(child=serializers.IntegerField())
|
||||
substitute_remove_all = serializers.BooleanField(default=False)
|
||||
|
||||
inherit_fields_add = serializers.ListField(child=serializers.IntegerField())
|
||||
inherit_fields_remove = serializers.ListField(child=serializers.IntegerField())
|
||||
inherit_fields_set = serializers.ListField(child=serializers.IntegerField())
|
||||
inherit_fields_remove_all = serializers.BooleanField(default=False)
|
||||
|
||||
child_inherit_fields_add = serializers.ListField(child=serializers.IntegerField())
|
||||
child_inherit_fields_remove = serializers.ListField(child=serializers.IntegerField())
|
||||
child_inherit_fields_set = serializers.ListField(child=serializers.IntegerField())
|
||||
child_inherit_fields_remove_all = serializers.BooleanField(default=False)
|
||||
|
||||
substitute_children = serializers.BooleanField(required=False, allow_null=True)
|
||||
substitute_siblings = serializers.BooleanField(required=False, allow_null=True)
|
||||
ignore_shopping = serializers.BooleanField(required=False, allow_null=True)
|
||||
on_hand = serializers.BooleanField(required=False, allow_null=True)
|
||||
|
||||
parent_remove = serializers.BooleanField(required=False, allow_null=True)
|
||||
parent_set = serializers.IntegerField(required=False, allow_null=True)
|
||||
|
||||
|
||||
class CustomFilterSerializer(SpacedModelSerializer, WritableNestedModelSerializer):
|
||||
shared = UserSerializer(many=True, required=False)
|
||||
|
||||
@@ -1350,8 +1223,8 @@ class MealPlanSerializer(SpacedModelSerializer, WritableNestedModelSerializer):
|
||||
|
||||
|
||||
class AutoMealPlanSerializer(serializers.Serializer):
|
||||
start_date = serializers.DateTimeField()
|
||||
end_date = serializers.DateTimeField()
|
||||
start_date = serializers.DateField()
|
||||
end_date = serializers.DateField()
|
||||
meal_type_id = serializers.IntegerField()
|
||||
keyword_ids = serializers.ListField()
|
||||
servings = CustomDecimalField()
|
||||
@@ -1607,7 +1480,7 @@ class InviteLinkSerializer(WritableNestedModelSerializer):
|
||||
fields = (
|
||||
'id', 'uuid', 'email', 'group', 'valid_until', 'used_by', 'reusable', 'internal_note', 'created_by',
|
||||
'created_at',)
|
||||
read_only_fields = ('id', 'uuid', 'used_by', 'created_by', 'created_at',)
|
||||
read_only_fields = ('id', 'uuid', 'created_by', 'created_at',)
|
||||
|
||||
|
||||
# CORS, REST and Scopes aren't currently working
|
||||
@@ -1669,6 +1542,7 @@ class ServerSettingsSerializer(serializers.Serializer):
|
||||
# TODO add all other relevant settings including path/url related ones?
|
||||
shopping_min_autosync_interval = serializers.CharField()
|
||||
enable_pdf_export = serializers.BooleanField()
|
||||
enable_ai_import = serializers.BooleanField()
|
||||
disable_external_connectors = serializers.BooleanField()
|
||||
terms_url = serializers.CharField()
|
||||
privacy_url = serializers.CharField()
|
||||
@@ -1892,10 +1766,8 @@ class RecipeFromSourceResponseSerializer(serializers.Serializer):
|
||||
|
||||
|
||||
class AiImportSerializer(serializers.Serializer):
|
||||
ai_provider_id = serializers.IntegerField()
|
||||
file = serializers.FileField(allow_null=True)
|
||||
text = serializers.CharField(allow_null=True, allow_blank=True)
|
||||
recipe_id = serializers.CharField(allow_null=True, allow_blank=True)
|
||||
|
||||
|
||||
class ExportRequestSerializer(serializers.Serializer):
|
||||
|
||||
@@ -41,6 +41,15 @@
|
||||
<script src="{% static 'js/popper.min.js' %}"></script>
|
||||
<script src="{% static 'js/bootstrap.min.js' %}"></script>
|
||||
|
||||
<!-- Select2 for use with django autocomplete light -->
|
||||
<link href="{% static 'css/select2.min.css' %}" rel="stylesheet"/>
|
||||
<script src="{% static 'js/select2.min.js' %}"></script>
|
||||
|
||||
<!-- Bootstrap theme for select2 -->
|
||||
<link rel="stylesheet" href="{% static 'css/select2-bootstrap.css' %}"/>
|
||||
|
||||
<link rel="stylesheet" href="{% static 'themes/select2-bootstrap-theme.css' %}"/>
|
||||
|
||||
<!-- Fontawesome icons -->
|
||||
<link rel="stylesheet" href="{% static "fontawesome/fontawesome_all.min.css" %}">
|
||||
|
||||
|
||||
@@ -37,20 +37,8 @@
|
||||
<link id="id_custom_space_css" href="{{ theme_values.custom_theme }}" rel="stylesheet">
|
||||
{% endif %}
|
||||
|
||||
<style>
|
||||
{% if request.user.userpreference.theme == 'TANDOOR_DARK' %}
|
||||
/* vueform/multiselect */
|
||||
/* when append to body is true the multiselects dropdown does not recognize the .v-theme--dark condition and renders a white background otherwise */
|
||||
|
||||
.multiselect-dropdown, .multiselect-options, .multiselect-option {
|
||||
background: #212121 !important;
|
||||
}
|
||||
{% endif %}
|
||||
</style>
|
||||
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div id="app"></div>
|
||||
|
||||
{% vite_hmr_client %}
|
||||
@@ -60,16 +48,16 @@
|
||||
localStorage.setItem('BASE_PATH', "{% base_path request 'base' %}")
|
||||
|
||||
|
||||
window.addEventListener("load", () => {
|
||||
if ("serviceWorker" in navigator) {
|
||||
navigator.serviceWorker.register("{% url 'service_worker' %}", {scope: "{% base_path request 'base' %}" + '/'}).then(function (reg) {
|
||||
}).catch(function (err) {
|
||||
console.warn('Error whilst registering service worker', err);
|
||||
});
|
||||
} else {
|
||||
console.warn('service worker not in navigator');
|
||||
}
|
||||
});
|
||||
{#window.addEventListener("load", () => {#}
|
||||
{# if ("serviceWorker" in navigator) {#}
|
||||
{# navigator.serviceWorker.register("{% url 'service_worker' %}", {scope: "{% base_path request 'base' %}" + '/'}).then(function (reg) {#}
|
||||
{# }).catch(function (err) {#}
|
||||
{# console.warn('Error whilst registering service worker', err);#}
|
||||
{# });#}
|
||||
{# } else {#}
|
||||
{# console.warn('service worker not in navigator');#}
|
||||
{# }#}
|
||||
{#});#}
|
||||
|
||||
|
||||
</script>
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -51,6 +51,11 @@
|
||||
{# {% endif %}#}
|
||||
<p class="card-text"><small
|
||||
class="text-muted">{% trans 'Owner' %}: {{ us.space.created_by }}</small>
|
||||
{% if us.space.created_by != us.user %}
|
||||
<p class="card-text"><small
|
||||
class="text-muted"><a
|
||||
href="{% url 'delete_user_space' us.pk %}">{% trans 'Leave Space' %}</a></small>
|
||||
{% endif %}
|
||||
<!--TODO add direct link to management page -->
|
||||
</p>
|
||||
</div>
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
<div class="container">
|
||||
|
||||
|
||||
<h1>{% trans 'System' %}</h1>
|
||||
<h1 >{% trans 'System' %}</h1>
|
||||
{% blocktrans %}
|
||||
Tandoor Recipes is an open source free software application. It can be found on
|
||||
<a href="https://github.com/TandoorRecipes/recipes">GitHub</a>.
|
||||
@@ -53,17 +53,6 @@
|
||||
{% endblocktrans %}
|
||||
{% endif %}
|
||||
|
||||
<h3 class="mt-5">{% trans 'Plugins' %}</h3>
|
||||
Clone the plugin using git into the <code>recipe/plugin/</code> folder (Docker mount <code>/opt/recipe/recipes/plugins</code> to the host system and clone into it).
|
||||
<table class="table table-bordered">
|
||||
{% for p in plugins %}
|
||||
<tr>
|
||||
<td><a href="{{ p.github }}">{{ p.name }}</a> <br/>{{ p.base_path }}</td>
|
||||
<td><a href="{% url 'view_plugin_update' %}?module={{ p.module }}" class="btn btn-primary">Git Pull</a></td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</table>
|
||||
|
||||
<h4 class="mt-3">{% trans 'Media Serving' %} <span class="badge text-bg-{% if gunicorn_media %}danger{% else %}success{% endif %}">{% if gunicorn_media %}
|
||||
{% trans 'Warning' %}{% else %}{% trans 'Ok' %}{% endif %}</span></h4>
|
||||
{% if gunicorn_media %}
|
||||
@@ -224,14 +213,6 @@
|
||||
{% endfor %}
|
||||
</table>
|
||||
{% endif %}
|
||||
<h4 class="mt-3">Cache Test</h4>
|
||||
On first load this should be None, on second load it should be the time of the first load. Expiration is set to 10 seconds after that it should be None again. <br/>
|
||||
{% if cache_response %}
|
||||
|
||||
<span class="badge text-bg-success">Cache entry from {{ cache_response|date:" d m Y H:i:s" }}</span>
|
||||
{% else %}
|
||||
<span class="badge text-bg-info">No cache entry before load</span>
|
||||
{% endif %}
|
||||
<h4 class="mt-3">Debug</h4>
|
||||
<textarea class="form-control" rows="20">
|
||||
Gunicorn Media: {{ gunicorn_media }}
|
||||
|
||||
@@ -25,6 +25,10 @@ def get_theming_values(request):
|
||||
space = Space.objects.filter(id=FORCE_THEME_FROM_SPACE).first()
|
||||
|
||||
themes = {
|
||||
UserPreference.BOOTSTRAP: 'themes/bootstrap.min.css',
|
||||
UserPreference.FLATLY: 'themes/flatly.min.css',
|
||||
UserPreference.DARKLY: 'themes/darkly.min.css',
|
||||
UserPreference.SUPERHERO: 'themes/superhero.min.css',
|
||||
UserPreference.TANDOOR: 'themes/tandoor.min.css',
|
||||
UserPreference.TANDOOR_DARK: 'themes/tandoor_dark.min.css',
|
||||
}
|
||||
|
||||
@@ -1,168 +0,0 @@
|
||||
import json
|
||||
|
||||
import pytest
|
||||
from django.urls import reverse
|
||||
from django_scopes import scopes_disabled
|
||||
|
||||
from cookbook.models import MealType, PropertyType, AiProvider
|
||||
|
||||
LIST_URL = 'api:aiprovider-list'
|
||||
DETAIL_URL = 'api:aiprovider-detail'
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def obj_1(space_1, a1_s1):
|
||||
return AiProvider.objects.get_or_create(name='test_1', space=space_1)[0]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def obj_2(space_1, a1_s1):
|
||||
return AiProvider.objects.get_or_create(name='test_2', space=None)[0]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("arg", [
|
||||
['a_u', 403],
|
||||
['g1_s1', 403],
|
||||
['u1_s1', 200],
|
||||
['a1_s1', 200],
|
||||
])
|
||||
def test_list_permission(arg, request):
|
||||
c = request.getfixturevalue(arg[0])
|
||||
assert c.get(reverse(LIST_URL)).status_code == arg[1]
|
||||
|
||||
|
||||
def test_list_space(obj_1, obj_2, u1_s1, u1_s2, space_2):
|
||||
assert json.loads(u1_s1.get(reverse(LIST_URL)).content)['count'] == 2
|
||||
assert json.loads(u1_s2.get(reverse(LIST_URL)).content)['count'] == 1
|
||||
|
||||
obj_1.space = space_2
|
||||
obj_1.save()
|
||||
|
||||
assert json.loads(u1_s1.get(reverse(LIST_URL)).content)['count'] == 1
|
||||
assert json.loads(u1_s2.get(reverse(LIST_URL)).content)['count'] == 2
|
||||
|
||||
obj_1.space = None
|
||||
obj_1.save()
|
||||
|
||||
assert json.loads(u1_s1.get(reverse(LIST_URL)).content)['count'] == 2
|
||||
assert json.loads(u1_s2.get(reverse(LIST_URL)).content)['count'] == 2
|
||||
|
||||
|
||||
@pytest.mark.parametrize("arg", [
|
||||
['a_u', 403],
|
||||
['g1_s1', 403],
|
||||
['u1_s1', 403],
|
||||
['a1_s1', 200],
|
||||
['g1_s2', 403],
|
||||
['u1_s2', 403],
|
||||
['a1_s2', 404],
|
||||
])
|
||||
def test_update(arg, request, obj_1):
|
||||
c = request.getfixturevalue(arg[0])
|
||||
r = c.patch(
|
||||
reverse(
|
||||
DETAIL_URL,
|
||||
args={obj_1.id}
|
||||
),
|
||||
{'name': 'new'},
|
||||
content_type='application/json'
|
||||
)
|
||||
response = json.loads(r.content)
|
||||
assert r.status_code == arg[1]
|
||||
if r.status_code == 200:
|
||||
assert response['name'] == 'new'
|
||||
|
||||
|
||||
@pytest.mark.parametrize("arg", [
|
||||
['a_u', 403],
|
||||
['g1_s1', 403],
|
||||
['u1_s1', 403],
|
||||
['a1_s1', 403],
|
||||
['g1_s2', 403],
|
||||
['u1_s2', 403],
|
||||
['a1_s2', 403],
|
||||
['s1_s1', 200],
|
||||
])
|
||||
def test_update_global(arg, request, obj_2):
|
||||
c = request.getfixturevalue(arg[0])
|
||||
r = c.patch(
|
||||
reverse(
|
||||
DETAIL_URL,
|
||||
args={obj_2.id}
|
||||
),
|
||||
{'name': 'new'},
|
||||
content_type='application/json'
|
||||
)
|
||||
response = json.loads(r.content)
|
||||
assert r.status_code == arg[1]
|
||||
if r.status_code == 200:
|
||||
assert response['name'] == 'new'
|
||||
|
||||
|
||||
@pytest.mark.parametrize("arg", [
|
||||
['a_u', 403],
|
||||
['g1_s1', 403],
|
||||
['u1_s1', 403],
|
||||
['a1_s1', 201],
|
||||
])
|
||||
def test_add(arg, request, u1_s2):
|
||||
c = request.getfixturevalue(arg[0])
|
||||
r = c.post(
|
||||
reverse(LIST_URL),
|
||||
{'name': 'test', 'api_key': 'test', 'model_name': 'test'},
|
||||
content_type='application/json'
|
||||
)
|
||||
response = json.loads(r.content)
|
||||
assert r.status_code == arg[1]
|
||||
if r.status_code == 201:
|
||||
assert response['name'] == 'test'
|
||||
r = c.get(reverse(DETAIL_URL, args={response['id']}))
|
||||
assert r.status_code == 200
|
||||
r = u1_s2.get(reverse(DETAIL_URL, args={response['id']}))
|
||||
assert r.status_code == 404
|
||||
|
||||
|
||||
def test_delete(a1_s1, a1_s2, obj_1):
|
||||
# admins cannot delete foreign space providers
|
||||
r = a1_s2.delete(
|
||||
reverse(
|
||||
DETAIL_URL,
|
||||
args={obj_1.id}
|
||||
)
|
||||
)
|
||||
assert r.status_code == 404
|
||||
|
||||
# admins can delete their space providers
|
||||
r = a1_s1.delete(
|
||||
reverse(
|
||||
DETAIL_URL,
|
||||
args={obj_1.id}
|
||||
)
|
||||
)
|
||||
|
||||
assert r.status_code == 204
|
||||
with scopes_disabled():
|
||||
assert AiProvider.objects.count() == 0
|
||||
|
||||
|
||||
def test_delete_global(a1_s1, s1_s1, obj_2):
|
||||
# admins cant delete global providers
|
||||
r = a1_s1.delete(
|
||||
reverse(
|
||||
DETAIL_URL,
|
||||
args={obj_2.id}
|
||||
)
|
||||
)
|
||||
assert r.status_code == 403
|
||||
|
||||
# superusers can delete global providers
|
||||
r = s1_s1.delete(
|
||||
reverse(
|
||||
DETAIL_URL,
|
||||
args={obj_2.id}
|
||||
)
|
||||
)
|
||||
|
||||
assert r.status_code == 204
|
||||
with scopes_disabled():
|
||||
assert AiProvider.objects.count() == 0
|
||||
@@ -7,7 +7,6 @@ from django.urls import reverse
|
||||
from django_scopes import scopes_disabled
|
||||
|
||||
from cookbook.models import UserSpace
|
||||
from recipes import settings
|
||||
|
||||
LIST_URL = 'api:space-list'
|
||||
DETAIL_URL = 'api:space-detail'
|
||||
@@ -46,6 +45,7 @@ def test_list_multiple(u1_s1, space_1, space_2):
|
||||
assert u1_response['id'] == space_1.id
|
||||
|
||||
|
||||
|
||||
@pytest.mark.parametrize("arg", [
|
||||
['a_u', 403],
|
||||
['g1_s1', 403],
|
||||
@@ -70,9 +70,9 @@ def test_update(arg, request, space_1, a1_s1):
|
||||
|
||||
@pytest.mark.parametrize("arg", [
|
||||
['a_u', 403],
|
||||
['g1_s1', 201],
|
||||
['u1_s1', 201],
|
||||
['a1_s1', 201],
|
||||
['g1_s1', 403],
|
||||
['u1_s1', 403],
|
||||
['a1_s1', 405],
|
||||
])
|
||||
def test_add(arg, request, u1_s2):
|
||||
c = request.getfixturevalue(arg[0])
|
||||
@@ -90,59 +90,3 @@ def test_delete(u1_s1, u1_s2, a1_s1, space_1):
|
||||
# event the space owner cannot delete his space over the api (this might change later but for now it's only available in the UI)
|
||||
r = a1_s1.delete(reverse(DETAIL_URL, args={space_1.id}))
|
||||
assert r.status_code == 405
|
||||
|
||||
|
||||
def test_superuser_parameters(space_1, a1_s1, s1_s1):
|
||||
# ------- test as normal user -------
|
||||
response = a1_s1.post(reverse(LIST_URL), {'name': 'test', 'ai_enabled': not settings.SPACE_AI_ENABLED, 'ai_credits_monthly': settings.SPACE_AI_CREDITS_MONTHLY + 100, 'ai_credits_balance': 100},
|
||||
content_type='application/json')
|
||||
|
||||
assert response.status_code == 201
|
||||
response = json.loads(response.content)
|
||||
assert response['ai_enabled'] == settings.SPACE_AI_ENABLED
|
||||
assert response['ai_credits_monthly'] == settings.SPACE_AI_CREDITS_MONTHLY
|
||||
assert response['ai_credits_balance'] == 0
|
||||
|
||||
space_1.created_by = auth.get_user(a1_s1)
|
||||
space_1.ai_enabled = False
|
||||
space_1.ai_credits_monthly = 0
|
||||
space_1.ai_credits_balance = 0
|
||||
space_1.save()
|
||||
|
||||
response = a1_s1.patch(reverse(DETAIL_URL, args={space_1.id}), {'ai_enabled': True, 'ai_credits_monthly': 100, 'ai_credits_balance': 100},
|
||||
content_type='application/json')
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
space_1.refresh_from_db()
|
||||
assert space_1.ai_enabled == False
|
||||
assert space_1.ai_credits_monthly == 0
|
||||
assert space_1.ai_credits_balance == 0
|
||||
|
||||
# ------- test as superuser -------
|
||||
|
||||
response = s1_s1.post(reverse(LIST_URL),
|
||||
{'name': 'test', 'ai_enabled': not settings.SPACE_AI_ENABLED, 'ai_credits_monthly': settings.SPACE_AI_CREDITS_MONTHLY + 100, 'ai_credits_balance': 100},
|
||||
content_type='application/json')
|
||||
|
||||
assert response.status_code == 201
|
||||
response = json.loads(response.content)
|
||||
assert response['ai_enabled'] == settings.SPACE_AI_ENABLED
|
||||
assert response['ai_credits_monthly'] == settings.SPACE_AI_CREDITS_MONTHLY
|
||||
assert response['ai_credits_balance'] == 0
|
||||
|
||||
space_1.created_by = auth.get_user(s1_s1)
|
||||
space_1.ai_enabled = False
|
||||
space_1.ai_credits_monthly = 0
|
||||
space_1.ai_credits_balance = 0
|
||||
space_1.save()
|
||||
|
||||
response = s1_s1.patch(reverse(DETAIL_URL, args={space_1.id}), {'ai_enabled': True, 'ai_credits_monthly': 100, 'ai_credits_balance': 100},
|
||||
content_type='application/json')
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
space_1.refresh_from_db()
|
||||
assert space_1.ai_enabled == True
|
||||
assert space_1.ai_credits_monthly == 100
|
||||
assert space_1.ai_credits_balance == 100
|
||||
|
||||
@@ -5,8 +5,6 @@ from django.contrib import auth
|
||||
from django.urls import reverse
|
||||
from django_scopes import scopes_disabled
|
||||
|
||||
from cookbook.models import UserSpace
|
||||
|
||||
LIST_URL = 'api:userspace-list'
|
||||
DETAIL_URL = 'api:userspace-detail'
|
||||
|
||||
@@ -15,10 +13,10 @@ DETAIL_URL = 'api:userspace-detail'
|
||||
['a_u', 403, 0],
|
||||
['g1_s1', 200, 1], # sees only own user space
|
||||
['u1_s1', 200, 1],
|
||||
['a1_s1', 200, 4], # admins can see all other members
|
||||
['a2_s1', 200, 4],
|
||||
['a1_s1', 200, 3], # sees user space of all users in space
|
||||
['a2_s1', 200, 1],
|
||||
])
|
||||
def test_list_permission(arg, request, space_1, g1_s1, u1_s1, a1_s1, a2_s1):
|
||||
def test_list_permission(arg, request, space_1, g1_s1, u1_s1, a1_s1):
|
||||
space_1.created_by = auth.get_user(a1_s1)
|
||||
space_1.save()
|
||||
|
||||
@@ -29,18 +27,6 @@ def test_list_permission(arg, request, space_1, g1_s1, u1_s1, a1_s1, a2_s1):
|
||||
assert len(json.loads(result.content)['results']) == arg[2]
|
||||
|
||||
|
||||
def test_list_all_personal(space_2, u1_s1):
|
||||
result = u1_s1.get(reverse('api:userspace-all-personal'))
|
||||
assert result.status_code == 200
|
||||
assert len(json.loads(result.content)) == 1
|
||||
|
||||
UserSpace.objects.create(user=auth.get_user(u1_s1), space=space_2)
|
||||
|
||||
result = u1_s1.get(reverse('api:userspace-all-personal'))
|
||||
assert result.status_code == 200
|
||||
assert len(json.loads(result.content)) == 2
|
||||
|
||||
|
||||
@pytest.mark.parametrize("arg", [
|
||||
['a_u', 403],
|
||||
['g1_s1', 403],
|
||||
|
||||
@@ -298,11 +298,3 @@ def a1_s2(client, space_2):
|
||||
@pytest.fixture()
|
||||
def a2_s2(client, space_2):
|
||||
return create_user(client, space_2, group='admin')
|
||||
|
||||
@pytest.fixture()
|
||||
def s1_s1(client, space_1):
|
||||
client = create_user(client, space_1, group='admin')
|
||||
user = auth.get_user(client)
|
||||
user.is_superuser = True
|
||||
user.save()
|
||||
return client
|
||||
|
||||
1
cookbook/tests/other/docs/reports/tests/pytest.xml
Normal file
1
cookbook/tests/other/docs/reports/tests/pytest.xml
Normal file
@@ -0,0 +1 @@
|
||||
<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="2" time="38.353" timestamp="2025-03-31T09:44:57.025358" hostname="vabene-pc"><testcase classname="cookbook.tests.other.test_recipe_full_text_search" name="test_search_count[found_recipe0-rating]" time="29.368" /><testcase classname="cookbook.tests.other.test_recipe_full_text_search" name="test_search_count[found_recipe1-timescooked]" time="29.371" /></testsuite></testsuites>
|
||||
770
cookbook/tests/other/docs/reports/tests/tests.html
Normal file
770
cookbook/tests/other/docs/reports/tests/tests.html
Normal file
@@ -0,0 +1,770 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8"/>
|
||||
<title id="head-title">tests.html</title>
|
||||
<link href="assets\style.css" rel="stylesheet" type="text/css"/>
|
||||
</head>
|
||||
<body>
|
||||
<h1 id="title">tests.html</h1>
|
||||
<p>Report generated on 31-Mar-2025 at 09:45:35 by <a href="https://pypi.python.org/pypi/pytest-html">pytest-html</a>
|
||||
v4.1.1</p>
|
||||
<div id="environment-header">
|
||||
<h2>Environment</h2>
|
||||
</div>
|
||||
<table id="environment"></table>
|
||||
<!-- TEMPLATES -->
|
||||
<template id="template_environment_row">
|
||||
<tr>
|
||||
<td></td>
|
||||
<td></td>
|
||||
</tr>
|
||||
</template>
|
||||
<template id="template_results-table__body--empty">
|
||||
<tbody class="results-table-row">
|
||||
<tr id="not-found-message">
|
||||
<td colspan="4">No results found. Check the filters.</th>
|
||||
</tr>
|
||||
</template>
|
||||
<template id="template_results-table__tbody">
|
||||
<tbody class="results-table-row">
|
||||
<tr class="collapsible">
|
||||
</tr>
|
||||
<tr class="extras-row">
|
||||
<td class="extra" colspan="4">
|
||||
<div class="extraHTML"></div>
|
||||
<div class="media">
|
||||
<div class="media-container">
|
||||
<div class="media-container__nav--left"><</div>
|
||||
<div class="media-container__viewport">
|
||||
<img src="" />
|
||||
<video controls>
|
||||
<source src="" type="video/mp4">
|
||||
</video>
|
||||
</div>
|
||||
<div class="media-container__nav--right">></div>
|
||||
</div>
|
||||
<div class="media__name"></div>
|
||||
<div class="media__counter"></div>
|
||||
</div>
|
||||
<div class="logwrapper">
|
||||
<div class="logexpander"></div>
|
||||
<div class="log"></div>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</template>
|
||||
<!-- END TEMPLATES -->
|
||||
<div class="summary">
|
||||
<div class="summary__data">
|
||||
<h2>Summary</h2>
|
||||
<div class="additional-summary prefix">
|
||||
</div>
|
||||
<p class="run-count">2 tests took 00:00:59.</p>
|
||||
<p class="filter">(Un)check the boxes to filter the results.</p>
|
||||
<div class="summary__reload">
|
||||
<div class="summary__reload__button hidden" onclick="location.reload()">
|
||||
<div>There are still tests running. <br />Reload this page to get the latest results!</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="summary__spacer"></div>
|
||||
<div class="controls">
|
||||
<div class="filters">
|
||||
<input checked="true" class="filter" name="filter_checkbox" type="checkbox" data-test-result="failed" disabled/>
|
||||
<span class="failed">0 Failed,</span>
|
||||
<input checked="true" class="filter" name="filter_checkbox" type="checkbox" data-test-result="passed" />
|
||||
<span class="passed">2 Passed,</span>
|
||||
<input checked="true" class="filter" name="filter_checkbox" type="checkbox" data-test-result="skipped" disabled/>
|
||||
<span class="skipped">0 Skipped,</span>
|
||||
<input checked="true" class="filter" name="filter_checkbox" type="checkbox" data-test-result="xfailed" disabled/>
|
||||
<span class="xfailed">0 Expected failures,</span>
|
||||
<input checked="true" class="filter" name="filter_checkbox" type="checkbox" data-test-result="xpassed" disabled/>
|
||||
<span class="xpassed">0 Unexpected passes,</span>
|
||||
<input checked="true" class="filter" name="filter_checkbox" type="checkbox" data-test-result="error" disabled/>
|
||||
<span class="error">0 Errors,</span>
|
||||
<input checked="true" class="filter" name="filter_checkbox" type="checkbox" data-test-result="rerun" disabled/>
|
||||
<span class="rerun">0 Reruns</span>
|
||||
</div>
|
||||
<div class="collapse">
|
||||
<button id="show_all_details">Show all details</button> / <button id="hide_all_details">Hide all details</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="additional-summary summary">
|
||||
</div>
|
||||
<div class="additional-summary postfix">
|
||||
</div>
|
||||
</div>
|
||||
<table id="results-table">
|
||||
<thead id="results-table-head">
|
||||
<tr>
|
||||
<th class="sortable" data-column-type="result">Result</th>
|
||||
<th class="sortable" data-column-type="testId">Test</th>
|
||||
<th class="sortable" data-column-type="duration">Duration</th>
|
||||
<th>Links</th>
|
||||
</tr>
|
||||
</thead>
|
||||
</table>
|
||||
</body>
|
||||
<footer>
|
||||
<div id="data-container" data-jsonblob="{"environment": {"Python": "3.12.9", "Platform": "Windows-11-10.0.26100-SP0", "Packages": {"pytest": "8.0.0", "pluggy": "1.4.0"}, "Plugins": {"anyio": "4.8.0", "Faker": "23.2.1", "asyncio": "0.23.5", "cov": "5.0.0", "django": "4.9.0", "factoryboy": "2.7.0", "html": "4.1.1", "metadata": "3.1.1", "xdist": "3.6.1"}}, "tests": {"cookbook/tests/other/test_recipe_full_text_search.py::test_search_count[found_recipe0-rating]": [{"extras": [], "result": "Passed", "testId": "cookbook/tests/other/test_recipe_full_text_search.py::test_search_count[found_recipe0-rating]", "duration": "00:00:29", "resultsTableRow": ["<td class=\"col-result\">Passed</td>", "<td class=\"col-testId\">cookbook/tests/other/test_recipe_full_text_search.py::test_search_count[found_recipe0-rating]</td>", "<td class=\"col-duration\">00:00:29</td>", "<td class=\"col-links\"></td>"], "log": "[gw1] win32 -- Python 3.12.9 C:\\Users\\vaben\\Documents\\Development\\Django\\recipes\\venv\\Scripts\\python.exe\n\n---------------------------- Captured stdout setup -----------------------------\nTransforming nutrition information, this might take a while on large databases\nmigrating shopping list recipe space attribute, this might take a while ...\n"}], "cookbook/tests/other/test_recipe_full_text_search.py::test_search_count[found_recipe1-timescooked]": [{"extras": [], "result": "Passed", "testId": "cookbook/tests/other/test_recipe_full_text_search.py::test_search_count[found_recipe1-timescooked]", "duration": "00:00:29", "resultsTableRow": ["<td class=\"col-result\">Passed</td>", "<td class=\"col-testId\">cookbook/tests/other/test_recipe_full_text_search.py::test_search_count[found_recipe1-timescooked]</td>", "<td class=\"col-duration\">00:00:29</td>", "<td class=\"col-links\"></td>"], "log": "[gw0] win32 -- Python 3.12.9 C:\\Users\\vaben\\Documents\\Development\\Django\\recipes\\venv\\Scripts\\python.exe\n\n---------------------------- Captured stdout setup 
-----------------------------\nTransforming nutrition information, this might take a while on large databases\nmigrating shopping list recipe space attribute, this might take a while ...\n"}]}, "renderCollapsed": ["passed"], "initialSort": "result", "title": "tests.html"}"></div>
|
||||
<script>
|
||||
(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(require,module,exports){
|
||||
const { getCollapsedCategory, setCollapsedIds } = require('./storage.js')
|
||||
|
||||
class DataManager {
|
||||
setManager(data) {
|
||||
const collapsedCategories = [...getCollapsedCategory(data.renderCollapsed)]
|
||||
const collapsedIds = []
|
||||
const tests = Object.values(data.tests).flat().map((test, index) => {
|
||||
const collapsed = collapsedCategories.includes(test.result.toLowerCase())
|
||||
const id = `test_${index}`
|
||||
if (collapsed) {
|
||||
collapsedIds.push(id)
|
||||
}
|
||||
return {
|
||||
...test,
|
||||
id,
|
||||
collapsed,
|
||||
}
|
||||
})
|
||||
const dataBlob = { ...data, tests }
|
||||
this.data = { ...dataBlob }
|
||||
this.renderData = { ...dataBlob }
|
||||
setCollapsedIds(collapsedIds)
|
||||
}
|
||||
|
||||
get allData() {
|
||||
return { ...this.data }
|
||||
}
|
||||
|
||||
resetRender() {
|
||||
this.renderData = { ...this.data }
|
||||
}
|
||||
|
||||
setRender(data) {
|
||||
this.renderData.tests = [...data]
|
||||
}
|
||||
|
||||
toggleCollapsedItem(id) {
|
||||
this.renderData.tests = this.renderData.tests.map((test) =>
|
||||
test.id === id ? { ...test, collapsed: !test.collapsed } : test,
|
||||
)
|
||||
}
|
||||
|
||||
set allCollapsed(collapsed) {
|
||||
this.renderData = { ...this.renderData, tests: [...this.renderData.tests.map((test) => (
|
||||
{ ...test, collapsed }
|
||||
))] }
|
||||
}
|
||||
|
||||
get testSubset() {
|
||||
return [...this.renderData.tests]
|
||||
}
|
||||
|
||||
get environment() {
|
||||
return this.renderData.environment
|
||||
}
|
||||
|
||||
get initialSort() {
|
||||
return this.data.initialSort
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
manager: new DataManager(),
|
||||
}
|
||||
|
||||
},{"./storage.js":8}],2:[function(require,module,exports){
|
||||
const mediaViewer = require('./mediaviewer.js')
|
||||
const templateEnvRow = document.getElementById('template_environment_row')
|
||||
const templateResult = document.getElementById('template_results-table__tbody')
|
||||
|
||||
function htmlToElements(html) {
|
||||
const temp = document.createElement('template')
|
||||
temp.innerHTML = html
|
||||
return temp.content.childNodes
|
||||
}
|
||||
|
||||
const find = (selector, elem) => {
|
||||
if (!elem) {
|
||||
elem = document
|
||||
}
|
||||
return elem.querySelector(selector)
|
||||
}
|
||||
|
||||
const findAll = (selector, elem) => {
|
||||
if (!elem) {
|
||||
elem = document
|
||||
}
|
||||
return [...elem.querySelectorAll(selector)]
|
||||
}
|
||||
|
||||
const dom = {
|
||||
getStaticRow: (key, value) => {
|
||||
const envRow = templateEnvRow.content.cloneNode(true)
|
||||
const isObj = typeof value === 'object' && value !== null
|
||||
const values = isObj ? Object.keys(value).map((k) => `${k}: ${value[k]}`) : null
|
||||
|
||||
const valuesElement = htmlToElements(
|
||||
values ? `<ul>${values.map((val) => `<li>${val}</li>`).join('')}<ul>` : `<div>${value}</div>`)[0]
|
||||
const td = findAll('td', envRow)
|
||||
td[0].textContent = key
|
||||
td[1].appendChild(valuesElement)
|
||||
|
||||
return envRow
|
||||
},
|
||||
getResultTBody: ({ testId, id, log, extras, resultsTableRow, tableHtml, result, collapsed }) => {
|
||||
const resultBody = templateResult.content.cloneNode(true)
|
||||
resultBody.querySelector('tbody').classList.add(result.toLowerCase())
|
||||
resultBody.querySelector('tbody').id = testId
|
||||
resultBody.querySelector('.collapsible').dataset.id = id
|
||||
|
||||
resultsTableRow.forEach((html) => {
|
||||
const t = document.createElement('template')
|
||||
t.innerHTML = html
|
||||
resultBody.querySelector('.collapsible').appendChild(t.content)
|
||||
})
|
||||
|
||||
if (log) {
|
||||
// Wrap lines starting with "E" with span.error to color those lines red
|
||||
const wrappedLog = log.replace(/^E.*$/gm, (match) => `<span class="error">${match}</span>`)
|
||||
resultBody.querySelector('.log').innerHTML = wrappedLog
|
||||
} else {
|
||||
resultBody.querySelector('.log').remove()
|
||||
}
|
||||
|
||||
if (collapsed) {
|
||||
resultBody.querySelector('.collapsible > td')?.classList.add('collapsed')
|
||||
resultBody.querySelector('.extras-row').classList.add('hidden')
|
||||
} else {
|
||||
resultBody.querySelector('.collapsible > td')?.classList.remove('collapsed')
|
||||
}
|
||||
|
||||
const media = []
|
||||
extras?.forEach(({ name, format_type, content }) => {
|
||||
if (['image', 'video'].includes(format_type)) {
|
||||
media.push({ path: content, name, format_type })
|
||||
}
|
||||
|
||||
if (format_type === 'html') {
|
||||
resultBody.querySelector('.extraHTML').insertAdjacentHTML('beforeend', `<div>${content}</div>`)
|
||||
}
|
||||
})
|
||||
mediaViewer.setup(resultBody, media)
|
||||
|
||||
// Add custom html from the pytest_html_results_table_html hook
|
||||
tableHtml?.forEach((item) => {
|
||||
resultBody.querySelector('td[class="extra"]').insertAdjacentHTML('beforeend', item)
|
||||
})
|
||||
|
||||
return resultBody
|
||||
},
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
dom,
|
||||
htmlToElements,
|
||||
find,
|
||||
findAll,
|
||||
}
|
||||
|
||||
},{"./mediaviewer.js":6}],3:[function(require,module,exports){
|
||||
const { manager } = require('./datamanager.js')
|
||||
const { doSort } = require('./sort.js')
|
||||
const storageModule = require('./storage.js')
|
||||
|
||||
const getFilteredSubSet = (filter) =>
|
||||
manager.allData.tests.filter(({ result }) => filter.includes(result.toLowerCase()))
|
||||
|
||||
const doInitFilter = () => {
|
||||
const currentFilter = storageModule.getVisible()
|
||||
const filteredSubset = getFilteredSubSet(currentFilter)
|
||||
manager.setRender(filteredSubset)
|
||||
}
|
||||
|
||||
const doFilter = (type, show) => {
|
||||
if (show) {
|
||||
storageModule.showCategory(type)
|
||||
} else {
|
||||
storageModule.hideCategory(type)
|
||||
}
|
||||
|
||||
const currentFilter = storageModule.getVisible()
|
||||
const filteredSubset = getFilteredSubSet(currentFilter)
|
||||
manager.setRender(filteredSubset)
|
||||
|
||||
const sortColumn = storageModule.getSort()
|
||||
doSort(sortColumn, true)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
doFilter,
|
||||
doInitFilter,
|
||||
}
|
||||
|
||||
},{"./datamanager.js":1,"./sort.js":7,"./storage.js":8}],4:[function(require,module,exports){
|
||||
const { redraw, bindEvents, renderStatic } = require('./main.js')
|
||||
const { doInitFilter } = require('./filter.js')
|
||||
const { doInitSort } = require('./sort.js')
|
||||
const { manager } = require('./datamanager.js')
|
||||
const data = JSON.parse(document.getElementById('data-container').dataset.jsonblob)
|
||||
|
||||
function init() {
|
||||
manager.setManager(data)
|
||||
doInitFilter()
|
||||
doInitSort()
|
||||
renderStatic()
|
||||
redraw()
|
||||
bindEvents()
|
||||
}
|
||||
|
||||
init()
|
||||
|
||||
},{"./datamanager.js":1,"./filter.js":3,"./main.js":5,"./sort.js":7}],5:[function(require,module,exports){
|
||||
const { dom, find, findAll } = require('./dom.js')
|
||||
const { manager } = require('./datamanager.js')
|
||||
const { doSort } = require('./sort.js')
|
||||
const { doFilter } = require('./filter.js')
|
||||
const {
|
||||
getVisible,
|
||||
getCollapsedIds,
|
||||
setCollapsedIds,
|
||||
getSort,
|
||||
getSortDirection,
|
||||
possibleFilters,
|
||||
} = require('./storage.js')
|
||||
|
||||
const removeChildren = (node) => {
|
||||
while (node.firstChild) {
|
||||
node.removeChild(node.firstChild)
|
||||
}
|
||||
}
|
||||
|
||||
const renderStatic = () => {
|
||||
const renderEnvironmentTable = () => {
|
||||
const environment = manager.environment
|
||||
const rows = Object.keys(environment).map((key) => dom.getStaticRow(key, environment[key]))
|
||||
const table = document.getElementById('environment')
|
||||
removeChildren(table)
|
||||
rows.forEach((row) => table.appendChild(row))
|
||||
}
|
||||
renderEnvironmentTable()
|
||||
}
|
||||
|
||||
const addItemToggleListener = (elem) => {
|
||||
elem.addEventListener('click', ({ target }) => {
|
||||
const id = target.parentElement.dataset.id
|
||||
manager.toggleCollapsedItem(id)
|
||||
|
||||
const collapsedIds = getCollapsedIds()
|
||||
if (collapsedIds.includes(id)) {
|
||||
const updated = collapsedIds.filter((item) => item !== id)
|
||||
setCollapsedIds(updated)
|
||||
} else {
|
||||
collapsedIds.push(id)
|
||||
setCollapsedIds(collapsedIds)
|
||||
}
|
||||
redraw()
|
||||
})
|
||||
}
|
||||
|
||||
const renderContent = (tests) => {
|
||||
const sortAttr = getSort(manager.initialSort)
|
||||
const sortAsc = JSON.parse(getSortDirection())
|
||||
const rows = tests.map(dom.getResultTBody)
|
||||
const table = document.getElementById('results-table')
|
||||
const tableHeader = document.getElementById('results-table-head')
|
||||
|
||||
const newTable = document.createElement('table')
|
||||
newTable.id = 'results-table'
|
||||
|
||||
// remove all sorting classes and set the relevant
|
||||
findAll('.sortable', tableHeader).forEach((elem) => elem.classList.remove('asc', 'desc'))
|
||||
tableHeader.querySelector(`.sortable[data-column-type="${sortAttr}"]`)?.classList.add(sortAsc ? 'desc' : 'asc')
|
||||
newTable.appendChild(tableHeader)
|
||||
|
||||
if (!rows.length) {
|
||||
const emptyTable = document.getElementById('template_results-table__body--empty').content.cloneNode(true)
|
||||
newTable.appendChild(emptyTable)
|
||||
} else {
|
||||
rows.forEach((row) => {
|
||||
if (!!row) {
|
||||
findAll('.collapsible td:not(.col-links', row).forEach(addItemToggleListener)
|
||||
find('.logexpander', row).addEventListener('click',
|
||||
(evt) => evt.target.parentNode.classList.toggle('expanded'),
|
||||
)
|
||||
newTable.appendChild(row)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
table.replaceWith(newTable)
|
||||
}
|
||||
|
||||
const renderDerived = () => {
|
||||
const currentFilter = getVisible()
|
||||
possibleFilters.forEach((result) => {
|
||||
const input = document.querySelector(`input[data-test-result="${result}"]`)
|
||||
input.checked = currentFilter.includes(result)
|
||||
})
|
||||
}
|
||||
|
||||
const bindEvents = () => {
|
||||
const filterColumn = (evt) => {
|
||||
const { target: element } = evt
|
||||
const { testResult } = element.dataset
|
||||
|
||||
doFilter(testResult, element.checked)
|
||||
const collapsedIds = getCollapsedIds()
|
||||
const updated = manager.renderData.tests.map((test) => {
|
||||
return {
|
||||
...test,
|
||||
collapsed: collapsedIds.includes(test.id),
|
||||
}
|
||||
})
|
||||
manager.setRender(updated)
|
||||
redraw()
|
||||
}
|
||||
|
||||
const header = document.getElementById('environment-header')
|
||||
header.addEventListener('click', () => {
|
||||
const table = document.getElementById('environment')
|
||||
table.classList.toggle('hidden')
|
||||
header.classList.toggle('collapsed')
|
||||
})
|
||||
|
||||
findAll('input[name="filter_checkbox"]').forEach((elem) => {
|
||||
elem.addEventListener('click', filterColumn)
|
||||
})
|
||||
|
||||
findAll('.sortable').forEach((elem) => {
|
||||
elem.addEventListener('click', (evt) => {
|
||||
const { target: element } = evt
|
||||
const { columnType } = element.dataset
|
||||
doSort(columnType)
|
||||
redraw()
|
||||
})
|
||||
})
|
||||
|
||||
document.getElementById('show_all_details').addEventListener('click', () => {
|
||||
manager.allCollapsed = false
|
||||
setCollapsedIds([])
|
||||
redraw()
|
||||
})
|
||||
document.getElementById('hide_all_details').addEventListener('click', () => {
|
||||
manager.allCollapsed = true
|
||||
const allIds = manager.renderData.tests.map((test) => test.id)
|
||||
setCollapsedIds(allIds)
|
||||
redraw()
|
||||
})
|
||||
}
|
||||
|
||||
const redraw = () => {
|
||||
const { testSubset } = manager
|
||||
|
||||
renderContent(testSubset)
|
||||
renderDerived()
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
redraw,
|
||||
bindEvents,
|
||||
renderStatic,
|
||||
}
|
||||
|
||||
},{"./datamanager.js":1,"./dom.js":2,"./filter.js":3,"./sort.js":7,"./storage.js":8}],6:[function(require,module,exports){
|
||||
class MediaViewer {
|
||||
constructor(assets) {
|
||||
this.assets = assets
|
||||
this.index = 0
|
||||
}
|
||||
|
||||
nextActive() {
|
||||
this.index = this.index === this.assets.length - 1 ? 0 : this.index + 1
|
||||
return [this.activeFile, this.index]
|
||||
}
|
||||
|
||||
prevActive() {
|
||||
this.index = this.index === 0 ? this.assets.length - 1 : this.index -1
|
||||
return [this.activeFile, this.index]
|
||||
}
|
||||
|
||||
get currentIndex() {
|
||||
return this.index
|
||||
}
|
||||
|
||||
get activeFile() {
|
||||
return this.assets[this.index]
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
const setup = (resultBody, assets) => {
|
||||
if (!assets.length) {
|
||||
resultBody.querySelector('.media').classList.add('hidden')
|
||||
return
|
||||
}
|
||||
|
||||
const mediaViewer = new MediaViewer(assets)
|
||||
const container = resultBody.querySelector('.media-container')
|
||||
const leftArrow = resultBody.querySelector('.media-container__nav--left')
|
||||
const rightArrow = resultBody.querySelector('.media-container__nav--right')
|
||||
const mediaName = resultBody.querySelector('.media__name')
|
||||
const counter = resultBody.querySelector('.media__counter')
|
||||
const imageEl = resultBody.querySelector('img')
|
||||
const sourceEl = resultBody.querySelector('source')
|
||||
const videoEl = resultBody.querySelector('video')
|
||||
|
||||
const setImg = (media, index) => {
|
||||
if (media?.format_type === 'image') {
|
||||
imageEl.src = media.path
|
||||
|
||||
imageEl.classList.remove('hidden')
|
||||
videoEl.classList.add('hidden')
|
||||
} else if (media?.format_type === 'video') {
|
||||
sourceEl.src = media.path
|
||||
|
||||
videoEl.classList.remove('hidden')
|
||||
imageEl.classList.add('hidden')
|
||||
}
|
||||
|
||||
mediaName.innerText = media?.name
|
||||
counter.innerText = `${index + 1} / ${assets.length}`
|
||||
}
|
||||
setImg(mediaViewer.activeFile, mediaViewer.currentIndex)
|
||||
|
||||
const moveLeft = () => {
|
||||
const [media, index] = mediaViewer.prevActive()
|
||||
setImg(media, index)
|
||||
}
|
||||
const doRight = () => {
|
||||
const [media, index] = mediaViewer.nextActive()
|
||||
setImg(media, index)
|
||||
}
|
||||
const openImg = () => {
|
||||
window.open(mediaViewer.activeFile.path, '_blank')
|
||||
}
|
||||
if (assets.length === 1) {
|
||||
container.classList.add('media-container--fullscreen')
|
||||
} else {
|
||||
leftArrow.addEventListener('click', moveLeft)
|
||||
rightArrow.addEventListener('click', doRight)
|
||||
}
|
||||
imageEl.addEventListener('click', openImg)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
setup,
|
||||
}
|
||||
|
||||
},{}],7:[function(require,module,exports){
|
||||
const { manager } = require('./datamanager.js')
|
||||
const storageModule = require('./storage.js')
|
||||
|
||||
const genericSort = (list, key, ascending, customOrder) => {
|
||||
let sorted
|
||||
if (customOrder) {
|
||||
sorted = list.sort((a, b) => {
|
||||
const aValue = a.result.toLowerCase()
|
||||
const bValue = b.result.toLowerCase()
|
||||
|
||||
const aIndex = customOrder.findIndex((item) => item.toLowerCase() === aValue)
|
||||
const bIndex = customOrder.findIndex((item) => item.toLowerCase() === bValue)
|
||||
|
||||
// Compare the indices to determine the sort order
|
||||
return aIndex - bIndex
|
||||
})
|
||||
} else {
|
||||
sorted = list.sort((a, b) => a[key] === b[key] ? 0 : a[key] > b[key] ? 1 : -1)
|
||||
}
|
||||
|
||||
if (ascending) {
|
||||
sorted.reverse()
|
||||
}
|
||||
return sorted
|
||||
}
|
||||
|
||||
const durationSort = (list, ascending) => {
|
||||
const parseDuration = (duration) => {
|
||||
if (duration.includes(':')) {
|
||||
// If it's in the format "HH:mm:ss"
|
||||
const [hours, minutes, seconds] = duration.split(':').map(Number)
|
||||
return (hours * 3600 + minutes * 60 + seconds) * 1000
|
||||
} else {
|
||||
// If it's in the format "nnn ms"
|
||||
return parseInt(duration)
|
||||
}
|
||||
}
|
||||
const sorted = list.sort((a, b) => parseDuration(a['duration']) - parseDuration(b['duration']))
|
||||
if (ascending) {
|
||||
sorted.reverse()
|
||||
}
|
||||
return sorted
|
||||
}
|
||||
|
||||
const doInitSort = () => {
|
||||
const type = storageModule.getSort(manager.initialSort)
|
||||
const ascending = storageModule.getSortDirection()
|
||||
const list = manager.testSubset
|
||||
const initialOrder = ['Error', 'Failed', 'Rerun', 'XFailed', 'XPassed', 'Skipped', 'Passed']
|
||||
|
||||
storageModule.setSort(type)
|
||||
storageModule.setSortDirection(ascending)
|
||||
|
||||
if (type?.toLowerCase() === 'original') {
|
||||
manager.setRender(list)
|
||||
} else {
|
||||
let sortedList
|
||||
switch (type) {
|
||||
case 'duration':
|
||||
sortedList = durationSort(list, ascending)
|
||||
break
|
||||
case 'result':
|
||||
sortedList = genericSort(list, type, ascending, initialOrder)
|
||||
break
|
||||
default:
|
||||
sortedList = genericSort(list, type, ascending)
|
||||
break
|
||||
}
|
||||
manager.setRender(sortedList)
|
||||
}
|
||||
}
|
||||
|
||||
const doSort = (type, skipDirection) => {
|
||||
const newSortType = storageModule.getSort(manager.initialSort) !== type
|
||||
const currentAsc = storageModule.getSortDirection()
|
||||
let ascending
|
||||
if (skipDirection) {
|
||||
ascending = currentAsc
|
||||
} else {
|
||||
ascending = newSortType ? false : !currentAsc
|
||||
}
|
||||
storageModule.setSort(type)
|
||||
storageModule.setSortDirection(ascending)
|
||||
|
||||
const list = manager.testSubset
|
||||
const sortedList = type === 'duration' ? durationSort(list, ascending) : genericSort(list, type, ascending)
|
||||
manager.setRender(sortedList)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
doInitSort,
|
||||
doSort,
|
||||
}
|
||||
|
||||
},{"./datamanager.js":1,"./storage.js":8}],8:[function(require,module,exports){
|
||||
const possibleFilters = [
|
||||
'passed',
|
||||
'skipped',
|
||||
'failed',
|
||||
'error',
|
||||
'xfailed',
|
||||
'xpassed',
|
||||
'rerun',
|
||||
]
|
||||
|
||||
const getVisible = () => {
|
||||
const url = new URL(window.location.href)
|
||||
const settings = new URLSearchParams(url.search).get('visible')
|
||||
const lower = (item) => {
|
||||
const lowerItem = item.toLowerCase()
|
||||
if (possibleFilters.includes(lowerItem)) {
|
||||
return lowerItem
|
||||
}
|
||||
return null
|
||||
}
|
||||
return settings === null ?
|
||||
possibleFilters :
|
||||
[...new Set(settings?.split(',').map(lower).filter((item) => item))]
|
||||
}
|
||||
|
||||
const hideCategory = (categoryToHide) => {
|
||||
const url = new URL(window.location.href)
|
||||
const visibleParams = new URLSearchParams(url.search).get('visible')
|
||||
const currentVisible = visibleParams ? visibleParams.split(',') : [...possibleFilters]
|
||||
const settings = [...new Set(currentVisible)].filter((f) => f !== categoryToHide).join(',')
|
||||
|
||||
url.searchParams.set('visible', settings)
|
||||
window.history.pushState({}, null, unescape(url.href))
|
||||
}
|
||||
|
||||
/**
 * Add `categoryToShow` to the 'visible' query parameter and push the updated
 * URL onto the history stack. When the resulting set covers every category
 * (or is empty) the parameter is dropped entirely, since "no filter" and
 * "all visible" are equivalent.
 */
const showCategory = (categoryToShow) => {
    if (typeof window === 'undefined') {
        return
    }
    const pageUrl = new URL(window.location.href)
    const rawParam = new URLSearchParams(pageUrl.search).get('visible')
    const shownNow = rawParam?.split(',').filter(Boolean) || [...possibleFilters]
    const combined = [...new Set([categoryToShow, ...shownNow])]
    if (combined.length === possibleFilters.length || combined.length === 0) {
        pageUrl.searchParams.delete('visible')
    } else {
        pageUrl.searchParams.set('visible', combined.join(','))
    }
    window.history.pushState({}, null, unescape(pageUrl.href))
}
|
||||
|
||||
/**
 * Resolve the active sort key: the 'sort' query parameter wins, then the
 * caller-supplied initial sort, then the 'result' default.
 */
const getSort = (initialSort) => {
    const params = new URLSearchParams(new URL(window.location.href).search)
    const fromQuery = params.get('sort')
    return fromQuery || initialSort || 'result'
}
|
||||
|
||||
/**
 * Persist the chosen sort key in the 'sort' query parameter via a history
 * push (no reload); unescape keeps the URL human-readable.
 */
const setSort = (type) => {
    const target = new URL(window.location.href)
    target.searchParams.set('sort', type)
    window.history.pushState({}, null, unescape(target.href))
}
|
||||
|
||||
/**
 * Decide which outcome categories start collapsed, combining the 'collapsed'
 * query parameter with the report's renderCollapsed setting:
 *  - no param and no setting      -> collapse 'passed' only (default)
 *  - empty param or quoted empty  -> collapse nothing
 *  - 'all' (param or setting)     -> collapse every category
 *  - otherwise                    -> the comma-separated list, lowercased,
 *                                    or the renderCollapsed value as-is
 * Outside a browser, nothing is collapsed.
 */
const getCollapsedCategory = (renderCollapsed) => {
    if (typeof window === 'undefined') {
        return []
    }
    const pageUrl = new URL(window.location.href)
    const collapsedItems = new URLSearchParams(pageUrl.search).get('collapsed')
    if (!renderCollapsed && collapsedItems === null) {
        return ['passed']
    }
    if (collapsedItems?.length === 0 || /^["']{2}$/.test(collapsedItems)) {
        return []
    }
    if (/^all$/.test(collapsedItems) || (collapsedItems === null && /^all$/.test(renderCollapsed))) {
        return [...possibleFilters]
    }
    return collapsedItems?.split(',').map((item) => item.toLowerCase()) || renderCollapsed
}
|
||||
|
||||
/** Current sort direction from sessionStorage; false (descending) when unset. */
const getSortDirection = () => {
    const stored = JSON.parse(sessionStorage.getItem('sortAsc'))
    return stored || false
}
/** Persist the ascending/descending flag for this browser session. */
const setSortDirection = (ascending) => {
    sessionStorage.setItem('sortAsc', ascending)
}
|
||||
|
||||
/** Ids of result rows the user has manually collapsed this session. */
const getCollapsedIds = () => {
    const stored = sessionStorage.getItem('collapsedIds')
    return JSON.parse(stored) || []
}
/** Persist the collapsed-row id list for this browser session. */
const setCollapsedIds = (list) => {
    sessionStorage.setItem('collapsedIds', JSON.stringify(list))
}
|
||||
|
||||
// Public API of the storage module: filter visibility and sort selection are
// persisted in the page URL's query string (shareable links), while sort
// direction and the set of manually collapsed rows live in sessionStorage.
module.exports = {
    getVisible,
    hideCategory,
    showCategory,
    getCollapsedIds,
    setCollapsedIds,
    getSort,
    setSort,
    getSortDirection,
    setSortDirection,
    getCollapsedCategory,
    possibleFilters,
}
|
||||
|
||||
},{}]},{},[4]);
|
||||
</script>
|
||||
</footer>
|
||||
</html>
|
||||
@@ -91,8 +91,7 @@ def test_never_unit_automation(u1_s1, arg):
|
||||
|
||||
with scope(space=space):
|
||||
Automation.objects.get_or_create(name='never unit test', type=Automation.NEVER_UNIT, param_1='egg', param_2=arg[1], created_by=user, space=space)
|
||||
tokens, automation_applied = automation.apply_never_unit_automation(arg[0])
|
||||
assert tokens == arg[2]
|
||||
assert automation.apply_never_unit_automation(arg[0]) == arg[2]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("source", [
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import pytest
|
||||
from django.contrib import auth
|
||||
from django.test import RequestFactory
|
||||
from django_scopes import scope
|
||||
@@ -6,11 +5,7 @@ from django_scopes import scope
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
|
||||
|
||||
@pytest.mark.parametrize("arg", [
|
||||
[True],
|
||||
[False],
|
||||
])
|
||||
def test_ingredient_parser(arg, u1_s1):
|
||||
def test_ingredient_parser(u1_s1):
|
||||
expectations = {
|
||||
"2¼ l Wasser": (2.25, "l", "Wasser", ""),
|
||||
"3¼l Wasser": (3.25, "l", "Wasser", ""),
|
||||
@@ -72,8 +67,7 @@ def test_ingredient_parser(arg, u1_s1):
|
||||
"1 Porreestange(n) , ca. 200 g": (1.0, None, 'Porreestange(n)', 'ca. 200 g'), # leading space before comma
|
||||
# test for over long food entries to get properly split into the note field
|
||||
"1 Lorem ipsum dolor sit amet consetetur sadipscing elitr sed diam nonumy eirmod tempor invidunt ut l Lorem ipsum dolor sit amet consetetur sadipscing elitr sed diam nonumy eirmod tempor invidunt ut l": (
|
||||
1.0, 'Lorem', 'ipsum',
|
||||
'dolor sit amet consetetur sadipscing elitr sed diam nonumy eirmod tempor invidunt ut l Lorem ipsum dolor sit amet consetetur sadipscing elitr sed diam nonumy eirmod tempor invidunt ut l'),
|
||||
1.0, 'Lorem', 'ipsum', 'dolor sit amet consetetur sadipscing elitr sed diam nonumy eirmod tempor invidunt ut l Lorem ipsum dolor sit amet consetetur sadipscing elitr sed diam nonumy eirmod tempor invidunt ut l'),
|
||||
"1 LoremipsumdolorsitametconsetetursadipscingelitrseddiamnonumyeirmodtemporinviduntutlLoremipsumdolorsitametconsetetursadipscingelitrseddiamnonumyeirmodtemporinviduntutl": (
|
||||
1.0, None, 'LoremipsumdolorsitametconsetetursadipscingelitrseddiamnonumyeirmodtemporinviduntutlLoremipsumdolorsitametconsetetursadipscingeli',
|
||||
'LoremipsumdolorsitametconsetetursadipscingelitrseddiamnonumyeirmodtemporinviduntutlLoremipsumdolorsitametconsetetursadipscingelitrseddiamnonumyeirmodtemporinviduntutl'),
|
||||
@@ -92,7 +86,7 @@ def test_ingredient_parser(arg, u1_s1):
|
||||
request = RequestFactory()
|
||||
request.user = user
|
||||
request.space = space
|
||||
ingredient_parser = IngredientParser(request, False, ignore_automations=arg[0])
|
||||
ingredient_parser = IngredientParser(request, False, ignore_automations=True)
|
||||
|
||||
count = 0
|
||||
with scope(space=space):
|
||||
|
||||
@@ -42,7 +42,7 @@ def test_theming_function(space_1, u1_s1):
|
||||
assert get_theming_values(request)['sticky_nav'] == ''
|
||||
assert get_theming_values(request)['app_name'] == 'Tandoor Recipes'
|
||||
|
||||
space_1.space_theme = Space.TANDOOR
|
||||
space_1.space_theme = Space.BOOTSTRAP
|
||||
space_1.nav_bg_color = '#000000'
|
||||
space_1.nav_text_color = UserPreference.DARK
|
||||
space_1.app_name = 'test_app_name'
|
||||
@@ -53,7 +53,7 @@ def test_theming_function(space_1, u1_s1):
|
||||
request.space = space_1
|
||||
|
||||
# space settings apply when set
|
||||
assert get_theming_values(request)['theme'] == static('themes/tandoor.min.css')
|
||||
assert get_theming_values(request)['theme'] == static('themes/bootstrap.min.css')
|
||||
assert get_theming_values(request)['nav_bg_color'] == '#000000'
|
||||
assert get_theming_values(request)['nav_text_class'] == 'navbar-light'
|
||||
assert get_theming_values(request)['app_name'] == 'test_app_name'
|
||||
|
||||
@@ -61,8 +61,6 @@ router.register(r'search-preference', api.SearchPreferenceViewSet)
|
||||
router.register(r'user-space', api.UserSpaceViewSet)
|
||||
router.register(r'view-log', api.ViewLogViewSet)
|
||||
router.register(r'access-token', api.AccessTokenViewSet)
|
||||
router.register(r'ai-provider', api.AiProviderViewSet)
|
||||
router.register(r'ai-log', api.AiLogViewSet)
|
||||
|
||||
router.register(r'localization', api.LocalizationViewSet, basename='localization')
|
||||
router.register(r'server-settings', api.ServerSettingsViewSet, basename='server-settings')
|
||||
@@ -78,14 +76,12 @@ urlpatterns = [
|
||||
|
||||
path('setup/', views.setup, name='view_setup'),
|
||||
path('no-group/', views.no_groups, name='view_no_group'),
|
||||
#path('space-overview/', views.space_overview, name='view_space_overview'),
|
||||
#path('switch-space/<int:space_id>', views.switch_space, name='view_switch_space'),
|
||||
#path('no-perm/', views.no_perm, name='view_no_perm'),
|
||||
path('space-overview/', views.space_overview, name='view_space_overview'),
|
||||
path('switch-space/<int:space_id>', views.switch_space, name='view_switch_space'),
|
||||
path('no-perm/', views.no_perm, name='view_no_perm'),
|
||||
path('invite/<slug:token>', views.invite_link, name='view_invite'),
|
||||
path('invite/<slug:token>/', views.invite_link, name='view_invite'),
|
||||
|
||||
path('system/', views.system, name='view_system'),
|
||||
path('plugin/update/', views.plugin_update, name='view_plugin_update'),
|
||||
|
||||
|
||||
path('abuse/<slug:token>', views.report_share_abuse, name='view_report_share_abuse'),
|
||||
|
||||
@@ -124,11 +120,15 @@ urlpatterns = [
|
||||
path('api-token-auth/', CustomAuthToken.as_view()),
|
||||
|
||||
path('offline/', views.offline, name='view_offline'),
|
||||
path('service-worker.js', views.service_worker, name='service_worker'),
|
||||
#path('service-worker.js', (TemplateView.as_view(template_name="sw.js", content_type='application/javascript')), name='service_worker'),
|
||||
path('manifest.json', views.web_manifest, name='web_manifest'),
|
||||
|
||||
]
|
||||
|
||||
if DEBUG:
|
||||
urlpatterns.append(path('test/', views.test, name='view_test'))
|
||||
urlpatterns.append(path('test2/', views.test2, name='view_test2'))
|
||||
|
||||
# catchall view for new frontend
|
||||
urlpatterns += [
|
||||
path('', views.index, name='index'),
|
||||
|
||||
@@ -18,6 +18,8 @@ import litellm
|
||||
import redis
|
||||
import requests
|
||||
from PIL import UnidentifiedImageError
|
||||
from PIL.ImImagePlugin import number
|
||||
from PIL.features import check
|
||||
from django.contrib import messages
|
||||
from django.contrib.auth.models import Group, User
|
||||
from django.contrib.postgres.search import TrigramSimilarity
|
||||
@@ -33,6 +35,7 @@ from django.shortcuts import get_object_or_404, redirect
|
||||
from django.urls import reverse
|
||||
from django.utils import timezone
|
||||
from django.utils.dateparse import parse_datetime
|
||||
from django.utils.datetime_safe import date
|
||||
from django.utils.translation import gettext as _
|
||||
from django_scopes import scopes_disabled
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
@@ -62,8 +65,6 @@ from cookbook.connectors.connector_manager import ConnectorManager, ActionType
|
||||
from cookbook.forms import ImportForm, ImportExportBase
|
||||
from cookbook.helper import recipe_url_import as helper
|
||||
from cookbook.helper.HelperFunctions import str2bool, validate_import_url
|
||||
from cookbook.helper.ai_helper import has_monthly_token, can_perform_ai_request, AiCallbackHandler
|
||||
from cookbook.helper.batch_edit_helper import add_to_relation, remove_from_relation, remove_all_from_relation, set_relation
|
||||
from cookbook.helper.image_processing import handle_image
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.helper.open_data_importer import OpenDataImporter
|
||||
@@ -73,7 +74,7 @@ from cookbook.helper.permission_helper import (CustomIsAdmin, CustomIsOwner, Cus
|
||||
CustomTokenHasScope, CustomUserPermission, IsReadOnlyDRF,
|
||||
above_space_limit,
|
||||
group_required, has_group_permission, is_space_owner,
|
||||
switch_user_active_space, CustomAiProviderPermission, IsCreateDRF
|
||||
switch_user_active_space
|
||||
)
|
||||
from cookbook.helper.recipe_search import RecipeSearch
|
||||
from cookbook.helper.recipe_url_import import clean_dict, get_from_youtube_scraper, get_images_from_soup
|
||||
@@ -84,7 +85,7 @@ from cookbook.models import (Automation, BookmarkletImport, ConnectorConfig, Coo
|
||||
RecipeBookEntry, ShareLink, ShoppingListEntry,
|
||||
ShoppingListRecipe, Space, Step, Storage, Supermarket, SupermarketCategory,
|
||||
SupermarketCategoryRelation, Sync, SyncLog, Unit, UnitConversion,
|
||||
UserFile, UserPreference, UserSpace, ViewLog, RecipeImport, SearchPreference, SearchFields, AiLog, AiProvider
|
||||
UserFile, UserPreference, UserSpace, ViewLog, RecipeImport, SearchPreference, SearchFields
|
||||
)
|
||||
from cookbook.provider.dropbox import Dropbox
|
||||
from cookbook.provider.local import Local
|
||||
@@ -109,13 +110,12 @@ from cookbook.serializer import (AccessTokenSerializer, AutomationSerializer, Au
|
||||
UserSerializer, UserSpaceSerializer, ViewLogSerializer,
|
||||
LocalizationSerializer, ServerSettingsSerializer, RecipeFromSourceResponseSerializer, ShoppingListEntryBulkCreateSerializer, FdcQuerySerializer,
|
||||
AiImportSerializer, ImportOpenDataSerializer, ImportOpenDataMetaDataSerializer, ImportOpenDataResponseSerializer, ExportRequestSerializer,
|
||||
RecipeImportSerializer, ConnectorConfigSerializer, SearchPreferenceSerializer, SearchFieldsSerializer, RecipeBatchUpdateSerializer,
|
||||
AiProviderSerializer, AiLogSerializer, FoodBatchUpdateSerializer
|
||||
RecipeImportSerializer, ConnectorConfigSerializer, SearchPreferenceSerializer, SearchFieldsSerializer
|
||||
)
|
||||
from cookbook.version_info import TANDOOR_VERSION
|
||||
from cookbook.views.import_export import get_integration
|
||||
from recipes import settings
|
||||
from recipes.settings import DRF_THROTTLE_RECIPE_URL_IMPORT, FDC_API_KEY, AI_RATELIMIT
|
||||
from recipes.settings import DRF_THROTTLE_RECIPE_URL_IMPORT, FDC_API_KEY, AI_RATELIMIT, AI_API_KEY, AI_MODEL_NAME
|
||||
|
||||
DateExample = OpenApiExample('Date Format', value='1972-12-05', request_only=True)
|
||||
BeforeDateExample = OpenApiExample('Before Date Format', value='-1972-12-05', request_only=True)
|
||||
@@ -131,7 +131,7 @@ class LoggingMixin(object):
|
||||
|
||||
if settings.REDIS_HOST:
|
||||
try:
|
||||
d = timezone.now().isoformat()
|
||||
d = date.today().isoformat()
|
||||
space = request.space
|
||||
endpoint = request.resolver_match.url_name
|
||||
|
||||
@@ -179,10 +179,7 @@ class StandardFilterModelViewSet(viewsets.ModelViewSet):
|
||||
queryset = self.queryset
|
||||
query = self.request.query_params.get('query', None)
|
||||
if query is not None:
|
||||
try:
|
||||
queryset = queryset.filter(name__icontains=query)
|
||||
except FieldError:
|
||||
pass
|
||||
queryset = queryset.filter(name__icontains=query)
|
||||
|
||||
updated_at = self.request.query_params.get('updated_at', None)
|
||||
if updated_at is not None:
|
||||
@@ -414,7 +411,6 @@ class MergeMixin(ViewSetMixin):
|
||||
description='Return first level children of {obj} with ID [int]. Integer 0 will return root {obj}s.',
|
||||
type=int),
|
||||
OpenApiParameter(name='tree', description='Return all self and children of {obj} with ID [int].', type=int),
|
||||
OpenApiParameter(name='root_tree', description='Return all items belonging to the tree of the given {obj} id', type=int),
|
||||
]),
|
||||
move=extend_schema(parameters=[
|
||||
OpenApiParameter(name="parent", description='The ID of the desired parent of the {obj}.', type=OpenApiTypes.INT,
|
||||
@@ -427,7 +423,6 @@ class TreeMixin(MergeMixin, FuzzyFilterMixin):
|
||||
def get_queryset(self):
|
||||
root = self.request.query_params.get('root', None)
|
||||
tree = self.request.query_params.get('tree', None)
|
||||
root_tree = self.request.query_params.get('root_tree', None)
|
||||
|
||||
if root:
|
||||
if root.isnumeric():
|
||||
@@ -446,21 +441,10 @@ class TreeMixin(MergeMixin, FuzzyFilterMixin):
|
||||
self.queryset = self.model.objects.get(id=int(tree)).get_descendants_and_self()
|
||||
except self.model.DoesNotExist:
|
||||
self.queryset = self.model.objects.none()
|
||||
elif root_tree:
|
||||
if root_tree.isnumeric():
|
||||
try:
|
||||
self.queryset = self.model.objects.get(id=int(root_tree)).get_root().get_descendants_and_self()
|
||||
except self.model.DoesNotExist:
|
||||
self.queryset = self.model.objects.none()
|
||||
|
||||
else:
|
||||
return self.annotate_recipe(queryset=super().get_queryset(), request=self.request,
|
||||
serializer=self.serializer_class, tree=True)
|
||||
|
||||
self.queryset = self.queryset.filter(space=self.request.space)
|
||||
# only order if not root_tree or tree mde because in these modes the sorting is relevant for the client
|
||||
if not root_tree and not tree:
|
||||
self.queryset = self.queryset.order_by(Lower('name').asc())
|
||||
self.queryset = self.queryset.filter(space=self.request.space).order_by(Lower('name').asc())
|
||||
|
||||
return self.annotate_recipe(queryset=self.queryset, request=self.request, serializer=self.serializer_class,
|
||||
tree=True)
|
||||
@@ -544,9 +528,9 @@ class GroupViewSet(LoggingMixin, viewsets.ModelViewSet):
|
||||
class SpaceViewSet(LoggingMixin, viewsets.ModelViewSet):
|
||||
queryset = Space.objects
|
||||
serializer_class = SpaceSerializer
|
||||
permission_classes = [((IsReadOnlyDRF | IsCreateDRF) & CustomIsGuest) | CustomIsOwner & CustomIsAdmin & CustomTokenHasReadWriteScope]
|
||||
permission_classes = [IsReadOnlyDRF & CustomIsGuest | CustomIsOwner & CustomIsAdmin & CustomTokenHasReadWriteScope]
|
||||
pagination_class = DefaultPagination
|
||||
http_method_names = ['get', 'post', 'put', 'patch']
|
||||
http_method_names = ['get', 'patch']
|
||||
|
||||
def get_queryset(self):
|
||||
return self.queryset.filter(
|
||||
@@ -565,7 +549,7 @@ class SpaceViewSet(LoggingMixin, viewsets.ModelViewSet):
|
||||
class UserSpaceViewSet(LoggingMixin, viewsets.ModelViewSet):
|
||||
queryset = UserSpace.objects
|
||||
serializer_class = UserSpaceSerializer
|
||||
permission_classes = [(CustomIsSpaceOwner | (IsReadOnlyDRF & CustomIsUser) | CustomIsOwnerReadOnly) & CustomTokenHasReadWriteScope]
|
||||
permission_classes = [(CustomIsSpaceOwner | CustomIsOwnerReadOnly) & CustomTokenHasReadWriteScope]
|
||||
http_method_names = ['get', 'put', 'patch', 'delete']
|
||||
pagination_class = DefaultPagination
|
||||
|
||||
@@ -579,23 +563,10 @@ class UserSpaceViewSet(LoggingMixin, viewsets.ModelViewSet):
|
||||
if internal_note is not None:
|
||||
self.queryset = self.queryset.filter(internal_note=internal_note)
|
||||
|
||||
# >= admins can see all users, guest/user can only see themselves
|
||||
if has_group_permission(self.request.user, ['admin']):
|
||||
if is_space_owner(self.request.user, self.request.space):
|
||||
return self.queryset.filter(space=self.request.space)
|
||||
else:
|
||||
return self.queryset.filter(space=self.request.space, user=self.request.user)
|
||||
|
||||
@extend_schema(responses=UserSpaceSerializer(many=True))
|
||||
@decorators.action(detail=False, pagination_class=DefaultPagination, methods=['GET'], serializer_class=UserSpaceSerializer, )
|
||||
def all_personal(self, request):
|
||||
"""
|
||||
return all userspaces for the user requesting the endpoint
|
||||
:param request:
|
||||
:return:
|
||||
"""
|
||||
with scopes_disabled():
|
||||
self.queryset = self.queryset.filter(user=self.request.user)
|
||||
return Response(self.serializer_class(self.queryset.all(), many=True, context={'request': self.request}).data)
|
||||
return self.queryset.filter(user=self.request.user, space=self.request.space)
|
||||
|
||||
|
||||
class UserPreferenceViewSet(LoggingMixin, viewsets.ModelViewSet):
|
||||
@@ -633,29 +604,6 @@ class SearchPreferenceViewSet(LoggingMixin, viewsets.ModelViewSet):
|
||||
return self.queryset.filter(user=self.request.user)
|
||||
|
||||
|
||||
class AiProviderViewSet(LoggingMixin, viewsets.ModelViewSet):
|
||||
queryset = AiProvider.objects
|
||||
serializer_class = AiProviderSerializer
|
||||
permission_classes = [CustomAiProviderPermission & CustomTokenHasReadWriteScope]
|
||||
pagination_class = DefaultPagination
|
||||
|
||||
def get_queryset(self):
|
||||
# read only access to all space and global AiProviders
|
||||
with scopes_disabled():
|
||||
return self.queryset.filter(Q(space=self.request.space) | Q(space__isnull=True))
|
||||
|
||||
|
||||
class AiLogViewSet(LoggingMixin, viewsets.ModelViewSet):
|
||||
queryset = AiLog.objects
|
||||
serializer_class = AiLogSerializer
|
||||
permission_classes = [CustomIsUser & CustomTokenHasReadWriteScope]
|
||||
http_method_names = ['get']
|
||||
pagination_class = DefaultPagination
|
||||
|
||||
def get_queryset(self):
|
||||
return self.queryset.filter(space=self.request.space)
|
||||
|
||||
|
||||
class StorageViewSet(LoggingMixin, viewsets.ModelViewSet):
|
||||
# TODO handle delete protect error and adjust test
|
||||
queryset = Storage.objects
|
||||
@@ -954,94 +902,6 @@ class FoodViewSet(LoggingMixin, TreeMixin):
|
||||
content = {'error': True, 'msg': e.args[0]}
|
||||
return Response(content, status=status.HTTP_403_FORBIDDEN)
|
||||
|
||||
@decorators.action(detail=False, methods=['PUT'], serializer_class=FoodBatchUpdateSerializer)
|
||||
def batch_update(self, request):
|
||||
serializer = self.serializer_class(data=request.data, partial=True)
|
||||
|
||||
if serializer.is_valid():
|
||||
foods = Food.objects.filter(id__in=serializer.validated_data['foods'], space=self.request.space)
|
||||
safe_food_ids = Food.objects.filter(id__in=serializer.validated_data['foods'], space=self.request.space).values_list('id', flat=True)
|
||||
|
||||
if 'category' in serializer.validated_data:
|
||||
foods.update(supermarket_category_id=serializer.validated_data['category'])
|
||||
|
||||
if 'ignore_shopping' in serializer.validated_data and serializer.validated_data['ignore_shopping'] is not None:
|
||||
foods.update(ignore_shopping=serializer.validated_data['ignore_shopping'])
|
||||
|
||||
if 'on_hand' in serializer.validated_data and serializer.validated_data['on_hand'] is not None:
|
||||
if serializer.validated_data['on_hand']:
|
||||
user_relation = []
|
||||
for f in safe_food_ids:
|
||||
user_relation.append(Food.onhand_users.through(food_id=f, user_id=request.user.id))
|
||||
Food.onhand_users.through.objects.bulk_create(user_relation, ignore_conflicts=True, unique_fields=('food_id', 'user_id',))
|
||||
else:
|
||||
Food.onhand_users.through.objects.filter(food_id__in=safe_food_ids, user_id=request.user.id).delete()
|
||||
|
||||
if 'substitute_children' in serializer.validated_data and serializer.validated_data['substitute_children'] is not None:
|
||||
foods.update(substitute_children=serializer.validated_data['substitute_children'])
|
||||
|
||||
if 'substitute_siblings' in serializer.validated_data and serializer.validated_data['substitute_siblings'] is not None:
|
||||
foods.update(substitute_siblings=serializer.validated_data['substitute_siblings'])
|
||||
|
||||
# ---------- substitutes -------------
|
||||
if 'substitute_add' in serializer.validated_data:
|
||||
add_to_relation(Food.substitute.through, 'from_food_id', safe_food_ids, 'to_food_id', serializer.validated_data['substitute_add'])
|
||||
|
||||
if 'substitute_remove' in serializer.validated_data:
|
||||
remove_from_relation(Food.substitute.through, 'from_food_id', safe_food_ids, 'to_food_id', serializer.validated_data['substitute_remove'])
|
||||
|
||||
if 'substitute_set' in serializer.validated_data and len(serializer.validated_data['substitute_set']) > 0:
|
||||
set_relation(Food.substitute.through, 'from_food_id', safe_food_ids, 'to_food_id', serializer.validated_data['substitute_set'])
|
||||
|
||||
if 'substitute_remove_all' in serializer.validated_data and serializer.validated_data['substitute_remove_all']:
|
||||
remove_all_from_relation(Food.substitute.through, 'from_food_id', safe_food_ids)
|
||||
|
||||
# ---------- inherit fields -------------
|
||||
if 'inherit_fields_add' in serializer.validated_data:
|
||||
add_to_relation(Food.inherit_fields.through, 'food_id', safe_food_ids, 'foodinheritfield_id', serializer.validated_data['inherit_fields_add'])
|
||||
|
||||
if 'inherit_fields_remove' in serializer.validated_data:
|
||||
remove_from_relation(Food.inherit_fields.through, 'food_id', safe_food_ids, 'foodinheritfield_id', serializer.validated_data['inherit_fields_remove'])
|
||||
|
||||
if 'inherit_fields_set' in serializer.validated_data and len(serializer.validated_data['inherit_fields_set']) > 0:
|
||||
set_relation(Food.inherit_fields.through, 'food_id', safe_food_ids, 'foodinheritfield_id', serializer.validated_data['inherit_fields_set'])
|
||||
|
||||
if 'inherit_fields_remove_all' in serializer.validated_data and serializer.validated_data['inherit_fields_remove_all']:
|
||||
remove_all_from_relation(Food.inherit_fields.through, 'food_id', safe_food_ids)
|
||||
|
||||
# ---------- child inherit fields -------------
|
||||
if 'child_inherit_fields_add' in serializer.validated_data:
|
||||
add_to_relation(Food.child_inherit_fields.through, 'food_id', safe_food_ids, 'foodinheritfield_id', serializer.validated_data['child_inherit_fields_add'])
|
||||
|
||||
if 'child_inherit_fields_remove' in serializer.validated_data:
|
||||
remove_from_relation(Food.child_inherit_fields.through, 'food_id', safe_food_ids, 'foodinheritfield_id', serializer.validated_data['child_inherit_fields_remove'])
|
||||
|
||||
if 'child_inherit_fields_set' in serializer.validated_data and len(serializer.validated_data['child_inherit_fields_set']) > 0:
|
||||
set_relation(Food.child_inherit_fields.through, 'food_id', safe_food_ids, 'foodinheritfield_id', serializer.validated_data['child_inherit_fields_set'])
|
||||
|
||||
if 'child_inherit_fields_remove_all' in serializer.validated_data and serializer.validated_data['child_inherit_fields_remove_all']:
|
||||
remove_all_from_relation(Food.child_inherit_fields.through, 'food_id', safe_food_ids)
|
||||
|
||||
# ------- parent --------
|
||||
if self.model.node_order_by:
|
||||
node_location = 'sorted'
|
||||
else:
|
||||
node_location = 'last'
|
||||
|
||||
if 'parent_remove' in serializer.validated_data and serializer.validated_data['parent_remove']:
|
||||
for f in foods:
|
||||
f.move(Food.get_first_root_node(), f'{node_location}-sibling')
|
||||
|
||||
if 'parent_set' in serializer.validated_data:
|
||||
parent_food = Food.objects.filter(space=request.space, id=serializer.validated_data['parent_set']).first()
|
||||
if parent_food:
|
||||
for f in foods:
|
||||
f.move(parent_food, f'{node_location}-child')
|
||||
|
||||
return Response({}, 200)
|
||||
|
||||
return Response(serializer.errors, 400)
|
||||
|
||||
|
||||
@extend_schema_view(list=extend_schema(parameters=[
|
||||
OpenApiParameter(name='order_field', description='Field to order recipe books on', type=str,
|
||||
@@ -1235,19 +1095,7 @@ class IngredientViewSet(LoggingMixin, viewsets.ModelViewSet):
|
||||
return self.serializer_class
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = self.queryset.prefetch_related('food',
|
||||
'food__properties',
|
||||
'food__properties__property_type',
|
||||
'food__inherit_fields',
|
||||
'food__supermarket_category',
|
||||
'food__onhand_users',
|
||||
'food__substitute',
|
||||
'food__child_inherit_fields',
|
||||
'unit',
|
||||
'unit__unit_conversion_base_relation',
|
||||
'unit__unit_conversion_base_relation__base_unit',
|
||||
'unit__unit_conversion_converted_relation',
|
||||
'unit__unit_conversion_converted_relation__converted_unit', ).filter(step__recipe__space=self.request.space)
|
||||
queryset = self.queryset.filter(step__recipe__space=self.request.space)
|
||||
food = self.request.query_params.get('food', None)
|
||||
if food and re.match(r'^(\d)+$', food):
|
||||
queryset = queryset.filter(food_id=food)
|
||||
@@ -1511,103 +1359,9 @@ class RecipeViewSet(LoggingMixin, viewsets.ModelViewSet):
|
||||
|
||||
return Response(self.serializer_class(qs, many=True).data)
|
||||
|
||||
@decorators.action(detail=False, methods=['PUT'], serializer_class=RecipeBatchUpdateSerializer)
|
||||
def batch_update(self, request):
|
||||
serializer = self.serializer_class(data=request.data, partial=True)
|
||||
|
||||
if serializer.is_valid():
|
||||
recipes = Recipe.objects.filter(id__in=serializer.validated_data['recipes'], space=self.request.space)
|
||||
safe_recipe_ids = Recipe.objects.filter(id__in=serializer.validated_data['recipes'], space=self.request.space).values_list('id', flat=True)
|
||||
|
||||
if 'keywords_add' in serializer.validated_data:
|
||||
keyword_relations = []
|
||||
for r in recipes:
|
||||
for k in serializer.validated_data['keywords_add']:
|
||||
keyword_relations.append(Recipe.keywords.through(recipe_id=r.pk, keyword_id=k))
|
||||
Recipe.keywords.through.objects.bulk_create(keyword_relations, ignore_conflicts=True, unique_fields=('recipe_id', 'keyword_id',))
|
||||
|
||||
if 'keywords_remove' in serializer.validated_data:
|
||||
for k in serializer.validated_data['keywords_remove']:
|
||||
Recipe.keywords.through.objects.filter(recipe_id__in=safe_recipe_ids, keyword_id=k).delete()
|
||||
|
||||
if 'keywords_set' in serializer.validated_data and len(serializer.validated_data['keywords_set']) > 0:
|
||||
keyword_relations = []
|
||||
Recipe.keywords.through.objects.filter(recipe_id__in=safe_recipe_ids).delete()
|
||||
for r in recipes:
|
||||
for k in serializer.validated_data['keywords_set']:
|
||||
keyword_relations.append(Recipe.keywords.through(recipe_id=r.pk, keyword_id=k))
|
||||
Recipe.keywords.through.objects.bulk_create(keyword_relations, ignore_conflicts=True, unique_fields=('recipe_id', 'keyword_id',))
|
||||
|
||||
if 'keywords_remove_all' in serializer.validated_data and serializer.validated_data['keywords_remove_all']:
|
||||
Recipe.keywords.through.objects.filter(recipe_id__in=safe_recipe_ids).delete()
|
||||
|
||||
if 'working_time' in serializer.validated_data:
|
||||
recipes.update(working_time=serializer.validated_data['working_time'])
|
||||
|
||||
if 'waiting_time' in serializer.validated_data:
|
||||
recipes.update(waiting_time=serializer.validated_data['waiting_time'])
|
||||
|
||||
if 'servings' in serializer.validated_data:
|
||||
recipes.update(servings=serializer.validated_data['servings'])
|
||||
|
||||
if 'servings_text' in serializer.validated_data:
|
||||
recipes.update(servings_text=serializer.validated_data['servings_text'])
|
||||
|
||||
if 'private' in serializer.validated_data and serializer.validated_data['private'] is not None:
|
||||
recipes.update(private=serializer.validated_data['private'])
|
||||
|
||||
if 'shared_add' in serializer.validated_data:
|
||||
shared_relation = []
|
||||
for r in recipes:
|
||||
for u in serializer.validated_data['shared_add']:
|
||||
shared_relation.append(Recipe.shared.through(recipe_id=r.pk, user_id=u))
|
||||
Recipe.shared.through.objects.bulk_create(shared_relation, ignore_conflicts=True, unique_fields=('recipe_id', 'user_id',))
|
||||
|
||||
if 'shared_remove' in serializer.validated_data:
|
||||
for s in serializer.validated_data['shared_remove']:
|
||||
Recipe.shared.through.objects.filter(recipe_id__in=safe_recipe_ids, user_id=s).delete()
|
||||
|
||||
if 'shared_set' in serializer.validated_data and len(serializer.validated_data['shared_set']) > 0:
|
||||
shared_relation = []
|
||||
Recipe.shared.through.objects.filter(recipe_id__in=safe_recipe_ids).delete()
|
||||
for r in recipes:
|
||||
for u in serializer.validated_data['shared_set']:
|
||||
shared_relation.append(Recipe.shared.through(recipe_id=r.pk, user_id=u))
|
||||
Recipe.shared.through.objects.bulk_create(shared_relation, ignore_conflicts=True, unique_fields=('recipe_id', 'user_id',))
|
||||
|
||||
if 'shared_remove_all' in serializer.validated_data and serializer.validated_data['shared_remove_all']:
|
||||
Recipe.shared.through.objects.filter(recipe_id__in=safe_recipe_ids).delete()
|
||||
|
||||
if 'clear_description' in serializer.validated_data and serializer.validated_data['clear_description']:
|
||||
recipes.update(description='')
|
||||
|
||||
if 'show_ingredient_overview' in serializer.validated_data and serializer.validated_data['show_ingredient_overview'] is not None:
|
||||
recipes.update(show_ingredient_overview=serializer.validated_data['show_ingredient_overview'])
|
||||
|
||||
return Response({}, 200)
|
||||
|
||||
return Response(serializer.errors, 400)
|
||||
|
||||
@extend_schema(responses=RecipeSerializer(many=False))
|
||||
@decorators.action(detail=True, pagination_class=None, methods=['PATCH'], serializer_class=RecipeSerializer)
|
||||
def delete_external(self, request, pk):
|
||||
obj = self.get_object()
|
||||
if obj.get_space() != request.space and has_group_permission(request.user, ['user']):
|
||||
raise PermissionDenied(detail='You do not have the required permission to perform this action', code=403)
|
||||
|
||||
if obj.storage:
|
||||
get_recipe_provider(obj).delete_file(obj)
|
||||
obj.storage = None
|
||||
obj.file_path = ''
|
||||
obj.file_uid = ''
|
||||
obj.save()
|
||||
|
||||
return Response(self.serializer_class(obj, many=False, context={'request': request}).data)
|
||||
|
||||
|
||||
@extend_schema_view(list=extend_schema(
|
||||
parameters=[OpenApiParameter(name='food_id', description='ID of food to filter for', type=int),
|
||||
OpenApiParameter(name='query', description='query that looks into food, base unit or converted unit by name', type=str), ]))
|
||||
parameters=[OpenApiParameter(name='food_id', description='ID of food to filter for', type=int), ]))
|
||||
class UnitConversionViewSet(LoggingMixin, viewsets.ModelViewSet):
|
||||
queryset = UnitConversion.objects
|
||||
serializer_class = UnitConversionSerializer
|
||||
@@ -1619,10 +1373,6 @@ class UnitConversionViewSet(LoggingMixin, viewsets.ModelViewSet):
|
||||
if food_id is not None:
|
||||
self.queryset = self.queryset.filter(food_id=food_id)
|
||||
|
||||
query = self.request.query_params.get('query', None)
|
||||
if query is not None:
|
||||
self.queryset = self.queryset.filter(Q(food__name__icontains=query) | Q(base_unit__name__icontains=query) | Q(converted_unit__name__icontains=query))
|
||||
|
||||
return self.queryset.filter(space=self.request.space)
|
||||
|
||||
|
||||
@@ -1904,8 +1654,7 @@ class AutomationViewSet(LoggingMixin, StandardFilterModelViewSet):
|
||||
|
||||
|
||||
@extend_schema_view(list=extend_schema(parameters=[
|
||||
OpenApiParameter(name='internal_note', description=_('Text field to store data that gets carried over to the UserSpace created from the InviteLink'), type=str),
|
||||
OpenApiParameter(name='unused', description=_('Only return InviteLinks that have not been used yet.'), type=bool),
|
||||
OpenApiParameter(name='internal_note', description=_('Text field to store data that gets carried over to the UserSpace created from the InviteLink'), type=str)
|
||||
]))
|
||||
class InviteLinkViewSet(LoggingMixin, StandardFilterModelViewSet):
|
||||
queryset = InviteLink.objects
|
||||
@@ -1918,10 +1667,6 @@ class InviteLinkViewSet(LoggingMixin, StandardFilterModelViewSet):
|
||||
if internal_note is not None:
|
||||
self.queryset = self.queryset.filter(internal_note=internal_note)
|
||||
|
||||
used = self.request.query_params.get('used', False)
|
||||
if not used:
|
||||
self.queryset = self.queryset.filter(used_by=None)
|
||||
|
||||
if is_space_owner(self.request.user, self.request.space):
|
||||
self.queryset = self.queryset.filter(space=self.request.space).all()
|
||||
return super().get_queryset()
|
||||
@@ -2044,9 +1789,9 @@ class RecipeUrlImportView(APIView):
|
||||
return Response(RecipeFromSourceResponseSerializer(context={'request': request}).to_representation(response), status=status.HTTP_200_OK)
|
||||
|
||||
tandoor_url = None
|
||||
if re.match(r'^(.)*/recipe/[0-9]+/\?share=[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$', url):
|
||||
if re.match('^(.)*/recipe/[0-9]+/?share=[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$', url):
|
||||
tandoor_url = url.replace('/recipe/', '/api/recipe/')
|
||||
elif re.match(r'^(.)*/view/recipe/[0-9]+/[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$', url):
|
||||
elif re.match('^(.)*/view/recipe/[0-9]+/[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$', url):
|
||||
tandoor_url = (url.replace('/view/recipe/', '/api/recipe/').replace(re.split('/recipe/[0-9]+', url)[1], '') + '?share=' +
|
||||
re.split('/recipe/[0-9]+', url)[1].replace('/', ''))
|
||||
if tandoor_url and validate_import_url(tandoor_url):
|
||||
@@ -2139,32 +1884,8 @@ class AiImportView(APIView):
|
||||
if serializer.is_valid():
|
||||
# TODO max file size check
|
||||
|
||||
if 'ai_provider_id' not in serializer.validated_data:
|
||||
response = {
|
||||
'error': True,
|
||||
'msg': _('You must select an AI provider to perform your request.'),
|
||||
}
|
||||
return Response(RecipeFromSourceResponseSerializer(context={'request': request}).to_representation(response), status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
if not can_perform_ai_request(request.space):
|
||||
response = {
|
||||
'error': True,
|
||||
'msg': _("You don't have any credits remaining to use AI or AI features are not enabled for your space."),
|
||||
}
|
||||
return Response(RecipeFromSourceResponseSerializer(context={'request': request}).to_representation(response), status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
ai_provider = AiProvider.objects.filter(pk=serializer.validated_data['ai_provider_id']).filter(Q(space=request.space) | Q(space__isnull=True)).first()
|
||||
|
||||
litellm.callbacks = [AiCallbackHandler(request.space, request.user, ai_provider)]
|
||||
|
||||
messages = []
|
||||
uploaded_file = serializer.validated_data['file']
|
||||
|
||||
if serializer.validated_data['recipe_id']:
|
||||
if recipe := Recipe.objects.filter(id=serializer.validated_data['recipe_id']).first():
|
||||
if recipe.file_path:
|
||||
uploaded_file = get_recipe_provider(recipe).get_file(recipe)
|
||||
|
||||
if uploaded_file:
|
||||
base64type = None
|
||||
try:
|
||||
@@ -2225,15 +1946,7 @@ class AiImportView(APIView):
|
||||
return Response(RecipeFromSourceResponseSerializer(context={'request': request}).to_representation(response), status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
try:
|
||||
ai_request = {
|
||||
'api_key': ai_provider.api_key,
|
||||
'model': ai_provider.model_name,
|
||||
'response_format': {"type": "json_object"},
|
||||
'messages': messages,
|
||||
}
|
||||
if ai_provider.url:
|
||||
ai_request['api_base'] = ai_provider.url
|
||||
ai_response = completion(**ai_request)
|
||||
ai_response = completion(api_key=AI_API_KEY, model=AI_MODEL_NAME, response_format={"type": "json_object"}, messages=messages, )
|
||||
except BadRequestError as err:
|
||||
response = {
|
||||
'error': True,
|
||||
@@ -2292,13 +2005,7 @@ class AppImportView(APIView):
|
||||
files = []
|
||||
for f in request.FILES.getlist('files'):
|
||||
files.append({'file': io.BytesIO(f.read()), 'name': f.name})
|
||||
t = threading.Thread(target=integration.do_import,
|
||||
args=[files, il, form.cleaned_data['duplicates']],
|
||||
kwargs={'meal_plans': form.cleaned_data['meal_plans'],
|
||||
'shopping_lists': form.cleaned_data['shopping_lists'],
|
||||
'nutrition_per_serving': form.cleaned_data['nutrition_per_serving']
|
||||
}
|
||||
)
|
||||
t = threading.Thread(target=integration.do_import, args=[files, il, form.cleaned_data['duplicates']])
|
||||
t.setDaemon(True)
|
||||
t.start()
|
||||
|
||||
@@ -2544,6 +2251,7 @@ class ServerSettingsViewSet(viewsets.GenericViewSet):
|
||||
# Attention: No login required, do not return sensitive data
|
||||
s['shopping_min_autosync_interval'] = settings.SHOPPING_MIN_AUTOSYNC_INTERVAL
|
||||
s['enable_pdf_export'] = settings.ENABLE_PDF_EXPORT
|
||||
s['enable_ai_import'] = settings.AI_API_KEY != ''
|
||||
s['disable_external_connectors'] = settings.DISABLE_EXTERNAL_CONNECTORS
|
||||
s['terms_url'] = settings.TERMS_URL
|
||||
s['privacy_url'] = settings.PRIVACY_URL
|
||||
@@ -2696,7 +2404,7 @@ def meal_plans_to_ical(queryset, filename):
|
||||
request=inline_serializer(name="IngredientStringSerializer", fields={'text': CharField()}),
|
||||
responses=inline_serializer(name="ParsedIngredientSerializer",
|
||||
fields={'amount': IntegerField(), 'unit': CharField(), 'food': CharField(),
|
||||
'note': CharField(), 'original_text': CharField()})
|
||||
'note': CharField()})
|
||||
)
|
||||
@api_view(['POST'])
|
||||
@permission_classes([CustomIsUser & CustomTokenHasReadWriteScope])
|
||||
@@ -2706,19 +2414,13 @@ def ingredient_from_string(request):
|
||||
ingredient_parser = IngredientParser(request, False)
|
||||
amount, unit, food, note = ingredient_parser.parse(text)
|
||||
|
||||
ingredient = {'amount': amount, 'unit': None, 'food': None, 'note': note, 'original_text': text}
|
||||
ingredient = {'amount': amount, 'unit': None, 'food': None, 'note': note}
|
||||
if food:
|
||||
if food_obj := Food.objects.filter(space=request.space).filter(Q(name=food) | Q(plural_name=food)).first():
|
||||
ingredient['food'] = {'name': food_obj.name, 'id': food_obj.id}
|
||||
else:
|
||||
food_obj = Food.objects.create(space=request.space, name=food)
|
||||
ingredient['food'] = {'name': food_obj.name, 'id': food_obj.id}
|
||||
food, created = Food.objects.get_or_create(space=request.space, name=food)
|
||||
ingredient['food'] = {'name': food.name, 'id': food.id}
|
||||
|
||||
if unit:
|
||||
if unit_obj := Unit.objects.filter(space=request.space).filter(Q(name=unit) | Q(plural_name=unit)).first():
|
||||
ingredient['unit'] = {'name': unit_obj.name, 'id': unit_obj.id}
|
||||
else:
|
||||
unit_obj = Unit.objects.create(space=request.space, name=unit)
|
||||
ingredient['unit'] = {'name': unit_obj.name, 'id': unit_obj.id}
|
||||
unit, created = Unit.objects.get_or_create(space=request.space, name=unit)
|
||||
ingredient['unit'] = {'name': unit.name, 'id': unit.id}
|
||||
|
||||
return JsonResponse(ingredient, status=200)
|
||||
|
||||
@@ -17,7 +17,6 @@ from cookbook.integration.copymethat import CopyMeThat
|
||||
from cookbook.integration.default import Default
|
||||
from cookbook.integration.domestica import Domestica
|
||||
from cookbook.integration.mealie import Mealie
|
||||
from cookbook.integration.mealie1 import Mealie1
|
||||
from cookbook.integration.mealmaster import MealMaster
|
||||
from cookbook.integration.melarecipes import MelaRecipes
|
||||
from cookbook.integration.nextcloud_cookbook import NextcloudCookbook
|
||||
@@ -46,8 +45,6 @@ def get_integration(request, export_type):
|
||||
return NextcloudCookbook(request, export_type)
|
||||
if export_type == ImportExportBase.MEALIE:
|
||||
return Mealie(request, export_type)
|
||||
if export_type == ImportExportBase.MEALIE1:
|
||||
return Mealie1(request, export_type)
|
||||
if export_type == ImportExportBase.CHOWDOWN:
|
||||
return Chowdown(request, export_type)
|
||||
if export_type == ImportExportBase.SAFFRON:
|
||||
|
||||
@@ -54,7 +54,7 @@ def hook(request, token):
|
||||
f = ingredient_parser.get_food(food)
|
||||
u = ingredient_parser.get_unit(unit)
|
||||
|
||||
ShoppingListEntry.objects.create(food=f, unit=u, amount=max(1, amount), created_by=request.user, space=request.space)
|
||||
ShoppingListEntry.objects.create(food=f, unit=u, amount=amount, created_by=request.user, space=request.space)
|
||||
|
||||
return JsonResponse({'data': data['message']['text']})
|
||||
except Exception:
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
from datetime import datetime, timedelta
|
||||
from io import StringIO
|
||||
from uuid import UUID
|
||||
@@ -13,15 +12,14 @@ from django.contrib import messages
|
||||
from django.contrib.auth.decorators import login_required
|
||||
from django.contrib.auth.models import Group
|
||||
from django.contrib.auth.password_validation import validate_password
|
||||
from django.core.cache import caches
|
||||
from django.core.exceptions import ValidationError, PermissionDenied, BadRequest
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.management import call_command
|
||||
from django.db import models
|
||||
from django.http import HttpResponseRedirect, JsonResponse, HttpResponse
|
||||
from django.http import HttpResponseRedirect, JsonResponse
|
||||
from django.shortcuts import get_object_or_404, render
|
||||
from django.templatetags.static import static
|
||||
from django.urls import reverse, reverse_lazy
|
||||
from django.utils import timezone
|
||||
from django.utils.datetime_safe import date
|
||||
from django.utils.translation import gettext as _
|
||||
from django_scopes import scopes_disabled
|
||||
from drf_spectacular.views import SpectacularRedocView, SpectacularSwaggerView
|
||||
@@ -32,7 +30,7 @@ from cookbook.helper.permission_helper import CustomIsGuest, GroupRequiredMixin,
|
||||
from cookbook.models import InviteLink, ShareLink, Space, UserSpace
|
||||
from cookbook.templatetags.theming_tags import get_theming_values
|
||||
from cookbook.version_info import VERSION_INFO
|
||||
from recipes.settings import PLUGINS, BASE_DIR
|
||||
from recipes.settings import PLUGINS
|
||||
|
||||
|
||||
def index(request, path=None, resource=None):
|
||||
@@ -42,10 +40,7 @@ def index(request, path=None, resource=None):
|
||||
if User.objects.count() < 1 and 'django.contrib.auth.backends.RemoteUserBackend' not in settings.AUTHENTICATION_BACKENDS:
|
||||
return HttpResponseRedirect(reverse_lazy('view_setup'))
|
||||
|
||||
if 'signup_token' in request.session:
|
||||
return HttpResponseRedirect(reverse('view_invite', args=[request.session.pop('signup_token', '')]))
|
||||
|
||||
if request.user.is_authenticated or re.search(r'/recipe/\d+/', request.path[:512]) and request.GET.get('share'):
|
||||
if request.user.is_authenticated or re.search(r'/recipe/\d+/', request.path[:512]) and request.GET.get('share') :
|
||||
return render(request, 'frontend/tandoor.html', {})
|
||||
else:
|
||||
return HttpResponseRedirect(reverse('account_login') + '?next=' + request.path)
|
||||
@@ -100,8 +95,7 @@ def space_overview(request):
|
||||
max_recipes=settings.SPACE_DEFAULT_MAX_RECIPES,
|
||||
max_users=settings.SPACE_DEFAULT_MAX_USERS,
|
||||
allow_sharing=settings.SPACE_DEFAULT_ALLOW_SHARING,
|
||||
ai_enabled=settings.SPACE_AI_ENABLED,
|
||||
ai_credits_monthly=settings.SPACE_AI_CREDITS_MONTHLY, )
|
||||
)
|
||||
|
||||
user_space = UserSpace.objects.create(space=created_space, user=request.user, active=False)
|
||||
user_space.groups.add(Group.objects.filter(name='admin').get())
|
||||
@@ -139,16 +133,15 @@ def no_perm(request):
|
||||
return HttpResponseRedirect(reverse('account_login') + '?next=' + request.GET.get('next', '/search/'))
|
||||
return render(request, 'no_perm_info.html')
|
||||
|
||||
|
||||
def recipe_pdf_viewer(request, pk):
|
||||
with scopes_disabled():
|
||||
recipe = get_object_or_404(Recipe, pk=pk)
|
||||
if share_link_valid(recipe, request.GET.get('share', None)) or (has_group_permission(
|
||||
request.user, ['guest']) and recipe.space == request.space):
|
||||
|
||||
return render(request, 'pdf_viewer.html', {'recipe_id': pk, 'share': request.GET.get('share', None)})
|
||||
return HttpResponseRedirect(reverse('index'))
|
||||
|
||||
|
||||
def system(request):
|
||||
if not request.user.is_superuser:
|
||||
return HttpResponseRedirect(reverse('index'))
|
||||
@@ -226,7 +219,7 @@ def system(request):
|
||||
total_stats = ['All', int(r.get('api:request-count'))]
|
||||
|
||||
for i in range(0, 6):
|
||||
d = (timezone.now() - timedelta(days=i)).isoformat()
|
||||
d = (date.today() - timedelta(days=i)).isoformat()
|
||||
api_stats[0].append(d)
|
||||
api_space_stats[0].append(d)
|
||||
total_stats.append(int(r.get(f'api:request-count:{d}')) if r.get(f'api:request-count:{d}') else 0)
|
||||
@@ -237,22 +230,17 @@ def system(request):
|
||||
endpoint = x[0].decode('utf-8')
|
||||
endpoint_stats = [endpoint, x[1]]
|
||||
for i in range(0, 6):
|
||||
d = (timezone.now() - timedelta(days=i)).isoformat()
|
||||
d = (date.today() - timedelta(days=i)).isoformat()
|
||||
endpoint_stats.append(r.zscore(f'api:endpoint-request-count:{d}', endpoint))
|
||||
api_stats.append(endpoint_stats)
|
||||
|
||||
for x in r.zrange('api:space-request-count', 0, 20, withscores=True, desc=True):
|
||||
s = x[0].decode('utf-8')
|
||||
if space := Space.objects.filter(pk=s).first():
|
||||
space_stats = [space.name, x[1]]
|
||||
for i in range(0, 6):
|
||||
d = (timezone.now() - timedelta(days=i)).isoformat()
|
||||
space_stats.append(r.zscore(f'api:space-request-count:{d}', s))
|
||||
api_space_stats.append(space_stats)
|
||||
|
||||
cache_response = caches['default'].get(f'system_view_test_cache_entry', None)
|
||||
if not cache_response:
|
||||
caches['default'].set(f'system_view_test_cache_entry', datetime.now(), 10)
|
||||
space_stats = [Space.objects.get(pk=s).name, x[1]]
|
||||
for i in range(0, 6):
|
||||
d = (date.today() - timedelta(days=i)).isoformat()
|
||||
space_stats.append(r.zscore(f'api:space-request-count:{d}', s))
|
||||
api_space_stats.append(space_stats)
|
||||
|
||||
return render(
|
||||
request, 'system.html', {
|
||||
@@ -268,26 +256,9 @@ def system(request):
|
||||
'orphans': orphans,
|
||||
'migration_info': migration_info,
|
||||
'missing_migration': missing_migration,
|
||||
'cache_response': cache_response,
|
||||
})
|
||||
|
||||
|
||||
def plugin_update(request):
|
||||
if not request.user.is_superuser:
|
||||
raise PermissionDenied
|
||||
|
||||
if not 'module' in request.GET:
|
||||
raise BadRequest
|
||||
|
||||
for p in PLUGINS:
|
||||
if p['module'] == request.GET['module']:
|
||||
update_response = subprocess.check_output(['git', 'pull'], cwd=p['base_path'])
|
||||
print(update_response)
|
||||
return HttpResponseRedirect(reverse('view_system'))
|
||||
|
||||
return HttpResponseRedirect(reverse('view_system'))
|
||||
|
||||
|
||||
def setup(request):
|
||||
with scopes_disabled():
|
||||
if User.objects.count() > 0 or 'django.contrib.auth.backends.RemoteUserBackend' in settings.AUTHENTICATION_BACKENDS:
|
||||
@@ -325,7 +296,7 @@ def invite_link(request, token):
|
||||
try:
|
||||
token = UUID(token, version=4)
|
||||
except ValueError:
|
||||
print('Malformed Invite Link supplied!')
|
||||
messages.add_message(request, messages.ERROR, _('Malformed Invite Link supplied!'))
|
||||
return HttpResponseRedirect(reverse('index'))
|
||||
|
||||
if link := InviteLink.objects.filter(valid_until__gte=datetime.today(), used_by=None, uuid=token).first():
|
||||
@@ -334,17 +305,22 @@ def invite_link(request, token):
|
||||
link.used_by = request.user
|
||||
link.save()
|
||||
|
||||
UserSpace.objects.filter(user=request.user).update(active=False)
|
||||
user_space = UserSpace.objects.create(user=request.user, space=link.space, internal_note=link.internal_note, invite_link=link, active=True)
|
||||
user_space = UserSpace.objects.create(user=request.user, space=link.space, internal_note=link.internal_note, invite_link=link, active=False)
|
||||
|
||||
if request.user.userspace_set.count() == 1:
|
||||
user_space.active = True
|
||||
user_space.save()
|
||||
|
||||
user_space.groups.add(link.group)
|
||||
|
||||
return HttpResponseRedirect(reverse('index'))
|
||||
messages.add_message(request, messages.SUCCESS, _('Successfully joined space.'))
|
||||
return HttpResponseRedirect(reverse('view_space_overview'))
|
||||
else:
|
||||
request.session['signup_token'] = str(token)
|
||||
return HttpResponseRedirect(reverse('account_signup'))
|
||||
|
||||
return HttpResponseRedirect(reverse('index'))
|
||||
messages.add_message(request, messages.ERROR, _('Invite Link not valid or already used!'))
|
||||
return HttpResponseRedirect(reverse('view_space_overview'))
|
||||
|
||||
|
||||
def report_share_abuse(request, token):
|
||||
@@ -469,13 +445,6 @@ def offline(request):
|
||||
return render(request, 'offline.html', {})
|
||||
|
||||
|
||||
def service_worker(request):
|
||||
with open(os.path.join(BASE_DIR, 'cookbook', 'static', 'vue3', 'service-worker.js'), 'rb') as service_worker_file:
|
||||
response = HttpResponse(content=service_worker_file)
|
||||
response['Content-Type'] = 'text/javascript'
|
||||
return response
|
||||
|
||||
|
||||
def test(request):
|
||||
if not settings.DEBUG:
|
||||
return HttpResponseRedirect(reverse('index'))
|
||||
@@ -541,3 +510,4 @@ def get_orphan_files(delete_orphans=False):
|
||||
orphans = find_orphans()
|
||||
|
||||
return [img[1] for img in orphans]
|
||||
|
||||
|
||||
@@ -11,14 +11,34 @@ If you like this application and want it to give back, there are many ways to co
|
||||
If you know any foreign languages you can:
|
||||
Improve the translations for any of the existing languages.
|
||||
|
||||
See [here](/contribute/translations/) for further information on how to contribute translation to Tandoor.
|
||||
Add a new language to the long list of existing translations.
|
||||
|
||||
- Armenian
|
||||
- Bulgarian
|
||||
- Catalan
|
||||
- Czech
|
||||
- Danish
|
||||
- Dutch
|
||||
- English
|
||||
- French
|
||||
- German
|
||||
- Hungarian
|
||||
- Italian
|
||||
- Latvian
|
||||
- Norwegian
|
||||
- Polish
|
||||
- Russian
|
||||
- Spanish
|
||||
- Swedish
|
||||
|
||||
See [here](/contribute/translations) for further information on how to contribute translation to Tandoor.
|
||||
|
||||
## Issues and Feature Requests
|
||||
|
||||
The most basic but also crucial way of contributing is reporting issues and commenting on ideas and feature requests
|
||||
The most basic but also very important way of contributing is reporting issues and commenting on ideas and feature requests
|
||||
over at [GitHub issues](https://github.com/vabene1111/recipes/issues).
|
||||
|
||||
Without feedback, improvement can't happen, so don't hesitate to say what you want to say.
|
||||
Without feedback improvement can't happen, so don't hesitate to say what you want to say.
|
||||
|
||||
## Documentation
|
||||
|
||||
@@ -26,12 +46,12 @@ Helping improve the documentation for Tandoor is one of the easiest ways to give
|
||||
You can write guides on how to install and configure Tandoor expanding our repository of non-standard configuations.
|
||||
Or you can write how-to guides using some of Tandoor's advanced features such as authentication or automation.
|
||||
|
||||
See [here](/contribute/documentation/) for more information on how to add documentation to Tandoor.
|
||||
See [here](/contribute/documentation) for more information on how to add documentation to Tandoor.
|
||||
|
||||
## Contributing Code
|
||||
|
||||
For the truly ambitious, you can help write code to fix issues, add additional features, or write your own scripts using
|
||||
Tandoor's extensive API and share your work with the community.
|
||||
|
||||
Before writing any code, please make sure that you review [contribution guidelines](/contribute/guidelines/) and
|
||||
Before writing any code, please make sure that you review [contribution guidelines](/contribute/guidelines) and
|
||||
[VSCode](/contribute/vscode) or [PyCharm](/contribute/pycharm) specific configurations.
|
||||
|
||||
@@ -32,10 +32,10 @@ To contribute to the project you are required to use the following packages with
|
||||
|
||||
## Testing
|
||||
|
||||
Django uses pytest-django to implement a full suite of testing. If you make any functional changes, please implement the appropriate
|
||||
Django uses pytest-django to implement a full suite of testing. If you make any functional changes, please implment the appropriate
|
||||
tests.
|
||||
|
||||
Tandoor is also actively soliciting contributors willing to setup vue3 testing. If you have knowledge in this area it would be greatly appreciated.
|
||||
Tandoor is also actively soliciting contribors willing to setup vue3 testing. If you have knowledge in this area it would be greatly appreciated.
|
||||
|
||||
## API Client
|
||||
|
||||
@@ -44,7 +44,9 @@ Tandoor is also actively soliciting contributors willing to setup vue3 testing.
|
||||
The OpenAPI Generator is a Java project. You must have the java binary executable available on your PATH for this to work.
|
||||
|
||||
Tandoor uses [django-rest-framework](https://www.django-rest-framework.org/) for API implementation. Making contributions that impact the API requires an understanding of
|
||||
ViewSets and Serializers.
|
||||
Viewsets and Serializers.
|
||||
|
||||
Also double check that your changes are actively reflected in the schema so that client apis are generated accurately.
|
||||
|
||||
The API Client is generated automatically from the OpenAPI interface provided by the Django REST framework.
|
||||
For this [openapi-generator](https://github.com/OpenAPITools/openapi-generator) is used.
|
||||
@@ -53,7 +55,15 @@ Install it using your desired setup method. (For example, using `npm install @op
|
||||
|
||||
### Vue
|
||||
|
||||
Generate the schema using the `generate_api_client.py` script in the main directory.
|
||||
Navigate to `vue/src/utils/openapi`.
|
||||
|
||||
Generate the schema using `openapi-generator-cli generate -g typescript-axios -i http://127.0.0.1:8000/openapi/`. (Replace your dev server url if required.)
|
||||
|
||||
### Vue3
|
||||
|
||||
Navigate to `vue3/src/openapi`.
|
||||
|
||||
Generate the schema using `openapi-generator-cli generate -g typescript-fetch -i http://127.0.0.1:8000/openapi/`. (Replace your dev server url if required.)
|
||||
|
||||
## Install and Configuration
|
||||
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
<!-- prettier-ignore-start -->
|
||||
!!! info "Development Setup"
|
||||
The dev setup is a little messy as this application combines the best (at least in my opinion) of both Django and Vue.js.
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
### Devcontainer Setup
|
||||
|
||||
@@ -30,15 +32,17 @@ populated from default values.
|
||||
|
||||
### Vue.js
|
||||
|
||||
!!! danger "Development Setup"
|
||||
The vite dev server **must** be started before the django runserver command is run or else django will **not** recognize it and try to fallback to the build files.
|
||||
<!-- prettier-ignore-start -->
|
||||
!!! warning "Feature Freeze"
|
||||
With the exception of bug fixes, no changes will be accepted on the legacy `vue` front-end.
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
The frontend is build using [Vue.js](https://vuejs.org/).
|
||||
Most new frontend pages are build using [Vue.js](https://vuejs.org/).
|
||||
|
||||
In order to work on these pages, you will have to install a Javascript package manager of your choice. The following examples use yarn.
|
||||
|
||||
1. go to the `vue3` and run `yarn install` to install the dependencies
|
||||
2. run `yarn serve` to start the dev server that allows hot reloading and easy and quick development
|
||||
In the `vue` folder run `yarn install` followed by `yarn build` to install and build the legacy front-end.
|
||||
In the `vue3` folder run `yarn install` followed by `yarn build` to install and build the new front-end.
|
||||
|
||||
If you do not wish to work on those pages, but instead want the application to work properly during development, run `yarn build` to build the frontend pages once. After that you
|
||||
might need to run `python manage.py collectstatic` to setup the static files.
|
||||
After that you can use `yarn serve` from the `vue3` folder to start the development server, and proceed to test your changes.
|
||||
If you do not wish to work on those pages, but instead want the application to work properly during development, run `yarn build` to build the frontend pages once.
|
||||
|
||||
@@ -16,6 +16,8 @@ Maintained by [Aimo](https://github.com/aimok04/kitshn)
|
||||
- Website: [https://kitshn.app/](https://kitshn.app/)
|
||||
- Appstores: [Apple](https://apps.apple.com/us/app/kitshn-for-tandoor/id6740168361), [Android](https://play.google.com/store/apps/details?id=de.kitshn.android)
|
||||
|
||||
|
||||
|
||||
### Untare (discontinued)
|
||||
|
||||
Maintained by [phantomate](https://github.com/phantomate/Untare)
|
||||
|
||||
@@ -33,26 +33,17 @@ VS Marketplace Link: https://marketplace.visualstudio.com/items?itemName=esbenp.
|
||||
|
||||
<!-- prettier-ignore -->
|
||||
!!! note
|
||||
In order to hot reload vue, the `yarn dev` server must be started before starting the django server.
|
||||
In order to debug vue yarn and vite servers must be started before starting the django server.
|
||||
|
||||
There are a number of built in tasks that are available. Here are a few of the key ones:
|
||||
|
||||
- `Setup Dev Server` - Runs all the prerequisite steps so that the dev server can be run inside VSCode.
|
||||
- `Setup Tests` - Runs all prerequisites so tests can be run inside VSCode.
|
||||
|
||||
Once these are run, there are 2 options. If you want to run a vue3 server in a hot reload mode for quick development of the frontend, you should run a development vue server:
|
||||
|
||||
- `Yarn Dev` - Runs development Vue.js vite server not connected to VSCode. Useful if you want to make Vue changes and see them in realtime.
|
||||
|
||||
If not, you need to build and copy the frontend to the django server. If you make changes to the frontend, you need to re-run this and restart the django server:
|
||||
|
||||
- `Collect Static Files` - Builds and collects the vue3 frontend so that it can be served via the django server.
|
||||
|
||||
Once either of those steps are done, you can start the django server:
|
||||
|
||||
- `Run Dev Server` - Runs a django development server not connected to VSCode.
|
||||
|
||||
Once these are run, you should be able to run/debug a django server in VSCode as well as run/debug tests directly through VSCode.
|
||||
There are also a few other tasks specified in case you have specific development needs:
|
||||
|
||||
- `Run Dev Server` - Runs a django development server not connected to VSCode.
|
||||
- `Run all pytests` - Runs all the pytests outside of VSCode.
|
||||
- `Yarn Serve` - Runs development Vue.js server not connected to VSCode. Useful if you want to make Vue changes and see them in realtime.
|
||||
- `Serve Documentation` - Runs a documentation server. Useful if you want to see how changes to documentation show up.
|
||||
|
||||
@@ -1,18 +0,0 @@
|
||||
Tandoor has several AI based features. To allow maximum flexibility, you can configure different AI providers and select them based on the task you want to perform.
|
||||
To prevent accidental cost escalation Tandoor has a robust system of tracking and limiting AI costs.
|
||||
|
||||
## Default Configuration
|
||||
By default the AI features are enabled for every space. Each space has a spending limit of roughly 1 USD per month.
|
||||
This can be changed using the [configuration variables](https://docs.tandoor.dev/system/configuration/#ai-integration)
|
||||
|
||||
You can change these settings any time using the django admin. If you do not care about AI cost you can enter a very high limit or disable cost tracking for your providers.
|
||||
The limit resets on the first of every month.
|
||||
|
||||
## Configure AI Providers
|
||||
When AI support is enabled for a space every user in a space can configure AI providers.
|
||||
The models shown in the editor have been tested and work with Tandoor. Most other models that can parse images/files and return text should also work.
|
||||
|
||||
Superusers also have the ability to configure global AI providers that every space can use.
|
||||
|
||||
## AI Log
|
||||
The AI Log allows you to track the usage of AI calls. Here you can also see the usage.
|
||||
@@ -97,17 +97,10 @@ Follow these steps to import your recipes
|
||||
|
||||
Mealie provides structured data similar to nextcloud.
|
||||
|
||||
!!! WARNING "Versions"
|
||||
There are two different versions of the Mealie importer. One for all backups created prior to Version 1.0 and one for all after.
|
||||
|
||||
!!! INFO "Versions"
|
||||
The Mealie UI does not indicate weather or not nutrition information is stored per serving or per recipe. This choice is left to the user. During the import you will have to choose
|
||||
how Tandoor should treat your nutrition data.
|
||||
|
||||
To migrate your recipes
|
||||
|
||||
1. Go to your Mealie admin settings and create a new backup.
|
||||
2. Download the backup.
|
||||
1. Go to your Mealie settings and create a new Backup.
|
||||
2. Download the backup by clicking on it and pressing download (this wasn't working for me, so I had to manually pull it from the server).
|
||||
3. Upload the entire `.zip` file to the importer page and import everything.
|
||||
|
||||
## Chowdown
|
||||
|
||||
@@ -23,7 +23,6 @@
|
||||
<a href="https://docs.tandoor.dev/install/docker/" target="_blank" rel="noopener noreferrer">Installation</a> •
|
||||
<a href="https://docs.tandoor.dev/" target="_blank" rel="noopener noreferrer">Docs</a> •
|
||||
<a href="https://app.tandoor.dev/accounts/login/?demo" target="_blank" rel="noopener noreferrer">Demo</a> •
|
||||
<a href="https://community.tandoor.dev" target="_blank" rel="noopener noreferrer">Community</a> •
|
||||
<a href="https://discord.gg/RhzBrfWgtp" target="_blank" rel="noopener noreferrer">Discord</a>
|
||||
</p>
|
||||
|
||||
@@ -66,13 +65,13 @@ Share some information on how you use Tandoor to help me improve the application
|
||||
|
||||
<table>
|
||||
<tr>
|
||||
<td><a href="https://community.tandoor.dev">Community</a></td>
|
||||
<td>Get support, share best practices, discuss feature ideas, and meet other Tandoor users.</td>
|
||||
<td><a href="https://discord.gg/RhzBrfWgtp">Discord</a></td>
|
||||
<td>We have a public Discord server that anyone can join. This is where all our developers and contributors hang out and where we make announcements</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td><a href="https://discord.gg/RhzBrfWgtp">Discord</a></td>
|
||||
<td>We have a public Discord server that anyone can join. This is where all our developers and contributors hang out and where we make announcements</td>
|
||||
<td><a href="https://twitter.com/TandoorRecipes">Twitter</a></td>
|
||||
<td>You can follow our Twitter account to get updates on new features or releases</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
!!! info "Community Contributed"
|
||||
This guide was contributed by the community and is neither officially supported, nor updated or tested.
|
||||
|
||||
!!! danger "Tandoor 2 Compatibility"
|
||||
This guide has not been verified/tested for Tandoor 2, which now integrates a nginx service inside the default docker container and exposes its service on port 80 instead of 8080.
|
||||
|
||||
These are instructions for pacman based distributions, like ArchLinux. The package is available from the [AUR](https://aur.archlinux.org/packages/tandoor-recipes-git) or from [GitHub](https://github.com/jdecourval/tandoor-recipes-pkgbuild).
|
||||
|
||||
## Features
|
||||
|
||||
@@ -1,32 +1,28 @@
|
||||
!!! success "Recommended Installation"
|
||||
    Setting up this application using Docker is recommended. This does not mean that other options are bad, but it's the only method
|
||||
that is officially maintained and gets regularly tested.
|
||||
Setting up this application using Docker is recommended. This does not mean that other options are bad, just that
|
||||
support is much easier for this setup.
|
||||
|
||||
This guide shows you some basic setups using Docker and docker compose. For configuration options see the [configuration page](https://docs.tandoor.dev/system/configuration/).
|
||||
It is possible to install this application using many different Docker configurations.
|
||||
|
||||
## **Versions**
|
||||
Please read the instructions on each example carefully and decide if this is the way for you.
|
||||
|
||||
There are different versions (tags) released on [Docker Hub](https://hub.docker.com/r/vabene1111/recipes/tags).
|
||||
## **DockSTARTer**
|
||||
|
||||
- **latest** Default image. The one you should use if you don't know that you need anything else.
|
||||
- **beta** Partially stable version that gets updated every now and then. Expect to have some problems.
|
||||
- **develop** If you want the most bleeding-edge version with potentially many breaking changes, feel free to use this version (not recommended!).
|
||||
- **X.Y.Z** each released version has its own image. If you need to revert to an old version or want to make sure you stay on one specific use these tags.
|
||||
|
||||
!!! danger "No Downgrading"
|
||||
There is currently no way to migrate back to an older version as there is no mechanism to downgrade the database.
|
||||
You could probably do it but I cannot help you with that. Choose wisely if you want to use the unstable images.
|
||||
That said **beta** should usually be working if you like frequent updates and new stuff.
|
||||
The main goal of [DockSTARTer](https://dockstarter.com/) is to make it quick and easy to get up and running with Docker.
|
||||
You may choose to rely on DockSTARTer for various changes to your Docker system or use DockSTARTer as a stepping stone and learn to do more advanced configurations.
|
||||
Follow the guide for installing DockSTARTer and then run `ds` then select 'Configuration' and 'Select Apps' to get Tandoor up and running quickly and easily.
|
||||
|
||||
## **Docker**
|
||||
|
||||
The docker image (`vabene1111/recipes`) simply exposes the application on the container's port `80` through the integrated nginx webserver.
|
||||
The docker image (`vabene1111/recipes`) simply exposes the application on the container's port `8080`.
|
||||
|
||||
It can be run and accessed on port 80 using:
|
||||
|
||||
```shell
|
||||
docker run -d \
|
||||
-v "$(pwd)"/staticfiles:/opt/recipes/staticfiles \
|
||||
-v "$(pwd)"/mediafiles:/opt/recipes/mediafiles \
|
||||
-p 80:80 \
|
||||
-p 80:8080 \
|
||||
-e SECRET_KEY=YOUR_SECRET_KEY \
|
||||
-e DB_ENGINE=django.db.backends.postgresql \
|
||||
-e POSTGRES_HOST=db_recipes \
|
||||
@@ -38,7 +34,25 @@ docker run -d \
|
||||
vabene1111/recipes
|
||||
```
|
||||
|
||||
Please make sure to replace the ```SECRET_KEY``` and ```POSTGRES_PASSWORD``` placeholders!
|
||||
Please make sure, if you run your image this way, to consult
|
||||
the [.env.template](https://raw.githubusercontent.com/vabene1111/recipes/master/.env.template)
|
||||
file in the GitHub repository to verify if additional environment variables are required for your setup.
|
||||
|
||||
Also, don't forget to replace the placeholders for ```SECRET_KEY``` and ```POSTGRES_PASSWORD```!
|
||||
|
||||
## **Versions**
|
||||
|
||||
There are different versions (tags) released on [Docker Hub](https://hub.docker.com/r/vabene1111/recipes/tags).
|
||||
|
||||
- **latest** Default image. The one you should use if you don't know that you need anything else.
|
||||
- **beta** Partially stable version that gets updated every now and then. Expect to have some problems.
|
||||
- **develop** If you want the most bleeding-edge version with potentially many breaking changes, feel free to use this version (not recommended!).
|
||||
- **X.Y.Z** each released version has its own image. If you need to revert to an old version or want to make sure you stay on one specific use these tags.
|
||||
|
||||
!!! danger "No Downgrading"
|
||||
There is currently no way to migrate back to an older version as there is no mechanism to downgrade the database.
|
||||
You could probably do it but I cannot help you with that. Choose wisely if you want to use the unstable images.
|
||||
That said **beta** should usually be working if you like frequent updates and new stuff.
|
||||
|
||||
## **Docker Compose**
|
||||
|
||||
@@ -49,7 +63,7 @@ The main, and also recommended, installation option for this application is Dock
|
||||
```shell
|
||||
wget https://raw.githubusercontent.com/vabene1111/recipes/develop/.env.template -O .env
|
||||
```
|
||||
3. **Edit it accordingly** (you NEED to set `SECRET_KEY` and `POSTGRES_PASSWORD`), see [configuration page](https://docs.tandoor.dev/system/configuration/).
|
||||
3. **Edit it accordingly** (you NEED to set `SECRET_KEY` and `POSTGRES_PASSWORD`).
|
||||
4. Start your container using `docker-compose up -d`.
|
||||
|
||||
### **Plain**
|
||||
@@ -65,6 +79,9 @@ wget https://raw.githubusercontent.com/vabene1111/recipes/develop/docs/install/d
|
||||
{% include "./docker/plain/docker-compose.yml" %}
|
||||
~~~
|
||||
|
||||
!!!note
|
||||
Don't forget to [download and configure](#docker-compose) your ```.env``` file!
|
||||
|
||||
### **Reverse Proxy**
|
||||
|
||||
Most deployments will likely use a reverse proxy.
|
||||
@@ -88,6 +105,8 @@ wget https://raw.githubusercontent.com/vabene1111/recipes/develop/docs/install/d
|
||||
{% include "./docker/traefik-nginx/docker-compose.yml" %}
|
||||
~~~
|
||||
|
||||
!!!note
|
||||
Don't forget to [download and configure](#docker-compose) your ```.env``` file!
|
||||
|
||||
#### **jwilder's Nginx-proxy**
|
||||
|
||||
@@ -115,25 +134,206 @@ wget https://raw.githubusercontent.com/vabene1111/recipes/develop/docs/install/d
|
||||
{% include "./docker/nginx-proxy/docker-compose.yml" %}
|
||||
~~~
|
||||
|
||||
## **DockSTARTer**
|
||||
!!!note
|
||||
Don't forget to [download and configure](#docker-compose) your ```.env``` file!
|
||||
|
||||
The main goal of [DockSTARTer](https://dockstarter.com/) is to make it quick and easy to get up and running with Docker.
|
||||
You may choose to rely on DockSTARTer for various changes to your Docker system or use DockSTARTer as a stepping stone and learn to do more advanced configurations.
|
||||
Follow the guide for installing DockSTARTer and then run `ds` then select 'Configuration' and 'Select Apps' to get Tandoor up and running quickly and easily.
|
||||
#### **Nginx Swag by LinuxServer**
|
||||
|
||||
[This container](https://github.com/linuxserver/docker-swag) is an all in one solution created by LinuxServer.io.
|
||||
|
||||
It contains templates for popular apps, including Tandoor Recipes, so you don't have to manually configure nginx and discard the template provided in Tandoor repo. Tandoor config is called `recipes.subdomain.conf.sample` which you can adapt for your instance.
|
||||
|
||||
If you're running Swag on the default port, you'll just need to change the container name to yours.
|
||||
|
||||
If you're running Swag on a custom port, some headers must be changed:
|
||||
|
||||
- Create a copy of `proxy.conf`
|
||||
- Replace `proxy_set_header X-Forwarded-Host $host;` and `proxy_set_header Host $host;` to
|
||||
- `proxy_set_header X-Forwarded-Host $http_host;` and `proxy_set_header Host $http_host;`
|
||||
- Update `recipes.subdomain.conf` to use the new file
|
||||
- Restart the linuxserver/swag container and Recipes will work correctly
|
||||
|
||||
More information [here](https://github.com/TandoorRecipes/recipes/issues/959#issuecomment-962648627).
|
||||
|
||||
In both cases, also make sure to mount `/media/` in your swag container to point to your Tandoor Recipes Media directory.
|
||||
|
||||
Please refer to the [appropriate documentation](https://github.com/linuxserver/docker-swag#usage) for the container setup.
|
||||
|
||||
For step-by-step instructions to set this up from scratch, see [this example](swag.md).
|
||||
|
||||
#### **Pure Nginx**
|
||||
|
||||
If you have Nginx installed locally on your host system without using any third party integration like Swag or similar, this is for you.
|
||||
|
||||
You can use the Docker-Compose file from [Plain](#plain).
|
||||
!!!warning "Adjust Docker-Compose file"
|
||||
Replace `80:80` with `PORT:80` with PORT being your desired outward-facing port.
|
||||
In the nginx config example below, 8080 is used.
|
||||
|
||||
An example configuration with LetsEncrypt to get you started can be seen below.
|
||||
Please note, that since every setup is different, you might need to adjust some things.
|
||||
|
||||
!!!warning "Placeholders"
|
||||
Don't forget to replace the domain and port.
|
||||
|
||||
```nginx
|
||||
server {
|
||||
if ($host = recipes.mydomain.tld) { # replace domain
|
||||
return 301 https://$host$request_uri;
|
||||
}
|
||||
|
||||
server_name recipes.mydomain.tld; # replace domain
|
||||
listen 80;
|
||||
return 404;
|
||||
}
|
||||
server {
|
||||
server_name recipes.mydomain.tld; # replace domain
|
||||
listen 443 ssl;
|
||||
|
||||
ssl_certificate /etc/letsencrypt/live/recipes.mydomain.tld/fullchain.pem; # replace domain
|
||||
ssl_certificate_key /etc/letsencrypt/live/recipes.mydomain.tld/privkey.pem; # replace domain
|
||||
include /etc/letsencrypt/options-ssl-nginx.conf;
|
||||
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
|
||||
|
||||
location / {
|
||||
proxy_set_header Host $http_host; # try $host instead if this doesn't work
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_pass http://127.0.0.1:8080; # replace port
|
||||
proxy_redirect http://127.0.0.1:8080 https://recipes.domain.tld; # replace port and domain
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Tandoor does not support serving images directly, as explained in the [Nginx vs Gunicorn](#nginx-vs-gunicorn) section. If you are already using nginx to serve as a reverse proxy, you can configure it to serve images as well.
|
||||
|
||||
Add the following directly after the `location /` context:
|
||||
|
||||
```
|
||||
location /media/ {
|
||||
root /media/;
|
||||
index index.html index.htm;
|
||||
}
|
||||
```
|
||||
|
||||
Make sure you also update your `docker-compose.yml` file to mount the `mediafiles` directory. If you are using the [Plain](#plain) deployment, you do not need to make any changes. If you are using nginx to act as a reverse proxy for other apps, it may not be optimal to have `mediafiles` mounted to `/media`. In that case, adjust the directory declarations as needed, utilizing nginx's [`alias`](https://nginx.org/en/docs/http/ngx_http_core_module.html#alias) if needed.
|
||||
|
||||
!!!note
|
||||
DockSTARTer might not be updated for Tandoor 2 configurations
|
||||
Use `alias` if your mount point directory is not the same as the URL request path. Tandoor media files are requested from `$http_host/media/recipes/xxx.jpg`. This means if you are mounting to a directory that does **NOT** end in `./media`, you will need to use `alias`.
|
||||
|
||||
!!!note
|
||||
Don't forget to [download and configure](#docker-compose) your ```.env``` file!
|
||||
|
||||
#### **Apache**
|
||||
|
||||
You can use the Docker-Compose file from [Plain](#plain).
|
||||
!!!warning "Adjust Docker-Compose file"
|
||||
Replace `80:80` with `PORT:80` with PORT being your desired outward-facing port.
|
||||
In the Apache config example below, 8080 is used.
|
||||
|
||||
If you use e.g. LetsEncrypt for SSL encryption, you can use the example configuration from [solaris7590](https://github.com/TandoorRecipes/recipes/issues/1312#issuecomment-1020034375) below.
|
||||
|
||||
!!!warning "Placeholders"
|
||||
Don't forget to replace the domain and port.
|
||||
|
||||
```apache
|
||||
<IfModule mod_ssl.c>
|
||||
<VirtualHost *:80>
|
||||
ServerAdmin webmaster@mydomain.de # replace domain
|
||||
ServerName mydomain.de # replace domain
|
||||
|
||||
Redirect permanent / https://mydomain.de/ # replace domain
|
||||
</VirtualHost>
|
||||
|
||||
<VirtualHost *:443>
|
||||
ServerAdmin webmaster@mydomain.de # replace domain
|
||||
ServerName mydomain.de # replace domain
|
||||
|
||||
SSLEngine on
|
||||
|
||||
RequestHeader set X-Forwarded-Proto "https"
|
||||
Header always set Access-Control-Allow-Origin "*"
|
||||
|
||||
ProxyPreserveHost On
|
||||
ProxyRequests Off
|
||||
ProxyPass / http://localhost:8080/ # replace port
|
||||
ProxyPassReverse / http://localhost:8080/ # replace port
|
||||
|
||||
SSLCertificateFile /etc/letsencrypt/live/mydomain.de/fullchain.pem # replace domain/path
|
||||
SSLCertificateKeyFile /etc/letsencrypt/live/mydomain.de/privkey.pem # replace domain/path
|
||||
Include /etc/letsencrypt/options-ssl-apache.conf
|
||||
|
||||
ErrorLog ${APACHE_LOG_DIR}/recipes_error.log
|
||||
CustomLog ${APACHE_LOG_DIR}/recipes_access.log combined
|
||||
</VirtualHost>
|
||||
</IfModule>
|
||||
```
|
||||
|
||||
If you're having issues with the example configuration above, you can try [beedaddy](https://github.com/TandoorRecipes/recipes/issues/1312#issuecomment-1015252663)'s example config.
|
||||
|
||||
!!!note
|
||||
Don't forget to [download and configure](#docker-compose) your ```.env``` file!
|
||||
|
||||
#### **Others**
|
||||
|
||||
If you use none of the above mentioned reverse proxies or want to use an existing one on your host machine (like a local nginx or Caddy), simply use the [Plain](#plain) setup above and change the outbound port to one of your liking.
|
||||
|
||||
An example port config (inside the respective docker-compose.yml) would be: `8123:80` instead of the `80:80` or if you want to be sure, that Tandoor is **just** accessible via your proxy and don't wanna bother with your firewall, then `127.0.0.1:8123:80` is a viable option too.
|
||||
|
||||
!!!note
|
||||
Don't forget to [download and configure](#docker-compose) your ```.env``` file!
|
||||
|
||||
## **Additional Information**
|
||||
|
||||
### **Nginx vs Gunicorn**
|
||||
|
||||
All examples use an additional `nginx` container to serve mediafiles and act as the forward facing webserver.
|
||||
This is **technically not required** but **very much recommended**.
|
||||
|
||||
I do not 100% understand the deep technical details but the [developers of gunicorn](https://serverfault.com/questions/331256/why-do-i-need-nginx-and-something-like-gunicorn/331263#331263),
|
||||
the WSGi server that handles the Python execution, explicitly state that it is not recommended to deploy without nginx.
|
||||
You will also likely not see any decrease in performance or a lot of space used as nginx is a very light container.
|
||||
|
||||
!!! info
|
||||
Even if you run behind a reverse proxy as described above, using an additional nginx container is the recommended option.
|
||||
|
||||
If you run a small private deployment and don't care about performance, security and whatever else feel free to run
|
||||
without a nginx container.
|
||||
|
||||
!!! warning
|
||||
When running without nginx make sure to enable `GUNICORN_MEDIA` in the `.env`. Without it, media files will be uploaded
|
||||
but not shown on the page.
|
||||
|
||||
For additional information please refer to the [0.9.0 Release](https://github.com/vabene1111/recipes/releases?after=0.9.0)
|
||||
and [Issue 201](https://github.com/vabene1111/recipes/issues/201) where these topics have been discussed.
|
||||
See also the [official gunicorn docs](https://docs.gunicorn.org/en/stable/deploy.html).
|
||||
|
||||
### **Nginx Config**
|
||||
Starting with Tandoor 2 the Docker container includes a nginx service. Its default configuration is pulled from the [http.d](https://github.com/TandoorRecipes/recipes/tree/develop/http.d) folder
|
||||
in the repository.
|
||||
|
||||
You can setup a volume to link to the ```/opt/recipes/http.d``` folder inside your container to change the configuration. Keep in mind that you will not receive any updates on the configuration
|
||||
if you manually change it/bind the folder as a volume.
|
||||
In order to give the user (you) the greatest amount of freedom when choosing how to deploy this application the
|
||||
webserver is not directly bundled with the Docker image.
|
||||
|
||||
This has the downside that it is difficult to supply the configuration to the webserver (e.g. nginx). Up until
|
||||
version `0.13.0`, this had to be done manually by downloading the nginx config file and placing it in a directory that
|
||||
was then mounted into the nginx container.
|
||||
|
||||
From version `0.13.0`, the config file is supplied using the application image (`vabene1111/recipes`). It is then mounted
|
||||
to the host system and from there into the nginx container.
|
||||
|
||||
This is not really a clean solution, but I could not find any better alternative that provided the same amount of
|
||||
usability. If you know of any better way, feel free to open an issue.
|
||||
|
||||
### **Volumes vs Bind Mounts**
|
||||
|
||||
Since I personally prefer to have my data where my `docker-compose.yml` resides, bind mounts are used in the example
|
||||
configuration files for all user generated data (e.g. Postgresql and media files).
|
||||
|
||||
!!!warning
|
||||
Please note that [there is a difference in functionality](https://docs.docker.com/storage/volumes/)
|
||||
between the two and you cannot always simply interchange them.
|
||||
|
||||
You can move everything to volumes if you prefer it this way, **but you cannot convert the nginx config file to a bind
|
||||
mount.**
|
||||
If you do so you will have to manually create the nginx config file and restart the container once after creating it.
|
||||
|
||||
### **Required Headers**
|
||||
|
||||
@@ -162,13 +362,12 @@ ProxyPassReverse / http://localhost:8080/ # replace port
|
||||
|
||||
### **Setup issues on Raspberry Pi**
|
||||
|
||||
!!! danger
|
||||
    Tandoor 2 no longer builds images for arm/v7 architectures. You can certainly get Tandoor working there but it has simply been too much effort to maintain these architectures over the past years
|
||||
to justify the continued support of this mostly deprecated platform.
|
||||
|
||||
!!!info
|
||||
Always wait at least 2-3 minutes after the very first start, since migrations will take some time!
|
||||
|
||||
!!!info
|
||||
In the past there was a special `*-raspi` version of the image. This no longer exists. The normal Tags all support Arm/v7 architectures which should work on all Raspberry Pi's above Version 1 and the first generation Zero.
|
||||
See [Wikipedia Raspberry Pi specifications](https://en.wikipedia.org/wiki/Raspberry_Pi#Specifications).
|
||||
|
||||
If you're having issues with installing Tandoor on your Raspberry Pi or similar device,
|
||||
follow these instructions:
|
||||
@@ -181,7 +380,7 @@ follow these instructions:
|
||||
|
||||
### Sub Path nginx config
|
||||
|
||||
If hosting under a sub-path you might want to change the default nginx config
|
||||
If hosting under a sub-path you might want to change the default nginx config (which gets mounted through the named volume from the application container into the nginx container)
|
||||
with the following config.
|
||||
|
||||
```nginx
|
||||
@@ -208,10 +407,3 @@ location /static/ {
|
||||
|
||||
}
|
||||
```
|
||||
### Tandoor 1 vs Tandoor 2
|
||||
Tandoor 1 includes gunicorn, a python WSGI server that handles python code well but is not meant to serve mediafiles. Thus, it has always been recommended to set up a nginx webserver
|
||||
(not just a reverse proxy) in front of Tandoor to handle mediafiles. The gunicorn server by default is exposed on port 8080.
|
||||
|
||||
Tandoor 2 now occasionally bundles nginx inside the container and exposes port 80 where mediafiles are handled by nginx and all the other requests are (mostly) passed to gunicorn.
|
||||
|
||||
A [GitHub Issue](https://github.com/TandoorRecipes/recipes/issues/3851) has been created to allow for discussions and FAQ's on this issue while this change is fresh. It will later be updated in the docs here if necessary.
|
||||
@@ -16,11 +16,28 @@ services:
|
||||
- ./.env
|
||||
volumes:
|
||||
- staticfiles:/opt/recipes/staticfiles
|
||||
# Do not make this a bind mount, see https://docs.tandoor.dev/install/docker/#volumes-vs-bind-mounts
|
||||
- nginx_config:/opt/recipes/nginx/conf.d
|
||||
- ./mediafiles:/opt/recipes/mediafiles
|
||||
depends_on:
|
||||
- db_recipes
|
||||
networks:
|
||||
- default
|
||||
|
||||
nginx_recipes:
|
||||
image: nginx:mainline-alpine
|
||||
restart: always
|
||||
env_file:
|
||||
- ./.env
|
||||
depends_on:
|
||||
- web_recipes
|
||||
volumes:
|
||||
# Do not make this a bind mount, see https://docs.tandoor.dev/install/docker/#volumes-vs-bind-mounts
|
||||
- nginx_config:/etc/nginx/conf.d:ro
|
||||
- staticfiles:/static:ro
|
||||
- ./mediafiles:/media:ro
|
||||
networks:
|
||||
- default
|
||||
- nginx-proxy
|
||||
|
||||
networks:
|
||||
@@ -30,4 +47,5 @@ networks:
|
||||
name: nginx-proxy
|
||||
|
||||
volumes:
|
||||
nginx_config:
|
||||
staticfiles:
|
||||
|
||||
@@ -14,9 +14,27 @@ services:
|
||||
- ./.env
|
||||
volumes:
|
||||
- staticfiles:/opt/recipes/staticfiles
|
||||
# Do not make this a bind mount, see https://docs.tandoor.dev/install/docker/#volumes-vs-bind-mounts
|
||||
- nginx_config:/opt/recipes/nginx/conf.d
|
||||
- ./mediafiles:/opt/recipes/mediafiles
|
||||
depends_on:
|
||||
- db_recipes
|
||||
|
||||
nginx_recipes:
|
||||
image: nginx:mainline-alpine
|
||||
restart: always
|
||||
ports:
|
||||
- 80:80
|
||||
env_file:
|
||||
- ./.env
|
||||
depends_on:
|
||||
- web_recipes
|
||||
volumes:
|
||||
# Do not make this a bind mount, see https://docs.tandoor.dev/install/docker/#volumes-vs-bind-mounts
|
||||
- nginx_config:/etc/nginx/conf.d:ro
|
||||
- staticfiles:/static:ro
|
||||
- ./mediafiles:/media:ro
|
||||
|
||||
volumes:
|
||||
nginx_config:
|
||||
staticfiles:
|
||||
|
||||
@@ -16,23 +16,40 @@ services:
|
||||
- ./.env
|
||||
volumes:
|
||||
- staticfiles:/opt/recipes/staticfiles
|
||||
# Do not make this a bind mount, see https://docs.tandoor.dev/install/docker/#volumes-vs-bind-mounts
|
||||
- nginx_config:/opt/recipes/nginx/conf.d
|
||||
- ./mediafiles:/opt/recipes/mediafiles
|
||||
depends_on:
|
||||
- db_recipes
|
||||
networks:
|
||||
- default
|
||||
|
||||
nginx_recipes:
|
||||
image: nginx:mainline-alpine
|
||||
restart: always
|
||||
env_file:
|
||||
- ./.env
|
||||
volumes:
|
||||
# Do not make this a bind mount, see https://docs.tandoor.dev/install/docker/#volumes-vs-bind-mounts
|
||||
- nginx_config:/etc/nginx/conf.d:ro
|
||||
- staticfiles:/static:ro
|
||||
- ./mediafiles:/media:ro
|
||||
labels: # traefik example labels
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.recipes.rule=Host(`recipes.mydomain.com`, `recipes.myotherdomain.com`)"
|
||||
- "traefik.http.routers.recipes.entrypoints=web_secure" # your https endpoint
|
||||
- "traefik.http.routers.recipes.tls.certresolver=le_resolver" # your cert resolver
|
||||
depends_on:
|
||||
- web_recipes
|
||||
networks:
|
||||
- default
|
||||
- traefik
|
||||
|
||||
|
||||
networks:
|
||||
default:
|
||||
traefik: # This is your external traefik network
|
||||
external: true
|
||||
|
||||
volumes:
|
||||
nginx_config:
|
||||
staticfiles:
|
||||
|
||||
@@ -3,9 +3,6 @@
|
||||
Many thanks to [alexbelgium](https://github.com/alexbelgium) for making implementing everything required to have
|
||||
Tandoor run in HA.
|
||||
|
||||
!!! danger "Tandoor 2 Compatibility"
|
||||
This guide has not been verified/tested for Tandoor 2, which now integrates a nginx service inside the default docker container and exposes its service on port 80 instead of 8080.
|
||||
|
||||
  ![aarch64][aarch64-badge] ![amd64][amd64-badge] ![armv7][armv7-badge]
|
||||
|
||||
### Introduction
|
||||
|
||||
@@ -63,6 +63,7 @@ spec:
|
||||
source venv/bin/activate
|
||||
echo "Updating database"
|
||||
python manage.py migrate
|
||||
python manage.py collectstatic_js_reverse
|
||||
python manage.py collectstatic --noinput
|
||||
echo "Setting media file attributes"
|
||||
chown -R 65534:65534 /opt/recipes/mediafiles
|
||||
|
||||
@@ -1,12 +1,9 @@
|
||||
!!! info "Community Contributed"
|
||||
This guide was contributed by the community and is neither officially supported, nor updated or tested.
|
||||
|
||||
!!! danger "Tandoor 2 Compatibility"
|
||||
This guide has not been verified/tested for Tandoor 2, which now integrates a nginx service inside the default docker container and exposes its service on port 80 instead of 8080.
|
||||
|
||||
## K8s Setup
|
||||
|
||||
This is a setup which should be sufficient for production use. Be sure to replace the default secrets! You can find the example files [here](https://github.com/MyDigitalLife/recipes/tree/fix-k8s-documentation/docs/install/k8s) on Github.
|
||||
This is a setup which should be sufficient for production use. Be sure to replace the default secrets!
|
||||
|
||||
## Files
|
||||
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
!!! info "Community Contributed"
|
||||
This guide was contributed by the community and is neither officially supported, nor updated or tested.
|
||||
|
||||
!!! danger "Tandoor 2 Compatibility"
|
||||
This guide has not been verified/tested for Tandoor 2, which now integrates a nginx service inside the default docker container and exposes its service on port 80 instead of 8080.
|
||||
|
||||
[KubeSail](https://kubesail.com/) lets you install Tandoor by providing a simple web interface for installing and managing apps. You can connect any server running Kubernetes, or get a pre-configured [PiBox](https://pibox.io).
|
||||
|
||||
<!-- A portion of every PiBox sale goes toward supporting Tandoor development. -->
|
||||
|
||||
@@ -5,9 +5,6 @@ These instructions are inspired from a standard django/gunicorn/postgresql instr
|
||||
!!! warning
|
||||
    Make sure to use Python 3.10 or higher (3.12 is preferred), and ensure that `pip` is associated with Python 3. Depending on your system configuration, using `python` or `pip` might default to Python 2. Make sure your machine has at least 2048 MB of memory; otherwise, the `yarn build` process may fail with the error: `FATAL ERROR: Reached heap limit - Allocation failed: JavaScript heap out of memory`.
|
||||
|
||||
!!! warning
|
||||
These instructions are **not** regularly reviewed and might be outdated.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Setup user: `sudo useradd recipes`
|
||||
@@ -196,7 +193,6 @@ server {
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_pass http://unix:/var/www/recipes/recipes.sock;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header X-Forwarded-For $remote_addr;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
This page especially contains some setups that might help you if you really want to go down a certain path but none
|
||||
of the examples are supported (as I simply am not able to give you support for them).
|
||||
|
||||
!!! danger "Tandoor 2 Compatibility"
|
||||
This guide has not been verified/tested for Tandoor 2, which now integrates a nginx service inside the default docker container and exposes its service on port 80 instead of 8080.
|
||||
|
||||
## Apache + Traefik + Sub-Path
|
||||
|
||||
@@ -37,6 +35,7 @@ docker-compose:
|
||||
environment:
|
||||
# all the other env
|
||||
- SCRIPT_NAME=/<sub path>
|
||||
- JS_REVERSE_SCRIPT_PREFIX=/<sub path>/
|
||||
- STATIC_URL=/<www path>/static/
|
||||
- MEDIA_URL=/<www path>/media/
|
||||
labels:
|
||||
@@ -101,6 +100,7 @@ and the relevant section from the docker-compose.yml:
|
||||
image: vabene1111/recipes
|
||||
environment:
|
||||
- SCRIPT_NAME=/tandoor
|
||||
- JS_REVERSE_SCRIPT_PREFIX=/tandoor
|
||||
- STATIC_URL=/tandoor/static/
|
||||
- MEDIA_URL=/tandoor/media/
|
||||
- GUNICORN_MEDIA=0
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
!!! danger
|
||||
Please refer to the [official documentation](https://github.com/linuxserver/docker-swag#usage) for the container setup. This example shows just one setup that may or may not differ from yours in significant ways. This tutorial does not cover security measures, backups, and many other things that you might want to consider.
|
||||
|
||||
!!! danger "Tandoor 2 Compatibility"
|
||||
This guide has not been verified/tested for Tandoor 2, which now integrates a nginx service inside the default docker container and exposes its service on port 80 instead of 8080.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- You have a newly spun-up Ubuntu server with docker (pre-)installed.
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
!!! info "Community Contributed"
|
||||
This guide was contributed by the community and is neither officially supported, nor updated or tested. Since I cannot test it myself, feedback and improvements are always very welcome.
|
||||
|
||||
!!! danger "Tandoor 2 Compatibility"
|
||||
This guide has not been verified/tested for Tandoor 2, which now integrates a nginx service inside the default docker container and exposes its service on port 80 instead of 8080.
|
||||
|
||||
## **Instructions**
|
||||
|
||||
Basic guide to set up the `vabene1111/recipes` docker container on Synology NAS.
|
||||
|
||||
60
docs/install/traefik.md
Normal file
60
docs/install/traefik.md
Normal file
@@ -0,0 +1,60 @@
|
||||
!!! danger
|
||||
Please refer to [the official documentation](https://doc.traefik.io/traefik/).
|
||||
This example just shows something similar to my setup in case you dont understand the official documentation.
|
||||
|
||||
You need to create a network called `traefik` using `docker network create traefik`.
|
||||
## docker-compose.yml
|
||||
|
||||
```
|
||||
version: "3.3"
|
||||
|
||||
services:
|
||||
|
||||
traefik:
|
||||
image: "traefik:v2.1"
|
||||
container_name: "traefik"
|
||||
ports:
|
||||
- "443:443"
|
||||
- "80:80"
|
||||
- "8080:8080"
|
||||
volumes:
|
||||
- "./letsencrypt:/letsencrypt"
|
||||
- "/var/run/docker.sock:/var/run/docker.sock:ro"
|
||||
- "./config:/etc/traefik/"
|
||||
|
||||
|
||||
networks:
|
||||
default:
|
||||
external:
|
||||
name: traefik
|
||||
```
|
||||
|
||||
## traefik.toml
|
||||
Place this in a directory called `config` as this is mounted into the traefik container (see docer compose).
|
||||
**Change the email address accordingly**.
|
||||
```
|
||||
[api]
|
||||
insecure=true
|
||||
|
||||
[providers.docker]
|
||||
endpoint = "unix:///var/run/docker.sock"
|
||||
exposedByDefault = false
|
||||
network = "traefik"
|
||||
|
||||
#[log]
|
||||
# level = "DEBUG"
|
||||
|
||||
[entryPoints]
|
||||
[entryPoints.web]
|
||||
address = ":80"
|
||||
|
||||
[entryPoints.web_secure]
|
||||
address = ":443"
|
||||
|
||||
[certificatesResolvers.le_resolver.acme]
|
||||
|
||||
email = "you_email@mail.com"
|
||||
storage = "/letsencrypt/acme.json"
|
||||
|
||||
tlsChallenge=true
|
||||
```
|
||||
@@ -1,9 +1,6 @@
|
||||
!!! info "Community Contributed"
|
||||
This guide was contributed by the community and is neither officially supported, nor updated or tested.
|
||||
|
||||
!!! danger "Tandoor 2 Compatibility"
|
||||
This guide has not been verified/tested for Tandoor 2, which now integrates a nginx service inside the default docker container and exposes its service on port 80 instead of 8080.
|
||||
|
||||
This guide is to assist those installing Tandoor Recipes on Truenas Core using Docker and or Portainer
|
||||
|
||||
Docker install instructions adapted from [PhasedLogix IT Services's guide](https://getmethegeek.com/blog/2021-01-07-add-docker-capabilities-to-truenas-core/). Portainer install instructions adopted from the [Portainer Official Documentation](https://docs.portainer.io/start/install-ce/server/docker/linux). Tandoor installation on Portainer provided by users `Szeraax` and `TransatlanticFoe` on Discord (Thank you two!)
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
!!! info "Community Contributed"
|
||||
This guide was contributed by the community and is neither officially supported, nor updated or tested.
|
||||
|
||||
!!! danger "Tandoor 2 Compatibility"
|
||||
This guide has not been verified/tested for Tandoor 2, which now integrates a nginx service inside the default docker container and exposes its service on port 80 instead of 8080.
|
||||
|
||||
[Unraid](https://unraid.net/) is an operating system that allows you to easily install and setup applications.
|
||||
|
||||
Thanks to [CorneliousJD](https://github.com/CorneliousJD) this application can easily be installed using unraid.
|
||||
|
||||
@@ -1,10 +1,3 @@
|
||||
!!! info "Community Contributed"
|
||||
This guide was contributed by the community and is neither officially supported, nor updated or tested. Since I cannot test it myself, feedback and improvements are always very welcome.
|
||||
|
||||
|
||||
!!! danger "Tandoor 2 Compatibility"
|
||||
This guide has not been verified/tested for Tandoor 2, which now integrates a nginx service inside the default docker container and exposes its service on port 80 instead of 8080.
|
||||
|
||||
# Ubuntu Installation on Windows (WSL) and Docker Desktop
|
||||
|
||||
Install Docker from https://docs.docker.com/desktop/install/windows-install/
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
:root > * {
|
||||
--md-primary-fg-color: #ddbf86;
|
||||
--md-accent-fg-color: #b55e4f;
|
||||
:root {
|
||||
--md-primary-fg-color: #ffcb76;
|
||||
--md-accent-fg-color: #FF6F00;
|
||||
|
||||
--md-primary-fg-color--light: #ddbf86;
|
||||
--md-primary-fg-color--light: #ffcb76;
|
||||
|
||||
/* not working part, has no effect */
|
||||
--md-primary-bg-color: #121212;
|
||||
--md-default-bg-color: #121212;
|
||||
--md-default-bg-color--light: #f5efea;
|
||||
--md-default-bg-color--lighter: #f5efea;
|
||||
--md-default-bg-color--lightest: #f5efea;
|
||||
--md-primary-bg-color: #272727;
|
||||
--md-default-bg-color: #272727;
|
||||
--md-default-bg-color--light: #272727;
|
||||
--md-default-bg-color--lighter: #272727;
|
||||
--md-default-bg-color--lightest: #272727;
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -472,20 +472,15 @@ S3_CUSTOM_DOMAIN= # when using a CDN/proxy to S3 (see https://github.com/Tandoor
|
||||
|
||||
#### AI Integration
|
||||
|
||||
Most AI features are configured trough the AI Provider settings in the Tandoor web interface. Some defaults can be set for new spaces on your instance.
|
||||
To use AI to perform different tasks you need to configure an API key and the AI provider. [LiteLLM](https://www.litellm.ai/) is used
|
||||
to make a standardized request to different AI providers of your liking.
|
||||
|
||||
Enables AI features for spaces by default
|
||||
```
|
||||
SPACE_AI_ENABLED=1
|
||||
```
|
||||
Configuring this via environment parameters is a temporary solution. In the future I plan on adding support for multiple AI providers per Tandoor instance
|
||||
with the option to select them for various tasks. For now only gemini 2.0 flash has been tested but feel free to try out other models.
|
||||
|
||||
Sets the monthly default credit limit for AI usage
|
||||
```
|
||||
SPACE_AI_CREDITS_MONTHLY=100
|
||||
```
|
||||
|
||||
Ratelimit for AI API
|
||||
```
|
||||
AI_API_KEY=
|
||||
AI_MODEL_NAME=gemini/gemini-2.0-flash
|
||||
AI_RATELIMIT=60/hour
|
||||
```
|
||||
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
server {
|
||||
listen 80;
|
||||
listen [::]:80 ipv6only=on;
|
||||
server_name localhost;
|
||||
|
||||
client_max_body_size 128M;
|
||||
|
||||
# serve media files
|
||||
location /media {
|
||||
alias ${MEDIA_ROOT};
|
||||
add_header Content-Disposition 'attachment; filename="$args"';
|
||||
}
|
||||
|
||||
# serve service worker under main path
|
||||
location = /service-worker.js {
|
||||
alias ${STATIC_ROOT}/vue3/service-worker.js;
|
||||
}
|
||||
|
||||
# pass requests for dynamic content to gunicorn
|
||||
location / {
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_pass http://localhost:${TANDOOR_PORT};
|
||||
error_page 502 /errors/http502.html;
|
||||
}
|
||||
|
||||
location /errors/ {
|
||||
alias /etc/nginx/conf.d/errorpages/;
|
||||
internal;
|
||||
}
|
||||
}
|
||||
@@ -1,20 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<!-- Simple HttpErrorPages | MIT License | https://github.com/HttpErrorPages -->
|
||||
<meta charset="utf-8" /><meta http-equiv="X-UA-Compatible" content="IE=edge" /><meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<title>502 - Webservice currently unavailable</title>
|
||||
<style type="text/css">/*! normalize.css v5.0.0 | MIT License | github.com/necolas/normalize.css */html{font-family:sans-serif;line-height:1.15;-ms-text-size-adjust:100%;-webkit-text-size-adjust:100%}body{margin:0}article,aside,footer,header,nav,section{display:block}h1{font-size:2em;margin:.67em 0}figcaption,figure,main{display:block}figure{margin:1em 40px}hr{box-sizing:content-box;height:0;overflow:visible}pre{font-family:monospace,monospace;font-size:1em}a{background-color:transparent;-webkit-text-decoration-skip:objects}a:active,a:hover{outline-width:0}abbr[title]{border-bottom:none;text-decoration:underline;text-decoration:underline dotted}b,strong{font-weight:inherit}b,strong{font-weight:bolder}code,kbd,samp{font-family:monospace,monospace;font-size:1em}dfn{font-style:italic}mark{background-color:#ff0;color:#000}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}audio,video{display:inline-block}audio:not([controls]){display:none;height:0}img{border-style:none}svg:not(:root){overflow:hidden}button,input,optgroup,select,textarea{font-family:sans-serif;font-size:100%;line-height:1.15;margin:0}button,input{overflow:visible}button,select{text-transform:none}[type=reset],[type=submit],button,html [type=button]{-webkit-appearance:button}[type=button]::-moz-focus-inner,[type=reset]::-moz-focus-inner,[type=submit]::-moz-focus-inner,button::-moz-focus-inner{border-style:none;padding:0}[type=button]:-moz-focusring,[type=reset]:-moz-focusring,[type=submit]:-moz-focusring,button:-moz-focusring{outline:1px dotted ButtonText}fieldset{border:1px solid silver;margin:0 2px;padding:.35em .625em 
.75em}legend{box-sizing:border-box;color:inherit;display:table;max-width:100%;padding:0;white-space:normal}progress{display:inline-block;vertical-align:baseline}textarea{overflow:auto}[type=checkbox],[type=radio]{box-sizing:border-box;padding:0}[type=number]::-webkit-inner-spin-button,[type=number]::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}[type=search]::-webkit-search-cancel-button,[type=search]::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}details,menu{display:block}summary{display:list-item}canvas{display:inline-block}template{display:none}[hidden]{display:none}/*! Simple HttpErrorPages | MIT X11 License | https://github.com/AndiDittrich/HttpErrorPages */body,html{width:100%;height:100%;background-color:#21232a}body{color:#fff;text-align:center;text-shadow:0 2px 4px rgba(0,0,0,.5);padding:0;min-height:100%;-webkit-box-shadow:inset 0 0 100px rgba(0,0,0,.8);box-shadow:inset 0 0 100px rgba(0,0,0,.8);display:table;font-family:"Open Sans",Arial,sans-serif}h1{font-family:inherit;font-weight:500;line-height:1.1;color:inherit;font-size:36px}h1 small{font-size:68%;font-weight:400;line-height:1;color:#777}a{text-decoration:none;color:#fff;font-size:inherit;border-bottom:dotted 1px #707070}.lead{color:silver;font-size:21px;line-height:1.4}.cover{display:table-cell;vertical-align:middle;padding:0 20px}footer{position:fixed;width:100%;height:40px;left:0;bottom:0;color:#a0a0a0;font-size:14px}</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="cover"><h1>Tandoor Recipes is not yet available <small>502</small></h1>
|
||||
<p class="lead">
|
||||
Services are still trying to start.<br>
|
||||
Please allow up to 3 minutes after you started the application on your server.<br><br>
|
||||
If this status persists, check the application or docker logs for further information.<br>
|
||||
After checking and trying everything mentioned in the <a href="https://docs.tandoor.dev/" target="_blank">docs</a>, you can request help on the project's <a href="https://github.com/TandoorRecipes/recipes/issues/new?assignees=&labels=setup+issue&template=help_request.yml" target="_blank">GitHub</a> page.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
@@ -13,8 +13,6 @@ theme:
|
||||
favicon: logo_color.svg
|
||||
palette:
|
||||
scheme: slate
|
||||
primary: custom
|
||||
accent: custom
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -6,16 +6,10 @@ server {
|
||||
client_max_body_size 128M;
|
||||
|
||||
# serve media files
|
||||
location /media {
|
||||
alias /opt/recipes/mediafiles;
|
||||
location /media/ {
|
||||
alias /media/;
|
||||
add_header Content-Disposition 'attachment; filename="$args"';
|
||||
}
|
||||
|
||||
# serve service worker under main path
|
||||
location = /service-worker.js {
|
||||
alias /opt/recipes/staticfiles/vue3/service-worker.js;
|
||||
}
|
||||
|
||||
# pass requests for dynamic content to gunicorn
|
||||
location / {
|
||||
proxy_set_header Host $http_host;
|
||||
|
||||
5
package.json
Normal file
5
package.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"@vueuse/core": "^10.9.0"
|
||||
}
|
||||
}
|
||||
21
plugin.py
21
plugin.py
@@ -1,21 +0,0 @@
|
||||
import os
|
||||
import subprocess
|
||||
import traceback
|
||||
|
||||
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
#TODO clean existing links for when plugins are uninstalled or not necessary because it will just be empty links?
|
||||
|
||||
PLUGINS_DIRECTORY = os.path.join(BASE_DIR, 'recipes', 'plugins')
|
||||
if os.path.isdir(PLUGINS_DIRECTORY):
|
||||
for d in os.listdir(PLUGINS_DIRECTORY):
|
||||
if d != '__pycache__':
|
||||
try:
|
||||
subprocess.run(['python', 'setup_repo.py'], shell=(os.name == 'nt'), cwd=os.path.join(BASE_DIR, 'recipes', 'plugins', d))
|
||||
except Exception:
|
||||
traceback.print_exc()
|
||||
print(f'ERROR failed to link plugin {d}')
|
||||
|
||||
subprocess.run(['npm', 'install', '--global', 'yarn'], shell=(os.name == 'nt'), cwd=os.path.join(BASE_DIR, 'vue3'))
|
||||
subprocess.run(['yarn', 'install'], shell=(os.name == 'nt'), cwd=os.path.join(BASE_DIR, 'vue3'))
|
||||
subprocess.run(['yarn', 'build'], shell=(os.name == 'nt'), cwd=os.path.join(BASE_DIR, 'vue3'))
|
||||
@@ -4,5 +4,4 @@ testpaths = cookbook/tests
|
||||
python_files = tests.py test_*.py *_tests.py
|
||||
# uncomment to run coverage reports
|
||||
addopts = -n auto --cov=. --cov-report=html:docs/reports/coverage --cov-report=xml:docs/reports/coverage/coverage.xml --junitxml=docs/reports/tests/pytest.xml --html=docs/reports/tests/tests.html
|
||||
# addopts = -n auto --junitxml=docs/reports/tests/pytest.xml --html=docs/reports/tests/tests.html
|
||||
asyncio_default_fixture_loop_scope = fixture
|
||||
# addopts = -n auto --junitxml=docs/reports/tests/pytest.xml --html=docs/reports/tests/tests.html
|
||||
@@ -40,6 +40,9 @@ def extract_comma_list(env_key, default=None):
|
||||
load_dotenv()
|
||||
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
SCRIPT_NAME = os.getenv('SCRIPT_NAME', '')
|
||||
# path for django_js_reverse to generate the javascript file containing all urls. Only done because the default command (collectstatic_js_reverse) fails to update the manifest
|
||||
JS_REVERSE_OUTPUT_PATH = os.path.join(BASE_DIR, "cookbook/static/django_js_reverse")
|
||||
JS_REVERSE_SCRIPT_PREFIX = os.getenv('JS_REVERSE_SCRIPT_PREFIX', SCRIPT_NAME)
|
||||
|
||||
STATIC_URL = os.getenv('STATIC_URL', '/static/')
|
||||
STATIC_ROOT = os.getenv('STATIC_ROOT', os.path.join(BASE_DIR, "staticfiles"))
|
||||
@@ -59,8 +62,6 @@ SPACE_DEFAULT_MAX_RECIPES = int(os.getenv('SPACE_DEFAULT_MAX_RECIPES', 0))
|
||||
SPACE_DEFAULT_MAX_USERS = int(os.getenv('SPACE_DEFAULT_MAX_USERS', 0))
|
||||
SPACE_DEFAULT_MAX_FILES = int(os.getenv('SPACE_DEFAULT_MAX_FILES', 0))
|
||||
SPACE_DEFAULT_ALLOW_SHARING = extract_bool('SPACE_DEFAULT_ALLOW_SHARING', True)
|
||||
SPACE_AI_ENABLED = extract_bool('SPACE_AI_ENABLED', True)
|
||||
SPACE_AI_CREDITS_MONTHLY = int(os.getenv('SPACE_AI_CREDITS_MONTHLY', 10000))
|
||||
|
||||
INTERNAL_IPS = extract_comma_list('INTERNAL_IPS', '127.0.0.1')
|
||||
|
||||
@@ -85,10 +86,6 @@ LOGGING = {
|
||||
'handlers': ['console'],
|
||||
'level': LOG_LEVEL,
|
||||
},
|
||||
'django': {
|
||||
'handlers': ['console'],
|
||||
'level': LOG_LEVEL,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -139,6 +136,8 @@ HCAPTCHA_SECRET = os.getenv('HCAPTCHA_SECRET', '')
|
||||
|
||||
FDC_API_KEY = os.getenv('FDC_API_KEY', 'DEMO_KEY')
|
||||
|
||||
AI_API_KEY = os.getenv('AI_API_KEY', '')
|
||||
AI_MODEL_NAME = os.getenv('AI_MODEL_NAME', 'gemini/gemini-2.0-flash')
|
||||
AI_RATELIMIT = os.getenv('AI_RATELIMIT', '60/hour')
|
||||
|
||||
SHARING_ABUSE = extract_bool('SHARING_ABUSE', False)
|
||||
@@ -221,7 +220,10 @@ try:
|
||||
'module': f'recipes.plugins.{d}',
|
||||
'base_path': os.path.join(BASE_DIR, 'recipes', 'plugins', d),
|
||||
'base_url': plugin_class.base_url,
|
||||
'bundle_name': plugin_class.bundle_name if hasattr(plugin_class, 'bundle_name') else '',
|
||||
'api_router_name': plugin_class.api_router_name if hasattr(plugin_class, 'api_router_name') else '',
|
||||
'nav_main': plugin_class.nav_main if hasattr(plugin_class, 'nav_main') else '',
|
||||
'nav_dropdown': plugin_class.nav_dropdown if hasattr(plugin_class, 'nav_dropdown') else '',
|
||||
}
|
||||
PLUGINS.append(plugin_config)
|
||||
print(f'PLUGIN {d} loaded')
|
||||
@@ -243,6 +245,7 @@ ACCOUNT_EMAIL_CONFIRMATION_EXPIRE_DAYS = 90
|
||||
ACCOUNT_LOGOUT_ON_GET = True
|
||||
|
||||
USERSESSIONS_TRACK_ACTIVITY = True
|
||||
|
||||
HEADLESS_SERVE_SPECIFICATION = True
|
||||
|
||||
try:
|
||||
@@ -518,19 +521,31 @@ CACHES = {
|
||||
}
|
||||
}
|
||||
|
||||
if REDIS_HOST:
|
||||
CACHES['default'] = {
|
||||
'BACKEND': 'django.core.cache.backends.redis.RedisCache',
|
||||
'LOCATION': f'redis://{REDIS_HOST}:{REDIS_PORT}',
|
||||
}
|
||||
if REDIS_USERNAME and not REDIS_PASSWORD:
|
||||
CACHES['default']['LOCATION'] = f'redis://{REDIS_USERNAME}@{REDIS_HOST}:{REDIS_PORT}'
|
||||
if REDIS_USERNAME and REDIS_PASSWORD:
|
||||
CACHES['default']['LOCATION'] = f'redis://{REDIS_USERNAME}:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}'
|
||||
|
||||
# Vue webpack settings
|
||||
VUE_DIR = os.path.join(BASE_DIR, 'vue')
|
||||
|
||||
WEBPACK_LOADER = {
|
||||
'DEFAULT': {
|
||||
'CACHE': not DEBUG,
|
||||
'BUNDLE_DIR_NAME': 'vue/', # must end with slash
|
||||
'STATS_FILE': os.path.join(VUE_DIR, 'webpack-stats.json'),
|
||||
'POLL_INTERVAL': 0.1,
|
||||
'TIMEOUT': None,
|
||||
'IGNORE': [r'.+\.hot-update.js', r'.+\.map'],
|
||||
},
|
||||
}
|
||||
|
||||
for p in PLUGINS:
|
||||
if p['bundle_name'] != '':
|
||||
WEBPACK_LOADER[p['bundle_name']] = {
|
||||
'CACHE': not DEBUG,
|
||||
'BUNDLE_DIR_NAME': 'vue/', # must end with slash
|
||||
'STATS_FILE': os.path.join(p["base_path"], 'vue', 'webpack-stats.json'),
|
||||
'POLL_INTERVAL': 0.1,
|
||||
'TIMEOUT': None,
|
||||
'IGNORE': [r'.+\.hot-update.js', r'.+\.map'],
|
||||
}
|
||||
|
||||
DJANGO_VITE = {
|
||||
"default": {
|
||||
"dev_mode": False,
|
||||
@@ -565,6 +580,8 @@ else:
|
||||
|
||||
USE_I18N = True
|
||||
|
||||
USE_L10N = True
|
||||
|
||||
USE_TZ = True
|
||||
|
||||
LANGUAGES = [
|
||||
@@ -592,18 +609,8 @@ LANGUAGES = [
|
||||
|
||||
AWS_ENABLED = True if os.getenv('S3_ACCESS_KEY', False) else False
|
||||
|
||||
STORAGES = {
|
||||
"default": {
|
||||
"BACKEND": "django.core.files.storage.FileSystemStorage",
|
||||
},
|
||||
# Serve static files with gzip
|
||||
"staticfiles": {
|
||||
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
|
||||
},
|
||||
}
|
||||
|
||||
if os.getenv('S3_ACCESS_KEY', ''):
|
||||
STORAGES['default']['BACKEND'] = 'cookbook.helper.CustomStorageClass.CachedS3Boto3Storage'
|
||||
DEFAULT_FILE_STORAGE = 'cookbook.helper.CustomStorageClass.CachedS3Boto3Storage'
|
||||
|
||||
AWS_ACCESS_KEY_ID = os.getenv('S3_ACCESS_KEY', '')
|
||||
AWS_SECRET_ACCESS_KEY = os.getenv('S3_SECRET_ACCESS_KEY', '')
|
||||
@@ -618,9 +625,14 @@ if os.getenv('S3_ACCESS_KEY', ''):
|
||||
if os.getenv('S3_CUSTOM_DOMAIN', ''):
|
||||
AWS_S3_CUSTOM_DOMAIN = os.getenv('S3_CUSTOM_DOMAIN', '')
|
||||
|
||||
MEDIA_URL = os.getenv('MEDIA_URL', '/media/')
|
||||
MEDIA_ROOT = os.getenv('MEDIA_ROOT', os.path.join(BASE_DIR, "mediafiles"))
|
||||
else:
|
||||
MEDIA_URL = os.getenv('MEDIA_URL', '/media/')
|
||||
MEDIA_ROOT = os.getenv('MEDIA_ROOT', os.path.join(BASE_DIR, "mediafiles"))
|
||||
|
||||
MEDIA_URL = os.getenv('MEDIA_URL', '/media/')
|
||||
MEDIA_ROOT = os.getenv('MEDIA_ROOT', os.path.join(BASE_DIR, "mediafiles"))
|
||||
# Serve static files with gzip
|
||||
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
|
||||
|
||||
# settings for cross site origin (CORS)
|
||||
# all origins allowed to support bookmarklet
|
||||
@@ -662,5 +674,17 @@ ACCOUNT_RATE_LIMITS = {
|
||||
DISABLE_EXTERNAL_CONNECTORS = extract_bool('DISABLE_EXTERNAL_CONNECTORS', False)
|
||||
EXTERNAL_CONNECTORS_QUEUE_SIZE = int(os.getenv('EXTERNAL_CONNECTORS_QUEUE_SIZE', 100))
|
||||
|
||||
# ACCOUNT_SIGNUP_FORM_CLASS = 'cookbook.forms.AllAuthSignupForm'
|
||||
ACCOUNT_FORMS = {'signup': 'cookbook.forms.AllAuthSignupForm', 'reset_password': 'cookbook.forms.CustomPasswordResetForm'}
|
||||
|
||||
ACCOUNT_EMAIL_UNKNOWN_ACCOUNTS = False
|
||||
ACCOUNT_RATE_LIMITS = {
|
||||
"change_password": "1/m/user",
|
||||
"reset_password": "1/m/ip,1/m/key",
|
||||
"reset_password_from_key": "1/m/ip",
|
||||
"signup": "5/m/ip",
|
||||
"login": "5/m/ip",
|
||||
}
|
||||
|
||||
mimetypes.add_type("text/javascript", ".js", True)
|
||||
mimetypes.add_type("text/javascript", ".mjs", True)
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
Django==5.2.6
|
||||
Django==4.2.22
|
||||
cryptography===45.0.5
|
||||
django-annoying==0.10.6
|
||||
django-cleanup==9.0.0
|
||||
django-crispy-forms==2.4
|
||||
crispy-bootstrap4==2025.6
|
||||
djangorestframework==3.16.1
|
||||
djangorestframework==3.15.2
|
||||
drf-spectacular==0.27.1
|
||||
drf-spectacular-sidecar==2025.8.1
|
||||
drf-spectacular-sidecar==2025.7.1
|
||||
drf-writable-nested==0.7.2
|
||||
django-oauth-toolkit==2.4.0
|
||||
django-debug-toolbar==4.3.0
|
||||
@@ -14,9 +14,9 @@ bleach==6.2.0
|
||||
gunicorn==23.0.0
|
||||
lxml==5.3.1
|
||||
Markdown==3.7
|
||||
Pillow==11.3.0
|
||||
Pillow==11.1.0
|
||||
psycopg2-binary==2.9.10
|
||||
python-dotenv==1.1.1
|
||||
python-dotenv==1.0.0
|
||||
requests==2.32.4
|
||||
six==1.17.0
|
||||
webdavclient3==3.14.6
|
||||
@@ -26,25 +26,24 @@ pyyaml==6.0.2
|
||||
uritemplate==4.1.1
|
||||
beautifulsoup4==4.12.3
|
||||
microdata==0.8.0
|
||||
mock==5.2.0
|
||||
mock==5.1.0
|
||||
Jinja2==3.1.6
|
||||
django-allauth[mfa,socialaccount]==65.9.0
|
||||
recipe-scrapers==15.8.0
|
||||
recipe-scrapers==15.6.0
|
||||
django-scopes==2.0.0
|
||||
django-treebeard==4.7.1
|
||||
django-cors-headers==4.6.0
|
||||
django-storages==1.14.6
|
||||
django-storages==1.14.2
|
||||
boto3==1.28.75
|
||||
django-prometheus==2.4.1
|
||||
django-prometheus==2.3.1
|
||||
django-hCaptcha==0.2.0
|
||||
python-ldap==3.4.4
|
||||
django-auth-ldap==4.6.0
|
||||
pyppeteer==2.0.0
|
||||
pytubefix==9.2.2
|
||||
aiohttp==3.12.15
|
||||
aiohttp==3.10.11
|
||||
inflection==0.5.1
|
||||
redis==6.2.0
|
||||
hiredis==3.2.1
|
||||
redis==5.2.1
|
||||
requests-oauthlib==2.0.0
|
||||
pyjwt==2.10.1
|
||||
python3-openid==3.2.0
|
||||
@@ -55,11 +54,11 @@ litellm==1.64.1
|
||||
# Development
|
||||
pytest==8.4.1
|
||||
pytest-django==4.11.0
|
||||
pytest-cov===6.2.1
|
||||
pytest-factoryboy==2.8.1
|
||||
pytest-cov===6.0.0
|
||||
pytest-factoryboy==2.8.0
|
||||
pytest-html==4.1.1
|
||||
pytest-asyncio==1.1.0
|
||||
pytest-xdist==3.8.0
|
||||
pytest-asyncio==0.25.3
|
||||
pytest-xdist==3.7.0
|
||||
autopep8==2.3.2
|
||||
flake8==7.3.0
|
||||
yapf==0.40.2
|
||||
|
||||
@@ -9,21 +9,21 @@
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@types/luxon": "^3.7.1",
|
||||
"@types/sortablejs": "^1.15.8",
|
||||
"@types/luxon": "^3.6.2",
|
||||
"@vueform/multiselect": "^2.6.11",
|
||||
"@vueuse/core": "^13.6.0",
|
||||
"@vueuse/router": "^13.6.0",
|
||||
"luxon": "^3.7.1",
|
||||
"@vueuse/core": "^13.1.0",
|
||||
"@vueuse/router": "^13.1.0",
|
||||
"luxon": "^3.6.1",
|
||||
"mavon-editor": "^3.0.1",
|
||||
"pinia": "^3.0.2",
|
||||
"vue": "^3.5.13",
|
||||
"vue-draggable-plus": "^0.6.0",
|
||||
"vue-i18n": "^11.1.11",
|
||||
"vue-i18n": "^11.1.7",
|
||||
"vue-router": "^4.5.0",
|
||||
"vue-simple-calendar": "7.1.0",
|
||||
"vuedraggable": "^4.1.0",
|
||||
"vuetify": "^3.9.7"
|
||||
"@types/sortablejs": "^1.15.8",
|
||||
"vuetify": "^3.8.12"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@fortawesome/fontawesome-free": "^6.7.2",
|
||||
@@ -32,21 +32,19 @@
|
||||
"@types/node": "^24.0.8",
|
||||
"@vitejs/plugin-vue": "^6.0.0",
|
||||
"@vue/tsconfig": "^0.7.0",
|
||||
"esbuild-register": "^3.6.0",
|
||||
"jsdom": "^26.1.0",
|
||||
"typescript": "^5.8.3",
|
||||
"vite": "7.1.5",
|
||||
"vite-plugin-pwa": "^1.0.3",
|
||||
"vite-plugin-vuetify": "^2.1.1",
|
||||
"vue-tsc": "^3.0.6",
|
||||
"workbox-background-sync": "^7.3.0",
|
||||
"vite": "6.3.5",
|
||||
"vite-plugin-pwa": "^1.0.1",
|
||||
"workbox-build": "^7.3.0",
|
||||
"workbox-core": "^7.3.0",
|
||||
"workbox-window": "^7.3.0",
|
||||
"workbox-background-sync": "^7.3.0",
|
||||
"workbox-expiration": "^7.3.0",
|
||||
"workbox-navigation-preload": "^7.3.0",
|
||||
"workbox-precaching": "^7.3.0",
|
||||
"workbox-routing": "^7.3.0",
|
||||
"workbox-strategies": "^7.3.0",
|
||||
"workbox-window": "^7.3.0"
|
||||
"vite-plugin-vuetify": "^2.1.1",
|
||||
"vue-tsc": "^2.2.8"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,45 +3,92 @@
|
||||
<v-app-bar color="tandoor" flat density="comfortable" v-if="!useUserPreferenceStore().isAuthenticated">
|
||||
|
||||
</v-app-bar>
|
||||
<v-app-bar :color="useUserPreferenceStore().activeSpace.navBgColor ? useUserPreferenceStore().activeSpace.navBgColor : useUserPreferenceStore().userSettings.navBgColor"
|
||||
flat density="comfortable" v-if="useUserPreferenceStore().isAuthenticated" :scroll-behavior="useUserPreferenceStore().userSettings.navSticky ? '' : 'hide'">
|
||||
<v-app-bar :color="useUserPreferenceStore().activeSpace.navBgColor ? useUserPreferenceStore().activeSpace.navBgColor : useUserPreferenceStore().userSettings.navBgColor" flat density="comfortable" v-if="useUserPreferenceStore().isAuthenticated" :scroll-behavior="useUserPreferenceStore().userSettings.navSticky ? '' : 'hide'">
|
||||
<router-link :to="{ name: 'StartPage', params: {} }">
|
||||
<v-img src="../../assets/brand_logo.svg" width="140px" class="ms-2"
|
||||
v-if="useUserPreferenceStore().userSettings.navShowLogo && !useUserPreferenceStore().activeSpace.navLogo"></v-img>
|
||||
<v-img :src="useUserPreferenceStore().activeSpace.navLogo.preview" width="140px" class="ms-2"
|
||||
v-if="useUserPreferenceStore().userSettings.navShowLogo && useUserPreferenceStore().activeSpace.navLogo != undefined"></v-img>
|
||||
<v-img src="../../assets/brand_logo.svg" width="140px" class="ms-2" v-if="useUserPreferenceStore().userSettings.navShowLogo && !useUserPreferenceStore().activeSpace.navLogo"></v-img>
|
||||
<v-img :src="useUserPreferenceStore().activeSpace.navLogo.preview" width="140px" class="ms-2" v-if="useUserPreferenceStore().userSettings.navShowLogo && useUserPreferenceStore().activeSpace.navLogo != undefined"></v-img>
|
||||
</router-link>
|
||||
|
||||
|
||||
<v-spacer></v-spacer>
|
||||
<global-search-dialog></global-search-dialog>
|
||||
<v-btn icon="$add" class="d-print-none">
|
||||
<v-icon icon="$add" class="fa-fw"></v-icon>
|
||||
<v-menu activator="parent">
|
||||
<v-list>
|
||||
<v-list-item prepend-icon="$add" :to="{ name: 'ModelEditPage', params: {model: 'recipe'} }">{{ $t('Create Recipe') }}</v-list-item>
|
||||
<v-list-item prepend-icon="fa-solid fa-globe" :to="{ name: 'RecipeImportPage', params: {} }">{{ $t('Import Recipe') }}</v-list-item>
|
||||
</v-list>
|
||||
</v-menu>
|
||||
</v-btn>
|
||||
<v-spacer></v-spacer>
|
||||
<global-search-dialog ></global-search-dialog>
|
||||
<v-btn icon="$add" class="d-print-none">
|
||||
<v-icon icon="$add" class="fa-fw"></v-icon>
|
||||
<v-menu activator="parent">
|
||||
<v-list>
|
||||
<v-list-item prepend-icon="$add" :to="{ name: 'ModelEditPage', params: {model: 'recipe'} }">{{ $t('Create Recipe') }}</v-list-item>
|
||||
<v-list-item prepend-icon="fa-solid fa-globe" :to="{ name: 'RecipeImportPage', params: {} }">{{ $t('Import Recipe') }}</v-list-item>
|
||||
</v-list>
|
||||
</v-menu>
|
||||
</v-btn>
|
||||
|
||||
<v-avatar color="primary" class="me-2 cursor-pointer d-print-none">{{ useUserPreferenceStore().userSettings.user.displayName.charAt(0) }}
|
||||
<v-menu activator="parent">
|
||||
<v-avatar color="primary" class="me-2 cursor-pointer d-print-none">{{ useUserPreferenceStore().userSettings.user.displayName.charAt(0) }}
|
||||
<v-menu activator="parent">
|
||||
|
||||
<v-list density="compact">
|
||||
<v-list-item class="mb-1">
|
||||
<template #prepend>
|
||||
<v-avatar color="primary">{{ useUserPreferenceStore().userSettings.user.displayName.charAt(0) }}</v-avatar>
|
||||
<v-list density="compact">
|
||||
<v-list-item class="mb-1">
|
||||
<template #prepend>
|
||||
<v-avatar color="primary">{{ useUserPreferenceStore().userSettings.user.displayName.charAt(0) }}</v-avatar>
|
||||
</template>
|
||||
<v-list-item-title>{{ useUserPreferenceStore().userSettings.user.displayName }}</v-list-item-title>
|
||||
<v-list-item-subtitle>{{ useUserPreferenceStore().activeSpace.name }}</v-list-item-subtitle>
|
||||
</v-list-item>
|
||||
<v-divider></v-divider>
|
||||
<v-list-item :to="{ name: 'SettingsPage', params: {} }">
|
||||
<template #prepend>
|
||||
<v-icon icon="fa-solid fa-sliders"></v-icon>
|
||||
</template>
|
||||
{{ $t('Settings') }}
|
||||
</v-list-item>
|
||||
<v-list-item :to="{ name: 'DatabasePage', params: {} }">
|
||||
<template #prepend>
|
||||
<v-icon icon="fa-solid fa-folder-tree"></v-icon>
|
||||
</template>
|
||||
{{ $t('Database') }}
|
||||
</v-list-item>
|
||||
<v-list-item :to="{ name: 'HelpPage' }">
|
||||
<template #prepend>
|
||||
<v-icon icon="fa-solid fa-question"></v-icon>
|
||||
</template>
|
||||
{{ $t('Help') }}
|
||||
</v-list-item>
|
||||
<!-- <v-list-item><template #prepend><v-icon icon="fa-solid fa-user-shield"></v-icon></template>Admin</v-list-item>-->
|
||||
<!-- <v-list-item><template #prepend><v-icon icon="fa-solid fa-question"></v-icon></template>Help</v-list-item>-->
|
||||
<template v-if="useUserPreferenceStore().spaces.length > 1">
|
||||
<v-divider></v-divider>
|
||||
<v-list-subheader>{{ $t('YourSpaces') }}</v-list-subheader>
|
||||
<v-list-item v-for="s in useUserPreferenceStore().spaces" :key="s.id" @click="useUserPreferenceStore().switchSpace(s)">
|
||||
<template #prepend>
|
||||
<v-icon icon="fa-solid fa-circle-dot" v-if="s.id == useUserPreferenceStore().activeSpace.id"></v-icon>
|
||||
<v-icon icon="fa-solid fa-circle" v-else></v-icon>
|
||||
</template>
|
||||
{{ s.name }}
|
||||
</v-list-item>
|
||||
</template>
|
||||
<v-list-item-title>{{ useUserPreferenceStore().userSettings.user.displayName }}</v-list-item-title>
|
||||
<v-list-item-subtitle>{{ useUserPreferenceStore().activeSpace.name }}</v-list-item-subtitle>
|
||||
</v-list-item>
|
||||
<v-divider></v-divider>
|
||||
|
||||
<component :is="item.component" :="item" :key="item.title" v-for="item in useNavigation().getUserNavigation()"></component>
|
||||
</v-list>
|
||||
</v-menu>
|
||||
</v-avatar>
|
||||
<v-divider></v-divider>
|
||||
<v-list-item link>
|
||||
<template #prepend>
|
||||
<v-icon icon="fa-solid fa-database"></v-icon>
|
||||
</template>
|
||||
{{ $t('Messages') }}
|
||||
<message-list-dialog></message-list-dialog>
|
||||
</v-list-item>
|
||||
<v-list-item :href="getDjangoUrl('admin')" target="_blank" v-if="useUserPreferenceStore().userSettings.user.isSuperuser">
|
||||
<template #prepend>
|
||||
<v-icon icon="fa-solid fa-shield"></v-icon>
|
||||
</template>
|
||||
{{ $t('Admin') }}
|
||||
</v-list-item>
|
||||
<v-list-item :href="getDjangoUrl('accounts/logout')" link>
|
||||
<template #prepend>
|
||||
<v-icon icon="fa-solid fa-arrow-right-from-bracket"></v-icon>
|
||||
</template>
|
||||
{{ $t('Logout') }}
|
||||
</v-list-item>
|
||||
</v-list>
|
||||
</v-menu>
|
||||
</v-avatar>
|
||||
|
||||
</v-app-bar>
|
||||
<v-app-bar color="info" density="compact"
|
||||
@@ -79,17 +126,23 @@
|
||||
<v-list-item-subtitle>{{ useUserPreferenceStore().activeSpace.name }}</v-list-item-subtitle>
|
||||
</v-list-item>
|
||||
<v-divider></v-divider>
|
||||
<component :is="item.component" :="item" :key="item.title" v-for="item in useNavigation().getNavigationDrawer()"></component>
|
||||
<v-list-item prepend-icon="$recipes" title="Home" :to="{ name: 'StartPage', params: {} }"></v-list-item>
|
||||
<v-list-item prepend-icon="$search" :title="$t('Search')" :to="{ name: 'SearchPage' }"></v-list-item>
|
||||
<v-list-item prepend-icon="$mealplan" :title="$t('Meal_Plan')" :to="{ name: 'MealPlanPage', params: {} }"></v-list-item>
|
||||
<v-list-item prepend-icon="$shopping" :title="$t('Shopping_list')" :to="{ name: 'ShoppingListPage', params: {} }"></v-list-item>
|
||||
<v-list-item prepend-icon="fas fa-globe" :title="$t('Import')" :to="{ name: 'RecipeImportPage', params: {} }"></v-list-item>
|
||||
<v-list-item prepend-icon="$books" :title="$t('Books')" :to="{ name: 'BooksPage', params: {} }"></v-list-item>
|
||||
<v-list-item prepend-icon="fa-solid fa-folder-tree" :title="$t('Database')" :to="{ name: 'DatabasePage' }"></v-list-item>
|
||||
|
||||
<navigation-drawer-context-menu></navigation-drawer-context-menu>
|
||||
</v-list>
|
||||
|
||||
|
||||
<template #append>
|
||||
<v-list nav>
|
||||
<v-list-item prepend-icon="fas fa-sliders" :title="$t('Settings')" :to="{ name: 'SettingsPage', params: {} }"></v-list-item>
|
||||
<v-list-item prepend-icon="fa-solid fa-heart" link>
|
||||
<v-list-item prepend-icon="fa-solid fa-heart" href="https://tandoor.dev" target="_blank">
|
||||
Tandoor {{ useUserPreferenceStore().serverSettings.version }}
|
||||
<help-dialog></help-dialog>
|
||||
</v-list-item>
|
||||
</v-list>
|
||||
</template>
|
||||
@@ -113,7 +166,10 @@
|
||||
<v-icon icon="fa-fw fas fa-bars"></v-icon>
|
||||
<v-bottom-sheet activator="parent" close-on-content-click>
|
||||
<v-list nav>
|
||||
<component :is="item.component" :="item" :key="item.title" v-for="item in useNavigation().getBottomNavigation()"></component>
|
||||
<v-list-item prepend-icon="fa-solid fa-sliders" :to="{ name: 'SettingsPage', params: {} }" :title="$t('Settings')"></v-list-item>
|
||||
<v-list-item prepend-icon="fas fa-globe" :title="$t('Import')" :to="{ name: 'RecipeImportPage', params: {} }"></v-list-item>
|
||||
<v-list-item prepend-icon="fa-solid fa-folder-tree" :to="{ name: 'DatabasePage' }" :title="$t('Database')"></v-list-item>
|
||||
<v-list-item prepend-icon="$books" :title="$t('Books')" :to="{ name: 'BooksPage', params: {} }"></v-list-item>
|
||||
</v-list>
|
||||
</v-bottom-sheet>
|
||||
</v-btn>
|
||||
@@ -131,139 +187,28 @@
|
||||
<script lang="ts" setup>
|
||||
import GlobalSearchDialog from "@/components/inputs/GlobalSearchDialog.vue"
|
||||
|
||||
import {useDisplay} from "vuetify"
|
||||
import {useDisplay, useTheme} from "vuetify"
|
||||
import VSnackbarQueued from "@/components/display/VSnackbarQueued.vue";
|
||||
import MessageListDialog from "@/components/dialogs/MessageListDialog.vue";
|
||||
import {useUserPreferenceStore} from "@/stores/UserPreferenceStore";
|
||||
import NavigationDrawerContextMenu from "@/components/display/NavigationDrawerContextMenu.vue";
|
||||
import {useDjangoUrls} from "@/composables/useDjangoUrls";
|
||||
import {nextTick, onMounted} from "vue";
|
||||
import {onMounted, ref} from "vue";
|
||||
import {isSpaceAboveLimit} from "@/utils/logic_utils";
|
||||
import {useMediaQuery, useTitle} from "@vueuse/core";
|
||||
import HelpDialog from "@/components/dialogs/HelpDialog.vue";
|
||||
import {NAVIGATION_DRAWER} from "@/utils/navigation.ts";
|
||||
import {useNavigation} from "@/composables/useNavigation.ts";
|
||||
import {useRouter} from "vue-router";
|
||||
import {useI18n} from "vue-i18n";
|
||||
import '@/assets/tandoor_light.css'
|
||||
import {useMediaQuery} from "@vueuse/core";
|
||||
|
||||
const {lgAndUp} = useDisplay()
|
||||
const {getDjangoUrl} = useDjangoUrls()
|
||||
const {t} = useI18n()
|
||||
|
||||
const title = useTitle()
|
||||
const router = useRouter()
|
||||
|
||||
const isPrintMode = useMediaQuery('print')
|
||||
|
||||
onMounted(() => {
|
||||
useUserPreferenceStore().init()
|
||||
})
|
||||
|
||||
/**
|
||||
* global title update handler, might be overridden by page specific handlers
|
||||
*/
|
||||
router.afterEach((to, from) => {
|
||||
if(to.name == 'StartPage' && !useUserPreferenceStore().activeSpace.spaceSetupCompleted != undefined &&!useUserPreferenceStore().activeSpace.spaceSetupCompleted && useUserPreferenceStore().activeSpace.createdBy.id! == useUserPreferenceStore().userSettings.user.id!){
|
||||
router.push({name: 'WelcomePage'})
|
||||
}
|
||||
nextTick(() => {
|
||||
if (to.meta.title) {
|
||||
title.value = t(to.meta.title)
|
||||
} else {
|
||||
title.value = 'Tandoor'
|
||||
}
|
||||
})
|
||||
useUserPreferenceStore()
|
||||
})
|
||||
|
||||
</script>
|
||||
|
||||
<style>
|
||||
|
||||
.v-theme--dark {
|
||||
|
||||
a:not([class]) {
|
||||
color: #b98766;
|
||||
text-decoration: none;
|
||||
background-color: transparent
|
||||
}
|
||||
|
||||
a:hover {
|
||||
color: #fff;
|
||||
text-decoration: none
|
||||
}
|
||||
|
||||
a:not([href]):not([tabindex]), a:not([href]):not([tabindex]):focus, a:not([href]):not([tabindex]):hover {
|
||||
color: inherit;
|
||||
text-decoration: none
|
||||
}
|
||||
|
||||
a:not([href]):not([tabindex]):focus {
|
||||
outline: 0
|
||||
}
|
||||
|
||||
/* Meal-Plan */
|
||||
|
||||
.cv-header {
|
||||
background-color: #303030 !important;
|
||||
}
|
||||
|
||||
.cv-weeknumber, .cv-header-day {
|
||||
background-color: #303030 !important;
|
||||
color: #fff !important;
|
||||
}
|
||||
|
||||
.cv-day.past {
|
||||
background-color: #333333 !important;
|
||||
}
|
||||
|
||||
.cv-day.today {
|
||||
background-color: rgba(185, 135, 102, 0.2) !important;
|
||||
}
|
||||
|
||||
.cv-day.outsideOfMonth {
|
||||
background-color: #0d0d0d !important;
|
||||
}
|
||||
|
||||
.cv-item {
|
||||
background-color: #4E4E4E !important;
|
||||
}
|
||||
|
||||
.d01 .cv-day-number {
|
||||
background-color: #b98766 !important;
|
||||
}
|
||||
}
|
||||
|
||||
.v-theme--light {
|
||||
a:not([class]) {
|
||||
color: #b98766;
|
||||
text-decoration: none;
|
||||
background-color: transparent
|
||||
}
|
||||
|
||||
a:hover {
|
||||
color: #000;
|
||||
text-decoration: none
|
||||
}
|
||||
|
||||
a:not([href]):not([tabindex]), a:not([href]):not([tabindex]):focus, a:not([href]):not([tabindex]):hover {
|
||||
color: inherit;
|
||||
text-decoration: none
|
||||
}
|
||||
|
||||
a:not([href]):not([tabindex]):focus {
|
||||
outline: 0
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/* vueform/multiselect */
|
||||
|
||||
.multiselect-option.is-pointed {
|
||||
background: #b98766 !important;
|
||||
}
|
||||
|
||||
.multiselect-option.is-selected {
|
||||
background: #b55e4f !important;
|
||||
}
|
||||
<style scoped>
|
||||
|
||||
</style>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import {createApp} from "vue";
|
||||
import {createRouter, createWebHistory} from 'vue-router'
|
||||
import {createRouter, createWebHashHistory, createWebHistory} from 'vue-router'
|
||||
import {createPinia} from 'pinia'
|
||||
// @ts-ignore
|
||||
import App from './Tandoor.vue'
|
||||
@@ -8,66 +8,55 @@ import vuetify from "@/vuetify";
|
||||
import mavonEditor from 'mavon-editor'
|
||||
import 'mavon-editor/dist/css/index.css'
|
||||
import 'vite/modulepreload-polyfill';
|
||||
import {createRulesPlugin} from 'vuetify/labs/rules'
|
||||
import { createRulesPlugin } from 'vuetify/labs/rules'
|
||||
|
||||
import {setupI18n} from "@/i18n";
|
||||
import MealPlanPage from "@/pages/MealPlanPage.vue";
|
||||
import {TANDOOR_PLUGINS, TandoorPlugin} from "@/types/Plugins.ts";
|
||||
|
||||
let routes = [
|
||||
{path: '/', component: () => import("@/pages/StartPage.vue"), name: 'StartPage' },
|
||||
const routes = [
|
||||
{path: '/', component: () => import("@/pages/StartPage.vue"), name: 'StartPage'},
|
||||
{path: '/search', redirect: {name: 'StartPage'}},
|
||||
{path: '/test', component: () => import("@/pages/TestPage.vue"), name: 'view_test'},
|
||||
{path: '/welcome', component: () => import("@/pages/WelcomePage.vue"), name: 'WelcomePage', meta: {title: 'Welcome'}},
|
||||
{path: '/help', component: () => import("@/pages/HelpPage.vue"), name: 'HelpPage', meta: {title: 'Help'}},
|
||||
{path: '/help', component: () => import("@/pages/HelpPage.vue"), name: 'HelpPage'},
|
||||
{
|
||||
path: '/settings', component: () => import("@/pages/SettingsPage.vue"), name: 'SettingsPage', redirect: '/settings/account',
|
||||
children: [
|
||||
{path: 'account', component: () => import("@/components/settings/AccountSettings.vue"), name: 'AccountSettings', meta: {title: 'Settings'}},
|
||||
{path: 'cosmetic', component: () => import("@/components/settings/CosmeticSettings.vue"), name: 'CosmeticSettings', meta: {title: 'Settings'}},
|
||||
{path: 'shopping', component: () => import("@/components/settings/ShoppingSettings.vue"), name: 'ShoppingSettings', meta: {title: 'Settings'}},
|
||||
{path: 'meal-plan', component: () => import("@/components/settings/MealPlanSettings.vue"), name: 'MealPlanSettings', meta: {title: 'Settings'}},
|
||||
{path: 'search', component: () => import("@/components/settings/SearchSettings.vue"), name: 'SearchSettings', meta: {title: 'Settings'}},
|
||||
{path: 'space', component: () => import("@/components/settings/SpaceSettings.vue"), name: 'SpaceSettings', meta: {title: 'Settings'}},
|
||||
{path: 'open-data-import', component: () => import("@/components/settings/OpenDataImportSettings.vue"), name: 'OpenDataImportSettings', meta: {title: 'Settings'}},
|
||||
{path: 'export', component: () => import("@/components/settings/ExportDataSettings.vue"), name: 'ExportDataSettings', meta: {title: 'Settings'}},
|
||||
{path: 'api', component: () => import("@/components/settings/ApiSettings.vue"), name: 'ApiSettings', meta: {title: 'Settings'}},
|
||||
], meta: {title: 'Settings'}
|
||||
{path: 'account', component: () => import("@/components/settings/AccountSettings.vue"), name: 'AccountSettings'},
|
||||
{path: 'cosmetic', component: () => import("@/components/settings/CosmeticSettings.vue"), name: 'CosmeticSettings'},
|
||||
{path: 'shopping', component: () => import("@/components/settings/ShoppingSettings.vue"), name: 'ShoppingSettings'},
|
||||
{path: 'meal-plan', component: () => import("@/components/settings/MealPlanSettings.vue"), name: 'MealPlanSettings'},
|
||||
{path: 'search', component: () => import("@/components/settings/SearchSettings.vue"), name: 'SearchSettings'},
|
||||
{path: 'space', component: () => import("@/components/settings/SpaceSettings.vue"), name: 'SpaceSettings'},
|
||||
{path: 'space-members', component: () => import("@/components/settings/SpaceMemberSettings.vue"), name: 'SpaceMemberSettings'},
|
||||
{path: 'user-space', component: () => import("@/components/settings/UserSpaceSettings.vue"), name: 'UserSpaceSettings'},
|
||||
{path: 'open-data-import', component: () => import("@/components/settings/OpenDataImportSettings.vue"), name: 'OpenDataImportSettings'},
|
||||
{path: 'export', component: () => import("@/components/settings/ExportDataSettings.vue"), name: 'ExportDataSettings'},
|
||||
{path: 'api', component: () => import("@/components/settings/ApiSettings.vue"), name: 'ApiSettings'},
|
||||
]
|
||||
},
|
||||
//{path: '/settings/:page', component: SettingsPage, name: 'view_settings_page', props: true},
|
||||
{path: '/advanced-search', component: () => import("@/pages/SearchPage.vue"), name: 'SearchPage', meta: {title: 'Search'}},
|
||||
{path: '/shopping', component: () => import("@/pages/ShoppingListPage.vue"), name: 'ShoppingListPage', meta: {title: 'Shopping_list'}},
|
||||
{path: '/mealplan', component: MealPlanPage, name: 'MealPlanPage', meta: {title: 'Meal_Plan'}},
|
||||
{path: '/books', component: () => import("@/pages/BooksPage.vue"), name: 'BooksPage', meta: {title: 'Books'}},
|
||||
{path: '/book/:bookId', component: () => import("@/pages/BookViewPage.vue"), name: 'BookViewPage', props: true, meta: {title: 'Book'}},
|
||||
{path: '/recipe/import', component: () => import("@/pages/RecipeImportPage.vue"), name: 'RecipeImportPage', meta: {title: 'Import'}},
|
||||
{path: '/advanced-search', component: () => import("@/pages/SearchPage.vue"), name: 'SearchPage'},
|
||||
{path: '/shopping', component: () => import("@/pages/ShoppingListPage.vue"), name: 'ShoppingListPage'},
|
||||
{path: '/mealplan', component: MealPlanPage, name: 'MealPlanPage'},
|
||||
{path: '/books', component: () => import("@/pages/BooksPage.vue"), name: 'BooksPage'},
|
||||
{path: '/book/:bookId', component: () => import("@/pages/BookViewPage.vue"), name: 'BookViewPage', props: true},
|
||||
{path: '/recipe/import', component: () => import("@/pages/RecipeImportPage.vue"), name: 'RecipeImportPage'},
|
||||
|
||||
{path: '/recipe/:id', component: () => import("@/pages/RecipeViewPage.vue"), name: 'RecipeViewPage', props: true, meta: {title: 'Recipe'}},
|
||||
{path: '/recipe/:id', component: () => import("@/pages/RecipeViewPage.vue"), name: 'RecipeViewPage', props: true},
|
||||
{path: '/view/recipe/:id', redirect: {name: 'RecipeViewPage'}}, // old Tandoor v1 url pattern
|
||||
|
||||
{path: '/list/:model?', component: () => import("@/pages/ModelListPage.vue"), props: true, name: 'ModelListPage'},
|
||||
{path: '/edit/:model/:id?', component: () => import("@/pages/ModelEditPage.vue"), props: true, name: 'ModelEditPage'},
|
||||
{path: '/database', component: () => import("@/pages/DatabasePage.vue"), props: true, name: 'DatabasePage', meta: {title: 'Database'}},
|
||||
{path: '/database', component: () => import("@/pages/DatabasePage.vue"), props: true, name: 'DatabasePage'},
|
||||
|
||||
{path: '/ingredient-editor', component: () => import("@/pages/IngredientEditorPage.vue"), name: 'IngredientEditorPage', meta: {title: 'Ingredient Editor'}},
|
||||
{path: '/property-editor', component: () => import("@/pages/PropertyEditorPage.vue"), name: 'PropertyEditorPage', meta: {title: 'Property_Editor'}},
|
||||
{path: '/ingredient-editor', component: () => import("@/pages/IngredientEditorPage.vue"), name: 'IngredientEditorPage'},
|
||||
{path: '/property-editor', component: () => import("@/pages/PropertyEditorPage.vue"), name: 'PropertyEditorPage'},
|
||||
|
||||
{path: '/space-setup', component: () => import("@/pages/SpaceSetupPage.vue"), name: 'SpaceSetupPage'},
|
||||
|
||||
{path: '/:pathMatch(.*)*', component: () => import("@/pages/404Page.vue"), name: '404Page', meta: {title: 'NotFound'}},
|
||||
]
|
||||
|
||||
// load plugin routes into routing table
|
||||
TANDOOR_PLUGINS.forEach(plugin => {
|
||||
routes = routes.concat(plugin.routes)
|
||||
})
|
||||
|
||||
const basePath = localStorage.getItem("BASE_PATH")
|
||||
const pathname = basePath?.startsWith("http") ? new URL(basePath).pathname : undefined
|
||||
const base = pathname === "/" ? undefined : pathname
|
||||
|
||||
const router = createRouter({
|
||||
history: createWebHistory(base),
|
||||
history: createWebHistory(),
|
||||
routes,
|
||||
})
|
||||
|
||||
@@ -77,7 +66,7 @@ const app = createApp(App)
|
||||
|
||||
app.use(createPinia())
|
||||
app.use(vuetify)
|
||||
app.use(createRulesPlugin({ /* options */}, vuetify.locale))
|
||||
app.use(createRulesPlugin({ /* options */ }, vuetify.locale))
|
||||
app.use(router)
|
||||
app.use(i18n)
|
||||
app.use(mavonEditor) // TODO only use on pages that need it
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user