Compare commits
34 Commits: 2.0.0-alph...1.5.35

| Author | SHA1 | Date |
|---|---|---|
|  | cd2c4b35df |  |
|  | d1d4cec00f |  |
|  | 6940f405eb |  |
|  | 3e6f7a554a |  |
|  | 4df451a540 |  |
|  | fd06d67218 |  |
|  | f546a4d382 |  |
|  | c2ecf7ef57 |  |
|  | b9771d7c11 |  |
|  | d5b53d2bca |  |
|  | f502cd47e1 |  |
|  | 358fab9a8f |  |
|  | 445e78825a |  |
|  | e7d9824520 |  |
|  | c9ebbdd4ec |  |
|  | 85fa211390 |  |
|  | 3f5db2cb03 |  |
|  | 79227b347e |  |
|  | ffe0c22b3e |  |
|  | 25e5faf135 |  |
|  | a6f8a9ef06 |  |
|  | 05b51e83a6 |  |
|  | 5625a9ae4f |  |
|  | 8e60a1954b |  |
|  | 52d2d75fe9 |  |
|  | edb669ab36 |  |
|  | e9ed28b44d |  |
|  | 343373bf4d |  |
|  | 698ede9eb6 |  |
|  | 3ae73f7cad |  |
|  | 340d36c628 |  |
|  | 703f782be1 |  |
|  | 2ef4514987 |  |
|  | a89411aeec |  |

@@ -20,10 +20,6 @@
"ms-python.python"
]
}
},

"containerEnv": {
"CSRF_TRUSTED_ORIGINS": "http://localhost:8000,http://localhost:8080"
}

// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.

@@ -6,9 +6,6 @@
# random secret key, use for example `base64 /dev/urandom | head -c50` to generate one
SECRET_KEY=

# your default timezone See https://timezonedb.com/time-zones for a list of timezones
TZ=Europe/Berlin

# allowed hosts (see documentation), should be set to your hostname(s) but might be * (default) for some proxies/providers
# ALLOWED_HOSTS=recipes.mydomain.com

.github/workflows/build-docker-open-data.yml (vendored, new file, 110 lines)
@@ -0,0 +1,110 @@
name: Build Docker Container with open data plugin installed

on: push

jobs:
build-container:
name: Build ${{ matrix.name }} Container
runs-on: ubuntu-latest
if: github.repository_owner == 'TandoorRecipes'
continue-on-error: ${{ matrix.continue-on-error }}
permissions:
contents: read
packages: write
strategy:
matrix:
include:
# Standard build config
- name: Standard
dockerfile: Dockerfile
platforms: linux/amd64,linux/arm64
suffix: ""
continue-on-error: false
steps:
- uses: actions/checkout@v4

- name: Get version number
id: get_version
run: |
if [[ "$GITHUB_REF" = refs/tags/* ]]; then
echo "VERSION=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_OUTPUT
elif [[ "$GITHUB_REF" = refs/heads/beta ]]; then
echo VERSION=beta >> $GITHUB_OUTPUT
else
echo VERSION=develop >> $GITHUB_OUTPUT
fi

# clone open data plugin
- name: clone open data plugin repo
uses: actions/checkout@master
with:
repository: TandoorRecipes/open_data_plugin
ref: master
path: ./recipes/plugins/open_data_plugin

# Build Vue frontend
- uses: actions/setup-node@v4
with:
node-version: '18'
cache: yarn
cache-dependency-path: vue/yarn.lock
- name: Install dependencies
working-directory: ./vue
run: yarn install --frozen-lockfile
- name: Build dependencies
working-directory: ./vue
run: yarn build

- name: Setup Open Data Plugin Links
working-directory: ./recipes/plugins/open_data_plugin
run: python setup_repo.py

- name: Build Open Data Frontend
working-directory: ./recipes/plugins/open_data_plugin/vue
run: yarn build

- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
if: github.secret_source == 'Actions'
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
if: github.secret_source == 'Actions'
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ github.token }}
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
vabene1111/recipes
ghcr.io/TandoorRecipes/recipes
flavor: |
latest=false
suffix=${{ matrix.suffix }}
tags: |
type=raw,value=latest,suffix=-open-data-plugin,enable=${{ startsWith(github.ref, 'refs/tags/') }}
type=semver,suffix=-open-data-plugin,pattern={{version}}
type=semver,suffix=-open-data-plugin,pattern={{major}}.{{minor}}
type=semver,suffix=-open-data-plugin,pattern={{major}}
type=ref,suffix=-open-data-plugin,event=branch
- name: Build and Push
uses: docker/build-push-action@v5
with:
context: .
file: ${{ matrix.dockerfile }}
pull: true
push: ${{ github.secret_source == 'Actions' }}
platforms: ${{ matrix.platforms }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max

.github/workflows/build-docker.yml (vendored, 53 changed lines)
@@ -17,7 +17,7 @@ jobs:
# Standard build config
- name: Standard
dockerfile: Dockerfile
platforms: linux/amd64,linux/arm64
platforms: linux/amd64,linux/arm64,linux/arm/v7
suffix: ""
continue-on-error: false
steps:
@@ -34,17 +34,17 @@ jobs:
echo VERSION=develop >> $GITHUB_OUTPUT
fi

# Build Vue 3 frontend
# Build Vue frontend
- uses: actions/setup-node@v4
with:
node-version: '22'
node-version: '18'
cache: yarn
cache-dependency-path: vue3/yarn.lock
cache-dependency-path: vue/yarn.lock
- name: Install dependencies
working-directory: ./vue3
working-directory: ./vue
run: yarn install --frozen-lockfile
- name: Build dependencies
working-directory: ./vue3
working-directory: ./vue
run: yarn build

- name: Set up QEMU
@@ -74,9 +74,8 @@ jobs:
flavor: |
latest=false
suffix=${{ matrix.suffix }}
# disable latest for tagged releases while in beta
# type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') }}
tags: |
type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') }}
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
@@ -94,29 +93,29 @@ jobs:
cache-from: type=gha
cache-to: type=gha,mode=max

# notify-stable:
# name: Notify Stable
# runs-on: ubuntu-latest
# needs: build-container
# if: startsWith(github.ref, 'refs/tags/')
# steps:
# - name: Set tag name
# run: |
# # Strip "refs/tags/" prefix
# echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
# # Send stable discord notification
# - name: Discord notification
# env:
# DISCORD_WEBHOOK: ${{ secrets.DISCORD_RELEASE_WEBHOOK }}
# uses: Ilshidur/action-discord@0.3.2
# with:
# args: '🚀 Version {{ VERSION }} of tandoor has been released 🥳 Check it out https://github.com/vabene1111/recipes/releases/tag/{{ VERSION }}'
notify-stable:
name: Notify Stable
runs-on: ubuntu-latest
needs: build-container
if: startsWith(github.ref, 'refs/tags/')
steps:
- name: Set tag name
run: |
# Strip "refs/tags/" prefix
echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
# Send stable discord notification
- name: Discord notification
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_RELEASE_WEBHOOK }}
uses: Ilshidur/action-discord@0.3.2
with:
args: '🚀 Version {{ VERSION }} of tandoor has been released 🥳 Check it out https://github.com/vabene1111/recipes/releases/tag/{{ VERSION }}'

notify-beta:
name: Notify Beta
runs-on: ubuntu-latest
needs: build-container
if: startsWith(github.ref, 'refs/tags/')
if: github.ref == 'refs/heads/beta'
steps:
# Send beta discord notification
- name: Discord notification
@@ -124,4 +123,4 @@ jobs:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_BETA_WEBHOOK }}
uses: Ilshidur/action-discord@0.3.2
with:
args: '🚀 The Tandoor 2 Image has been updated! 🥳'
args: '🚀 The BETA Image has been updated! 🥳'

.github/workflows/ci.yml (vendored, 3 changed lines)
@@ -11,11 +11,12 @@ jobs:
matrix:
python-version: ["3.10"]
node-version: ["18"]

steps:
- uses: actions/checkout@v4
- uses: awalsh128/cache-apt-pkgs-action@v1.4.3
with:
packages: libsasl2-dev python3-dev libxml2-dev libxmlsec1-dev libxslt-dev libxmlsec1-openssl libxslt-dev libldap2-dev libssl-dev gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev xmlsec-dev xmlsec build-base g++ curl
packages: libsasl2-dev python3-dev libxml2-dev libxmlsec1-dev libxslt-dev libxmlsec1-openssl libldap2-dev libssl-dev gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev xmlsec-dev xmlsec build-base g++ curl
version: 1.0

# Setup python & dependencies

.gitignore (vendored, 9 changed lines)
@@ -75,10 +75,8 @@ cookbook/static/vue
vue/webpack-stats.json
/docker-compose.override.yml
vue/node_modules
/recipes/plugins
vetur.config.js
cookbook/static/vue
vue/webpack-stats.json
plugins
vue3/node_modules
cookbook/templates/sw.js
vue/.yarn
vue3/.vite
@@ -87,5 +85,4 @@ vue3/.vite
vetur.config.js
venv/
.idea/easy-i18n.xml
cookbook/static/vue3
vue3/node_modules
cookbook/static/vue3

.idea/prettier.xml (generated, file deleted, 6 lines)
@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="PrettierConfiguration">
<option name="myConfigurationMode" value="AUTOMATIC" />
</component>
</project>

.idea/recipes.iml (generated, 2 changed lines)
@@ -18,7 +18,7 @@
<excludeFolder url="file://$MODULE_DIR$/staticfiles" />
<excludeFolder url="file://$MODULE_DIR$/venv" />
</content>
<orderEntry type="jdk" jdkName="Python 3.12 (recipes)" jdkType="Python SDK" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="TemplatesService">
62
.idea/watcherTasks.xml
generated
@@ -5,7 +5,7 @@
|
||||
<option name="arguments" value="-m flake8 $FilePath$ --config $ContentRoot$\.flake8" />
|
||||
<option name="checkSyntaxErrors" value="true" />
|
||||
<option name="description" />
|
||||
<option name="exitCodeBehavior" value="NEVER" />
|
||||
<option name="exitCodeBehavior" value="ALWAYS" />
|
||||
<option name="fileExtension" value="py" />
|
||||
<option name="immediateSync" value="false" />
|
||||
<option name="name" value="Flake8 Watcher" />
|
||||
@@ -27,65 +27,5 @@
|
||||
<option name="workingDir" value="" />
|
||||
<envs />
|
||||
</TaskOptions>
|
||||
<TaskOptions isEnabled="false">
|
||||
<option name="arguments" value="-m isort $FilePath$" />
|
||||
<option name="checkSyntaxErrors" value="true" />
|
||||
<option name="description" />
|
||||
<option name="exitCodeBehavior" value="ERROR" />
|
||||
<option name="fileExtension" value="py" />
|
||||
<option name="immediateSync" value="false" />
|
||||
<option name="name" value="isort Watcher" />
|
||||
<option name="output" value="$FilePath$" />
|
||||
<option name="outputFilters">
|
||||
<array />
|
||||
</option>
|
||||
<option name="outputFromStdout" value="false" />
|
||||
<option name="program" value="$PyInterpreterDirectory$/python" />
|
||||
<option name="runOnExternalChanges" value="false" />
|
||||
<option name="scopeName" value="Project Files" />
|
||||
<option name="trackOnlyRoot" value="false" />
|
||||
<option name="workingDir" value="" />
|
||||
<envs />
|
||||
</TaskOptions>
|
||||
<TaskOptions isEnabled="false">
|
||||
<option name="arguments" value="-m yapf -i $FilePath$" />
|
||||
<option name="checkSyntaxErrors" value="true" />
|
||||
<option name="description" />
|
||||
<option name="exitCodeBehavior" value="NEVER" />
|
||||
<option name="fileExtension" value="py" />
|
||||
<option name="immediateSync" value="false" />
|
||||
<option name="name" value="YAPF" />
|
||||
<option name="output" value="$FilePath$" />
|
||||
<option name="outputFilters">
|
||||
<array />
|
||||
</option>
|
||||
<option name="outputFromStdout" value="false" />
|
||||
<option name="program" value="$PyInterpreterDirectory$/python" />
|
||||
<option name="runOnExternalChanges" value="false" />
|
||||
<option name="scopeName" value="Project Files" />
|
||||
<option name="trackOnlyRoot" value="false" />
|
||||
<option name="workingDir" value="" />
|
||||
<envs />
|
||||
</TaskOptions>
|
||||
<TaskOptions isEnabled="false">
|
||||
<option name="arguments" value="--cwd $ProjectFileDir$\vue prettier -w --config $ProjectFileDir$\.prettierrc $FilePath$" />
|
||||
<option name="checkSyntaxErrors" value="true" />
|
||||
<option name="description" />
|
||||
<option name="exitCodeBehavior" value="ERROR" />
|
||||
<option name="fileExtension" value="*" />
|
||||
<option name="immediateSync" value="true" />
|
||||
<option name="name" value="Prettier" />
|
||||
<option name="output" value="" />
|
||||
<option name="outputFilters">
|
||||
<array />
|
||||
</option>
|
||||
<option name="outputFromStdout" value="false" />
|
||||
<option name="program" value="yarn" />
|
||||
<option name="runOnExternalChanges" value="true" />
|
||||
<option name="scopeName" value="Prettier" />
|
||||
<option name="trackOnlyRoot" value="false" />
|
||||
<option name="workingDir" value="" />
|
||||
<envs />
|
||||
</TaskOptions>
|
||||
</component>
|
||||
</project>
|
||||

.vscode/launch.json (vendored, 2 changed lines)
@@ -24,7 +24,7 @@
"console": "integratedTerminal",
"env": {
// coverage and pytest can't both be running at the same time
"PYTEST_ADDOPTS": "--no-cov -n 0"
"PYTEST_ADDOPTS": "--no-cov"
},
"django": true,
"justMyCode": true
200
.vscode/tasks.json
vendored
@@ -1,131 +1,75 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "Run Migrations",
|
||||
"type": "shell",
|
||||
"command": "python3 manage.py migrate"
|
||||
},
|
||||
{
|
||||
"label": "Collect Static Files",
|
||||
"type": "shell",
|
||||
"command": "python3 manage.py collectstatic",
|
||||
"dependsOn": ["Yarn Build"]
|
||||
},
|
||||
{
|
||||
"label": "Setup Dev Server",
|
||||
"dependsOn": ["Run Migrations", "Yarn Build"]
|
||||
},
|
||||
{
|
||||
"label": "Run Dev Server",
|
||||
"type": "shell",
|
||||
"dependsOn": ["Setup Dev Server"],
|
||||
"command": "python3 manage.py runserver"
|
||||
},
|
||||
{
|
||||
"label": "Yarn Install",
|
||||
"dependsOn": ["Yarn Install - Vue", "Yarn Install - Vue3"]
|
||||
},
|
||||
{
|
||||
"label": "Yarn Install - Vue",
|
||||
"type": "shell",
|
||||
"command": "yarn install --force",
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Yarn Install - Vue3",
|
||||
"type": "shell",
|
||||
"command": "yarn install --force",
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Generate API",
|
||||
"dependsOn": ["Generate API - Vue", "Generate API - Vue3"]
|
||||
},
|
||||
{
|
||||
"label": "Generate API - Vue",
|
||||
"type": "shell",
|
||||
"command": "openapi-generator-cli generate -g typescript-axios -i http://127.0.0.1:8000/openapi/",
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue/src/utils/openapi"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Generate API - Vue3",
|
||||
"type": "shell",
|
||||
"command": "openapi-generator-cli generate -g typescript-fetch -i http://127.0.0.1:8000/openapi/",
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue3/src/openapi"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Yarn Serve",
|
||||
"type": "shell",
|
||||
"command": "yarn serve",
|
||||
"dependsOn": ["Yarn Install - Vue"],
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Vite Serve",
|
||||
"type": "shell",
|
||||
"command": "vite",
|
||||
"dependsOn": ["Yarn Install - Vue3"],
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Yarn Build",
|
||||
"dependsOn": ["Yarn Build - Vue", "Vite Build - Vue3"],
|
||||
"group": "build"
|
||||
},
|
||||
{
|
||||
"label": "Yarn Build - Vue",
|
||||
"type": "shell",
|
||||
"command": "yarn build",
|
||||
"dependsOn": ["Yarn Install - Vue"],
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue"
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "Run Migrations",
|
||||
"type": "shell",
|
||||
"command": "python3 manage.py migrate",
|
||||
},
|
||||
"group": "build"
|
||||
},
|
||||
{
|
||||
"label": "Vite Build - Vue3",
|
||||
"type": "shell",
|
||||
"command": "vite build",
|
||||
"dependsOn": ["Yarn Install - Vue3"],
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue3"
|
||||
{
|
||||
"label": "Collect Static Files",
|
||||
"type": "shell",
|
||||
"command": "python3 manage.py collectstatic",
|
||||
"dependsOn": ["Yarn Build"],
|
||||
},
|
||||
"group": "build"
|
||||
},
|
||||
{
|
||||
"label": "Setup Tests",
|
||||
"dependsOn": ["Run Migrations", "Collect Static Files"]
|
||||
},
|
||||
{
|
||||
"label": "Run all pytests",
|
||||
"type": "shell",
|
||||
"command": "python3 -m pytest cookbook/tests",
|
||||
"dependsOn": ["Setup Tests"],
|
||||
"group": "test"
|
||||
},
|
||||
{
|
||||
"label": "Setup Documentation Dependencies",
|
||||
"type": "shell",
|
||||
"command": "pip install mkdocs-material mkdocs-include-markdown-plugin"
|
||||
},
|
||||
{
|
||||
"label": "Serve Documentation",
|
||||
"type": "shell",
|
||||
"command": "mkdocs serve",
|
||||
"dependsOn": ["Setup Documentation Dependencies"]
|
||||
}
|
||||
]
|
||||
}
|
||||
{
|
||||
"label": "Setup Dev Server",
|
||||
"dependsOn": ["Run Migrations", "Yarn Build"],
|
||||
},
|
||||
{
|
||||
"label": "Run Dev Server",
|
||||
"type": "shell",
|
||||
"dependsOn": ["Setup Dev Server"],
|
||||
"command": "python3 manage.py runserver",
|
||||
},
|
||||
{
|
||||
"label": "Yarn Install",
|
||||
"type": "shell",
|
||||
"command": "yarn install",
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Yarn Serve",
|
||||
"type": "shell",
|
||||
"command": "yarn serve",
|
||||
"dependsOn": ["Yarn Install"],
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Yarn Build",
|
||||
"type": "shell",
|
||||
"command": "yarn build",
|
||||
"dependsOn": ["Yarn Install"],
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/vue"
|
||||
},
|
||||
"group": "build",
|
||||
},
|
||||
{
|
||||
"label": "Setup Tests",
|
||||
"dependsOn": ["Run Migrations", "Collect Static Files"],
|
||||
},
|
||||
{
|
||||
"label": "Run all pytests",
|
||||
"type": "shell",
|
||||
"command": "python3 -m pytest cookbook/tests",
|
||||
"dependsOn": ["Setup Tests"],
|
||||
"group": "test",
|
||||
},
|
||||
{
|
||||
"label": "Setup Documentation Dependencies",
|
||||
"type": "shell",
|
||||
"command": "pip install mkdocs-material mkdocs-include-markdown-plugin",
|
||||
},
|
||||
{
|
||||
"label": "Serve Documentation",
|
||||
"type": "shell",
|
||||
"command": "mkdocs serve",
|
||||
"dependsOn": ["Setup Documentation Dependencies"],
|
||||
}
|
||||
]
|
||||
}
|
||||

Dockerfile (15 changed lines)
@@ -1,7 +1,7 @@
FROM python:3.13-alpine3.21
FROM python:3.12-alpine3.19

#Install all dependencies.
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev openldap git libgcc libstdc++
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev openldap git

#Print all logs without buffering it.
ENV PYTHONUNBUFFERED 1
@@ -21,19 +21,14 @@ RUN \
if [ `apk --print-arch` = "armv7" ]; then \
printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf ; \
fi

# remove Development dependencies from requirements.txt
RUN sed -i '/# Development/,$d' requirements.txt
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev xmlsec-dev xmlsec build-base g++ curl && \
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev xmlsec-dev xmlsec build-base && \
echo -n "INPUT ( libldap.so )" > /usr/lib/libldap_r.so && \
python -m venv venv && \
/opt/recipes/venv/bin/python -m pip install --upgrade pip && \
venv/bin/pip debug -v && \
venv/bin/pip install wheel==0.45.1 && \
venv/bin/pip install setuptools_rust==1.10.2 && \
if [ `apk --print-arch` = "aarch64" ]; then \
curl https://sh.rustup.rs -sSf | sh -s -- -y; \
fi &&\
venv/bin/pip install wheel==0.42.0 && \
venv/bin/pip install setuptools_rust==1.9.0 && \
venv/bin/pip install -r requirements.txt --no-cache-dir &&\
apk --purge del .build-deps

boot.sh (3 changed lines)
@@ -41,6 +41,9 @@ if [ -f "${SOCIALACCOUNT_PROVIDERS_FILE}" ]; then
export SOCIALACCOUNT_PROVIDERS=$(cat "$SOCIALACCOUNT_PROVIDERS_FILE")
fi

if [ -f "${S3_SECRET_ACCESS_KEY_FILE}" ]; then
export S3_SECRET_ACCESS_KEY=$(cat "$S3_SECRET_ACCESS_KEY_FILE")
fi

echo "Waiting for database to be ready..."

@@ -10,7 +10,7 @@ from treebeard.forms import movenodeform_factory

from cookbook.managers import DICTIONARY

from .models import (BookmarkletImport, Comment, CookLog, CustomFilter, Food, ImportLog, Ingredient, InviteLink,
from .models import (BookmarkletImport, Comment, CookLog, Food, ImportLog, Ingredient, InviteLink,
Keyword, MealPlan, MealType, NutritionInformation, Property, PropertyType,
Recipe, RecipeBook, RecipeBookEntry, RecipeImport, SearchPreference, ShareLink,
ShoppingListEntry, ShoppingListRecipe, Space, Step, Storage,
@@ -103,13 +103,6 @@ class ConnectorConfigAdmin(admin.ModelAdmin):
admin.site.register(ConnectorConfig, ConnectorConfigAdmin)


class CustomFilterAdmin(admin.ModelAdmin):
list_display = ('id', 'type', 'name')


admin.site.register(CustomFilter, CustomFilterAdmin)


class SyncAdmin(admin.ModelAdmin):
list_display = ('storage', 'path', 'active', 'last_checked')
search_fields = ('storage__name', 'path')
@@ -1,43 +1,6 @@
|
||||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
from cookbook.models import ShoppingListEntry, User, ConnectorConfig
|
||||
|
||||
|
||||
@dataclass
|
||||
class UserDTO:
|
||||
username: str
|
||||
first_name: Optional[str]
|
||||
|
||||
@staticmethod
|
||||
def create_from_user(instance: User) -> 'UserDTO':
|
||||
return UserDTO(
|
||||
username=instance.username,
|
||||
first_name=instance.first_name if instance.first_name else None
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class ShoppingListEntryDTO:
|
||||
food_name: str
|
||||
amount: Optional[float]
|
||||
base_unit: Optional[str]
|
||||
unit_name: Optional[str]
|
||||
created_by: UserDTO
|
||||
|
||||
@staticmethod
|
||||
def try_create_from_entry(instance: ShoppingListEntry) -> Optional['ShoppingListEntryDTO']:
|
||||
if instance.food is None or instance.created_by is None:
|
||||
return None
|
||||
|
||||
return ShoppingListEntryDTO(
|
||||
food_name=instance.food.name,
|
||||
amount=instance.amount if instance.amount else None,
|
||||
unit_name=instance.unit.name if instance.unit else None,
|
||||
base_unit=instance.unit.base_unit if instance.unit and instance.unit.base_unit else None,
|
||||
created_by=UserDTO.create_from_user(instance.created_by),
|
||||
)
|
||||
from cookbook.models import ShoppingListEntry, Space, ConnectorConfig
|
||||
|
||||
|
||||
# A Connector is 'destroyed' & recreated each time 'any' ConnectorConfig in a space changes.
|
||||
@@ -47,18 +10,20 @@ class Connector(ABC):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def on_shopping_list_entry_created(self, instance: ShoppingListEntryDTO) -> None:
|
||||
async def on_shopping_list_entry_created(self, space: Space, instance: ShoppingListEntry) -> None:
|
||||
pass
|
||||
|
||||
# This method might not trigger on 'direct' entry updates: https://stackoverflow.com/a/35238823
|
||||
@abstractmethod
|
||||
async def on_shopping_list_entry_updated(self, instance: ShoppingListEntryDTO) -> None:
|
||||
async def on_shopping_list_entry_updated(self, space: Space, instance: ShoppingListEntry) -> None:
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def on_shopping_list_entry_deleted(self, instance: ShoppingListEntryDTO) -> None:
|
||||
async def on_shopping_list_entry_deleted(self, space: Space, instance: ShoppingListEntry) -> None:
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def close(self) -> None:
|
||||
pass
|
||||
|
||||
# TODO: Add Recipes & possibly Meal Place listeners/hooks (And maybe more?)
|
||||
|
||||
@@ -12,7 +12,7 @@ from typing import List, Any, Dict, Optional, Type
|
||||
from django.conf import settings
|
||||
from django_scopes import scope
|
||||
|
||||
from cookbook.connectors.connector import Connector, ShoppingListEntryDTO
|
||||
from cookbook.connectors.connector import Connector
|
||||
from cookbook.connectors.homeassistant import HomeAssistant
|
||||
from cookbook.models import ShoppingListEntry, Space, ConnectorConfig
|
||||
|
||||
@@ -56,13 +56,15 @@ class ConnectorManager(metaclass=Singleton):
|
||||
|
||||
def __init__(self):
|
||||
self._logger = logging.getLogger("recipes.connector")
|
||||
self._logger.debug("ConnectorManager initializing")
|
||||
self._queue = queue.Queue(maxsize=settings.EXTERNAL_CONNECTORS_QUEUE_SIZE)
|
||||
self._worker = threading.Thread(target=self.worker, args=(0, self._queue,), daemon=True)
|
||||
self._worker.start()
|
||||
|
||||
# Called by post save & post delete signals
|
||||
def __call__(self, instance: Any, **kwargs) -> None:
|
||||
if not isinstance(instance, self._listening_to_classes) or not hasattr(instance, "space"):
|
||||
return
|
||||
|
||||
action_type: ActionType
|
||||
if "created" in kwargs and kwargs["created"]:
|
||||
action_type = ActionType.CREATED
|
||||
@@ -73,37 +75,16 @@ class ConnectorManager(metaclass=Singleton):
|
||||
else:
|
||||
return
|
||||
|
||||
self._add_work(action_type, instance)
|
||||
|
||||
def _add_work(self, action_type: ActionType, *instances: REGISTERED_CLASSES):
|
||||
for instance in instances:
|
||||
if not isinstance(instance, self._listening_to_classes) or not hasattr(instance, "space"):
|
||||
continue
|
||||
try:
|
||||
_force_load_instance(instance)
|
||||
self._queue.put_nowait(Work(instance, action_type))
|
||||
except queue.Full:
|
||||
self._logger.info(f"queue was full, so skipping {action_type} of type {type(instance)}")
|
||||
try:
|
||||
self._queue.put_nowait(Work(instance, action_type))
|
||||
except queue.Full:
|
||||
self._logger.info(f"queue was full, so skipping {action_type} of type {type(instance)}")
|
||||
return
|
||||
|
||||
def stop(self):
|
||||
self._queue.join()
|
||||
self._worker.join()
|
||||
|
||||
@classmethod
|
||||
def is_initialized(cls):
|
||||
return cls in cls._instances
|
||||
|
||||
@staticmethod
|
||||
def add_work(action_type: ActionType, *instances: REGISTERED_CLASSES):
|
||||
"""
|
||||
Manually inject work that failed to come in through the __call__ (aka Django signal)
|
||||
Before the work is processed, we check if the connectionManager is initialized, because if it's not, we don't want to accidentally initialize it.
|
||||
Be careful calling it, because it might result in a instance being processed twice.
|
||||
"""
|
||||
if not ConnectorManager.is_initialized():
|
||||
return
|
||||
ConnectorManager()._add_work(action_type, *instances)
|
||||
|
||||
@staticmethod
|
||||
def worker(worker_id: int, worker_queue: queue.Queue):
|
||||
logger = logging.getLogger("recipes.connector.worker")
|
||||
@@ -135,7 +116,7 @@ class ConnectorManager(metaclass=Singleton):
|
||||
|
||||
if connectors is None or refresh_connector_cache:
|
||||
if connectors is not None:
|
||||
loop.run_until_complete(_close_connectors(connectors))
|
||||
loop.run_until_complete(close_connectors(connectors))
|
||||
|
||||
with scope(space=space):
|
||||
connectors: List[Connector] = list()
|
||||
@@ -161,7 +142,7 @@ class ConnectorManager(metaclass=Singleton):
|
||||
|
||||
logger.debug(f"running {len(connectors)} connectors for {item.instance=} with {item.actionType=}")
|
||||
|
||||
loop.run_until_complete(run_connectors(connectors, item.instance, item.actionType))
|
||||
loop.run_until_complete(run_connectors(connectors, space, item.instance, item.actionType))
|
||||
worker_queue.task_done()
|
||||
|
||||
logger.info(f"terminating ConnectionManager worker {worker_id}")
|
||||
@@ -178,14 +159,7 @@ class ConnectorManager(metaclass=Singleton):
|
||||
return None
|
||||
|
||||
|
||||
def _force_load_instance(instance: REGISTERED_CLASSES):
|
||||
if isinstance(instance, ShoppingListEntry):
|
||||
_ = instance.food # Force load food
|
||||
_ = instance.unit # Force load unit
|
||||
_ = instance.created_by # Force load created_by
|
||||
|
||||
|
||||
async def _close_connectors(connectors: List[Connector]):
|
||||
async def close_connectors(connectors: List[Connector]):
|
||||
tasks: List[Task] = [asyncio.create_task(connector.close()) for connector in connectors]
|
||||
|
||||
if len(tasks) == 0:
|
||||
@@ -197,24 +171,22 @@ async def _close_connectors(connectors: List[Connector]):
|
||||
logging.exception("received an exception while closing one of the connectors")
|
||||
|
||||
|
||||
async def run_connectors(connectors: List[Connector], instance: REGISTERED_CLASSES, action_type: ActionType):
|
||||
async def run_connectors(connectors: List[Connector], space: Space, instance: REGISTERED_CLASSES, action_type: ActionType):
|
||||
tasks: List[Task] = list()
|
||||
|
||||
if isinstance(instance, ShoppingListEntry):
|
||||
shopping_list_entry = ShoppingListEntryDTO.try_create_from_entry(instance)
|
||||
if shopping_list_entry is None:
|
||||
return
|
||||
shopping_list_entry: ShoppingListEntry = instance
|
||||
|
||||
match action_type:
|
||||
case ActionType.CREATED:
|
||||
for connector in connectors:
|
||||
tasks.append(asyncio.create_task(connector.on_shopping_list_entry_created(shopping_list_entry)))
|
||||
tasks.append(asyncio.create_task(connector.on_shopping_list_entry_created(space, shopping_list_entry)))
|
||||
case ActionType.UPDATED:
|
||||
for connector in connectors:
|
||||
tasks.append(asyncio.create_task(connector.on_shopping_list_entry_updated(shopping_list_entry)))
|
||||
tasks.append(asyncio.create_task(connector.on_shopping_list_entry_updated(space, shopping_list_entry)))
|
||||
case ActionType.DELETED:
|
||||
for connector in connectors:
|
||||
tasks.append(asyncio.create_task(connector.on_shopping_list_entry_deleted(shopping_list_entry)))
|
||||
tasks.append(asyncio.create_task(connector.on_shopping_list_entry_deleted(space, shopping_list_entry)))
|
||||
|
||||
if len(tasks) == 0:
|
||||
return
|
||||
|
||||
@@ -5,14 +5,16 @@ from urllib.parse import urljoin
|
||||
|
||||
from aiohttp import request, ClientResponseError
|
||||
|
||||
from cookbook.connectors.connector import Connector, ShoppingListEntryDTO
|
||||
from cookbook.models import ConnectorConfig
|
||||
from cookbook.connectors.connector import Connector
|
||||
from cookbook.models import ShoppingListEntry, ConnectorConfig, Space
|
||||
|
||||
|
||||
class HomeAssistant(Connector):
|
||||
_config: ConnectorConfig
|
||||
_logger: Logger
|
||||
|
||||
_required_foreign_keys = ("food", "unit", "created_by")
|
||||
|
||||
def __init__(self, config: ConnectorConfig):
|
||||
if not config.token or not config.url or not config.todo_entity:
|
||||
raise ValueError("config for HomeAssistantConnector in incomplete")
|
||||
@@ -32,7 +34,7 @@ class HomeAssistant(Connector):
|
||||
response.raise_for_status()
|
||||
return await response.json()
|
||||
|
||||
async def on_shopping_list_entry_created(self, shopping_list_entry: ShoppingListEntryDTO) -> None:
|
||||
async def on_shopping_list_entry_created(self, space: Space, shopping_list_entry: ShoppingListEntry) -> None:
|
||||
if not self._config.on_shopping_list_entry_created_enabled:
|
||||
return
|
||||
|
||||
@@ -53,15 +55,20 @@ class HomeAssistant(Connector):
|
||||
except ClientResponseError as err:
|
||||
self._logger.warning(f"received an exception from the api: {err.request_info.url=}, {err.request_info.method=}, {err.status=}, {err.message=}, {type(err)=}")
|
||||
|
||||
async def on_shopping_list_entry_updated(self, shopping_list_entry: ShoppingListEntryDTO) -> None:
|
||||
async def on_shopping_list_entry_updated(self, space: Space, shopping_list_entry: ShoppingListEntry) -> None:
|
||||
if not self._config.on_shopping_list_entry_updated_enabled:
|
||||
return
|
||||
pass
|
||||
|
||||
async def on_shopping_list_entry_deleted(self, shopping_list_entry: ShoppingListEntryDTO) -> None:
|
||||
async def on_shopping_list_entry_deleted(self, space: Space, shopping_list_entry: ShoppingListEntry) -> None:
|
||||
if not self._config.on_shopping_list_entry_deleted_enabled:
|
||||
return
|
||||
|
||||
if not all(k in shopping_list_entry._state.fields_cache for k in self._required_foreign_keys):
|
||||
# Sometimes the food foreign key is not loaded, and we cant load it from an async process
|
||||
self._logger.debug("required property was not present in ShoppingListEntry")
|
||||
return
|
||||
|
||||
item, _ = _format_shopping_list_entry(shopping_list_entry)
|
||||
|
||||
self._logger.debug(f"removing {item=} from {self._config.todo_entity}")
|
||||
@@ -81,19 +88,19 @@ class HomeAssistant(Connector):
|
||||
pass
|
||||
|
||||
|
||||
def _format_shopping_list_entry(shopping_list_entry: ShoppingListEntryDTO) -> Tuple[str, str]:
|
||||
item = shopping_list_entry.food_name
|
||||
if shopping_list_entry.amount:
|
||||
def _format_shopping_list_entry(shopping_list_entry: ShoppingListEntry) -> Tuple[str, str]:
|
||||
item = shopping_list_entry.food.name
|
||||
if shopping_list_entry.amount > 0:
|
||||
item += f" ({shopping_list_entry.amount:.2f}".rstrip('0').rstrip('.')
|
||||
if shopping_list_entry.base_unit:
|
||||
item += f" {shopping_list_entry.base_unit})"
|
||||
elif shopping_list_entry.unit_name:
|
||||
item += f" {shopping_list_entry.unit_name})"
|
||||
if shopping_list_entry.unit and shopping_list_entry.unit.base_unit and len(shopping_list_entry.unit.base_unit) > 0:
|
||||
item += f" {shopping_list_entry.unit.base_unit})"
|
||||
elif shopping_list_entry.unit and shopping_list_entry.unit.name and len(shopping_list_entry.unit.name) > 0:
|
||||
item += f" {shopping_list_entry.unit.name})"
|
||||
else:
|
||||
item += ")"
|
||||
|
||||
description = "From TandoorRecipes"
|
||||
if shopping_list_entry.created_by.first_name:
|
||||
if shopping_list_entry.created_by.first_name and len(shopping_list_entry.created_by.first_name) > 0:
|
||||
description += f", by {shopping_list_entry.created_by.first_name}"
|
||||
else:
|
||||
description += f", by {shopping_list_entry.created_by.username}"
|
||||
|
||||
@@ -1,102 +0,0 @@
|
||||
# custom processing for schema
|
||||
# reason: DRF writable nested needs ID's to decide if a nested object should be created or updated
|
||||
# the API schema/client make ID's read only by default and strips them entirely in request objects (with COMPONENT_SPLIT_REQUEST enabled)
|
||||
# change the schema to make IDs optional but writable so they are included in the request
|
||||
|
||||
def custom_postprocessing_hook(result, generator, request, public):
|
||||
for c in result['components']['schemas'].keys():
|
||||
# handle schemas used by the client to do requests on the server
|
||||
if 'properties' in result['components']['schemas'][c] and 'id' in result['components']['schemas'][c]['properties']:
|
||||
# make ID field not read only so it's not stripped from the request on the client
|
||||
result['components']['schemas'][c]['properties']['id']['readOnly'] = False
|
||||
# make ID field not required
|
||||
if 'required' in result['components']['schemas'][c] and 'id' in result['components']['schemas'][c]['required']:
|
||||
result['components']['schemas'][c]['required'].remove('id')
|
||||
|
||||
return result
|
||||
|
||||
|
||||
# TODO remove below once legacy API has been fully deprecated
|
||||
from drf_spectacular.openapi import AutoSchema # noqa: E402 isort: skip
|
||||
import functools # noqa: E402 isort: skip
|
||||
import re # noqa: E402 isort: skip
|
||||
|
||||
|
||||
class LegacySchema(AutoSchema):
|
||||
operation_id_base = None
|
||||
|
||||
@functools.cached_property
|
||||
def path(self):
|
||||
path = re.sub(pattern=self.path_prefix, repl='', string=self.path, flags=re.IGNORECASE)
|
||||
# remove path variables
|
||||
return re.sub(pattern=r'\{[\w\-]+\}', repl='', string=path)
|
||||
|
||||
def get_operation_id(self):
|
||||
"""
|
||||
Compute an operation ID from the view type and get_operation_id_base method.
|
||||
"""
|
||||
method_name = getattr(self.view, 'action', self.method.lower())
|
||||
if self._is_list_view():
|
||||
action = 'list'
|
||||
elif method_name not in self.method_mapping:
|
||||
action = self._to_camel_case(method_name)
|
||||
else:
|
||||
action = self.method_mapping[self.method.lower()]
|
||||
|
||||
name = self.get_operation_id_base(action)
|
||||
|
||||
return action + name
|
||||
|
||||
def get_operation_id_base(self, action):
|
||||
"""
|
||||
Compute the base part for operation ID from the model, serializer or view name.
|
||||
"""
|
||||
model = getattr(getattr(self.view, 'queryset', None), 'model', None)
|
||||
|
||||
if self.operation_id_base is not None:
|
||||
name = self.operation_id_base
|
||||
|
||||
# Try to deduce the ID from the view's model
|
||||
elif model is not None:
|
||||
name = model.__name__
|
||||
|
||||
# Try with the serializer class name
|
||||
elif self.get_serializer() is not None:
|
||||
name = self.get_serializer().__class__.__name__
|
||||
if name.endswith('Serializer'):
|
||||
name = name[:-10]
|
||||
|
||||
# Fallback to the view name
|
||||
else:
|
||||
name = self.view.__class__.__name__
|
||||
if name.endswith('APIView'):
|
||||
name = name[:-7]
|
||||
elif name.endswith('View'):
|
||||
name = name[:-4]
|
||||
|
||||
# Due to camel-casing of classes and `action` being lowercase, apply title in order to find if action truly
|
||||
# comes at the end of the name
|
||||
if name.endswith(action.title()): # ListView, UpdateAPIView, ThingDelete ...
|
||||
name = name[:-len(action)]
|
||||
|
||||
if action == 'list' and not name.endswith('s'): # listThings instead of listThing
|
||||
name += 's'
|
||||
|
||||
return name
|
||||
|
||||
def get_serializer(self):
|
||||
view = self.view
|
||||
|
||||
if not hasattr(view, 'get_serializer'):
|
||||
return None
|
||||
|
||||
try:
|
||||
return view.get_serializer()
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def _to_camel_case(self, snake_str):
|
||||
components = snake_str.split('_')
|
||||
# We capitalize the first letter of each component except the first one
|
||||
# with the 'title' method and join them together.
|
||||
return components[0] + ''.join(x.title() for x in components[1:])
|
||||
@@ -37,7 +37,7 @@ def get_filetype(name):
|
||||
|
||||
def is_file_type_allowed(filename, image_only=False):
|
||||
is_file_allowed = False
|
||||
allowed_file_types = ['.pdf', '.docx', '.xlsx']
|
||||
allowed_file_types = ['.pdf','.docx', '.xlsx']
|
||||
allowed_image_types = ['.png', '.jpg', '.jpeg', '.gif', '.webp']
|
||||
check_list = allowed_image_types
|
||||
if not image_only:
|
||||
@@ -49,19 +49,6 @@ def is_file_type_allowed(filename, image_only=False):
|
||||
|
||||
return is_file_allowed
|
||||
|
||||
|
||||
def strip_image_meta(image_object, file_format):
|
||||
image_object = Image.open(image_object)
|
||||
|
||||
data = list(image_object.getdata())
|
||||
image_without_exif = Image.new(image_object.mode, image_object.size)
|
||||
image_without_exif.putdata(data)
|
||||
|
||||
im_io = BytesIO()
|
||||
image_without_exif.save(im_io, file_format)
|
||||
return im_io
|
||||
|
||||
|
||||
# TODO this whole file needs proper documentation, refactoring, and testing
|
||||
# TODO also add env variable to define which images sizes should be compressed
|
||||
# filetype argument can not be optional, otherwise this function will treat all images as if they were a jpeg
|
||||
@@ -72,20 +59,9 @@ def handle_image(request, image_object, filetype):
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
file_format = None
|
||||
if filetype == '.jpeg' or filetype == '.jpg':
|
||||
file_format = 'JPEG'
|
||||
if filetype == '.png':
|
||||
file_format = 'PNG'
|
||||
|
||||
if (image_object.size / 1000) > 500: # if larger than 500 kb compress
|
||||
if filetype == '.jpeg' or filetype == '.jpg':
|
||||
return rescale_image_jpeg(image_object)
|
||||
if filetype == '.png':
|
||||
return rescale_image_png(image_object)
|
||||
else:
|
||||
print('STripping image')
|
||||
return strip_image_meta(image_object, file_format)
|
||||
|
||||
# TODO webp and gifs bypass the scaling and metadata checks, fix
|
||||
return image_object
|
||||
|
||||
@@ -6,8 +6,8 @@ class StyleTreeprocessor(Treeprocessor):
|
||||
|
||||
def run_processor(self, node):
|
||||
for child in node:
|
||||
# if child.tag == "table":
|
||||
# child.set("class", "markdown-body")
|
||||
if child.tag == "table":
|
||||
child.set("class", "table table-bordered")
|
||||
if child.tag == "img":
|
||||
child.set("class", "img-fluid")
|
||||
self.run_processor(child)
|
||||
|
||||
@@ -6,8 +6,6 @@ from cookbook.models import (Food, FoodProperty, Property, PropertyType, Superma
|
||||
SupermarketCategory, SupermarketCategoryRelation, Unit, UnitConversion)
|
||||
import re
|
||||
|
||||
from recipes.settings import DEBUG
|
||||
|
||||
|
||||
class OpenDataImportResponse:
|
||||
total_created = 0
|
||||
@@ -369,28 +367,12 @@ class OpenDataImporter:
|
||||
create_list.append({'data': obj_dict})
|
||||
|
||||
if self.update_existing and len(update_list) > 0:
|
||||
try:
|
||||
model_type.objects.bulk_update(update_list, field_list)
|
||||
od_response.total_updated += len(update_list)
|
||||
except Exception:
|
||||
if DEBUG:
|
||||
print('========= LOAD FOOD FAILED ============')
|
||||
print(update_list)
|
||||
print(existing_data_names)
|
||||
print(existing_data_slugs)
|
||||
traceback.print_exc()
|
||||
model_type.objects.bulk_update(update_list, field_list)
|
||||
od_response.total_updated += len(update_list)
|
||||
|
||||
if len(create_list) > 0:
|
||||
try:
|
||||
Food.load_bulk(create_list, None)
|
||||
od_response.total_created += len(create_list)
|
||||
except Exception:
|
||||
if DEBUG:
|
||||
print('========= LOAD FOOD FAILED ============')
|
||||
print(create_list)
|
||||
print(existing_data_names)
|
||||
print(existing_data_slugs)
|
||||
traceback.print_exc()
|
||||
Food.load_bulk(create_list, None)
|
||||
od_response.total_created += len(create_list)
|
||||
|
||||
# --------------- PROPERTY STUFF -----------------------
|
||||
model_type = Property
|
||||
|
||||
@@ -160,15 +160,18 @@ class GroupRequiredMixin(object):
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
if not has_group_permission(request.user, self.groups_required):
|
||||
if not request.user.is_authenticated:
|
||||
messages.add_message(request, messages.ERROR, _('You are not logged in and therefore cannot view this page!'))
|
||||
messages.add_message(request, messages.ERROR,
|
||||
_('You are not logged in and therefore cannot view this page!'))
|
||||
return HttpResponseRedirect(reverse_lazy('account_login') + '?next=' + request.path)
|
||||
else:
|
||||
messages.add_message(request, messages.ERROR, _('You do not have the required permissions to view this page!'))
|
||||
messages.add_message(request, messages.ERROR,
|
||||
_('You do not have the required permissions to view this page!'))
|
||||
return HttpResponseRedirect(reverse_lazy('index'))
|
||||
try:
|
||||
obj = self.get_object()
|
||||
if obj.get_space() != request.space:
|
||||
messages.add_message(request, messages.ERROR, _('You do not have the required permissions to view this page!'))
|
||||
messages.add_message(request, messages.ERROR,
|
||||
_('You do not have the required permissions to view this page!'))
|
||||
return HttpResponseRedirect(reverse_lazy('index'))
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
@@ -44,17 +44,13 @@ class FoodPropertyHelper:
|
||||
if i.food is not None:
|
||||
conversions = uch.get_conversions(i)
|
||||
for pt in property_types:
|
||||
# if a property could be calculated with an actual value
|
||||
found_property = False
|
||||
# if food has a value for the given property type (no matter if conversion is possible)
|
||||
has_property_value = False
|
||||
if i.food.properties_food_amount == 0 or i.food.properties_food_unit is None: # if food is configured incorrectly
|
||||
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': {'id': i.food.id, 'name': i.food.name}, 'value': None}
|
||||
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': i.food.name, 'value': None}
|
||||
computed_properties[pt.id]['missing_value'] = True
|
||||
else:
|
||||
for p in i.food.properties.all():
|
||||
if p.property_type == pt and p.property_amount is not None:
|
||||
has_property_value = True
|
||||
for c in conversions:
|
||||
if c.unit == i.food.properties_food_unit:
|
||||
found_property = True
|
||||
@@ -64,12 +60,10 @@ class FoodPropertyHelper:
|
||||
if not found_property:
|
||||
if i.amount == 0: # don't count ingredients without an amount as missing
|
||||
computed_properties[pt.id]['missing_value'] = computed_properties[pt.id]['missing_value'] or False # don't override if another food was already missing
|
||||
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': {'id': i.food.id, 'name': i.food.name}, 'value': 0}
|
||||
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': i.food.name, 'value': 0}
|
||||
else:
|
||||
computed_properties[pt.id]['missing_value'] = True
|
||||
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': {'id': i.food.id, 'name': i.food.name}, 'value': None}
|
||||
if has_property_value and i.unit is not None:
|
||||
computed_properties[pt.id]['food_values'][i.food.id]['missing_conversion'] = {'base_unit': {'id': i.unit.id, 'name': i.unit.name}, 'converted_unit': {'id': i.food.properties_food_unit.id, 'name': i.food.properties_food_unit.name}}
|
||||
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': i.food.name, 'value': None}
|
||||
|
||||
return computed_properties
|
||||
|
||||
@@ -80,5 +74,5 @@ class FoodPropertyHelper:
|
||||
if key in d and d[key]['value']:
|
||||
d[key]['value'] += value
|
||||
else:
|
||||
d[key] = {'id': food.id, 'food': {'id': food.id, 'name': food.name}, 'value': value}
|
||||
d[key] = {'id': food.id, 'food': food.name, 'value': value}
|
||||
return d
|
||||
|
||||
@@ -49,11 +49,7 @@ class RecipeSearch():
|
||||
self._search_prefs = SearchPreference()
|
||||
self._string = self._params.get('query').strip(
|
||||
) if self._params.get('query', None) else None
|
||||
|
||||
self._rating = self._params.get('rating', None)
|
||||
self._rating_gte = self._params.get('rating_gte', None)
|
||||
self._rating_lte = self._params.get('rating_lte', None)
|
||||
|
||||
self._keywords = {
|
||||
'or': self._params.get('keywords_or', None) or self._params.get('keywords', None),
|
||||
'and': self._params.get('keywords_and', None),
|
||||
@@ -74,36 +70,20 @@ class RecipeSearch():
|
||||
}
|
||||
self._steps = self._params.get('steps', None)
|
||||
self._units = self._params.get('units', None)
|
||||
self._internal = str2bool(self._params.get('internal', None))
|
||||
# TODO add created by
|
||||
# TODO image exists
|
||||
self._sort_order = self._params.get('sort_order', None)
|
||||
if self._sort_order == 'random':
|
||||
self._random = True
|
||||
self.sort_order = None
|
||||
else:
|
||||
self._random = str2bool(self._params.get('random', False))
|
||||
self._internal = str2bool(self._params.get('internal', None))
|
||||
self._random = str2bool(self._params.get('random', False))
|
||||
self._new = str2bool(self._params.get('new', False))
|
||||
self._num_recent = int(self._params.get('num_recent', 0))
|
||||
self._include_children = str2bool(
|
||||
self._params.get('include_children', None))
|
||||
self._timescooked = self._params.get('timescooked', None)
|
||||
self._timescooked_gte = self._params.get('timescooked_gte', None)
|
||||
self._timescooked_lte = self._params.get('timescooked_lte', None)
|
||||
|
||||
self._cookedon = self._params.get('cookedon', None)
|
||||
self._createdon = self._params.get('createdon', None)
|
||||
self._createdon_gte = self._params.get('createdon_gte', None)
|
||||
self._createdon_lte = self._params.get('createdon_lte', None)
|
||||
|
||||
self._updatedon = self._params.get('updatedon', None)
|
||||
self._updatedon_gte = self._params.get('updatedon_gte', None)
|
||||
self._updatedon_lte = self._params.get('updatedon_lte', None)
|
||||
|
||||
self._viewedon_gte = self._params.get('viewedon_gte', None)
|
||||
self._viewedon_lte = self._params.get('viewedon_lte', None)
|
||||
|
||||
self._cookedon_gte = self._params.get('cookedon_gte', None)
|
||||
self._cookedon_lte = self._params.get('cookedon_lte', None)
|
||||
|
||||
self._createdby = self._params.get('createdby', None)
|
||||
self._viewedon = self._params.get('viewedon', None)
|
||||
self._makenow = self._params.get('makenow', None)
|
||||
# this supports hidden feature to find recipes missing X ingredients
|
||||
if isinstance(self._makenow, bool) and self._makenow == True:
|
||||
@@ -150,19 +130,16 @@ class RecipeSearch():
|
||||
|
||||
self._build_sort_order()
|
||||
self._recently_viewed(num_recent=self._num_recent)
|
||||
|
||||
self._cooked_on_filter()
|
||||
self._created_on_filter()
|
||||
self._updated_on_filter()
|
||||
self._viewed_on_filter()
|
||||
|
||||
self._created_by_filter(created_by_user_id=self._createdby)
|
||||
self._favorite_recipes()
|
||||
self._cooked_on_filter(cooked_date=self._cookedon)
|
||||
self._created_on_filter(created_date=self._createdon)
|
||||
self._updated_on_filter(updated_date=self._updatedon)
|
||||
self._viewed_on_filter(viewed_date=self._viewedon)
|
||||
self._favorite_recipes(times_cooked=self._timescooked)
|
||||
self._new_recipes()
|
||||
self.keyword_filters(**self._keywords)
|
||||
self.food_filters(**self._foods)
|
||||
self.book_filters(**self._books)
|
||||
self.rating_filter()
|
||||
self.rating_filter(rating=self._rating)
|
||||
self.internal_filter(internal=self._internal)
|
||||
self.step_filters(steps=self._steps)
|
||||
self.unit_filters(units=self._units)
|
||||
@@ -209,9 +186,9 @@ class RecipeSearch():
|
||||
else:
|
||||
order += default_order
|
||||
order[:] = [Lower('name').asc() if x ==
|
||||
'name' else x for x in order]
|
||||
'name' else x for x in order]
|
||||
order[:] = [Lower('name').desc() if x ==
|
||||
'-name' else x for x in order]
|
||||
'-name' else x for x in order]
|
||||
self.orderby = order
|
||||
|
||||
def string_filters(self, string=None):
|
||||
@@ -250,9 +227,9 @@ class RecipeSearch():
|
||||
query_filter |= Q(**{"%s" % f: self._string})
|
||||
self._queryset = self._queryset.filter(query_filter).distinct()
|
||||
|
||||
def _cooked_on_filter(self):
|
||||
if self._sort_includes('lastcooked') or self._cookedon_gte or self._cookedon_lte:
|
||||
lessthan = self._sort_includes('-lastcooked') or self._cookedon_lte
|
||||
def _cooked_on_filter(self, cooked_date=None):
|
||||
if self._sort_includes('lastcooked') or cooked_date:
|
||||
lessthan = self._sort_includes('-lastcooked') or '-' in (cooked_date or [])[:1]
|
||||
if lessthan:
|
||||
default = timezone.now() - timedelta(days=100000)
|
||||
else:
|
||||
@@ -260,44 +237,51 @@ class RecipeSearch():
|
||||
self._queryset = self._queryset.annotate(
|
||||
lastcooked=Coalesce(Max(Case(When(cooklog__created_by=self._request.user, cooklog__space=self._request.space, then='cooklog__created_at'))), Value(default))
|
||||
)
|
||||
if cooked_date is None:
|
||||
return
|
||||
|
||||
if self._cookedon_lte:
|
||||
self._queryset = self._queryset.filter(lastcooked__date__lte=self._cookedon_lte).exclude(lastcooked=default)
|
||||
elif self._cookedon_gte:
|
||||
self._queryset = self._queryset.filter(lastcooked__date__gte=self._cookedon_gte).exclude(lastcooked=default)
|
||||
cooked_date = date(*[int(x)for x in cooked_date.split('-') if x != ''])
|
||||
|
||||
if lessthan:
|
||||
self._queryset = self._queryset.filter(lastcooked__date__lte=cooked_date).exclude(lastcooked=default)
|
||||
else:
|
||||
self._queryset = self._queryset.filter(lastcooked__date__gte=cooked_date).exclude(lastcooked=default)
|
||||
|
||||
def _created_on_filter(self, created_date=None):
|
||||
if created_date is None:
|
||||
return
|
||||
lessthan = '-' in created_date[:1]
|
||||
created_date = date(*[int(x) for x in created_date.split('-') if x != ''])
|
||||
if lessthan:
|
||||
self._queryset = self._queryset.filter(created_at__date__lte=created_date)
|
||||
else:
|
||||
self._queryset = self._queryset.filter(created_at__date__gte=created_date)
|
||||
|
||||
def _updated_on_filter(self, updated_date=None):
|
||||
if updated_date is None:
|
||||
return
|
||||
lessthan = '-' in updated_date[:1]
|
||||
updated_date = date(*[int(x)for x in updated_date.split('-') if x != ''])
|
||||
if lessthan:
|
||||
self._queryset = self._queryset.filter(updated_at__date__lte=updated_date)
|
||||
else:
|
||||
self._queryset = self._queryset.filter(updated_at__date__gte=updated_date)
|
||||
|
||||
def _viewed_on_filter(self, viewed_date=None):
|
||||
if self._sort_includes('lastviewed') or self._viewedon_gte or self._viewedon_lte:
|
||||
if self._sort_includes('lastviewed') or viewed_date:
|
||||
longTimeAgo = timezone.now() - timedelta(days=100000)
|
||||
self._queryset = self._queryset.annotate(
|
||||
lastviewed=Coalesce(Max(Case(When(viewlog__created_by=self._request.user, viewlog__space=self._request.space, then='viewlog__created_at'))), Value(longTimeAgo))
|
||||
)
|
||||
|
||||
if self._viewedon_lte:
|
||||
self._queryset = self._queryset.filter(lastviewed__date__lte=self._viewedon_lte).exclude(lastviewed=longTimeAgo)
|
||||
elif self._viewedon_gte:
|
||||
self._queryset = self._queryset.filter(lastviewed__date__gte=self._viewedon_gte).exclude(lastviewed=longTimeAgo)
|
||||
|
||||
def _created_on_filter(self):
|
||||
if self._createdon:
|
||||
self._queryset = self._queryset.filter(created_at__date=self._createdon)
|
||||
elif self._createdon_lte:
|
||||
self._queryset = self._queryset.filter(created_at__date__lte=self._createdon_lte)
|
||||
elif self._createdon_gte:
|
||||
self._queryset = self._queryset.filter(created_at__date__gte=self._createdon_gte)
|
||||
|
||||
def _updated_on_filter(self):
|
||||
if self._updatedon:
|
||||
self._queryset = self._queryset.filter(updated_at__date__date=self._updatedon)
elif self._updatedon_lte:
self._queryset = self._queryset.filter(updated_at__date__lte=self._updatedon_lte)
elif self._updatedon_gte:
self._queryset = self._queryset.filter(updated_at__date__gte=self._updatedon_gte)

def _created_by_filter(self, created_by_user_id=None):
if created_by_user_id is None:
if viewed_date is None:
return
self._queryset = self._queryset.filter(created_by__id=created_by_user_id)
lessthan = '-' in viewed_date[:1]
viewed_date = date(*[int(x) for x in viewed_date.split('-') if x != ''])

if lessthan:
self._queryset = self._queryset.filter(lastviewed__date__lte=viewed_date).exclude(lastviewed=longTimeAgo)
else:
self._queryset = self._queryset.filter(lastviewed__date__gte=viewed_date).exclude(lastviewed=longTimeAgo)

def _new_recipes(self, new_days=7):
# TODO make new days a user-setting
@@ -323,9 +307,9 @@ class RecipeSearch():
)
self._queryset = self._queryset.annotate(recent=Coalesce(Max(Case(When(pk__in=num_recent_recipes.values('recipe'), then='viewlog__pk'))), Value(0)))

def _favorite_recipes(self):
if self._sort_includes('favorite') or self._timescooked or self._timescooked_gte or self._timescooked_lte:
less_than = self._timescooked_lte and not self._sort_includes('-favorite')
def _favorite_recipes(self, times_cooked=None):
if self._sort_includes('favorite') or times_cooked:
less_than = '-' in (str(times_cooked) or []) and not self._sort_includes('-favorite')
if less_than:
default = 1000
else:
@@ -337,13 +321,15 @@ class RecipeSearch():
.values('count')
)
self._queryset = self._queryset.annotate(favorite=Coalesce(Subquery(favorite_recipes), default))
if times_cooked is None:
return

if self._timescooked:
if times_cooked == '0':
self._queryset = self._queryset.filter(favorite=0)
elif self._timescooked_lte:
self._queryset = self._queryset.filter(favorite__lte=int(self._timescooked_lte)).exclude(favorite=0)
elif self._timescooked_gte:
self._queryset = self._queryset.filter(favorite__gte=int(self._timescooked_gte))
elif less_than:
self._queryset = self._queryset.filter(favorite__lte=int(times_cooked.replace('-', ''))).exclude(favorite=0)
else:
self._queryset = self._queryset.filter(favorite__gte=int(times_cooked))

def keyword_filters(self, **kwargs):
if all([kwargs[x] is None for x in kwargs]):
@@ -421,16 +407,25 @@ class RecipeSearch():
units = [units]
self._queryset = self._queryset.filter(steps__ingredients__unit__in=units)

def rating_filter(self):
if self._rating or self._rating_lte or self._rating_gte or self._sort_includes('rating'):
self._queryset = self._queryset.annotate(rating=Round(Avg(Case(When(cooklog__created_by=self._request.user, then='cooklog__rating'), default=0))))
def rating_filter(self, rating=None):
if rating or self._sort_includes('rating'):
lessthan = '-' in (rating or [])
reverse = 'rating' in (self._sort_order or []) and '-rating' not in (self._sort_order or [])
if lessthan or reverse:
default = 100
else:
default = 0
# TODO make ratings a setting: user-only vs all-users
self._queryset = self._queryset.annotate(rating=Round(Avg(Case(When(cooklog__created_by=self._request.user, then='cooklog__rating'), default=default))))
if rating is None:
return

if self._rating:
self._queryset = self._queryset.filter(rating=round(int(self._rating)))
elif self._rating_gte:
self._queryset = self._queryset.filter(rating__gte=int(self._rating_gte))
elif self._rating_lte:
self._queryset = self._queryset.filter(rating__lte=int(self._rating_lte)).exclude(rating=0)
if rating == '0':
self._queryset = self._queryset.filter(rating=0)
elif lessthan:
self._queryset = self._queryset.filter(rating__lte=int(rating[1:])).exclude(rating=0)
else:
self._queryset = self._queryset.filter(rating__gte=int(rating))

def internal_filter(self, internal=None):
if not internal:
@@ -540,11 +535,11 @@ class RecipeSearch():
shopping_users = [*self._request.user.get_shopping_share(), self._request.user]

onhand_filter = (
Q(steps__ingredients__food__onhand_users__in=shopping_users)  # food onhand
# or substitute food onhand
| Q(steps__ingredients__food__substitute__onhand_users__in=shopping_users)
| Q(steps__ingredients__food__in=self.__children_substitute_filter(shopping_users))
| Q(steps__ingredients__food__in=self.__sibling_substitute_filter(shopping_users))
Q(steps__ingredients__food__onhand_users__in=shopping_users)  # food onhand
# or substitute food onhand
| Q(steps__ingredients__food__substitute__onhand_users__in=shopping_users)
| Q(steps__ingredients__food__in=self.__children_substitute_filter(shopping_users))
| Q(steps__ingredients__food__in=self.__sibling_substitute_filter(shopping_users))
)
makenow_recipes = Recipe.objects.annotate(
count_food=Count('steps__ingredients__food__pk', filter=Q(steps__ingredients__food__isnull=False), distinct=True),
@@ -106,14 +106,14 @@ def get_from_scraper(scrape, request):

# assign image
try:
recipe_json['image_url'] = parse_image(scrape.image()) or None
recipe_json['image'] = parse_image(scrape.image()) or None
except Exception:
recipe_json['image_url'] = None
if not recipe_json['image_url']:
recipe_json['image'] = None
if not recipe_json['image']:
try:
recipe_json['image_url'] = parse_image(scrape.schema.data.get('image')) or ''
recipe_json['image'] = parse_image(scrape.schema.data.get('image')) or ''
except Exception:
recipe_json['image_url'] = ''
recipe_json['image'] = ''

# assign keywords
try:
@@ -205,7 +205,6 @@ def get_from_scraper(scrape, request):
except Exception:
pass

recipe_json['properties'] = []
try:
recipe_json['properties'] = get_recipe_properties(request.space, scrape.schema.nutrients())
print(recipe_json['properties'])
@@ -228,13 +227,6 @@ def get_recipe_properties(space, property_data):
"property-proteins": "proteinContent",
"property-fats": "fatContent",
}

serving_size = 1
try:
serving_size = parse_servings(property_data['servingSize'])
except KeyError:
pass

recipe_properties = []
for pt in PropertyType.objects.filter(space=space, open_data_slug__in=list(properties.keys())).all():
for p in list(properties.keys()):
@@ -245,7 +237,7 @@ def get_recipe_properties(space, property_data):
'id': pt.id,
'name': pt.name,
},
'property_amount': parse_servings(property_data[properties[p]]) / serving_size,
'property_amount': parse_servings(property_data[properties[p]]) / parse_servings(property_data['servingSize']),
})

return recipe_properties
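For context, the per-serving arithmetic above divides each schema.org nutrient value by the parsed serving size. A rough worked sketch under that assumption (the numbers and the simplified helper are made up, not taken from the repository):

import re

# e.g. schema.org nutrient data for a recipe that yields 4 servings
property_data = {'servingSize': '4', 'proteinContent': '52 g'}

def first_number(value):
    # simplified stand-in for parse_servings(); the real helper lives elsewhere in recipe_url_import.py
    match = re.search(r'\d+(\.\d+)?', str(value))
    return float(match.group()) if match else 1

per_serving = first_number(property_data['proteinContent']) / first_number(property_data['servingSize'])
# 52 g of protein across 4 servings -> 13.0 stored as property_amount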
@@ -432,9 +424,9 @@ def parse_keywords(keyword_json, request):
if len(kw) != 0:
kw = automation_engine.apply_keyword_automation(kw)
if k := Keyword.objects.filter(name__iexact=kw, space=request.space).first():
keywords.append({'label': str(k), 'name': k.name, 'id': k.id, 'import_keyword': True})
keywords.append({'label': str(k), 'name': k.name, 'id': k.id})
else:
keywords.append({'label': kw, 'name': kw, 'import_keyword': False})
keywords.append({'label': kw, 'name': kw})

return keywords

@@ -491,11 +483,7 @@ def get_images_from_soup(soup, url):
u = u.split('?')[0]
filename = re.search(r'/([\w_-]+[.](jpg|jpeg|gif|png))$', u)
if filename:
if u.startswith('//'):
# urls from e.g. ottolenghi.co.uk start with //
u = 'https:' + u
if ('http' not in u) and url:
print(f'rewriting URL {u}')
if (('http' not in u) and (url)):
# sometimes an image source can be relative
# if it is provide the base url
u = '{}://{}{}'.format(prot, site, u)
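The two branches above normalise image sources: protocol-relative URLs ('//...') get an https scheme, and relative paths are joined onto the page's origin. A small sketch of the same idea using only the standard library (urljoin is an alternative to the manual '{}://{}{}' formatting, not what the diff itself uses):

from urllib.parse import urljoin

page_url = 'https://example.org/recipes/best-lasagne'  # placeholder page URL

def normalise_image_url(u, base=page_url):
    u = u.split('?')[0]            # drop query strings, as above
    if u.startswith('//'):         # protocol-relative source
        return 'https:' + u
    if 'http' not in u:            # relative source, resolve against the page
        return urljoin(base, u)
    return u

# normalise_image_url('//cdn.example.org/img/lasagne.jpg') -> 'https://cdn.example.org/img/lasagne.jpg'
# normalise_image_url('/img/lasagne.jpg')                  -> 'https://example.org/img/lasagne.jpg'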

@@ -153,9 +153,8 @@ class RecipeShoppingEditor():
return True

for sle in ShoppingListEntry.objects.filter(list_recipe=self._shopping_list_recipe):
if sle.ingredient:  # TODO temporarily don't scale manual entries until ingredient_amount or some other base amount has been migrated to SLE
sle.amount = sle.ingredient.amount * Decimal(self._servings_factor)
sle.save()
sle.amount = sle.ingredient.amount * Decimal(self._servings_factor)
sle.save()
self._shopping_list_recipe.servings = self.servings
self._shopping_list_recipe.save()
return True
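The scaling step above multiplies each entry's base ingredient amount by the recipe's servings factor. A minimal numeric sketch (the factor is assumed here to be requested servings divided by the recipe's default servings; that definition is not shown in this hunk):

from decimal import Decimal

recipe_servings = Decimal(4)
requested_servings = Decimal(6)
servings_factor = requested_servings / recipe_servings   # Decimal('1.5')

ingredient_amount = Decimal('250')                        # e.g. grams for the 4-serving recipe
scaled_amount = ingredient_amount * servings_factor       # Decimal('375.0'), written back to the entry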

@@ -70,7 +70,7 @@ def render_instructions(step):  # TODO deduplicate markdown cleanup code
parsed_md = md.markdown(
instructions,
extensions=[
'markdown.extensions.fenced_code', 'markdown.extensions.sane_lists', 'markdown.extensions.nl2br', TableExtension(),
'markdown.extensions.fenced_code', TableExtension(),
UrlizeExtension(), MarkdownFormatExtension()
]
)
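For reference, the call above is plain python-markdown with a set of extensions; TableExtension, UrlizeExtension and MarkdownFormatExtension are classes imported elsewhere in the module, so this sketch sticks to the stock string-named extensions only:

import markdown as md

instructions = 'Mix the **dry** ingredients.\nPreheat the oven to 180 C.'
parsed_md = md.markdown(
    instructions,
    extensions=['markdown.extensions.fenced_code', 'markdown.extensions.sane_lists',
                'markdown.extensions.nl2br', 'markdown.extensions.tables']
)
# parsed_md now holds the HTML used when rendering step instructions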

@@ -1,10 +1,10 @@
import imghdr
import json
import re
from io import BytesIO
from zipfile import ZipFile

import requests
from PIL import Image

from django.utils.translation import gettext as _

@@ -128,7 +128,7 @@ class RecetteTek(Integration):
url = file['originalPicture']
if validate_import_url(url):
response = requests.get(url)
if Image.open(BytesIO(response.content)).verify():
if imghdr.what(BytesIO(response.content)) is not None:
self.import_recipe_image(recipe, BytesIO(response.content), filetype=get_filetype(file['originalPicture']))
else:
raise Exception("Original image failed to download.")

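The change above swaps PIL's verify() call for an imghdr check when deciding whether the downloaded bytes are an image at all; verify() returns None (it only raises on broken data), so the old condition was never truthy. A hedged sketch of both checks side by side, using a placeholder URL:

import imghdr
from io import BytesIO

import requests
from PIL import Image

url = 'https://example.org/original.jpg'   # placeholder, not from the diff
response = requests.get(url)

if imghdr.what(BytesIO(response.content)) is not None:       # cheap header sniff, what the new code relies on
    Image.open(BytesIO(response.content)).verify()            # raises on truncated/corrupt data, returns None otherwise
    # safe to hand the bytes to import_recipe_image(...)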
@@ -13,7 +13,7 @@ msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2024-08-01 15:04+0200\n"
|
||||
"PO-Revision-Date: 2025-03-12 09:58+0000\n"
|
||||
"PO-Revision-Date: 2025-04-29 02:39+0000\n"
|
||||
"Last-Translator: Nico G <aprops@gmail.com>\n"
|
||||
"Language-Team: Catalan <http://translate.tandoor.dev/projects/tandoor/"
|
||||
"recipes-backend/ca/>\n"
|
||||
@@ -252,7 +252,7 @@ msgstr ""
|
||||
#: .\cookbook\helper\permission_helper.py:164
|
||||
#: .\cookbook\helper\permission_helper.py:187 .\cookbook\views\views.py:117
|
||||
msgid "You are not logged in and therefore cannot view this page!"
|
||||
msgstr "No heu iniciat la sessió, no podeu veure aquesta pàgina."
|
||||
msgstr "No heu iniciat la sessió, no podeu veure aquesta pàgina!"
|
||||
|
||||
#: .\cookbook\helper\permission_helper.py:168
|
||||
#: .\cookbook\helper\permission_helper.py:174
|
||||
@@ -284,10 +284,8 @@ msgid "You have more users than allowed in your space."
|
||||
msgstr "Tens més usuaris dels permesos al teu espai."
|
||||
|
||||
#: .\cookbook\helper\recipe_url_import.py:310
|
||||
#, fuzzy
|
||||
#| msgid "Use fractions"
|
||||
msgid "reverse rotation"
|
||||
msgstr "Utilitza fraccions"
|
||||
msgstr "Direcció inversa"
|
||||
|
||||
#: .\cookbook\helper\recipe_url_import.py:311
|
||||
msgid "careful rotation"
|
||||
@@ -391,7 +389,7 @@ msgstr "Temps de preparació"
|
||||
|
||||
#: .\cookbook\integration\saffron.py:29 .\cookbook\templates\index.html:7
|
||||
msgid "Cookbook"
|
||||
msgstr "Receptari"
|
||||
msgstr "Llibre de receptes"
|
||||
|
||||
#: .\cookbook\integration\saffron.py:31
|
||||
msgid "Section"
|
||||
@@ -436,10 +434,8 @@ msgid "Other"
|
||||
msgstr "Altres"
|
||||
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:17
|
||||
#, fuzzy
|
||||
#| msgid "Fats"
|
||||
msgid "Fat"
|
||||
msgstr "Greixos"
|
||||
msgstr "Greix"
|
||||
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:17
|
||||
#: .\cookbook\migrations\0190_auto_20230525_1506.py:18
|
||||
@@ -548,10 +544,8 @@ msgid "Instruction Replace"
|
||||
msgstr "Substituir les Instruccions"
|
||||
|
||||
#: .\cookbook\models.py:1472
|
||||
#, fuzzy
|
||||
#| msgid "New Unit"
|
||||
msgid "Never Unit"
|
||||
msgstr "Nova Unitat"
|
||||
msgstr "Mai unitats"
|
||||
|
||||
#: .\cookbook\models.py:1473
|
||||
msgid "Transpose Words"
|
||||
@@ -562,10 +556,8 @@ msgid "Food Replace"
|
||||
msgstr "Aliment equivalent"
|
||||
|
||||
#: .\cookbook\models.py:1475
|
||||
#, fuzzy
|
||||
#| msgid "Description"
|
||||
msgid "Unit Replace"
|
||||
msgstr "Descripció"
|
||||
msgstr "Reemplaçar la descripció"
|
||||
|
||||
#: .\cookbook\models.py:1476
|
||||
msgid "Name Replace"
|
||||
@@ -577,10 +569,8 @@ msgid "Recipe"
|
||||
msgstr "Recepta"
|
||||
|
||||
#: .\cookbook\models.py:1504
|
||||
#, fuzzy
|
||||
#| msgid "Foods"
|
||||
msgid "Food"
|
||||
msgstr "Menjars"
|
||||
msgstr "Menjar o aliment"
|
||||
|
||||
#: .\cookbook\models.py:1505 .\cookbook\templates\base.html:149
|
||||
msgid "Keyword"
|
||||
@@ -1013,10 +1003,8 @@ msgstr "Historial"
|
||||
#: .\cookbook\templates\base.html:263
|
||||
#: .\cookbook\templates\ingredient_editor.html:7
|
||||
#: .\cookbook\templates\ingredient_editor.html:13
|
||||
#, fuzzy
|
||||
#| msgid "Ingredients"
|
||||
msgid "Ingredient Editor"
|
||||
msgstr "Ingredients"
|
||||
msgstr "Editor d'ingredients"
|
||||
|
||||
#: .\cookbook\templates\base.html:275
|
||||
#: .\cookbook\templates\export_response.html:7
|
||||
@@ -1029,10 +1017,8 @@ msgid "Properties"
|
||||
msgstr "Propietats"
|
||||
|
||||
#: .\cookbook\templates\base.html:301 .\cookbook\views\lists.py:255
|
||||
#, fuzzy
|
||||
#| msgid "Account Connections"
|
||||
msgid "Unit Conversions"
|
||||
msgstr "Connexions de Compte"
|
||||
msgstr "Conversió d'unitats"
|
||||
|
||||
#: .\cookbook\templates\base.html:318 .\cookbook\templates\index.html:47
|
||||
msgid "Import Recipe"
|
||||
@@ -1052,10 +1038,8 @@ msgid "Space Settings"
|
||||
msgstr "Opcions d'espai"
|
||||
|
||||
#: .\cookbook\templates\base.html:340
|
||||
#, fuzzy
|
||||
#| msgid "External Recipes"
|
||||
msgid "External Connectors"
|
||||
msgstr "Receptes Externes"
|
||||
msgstr "Connectors Externs"
|
||||
|
||||
#: .\cookbook\templates\base.html:345 .\cookbook\templates\system.html:13
|
||||
msgid "System"
|
||||
@@ -1067,10 +1051,8 @@ msgstr "Admin"
|
||||
|
||||
#: .\cookbook\templates\base.html:351
|
||||
#: .\cookbook\templates\space_overview.html:25
|
||||
#, fuzzy
|
||||
#| msgid "No Space"
|
||||
msgid "Your Spaces"
|
||||
msgstr "Sense Espai"
|
||||
msgstr "El teu espai"
|
||||
|
||||
#: .\cookbook\templates\base.html:362
|
||||
#: .\cookbook\templates\space_overview.html:6
|
||||
@@ -1121,7 +1103,7 @@ msgstr ""
|
||||
|
||||
#: .\cookbook\templates\batch\monitor.html:6 .\cookbook\views\edit.py:75
|
||||
msgid "Sync"
|
||||
msgstr "Sync"
|
||||
msgstr "Sincronitzar"
|
||||
|
||||
#: .\cookbook\templates\batch\monitor.html:10
|
||||
msgid "Manage watched Folders"
|
||||
@@ -1337,14 +1319,16 @@ msgid ""
|
||||
" "
|
||||
msgstr ""
|
||||
"\n"
|
||||
"Markdown és un llenguatge de marcatge lleuger que es pot utilitzar per donar "
|
||||
"format a text pla de forma senzilla.\n"
|
||||
" Markdown és un llenguatge de marcatge lleuger que es pot utilitzar "
|
||||
"per donar format a text pla de forma senzilla.\n"
|
||||
"Aquest lloc utilitza la biblioteca <a href=\"https://python-markdown.github."
|
||||
"io/\" target=\"_blank\"> Python Markown</a> per convertir el teu text en un "
|
||||
"bonic format HTML. La documentació completa de Markdown es pot trobar <a "
|
||||
"href=\"https://daringfireball.net/projects/markdown/syntax\" target=\"_blank"
|
||||
"\">aquí</a>.\n"
|
||||
"Pots trobar informació incompleta, encara que suficient més avall."
|
||||
"io/\" target=\"_blank\"> Python Markown</a> \n"
|
||||
"per convertir el teu text en un bonic format HTML. La documentació completa "
|
||||
"de Markdown es pot trobar\n"
|
||||
" <a href=\"https://daringfireball.net/projects/markdown/syntax\" target="
|
||||
"\"_blank\">aquí</a>.\n"
|
||||
"Pots trobar informació incompleta, encara que suficient més avall.\n"
|
||||
" "
|
||||
|
||||
#: .\cookbook\templates\markdown_info.html:25
|
||||
msgid "Headers"
|
||||
@@ -1450,9 +1434,8 @@ msgid ""
|
||||
"\"noreferrer noopener\" target=\"_blank\">this one.</a>"
|
||||
msgstr ""
|
||||
"Les taules de rebaixes són difícils de crear a mà. Es recomana utilitzar un "
|
||||
"editor de taules com\n"
|
||||
"<a href=\"https://www.tablesgenerator.com/markdown_tables\" rel=\"noreferrer "
|
||||
"noopener\" target=\"_blank\">aquest.</a>"
|
||||
"editor de taules com <a href=\"https://www.tablesgenerator.com/"
|
||||
"markdown_tables\" rel=\"noreferrer noopener\" target=\"_blank\">aquest.</a>"
|
||||
|
||||
#: .\cookbook\templates\markdown_info.html:155
|
||||
#: .\cookbook\templates\markdown_info.html:157
|
||||
@@ -1519,10 +1502,8 @@ msgid "Back"
|
||||
msgstr "Enrere"
|
||||
|
||||
#: .\cookbook\templates\property_editor.html:7
|
||||
#, fuzzy
|
||||
#| msgid "Ingredients"
|
||||
msgid "Property Editor"
|
||||
msgstr "Ingredients"
|
||||
msgstr "Editor de propietats"
|
||||
|
||||
#: .\cookbook\templates\recipe_view.html:36
|
||||
msgid "Comments"
|
||||
@@ -1539,7 +1520,7 @@ msgstr "Comentari"
|
||||
|
||||
#: .\cookbook\templates\rest_framework\api.html:5
|
||||
msgid "Recipe Home"
|
||||
msgstr "Receptari"
|
||||
msgstr "Pàgina d'inici"
|
||||
|
||||
#: .\cookbook\templates\search_info.html:5
|
||||
#: .\cookbook\templates\search_info.html:9
|
||||
@@ -1887,17 +1868,15 @@ msgstr "Crear compte de superusuari"
|
||||
|
||||
#: .\cookbook\templates\socialaccount\authentication_error.html:7
|
||||
#: .\cookbook\templates\socialaccount\authentication_error.html:23
|
||||
#, fuzzy
|
||||
#| msgid "Social Login"
|
||||
msgid "Social Network Login Failure"
|
||||
msgstr "Accés Social"
|
||||
msgstr "Error d'inici de sessió mitjançant l'inici de sessió social"
|
||||
|
||||
#: .\cookbook\templates\socialaccount\authentication_error.html:25
|
||||
#, fuzzy
|
||||
#| msgid "An error occurred attempting to move "
|
||||
msgid ""
|
||||
"An error occurred while attempting to login via your social network account."
|
||||
msgstr "Error a l'intentar moure "
|
||||
msgstr ""
|
||||
"S'ha produït un error en intentar iniciar sessió mitjançant el teu compte de "
|
||||
"xarxa social."
|
||||
|
||||
#: .\cookbook\templates\socialaccount\connections.html:4
|
||||
#: .\cookbook\templates\socialaccount\connections.html:15
|
||||
@@ -1928,7 +1907,7 @@ msgstr "Afegir Compte de tercers"
|
||||
#: .\cookbook\templates\socialaccount\login.html:5
|
||||
#: .\cookbook\templates\socialaccount\signup.html:5
|
||||
msgid "Signup"
|
||||
msgstr "Registre"
|
||||
msgstr "Registrar"
|
||||
|
||||
#: .\cookbook\templates\socialaccount\login.html:9
|
||||
#, python-format
|
||||
@@ -1985,10 +1964,8 @@ msgid "Sign in using"
|
||||
msgstr "Registrar emprant"
|
||||
|
||||
#: .\cookbook\templates\space_manage.html:7
|
||||
#, fuzzy
|
||||
#| msgid "Members"
|
||||
msgid "Space Management"
|
||||
msgstr "Membres"
|
||||
msgstr "Gestiona de l'espai"
|
||||
|
||||
#: .\cookbook\templates\space_manage.html:26
|
||||
msgid "Space:"
|
||||
@@ -1999,10 +1976,8 @@ msgid "Manage Subscription"
|
||||
msgstr "Administra Subscripció"
|
||||
|
||||
#: .\cookbook\templates\space_overview.html:13 .\cookbook\views\delete.py:184
|
||||
#, fuzzy
|
||||
#| msgid "Space:"
|
||||
msgid "Space"
|
||||
msgstr "Espai:"
|
||||
msgstr "Espai"
|
||||
|
||||
#: .\cookbook\templates\space_overview.html:17
|
||||
msgid ""
|
||||
@@ -2022,10 +1997,8 @@ msgid "Owner"
|
||||
msgstr "Propietari"
|
||||
|
||||
#: .\cookbook\templates\space_overview.html:57
|
||||
#, fuzzy
|
||||
#| msgid "Create Space"
|
||||
msgid "Leave Space"
|
||||
msgstr "Crear Espai"
|
||||
msgstr "Abandonar l'espai"
|
||||
|
||||
#: .\cookbook\templates\space_overview.html:78
|
||||
#: .\cookbook\templates\space_overview.html:88
|
||||
@@ -2118,11 +2091,12 @@ msgid ""
|
||||
" "
|
||||
msgstr ""
|
||||
"No es recomana publicar fitxers multimèdia directament mitjançant "
|
||||
"<b>gunicorn / python.</b>\n"
|
||||
"<b>gunicorn / python</b>!\n"
|
||||
"Seguiu els passos descrits\n"
|
||||
"<a href=\"https://github.com/vabene1111/recipes/releases/tag/0.8.1\">aquí</"
|
||||
"a> per actualitzar\n"
|
||||
"la vostra instal·lació."
|
||||
"la vostra instal·lació.\n"
|
||||
" "
|
||||
|
||||
#: .\cookbook\templates\system.html:55 .\cookbook\templates\system.html:70
|
||||
#: .\cookbook\templates\system.html:83 .\cookbook\templates\system.html:94
|
||||
@@ -2147,12 +2121,13 @@ msgid ""
|
||||
" "
|
||||
msgstr ""
|
||||
"\n"
|
||||
"No teniu un <code>SECRET_KEY </code>configurat al fitxer<code> .env.</code> "
|
||||
"Django per defecte ha estat\n"
|
||||
" No teniu un <code>SECRET_KEY </code>configurat al fitxer<code> ."
|
||||
"env.</code> Django per defecte ha estat\n"
|
||||
"clau estàndard\n"
|
||||
"subministrat amb la instal·lació que és coneguda i insegura públicament. "
|
||||
"Estableix-ho\n"
|
||||
"<code>SECRET_KEY</code> al fitxer de configuració<code> .env.</code>"
|
||||
"<code>SECRET_KEY</code> al fitxer de configuració<code> .env.</code>\n"
|
||||
" "
|
||||
|
||||
#: .\cookbook\templates\system.html:73
|
||||
msgid "Debug Mode"
|
||||
@@ -2169,10 +2144,11 @@ msgid ""
|
||||
" "
|
||||
msgstr ""
|
||||
"\n"
|
||||
"Aquesta aplicació encara s’executa en mode de depuració. És probable que "
|
||||
"això no sigui necessari. Activa el mode de depuració\n"
|
||||
" Aquesta aplicació encara s’executa en mode de depuració. És "
|
||||
"probable que això no sigui necessari. Activa el mode de depuració\n"
|
||||
"configuració\n"
|
||||
"<code>DEBUG = 0</code> al fitxer de configuració<code> .env.</code>"
|
||||
"<code>DEBUG = 0</code> al fitxer de configuració<code> .env.</code>\n"
|
||||
" "
|
||||
|
||||
#: .\cookbook\templates\system.html:86
|
||||
msgid "Allowed Hosts"
|
||||
@@ -2186,6 +2162,11 @@ msgid ""
|
||||
"this.\n"
|
||||
" "
|
||||
msgstr ""
|
||||
"\n"
|
||||
" La vostra configuració permet tots els amfitrions, això està bé "
|
||||
"en algunes instal·lacions, però en general cal evitar-ho. Consulteu la "
|
||||
"documentació sobre aquest tema.\n"
|
||||
" "
|
||||
|
||||
#: .\cookbook\templates\system.html:97
|
||||
msgid "Database"
|
||||
@@ -2196,10 +2177,8 @@ msgid "Info"
|
||||
msgstr "Info"
|
||||
|
||||
#: .\cookbook\templates\system.html:110 .\cookbook\templates\system.html:127
|
||||
#, fuzzy
|
||||
#| msgid "Use fractions"
|
||||
msgid "Migrations"
|
||||
msgstr "Utilitza fraccions"
|
||||
msgstr "Migracions"
|
||||
|
||||
#: .\cookbook\templates\system.html:116
|
||||
msgid ""
|
||||
@@ -2212,24 +2191,30 @@ msgid ""
|
||||
"issue.\n"
|
||||
" "
|
||||
msgstr ""
|
||||
"\n"
|
||||
" Les migracions de dades no haurien de fallar mai!\n"
|
||||
" Els errors de migració podrien provocar problemes operatius "
|
||||
"importants a l'aplicació.\n"
|
||||
" Si una migració falla, assegureu-vos que teniu l'última versió "
|
||||
"i, si és així, publiqueu el registre de migració i la visió general a "
|
||||
"continuació en un tiquet de GitHub.\n"
|
||||
" "
|
||||
|
||||
#: .\cookbook\templates\system.html:182
|
||||
msgid "False"
|
||||
msgstr ""
|
||||
msgstr "Fals"
|
||||
|
||||
#: .\cookbook\templates\system.html:182
|
||||
msgid "True"
|
||||
msgstr ""
|
||||
msgstr "Cert"
|
||||
|
||||
#: .\cookbook\templates\system.html:207
|
||||
msgid "Hide"
|
||||
msgstr ""
|
||||
msgstr "Amagar"
|
||||
|
||||
#: .\cookbook\templates\system.html:210
|
||||
#, fuzzy
|
||||
#| msgid "Show Log"
|
||||
msgid "Show"
|
||||
msgstr "Mostra Logs"
|
||||
msgstr "Mostra"
|
||||
|
||||
#: .\cookbook\templates\url_import.html:8
|
||||
msgid "URL Import"
|
||||
@@ -2304,16 +2289,17 @@ msgstr "Afegit {obj.name} a la llista de la compra."
|
||||
#: .\cookbook\views\api.py:742
|
||||
msgid "Filter meal plans from date (inclusive) in the format of YYYY-MM-DD."
|
||||
msgstr ""
|
||||
"Filtrar els plans d'àpats des de la data (inclosa) en el format AAAA-MM-DD."
|
||||
|
||||
#: .\cookbook\views\api.py:743
|
||||
msgid "Filter meal plans to date (inclusive) in the format of YYYY-MM-DD."
|
||||
msgstr ""
|
||||
msgstr "Filtreu els plans de menús actuals (inclosos) en el format AAAA-MM-DD."
|
||||
|
||||
#: .\cookbook\views\api.py:744
|
||||
#, fuzzy
|
||||
#| msgid "ID of recipe a step is part of. For multiple repeat parameter."
|
||||
msgid "Filter meal plans with MealType ID. For multiple repeat parameter."
|
||||
msgstr "ID de recepta forma part d'un pas. Per a múltiples repeteix paràmetre."
|
||||
msgstr ""
|
||||
"Filtra els plans d'àpats amb MealType ID. Per a múltiples paràmetres de "
|
||||
"repetició."
|
||||
|
||||
#: .\cookbook\views\api.py:872
|
||||
msgid "ID of recipe a step is part of. For multiple repeat parameter."
|
||||
@@ -2332,34 +2318,40 @@ msgstr ""
|
||||
"també cerca text complet."
|
||||
|
||||
#: .\cookbook\views\api.py:910
|
||||
#, fuzzy
|
||||
#| msgid "ID of keyword a recipe should have. For multiple repeat parameter."
|
||||
msgid ""
|
||||
"ID of keyword a recipe should have. For multiple repeat parameter. "
|
||||
"Equivalent to keywords_or"
|
||||
msgstr ""
|
||||
"ID de la paraula clau que hauria de tenir una recepta. Per a múltiples "
|
||||
"repeteix paràmetre."
|
||||
"repeteix paràmetre. Equivalent a keywords_or"
|
||||
|
||||
#: .\cookbook\views\api.py:911
|
||||
msgid ""
|
||||
"Keyword IDs, repeat for multiple. Return recipes with any of the keywords"
|
||||
msgstr ""
|
||||
"Identificadors (IDs) de paraules clau, Paraules clau d'identificació, "
|
||||
"repetiu-ne per a múltiples. Retorna receptes amb qualsevol paraula clau"
|
||||
|
||||
#: .\cookbook\views\api.py:912
|
||||
msgid ""
|
||||
"Keyword IDs, repeat for multiple. Return recipes with all of the keywords."
|
||||
msgstr ""
|
||||
"Paraules clau d'identificació (IDs), repetiu-ne per a múltiples. Torna "
|
||||
"receptes que contenen totes les paraules clau."
|
||||
|
||||
#: .\cookbook\views\api.py:913
|
||||
msgid ""
|
||||
"Keyword IDs, repeat for multiple. Exclude recipes with any of the keywords."
|
||||
msgstr ""
|
||||
"Identificador (ID) de les paraules clau, repetiu-ne per a diversos. Exclou "
|
||||
"les receptes que continguin alguna de les paraules clau."
|
||||
|
||||
#: .\cookbook\views\api.py:914
|
||||
msgid ""
|
||||
"Keyword IDs, repeat for multiple. Exclude recipes with all of the keywords."
|
||||
msgstr ""
|
||||
"Identificació (ID) de les paraules clau, repetiu-ne per a diversos. Exclou "
|
||||
"les receptes que continguin alguna de les paraules clau."
|
||||
|
||||
#: .\cookbook\views\api.py:915
|
||||
msgid "ID of food a recipe should have. For multiple repeat parameter."
|
||||
@@ -2370,18 +2362,26 @@ msgstr ""
|
||||
#: .\cookbook\views\api.py:916
|
||||
msgid "Food IDs, repeat for multiple. Return recipes with any of the foods"
|
||||
msgstr ""
|
||||
"ID dels aliments, repeteix-lo per a múltiples. Retorna les receptes que "
|
||||
"continguin més d'un aliment"
|
||||
|
||||
#: .\cookbook\views\api.py:917
|
||||
msgid "Food IDs, repeat for multiple. Return recipes with all of the foods."
|
||||
msgstr ""
|
||||
"ID d'aliments, repetiu-ho per a múltiples. Retorna receptes amb tots els "
|
||||
"aliments."
|
||||
|
||||
#: .\cookbook\views\api.py:918
|
||||
msgid "Food IDs, repeat for multiple. Exclude recipes with any of the foods."
|
||||
msgstr ""
|
||||
"ID d'aliments, repetiu-ho per a múltiples. Exclou receptes que contingui "
|
||||
"algun dels aliments."
|
||||
|
||||
#: .\cookbook\views\api.py:919
|
||||
msgid "Food IDs, repeat for multiple. Exclude recipes with all of the foods."
|
||||
msgstr ""
|
||||
"Identificació (ID) dels aliments, repetiu-ne per a diversos. Exclou les "
|
||||
"receptes que continguin tots els aliments."
|
||||
|
||||
#: .\cookbook\views\api.py:920
|
||||
msgid "ID of unit a recipe should have."
|
||||
@@ -2392,6 +2392,8 @@ msgid ""
|
||||
"Rating a recipe should have or greater. [0 - 5] Negative value filters "
|
||||
"rating less than."
|
||||
msgstr ""
|
||||
"Tingues en compte que una recepta hauria de tenir o ser superior. [0 - 5] Un "
|
||||
"valor negatiu filtra una puntuació inferior a."
|
||||
|
||||
#: .\cookbook\views\api.py:922
|
||||
msgid "ID of book a recipe should be in. For multiple repeat parameter."
|
||||
@@ -2402,70 +2404,94 @@ msgstr ""
|
||||
#: .\cookbook\views\api.py:923
|
||||
msgid "Book IDs, repeat for multiple. Return recipes with any of the books"
|
||||
msgstr ""
|
||||
"Identificadors de llibre (IDs), repeteix per a diversos. Retorna receptes "
|
||||
"amb qualsevol dels llibres"
|
||||
|
||||
#: .\cookbook\views\api.py:924
|
||||
msgid "Book IDs, repeat for multiple. Return recipes with all of the books."
|
||||
msgstr ""
|
||||
"Identificador (IDs) de llibres, repetiu-ho per a diversos. Torna receptes "
|
||||
"amb tots els llibres."
|
||||
|
||||
#: .\cookbook\views\api.py:925
|
||||
msgid "Book IDs, repeat for multiple. Exclude recipes with any of the books."
|
||||
msgstr ""
|
||||
"ID del llibre. Es pot especificar diverses vegades. Exclou les receptes dels "
|
||||
"llibres amb l'ID especificat."
|
||||
|
||||
#: .\cookbook\views\api.py:926
|
||||
msgid "Book IDs, repeat for multiple. Exclude recipes with all of the books."
|
||||
msgstr ""
|
||||
"ID dels llibres, es pot especificar diverses vegades. Exclou les receptes "
|
||||
"amb tots els llibres amb les ID especificades."
|
||||
|
||||
#: .\cookbook\views\api.py:927
|
||||
msgid "If only internal recipes should be returned. [true/<b>false</b>]"
|
||||
msgstr ""
|
||||
msgstr "Només cal retornar les receptes internes. [sí/<b>no</b>]"
|
||||
|
||||
#: .\cookbook\views\api.py:928
|
||||
msgid "Returns the results in randomized order. [true/<b>false</b>]"
|
||||
msgstr ""
|
||||
msgstr "Retorna resultats en ordre aleatori. [sí/<b>no</b>]"
|
||||
|
||||
#: .\cookbook\views\api.py:929
|
||||
msgid "Returns new results first in search results. [true/<b>false</b>]"
|
||||
msgstr ""
|
||||
"En primer lloc, retorna nous resultats als resultats de la cerca. [cert/"
|
||||
"<b>fals</b>]"
|
||||
|
||||
#: .\cookbook\views\api.py:930
|
||||
msgid ""
|
||||
"Filter recipes cooked X times or more. Negative values returns cooked less "
|
||||
"than X times"
|
||||
msgstr ""
|
||||
"Filtra les receptes cuinades X vegades o més. Els valors negatius retornen "
|
||||
"les receptes cuinades menys de X vegades"
|
||||
|
||||
#: .\cookbook\views\api.py:931
|
||||
msgid ""
|
||||
"Filter recipes last cooked on or after YYYY-MM-DD. Prepending - filters on "
|
||||
"or before date."
|
||||
msgstr ""
|
||||
"Filtra les receptes que s'han cuinat per última vegada el AAAA-MM-DD o "
|
||||
"després. Prefixació: filtra la data exacta o abans de la data."
|
||||
|
||||
#: .\cookbook\views\api.py:932
|
||||
msgid ""
|
||||
"Filter recipes created on or after YYYY-MM-DD. Prepending - filters on or "
|
||||
"before date."
|
||||
msgstr ""
|
||||
"Filtra les receptes que s'han creat el AAAA-MM-DD o després. Prefixació: "
|
||||
"filtra la data exacta o abans de la data."
|
||||
|
||||
#: .\cookbook\views\api.py:933
|
||||
msgid ""
|
||||
"Filter recipes updated on or after YYYY-MM-DD. Prepending - filters on or "
|
||||
"before date."
|
||||
msgstr ""
|
||||
"Filtra les receptes que s'han actualitzat el AAAA-MM-DD o després. "
|
||||
"Prefixació: filtra la data exacta o abans de la data."
|
||||
|
||||
#: .\cookbook\views\api.py:934
|
||||
msgid ""
|
||||
"Filter recipes lasts viewed on or after YYYY-MM-DD. Prepending - filters on "
|
||||
"or before date."
|
||||
msgstr ""
|
||||
"Filtra les receptes que s'han visitat per última vegada el AAAA-MM-DD o "
|
||||
"després. Prefixació: filtra la data exacta o abans de la data."
|
||||
|
||||
#: .\cookbook\views\api.py:935
|
||||
msgid "Filter recipes that can be made with OnHand food. [true/<b>false</b>]"
|
||||
msgstr ""
|
||||
"Filtra receptes que poden elaborar-se amb aliments disponibles. [true/"
|
||||
"<b>false</b>]"
|
||||
|
||||
#: .\cookbook\views\api.py:1122
|
||||
msgid ""
|
||||
"Returns the shopping list entry with a primary key of id. Multiple values "
|
||||
"allowed."
|
||||
msgstr ""
|
||||
"Retorna l'entrada de la llista de la compra amb una clau primària "
|
||||
"d'identificador. Es permeten diversos valors."
|
||||
|
||||
#: .\cookbook\views\api.py:1125
|
||||
msgid ""
|
||||
@@ -2473,20 +2499,27 @@ msgid ""
|
||||
"b>]<br> - recent includes unchecked items and recently "
|
||||
"completed items."
|
||||
msgstr ""
|
||||
"Filtreu les entrades de la llista de compres per marcades. [cert, fals, "
|
||||
"ambdues, <b>recent</b>]<br> -recent inclou elements no "
|
||||
"marcats i elements completats recentment."
|
||||
|
||||
#: .\cookbook\views\api.py:1128
|
||||
msgid "Returns the shopping list entries sorted by supermarket category order."
|
||||
msgstr ""
|
||||
"Retorna les entrades de la llista de la compra ordenades per comanda de "
|
||||
"categoria de supermercat."
|
||||
|
||||
#: .\cookbook\views\api.py:1210
|
||||
msgid "Filter for entries with the given recipe"
|
||||
msgstr ""
|
||||
msgstr "Filtrar les entrades amb la recepta especificada"
|
||||
|
||||
#: .\cookbook\views\api.py:1292
|
||||
msgid ""
|
||||
"Return the Automations matching the automation type. Multiple values "
|
||||
"allowed."
|
||||
msgstr ""
|
||||
"Retorna l'entrada de la llista de la compra amb una clau primària de "
|
||||
"l'identificador. Es permeten múltiples valors."
|
||||
|
||||
#: .\cookbook\views\api.py:1415
|
||||
msgid "Nothing to do."
|
||||
@@ -2494,7 +2527,7 @@ msgstr "Res a fer."
|
||||
|
||||
#: .\cookbook\views\api.py:1445
|
||||
msgid "Invalid Url"
|
||||
msgstr ""
|
||||
msgstr "Url Invàlida"
|
||||
|
||||
#: .\cookbook\views\api.py:1449
|
||||
msgid "Connection Refused."
|
||||
@@ -2502,7 +2535,7 @@ msgstr "Connexió Refusada."
|
||||
|
||||
#: .\cookbook\views\api.py:1451
|
||||
msgid "Bad URL Schema."
|
||||
msgstr ""
|
||||
msgstr "Esquema URL erroni."
|
||||
|
||||
#: .\cookbook\views\api.py:1474
|
||||
msgid "No usable data could be found."
|
||||
@@ -2510,7 +2543,7 @@ msgstr "No s'han trobat dades utilitzables."
|
||||
|
||||
#: .\cookbook\views\api.py:1549
|
||||
msgid "File is above space limit"
|
||||
msgstr ""
|
||||
msgstr "El fitxer supera el límit d'emmagatzematge"
|
||||
|
||||
#: .\cookbook\views\api.py:1566 .\cookbook\views\import_export.py:114
|
||||
msgid "Importing is not implemented for this provider"
|
||||
@@ -2525,7 +2558,7 @@ msgstr ""
|
||||
|
||||
#: .\cookbook\views\api.py:1671
|
||||
msgid "Sync successful!"
|
||||
msgstr "Sincronització correcte"
|
||||
msgstr "Sincronització correcta!"
|
||||
|
||||
#: .\cookbook\views\api.py:1674
|
||||
msgid "Error synchronizing with Storage"
|
||||
@@ -2556,42 +2589,36 @@ msgstr ""
|
||||
"almenys un monitor."
|
||||
|
||||
#: .\cookbook\views\delete.py:135
|
||||
#, fuzzy
|
||||
#| msgid "Storage Backend"
|
||||
msgid "Connectors Config Backend"
|
||||
msgstr "Backend d'emmagatzematge"
|
||||
msgstr "Back-end de configuració de connectors"
|
||||
|
||||
#: .\cookbook\views\delete.py:157
|
||||
msgid "Invite Link"
|
||||
msgstr "Enllaç de invitació"
|
||||
|
||||
#: .\cookbook\views\delete.py:168
|
||||
#, fuzzy
|
||||
#| msgid "Members"
|
||||
msgid "Space Membership"
|
||||
msgstr "Membres"
|
||||
msgstr "Membres de l'espai"
|
||||
|
||||
#: .\cookbook\views\edit.py:84
|
||||
msgid "You cannot edit this storage!"
|
||||
msgstr "No podeu editar aquest emmagatzematge."
|
||||
msgstr "No podeu editar aquest emmagatzematge!"
|
||||
|
||||
#: .\cookbook\views\edit.py:108
|
||||
msgid "Storage saved!"
|
||||
msgstr "Emmagatzematge desat."
|
||||
msgstr "Emmagatzematge desat!"
|
||||
|
||||
#: .\cookbook\views\edit.py:110
|
||||
msgid "There was an error updating this storage backend!"
|
||||
msgstr "S'ha produït un error en actualitzar aquest backend d'emmagatzematge."
|
||||
msgstr "S'ha produït un error en actualitzar aquest backend d'emmagatzematge!"
|
||||
|
||||
#: .\cookbook\views\edit.py:134
|
||||
#, fuzzy
|
||||
#| msgid "Changes saved!"
|
||||
msgid "Config saved!"
|
||||
msgstr "Canvis desats!"
|
||||
msgstr "Configuració desada!"
|
||||
|
||||
#: .\cookbook\views\edit.py:142
|
||||
msgid "ConnectorConfig"
|
||||
msgstr ""
|
||||
msgstr "Configuració del connector"
|
||||
|
||||
#: .\cookbook\views\edit.py:198
|
||||
msgid "Changes saved!"
|
||||
@@ -2622,10 +2649,8 @@ msgid "Shopping List"
|
||||
msgstr "Llista de la Compra"
|
||||
|
||||
#: .\cookbook\views\lists.py:77 .\cookbook\views\new.py:98
|
||||
#, fuzzy
|
||||
#| msgid "Storage Backend"
|
||||
msgid "Connector Config Backend"
|
||||
msgstr "Backend d'emmagatzematge"
|
||||
msgstr "Configuració del backend per a connectors"
|
||||
|
||||
#: .\cookbook\views\lists.py:91
|
||||
msgid "Invite Links"
|
||||
@@ -2640,10 +2665,8 @@ msgid "Shopping Categories"
|
||||
msgstr "Categories de Compres"
|
||||
|
||||
#: .\cookbook\views\lists.py:202
|
||||
#, fuzzy
|
||||
#| msgid "Filter"
|
||||
msgid "Custom Filters"
|
||||
msgstr "Filtre"
|
||||
msgstr "Filtres personalitzats"
|
||||
|
||||
#: .\cookbook\views\lists.py:239
|
||||
msgid "Steps"
|
||||
@@ -2651,13 +2674,11 @@ msgstr "Passos"
|
||||
|
||||
#: .\cookbook\views\lists.py:270
|
||||
msgid "Property Types"
|
||||
msgstr ""
|
||||
msgstr "Tipus de propietat"
|
||||
|
||||
#: .\cookbook\views\new.py:86
|
||||
#, fuzzy
|
||||
#| msgid "This feature is not available in the demo version!"
|
||||
msgid "This feature is not enabled by the server admin!"
|
||||
msgstr "Funció no està disponible a la versió de demostració!"
|
||||
msgstr "Aquesta funció no està activada per l'administrador del servidor!"
|
||||
|
||||
#: .\cookbook\views\new.py:123
|
||||
msgid "Imported new recipe!"
|
||||
@@ -2673,11 +2694,9 @@ msgid "This feature is not available in the demo version!"
|
||||
msgstr "Funció no està disponible a la versió de demostració!"
|
||||
|
||||
#: .\cookbook\views\views.py:74
|
||||
#, fuzzy
|
||||
#| msgid "You have reached the maximum number of recipes for your space."
|
||||
msgid ""
|
||||
"You have the reached the maximum amount of spaces that can be owned by you."
|
||||
msgstr "Has arribat al nombre màxim de receptes per al vostre espai."
|
||||
msgstr "Has assolit la quantitat màxima d'espais que pots tenir."
|
||||
|
||||
#: .\cookbook\views\views.py:89
|
||||
msgid ""
|
||||
@@ -2715,47 +2734,36 @@ msgstr "Cerca difusa no és compatible amb aquest mètode de cerca!"
|
||||
#, python-format
|
||||
msgid "PostgreSQL %(v)s is deprecated. Upgrade to a fully supported version!"
|
||||
msgstr ""
|
||||
"PostgreSQL %(v)s està obsolet. Actualitza a una versió totalment compatible!"
|
||||
|
||||
#: .\cookbook\views\views.py:309
|
||||
#, python-format
|
||||
msgid "You are running PostgreSQL %(v1)s. PostgreSQL %(v2)s is recommended"
|
||||
msgstr ""
|
||||
msgstr "Estàs fent servir PostgreSQL %(v1)s. Es recomana PostgreSQL %(v2)s"
|
||||
|
||||
#: .\cookbook\views\views.py:313
|
||||
msgid "Unable to determine PostgreSQL version."
|
||||
msgstr ""
|
||||
msgstr "No es pot determinar la versió de PostgreSQL."
|
||||
|
||||
#: .\cookbook\views\views.py:317
|
||||
#, fuzzy
|
||||
#| msgid ""
|
||||
#| "\n"
|
||||
#| " This application is not running with a Postgres database "
|
||||
#| "backend. This is ok but not recommended as some\n"
|
||||
#| " features only work with postgres databases.\n"
|
||||
#| " "
|
||||
msgid ""
|
||||
"This application is not running with a Postgres database backend. This is ok "
|
||||
"but not recommended as some features only work with postgres databases."
|
||||
msgstr ""
|
||||
"\n"
|
||||
"Aquesta aplicació no s’executa amb un backend de base de dades Postgres. "
|
||||
"Això està bé, però no es recomana com alguns\n"
|
||||
"les funcions només funcionen amb bases de dades postgres."
|
||||
"Aquesta aplicació no s'executa amb un backend de base de dades Postgres. "
|
||||
"Això està bé, però no es recomanable, ja que algunes funcions només "
|
||||
"funcionen amb bases de dades Postgres."
|
||||
|
||||
#: .\cookbook\views\views.py:360
|
||||
#, fuzzy
|
||||
#| msgid ""
|
||||
#| "The setup page can only be used to create the first user! If you have "
|
||||
#| "forgotten your superuser credentials please consult the django "
|
||||
#| "documentation on how to reset passwords."
|
||||
msgid ""
|
||||
"The setup page can only be used to create the first "
|
||||
"user! If you have forgotten your superuser credentials "
|
||||
"please consult the django documentation on how to reset passwords."
|
||||
msgstr ""
|
||||
"La pàgina de configuració només es pot utilitzar per crear el primer usuari. "
|
||||
"Si heu oblidat les vostres credencials de superusuari, consulteu la "
|
||||
"documentació de django sobre com restablir les contrasenyes."
|
||||
"La pàgina de configuració només es pot utilitzar per crear el primer "
|
||||
"usuari! Si heu oblidat les vostres credencials de "
|
||||
"superusuari, consulteu la documentació de django sobre com restablir les "
|
||||
"contrasenyes."
|
||||
|
||||
#: .\cookbook\views\views.py:369
|
||||
msgid "Passwords dont match!"
|
||||
@@ -2767,7 +2775,7 @@ msgstr "L'usuari s'ha creat, si us plau inicieu la sessió!"
|
||||
|
||||
#: .\cookbook\views\views.py:393
|
||||
msgid "Malformed Invite Link supplied!"
|
||||
msgstr "S'ha proporcionat un enllaç d'invitació mal format."
|
||||
msgstr "S'ha proporcionat un enllaç d'invitació amb un format incorrecte!"
|
||||
|
||||
#: .\cookbook\views\views.py:410
|
||||
msgid "Successfully joined space."
|
||||
@@ -2775,7 +2783,7 @@ msgstr "Unit correctament a l'espai."
|
||||
|
||||
#: .\cookbook\views\views.py:416
|
||||
msgid "Invite Link not valid or already used!"
|
||||
msgstr "L'enllaç d'invitació no és vàlid o ja s'ha utilitzat."
|
||||
msgstr "L'enllaç d'invitació no és vàlid o ja s'ha utilitzat!"
|
||||
|
||||
#: .\cookbook\views\views.py:432
|
||||
msgid ""
|
||||
@@ -2795,7 +2803,7 @@ msgstr ""
|
||||
|
||||
#: .\cookbook\views\views.py:451
|
||||
msgid "Manage recipes, shopping list, meal plans and more."
|
||||
msgstr ""
|
||||
msgstr "Gestiona receptes, llistes de la compra, menús setmanals i molt més."
|
||||
|
||||
#: .\cookbook\views\views.py:458
|
||||
msgid "Plan"
|
||||
@@ -2803,17 +2811,15 @@ msgstr "Pla"
|
||||
|
||||
#: .\cookbook\views\views.py:458
|
||||
msgid "View your meal Plan"
|
||||
msgstr ""
|
||||
msgstr "Veure la planificació de menús"
|
||||
|
||||
#: .\cookbook\views\views.py:459
|
||||
msgid "View your cookbooks"
|
||||
msgstr ""
|
||||
msgstr "Veure els meus llibres de receptes"
|
||||
|
||||
#: .\cookbook\views\views.py:460
|
||||
#, fuzzy
|
||||
#| msgid "New Shopping List"
|
||||
msgid "View your shopping lists"
|
||||
msgstr "Nova Llista de Compra"
|
||||
msgstr "Veure la teva llista de la compra"
|
||||
|
||||
#~ msgid "Default unit"
|
||||
#~ msgstr "Unitat per defecte"
|
||||
|
||||
@@ -12,7 +12,7 @@ msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2024-08-01 15:04+0200\n"
|
||||
"PO-Revision-Date: 2024-12-09 00:58+0000\n"
|
||||
"PO-Revision-Date: 2025-05-18 10:58+0000\n"
|
||||
"Last-Translator: Vincenzo Reale <smart2128vr@gmail.com>\n"
|
||||
"Language-Team: Italian <http://translate.tandoor.dev/projects/tandoor/"
|
||||
"recipes-backend/it/>\n"
|
||||
@@ -1439,7 +1439,7 @@ msgid ""
|
||||
"editor like <a href=\"https://www.tablesgenerator.com/markdown_tables\" rel="
|
||||
"\"noreferrer noopener\" target=\"_blank\">this one.</a>"
|
||||
msgstr ""
|
||||
"Le tabelle in markdown sono difficili da creare a mano. Si raccomanda "
|
||||
"Le tabelle in markdown sono difficili da creare a mano. Si consiglia "
|
||||
"l'utilizzo di un editor di tabelle come <a href=\"https://www.tablesgenerator"
|
||||
".com/markdown_tables\" rel=\"noreferrer noopener\" target=\"_blank\""
|
||||
">questo.</a>"
|
||||
@@ -2098,7 +2098,7 @@ msgid ""
|
||||
" your installation.\n"
|
||||
" "
|
||||
msgstr ""
|
||||
"<b>Non è raccomandato</b> erogare i file multimediali con gunicorn/python!\n"
|
||||
"<b>Non è consigliato</b> erogare i file multimediali con gunicorn/python!\n"
|
||||
" Segui i passi descritti\n"
|
||||
" <a href=\"https://github.com/vabene1111/recipes/releases/tag/0.8."
|
||||
"1\">qui</a> per aggiornare\n"
|
||||
|
||||
@@ -126,7 +126,7 @@ class TreeModel(MP_Node):
return None

@property
def full_name(self) -> str:
def full_name(self):
"""
Returns a string representation of a tree node and its ancestors,
e.g. 'Cuisine > Asian > Chinese > Cantonese'.
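The docstring above describes the intended output of full_name; the body lies outside this hunk. A hedged sketch of how such a value can be built with django-treebeard's ancestor API (an assumption for illustration, not copied from the repository):

def full_name(node):
    # walk from the root down to `node` and join the names with ' > '
    names = [ancestor.name for ancestor in node.get_ancestors()] + [node.name]
    return ' > '.join(names)

# full_name(cantonese_keyword) -> 'Cuisine > Asian > Chinese > Cantonese'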
@@ -912,19 +912,12 @@ class PropertyType(models.Model, PermissionModelMixin, MergeModelMixin):
GOAL = 'GOAL'
OTHER = 'OTHER'

CHOICES = (
(NUTRITION, _('Nutrition')),
(ALLERGEN, _('Allergen')),
(PRICE, _('Price')),
(GOAL, _('Goal')),
(OTHER, _('Other')),
)

name = models.CharField(max_length=128)
unit = models.CharField(max_length=64, blank=True, null=True)
order = models.IntegerField(default=0)
description = models.CharField(max_length=512, blank=True, null=True)
category = models.CharField(max_length=64, choices=CHOICES, null=True, blank=True)
category = models.CharField(max_length=64, choices=((NUTRITION, _('Nutrition')), (ALLERGEN, _('Allergen')),
(PRICE, _('Price')), (GOAL, _('Goal')), (OTHER, _('Other'))), null=True, blank=True)
open_data_slug = models.CharField(max_length=128, null=True, blank=True, default=None)

fdc_id = models.IntegerField(null=True, default=None, blank=True)
@@ -1457,21 +1450,19 @@ class Automation(ExportModelOperationsMixin('automations'), models.Model, Permis
UNIT_REPLACE = 'UNIT_REPLACE'
NAME_REPLACE = 'NAME_REPLACE'

automation_types = (
(FOOD_ALIAS, _('Food Alias')),
(UNIT_ALIAS, _('Unit Alias')),
(KEYWORD_ALIAS, _('Keyword Alias')),
(DESCRIPTION_REPLACE, _('Description Replace')),
(INSTRUCTION_REPLACE, _('Instruction Replace')),
(NEVER_UNIT, _('Never Unit')),
(TRANSPOSE_WORDS, _('Transpose Words')),
(FOOD_REPLACE, _('Food Replace')),
(UNIT_REPLACE, _('Unit Replace')),
(NAME_REPLACE, _('Name Replace')),
)

type = models.CharField(max_length=128,
choices=automation_types)
choices=(
(FOOD_ALIAS, _('Food Alias')),
(UNIT_ALIAS, _('Unit Alias')),
(KEYWORD_ALIAS, _('Keyword Alias')),
(DESCRIPTION_REPLACE, _('Description Replace')),
(INSTRUCTION_REPLACE, _('Instruction Replace')),
(NEVER_UNIT, _('Never Unit')),
(TRANSPOSE_WORDS, _('Transpose Words')),
(FOOD_REPLACE, _('Food Replace')),
(UNIT_REPLACE, _('Unit Replace')),
(NAME_REPLACE, _('Name Replace')),
))
name = models.CharField(max_length=128, default='')
description = models.TextField(blank=True, null=True)

69
cookbook/schemas.py
Normal file
@@ -0,0 +1,69 @@
from rest_framework.schemas.openapi import AutoSchema
from rest_framework.schemas.utils import is_list_view


class QueryParam(object):
def __init__(self, name, description=None, qtype='string', required=False):
self.name = name
self.description = description
self.qtype = qtype
self.required = required

def __str__(self):
return f'{self.name}, {self.qtype}, {self.description}'


class QueryParamAutoSchema(AutoSchema):
def get_path_parameters(self, path, method):
if not is_list_view(path, method, self.view):
return super().get_path_parameters(path, method)
parameters = super().get_path_parameters(path, method)
for q in self.view.query_params:
parameters.append({
"name": q.name, "in": "query", "required": q.required,
"description": q.description,
'schema': {'type': q.qtype, },
})

return parameters


class TreeSchema(AutoSchema):
def get_path_parameters(self, path, method):
if not is_list_view(path, method, self.view):
return super(TreeSchema, self).get_path_parameters(path, method)

api_name = path.split('/')[2]
parameters = super().get_path_parameters(path, method)
parameters.append({
"name": 'query', "in": "query", "required": False,
"description": 'Query string matched against {} name.'.format(api_name),
'schema': {'type': 'string', },
})
parameters.append({
"name": 'root', "in": "query", "required": False,
"description": 'Return first level children of {obj} with ID [int]. Integer 0 will return root {obj}s.'.format(
obj=api_name),
'schema': {'type': 'integer', },
})
parameters.append({
"name": 'tree', "in": "query", "required": False,
"description": 'Return all self and children of {} with ID [int].'.format(api_name),
'schema': {'type': 'integer', },
})
return parameters


class FilterSchema(AutoSchema):
def get_path_parameters(self, path, method):
if not is_list_view(path, method, self.view):
return super(FilterSchema, self).get_path_parameters(path, method)

api_name = path.split('/')[2]
parameters = super().get_path_parameters(path, method)
parameters.append({
"name": 'query', "in": "query", "required": False,
"description": 'Query string matched against {} name.'.format(api_name),
'schema': {'type': 'string', },
})
return parameters
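To see how these schema classes are meant to be wired up, a hypothetical DRF viewset using QueryParamAutoSchema might look like the following (the view name and parameter descriptions are assumptions for illustration):

from rest_framework import viewsets

from cookbook.schemas import QueryParam, QueryParamAutoSchema


class RecipeViewSet(viewsets.ModelViewSet):
    # queryset / serializer_class omitted for brevity
    # list-endpoint query parameters documented by QueryParamAutoSchema above
    query_params = [
        QueryParam('query', description='Query string matched against recipe name.'),
        QueryParam('keywords', description='ID of keyword a recipe should have.', qtype='integer'),
    ]
    schema = QueryParamAutoSchema()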
@@ -4,8 +4,7 @@ from decimal import Decimal
from gettext import gettext as _
from html import escape
from smtplib import SMTPException
from drf_spectacular.utils import extend_schema_field
from django.forms.models import model_to_dict

from django.contrib.auth.models import AnonymousUser, Group, User
from django.core.cache import caches
from django.core.mail import send_mail
@@ -14,8 +13,7 @@ from django.http import BadHeaderError
from django.urls import reverse
from django.utils import timezone
from django_scopes import scopes_disabled
from drf_writable_nested import UniqueFieldsMixin
from drf_writable_nested import WritableNestedModelSerializer as WNMS
from drf_writable_nested import UniqueFieldsMixin, WritableNestedModelSerializer
from oauth2_provider.models import AccessToken
from PIL import Image
from rest_framework import serializers
@@ -38,34 +36,7 @@ from cookbook.models import (Automation, BookmarkletImport, Comment, CookLog, Cu
SupermarketCategoryRelation, Sync, SyncLog, Unit, UnitConversion,
UserFile, UserPreference, UserSpace, ViewLog, ConnectorConfig)
from cookbook.templatetags.custom_tags import markdown
from recipes.settings import AWS_ENABLED, MEDIA_URL, EMAIL_HOST


class WritableNestedModelSerializer(WNMS):

# overload to_internal_value to allow using PK only on nested object
def to_internal_value(self, data):
# iterate through every field on the posted object
for f in list(data):
if f not in self.fields:
continue
elif issubclass(self.fields[f].__class__, serializers.Serializer):
# if the field is a serializer and an integer, assume it's the ID of an existing object
if isinstance(data[f], int):
# only retrieve serializer required fields
required_fields = ['id'] + [field_name for field_name, field in self.fields[f].__class__().fields.items() if field.required]
data[f] = model_to_dict(self.fields[f].Meta.model.objects.get(id=data[f]), fields=required_fields)
elif issubclass(self.fields[f].__class__, serializers.ListSerializer):
# if the field is a ListSerializer get dict values of PKs provided
if any(isinstance(x, int) for x in data[f]):
# only retrieve serializer required fields
required_fields = ['id'] + [field_name for field_name, field in self.fields[f].child.__class__().fields.items() if field.required]
# filter values to integer values
pk_data = [x for x in data[f] if isinstance(x, int)]
# merge non-pk values with retrieved values
data[f] = [x for x in data[f] if not isinstance(x, int)] \
+ list(self.fields[f].child.Meta.model.objects.filter(id__in=pk_data).values(*required_fields))
return super().to_internal_value(data)
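In practice the overload above lets API clients send just a primary key (or a mix of PKs and full objects) where a nested object is normally expected; the serializer expands each PK into the minimal required fields before validation. A hedged request-payload example (the field names are illustrative, not taken from a specific serializer):

# both payloads are accepted by a serializer built on this WritableNestedModelSerializer
payload_with_nested_object = {
    'name': 'Basil',
    'supermarket_category': {'id': 3, 'name': 'Produce'},
}

payload_with_pk_only = {
    'name': 'Basil',
    'supermarket_category': 3,   # expanded via model_to_dict(...) in to_internal_value()
}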
from recipes.settings import AWS_ENABLED, MEDIA_URL


class ExtendedRecipeMixin(serializers.ModelSerializer):
@@ -76,7 +47,7 @@ class ExtendedRecipeMixin(serializers.ModelSerializer):
images = None

image = serializers.SerializerMethodField('get_image')
numrecipe = serializers.IntegerField(source='recipe_count', read_only=True)
numrecipe = serializers.ReadOnlyField(source='recipe_count')

def get_fields(self, *args, **kwargs):
fields = super().get_fields(*args, **kwargs)
@@ -86,7 +57,8 @@ class ExtendedRecipeMixin(serializers.ModelSerializer):
api_serializer = None
# extended values are computationally expensive and not needed in normal circumstances
try:
if str2bool(self.context['request'].query_params.get('extended', False)) and self.__class__ == api_serializer:
if str2bool(
self.context['request'].query_params.get('extended', False)) and self.__class__ == api_serializer:
return fields
except (AttributeError, KeyError):
pass
@@ -120,7 +92,6 @@ class OpenDataModelMixin(serializers.ModelSerializer):
return super().update(instance, validated_data)


@extend_schema_field(float)
class CustomDecimalField(serializers.Field):
"""
Custom decimal field to normalize useless decimal places
@@ -144,7 +115,6 @@ class CustomDecimalField(serializers.Field):
raise ValidationError('A valid number is required')


@extend_schema_field(bool)
class CustomOnHandField(serializers.Field):
def get_attribute(self, instance):
return instance
@@ -153,12 +123,16 @@ class CustomOnHandField(serializers.Field):
if not self.context["request"].user.is_authenticated:
return []
shared_users = []
if c := caches['default'].get(f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', None):
if c := caches['default'].get(
f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', None):
shared_users = c
else:
try:
shared_users = [x.id for x in list(self.context['request'].user.get_shopping_share())] + [self.context['request'].user.id]
caches['default'].set(f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}', shared_users, timeout=5 * 60)
shared_users = [x.id for x in list(self.context['request'].user.get_shopping_share())] + [
self.context['request'].user.id]
caches['default'].set(
f'shopping_shared_users_{self.context["request"].space.id}_{self.context["request"].user.id}',
shared_users, timeout=5 * 60)
# TODO ugly hack that improves API performance significantly, should be done properly
except AttributeError:  # Anonymous users (using share links) don't have shared users
pass
@@ -169,39 +143,34 @@ class CustomOnHandField(serializers.Field):


class SpaceFilterSerializer(serializers.ListSerializer):

def to_representation(self, data):
if self.context.get('request', None) is None:
return

if (isinstance(data, QuerySet) and data.query.is_sliced):
# if query is sliced it came from api request not nested serializer
return super().to_representation(data)

if self.child.Meta.model == User:
# Don't return User details to anonymous users
if isinstance(self.context['request'].user, AnonymousUser):
data = []
else:
data = data.filter(userspace__space=self.context['request'].user.get_active_space()).all()
elif isinstance(data, list):
data = [d for d in data if getattr(d, self.child.Meta.model.get_space_key()[0]) == self.context['request'].space]
else:
data = data.filter(**{'__'.join(self.child.Meta.model.get_space_key()): self.context['request'].space})
data = data.filter(**{'__'.join(data.model.get_space_key()): self.context['request'].space})
return super().to_representation(data)


class UserSerializer(WritableNestedModelSerializer):
display_name = serializers.SerializerMethodField('get_user_label')

@extend_schema_field(str)
def get_user_label(self, obj):
return obj.get_user_display_name()

class Meta:
list_serializer_class = SpaceFilterSerializer
model = User
fields = ('id', 'username', 'first_name', 'last_name', 'display_name', 'is_staff', 'is_superuser', 'is_active')
read_only_fields = ('id', 'username', 'display_name', 'is_staff', 'is_superuser', 'is_active')
fields = ('id', 'username', 'first_name', 'last_name', 'display_name')
read_only_fields = ('username',)


class GroupSerializer(UniqueFieldsMixin, WritableNestedModelSerializer):
@@ -214,7 +183,6 @@ class GroupSerializer(UniqueFieldsMixin, WritableNestedModelSerializer):
class Meta:
model = Group
fields = ('id', 'name')
read_only_fields = ('id', 'name')


class FoodInheritFieldSerializer(UniqueFieldsMixin, WritableNestedModelSerializer):
@@ -234,16 +202,13 @@ class FoodInheritFieldSerializer(UniqueFieldsMixin, WritableNestedModelSerialize


class UserFileSerializer(serializers.ModelSerializer):
created_by = UserSerializer(read_only=True)
file = serializers.FileField(write_only=True, required=False)
file = serializers.FileField(write_only=True)
file_download = serializers.SerializerMethodField('get_download_link')
preview = serializers.SerializerMethodField('get_preview_link')

@extend_schema_field(serializers.CharField(read_only=True))
def get_download_link(self, obj):
return self.context['request'].build_absolute_uri(reverse('api_download_file', args={obj.pk}))

@extend_schema_field(serializers.CharField(read_only=True))
def get_preview_link(self, obj):
try:
Image.open(obj.file.file.file)
@@ -285,21 +250,18 @@ class UserFileSerializer(serializers.ModelSerializer):

class Meta:
model = UserFile
fields = ('id', 'name', 'file', 'file_download', 'preview', 'file_size_kb', 'created_by', 'created_at')
read_only_fields = ('id', 'file_download', 'preview', 'file_size_kb', 'created_by', 'created_at')
fields = ('id', 'name', 'file', 'file_download', 'preview', 'file_size_kb')
read_only_fields = ('id', 'file_size_kb')
extra_kwargs = {"file": {"required": False, }}


class UserFileViewSerializer(serializers.ModelSerializer):
created_by = UserSerializer(read_only=True)
file_download = serializers.SerializerMethodField('get_download_link')
preview = serializers.SerializerMethodField('get_preview_link')

@extend_schema_field(str)
def get_download_link(self, obj):
return self.context['request'].build_absolute_uri(reverse('api_download_file', args={obj.pk}))

@extend_schema_field(str)
def get_preview_link(self, obj):
try:
Image.open(obj.file.file.file)
@@ -316,12 +278,11 @@ class UserFileViewSerializer(serializers.ModelSerializer):

class Meta:
model = UserFile
fields = ('id', 'name', 'file_download', 'preview', 'file_size_kb', 'created_by', 'created_at')
read_only_fields = ('id', 'file', 'file_download', 'file_size_kb', 'preview', 'created_by', 'created_at')
fields = ('id', 'name', 'file_download', 'preview')
read_only_fields = ('id', 'file')


class SpaceSerializer(WritableNestedModelSerializer):
created_by = UserSerializer(read_only=True)
|
||||
user_count = serializers.SerializerMethodField('get_user_count')
|
||||
recipe_count = serializers.SerializerMethodField('get_recipe_count')
|
||||
file_size_mb = serializers.SerializerMethodField('get_file_size_mb')
|
||||
@@ -337,15 +298,12 @@ class SpaceSerializer(WritableNestedModelSerializer):
|
||||
logo_color_512 = UserFileViewSerializer(required=False, many=False, allow_null=True)
|
||||
logo_color_svg = UserFileViewSerializer(required=False, many=False, allow_null=True)
|
||||
|
||||
@extend_schema_field(int)
|
||||
def get_user_count(self, obj):
|
||||
return UserSpace.objects.filter(space=obj).count()
|
||||
|
||||
@extend_schema_field(int)
|
||||
def get_recipe_count(self, obj):
|
||||
return Recipe.objects.filter(space=obj).count()
|
||||
|
||||
@extend_schema_field(float)
|
||||
def get_file_size_mb(self, obj):
|
||||
try:
|
||||
return UserFile.objects.filter(space=obj).aggregate(Sum('file_size_kb'))['file_size_kb__sum'] / 1000
|
||||
@@ -408,18 +366,15 @@ class MealTypeSerializer(SpacedModelSerializer, WritableNestedModelSerializer):
|
||||
|
||||
|
||||
class UserPreferenceSerializer(WritableNestedModelSerializer):
|
||||
user = UserSerializer(read_only=True)
|
||||
food_inherit_default = serializers.SerializerMethodField('get_food_inherit_defaults')
|
||||
plan_share = UserSerializer(many=True, allow_null=True, required=False)
|
||||
shopping_share = UserSerializer(many=True, allow_null=True, required=False)
|
||||
food_children_exist = serializers.SerializerMethodField('get_food_children_exist')
|
||||
image = UserFileViewSerializer(required=False, allow_null=True, many=False)
|
||||
|
||||
@extend_schema_field(FoodInheritFieldSerializer)
|
||||
def get_food_inherit_defaults(self, obj):
|
||||
return FoodInheritFieldSerializer(obj.user.get_active_space().food_inherit.all(), many=True).data
|
||||
|
||||
@extend_schema_field(bool)
|
||||
def get_food_children_exist(self, obj):
|
||||
space = getattr(self.context.get('request', None), 'space', None)
|
||||
return Food.objects.filter(depth__gt=0, space=space).exists()
|
||||
@@ -444,7 +399,6 @@ class UserPreferenceSerializer(WritableNestedModelSerializer):
|
||||
'filter_to_supermarket', 'shopping_add_onhand', 'left_handed', 'show_step_ingredients',
|
||||
'food_children_exist'
|
||||
)
|
||||
read_only_fields = ('user',)
|
||||
|
||||
|
||||
class StorageSerializer(SpacedModelSerializer):
|
||||
@@ -507,24 +461,22 @@ class SyncLogSerializer(SpacedModelSerializer):
|
||||
class KeywordLabelSerializer(serializers.ModelSerializer):
|
||||
label = serializers.SerializerMethodField('get_label')
|
||||
|
||||
@extend_schema_field(str)
|
||||
def get_label(self, obj):
|
||||
return str(obj)
|
||||
|
||||
class Meta:
|
||||
list_serializer_class = SpaceFilterSerializer
|
||||
model = Keyword
|
||||
fields = ('id', 'label')
|
||||
fields = (
|
||||
'id', 'label',
|
||||
)
|
||||
read_only_fields = ('id', 'label')
|
||||
|
||||
|
||||
class KeywordSerializer(UniqueFieldsMixin, ExtendedRecipeMixin):
|
||||
label = serializers.SerializerMethodField('get_label', allow_null=False)
|
||||
parent = IntegerField(read_only=True)
|
||||
|
||||
label = serializers.SerializerMethodField('get_label')
|
||||
recipe_filter = 'keywords'
|
||||
|
||||
@extend_schema_field(str)
|
||||
def get_label(self, obj):
|
||||
return str(obj)
|
||||
|
||||
@@ -648,9 +600,8 @@ class PropertySerializer(UniqueFieldsMixin, WritableNestedModelSerializer):
|
||||
class RecipeSimpleSerializer(WritableNestedModelSerializer):
|
||||
url = serializers.SerializerMethodField('get_url')
|
||||
|
||||
@extend_schema_field(str)
|
||||
def get_url(self, obj):
|
||||
return f'recipe/{obj.pk}'
|
||||
return reverse('view_recipe', args=[obj.id])
|
||||
|
||||
def create(self, validated_data):
|
||||
# don't allow writing to Recipe via this API
|
||||
@@ -658,28 +609,13 @@ class RecipeSimpleSerializer(WritableNestedModelSerializer):
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
# don't allow writing to Recipe via this API
|
||||
return instance
|
||||
return Recipe.objects.get(**validated_data)
|
||||
|
||||
class Meta:
|
||||
model = Recipe
|
||||
fields = ('id', 'name', 'url')
|
||||
|
||||
|
||||
class RecipeFlatSerializer(WritableNestedModelSerializer):
|
||||
|
||||
def create(self, validated_data):
|
||||
# don't allow writing to Recipe via this API
|
||||
return Recipe.objects.get(**validated_data)
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
# don't allow writing to Recipe via this API
|
||||
return Recipe.objects.get(**validated_data)
|
||||
|
||||
class Meta:
|
||||
model = Recipe
|
||||
fields = ('id', 'name', 'image')
|
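Both RecipeSimpleSerializer and RecipeFlatSerializer implement the same convention: a nested recipe reference must never create or modify a Recipe, so create() and update() resolve to an existing row instead. The pattern in isolation (a sketch; the import path follows the cookbook app layout this module belongs to):

from rest_framework import serializers

from cookbook.models import Recipe  # assumed import path


class RecipeReferenceSerializer(serializers.ModelSerializer):
    """Nested recipe reference that is effectively read only (illustrative sketch)."""

    def create(self, validated_data):
        # look up the existing recipe instead of creating one
        return Recipe.objects.get(**validated_data)

    def update(self, instance, validated_data):
        # leave the referenced recipe untouched when the parent object is saved
        return instance

    class Meta:
        model = Recipe
        fields = ('id', 'name')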
||||
|
||||
|
||||
class FoodSimpleSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Food
|
||||
@@ -689,13 +625,12 @@ class FoodSimpleSerializer(serializers.ModelSerializer):
|
||||
class FoodSerializer(UniqueFieldsMixin, WritableNestedModelSerializer, ExtendedRecipeMixin, OpenDataModelMixin):
|
||||
supermarket_category = SupermarketCategorySerializer(allow_null=True, required=False)
|
||||
recipe = RecipeSimpleSerializer(allow_null=True, required=False)
|
||||
shopping = serializers.CharField(source='shopping_status', read_only=True)
|
||||
shopping = serializers.ReadOnlyField(source='shopping_status')
|
||||
inherit_fields = FoodInheritFieldSerializer(many=True, allow_null=True, required=False)
|
||||
child_inherit_fields = FoodInheritFieldSerializer(many=True, allow_null=True, required=False)
|
||||
food_onhand = CustomOnHandField(required=False, allow_null=True)
|
||||
substitute_onhand = serializers.SerializerMethodField('get_substitute_onhand')
|
||||
substitute = FoodSimpleSerializer(many=True, allow_null=True, required=False)
|
||||
parent = IntegerField(read_only=True)
|
||||
|
||||
properties = PropertySerializer(many=True, allow_null=True, required=False)
|
||||
properties_food_unit = UnitSerializer(allow_null=True, required=False)
|
||||
@@ -704,7 +639,6 @@ class FoodSerializer(UniqueFieldsMixin, WritableNestedModelSerializer, ExtendedR
|
||||
recipe_filter = 'steps__ingredients__food'
|
||||
images = ['recipe__image']
|
||||
|
||||
@extend_schema_field(bool)
|
||||
def get_substitute_onhand(self, obj):
|
||||
if not self.context["request"].user.is_authenticated:
|
||||
return []
|
||||
@@ -802,9 +736,12 @@ class FoodSerializer(UniqueFieldsMixin, WritableNestedModelSerializer, ExtendedR
|
||||
class Meta:
|
||||
model = Food
|
||||
fields = (
|
||||
'id', 'name', 'plural_name', 'description', 'shopping', 'recipe', 'url', 'properties', 'properties_food_amount', 'properties_food_unit', 'fdc_id',
|
||||
'food_onhand', 'supermarket_category', 'image', 'parent', 'numchild', 'numrecipe', 'inherit_fields', 'full_name', 'ignore_shopping',
|
||||
'substitute', 'substitute_siblings', 'substitute_children', 'substitute_onhand', 'child_inherit_fields', 'open_data_slug',
|
||||
'id', 'name', 'plural_name', 'description', 'shopping', 'recipe', 'url',
|
||||
'properties', 'properties_food_amount', 'properties_food_unit', 'fdc_id',
|
||||
'food_onhand', 'supermarket_category',
|
||||
'image', 'parent', 'numchild', 'numrecipe', 'inherit_fields', 'full_name', 'ignore_shopping',
|
||||
'substitute', 'substitute_siblings', 'substitute_children', 'substitute_onhand', 'child_inherit_fields',
|
||||
'open_data_slug',
|
||||
)
|
||||
read_only_fields = ('id', 'numchild', 'parent', 'image', 'numrecipe')
|
||||
|
||||
@@ -815,9 +752,7 @@ class IngredientSimpleSerializer(WritableNestedModelSerializer):
|
||||
used_in_recipes = serializers.SerializerMethodField('get_used_in_recipes')
|
||||
amount = CustomDecimalField()
|
||||
conversions = serializers.SerializerMethodField('get_conversions')
|
||||
checked = serializers.BooleanField(read_only=True, default=False, help_text='Just laziness to have a checked field on the frontend API client')
|
||||
|
||||
@extend_schema_field(list)
|
||||
def get_used_in_recipes(self, obj):
|
||||
used_in = []
|
||||
for s in obj.step_set.all():
|
||||
@@ -825,7 +760,6 @@ class IngredientSimpleSerializer(WritableNestedModelSerializer):
|
||||
used_in.append({'id': r.id, 'name': r.name})
|
||||
return used_in
|
||||
|
||||
@extend_schema_field(list)
|
||||
def get_conversions(self, obj):
|
||||
if obj.unit and obj.food:
|
||||
uch = UnitConversionHelper(self.context['request'].space)
|
||||
@@ -870,16 +804,12 @@ class StepSerializer(WritableNestedModelSerializer, ExtendedRecipeMixin):
|
||||
validated_data['space'] = self.context['request'].space
|
||||
return super().create(validated_data)
|
||||
|
||||
@extend_schema_field(str)
|
||||
def get_instructions_markdown(self, obj):
|
||||
return obj.get_instruction_render()
|
||||
|
||||
@extend_schema_field(serializers.ListField)
|
||||
def get_step_recipes(self, obj):
|
||||
return list(obj.recipe_set.values_list('id', flat=True).all())
|
||||
|
||||
# couldn't set proper serializer StepRecipeSerializer because of circular reference
|
||||
@extend_schema_field(serializers.JSONField)
|
||||
def get_step_recipe_data(self, obj):
|
||||
# check if root type is recipe to prevent infinite recursion
|
||||
# can be improved later to allow multi level embedding
|
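The two comments above describe a one-level embedding rule: a step's linked recipe is only serialized when the object at the root of the rendering is itself a Recipe, which stops an embedded recipe's steps from embedding further recipes. One plausible shape of that guard (hedged sketch; the exact production check may differ):

def get_step_recipe_data(self, obj):
    # Embed only one level deep: when StepRecipeSerializer is instantiated here,
    # its own nested steps no longer have a RecipeSerializer as their root, so
    # the condition fails and the recursion stops (illustrative sketch).
    if obj.step_recipe and type(self.parent.root) == RecipeSerializer:
        return StepRecipeSerializer(obj.step_recipe, context={'request': self.context['request']}).data
    return None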
||||
@@ -889,7 +819,8 @@ class StepSerializer(WritableNestedModelSerializer, ExtendedRecipeMixin):
|
||||
class Meta:
|
||||
model = Step
|
||||
fields = (
|
||||
'id', 'name', 'instruction', 'ingredients', 'instructions_markdown', 'time', 'order', 'show_as_header', 'file', 'step_recipe',
|
||||
'id', 'name', 'instruction', 'ingredients', 'instructions_markdown',
|
||||
'time', 'order', 'show_as_header', 'file', 'step_recipe',
|
||||
'step_recipe_data', 'numrecipe', 'show_ingredients_table'
|
||||
)
|
||||
|
||||
@@ -899,7 +830,9 @@ class StepRecipeSerializer(WritableNestedModelSerializer):
|
||||
|
||||
class Meta:
|
||||
model = Recipe
|
||||
fields = ('id', 'name', 'steps')
|
||||
fields = (
|
||||
'id', 'name', 'steps',
|
||||
)
|
||||
|
||||
|
||||
class UnitConversionSerializer(WritableNestedModelSerializer, OpenDataModelMixin):
|
||||
@@ -910,7 +843,6 @@ class UnitConversionSerializer(WritableNestedModelSerializer, OpenDataModelMixin
|
||||
base_amount = CustomDecimalField()
|
||||
converted_amount = CustomDecimalField()
|
||||
|
||||
@extend_schema_field(str)
|
||||
def get_conversion_name(self, obj):
|
||||
text = f'{round(obj.base_amount)} {obj.base_unit} '
|
||||
if obj.food:
|
||||
@@ -952,7 +884,6 @@ class NutritionInformationSerializer(serializers.ModelSerializer):
|
||||
|
||||
class RecipeBaseSerializer(WritableNestedModelSerializer):
|
||||
# TODO make days of new recipe a setting
|
||||
@extend_schema_field(bool)
|
||||
def is_recipe_new(self, obj):
|
||||
if getattr(obj, 'new_recipe', None) or obj.created_at > (timezone.now() - timedelta(days=7)):
|
||||
return True
|
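The TODO above asks for the hard-coded seven-day "new recipe" window to become a setting. A hedged sketch of what that could look like; RECIPE_NEW_DAYS is a hypothetical setting name, not something the project defines:

from datetime import timedelta

from django.conf import settings
from django.utils import timezone


def is_recipe_new(recipe):
    # hypothetical RECIPE_NEW_DAYS setting, falling back to the current 7-day default
    window = timedelta(days=getattr(settings, 'RECIPE_NEW_DAYS', 7))
    return bool(getattr(recipe, 'new_recipe', None)) or recipe.created_at > (timezone.now() - window)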
||||
@@ -968,12 +899,12 @@ class CommentSerializer(serializers.ModelSerializer):
|
||||
|
||||
|
||||
class RecipeOverviewSerializer(RecipeBaseSerializer):
|
||||
keywords = KeywordLabelSerializer(many=True, read_only=True)
|
||||
new = serializers.SerializerMethodField('is_recipe_new', read_only=True)
|
||||
recent = serializers.CharField(read_only=True)
|
||||
rating = CustomDecimalField(required=False, allow_null=True, read_only=True)
|
||||
last_cooked = serializers.DateTimeField(required=False, allow_null=True, read_only=True)
|
||||
created_by = UserSerializer(read_only=True)
|
||||
keywords = KeywordLabelSerializer(many=True)
|
||||
new = serializers.SerializerMethodField('is_recipe_new')
|
||||
recent = serializers.ReadOnlyField()
|
||||
|
||||
rating = CustomDecimalField(required=False, allow_null=True)
|
||||
last_cooked = serializers.DateTimeField(required=False, allow_null=True)
|
||||
|
||||
def create(self, validated_data):
|
||||
pass
|
||||
@@ -988,14 +919,7 @@ class RecipeOverviewSerializer(RecipeBaseSerializer):
|
||||
'waiting_time', 'created_by', 'created_at', 'updated_at',
|
||||
'internal', 'servings', 'servings_text', 'rating', 'last_cooked', 'new', 'recent'
|
||||
)
|
||||
# TODO having these readonly fields makes "RecipeOverview.ts" (API Client) not generate the RecipeOverviewToJSON second else block which leads to errors when using the api
|
||||
# TODO find a solution (custom schema?) to have these fields readonly (to save performance) and generate a proper client (two serializers would probably do the trick)
|
||||
# read_only_fields = ['id', 'name', 'description', 'image', 'keywords', 'working_time',
|
||||
# 'waiting_time', 'created_by', 'created_at', 'updated_at',
|
||||
# 'internal', 'servings', 'servings_text', 'rating', 'last_cooked', 'new', 'recent']
|
||||
read_only_fields = ['image', 'keywords', 'working_time',
|
||||
'waiting_time', 'created_by', 'created_at', 'updated_at',
|
||||
'internal', 'servings', 'servings_text', 'rating', 'last_cooked', 'new', 'recent']
|
||||
read_only_fields = ['image', 'created_by', 'created_at']
|
||||
|
||||
|
||||
class RecipeSerializer(RecipeBaseSerializer):
|
||||
@@ -1007,9 +931,7 @@ class RecipeSerializer(RecipeBaseSerializer):
|
||||
rating = CustomDecimalField(required=False, allow_null=True, read_only=True)
|
||||
last_cooked = serializers.DateTimeField(required=False, allow_null=True, read_only=True)
|
||||
food_properties = serializers.SerializerMethodField('get_food_properties')
|
||||
created_by = UserSerializer(read_only=True)
|
||||
|
||||
@extend_schema_field(serializers.JSONField)
|
||||
def get_food_properties(self, obj):
|
||||
fph = FoodPropertyHelper(obj.space) # initialize with object space since recipes might be viewed anonymously
|
||||
return fph.calculate_recipe_properties(obj)
|
||||
@@ -1017,9 +939,12 @@ class RecipeSerializer(RecipeBaseSerializer):
|
||||
class Meta:
|
||||
model = Recipe
|
||||
fields = (
|
||||
'id', 'name', 'description', 'image', 'keywords', 'steps', 'working_time', 'waiting_time', 'created_by', 'created_at', 'updated_at', 'source_url',
|
||||
'internal', 'show_ingredient_overview', 'nutrition', 'properties', 'food_properties', 'servings', 'file_path', 'servings_text', 'rating',
|
||||
'last_cooked', 'private', 'shared'
|
||||
'id', 'name', 'description', 'image', 'keywords', 'steps', 'working_time',
|
||||
'waiting_time', 'created_by', 'created_at', 'updated_at', 'source_url',
|
||||
'internal', 'show_ingredient_overview', 'nutrition', 'properties', 'food_properties', 'servings',
|
||||
'file_path', 'servings_text', 'rating',
|
||||
'last_cooked',
|
||||
'private', 'shared',
|
||||
)
|
||||
read_only_fields = ['image', 'created_by', 'created_at', 'food_properties']
|
||||
|
||||
@@ -1042,7 +967,7 @@ class RecipeImageSerializer(WritableNestedModelSerializer):
|
||||
def create(self, validated_data):
|
||||
if 'image' in validated_data and not is_file_type_allowed(validated_data['image'].name, image_only=True):
|
||||
return None
|
||||
return super().create(validated_data)
|
||||
return super().create( validated_data)
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
if 'image' in validated_data and not is_file_type_allowed(validated_data['image'].name, image_only=True):
|
||||
@@ -1074,7 +999,6 @@ class CustomFilterSerializer(SpacedModelSerializer, WritableNestedModelSerialize
|
||||
|
||||
|
||||
class RecipeBookSerializer(SpacedModelSerializer, WritableNestedModelSerializer):
|
||||
created_by = UserSerializer(read_only=True)
|
||||
shared = UserSerializer(many=True)
|
||||
filter = CustomFilterSerializer(allow_null=True, required=False)
|
||||
|
||||
@@ -1092,11 +1016,9 @@ class RecipeBookEntrySerializer(serializers.ModelSerializer):
|
||||
book_content = serializers.SerializerMethodField(method_name='get_book_content', read_only=True)
|
||||
recipe_content = serializers.SerializerMethodField(method_name='get_recipe_content', read_only=True)
|
||||
|
||||
@extend_schema_field(RecipeBookSerializer)
|
||||
def get_book_content(self, obj):
|
||||
return RecipeBookSerializer(context={'request': self.context['request']}).to_representation(obj.book)
|
||||
|
||||
@extend_schema_field(RecipeOverviewSerializer)
|
||||
def get_recipe_content(self, obj):
|
||||
return RecipeOverviewSerializer(context={'request': self.context['request']}).to_representation(obj.recipe)
|
||||
|
||||
@@ -1116,22 +1038,19 @@ class RecipeBookEntrySerializer(serializers.ModelSerializer):
|
||||
|
||||
class MealPlanSerializer(SpacedModelSerializer, WritableNestedModelSerializer):
|
||||
recipe = RecipeOverviewSerializer(required=False, allow_null=True)
|
||||
recipe_name = serializers.CharField(source='recipe.name', read_only=True)
|
||||
recipe_name = serializers.ReadOnlyField(source='recipe.name')
|
||||
meal_type = MealTypeSerializer()
|
||||
meal_type_name = serializers.CharField(source='meal_type.name', read_only=True) # TODO deprecate once the old meal plan is removed
|
||||
meal_type_name = serializers.ReadOnlyField(source='meal_type.name') # TODO deprecate once the old meal plan is removed
|
||||
note_markdown = serializers.SerializerMethodField('get_note_markdown')
|
||||
servings = CustomDecimalField()
|
||||
shared = UserSerializer(many=True, required=False, allow_null=True)
|
||||
shopping = serializers.SerializerMethodField('in_shopping')
|
||||
addshopping = serializers.BooleanField(write_only=True, required=False)
|
||||
|
||||
to_date = serializers.DateTimeField(required=False)
|
||||
|
||||
@extend_schema_field(str)
|
||||
def get_note_markdown(self, obj):
|
||||
return markdown(obj.note)
|
||||
|
||||
@extend_schema_field(bool)
|
||||
def in_shopping(self, obj):
|
||||
return ShoppingListRecipe.objects.filter(mealplan=obj.id).exists()
|
||||
|
||||
@@ -1141,31 +1060,18 @@ class MealPlanSerializer(SpacedModelSerializer, WritableNestedModelSerializer):
|
||||
if 'to_date' not in validated_data or validated_data['to_date'] is None:
|
||||
validated_data['to_date'] = validated_data['from_date']
|
||||
|
||||
add_to_shopping = False
|
||||
try:
|
||||
add_to_shopping = validated_data.pop('addshopping', False)
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
mealplan = super().create(validated_data)
|
||||
if add_to_shopping and self.context['request'].data.get('recipe', None):
|
||||
if self.context['request'].data.get('addshopping', False) and self.context['request'].data.get('recipe', None):
|
||||
SLR = RecipeShoppingEditor(user=validated_data['created_by'], space=validated_data['space'])
|
||||
SLR.create(mealplan=mealplan, servings=validated_data['servings'])
|
||||
return mealplan
|
||||
|
||||
def update(self, obj, validated_data):
|
||||
if sr := ShoppingListRecipe.objects.filter(mealplan=obj.id).first():
|
||||
SLR = RecipeShoppingEditor(user=obj.created_by, space=obj.space, id=sr.id)
|
||||
SLR.edit(mealplan=obj, servings=validated_data['servings'])
|
||||
|
||||
return super().update(obj, validated_data)
|
||||
|
||||
class Meta:
|
||||
model = MealPlan
|
||||
fields = (
|
||||
'id', 'title', 'recipe', 'servings', 'note', 'note_markdown',
|
||||
'from_date', 'to_date', 'meal_type', 'created_by', 'shared', 'recipe_name',
|
||||
'meal_type_name', 'shopping', 'addshopping'
|
||||
'meal_type_name', 'shopping'
|
||||
)
|
||||
read_only_fields = ('created_by',)
|
||||
|
||||
@@ -1181,17 +1087,33 @@ class AutoMealPlanSerializer(serializers.Serializer):
|
||||
|
||||
|
||||
class ShoppingListRecipeSerializer(serializers.ModelSerializer):
|
||||
recipe_data = RecipeOverviewSerializer(source='recipe', read_only=True, required=False)
|
||||
meal_plan_data = MealPlanSerializer(source='mealplan', read_only=True, required=False)
|
||||
name = serializers.SerializerMethodField('get_name') # should this be done at the front end?
|
||||
recipe_name = serializers.ReadOnlyField(source='recipe.name')
|
||||
mealplan_note = serializers.ReadOnlyField(source='mealplan.note')
|
||||
mealplan_from_date = serializers.ReadOnlyField(source='mealplan.from_date')
|
||||
mealplan_type = serializers.ReadOnlyField(source='mealplan.meal_type.name')
|
||||
servings = CustomDecimalField()
|
||||
created_by = UserSerializer(read_only=True)
|
||||
|
||||
def get_name(self, obj):
|
||||
if not isinstance(value := obj.servings, Decimal):
|
||||
value = Decimal(value)
|
||||
value = value.quantize(
|
||||
Decimal(1)) if value == value.to_integral() else value.normalize() # strips trailing zero
|
||||
return (
|
||||
obj.name
|
||||
or getattr(obj.mealplan, 'title', None)
|
||||
or (d := getattr(obj.mealplan, 'date', None)) and ': '.join([obj.mealplan.recipe.name, str(d)])
|
||||
or obj.recipe.name
|
||||
) + f' ({value:.2g})'
|
||||
|
||||
def create(self, validated_data):
|
||||
validated_data['space'] = self.context['request'].space
|
||||
validated_data['created_by'] = self.context['request'].user
|
||||
return super().create(validated_data)
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
# TODO remove once the old shopping list is gone
|
||||
if 'servings' in validated_data and self.context.get('view', None).__class__.__name__ != 'ShoppingListViewSet':
|
||||
SLR = RecipeShoppingEditor(user=self.context['request'].user, space=self.context['request'].space)
|
||||
SLR.edit_servings(servings=validated_data['servings'], id=instance.id)
|
||||
@@ -1199,19 +1121,18 @@ class ShoppingListRecipeSerializer(serializers.ModelSerializer):
|
||||
|
||||
class Meta:
|
||||
model = ShoppingListRecipe
|
||||
fields = ('id', 'name', 'recipe', 'recipe_data', 'mealplan', 'meal_plan_data', 'servings', 'created_by',)
|
||||
read_only_fields = ('id', 'created_by',)
|
||||
fields = ('id', 'recipe_name', 'name', 'recipe', 'mealplan', 'servings', 'mealplan_note', 'mealplan_from_date',
|
||||
'mealplan_type', 'created_by')
|
||||
read_only_fields = ('id', 'created_by',)
|
||||
|
||||
|
||||
class ShoppingListEntrySerializer(WritableNestedModelSerializer):
|
||||
food = FoodSerializer(allow_null=True)
|
||||
unit = UnitSerializer(allow_null=True, required=False)
|
||||
list_recipe_data = ShoppingListRecipeSerializer(source='list_recipe', read_only=True)
|
||||
recipe_mealplan = ShoppingListRecipeSerializer(source='list_recipe', read_only=True)
|
||||
amount = CustomDecimalField()
|
||||
created_by = UserSerializer(read_only=True)
|
||||
completed_at = serializers.DateTimeField(allow_null=True, required=False)
|
||||
mealplan_id = serializers.IntegerField(required=False, write_only=True,
|
||||
help_text='If a mealplan id is given try to find existing or create new ShoppingListRecipe with that meal plan and link entry to it')
|
||||
|
||||
def get_fields(self, *args, **kwargs):
|
||||
fields = super().get_fields(*args, **kwargs)
|
||||
@@ -1245,56 +1166,31 @@ class ShoppingListEntrySerializer(WritableNestedModelSerializer):
|
||||
def create(self, validated_data):
|
||||
validated_data['space'] = self.context['request'].space
|
||||
validated_data['created_by'] = self.context['request'].user
|
||||
|
||||
if 'mealplan_id' in validated_data:
|
||||
if existing_slr := ShoppingListRecipe.objects.filter(mealplan_id=validated_data['mealplan_id'], space=self.context['request'].space).first():
|
||||
validated_data['list_recipe'] = existing_slr
|
||||
else:
|
||||
validated_data['list_recipe'] = ShoppingListRecipe.objects.create(mealplan_id=validated_data['mealplan_id'], space=self.context['request'].space,
|
||||
created_by=self.context['request'].user)
|
||||
del validated_data['mealplan_id']
|
||||
|
||||
return super().create(validated_data)
|
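The create() above looks for an existing ShoppingListRecipe tied to the given meal plan and only creates one if none exists; Django's get_or_create expresses the same find-or-create step in a single call. A hedged sketch using the same models and request context:

def _attach_mealplan(validated_data, request):
    # Sketch: resolve mealplan_id to exactly one ShoppingListRecipe per meal plan.
    mealplan_id = validated_data.pop('mealplan_id', None)
    if mealplan_id is not None:
        validated_data['list_recipe'], _ = ShoppingListRecipe.objects.get_or_create(
            mealplan_id=mealplan_id,
            space=request.space,
            defaults={'created_by': request.user},
        )
    return validated_data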
||||
|
||||
def update(self, instance, validated_data):
|
||||
user = self.context['request'].user
|
||||
|
||||
if 'mealplan_id' in validated_data:
|
||||
del validated_data['mealplan_id']
|
||||
|
||||
# update the onhand for food if shopping_add_onhand is True
|
||||
if user.userpreference.shopping_add_onhand:
|
||||
if checked := validated_data.get('checked', None):
|
||||
validated_data['completed_at'] = timezone.now()
|
||||
instance.food.onhand_users.add(*user.userpreference.shopping_share.all(), user)
|
||||
elif not checked:
|
||||
elif checked == False:
|
||||
instance.food.onhand_users.remove(*user.userpreference.shopping_share.all(), user)
|
||||
return super().update(instance, validated_data)
|
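The two variants of the un-check branch are not equivalent: validated_data.get('checked', None) yields None when the field is missing, so `elif not checked:` also fires on updates that never mention checked, while `elif checked == False:` only reacts to an explicit un-check. A small illustration:

for payload in ({'amount': 2}, {'checked': False}):
    checked = payload.get('checked', None)
    # {'amount': 2}:      not checked -> True,  checked == False -> False
    # {'checked': False}: not checked -> True,  checked == False -> True
    print(payload, not checked, checked == False)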
||||
|
||||
class Meta:
|
||||
model = ShoppingListEntry
|
||||
fields = (
|
||||
'id', 'list_recipe', 'food', 'unit', 'amount', 'order', 'checked', 'ingredient',
|
||||
'list_recipe_data', 'created_by', 'created_at', 'updated_at', 'completed_at', 'delay_until', 'mealplan_id'
|
||||
'id', 'list_recipe', 'food', 'unit', 'amount', 'order', 'checked',
|
||||
'recipe_mealplan',
|
||||
'created_by', 'created_at', 'updated_at', 'completed_at', 'delay_until'
|
||||
)
|
||||
read_only_fields = ('id', 'created_by', 'created_at')
|
||||
|
||||
|
||||
class ShoppingListEntrySimpleCreateSerializer(serializers.Serializer):
|
||||
amount = CustomDecimalField()
|
||||
unit_id = serializers.IntegerField(allow_null=True)
|
||||
food_id = serializers.IntegerField(allow_null=True)
|
||||
ingredient_id = serializers.IntegerField(allow_null=True)
|
||||
|
||||
|
||||
class ShoppingListEntryBulkCreateSerializer(serializers.Serializer):
|
||||
entries = serializers.ListField(child=ShoppingListEntrySimpleCreateSerializer())
|
||||
read_only_fields = ('id', 'created_by', 'created_at', 'updated_at',)
|
||||
|
||||
|
||||
class ShoppingListEntryBulkSerializer(serializers.Serializer):
|
||||
ids = serializers.ListField()
|
||||
checked = serializers.BooleanField()
|
||||
timestamp = serializers.DateTimeField(read_only=True, required=False)
|
||||
|
||||
|
||||
# TODO deprecate
|
||||
@@ -1328,13 +1224,7 @@ class ViewLogSerializer(serializers.ModelSerializer):
|
||||
def create(self, validated_data):
|
||||
validated_data['created_by'] = self.context['request'].user
|
||||
validated_data['space'] = self.context['request'].space
|
||||
|
||||
view_log = ViewLog.objects.filter(recipe=validated_data['recipe'], created_by=self.context['request'].user, created_at__gt=(timezone.now() - timezone.timedelta(minutes=5)),
|
||||
space=self.context['request'].space).first()
|
||||
if not view_log:
|
||||
view_log = ViewLog.objects.create(recipe=validated_data['recipe'], created_by=self.context['request'].user, space=self.context['request'].space)
|
||||
|
||||
return view_log
|
||||
return super().create(validated_data)
|
||||
|
||||
class Meta:
|
||||
model = ViewLog
|
||||
@@ -1395,7 +1285,7 @@ class InviteLinkSerializer(WritableNestedModelSerializer):
|
||||
validated_data['space'] = self.context['request'].space
|
||||
obj = super().create(validated_data)
|
||||
|
||||
if obj.email and EMAIL_HOST != '':
|
||||
if obj.email:
|
||||
try:
|
||||
if InviteLink.objects.filter(space=self.context['request'].space,
|
||||
created_at__gte=datetime.now() - timedelta(hours=4)).count() < 20:
|
||||
@@ -1463,13 +1353,9 @@ class AccessTokenSerializer(serializers.ModelSerializer):
|
||||
validated_data['user'] = self.context['request'].user
|
||||
return super().create(validated_data)
|
||||
|
||||
@extend_schema_field(str)
|
||||
def get_token(self, obj):
|
||||
if (timezone.now() - obj.created).seconds < 15:
|
||||
return obj.token
|
||||
if obj.scope == 'bookmarklet':
|
||||
# bookmarklet-only tokens are always returned because they have very limited access and are needed for the bookmarklet function to work
|
||||
return obj.token
|
||||
return f'tda_************_******_***********{obj.token[len(obj.token) - 4:]}'
|
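One subtlety in the freshness check above: timedelta.seconds is only the seconds component of the delta and ignores whole days, so (timezone.now() - obj.created).seconds < 15 is also briefly true once every day after the token was created; total_seconds() measures the full elapsed time. A small illustration:

from datetime import timedelta

age = timedelta(days=3, seconds=10)   # token created 3 days and 10 seconds ago
print(age.seconds < 15)               # True  -- .seconds ignores the days component
print(age.total_seconds() < 15)       # False -- full elapsed time since creation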
||||
|
||||
class Meta:
|
||||
@@ -1478,45 +1364,6 @@ class AccessTokenSerializer(serializers.ModelSerializer):
|
||||
read_only_fields = ('id', 'token',)
|
||||
|
||||
|
||||
class LocalizationSerializer(serializers.Serializer):
|
||||
code = serializers.CharField(max_length=8, read_only=True)
|
||||
language = serializers.CharField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
fields = '__ALL__'
|
||||
|
||||
|
||||
class ServerSettingsSerializer(serializers.Serializer):
|
||||
# TODO add all other relevant settings including path/url related ones?
|
||||
shopping_min_autosync_interval = serializers.CharField()
|
||||
enable_pdf_export = serializers.BooleanField()
|
||||
enable_ai_import = serializers.BooleanField()
|
||||
disable_external_connectors = serializers.BooleanField()
|
||||
terms_url = serializers.CharField()
|
||||
privacy_url = serializers.CharField()
|
||||
imprint_url = serializers.CharField()
|
||||
hosted = serializers.BooleanField()
|
||||
debug = serializers.BooleanField()
|
||||
version = serializers.CharField()
|
||||
|
||||
class Meta:
|
||||
fields = '__ALL__'
|
||||
read_only_fields = '__ALL__'
|
||||
|
||||
|
||||
class FdcQueryFoodsSerializer(serializers.Serializer):
|
||||
fdcId = serializers.IntegerField()
|
||||
description = serializers.CharField()
|
||||
dataType = serializers.CharField()
|
||||
|
||||
|
||||
class FdcQuerySerializer(serializers.Serializer):
|
||||
totalHits = serializers.IntegerField()
|
||||
currentPage = serializers.IntegerField()
|
||||
totalPages = serializers.IntegerField()
|
||||
foods = FdcQueryFoodsSerializer(many=True)
|
||||
|
||||
|
||||
# Export/Import Serializers
|
||||
|
||||
class KeywordExportSerializer(KeywordSerializer):
|
||||
@@ -1599,8 +1446,8 @@ class RecipeExportSerializer(WritableNestedModelSerializer):
|
||||
class RecipeShoppingUpdateSerializer(serializers.ModelSerializer):
|
||||
list_recipe = serializers.IntegerField(write_only=True, allow_null=True, required=False,
|
||||
help_text=_("Existing shopping list to update"))
|
||||
ingredients = serializers.ListField(child=serializers.IntegerField(write_only=True, allow_null=True, required=False, help_text=_(
|
||||
"List of ingredient IDs from the recipe to add, if not provided all ingredients will be added.")))
|
||||
ingredients = serializers.IntegerField(write_only=True, allow_null=True, required=False, help_text=_(
|
||||
"List of ingredient IDs from the recipe to add, if not provided all ingredients will be added."))
|
||||
servings = serializers.IntegerField(default=1, write_only=True, allow_null=True, required=False, help_text=_(
|
||||
"Providing a list_recipe ID and servings of 0 will delete that shopping list."))
|
||||
|
||||
@@ -1628,139 +1475,3 @@ class RecipeFromSourceSerializer(serializers.Serializer):
|
||||
url = serializers.CharField(max_length=4096, required=False, allow_null=True, allow_blank=True)
|
||||
data = serializers.CharField(required=False, allow_null=True, allow_blank=True)
|
||||
bookmarklet = serializers.IntegerField(required=False, allow_null=True, )
|
||||
|
||||
|
||||
class SourceImportFoodSerializer(serializers.Serializer):
|
||||
name = serializers.CharField()
|
||||
|
||||
|
||||
class SourceImportUnitSerializer(serializers.Serializer):
|
||||
name = serializers.CharField()
|
||||
|
||||
|
||||
class SourceImportIngredientSerializer(serializers.Serializer):
|
||||
amount = serializers.FloatField()
|
||||
food = SourceImportFoodSerializer()
|
||||
unit = SourceImportUnitSerializer()
|
||||
note = serializers.CharField(required=False)
|
||||
original_text = serializers.CharField()
|
||||
|
||||
|
||||
class SourceImportStepSerializer(serializers.Serializer):
|
||||
instruction = serializers.CharField()
|
||||
ingredients = SourceImportIngredientSerializer(many=True)
|
||||
show_ingredients_table = serializers.BooleanField(default=True)
|
||||
|
||||
|
||||
class SourceImportKeywordSerializer(serializers.Serializer):
|
||||
id = serializers.IntegerField(allow_null=True)
|
||||
label = serializers.CharField()
|
||||
name = serializers.CharField()
|
||||
import_keyword = serializers.BooleanField(default=True)
|
||||
|
||||
|
||||
class SourceImportPropertyTypeSerializer(serializers.Serializer):
|
||||
id = serializers.IntegerField()
|
||||
name = serializers.CharField()
|
||||
|
||||
|
||||
class SourceImportPropertySerializer(serializers.Serializer):
|
||||
property_type = SourceImportPropertyTypeSerializer(many=False)
|
||||
property_amount = serializers.FloatField()
|
||||
|
||||
|
||||
class SourceImportRecipeSerializer(serializers.Serializer):
|
||||
steps = SourceImportStepSerializer(many=True)
|
||||
internal = serializers.BooleanField(default=True)
|
||||
source_url = serializers.URLField()
|
||||
name = serializers.CharField()
|
||||
description = serializers.CharField(default=None)
|
||||
servings = serializers.IntegerField(default=1)
|
||||
servings_text = serializers.CharField(default='')
|
||||
working_time = serializers.IntegerField(default=0)
|
||||
waiting_time = serializers.IntegerField(default=0)
|
||||
image_url = serializers.URLField(default=None)
|
||||
keywords = SourceImportKeywordSerializer(many=True, default=[])
|
||||
|
||||
properties = serializers.ListField(child=SourceImportPropertySerializer(), default=[])
|
||||
|
||||
|
||||
class SourceImportDuplicateSerializer(serializers.Serializer):
|
||||
id = serializers.IntegerField()
|
||||
name = serializers.CharField()
|
||||
|
||||
|
||||
class RecipeFromSourceResponseSerializer(serializers.Serializer):
|
||||
recipe = SourceImportRecipeSerializer(default=None)
|
||||
recipe_id = serializers.IntegerField(default=None)
|
||||
images = serializers.ListField(child=serializers.CharField(), default=[], allow_null=False)
|
||||
error = serializers.BooleanField(default=False)
|
||||
msg = serializers.CharField(max_length=1024, default='')
|
||||
duplicates = serializers.ListField(child=SourceImportDuplicateSerializer(), default=[], allow_null=False)
|
||||
|
||||
|
||||
class AiImportSerializer(serializers.Serializer):
|
||||
file = serializers.FileField(allow_null=True)
|
||||
text = serializers.CharField(allow_null=True, allow_blank=True)
|
||||
|
||||
|
||||
class ExportRequestSerializer(serializers.Serializer):
|
||||
type = serializers.CharField()
|
||||
all = serializers.BooleanField(default=False)
|
||||
recipes = RecipeFlatSerializer(many=True, default=[])
|
||||
custom_filter = CustomFilterSerializer(many=False, default=None, allow_null=True)
|
||||
|
||||
class ImportOpenDataSerializer(serializers.Serializer):
|
||||
selected_version = serializers.CharField()
|
||||
selected_datatypes = serializers.ListField(child=serializers.CharField())
|
||||
update_existing = serializers.BooleanField(default=True)
|
||||
use_metric = serializers.BooleanField(default=True)
|
||||
|
||||
|
||||
class ImportOpenDataResponseDetailSerializer(serializers.Serializer):
|
||||
total_created = serializers.IntegerField(default=0)
|
||||
total_updated = serializers.IntegerField(default=0)
|
||||
total_untouched = serializers.IntegerField(default=0)
|
||||
total_errored = serializers.IntegerField(default=0)
|
||||
|
||||
|
||||
class ImportOpenDataResponseSerializer(serializers.Serializer):
|
||||
food = ImportOpenDataResponseDetailSerializer(required=False)
|
||||
unit = ImportOpenDataResponseDetailSerializer(required=False)
|
||||
category = ImportOpenDataResponseDetailSerializer(required=False)
|
||||
property = ImportOpenDataResponseDetailSerializer(required=False)
|
||||
store = ImportOpenDataResponseDetailSerializer(required=False)
|
||||
conversion = ImportOpenDataResponseDetailSerializer(required=False)
|
||||
|
||||
|
||||
class ImportOpenDataVersionMetaDataSerializer(serializers.Serializer):
|
||||
food = serializers.IntegerField()
|
||||
unit = serializers.IntegerField()
|
||||
category = serializers.IntegerField()
|
||||
property = serializers.IntegerField()
|
||||
store = serializers.IntegerField()
|
||||
conversion = serializers.IntegerField()
|
||||
|
||||
|
||||
class ImportOpenDataMetaDataSerializer(serializers.Serializer):
|
||||
versions = serializers.ListField(child=serializers.CharField())
|
||||
datatypes = serializers.ListField(child=serializers.CharField())
|
||||
|
||||
base = ImportOpenDataVersionMetaDataSerializer()
|
||||
cs = ImportOpenDataVersionMetaDataSerializer()
|
||||
da = ImportOpenDataVersionMetaDataSerializer()
|
||||
de = ImportOpenDataVersionMetaDataSerializer()
|
||||
el = ImportOpenDataVersionMetaDataSerializer()
|
||||
en = ImportOpenDataVersionMetaDataSerializer()
|
||||
es = ImportOpenDataVersionMetaDataSerializer()
|
||||
fr = ImportOpenDataVersionMetaDataSerializer()
|
||||
hu = ImportOpenDataVersionMetaDataSerializer()
|
||||
it = ImportOpenDataVersionMetaDataSerializer()
|
||||
nb_NO = ImportOpenDataVersionMetaDataSerializer()
|
||||
nl = ImportOpenDataVersionMetaDataSerializer()
|
||||
pl = ImportOpenDataVersionMetaDataSerializer()
|
||||
pt = ImportOpenDataVersionMetaDataSerializer()
|
||||
pt_BR = ImportOpenDataVersionMetaDataSerializer()
|
||||
sk = ImportOpenDataVersionMetaDataSerializer()
|
||||
sl = ImportOpenDataVersionMetaDataSerializer()
|
||||
zh_Hans = ImportOpenDataVersionMetaDataSerializer()
|
||||
|
||||
@@ -120,6 +120,37 @@ def update_food_inheritance(sender, instance=None, created=False, **kwargs):
|
||||
child.save()
|
||||
|
||||
|
||||
@receiver(post_save, sender=MealPlan)
|
||||
def auto_add_shopping(sender, instance=None, created=False, weak=False, **kwargs):
|
||||
print("MEAL_AUTO_ADD Signal trying to auto add to shopping")
|
||||
if not instance:
|
||||
print("MEAL_AUTO_ADD Instance is none")
|
||||
return
|
||||
|
||||
try:
|
||||
space = instance.get_space()
|
||||
user = instance.get_owner()
|
||||
with scope(space=space):
|
||||
slr_exists = instance.shoppinglistrecipe_set.exists()
|
||||
|
||||
if not created and slr_exists:
|
||||
for x in instance.shoppinglistrecipe_set.all():
|
||||
# assuming that permissions checks for the MealPlan have happened upstream
|
||||
if instance.servings != x.servings:
|
||||
SLR = RecipeShoppingEditor(id=x.id, user=user, space=instance.space)
|
||||
SLR.edit_servings(servings=instance.servings)
|
||||
elif not user.userpreference.mealplan_autoadd_shopping or not instance.recipe:
|
||||
print("MEAL_AUTO_ADD No recipe or no setting")
|
||||
return
|
||||
|
||||
if created:
|
||||
SLR = RecipeShoppingEditor(user=user, space=space)
|
||||
SLR.create(mealplan=instance, servings=instance.servings)
|
||||
print("MEAL_AUTO_ADD Created SLR")
|
||||
except AttributeError:
|
||||
pass
|
||||
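The diagnostics in this receiver go through bare print() calls; routing them through the logging module is the more conventional Django approach and keeps them filterable per environment. A hedged sketch of the same messages (a suggestion, not what the code currently does):

import logging

logger = logging.getLogger(__name__)

# e.g. inside auto_add_shopping, replacing the print() calls above
logger.debug('MEAL_AUTO_ADD signal trying to auto add to shopping')
logger.debug('MEAL_AUTO_ADD instance is none')
logger.debug('MEAL_AUTO_ADD created SLR')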
|
||||
|
||||
@receiver(post_save, sender=Unit)
|
||||
def clear_unit_cache(sender, instance=None, created=False, **kwargs):
|
||||
if instance:
|
||||
|
||||
@@ -1,87 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 26.0.3, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1"
|
||||
id="svg48" inkscape:export-xdpi="48" inkscape:export-ydpi="48" sodipodi:docname="logo_color_shopping.svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:serif="http://www.serif.com/" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 558.1 558.1"
|
||||
style="enable-background:new 0 0 558.1 558.1;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill-rule:evenodd;clip-rule:evenodd;fill:url(#ellipse2_00000062882008473870802360000001235346600214014370_);}
|
||||
.st1{clip-path:url(#SVGID_00000040562990235419179500000014537515251997333940_);}
|
||||
.st2{fill-rule:evenodd;clip-rule:evenodd;fill:#161616;}
|
||||
.st3{fill-rule:evenodd;clip-rule:evenodd;fill:#FFCB76;}
|
||||
.st4{fill-rule:evenodd;clip-rule:evenodd;fill:#FF6F00;}
|
||||
.st5{clip-path:url(#SVGID_00000026884998257896383920000012290328997039565247_);}
|
||||
.st6{fill-rule:evenodd;clip-rule:evenodd;fill:#FFD100;}
|
||||
.st7{fill:#161616;}
|
||||
</style>
|
||||
<sodipodi:namedview bordercolor="#666666" borderopacity="1" gridtolerance="10" guidetolerance="10" id="namedview50" inkscape:current-layer="svg48" inkscape:cx="256" inkscape:cy="256" inkscape:pageopacity="0" inkscape:pageshadow="2" inkscape:window-height="1377" inkscape:window-maximized="1" inkscape:window-width="2560" inkscape:window-x="2552" inkscape:window-y="-8" inkscape:zoom="2.0039062" objecttolerance="10" pagecolor="#ffffff" showgrid="false">
|
||||
</sodipodi:namedview>
|
||||
<g id="Kreis" transform="matrix(0.92371046,0,0,0.95776263,3.7134303,-54.329713)">
|
||||
|
||||
<linearGradient id="ellipse2_00000072976586691886204630000005673338158137684613_" gradientUnits="userSpaceOnUse" x1="-24.1585" y1="348.0664" x2="-23.1585" y2="348.0664" gradientTransform="matrix(2.147900e-06 0 0 -2.227081e-06 4347.1548 66.3621)">
|
||||
<stop offset="0" style="stop-color:#272727"/>
|
||||
<stop offset="1" style="stop-color:#6C6C6C"/>
|
||||
</linearGradient>
|
||||
|
||||
<ellipse id="ellipse2" style="fill-rule:evenodd;clip-rule:evenodd;fill:url(#ellipse2_00000072976586691886204630000005673338158137684613_);" cx="298.1" cy="348.1" rx="302.1" ry="291.3"/>
|
||||
<g>
|
||||
<defs>
|
||||
<circle id="SVGID_1_" cx="298.1" cy="348.1" r="279"/>
|
||||
</defs>
|
||||
<clipPath id="SVGID_00000080899089203538361760000014164288875061347472_">
|
||||
<use xlink:href="#SVGID_1_" style="overflow:visible;"/>
|
||||
</clipPath>
|
||||
<g id="g18" style="clip-path:url(#SVGID_00000080899089203538361760000014164288875061347472_);">
|
||||
<g id="Shadow" transform="matrix(1.10322,0,0,1.064,-5.58287,50.5786)">
|
||||
<path id="path7" class="st2" d="M163.2,477.5l271.2,271.2L759.1,557L416.4,214.2L163.2,477.5z"/>
|
||||
<g id="g11" transform="translate(-4.22105,0.775864)">
|
||||
<path id="path9" class="st2" d="M223.4,188.6L545.8,511l121-106.1L326,64.1l-3.2,78.4L223.4,188.6z"/>
|
||||
</g>
|
||||
<g id="g15" transform="translate(-85.3876,27.8512)">
|
||||
<path id="path13" class="st2" d="M328.5,154.7l322.4,322.4l3.1-71.6L313.3,64.7l-3.6,82.2L328.5,154.7z"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
<g id="g23" transform="matrix(0.93750213,0,0,0.93750213,15.953517,15.99888)">
|
||||
<path id="path21" class="st3" d="M280.6,238.7c35.1,0,65.8-14.8,85.3-30.1c19.7-15.6,48.3-12.5,64.2,6.9
|
||||
c26.2,31.7,41.8,71.9,41.8,115.6c0,71.6-44.2,159.9-105.6,190.9c-26.4,13.3-53.8,19.6-85.6,19.6l0,0h-0.1h-0.1l0,0
|
||||
c-31.8,0-59.2-6.3-85.6-19.6C133.5,491.1,89.3,402.7,89.3,331.2c0-43.7,15.7-83.9,41.8-115.6c15.9-19.4,44.5-22.5,64.2-6.9
|
||||
C214.8,224,245.5,238.7,280.6,238.7L280.6,238.7z"/>
|
||||
</g>
|
||||
<g id="Flame-2" transform="matrix(0.61547875,0,0,0.56833279,-138.25728,-438.60298)" serif:id="Flame 2">
|
||||
<path id="path25" class="st4" d="M636,823.4c-2.8-4-2.8-9.6-0.1-13.7c2.8-4.1,7.7-5.6,12.1-3.9c22.2,8.9,51.2,22.5,73.8,40.9
|
||||
c46.9,38.3,59.7,63.9,70.2,90.3c12.4,31.2,14.2,63.5,11.6,86c-7.6,64.6-56,117.9-125,117.9c-69,0-123.9-52.8-125-117.9
|
||||
c-0.7-39.2,12.1-70.5,26.1-92.8c3.5-5.6,10-7.8,15.8-5.6c5.8,2.3,9.5,8.6,8.8,15.3c-2,14.1-3.3,28.8-2.7,40.6
|
||||
c2.2,39.8,25.9,50,50.2,49.8c25.9-0.2,52.1-22.2,42.7-78.4C686.3,902.9,656.8,853.5,636,823.4L636,823.4z"/>
|
||||
<g>
|
||||
<defs>
|
||||
<path id="SVGID_00000067227953264718972400000007310393595166440114_" d="M636,823.4c-2.8-4-2.8-9.6-0.1-13.7
|
||||
c2.8-4.1,7.7-5.6,12.1-3.9c22.2,8.9,51.2,22.5,73.8,40.9c46.9,38.3,59.7,63.9,70.2,90.3c12.4,31.2,14.2,63.5,11.6,86
|
||||
c-7.6,64.6-56,117.9-125,117.9c-69,0-123.9-52.8-125-117.9c-0.7-39.2,12.1-70.5,26.1-92.8c3.5-5.6,10-7.8,15.8-5.6
|
||||
c5.8,2.3,9.5,8.6,8.8,15.3c-2,14.1-3.3,28.8-2.7,40.6c2.2,39.8,25.9,50,50.2,49.8c25.9-0.2,52.1-22.2,42.7-78.4
|
||||
C686.3,902.9,656.8,853.5,636,823.4L636,823.4z"/>
|
||||
</defs>
|
||||
<clipPath id="SVGID_00000178886517717031376290000002615817110574070691_">
|
||||
<use xlink:href="#SVGID_00000067227953264718972400000007310393595166440114_" style="overflow:visible;"/>
|
||||
</clipPath>
|
||||
<g id="g34" style="clip-path:url(#SVGID_00000178886517717031376290000002615817110574070691_);">
|
||||
<g id="g32" transform="matrix(1.28784,-0.270602,0.285942,1.59598,247.349,825.209)">
|
||||
<path id="path30" class="st6" d="M279.8,36.7c28.5,13.5,59.3,44.8,67.8,85.1c14.1,67-25.3,85.6-59.1,84
|
||||
c-54.2-2.6-72.4-45.5-36.2-97.1C274.8,76.8,253.9,24.5,279.8,36.7z"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
<path class="st7" d="M245.7,280.1c6.2,0,11.1,5,11.1,11.1v11.1h44.5v-11.1c0-6.2,5-11.1,11.1-11.1s11.1,5,11.1,11.1v11.1h16.7
|
||||
c9.2,0,16.7,7.5,16.7,16.7v16.7H201.2V319c0-9.2,7.5-16.7,16.7-16.7h16.7v-11.1C234.5,285,239.5,280.1,245.7,280.1z M201.2,346.8
|
||||
h155.8v94.6c0,9.2-7.5,16.7-16.7,16.7H217.8c-9.2,0-16.7-7.5-16.7-16.7V346.8z M223.4,374.6v11.1c0,3.1,2.5,5.6,5.6,5.6h11.1
|
||||
c3.1,0,5.6-2.5,5.6-5.6v-11.1c0-3.1-2.5-5.6-5.6-5.6H229C225.9,369.1,223.4,371.6,223.4,374.6z M267.9,374.6v11.1
|
||||
c0,3.1,2.5,5.6,5.6,5.6h11.1c3.1,0,5.6-2.5,5.6-5.6v-11.1c0-3.1-2.5-5.6-5.6-5.6h-11.1C270.4,369.1,267.9,371.6,267.9,374.6z
|
||||
M318,369.1c-3.1,0-5.6,2.5-5.6,5.6v11.1c0,3.1,2.5,5.6,5.6,5.6h11.1c3.1,0,5.6-2.5,5.6-5.6v-11.1c0-3.1-2.5-5.6-5.6-5.6H318z
|
||||
M223.4,419.1v11.1c0,3.1,2.5,5.6,5.6,5.6h11.1c3.1,0,5.6-2.5,5.6-5.6v-11.1c0-3.1-2.5-5.6-5.6-5.6H229
|
||||
C225.9,413.6,223.4,416.1,223.4,419.1z M273.5,413.6c-3.1,0-5.6,2.5-5.6,5.6v11.1c0,3.1,2.5,5.6,5.6,5.6h11.1c3.1,0,5.6-2.5,5.6-5.6
|
||||
v-11.1c0-3.1-2.5-5.6-5.6-5.6H273.5z M312.4,419.1v11.1c0,3.1,2.5,5.6,5.6,5.6h11.1c3.1,0,5.6-2.5,5.6-5.6v-11.1
|
||||
c0-3.1-2.5-5.6-5.6-5.6H318C314.9,413.6,312.4,416.1,312.4,419.1z"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 6.5 KiB |
|
Before Width: | Height: | Size: 5.5 KiB |
|
Before Width: | Height: | Size: 20 KiB |
@@ -1,83 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 26.0.3, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1"
|
||||
id="svg48" inkscape:export-xdpi="48" inkscape:export-ydpi="48" sodipodi:docname="logo_color_shopping.svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:serif="http://www.serif.com/" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 558.1 558.1"
|
||||
style="enable-background:new 0 0 558.1 558.1;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill-rule:evenodd;clip-rule:evenodd;fill:url(#ellipse2_00000123406314579419878720000015292012387505797789_);}
|
||||
.st1{clip-path:url(#SVGID_00000069378195777491616980000011609770225407185072_);}
|
||||
.st2{fill-rule:evenodd;clip-rule:evenodd;fill:#161616;}
|
||||
.st3{fill-rule:evenodd;clip-rule:evenodd;fill:#FFCB76;}
|
||||
.st4{fill-rule:evenodd;clip-rule:evenodd;fill:#FF6F00;}
|
||||
.st5{clip-path:url(#SVGID_00000129168706913539839680000006972943119257724314_);}
|
||||
.st6{fill-rule:evenodd;clip-rule:evenodd;fill:#FFD100;}
|
||||
</style>
|
||||
<sodipodi:namedview bordercolor="#666666" borderopacity="1" gridtolerance="10" guidetolerance="10" id="namedview50" inkscape:current-layer="svg48" inkscape:cx="256" inkscape:cy="256" inkscape:pageopacity="0" inkscape:pageshadow="2" inkscape:window-height="1377" inkscape:window-maximized="1" inkscape:window-width="2560" inkscape:window-x="2552" inkscape:window-y="-8" inkscape:zoom="2.0039062" objecttolerance="10" pagecolor="#ffffff" showgrid="false">
|
||||
</sodipodi:namedview>
|
||||
<g id="Kreis" transform="matrix(0.92371046,0,0,0.95776263,3.7134303,-54.329713)">
|
||||
|
||||
<linearGradient id="ellipse2_00000092432231847401152480000013557045245219773118_" gradientUnits="userSpaceOnUse" x1="-24.1585" y1="348.0664" x2="-23.1585" y2="348.0664" gradientTransform="matrix(2.147900e-06 0 0 -2.227081e-06 4347.1548 66.3621)">
|
||||
<stop offset="0" style="stop-color:#272727"/>
|
||||
<stop offset="1" style="stop-color:#6C6C6C"/>
|
||||
</linearGradient>
|
||||
|
||||
<ellipse id="ellipse2" style="fill-rule:evenodd;clip-rule:evenodd;fill:url(#ellipse2_00000092432231847401152480000013557045245219773118_);" cx="298.1" cy="348.1" rx="302.1" ry="291.3"/>
|
||||
<g>
|
||||
<defs>
|
||||
<circle id="SVGID_1_" cx="298.1" cy="348.1" r="279"/>
|
||||
</defs>
|
||||
<clipPath id="SVGID_00000078750348294658121660000005213362532985289101_">
|
||||
<use xlink:href="#SVGID_1_" style="overflow:visible;"/>
|
||||
</clipPath>
|
||||
<g id="g18" style="clip-path:url(#SVGID_00000078750348294658121660000005213362532985289101_);">
|
||||
<g id="Shadow" transform="matrix(1.10322,0,0,1.064,-5.58287,50.5786)">
|
||||
<path id="path7" class="st2" d="M163.2,477.5l271.2,271.2L759.1,557L416.4,214.2L163.2,477.5z"/>
|
||||
<g id="g11" transform="translate(-4.22105,0.775864)">
|
||||
<path id="path9" class="st2" d="M223.4,188.6L545.8,511l121-106.1L326,64.1l-3.2,78.4L223.4,188.6z"/>
|
||||
</g>
|
||||
<g id="g15" transform="translate(-85.3876,27.8512)">
|
||||
<path id="path13" class="st2" d="M328.5,154.7l322.4,322.4l3.1-71.6L313.3,64.7l-3.6,82.2L328.5,154.7z"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
<g id="g23" transform="matrix(0.93750213,0,0,0.93750213,15.953517,15.99888)">
|
||||
<path id="path21" class="st3" d="M280.6,238.7c35.1,0,65.8-14.8,85.3-30.1c19.7-15.6,48.3-12.5,64.2,6.9
|
||||
c26.2,31.7,41.8,71.9,41.8,115.6c0,71.6-44.2,159.9-105.6,190.9c-26.4,13.3-53.8,19.6-85.6,19.6l0,0h-0.1h-0.1l0,0
|
||||
c-31.8,0-59.2-6.3-85.6-19.6C133.5,491.1,89.3,402.7,89.3,331.2c0-43.7,15.7-83.9,41.8-115.6c15.9-19.4,44.5-22.5,64.2-6.9
|
||||
C214.8,224,245.5,238.7,280.6,238.7L280.6,238.7z"/>
|
||||
</g>
|
||||
<g id="Flame-2" transform="matrix(0.61547875,0,0,0.56833279,-138.25728,-438.60298)" serif:id="Flame 2">
|
||||
<path id="path25" class="st4" d="M636,823.4c-2.8-4-2.8-9.6-0.1-13.7c2.8-4.1,7.7-5.6,12.1-3.9c22.2,8.9,51.2,22.5,73.8,40.9
|
||||
c46.9,38.3,59.7,63.9,70.2,90.3c12.4,31.2,14.2,63.5,11.6,86c-7.6,64.6-56,117.9-125,117.9c-69,0-123.9-52.8-125-117.9
|
||||
c-0.7-39.2,12.1-70.5,26.1-92.8c3.5-5.6,10-7.8,15.8-5.6c5.8,2.3,9.5,8.6,8.8,15.3c-2,14.1-3.3,28.8-2.7,40.6
|
||||
c2.2,39.8,25.9,50,50.2,49.8c25.9-0.2,52.1-22.2,42.7-78.4C686.3,902.9,656.8,853.5,636,823.4L636,823.4z"/>
|
||||
<g>
|
||||
<defs>
|
||||
<path id="SVGID_00000170974775404888027640000005842834300324528060_" d="M636,823.4c-2.8-4-2.8-9.6-0.1-13.7
|
||||
c2.8-4.1,7.7-5.6,12.1-3.9c22.2,8.9,51.2,22.5,73.8,40.9c46.9,38.3,59.7,63.9,70.2,90.3c12.4,31.2,14.2,63.5,11.6,86
|
||||
c-7.6,64.6-56,117.9-125,117.9c-69,0-123.9-52.8-125-117.9c-0.7-39.2,12.1-70.5,26.1-92.8c3.5-5.6,10-7.8,15.8-5.6
|
||||
c5.8,2.3,9.5,8.6,8.8,15.3c-2,14.1-3.3,28.8-2.7,40.6c2.2,39.8,25.9,50,50.2,49.8c25.9-0.2,52.1-22.2,42.7-78.4
|
||||
C686.3,902.9,656.8,853.5,636,823.4L636,823.4z"/>
|
||||
</defs>
|
||||
<clipPath id="SVGID_00000137101924108689735560000002063526552501109413_">
|
||||
<use xlink:href="#SVGID_00000170974775404888027640000005842834300324528060_" style="overflow:visible;"/>
|
||||
</clipPath>
|
||||
<g id="g34" style="clip-path:url(#SVGID_00000137101924108689735560000002063526552501109413_);">
|
||||
<g id="g32" transform="matrix(1.28784,-0.270602,0.285942,1.59598,247.349,825.209)">
|
||||
<path id="path30" class="st6" d="M279.8,36.7c28.5,13.5,59.3,44.8,67.8,85.1c14.1,67-25.3,85.6-59.1,84
|
||||
c-54.2-2.6-72.4-45.5-36.2-97.1C274.8,76.8,253.9,24.5,279.8,36.7z"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
<path id="path958" class="st2" d="M182.4,290.6c0-4.5,3.6-8.1,8.1-8.1h15.4c7.4,0,14,4.3,17.1,10.8h139.1c8.9,0,15.4,8.5,13.1,17.1
|
||||
l-13.9,51.5c-2.9,10.6-12.5,18-23.5,18h-97.6l1.8,9.6c0.7,3.8,4.1,6.6,8,6.6h97.6c4.5,0,8.1,3.6,8.1,8.1s-3.6,8.1-8.1,8.1H250
|
||||
c-11.7,0-21.8-8.3-23.9-19.8L208.6,301c-0.2-1.3-1.4-2.2-2.7-2.2h-15.4C186,298.8,182.4,295.1,182.4,290.6L182.4,290.6z
|
||||
M225.7,439.5c0-9,7.3-16.3,16.2-16.3c9,0,16.3,7.3,16.3,16.2c0,0,0,0,0,0c0,9-7.3,16.3-16.2,16.3
|
||||
C233,455.8,225.7,448.5,225.7,439.5C225.7,439.6,225.7,439.5,225.7,439.5z M339.4,423.3c9,0,16.2,7.3,16.3,16.2
|
||||
c0,9-7.3,16.2-16.2,16.3c0,0,0,0,0,0c-9,0-16.2-7.3-16.3-16.2C323.2,430.6,330.4,423.3,339.4,423.3
|
||||
C339.4,423.3,339.4,423.3,339.4,423.3z"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 6.1 KiB |
|
Before Width: | Height: | Size: 5.5 KiB |
|
Before Width: | Height: | Size: 20 KiB |
@@ -1,177 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS
Before Width: | Height: | Size: 415 B After Width: | Height: | Size: 415 B |
Before Width: | Height: | Size: 883 B After Width: | Height: | Size: 883 B |
Before Width: | Height: | Size: 2.1 KiB After Width: | Height: | Size: 2.1 KiB |
Before Width: | Height: | Size: 408 B After Width: | Height: | Size: 408 B |
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 426 B After Width: | Height: | Size: 426 B |
Before Width: | Height: | Size: 158 B After Width: | Height: | Size: 158 B |
Before Width: | Height: | Size: 1.0 KiB After Width: | Height: | Size: 1.0 KiB |
Before Width: | Height: | Size: 1.1 KiB After Width: | Height: | Size: 1.1 KiB |
BIN
cookbook/static/pdfjs/images/findbarButton-next.png
Normal file
After Width: | Height: | Size: 193 B |
BIN
cookbook/static/pdfjs/images/findbarButton-next@2x.png
Normal file
After Width: | Height: | Size: 296 B |
BIN
cookbook/static/pdfjs/images/findbarButton-previous.png
Normal file
After Width: | Height: | Size: 199 B |
BIN
cookbook/static/pdfjs/images/findbarButton-previous@2x.png
Normal file
After Width: | Height: | Size: 304 B |
BIN
cookbook/static/pdfjs/images/grab.cur
Normal file
After Width: | Height: | Size: 326 B |
BIN
cookbook/static/pdfjs/images/grabbing.cur
Normal file
After Width: | Height: | Size: 326 B |
Before Width: | Height: | Size: 2.5 KiB After Width: | Height: | Size: 2.5 KiB |
BIN
cookbook/static/pdfjs/images/loading-small.png
Normal file
After Width: | Height: | Size: 7.2 KiB |
BIN
cookbook/static/pdfjs/images/loading-small@2x.png
Normal file
After Width: | Height: | Size: 16 KiB |
After Width: | Height: | Size: 403 B |
After Width: | Height: | Size: 933 B |
After Width: | Height: | Size: 179 B |
After Width: | Height: | Size: 266 B |
BIN
cookbook/static/pdfjs/images/secondaryToolbarButton-handTool.png
Normal file
After Width: | Height: | Size: 301 B |
After Width: | Height: | Size: 583 B |
BIN
cookbook/static/pdfjs/images/secondaryToolbarButton-lastPage.png
Normal file
After Width: | Height: | Size: 175 B |
After Width: | Height: | Size: 276 B |
After Width: | Height: | Size: 360 B |
After Width: | Height: | Size: 731 B |
BIN
cookbook/static/pdfjs/images/secondaryToolbarButton-rotateCw.png
Normal file
After Width: | Height: | Size: 359 B |
After Width: | Height: | Size: 714 B |
After Width: | Height: | Size: 218 B |
After Width: | Height: | Size: 332 B |
After Width: | Height: | Size: 228 B |
After Width: | Height: | Size: 349 B |
After Width: | Height: | Size: 297 B |
After Width: | Height: | Size: 490 B |
After Width: | Height: | Size: 461 B |
After Width: | Height: | Size: 1.0 KiB |
After Width: | Height: | Size: 347 B |
After Width: | Height: | Size: 694 B |
After Width: | Height: | Size: 179 B |
After Width: | Height: | Size: 261 B |
After Width: | Height: | Size: 344 B |
After Width: | Height: | Size: 621 B |
BIN
cookbook/static/pdfjs/images/shadow.png
Normal file
After Width: | Height: | Size: 290 B |
BIN
cookbook/static/pdfjs/images/texture.png
Normal file
After Width: | Height: | Size: 2.4 KiB |
BIN
cookbook/static/pdfjs/images/toolbarButton-bookmark.png
Normal file
After Width: | Height: | Size: 174 B |
BIN
cookbook/static/pdfjs/images/toolbarButton-bookmark@2x.png
Normal file
After Width: | Height: | Size: 260 B |
BIN
cookbook/static/pdfjs/images/toolbarButton-download.png
Normal file
After Width: | Height: | Size: 259 B |
BIN
cookbook/static/pdfjs/images/toolbarButton-download@2x.png
Normal file
After Width: | Height: | Size: 425 B |
BIN
cookbook/static/pdfjs/images/toolbarButton-menuArrows.png
Normal file
After Width: | Height: | Size: 107 B |
BIN
cookbook/static/pdfjs/images/toolbarButton-menuArrows@2x.png
Normal file
After Width: | Height: | Size: 152 B |
BIN
cookbook/static/pdfjs/images/toolbarButton-openFile.png
Normal file
After Width: | Height: | Size: 295 B |
BIN
cookbook/static/pdfjs/images/toolbarButton-openFile@2x.png
Normal file
After Width: | Height: | Size: 550 B |
BIN
cookbook/static/pdfjs/images/toolbarButton-pageDown.png
Normal file
After Width: | Height: | Size: 238 B |
BIN
cookbook/static/pdfjs/images/toolbarButton-pageDown@2x.png
Normal file
After Width: | Height: | Size: 396 B |
BIN
cookbook/static/pdfjs/images/toolbarButton-pageUp.png
Normal file
After Width: | Height: | Size: 246 B |