Compare commits

..

1 Commit

Author SHA1 Message Date
vabene1111
bc82049f42 work in progress, playing around 2022-10-02 16:39:56 +02:00
540 changed files with 46729 additions and 98512 deletions

View File

@@ -1,11 +0,0 @@
[run]
omit =
*/apps.py,
*/migrations/*,
*/settings*,
*/test*,
*/tests/*,
*urls.py,
*/wsgi*,
manage.py,
*__init__*

View File

@@ -1,26 +0,0 @@
FROM python:3.10-alpine3.18
#Install all dependencies.
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev openldap git yarn
#Print all logs without buffering it.
ENV PYTHONUNBUFFERED 1
#This port will be used by gunicorn.
EXPOSE 8000
#This port will be used by vue
EXPOSE 8080
#Install all python dependencies to the image
COPY requirements.txt /tmp/pip-tmp/
RUN \
if [ `apk --print-arch` = "armv7" ]; then \
printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf ; \
fi
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev && \
echo -n "INPUT ( libldap.so )" > /usr/lib/libldap_r.so && \
pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt && \
rm -rf /tmp/pip-tmp && \
apk --purge del .build-deps

View File

@@ -1,27 +0,0 @@
// For format details, see https://aka.ms/devcontainer.json.
{
"name": "Tandoor Dev Container",
"build": { "context": "..", "dockerfile": "Dockerfile" },
// Features to add to the dev container. More info: https://containers.dev/features.
// "features": {},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [8000, 8080],
// Use 'postCreateCommand' to run commands after the container is created.
// "postCreateCommand": "pip3 install --user -r requirements.txt"
// Configure tool-specific properties.
"customizations": {
"vscode": {
"extensions": [
"ms-python.debugpy",
"ms-python.python"
]
}
}
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
// "remoteUser": "root"
}

View File

@@ -3,6 +3,7 @@ npm-debug.log
Dockerfile*
docker-compose*
.dockerignore
.git
.gitignore
README.md
LICENSE

View File

@@ -1,18 +1,167 @@
# ---------------------------------------------------------------------------
# This template contains only required options.
# Visit the docs to find more https://docs.tandoor.dev/system/configuration/
# ---------------------------------------------------------------------------
# only set this to true when testing/debugging
# when unset: 1 (true) - don't unset this, just for development
DEBUG=0
SQL_DEBUG=0
# HTTP port to bind to
# TANDOOR_PORT=8080
# hosts the application can run under e.g. recipes.mydomain.com,cooking.mydomain.com,...
ALLOWED_HOSTS=*
# random secret key, use for example `base64 /dev/urandom | head -c50` to generate one
# ---------------------------- REQUIRED -------------------------
SECRET_KEY=
# ---------------------------------------------------------------
# allowed hosts (see documentation), should be set to your hostname(s) but might be * (default) for some proxies/providers
# ALLOWED_HOSTS=recipes.mydomain.com
# your default timezone. See https://timezonedb.com/time-zones for a list of timezones
TIMEZONE=Europe/Berlin
# add only a database password if you want to run with the default postgres, otherwise change settings accordingly
DB_ENGINE=django.db.backends.postgresql
# DB_OPTIONS= {} # e.g. {"sslmode":"require"} to enable ssl
POSTGRES_HOST=db_recipes
POSTGRES_DB=djangodb
POSTGRES_PORT=5432
POSTGRES_USER=djangouser
# ---------------------------- REQUIRED -------------------------
POSTGRES_PASSWORD=
# ---------------------------------------------------------------
# database connection string, when used overrides other database settings.
# format might vary depending on backend
# DATABASE_URL = engine://username:password@host:port/dbname
# the default value for the user preference 'fractions' (enable/disable fraction support)
# default: disabled=0
FRACTION_PREF_DEFAULT=0
# the default value for the user preference 'comments' (enable/disable commenting system)
# default comments enabled=1
COMMENT_PREF_DEFAULT=1
# Users can set an amount of time after which the shopping list is refreshed when they are in viewing mode
# This is the minimum interval users can set. Setting this too low will allow users to refresh very frequently, which
# might cause high load on the server. (Technically they can obviously refresh as often as they want with their own scripts)
SHOPPING_MIN_AUTOSYNC_INTERVAL=5
# Default for user setting sticky navbar
# STICKY_NAV_PREF_DEFAULT=1
# If base URL is something other than just / (you are serving a subfolder in your proxy for instance http://recipe_app/recipes/)
# Be sure to not have a trailing slash: e.g. '/recipes' instead of '/recipes/'
# SCRIPT_NAME=/recipes
# If staticfiles are stored at a different location uncomment and change accordingly, MUST END IN /
# this is not required if you are just using a subfolder
# This can either be a relative path from the application's base path or the url of an external host
# STATIC_URL=/static/
# If mediafiles are stored at a different location uncomment and change accordingly, MUST END IN /
# this is not required if you are just using a subfolder
# This can either be a relative path from the application's base path or the url of an external host
# MEDIA_URL=/media/
# Serve mediafiles directly using gunicorn. Basically everyone recommends not doing this. Please use any of the examples
# provided that include an additional nginx container to handle media file serving.
# If you know what you are doing turn this back on (1) to serve media files using Django's serve() method.
# when unset: 1 (true) - this is temporary until an appropriate amount of time has passed for everyone to migrate
GUNICORN_MEDIA=0
# GUNICORN SERVER RELATED SETTINGS (see https://docs.gunicorn.org/en/stable/design.html#how-many-workers for recommended settings)
# GUNICORN_WORKERS=1
# GUNICORN_THREADS=1
# S3 Media settings: store mediafiles in s3 or any compatible storage backend (e.g. minio)
# as long as S3_ACCESS_KEY is not set S3 features are disabled
# S3_ACCESS_KEY=
# S3_SECRET_ACCESS_KEY=
# S3_BUCKET_NAME=
# S3_REGION_NAME= # default none, setting your region might be required
# S3_QUERYSTRING_AUTH=1 # default true, set to 0 to serve media from a public bucket without signed urls
# S3_QUERYSTRING_EXPIRE=3600 # number of seconds querystrings are valid for
# S3_ENDPOINT_URL= # when using a custom endpoint like minio
# S3_CUSTOM_DOMAIN= # when using a CDN/proxy to S3 (see https://github.com/TandoorRecipes/recipes/issues/1943)
# Email Settings, see https://docs.djangoproject.com/en/3.2/ref/settings/#email-host
# Required for email confirmation and password reset (automatically activates if host is set)
# EMAIL_HOST=
# EMAIL_PORT=
# EMAIL_HOST_USER=
# EMAIL_HOST_PASSWORD=
# EMAIL_USE_TLS=0
# EMAIL_USE_SSL=0
# email sender address (default 'webmaster@localhost')
# DEFAULT_FROM_EMAIL=
# prefix used for account related emails (default "[Tandoor Recipes] ")
# ACCOUNT_EMAIL_SUBJECT_PREFIX=
# allow authentication via reverse proxy (e.g. authelia), leave off if you don't know what you are doing
# see docs for more information https://vabene1111.github.io/recipes/features/authentication/
# when unset: 0 (false)
REVERSE_PROXY_AUTH=0
# Default settings for spaces, apply per space and can be changed in the admin view
# SPACE_DEFAULT_MAX_RECIPES=0 # 0=unlimited recipes
# SPACE_DEFAULT_MAX_USERS=0 # 0=unlimited users per space
# SPACE_DEFAULT_MAX_FILES=0 # Maximum file storage for space in MB. 0 for unlimited, -1 to disable file upload.
# SPACE_DEFAULT_ALLOW_SHARING=1 # Allow users to share recipes with public links
# allow people to create accounts on your application instance (without an invite link)
# when unset: 0 (false)
# ENABLE_SIGNUP=0
# If signup is enabled you might want to add a captcha to it to prevent spam
# HCAPTCHA_SITEKEY=
# HCAPTCHA_SECRET=
# if signup is enabled you might want to provide urls to data protection policies or terms and conditions
# TERMS_URL=
# PRIVACY_URL=
# IMPRINT_URL=
# enable serving of prometheus metrics under the /metrics path
# ATTENTION: view is not secured (as per the prometheus default way) so make sure to secure it
# through your web server (or leave it open if you don't care whether the stats are exposed)
# ENABLE_METRICS=0
# allows you to setup OAuth providers
# see docs for more information https://vabene1111.github.io/recipes/features/authentication/
# SOCIAL_PROVIDERS = allauth.socialaccount.providers.github, allauth.socialaccount.providers.nextcloud,
# Should a newly created user from a social provider get assigned to the default space and given permission by default?
# ATTENTION: This feature might be deprecated in favor of a space join and public viewing system in the future
# default 0 (false), when 1 (true) users will be assigned space and group
# SOCIAL_DEFAULT_ACCESS = 1
# if SOCIAL_DEFAULT_ACCESS is used, which group should be added
# SOCIAL_DEFAULT_GROUP=guest
# Django session cookie settings. Can be changed to allow a single django application to authenticate several applications
# when running under the same database
# SESSION_COOKIE_DOMAIN=.example.com
# SESSION_COOKIE_NAME=sessionid # use this only to not interfere with non unified django applications under the same top level domain
# by default SORT_TREE_BY_NAME is disabled; this will store all Keywords and Food in the order they are created
# enabling this setting makes saving new keywords and foods very slow, which doesn't matter in most use cases.
# however, when doing large imports of recipes that will create new objects, it can increase total run time by 10-15x
# Keywords and Food can be manually sorted by name in Admin
# This value can also be temporarily changed in Admin, it will revert the next time the application is started
# This will be fixed/changed in the future by changing the implementation or finding a better workaround for sorting
# SORT_TREE_BY_NAME=0
# LDAP authentication
# default 0 (false), when 1 (true) list of allowed users will be fetched from LDAP server
#LDAP_AUTH=
#AUTH_LDAP_SERVER_URI=
#AUTH_LDAP_BIND_DN=
#AUTH_LDAP_BIND_PASSWORD=
#AUTH_LDAP_USER_SEARCH_BASE_DN=
#AUTH_LDAP_TLS_CACERTFILE=
# Enables exporting PDF (see export docs)
# Disabled by default, uncomment to enable
# ENABLE_PDF_EXPORT=1
# Recipe exports are cached for a certain time by default, adjust time if needed
# EXPORT_FILE_CACHE_DURATION=600
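For orientation, here is a minimal sketch (not the project's actual settings.py) of how a Django settings module could read a few of the options documented above; the env_bool helper is hypothetical and the template's 0/1 flag convention is assumed:

import os

def env_bool(name, default):
    # hypothetical helper: the template documents these flags as 0/1 integers
    return bool(int(os.getenv(name, str(default))))

DEBUG = env_bool("DEBUG", 0)
SQL_DEBUG = env_bool("SQL_DEBUG", 0)
ALLOWED_HOSTS = os.getenv("ALLOWED_HOSTS", "*").split(",")
SECRET_KEY = os.getenv("SECRET_KEY")  # REQUIRED, no default
TIMEZONE = os.getenv("TIMEZONE", "Europe/Berlin")
FRACTION_PREF_DEFAULT = env_bool("FRACTION_PREF_DEFAULT", 0)
COMMENT_PREF_DEFAULT = env_bool("COMMENT_PREF_DEFAULT", 1)
SHOPPING_MIN_AUTOSYNC_INTERVAL = int(os.getenv("SHOPPING_MIN_AUTOSYNC_INTERVAL", "5"))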

.flake8
View File

@@ -1,22 +0,0 @@
[flake8]
extend-ignore =
# Whitespace before ':' - Required for black compatibility
E203,
# Line break occurred before a binary operator - Required for black compatibility
W503,
# Comparison to False should be 'if cond is False:' or 'if not cond:'
E712
exclude =
.git,
**/__pycache__,
**/.git,
**/.svn,
**/.hg,
**/CVS,
**/.DS_Store,
.vscode,
**/*.pyc
per-file-ignores=
cookbook/apps.py:F401
max-line-length = 179

View File

@@ -1,15 +1,10 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "devcontainers"
directory: "/"
schedule:
interval: weekly
- package-ecosystem: "pip"
directory: "/"
schedule:
@@ -19,8 +14,3 @@ updates:
directory: "/vue/"
schedule:
interval: "monthly"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"

View File

@@ -1,110 +0,0 @@
name: Build Docker Container with open data plugin installed
on: push
jobs:
build-container:
name: Build ${{ matrix.name }} Container
runs-on: ubuntu-latest
if: github.repository_owner == 'TandoorRecipes'
continue-on-error: ${{ matrix.continue-on-error }}
permissions:
contents: read
packages: write
strategy:
matrix:
include:
# Standard build config
- name: Standard
dockerfile: Dockerfile
platforms: linux/amd64,linux/arm64
suffix: ""
continue-on-error: false
steps:
- uses: actions/checkout@v4
- name: Get version number
id: get_version
run: |
if [[ "$GITHUB_REF" = refs/tags/* ]]; then
echo "VERSION=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_OUTPUT
elif [[ "$GITHUB_REF" = refs/heads/beta ]]; then
echo VERSION=beta >> $GITHUB_OUTPUT
else
echo VERSION=develop >> $GITHUB_OUTPUT
fi
# clone open data plugin
- name: clone open data plugin repo
uses: actions/checkout@master
with:
repository: TandoorRecipes/open_data_plugin
ref: master
path: ./recipes/plugins/open_data_plugin
# Build Vue frontend
- uses: actions/setup-node@v4
with:
node-version: '18'
cache: yarn
cache-dependency-path: vue/yarn.lock
- name: Install dependencies
working-directory: ./vue
run: yarn install --frozen-lockfile
- name: Build dependencies
working-directory: ./vue
run: yarn build
- name: Setup Open Data Plugin Links
working-directory: ./recipes/plugins/open_data_plugin
run: python setup_repo.py
- name: Build Open Data Frontend
working-directory: ./recipes/plugins/open_data_plugin/vue
run: yarn build
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
if: github.secret_source == 'Actions'
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
if: github.secret_source == 'Actions'
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ github.token }}
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
vabene1111/recipes
ghcr.io/TandoorRecipes/recipes
flavor: |
latest=false
suffix=${{ matrix.suffix }}
tags: |
type=raw,value=latest,suffix=-open-data-plugin,enable=${{ startsWith(github.ref, 'refs/tags/') }}
type=semver,suffix=-open-data-plugin,pattern={{version}}
type=semver,suffix=-open-data-plugin,pattern={{major}}.{{minor}}
type=semver,suffix=-open-data-plugin,pattern={{major}}
type=ref,suffix=-open-data-plugin,event=branch
- name: Build and Push
uses: docker/build-push-action@v5
with:
context: .
file: ${{ matrix.dockerfile }}
pull: true
push: ${{ github.secret_source == 'Actions' }}
platforms: ${{ matrix.platforms }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -1,126 +0,0 @@
name: Build Docker Container
on: push
jobs:
build-container:
name: Build ${{ matrix.name }} Container
runs-on: ubuntu-latest
if: github.repository_owner == 'TandoorRecipes'
continue-on-error: ${{ matrix.continue-on-error }}
permissions:
contents: read
packages: write
strategy:
matrix:
include:
# Standard build config
- name: Standard
dockerfile: Dockerfile
platforms: linux/amd64,linux/arm64,linux/arm/v7
suffix: ""
continue-on-error: false
steps:
- uses: actions/checkout@v4
- name: Get version number
id: get_version
run: |
if [[ "$GITHUB_REF" = refs/tags/* ]]; then
echo "VERSION=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_OUTPUT
elif [[ "$GITHUB_REF" = refs/heads/beta ]]; then
echo VERSION=beta >> $GITHUB_OUTPUT
else
echo VERSION=develop >> $GITHUB_OUTPUT
fi
# Build Vue frontend
- uses: actions/setup-node@v4
with:
node-version: '18'
cache: yarn
cache-dependency-path: vue/yarn.lock
- name: Install dependencies
working-directory: ./vue
run: yarn install --frozen-lockfile
- name: Build dependencies
working-directory: ./vue
run: yarn build
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
if: github.secret_source == 'Actions'
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
if: github.secret_source == 'Actions'
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ github.token }}
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
vabene1111/recipes
ghcr.io/TandoorRecipes/recipes
flavor: |
latest=false
suffix=${{ matrix.suffix }}
tags: |
type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') }}
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=ref,event=branch
- name: Build and Push
uses: docker/build-push-action@v5
with:
context: .
file: ${{ matrix.dockerfile }}
pull: true
push: ${{ github.secret_source == 'Actions' }}
platforms: ${{ matrix.platforms }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max
notify-stable:
name: Notify Stable
runs-on: ubuntu-latest
needs: build-container
if: startsWith(github.ref, 'refs/tags/')
steps:
- name: Set tag name
run: |
# Strip "refs/tags/" prefix
echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
# Send stable discord notification
- name: Discord notification
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_RELEASE_WEBHOOK }}
uses: Ilshidur/action-discord@0.3.2
with:
args: '🚀 Version {{ VERSION }} of tandoor has been released 🥳 Check it out https://github.com/vabene1111/recipes/releases/tag/{{ VERSION }}'
notify-beta:
name: Notify Beta
runs-on: ubuntu-latest
needs: build-container
if: github.ref == 'refs/heads/beta'
steps:
# Send beta discord notification
- name: Discord notification
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_BETA_WEBHOOK }}
uses: Ilshidur/action-discord@0.3.2
with:
args: '🚀 The BETA Image has been updated! 🥳'

View File

@@ -1,81 +1,40 @@
name: Continuous Integration
on: [push, pull_request]
on: [push]
jobs:
build:
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
strategy:
max-parallel: 4
matrix:
python-version: ["3.10"]
node-version: ["18"]
build:
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
strategy:
max-parallel: 4
matrix:
python-version: ['3.10']
steps:
- uses: actions/checkout@v4
- uses: awalsh128/cache-apt-pkgs-action@v1.4.2
with:
packages: libsasl2-dev python3-dev libldap2-dev libssl-dev
version: 1.0
# Setup python & dependencies
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "pip"
- name: Install Python Dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: Cache StaticFiles
uses: actions/cache@v4
id: django_cache
with:
path: |
./cookbook/static
./vue/webpack-stats.json
./staticfiles
key: |
${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.node-version }}-collectstatic-${{ hashFiles('**/*.css', '**/*.js', 'vue/src/*') }}
# Build Vue frontend & Dependencies
- name: Set up Node ${{ matrix.node-version }}
if: steps.django_cache.outputs.cache-hit != 'true'
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
cache: "yarn"
cache-dependency-path: ./vue/yarn.lock
- name: Install Vue dependencies
if: steps.django_cache.outputs.cache-hit != 'true'
working-directory: ./vue
run: yarn install
- name: Build Vue dependencies
if: steps.django_cache.outputs.cache-hit != 'true'
working-directory: ./vue
run: yarn build
- name: Compile Django StaticFiles
if: steps.django_cache.outputs.cache-hit != 'true'
run: |
python3 manage.py collectstatic --noinput
python3 manage.py collectstatic_js_reverse
- uses: actions/cache/save@v4
if: steps.django_cache.outputs.cache-hit != 'true'
with:
path: |
./cookbook/static
./vue/webpack-stats.json
./staticfiles
key: |
${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.node-version }}-collectstatic-${{ hashFiles('**/*.css', '**/*.js', 'vue/src/*') }}
- name: Django Testing project
run: pytest --junitxml=junit/test-results-${{ matrix.python-version }}.xml
steps:
- uses: actions/checkout@v1
- name: Set up Python 3.10
uses: actions/setup-python@v1
with:
python-version: '3.10'
# Build Vue frontend
- uses: actions/setup-node@v2
with:
node-version: '14'
- name: Install Vue dependencies
working-directory: ./vue
run: yarn install
- name: Build Vue dependencies
working-directory: ./vue
run: yarn build
- name: Install Django dependencies
run: |
sudo apt-get -y update
sudo apt-get install -y libsasl2-dev python-dev libldap2-dev libssl-dev
python -m pip install --upgrade pip
pip install -r requirements.txt
python3 manage.py collectstatic --noinput
python3 manage.py collectstatic_js_reverse
- name: Django Testing project
run: |
pytest

View File

@@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v2
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head.
@@ -25,7 +25,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
with:
languages: python, javascript
@@ -47,6 +47,6 @@ jobs:
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
uses: github/codeql-action/analyze@v1
with:
languages: javascript, python

View File

@@ -0,0 +1,48 @@
name: publish beta raspi image docker
on:
push:
branches:
- 'beta'
jobs:
build:
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
# Update Version number
- name: Update version file
uses: DamianReeves/write-file-action@v1.0
with:
path: recipes/version.py
contents: |
VERSION_NUMBER = 'beta'
BUILD_REF = '${{ github.sha }}'
write-mode: overwrite
# Build Vue frontend
- uses: actions/setup-node@v2
with:
node-version: '14'
- name: Install dependencies
working-directory: ./vue
run: yarn install
- name: Build dependencies
working-directory: ./vue
run: yarn build
# Build container
- name: Build and publish image
uses: ilteoood/docker_buildx@master
with:
publish: true
imageName: vabene1111/recipes
tag: beta-raspi
dockerFile: Dockerfile-raspi
platform: linux/arm/v7
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}
# Send discord notification
- name: Discord notification
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_BETA_WEBHOOK }}
uses: Ilshidur/action-discord@0.3.2
with:
args: '🚀 The BETA Image has been updated! 🥳'

View File

@@ -0,0 +1,47 @@
name: publish beta image docker
on:
push:
branches:
- 'beta'
jobs:
build:
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
# Update Version number
- name: Update version file
uses: DamianReeves/write-file-action@v1.0
with:
path: recipes/version.py
contents: |
VERSION_NUMBER = 'beta'
BUILD_REF = '${{ github.sha }}'
write-mode: overwrite
# Build Vue frontend
- uses: actions/setup-node@v2
with:
node-version: '14'
- name: Install dependencies
working-directory: ./vue
run: yarn install
- name: Build dependencies
working-directory: ./vue
run: yarn build
# Build container
- name: Build and publish image
uses: ilteoood/docker_buildx@master
with:
publish: true
imageName: vabene1111/recipes
tag: beta
platform: linux/amd64,linux/arm64
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}
# Send discord notification
- name: Discord notification
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_BETA_WEBHOOK }}
uses: Ilshidur/action-discord@0.3.2
with:
args: '🚀 The BETA Image has been updated! 🥳'

View File

@@ -0,0 +1,42 @@
name: publish dev image docker
on:
push:
branches:
- '*'
- '*/*'
- '!master'
jobs:
build:
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
# Update Version number
- name: Update version file
uses: DamianReeves/write-file-action@v1.0
with:
path: recipes/version.py
contents: |
VERSION_NUMBER = 'develop'
BUILD_REF = '${{ github.sha }}'
write-mode: overwrite
# Build Vue frontend
- uses: actions/setup-node@v2
with:
node-version: '14'
- name: Clear Cache
working-directory: ./vue
run: yarn cache clean --all
- name: Install dependencies
working-directory: ./vue
run: yarn install
- name: Build dependencies
working-directory: ./vue
run: yarn build
# Build container
- name: Publish to Registry
uses: elgohr/Publish-Docker-Github-Action@2.13
with:
name: vabene1111/recipes
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}

View File

@@ -0,0 +1,45 @@
name: publish latest raspi image docker
on:
push:
tags:
- '*'
jobs:
build:
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: Get version number
id: get_version
run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}-raspi
# Update Version number
- name: Update version file
uses: DamianReeves/write-file-action@v1.0
with:
path: recipes/version.py
contents: |
VERSION_NUMBER = '${{ steps.get_version.outputs.VERSION }}-raspi'
BUILD_REF = '${{ github.sha }}'
write-mode: overwrite
# Build Vue frontend
- uses: actions/setup-node@v2
with:
node-version: '14'
- name: Install dependencies
working-directory: ./vue
run: yarn install
- name: Build dependencies
working-directory: ./vue
run: yarn build
# Build container
- name: Build and publish image
uses: ilteoood/docker_buildx@master
with:
publish: true
imageName: vabene1111/recipes
dockerFile: Dockerfile-raspi
platform: linux/arm/v7
tag: latest-raspi
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}

View File

@@ -0,0 +1,44 @@
name: publish latest image docker
on:
push:
tags:
- '*'
jobs:
build:
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: Get version number
id: get_version
run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}
# Update Version number
- name: Update version file
uses: DamianReeves/write-file-action@v1.0
with:
path: recipes/version.py
contents: |
VERSION_NUMBER = '${{ steps.get_version.outputs.VERSION }}'
BUILD_REF = '${{ github.sha }}'
write-mode: overwrite
# Build Vue frontend
- uses: actions/setup-node@v2
with:
node-version: '14'
- name: Install dependencies
working-directory: ./vue
run: yarn install
- name: Build dependencies
working-directory: ./vue
run: yarn build
# Build container
- name: Build and publish image
uses: ilteoood/docker_buildx@master
with:
publish: true
imageName: vabene1111/recipes
platform: linux/amd64,linux/arm64
tag: latest
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}

View File

@@ -0,0 +1,47 @@
name: publish tagged raspi release docker
on:
release:
types: [published]
jobs:
build:
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
name: Build image job
steps:
- name: Checkout master
uses: actions/checkout@master
- name: Get version number
id: get_version
run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}
# Update Version number
- name: Update version file
uses: DamianReeves/write-file-action@v1.0
with:
path: recipes/version.py
contents: |
VERSION_NUMBER = '${{ steps.get_version.outputs.VERSION }}'
BUILD_REF = '${{ github.sha }}'
write-mode: overwrite
# Build Vue frontend
- uses: actions/setup-node@v2
with:
node-version: '14'
- name: Install dependencies
working-directory: ./vue
run: yarn install
- name: Build dependencies
working-directory: ./vue
run: yarn build
# Build container
- name: Build and publish image
uses: ilteoood/docker_buildx@master
with:
publish: true
imageName: vabene1111/recipes
dockerFile: Dockerfile-raspi
platform: linux/arm/v7
tag: ${{ steps.get_version.outputs.VERSION }}-raspi
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}

View File

@@ -0,0 +1,53 @@
name: publish tagged release docker
on:
release:
types: [published]
jobs:
build:
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
name: Build image job
steps:
- name: Checkout master
uses: actions/checkout@master
- name: Get version number
id: get_version
run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}
# Update Version number
- name: Update version file
uses: DamianReeves/write-file-action@v1.0
with:
path: recipes/version.py
contents: |
VERSION_NUMBER = '${{ steps.get_version.outputs.VERSION }}'
BUILD_REF = '${{ github.sha }}'
write-mode: overwrite
# Build Vue frontend
- uses: actions/setup-node@v2
with:
node-version: '14'
- name: Install dependencies
working-directory: ./vue
run: yarn install
- name: Build dependencies
working-directory: ./vue
run: yarn build
# Build container
- name: Build and publish image
uses: ilteoood/docker_buildx@master
with:
publish: true
imageName: vabene1111/recipes
platform: linux/amd64,linux/arm64
tag: ${{ steps.get_version.outputs.VERSION }}
dockerUser: ${{ secrets.DOCKER_USERNAME }}
dockerPassword: ${{ secrets.DOCKER_PASSWORD }}
# Send discord notification
- name: Discord notification
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_RELEASE_WEBHOOK }}
uses: Ilshidur/action-discord@0.3.2
with:
args: '🚀 Version {{ EVENT_PAYLOAD.release.tag_name }} of tandoor has been released 🥳 Check it out https://github.com/vabene1111/recipes/releases/tag/{{ EVENT_PAYLOAD.release.tag_name }}'

View File

@@ -1,20 +1,17 @@
name: Make Docs
on:
# the 1st condition
workflow_run:
workflows: ["Continuous Integration"]
branches: [master]
types:
- completed
push:
branches:
- master
jobs:
deploy:
if: github.repository_owner == 'TandoorRecipes' && ${{ github.event.workflow_run.conclusion == 'success' }}
if: github.repository_owner == 'TandoorRecipes'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.x
- run: pip install mkdocs-material mkdocs-include-markdown-plugin
- run: mkdocs gh-deploy --force

.gitignore vendored
View File

@@ -43,15 +43,9 @@ htmlcov/
nosetests.xml
coverage.xml
*,cover
docs/reports/**
# Django stuff:
*.log
mediafiles/
*.sqlite3*
staticfiles/
postgresql/
data/
# Sphinx documentation
docs/_build/
@@ -60,27 +54,33 @@ docs/_build/
target/
\.idea/dataSources/
\.idea/dataSources\.xml
\.idea/dataSources\.local\.xml
venv/
mediafiles/
*.sqlite3*
\.idea/workspace\.xml
\.idea/misc\.xml
# Deployment
\.env
cookbook/static/vue
vue/webpack-stats.json
\.env
staticfiles/
postgresql/
/docker-compose.override.yml
vue/node_modules
plugins
vue3/node_modules
cookbook/templates/sw.js
vue/.yarn
vue3/.vite
# Configs
.vscode/
vetur.config.js
venv/
cookbook/static/vue
vue/webpack-stats.json
cookbook/templates/sw.js
.prettierignore

View File

@@ -1,10 +0,0 @@
<component name="ProjectDictionaryState">
<dictionary name="vaben">
<words>
<w>mealplan</w>
<w>pinia</w>
<w>selfhosted</w>
<w>unapplied</w>
</words>
</dictionary>
</component>

View File

@@ -3,6 +3,8 @@
<words>
<w>autosync</w>
<w>chowdown</w>
<w>cookingmachine</w>
<w>cookit</w>
<w>csrftoken</w>
<w>gunicorn</w>
<w>ical</w>

View File

@@ -1,6 +1,5 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="PROJECT_PROFILE" value="Default" />
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>

.idea/recipes.iml generated
View File

@@ -18,7 +18,7 @@
<excludeFolder url="file://$MODULE_DIR$/staticfiles" />
<excludeFolder url="file://$MODULE_DIR$/venv" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="jdk" jdkName="Python 3.9 (recipes)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="TemplatesService">

.idea/vcs.xml generated
View File

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>

.idea/watcherTasks.xml generated
View File

@@ -1,31 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectTasksOptions">
<TaskOptions isEnabled="false">
<option name="arguments" value="-m flake8 $FilePath$ --config $ContentRoot$\.flake8" />
<option name="checkSyntaxErrors" value="true" />
<option name="description" />
<option name="exitCodeBehavior" value="ALWAYS" />
<option name="fileExtension" value="py" />
<option name="immediateSync" value="false" />
<option name="name" value="Flake8 Watcher" />
<option name="output" value="$FilePath$" />
<option name="outputFilters">
<array>
<FilterInfo>
<option name="description" value="" />
<option name="name" value="" />
<option name="regExp" value="$FILE_PATH$:$LINE$:$COLUMN$: $MESSAGE$" />
</FilterInfo>
</array>
</option>
<option name="outputFromStdout" value="false" />
<option name="program" value="$PyInterpreterDirectory$/python" />
<option name="runOnExternalChanges" value="false" />
<option name="scopeName" value="Current File" />
<option name="trackOnlyRoot" value="false" />
<option name="workingDir" value="" />
<envs />
</TaskOptions>
</component>
</project>

View File

@@ -1,13 +0,0 @@
# generated files
api.ts
vue/src/apps/*.js
vue/node_modules
staticfiles/
docs/reports/
/vue3/src/openapi/
# ignored files - prettier interferes with django templates and github actions
*.html
*.yml
*.yaml

View File

@@ -1,7 +0,0 @@
{
"printWidth": 179,
"trailingComma": "es5",
"tabWidth": 2,
"semi": false,
"experimentalTernaries": true
}

.vscode/launch.json vendored
View File

@@ -1,33 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python Debugger: Django",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/manage.py",
"args": ["runserver"],
"django": true,
"justMyCode": true
},
{
"name": "Python: Debug Tests",
"type": "debugpy",
"request": "launch",
"program": "${file}",
"purpose": [
"debug-test"
],
"console": "integratedTerminal",
"env": {
// coverage and pytest can't both be running at the same time
"PYTEST_ADDOPTS": "--no-cov"
},
"django": true,
"justMyCode": true
},
]
}

.vscode/settings.json vendored
View File

@@ -1,14 +0,0 @@
{
"python.testing.pytestArgs": [
"cookbook/tests"
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true,
"editor.formatOnSave": true,
"editor.defaultFormatter": "esbenp.prettier-vscode",
"[python]": {
"editor.defaultFormatter": "eeyore.yapf",
},
"yapf.args": [],
"isort.args": []
}

.vscode/tasks.json vendored
View File

@@ -1,75 +0,0 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "Run Migrations",
"type": "shell",
"command": "python3 manage.py migrate",
},
{
"label": "Collect Static Files",
"type": "shell",
"command": "python3 manage.py collectstatic",
"dependsOn": ["Yarn Build"],
},
{
"label": "Setup Dev Server",
"dependsOn": ["Run Migrations", "Yarn Build"],
},
{
"label": "Run Dev Server",
"type": "shell",
"dependsOn": ["Setup Dev Server"],
"command": "python3 manage.py runserver",
},
{
"label": "Yarn Install",
"type": "shell",
"command": "yarn install",
"options": {
"cwd": "${workspaceFolder}/vue"
}
},
{
"label": "Yarn Serve",
"type": "shell",
"command": "yarn serve",
"dependsOn": ["Yarn Install"],
"options": {
"cwd": "${workspaceFolder}/vue"
}
},
{
"label": "Yarn Build",
"type": "shell",
"command": "yarn build",
"dependsOn": ["Yarn Install"],
"options": {
"cwd": "${workspaceFolder}/vue"
},
"group": "build",
},
{
"label": "Setup Tests",
"dependsOn": ["Run Migrations", "Collect Static Files"],
},
{
"label": "Run all pytests",
"type": "shell",
"command": "python3 -m pytest cookbook/tests",
"dependsOn": ["Setup Tests"],
"group": "test",
},
{
"label": "Setup Documentation Dependencies",
"type": "shell",
"command": "pip install mkdocs-material mkdocs-include-markdown-plugin",
},
{
"label": "Serve Documentation",
"type": "shell",
"command": "mkdocs serve",
"dependsOn": ["Setup Documentation Dependencies"],
}
]
}

View File

@@ -20,7 +20,6 @@ Below are some of the larger contributions made so far.
- [murphy83] added support for IPv6 #1490
- [TheHaf] added custom serving size component #1411
- [lostlont] added LDAP support #960
- [c0mputerguru] added devcontainers for ease of development
## Translations

View File

@@ -1,13 +1,11 @@
FROM python:3.12-alpine3.19
FROM python:3.10-alpine3.15
#Install all dependencies.
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev openldap git
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev py-cryptography openldap
#Print all logs without buffering it.
ENV PYTHONUNBUFFERED 1
ENV DOCKER true
#This port will be used by gunicorn.
EXPOSE 8080
@@ -17,28 +15,16 @@ WORKDIR /opt/recipes
COPY requirements.txt ./
RUN \
if [ `apk --print-arch` = "armv7" ]; then \
printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf ; \
fi
# remove Development dependencies from requirements.txt
RUN sed -i '/# Development/,$d' requirements.txt
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev && \
RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev git && \
echo -n "INPUT ( libldap.so )" > /usr/lib/libldap_r.so && \
python -m venv venv && \
/opt/recipes/venv/bin/python -m pip install --upgrade pip && \
venv/bin/pip install wheel==0.42.0 && \
venv/bin/pip install setuptools_rust==1.9.0 && \
venv/bin/pip install wheel==0.37.1 && \
venv/bin/pip install setuptools_rust==1.1.2 && \
venv/bin/pip install -r requirements.txt --no-cache-dir &&\
apk --purge del .build-deps
#Copy project and execute it.
COPY . ./
# collect information from git repositories
RUN /opt/recipes/venv/bin/python version.py
# delete git repositories to reduce image size
RUN find . -type d -name ".git" | xargs rm -rf
RUN chmod +x boot.sh
ENTRYPOINT ["/opt/recipes/boot.sh"]

Dockerfile-raspi Normal file
View File

@@ -0,0 +1,33 @@
# builds of cryptography for raspberry pi (or better arm v7) fail for some reason
FROM python:3.9-alpine3.15
#Install all dependencies.
RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev py-cryptography openldap gcompat
#Print all logs without buffering it.
ENV PYTHONUNBUFFERED 1
#This port will be used by gunicorn.
EXPOSE 8080
#Create app dir and install requirements.
RUN mkdir /opt/recipes
WORKDIR /opt/recipes
COPY requirements.txt ./
RUN \
if [ `apk --print-arch` = "armv7" ]; then \
printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf ; \
fi
RUN apk add --no-cache --virtual .build-deps gcc musl-dev zlib-dev jpeg-dev libwebp-dev python3-dev git && \
echo -n "INPUT ( libldap.so )" > /usr/lib/libldap_r.so && \
python -m venv venv && \
/opt/recipes/venv/bin/python -m pip install --upgrade pip && \
venv/bin/pip install wheel==0.37.1 && \
venv/bin/pip install -r requirements.txt --no-cache-dir --no-binary=Pillow && \
apk --purge del .build-deps
#Copy project and execute it.
COPY . ./
RUN chmod +x boot.sh
ENTRYPOINT ["/opt/recipes/boot.sh"]

View File

@@ -39,13 +39,13 @@
- 🔍 Powerful & customizable **search** with fulltext support and [TrigramSimilarity](https://docs.djangoproject.com/en/3.0/ref/contrib/postgres/search/#trigram-similarity)
- 🏷️ Create and search for **tags**, assign them in batch to all files matching certain filters
- ↔️ Quickly merge and rename ingredients, tags and units
- 📥️ **Import recipes** from thousands of websites supporting [ld+json or microdata](https://schema.org/Recipe)
- ➗ Support for **fractions** or decimals
- 🐳 Easy setup with **Docker** and included examples for **Kubernetes**, **Unraid** and **Synology**
- 🎨 Customize your interface with **themes**
- 📦 **Sync** files with Dropbox and Nextcloud
## All the must haves
- 📱Optimized for use on **mobile** devices
@@ -54,7 +54,7 @@
- Many more like recipe scaling, image compression, printing views and supermarkets
This application is meant for people with a collection of recipes they want to share with family and friends or simply
store them in a nicely organized way. A basic permission system exists but this application is not meant to be run as
a public page.
## Docs
@@ -62,16 +62,16 @@ a public page.
Documentation can be found [here](https://docs.tandoor.dev/).
## Support our work
Tandoor is developed by volunteers in their free time just because it's fun. That said, earning
some money with the project allows us to spend more time on it and thus make improvements we otherwise couldn't.
Because of that there are several ways you can support us
- **GitHub Sponsors** You can sponsor contributors of this project on GitHub: [vabene1111](https://github.com/sponsors/vabene1111)
- **Host at Hetzner** We have been very happy customers of Hetzner for multiple years for all of our projects. If you want to get into self-hosting or are tired of the expensive big providers, their cloud servers are a great place to get started. When you sign up via our [referral link](https://hetzner.cloud/?ref=ISdlrLmr9kGj) you will get 20€ worth of cloud credits and we get a small kickback too.
- **Let us host for you** We are offering a [hosted version](https://app.tandoor.dev) where all profits support us and the development of tandoor (currently only available in Germany).
## Contributing
Contributions are welcome but please read [this](https://docs.tandoor.dev/contribute/guidelines/) **BEFORE** contributing anything!
Contributions are welcome but please read [this](https://docs.tandoor.dev/contribute/#contributing-code) **BEFORE** contributing anything!
## Your Feedback
@@ -96,11 +96,11 @@ Share some information on how you use Tandoor to help me improve the application
Beginning with version 0.10.0 the code in this repository is licensed under the [GNU AGPL v3](https://www.gnu.org/licenses/agpl-3.0.de.html) license with a
[common clause](https://commonsclause.com/) selling exception. See [LICENSE.md](https://github.com/vabene1111/recipes/blob/develop/LICENSE.md) for details.
> NOTE: There appears to be a whole range of legal issues with licensing anything other than the standard completely open licenses.
> NOTE: There appears to be a whole range of legal issues with licensing anything else then the standard completely open licenses.
> I am in the process of getting some professional legal advice to sort out these issues.
> Please also see [Issue 238](https://github.com/vabene1111/recipes/issues/238) for some discussion and **reasoning** regarding the topic.
**Reasoning**
**This software and *all* its features are and will always be free for everyone to use and enjoy.**
The reason for the selling exception is that a significant amount of time was spent over multiple years to develop this software.

View File

@@ -6,4 +6,5 @@ Since this software is still considered beta/WIP support is always only given fo
## Reporting a Vulnerability
Please use GitHub Security Advisories to report any kind of security vulnerabilities.
Please open a normal public issue if you have any security related concerns. If you feel like the issue should not be discussed in
public just open a generic issue and we will discuss further communication there (since GitHub does not allow everyone to create a security advisory :/).

boot.sh
View File

@@ -4,7 +4,6 @@ source venv/bin/activate
TANDOOR_PORT="${TANDOOR_PORT:-8080}"
GUNICORN_WORKERS="${GUNICORN_WORKERS:-3}"
GUNICORN_THREADS="${GUNICORN_THREADS:-2}"
GUNICORN_LOG_LEVEL="${GUNICORN_LOG_LEVEL:-'info'}"
NGINX_CONF_FILE=/opt/recipes/nginx/conf.d/Recipes.conf
display_warning() {
@@ -19,14 +18,9 @@ if [ ! -f "$NGINX_CONF_FILE" ] && [ $GUNICORN_MEDIA -eq 0 ]; then
display_warning "Nginx configuration file could not be found at the default location!\nPath: ${NGINX_CONF_FILE}"
fi
# SECRET_KEY (or a valid file at SECRET_KEY_FILE) must be set in .env file
if [ -f "${SECRET_KEY_FILE}" ]; then
export SECRET_KEY=$(cat "$SECRET_KEY_FILE")
fi
# SECRET_KEY must be set in .env file
if [ -z "${SECRET_KEY}" ]; then
display_warning "The environment variable 'SECRET_KEY' (or 'SECRET_KEY_FILE' that points to an existing file) is not set but REQUIRED for running Tandoor!"
display_warning "The environment variable 'SECRET_KEY' is not set but REQUIRED for running Tandoor!"
fi
@@ -35,16 +29,11 @@ echo "Waiting for database to be ready..."
attempt=0
max_attempts=20
if [ "${DB_ENGINE}" == 'django.db.backends.postgresql' ] || [ "${DATABASE_URL}" == 'postgres'* ]; then
# POSTGRES_PASSWORD (or a valid file at POSTGRES_PASSWORD_FILE) must be set in .env file
if [ -f "${POSTGRES_PASSWORD_FILE}" ]; then
export POSTGRES_PASSWORD=$(cat "$POSTGRES_PASSWORD_FILE")
fi
if [ "${DB_ENGINE}" != 'django.db.backends.sqlite3' ]; then
# POSTGRES_PASSWORD must be set in .env file
if [ -z "${POSTGRES_PASSWORD}" ]; then
display_warning "The environment variable 'POSTGRES_PASSWORD' (or 'POSTGRES_PASSWORD_FILE' that points to an existing file) is not set but REQUIRED for running Tandoor!"
display_warning "The environment variable 'POSTGRES_PASSWORD' is not set but REQUIRED for running Tandoor!"
fi
while pg_isready --host=${POSTGRES_HOST} --port=${POSTGRES_PORT} --user=${POSTGRES_USER} -q; status=$?; attempt=$((attempt+1)); [ $status -ne 0 ] && [ $attempt -le $max_attempts ]; do
@@ -67,7 +56,7 @@ echo "Migrating database"
python manage.py migrate
echo "Collecting static files, this may take a while..."
echo "Generating static files"
python manage.py collectstatic_js_reverse
python manage.py collectstatic --noinput
@@ -76,11 +65,4 @@ echo "Done"
chmod -R 755 /opt/recipes/mediafiles
ipv6_disable=$(cat /sys/module/ipv6/parameters/disable)
# Check if IPv6 is enabled, only then run gunicorn with ipv6 support
if [ "$ipv6_disable" -eq 0 ]; then
exec gunicorn -b "[::]:$TANDOOR_PORT" --workers $GUNICORN_WORKERS --threads $GUNICORN_THREADS --access-logfile - --error-logfile - --log-level $GUNICORN_LOG_LEVEL recipes.wsgi
else
exec gunicorn -b ":$TANDOOR_PORT" --workers $GUNICORN_WORKERS --threads $GUNICORN_THREADS --access-logfile - --error-logfile - --log-level $GUNICORN_LOG_LEVEL recipes.wsgi
fi
exec gunicorn -b :$TANDOOR_PORT --workers $GUNICORN_WORKERS --threads $GUNICORN_THREADS --access-logfile - --error-logfile - --log-level INFO recipes.wsgi
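The SECRET_KEY_FILE / POSTGRES_PASSWORD_FILE handling shown in this hunk follows the common Docker secrets pattern: a variable either holds the value directly, or a companion *_FILE variable points at a file containing it. A minimal Python sketch of the same idea (hypothetical helper, not code from the repository):

import os

def read_secret(name):
    # return NAME from the environment, or the contents of the file
    # referenced by NAME_FILE if that variable is set instead
    file_path = os.getenv(f"{name}_FILE")
    if file_path and os.path.isfile(file_path):
        with open(file_path) as f:
            return f.read().strip()
    return os.getenv(name)

SECRET_KEY = read_secret("SECRET_KEY")
POSTGRES_PASSWORD = read_secret("POSTGRES_PASSWORD")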

View File

@@ -10,13 +10,12 @@ from treebeard.forms import movenodeform_factory
from cookbook.managers import DICTIONARY
from .models import (BookmarkletImport, Comment, CookLog, Food, ImportLog, Ingredient, InviteLink,
Keyword, MealPlan, MealType, NutritionInformation, Property, PropertyType,
from .models import (BookmarkletImport, Comment, CookLog, Food, FoodInheritField, ImportLog,
Ingredient, InviteLink, Keyword, MealPlan, MealType, NutritionInformation,
Recipe, RecipeBook, RecipeBookEntry, RecipeImport, SearchPreference, ShareLink,
ShoppingListEntry, ShoppingListRecipe, Space, Step, Storage,
ShoppingList, ShoppingListEntry, ShoppingListRecipe, Space, Step, Storage,
Supermarket, SupermarketCategory, SupermarketCategoryRelation, Sync, SyncLog,
TelegramBot, Unit, UnitConversion, UserFile, UserPreference, UserSpace,
ViewLog, ConnectorConfig)
TelegramBot, Unit, UserFile, UserPreference, ViewLog, Automation, UserSpace)
class CustomUserAdmin(UserAdmin):
@@ -33,14 +32,12 @@ admin.site.unregister(Group)
@admin.action(description='Delete all data from a space')
def delete_space_action(modeladmin, request, queryset):
for space in queryset:
space.safe_delete()
space.save()
class SpaceAdmin(admin.ModelAdmin):
list_display = ('name', 'created_by', 'max_recipes', 'max_users', 'max_file_storage_mb', 'allow_sharing')
search_fields = ('name', 'created_by__username')
autocomplete_fields = ('created_by',)
filter_horizontal = ('food_inherit',)
list_filter = ('max_recipes', 'max_users', 'max_file_storage_mb', 'allow_sharing')
date_hierarchy = 'created_at'
actions = [delete_space_action]
@@ -52,19 +49,16 @@ admin.site.register(Space, SpaceAdmin)
class UserSpaceAdmin(admin.ModelAdmin):
list_display = ('user', 'space',)
search_fields = ('user__username', 'space__name',)
filter_horizontal = ('groups',)
autocomplete_fields = ('user', 'space',)
admin.site.register(UserSpace, UserSpaceAdmin)
class UserPreferenceAdmin(admin.ModelAdmin):
list_display = ('name', 'theme', 'default_page')
list_display = ('name', 'theme', 'nav_color', 'default_page',)
search_fields = ('user__username',)
list_filter = ('theme', 'default_page',)
list_filter = ('theme', 'nav_color', 'default_page',)
date_hierarchy = 'created_at'
filter_horizontal = ('plan_share', 'shopping_share',)
@staticmethod
def name(obj):
@@ -95,14 +89,6 @@ class StorageAdmin(admin.ModelAdmin):
admin.site.register(Storage, StorageAdmin)
class ConnectorConfigAdmin(admin.ModelAdmin):
list_display = ('id', 'name', 'type', 'enabled', 'url')
search_fields = ('name', 'url')
admin.site.register(ConnectorConfig, ConnectorConfigAdmin)
class SyncAdmin(admin.ModelAdmin):
list_display = ('storage', 'path', 'active', 'last_checked')
search_fields = ('storage__name', 'path')
@@ -116,16 +102,11 @@ class SupermarketCategoryInline(admin.TabularInline):
class SupermarketAdmin(admin.ModelAdmin):
list_display = ('name', 'space',)
inlines = (SupermarketCategoryInline,)
class SupermarketCategoryAdmin(admin.ModelAdmin):
list_display = ('name', 'space',)
admin.site.register(Supermarket, SupermarketAdmin)
admin.site.register(SupermarketCategory, SupermarketCategoryAdmin)
admin.site.register(SupermarketCategory)
class SyncLogAdmin(admin.ModelAdmin):
@@ -169,24 +150,9 @@ class KeywordAdmin(TreeAdmin):
admin.site.register(Keyword, KeywordAdmin)
@admin.action(description='Delete Steps not part of a Recipe.')
def delete_unattached_steps(modeladmin, request, queryset):
with scopes_disabled():
Step.objects.filter(recipe=None).delete()
class StepAdmin(admin.ModelAdmin):
list_display = ('recipe_and_name', 'order', 'space')
ordering = ('recipe__name', 'name', 'space',)
search_fields = ('name', 'recipe__name')
actions = [delete_unattached_steps]
@staticmethod
@admin.display(description="Name")
def recipe_and_name(obj):
if not obj.recipe_set.exists():
return "Orphaned Step" + ('' if not obj.name else f': {obj.name}')
return f"{obj.recipe_set.first().name}: {obj.name}" if obj.name else obj.recipe_set.first().name
list_display = ('name', 'order',)
search_fields = ('name',)
admin.site.register(Step, StepAdmin)
@@ -204,9 +170,8 @@ def rebuild_index(modeladmin, request, queryset):
class RecipeAdmin(admin.ModelAdmin):
list_display = ('name', 'internal', 'created_by', 'storage', 'space')
list_display = ('name', 'internal', 'created_by', 'storage')
search_fields = ('name', 'created_by__username')
ordering = ('name', 'created_by__username',)
list_filter = ('internal',)
date_hierarchy = 'created_at'
@@ -214,20 +179,13 @@ class RecipeAdmin(admin.ModelAdmin):
def created_by(obj):
return obj.created_by.get_user_display_name()
if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.postgresql':
if settings.DATABASES['default']['ENGINE'] in ['django.db.backends.postgresql_psycopg2', 'django.db.backends.postgresql']:
actions = [rebuild_index]
admin.site.register(Recipe, RecipeAdmin)
class UnitAdmin(admin.ModelAdmin):
list_display = ('name', 'space')
ordering = ('name', 'space',)
search_fields = ('name',)
admin.site.register(Unit, UnitAdmin)
admin.site.register(Unit)
# admin.site.register(FoodInheritField)
@@ -243,30 +201,9 @@ class FoodAdmin(TreeAdmin):
admin.site.register(Food, FoodAdmin)
class UnitConversionAdmin(admin.ModelAdmin):
list_display = ('base_amount', 'base_unit', 'food', 'converted_amount', 'converted_unit')
search_fields = ('food__name', 'unit__name')
admin.site.register(UnitConversion, UnitConversionAdmin)
@admin.action(description='Delete Ingredients not part of a Recipe.')
def delete_unattached_ingredients(modeladmin, request, queryset):
with scopes_disabled():
Ingredient.objects.filter(step__recipe=None).delete()
class IngredientAdmin(admin.ModelAdmin):
list_display = ('recipe_name', 'amount', 'unit', 'food', 'space')
search_fields = ('food__name', 'unit__name', 'step__recipe__name')
actions = [delete_unattached_ingredients]
@staticmethod
@admin.display(description="Recipe")
def recipe_name(obj):
recipes = obj.step_set.first().recipe_set.all() if obj.step_set.exists() else None
return recipes.first().name if recipes else 'Orphaned Ingredient'
list_display = ('food', 'amount', 'unit')
search_fields = ('food__name', 'unit__name')
admin.site.register(Ingredient, IngredientAdmin)
@@ -293,7 +230,7 @@ admin.site.register(RecipeImport, RecipeImportAdmin)
class RecipeBookAdmin(admin.ModelAdmin):
list_display = ('name', 'user_name', 'space')
list_display = ('name', 'user_name')
search_fields = ('name', 'created_by__username')
@staticmethod
@@ -312,7 +249,7 @@ admin.site.register(RecipeBookEntry, RecipeBookEntryAdmin)
class MealPlanAdmin(admin.ModelAdmin):
list_display = ('user', 'recipe', 'meal_type', 'from_date', 'to_date')
list_display = ('user', 'recipe', 'meal_type', 'date')
@staticmethod
def user(obj):
@@ -323,8 +260,8 @@ admin.site.register(MealPlan, MealPlanAdmin)
class MealTypeAdmin(admin.ModelAdmin):
list_display = ('name', 'space', 'created_by', 'order')
search_fields = ('name', 'space', 'created_by__username')
list_display = ('name', 'created_by', 'order')
search_fields = ('name', 'created_by__username')
admin.site.register(MealType, MealTypeAdmin)
@@ -349,7 +286,6 @@ admin.site.register(InviteLink, InviteLinkAdmin)
class CookLogAdmin(admin.ModelAdmin):
list_display = ('recipe', 'created_by', 'created_at', 'rating', 'servings')
search_fields = ('recipe__name', 'space__name',)
admin.site.register(CookLog, CookLogAdmin)
@@ -369,6 +305,13 @@ class ShoppingListEntryAdmin(admin.ModelAdmin):
admin.site.register(ShoppingListEntry, ShoppingListEntryAdmin)
class ShoppingListAdmin(admin.ModelAdmin):
list_display = ('id', 'created_by', 'created_at')
admin.site.register(ShoppingList, ShoppingListAdmin)
class ShareLinkAdmin(admin.ModelAdmin):
list_display = ('recipe', 'created_by', 'uuid', 'created_at',)
@@ -376,29 +319,6 @@ class ShareLinkAdmin(admin.ModelAdmin):
admin.site.register(ShareLink, ShareLinkAdmin)
@admin.action(description='Delete all properties with type')
def delete_properties_with_type(modeladmin, request, queryset):
for pt in queryset:
Property.objects.filter(property_type=pt).delete()
class PropertyTypeAdmin(admin.ModelAdmin):
search_fields = ('name',)
list_display = ('id', 'space', 'name', 'fdc_id')
actions = [delete_properties_with_type]
admin.site.register(PropertyType, PropertyTypeAdmin)
class PropertyAdmin(admin.ModelAdmin):
list_display = ('property_amount', 'property_type')
admin.site.register(Property, PropertyAdmin)
class NutritionInformationAdmin(admin.ModelAdmin):
list_display = ('id',)

View File

@@ -3,7 +3,6 @@ import traceback
from django.apps import AppConfig
from django.conf import settings
from django.db import OperationalError, ProgrammingError
from django.db.models.signals import post_save, post_delete
from django_scopes import scopes_disabled
from recipes.settings import DEBUG
@@ -15,16 +14,6 @@ class CookbookConfig(AppConfig):
def ready(self):
import cookbook.signals # noqa
if not settings.DISABLE_EXTERNAL_CONNECTORS:
try:
from cookbook.connectors.connector_manager import ConnectorManager # Needs to be here to prevent loading race condition of oauth2 modules in models.py
handler = ConnectorManager()
post_save.connect(handler, dispatch_uid="connector_manager")
post_delete.connect(handler, dispatch_uid="connector_manager")
except Exception as e:
traceback.print_exc()
print('Failed to initialize connectors')
pass
# if not settings.DISABLE_TREE_FIX_STARTUP:
# # when starting up run fix_tree to:
# # a) make sure that nodes are sorted when switching between sort modes

View File

@@ -1,29 +0,0 @@
from abc import ABC, abstractmethod
from cookbook.models import ShoppingListEntry, Space, ConnectorConfig
# A Connector is 'destroyed' & recreated each time 'any' ConnectorConfig in a space changes.
class Connector(ABC):
@abstractmethod
def __init__(self, config: ConnectorConfig):
pass
@abstractmethod
async def on_shopping_list_entry_created(self, space: Space, instance: ShoppingListEntry) -> None:
pass
# This method might not trigger on 'direct' entry updates: https://stackoverflow.com/a/35238823
@abstractmethod
async def on_shopping_list_entry_updated(self, space: Space, instance: ShoppingListEntry) -> None:
pass
@abstractmethod
async def on_shopping_list_entry_deleted(self, space: Space, instance: ShoppingListEntry) -> None:
pass
@abstractmethod
async def close(self) -> None:
pass
# TODO: Add Recipes & possibly Meal Plan listeners/hooks (And maybe more?)
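For orientation, here is a minimal sketch of what an implementation of this interface could look like (a hypothetical logging-only connector, not part of this commit):
import logging
from cookbook.connectors.connector import Connector
from cookbook.models import ConnectorConfig, ShoppingListEntry, Space
class LoggingConnector(Connector):
    # Hypothetical connector that only logs shopping list events.
    def __init__(self, config: ConnectorConfig):
        self._logger = logging.getLogger(f"recipes.connector.logging.{config.name}")
    async def on_shopping_list_entry_created(self, space: Space, instance: ShoppingListEntry) -> None:
        self._logger.info("created entry %s in space %s", instance.pk, space.pk)
    async def on_shopping_list_entry_updated(self, space: Space, instance: ShoppingListEntry) -> None:
        self._logger.info("updated entry %s in space %s", instance.pk, space.pk)
    async def on_shopping_list_entry_deleted(self, space: Space, instance: ShoppingListEntry) -> None:
        self._logger.info("deleted entry %s in space %s", instance.pk, space.pk)
    async def close(self) -> None:
        pass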

View File

@@ -1,188 +0,0 @@
import asyncio
import logging
import queue
import threading
from asyncio import Task
from dataclasses import dataclass
from enum import Enum
from logging import Logger
from types import UnionType
from typing import List, Any, Dict, Optional, Type
from django.conf import settings
from django_scopes import scope
from cookbook.connectors.connector import Connector
from cookbook.connectors.homeassistant import HomeAssistant
from cookbook.models import ShoppingListEntry, Space, ConnectorConfig
REGISTERED_CLASSES: UnionType | Type = ShoppingListEntry
class ActionType(Enum):
CREATED = 1
UPDATED = 2
DELETED = 3
@dataclass
class Work:
instance: REGISTERED_CLASSES | ConnectorConfig
actionType: ActionType
# The way ConnectionManager works is as follows:
# 1. On init, it starts a worker & creates a queue for 'Work'
# 2. Then any time it's called, it verifies the type of action (create/update/delete) and, if the item is of interest, pushes the Work (non-blocking) to the queue.
# 3. The worker consumes said work from the queue.
# 3.1 If the work is of type ConnectorConfig, it flushes its cache of known connectors (per space.id)
# 3.2 If work is of type REGISTERED_CLASSES, it asynchronously fires off all connectors and waits for them to finish (runtime should depend on the 'slowest' connector)
# 4. Work is marked as consumed, and the next entry of the queue is consumed.
# Each 'Work' is processed sequentially by the worker, so the throughput is about [workers * the slowest connector]
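The queue/worker hand-off described above boils down to a standard producer/consumer pattern; a generic standalone sketch (not the Tandoor implementation) looks roughly like this:
import queue
import threading
work_queue = queue.Queue(maxsize=100)
def worker(worker_queue: queue.Queue):
    while True:
        item = worker_queue.get()       # blocks until work arrives
        if item is None:                # sentinel value shuts the worker down
            worker_queue.task_done()
            break
        print(f"processing {item}")     # here the real worker would run the connectors
        worker_queue.task_done()        # mark the work as consumed
threading.Thread(target=worker, args=(work_queue,), daemon=True).start()
try:
    work_queue.put_nowait("some work")  # non-blocking, like __call__ below
except queue.Full:
    pass                                # work is simply dropped when the queue is full
work_queue.put(None)                    # ask the worker to stop
work_queue.join()                       # wait until everything has been consumed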
class ConnectorManager:
_logger: Logger
_queue: queue.Queue
_listening_to_classes = REGISTERED_CLASSES | ConnectorConfig
def __init__(self):
self._logger = logging.getLogger("recipes.connector")
self._queue = queue.Queue(maxsize=settings.EXTERNAL_CONNECTORS_QUEUE_SIZE)
self._worker = threading.Thread(target=self.worker, args=(0, self._queue,), daemon=True)
self._worker.start()
# Called by post save & post delete signals
def __call__(self, instance: Any, **kwargs) -> None:
if not isinstance(instance, self._listening_to_classes) or not hasattr(instance, "space"):
return
action_type: ActionType
if "created" in kwargs and kwargs["created"]:
action_type = ActionType.CREATED
elif "created" in kwargs and not kwargs["created"]:
action_type = ActionType.UPDATED
elif "origin" in kwargs:
action_type = ActionType.DELETED
else:
return
try:
self._queue.put_nowait(Work(instance, action_type))
except queue.Full:
self._logger.info(f"queue was full, so skipping {action_type} of type {type(instance)}")
return
def stop(self):
self._queue.join()
self._worker.join()
@staticmethod
def worker(worker_id: int, worker_queue: queue.Queue):
logger = logging.getLogger("recipes.connector.worker")
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
logger.info(f"started ConnectionManager worker {worker_id}")
# When multiple workers are used, please make sure the cache is shared across all threads, otherwise it might lead to unexpected behavior.
_connectors_cache: Dict[int, List[Connector]] = dict()
while True:
try:
item: Optional[Work] = worker_queue.get()
except KeyboardInterrupt:
break
if item is None:
break
logger.debug(f"received {item.instance=} with {item.actionType=}")
# If a Connector was changed/updated, refresh the connectors from the database for said space
refresh_connector_cache = isinstance(item.instance, ConnectorConfig)
space: Space = item.instance.space
connectors: Optional[List[Connector]] = _connectors_cache.get(space.id)
if connectors is None or refresh_connector_cache:
if connectors is not None:
loop.run_until_complete(close_connectors(connectors))
with scope(space=space):
connectors: List[Connector] = list()
for config in space.connectorconfig_set.all():
config: ConnectorConfig = config
if not config.enabled:
continue
try:
connector: Optional[Connector] = ConnectorManager.get_connected_for_config(config)
except BaseException:
logger.exception(f"failed to initialize {config.name}")
continue
if connector is not None:
connectors.append(connector)
_connectors_cache[space.id] = connectors
if len(connectors) == 0 or refresh_connector_cache:
worker_queue.task_done()
continue
logger.debug(f"running {len(connectors)} connectors for {item.instance=} with {item.actionType=}")
loop.run_until_complete(run_connectors(connectors, space, item.instance, item.actionType))
worker_queue.task_done()
logger.info(f"terminating ConnectionManager worker {worker_id}")
asyncio.set_event_loop(None)
loop.close()
@staticmethod
def get_connected_for_config(config: ConnectorConfig) -> Optional[Connector]:
match config.type:
case ConnectorConfig.HOMEASSISTANT:
return HomeAssistant(config)
case _:
return None
async def close_connectors(connectors: List[Connector]):
tasks: List[Task] = [asyncio.create_task(connector.close()) for connector in connectors]
if len(tasks) == 0:
return
try:
await asyncio.gather(*tasks, return_exceptions=False)
except BaseException:
logging.exception("received an exception while closing one of the connectors")
async def run_connectors(connectors: List[Connector], space: Space, instance: REGISTERED_CLASSES, action_type: ActionType):
tasks: List[Task] = list()
if isinstance(instance, ShoppingListEntry):
shopping_list_entry: ShoppingListEntry = instance
match action_type:
case ActionType.CREATED:
for connector in connectors:
tasks.append(asyncio.create_task(connector.on_shopping_list_entry_created(space, shopping_list_entry)))
case ActionType.UPDATED:
for connector in connectors:
tasks.append(asyncio.create_task(connector.on_shopping_list_entry_updated(space, shopping_list_entry)))
case ActionType.DELETED:
for connector in connectors:
tasks.append(asyncio.create_task(connector.on_shopping_list_entry_deleted(space, shopping_list_entry)))
if len(tasks) == 0:
return
try:
# Wait for all async tasks to finish, if one fails, the others still continue.
await asyncio.gather(*tasks, return_exceptions=False)
except BaseException:
logging.exception("received an exception from one of the connectors")

View File

@@ -1,104 +0,0 @@
import logging
from logging import Logger
from typing import Dict, Tuple
from urllib.parse import urljoin
from aiohttp import ClientError, request
from cookbook.connectors.connector import Connector
from cookbook.models import ShoppingListEntry, ConnectorConfig, Space
class HomeAssistant(Connector):
_config: ConnectorConfig
_logger: Logger
def __init__(self, config: ConnectorConfig):
if not config.token or not config.url or not config.todo_entity:
raise ValueError("config for HomeAssistantConnector in incomplete")
self._logger = logging.getLogger(f"recipes.connector.homeassistant.{config.name}")
if config.url[-1] != "/":
config.url += "/"
self._config = config
async def homeassistant_api_call(self, method: str, path: str, data: Dict) -> str:
headers = {
"Authorization": f"Bearer {self._config.token}",
"Content-Type": "application/json"
}
async with request(method, urljoin(self._config.url, path), headers=headers, json=data) as response:
response.raise_for_status()
return await response.json()
async def on_shopping_list_entry_created(self, space: Space, shopping_list_entry: ShoppingListEntry) -> None:
if not self._config.on_shopping_list_entry_created_enabled:
return
item, description = _format_shopping_list_entry(shopping_list_entry)
self._logger.debug(f"adding {item=}")
data = {
"entity_id": self._config.todo_entity,
"item": item,
"description": description,
}
try:
await self.homeassistant_api_call("POST", "services/todo/add_item", data)
except ClientError as err:
self._logger.warning(f"received an exception from the api: {err=}, {type(err)=}")
async def on_shopping_list_entry_updated(self, space: Space, shopping_list_entry: ShoppingListEntry) -> None:
if not self._config.on_shopping_list_entry_updated_enabled:
return
pass
async def on_shopping_list_entry_deleted(self, space: Space, shopping_list_entry: ShoppingListEntry) -> None:
if not self._config.on_shopping_list_entry_deleted_enabled:
return
if not hasattr(shopping_list_entry._state.fields_cache, "food"):
# Sometimes the food foreign key is not loaded, and we can't load it from an async process
self._logger.debug("required property was not present in ShoppingListEntry")
return
item, _ = _format_shopping_list_entry(shopping_list_entry)
self._logger.debug(f"removing {item=}")
data = {
"entity_id": self._config.todo_entity,
"item": item,
}
try:
await self.homeassistant_api_call("POST", "services/todo/remove_item", data)
except ClientError as err:
# This error will always trigger if the item is not present/found
self._logger.debug(f"received an exception from the api: {err=}, {type(err)=}")
async def close(self) -> None:
pass
def _format_shopping_list_entry(shopping_list_entry: ShoppingListEntry) -> Tuple[str, str]:
item = shopping_list_entry.food.name
if shopping_list_entry.amount > 0:
item += f" ({shopping_list_entry.amount:.2f}".rstrip('0').rstrip('.')
if shopping_list_entry.unit and shopping_list_entry.unit.base_unit and len(shopping_list_entry.unit.base_unit) > 0:
item += f" {shopping_list_entry.unit.base_unit})"
elif shopping_list_entry.unit and shopping_list_entry.unit.name and len(shopping_list_entry.unit.name) > 0:
item += f" {shopping_list_entry.unit.name})"
else:
item += ")"
description = "From TandoorRecipes"
if shopping_list_entry.created_by.first_name and len(shopping_list_entry.created_by.first_name) > 0:
description += f", by {shopping_list_entry.created_by.first_name}"
else:
description += f", by {shopping_list_entry.created_by.username}"
return item, description
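The API helper above is essentially a bearer-authenticated POST against Home Assistant's todo service; a standalone sketch with placeholder URL, token and entity id:
import asyncio
from urllib.parse import urljoin
from aiohttp import request
async def add_todo_item(base_url: str, token: str, entity_id: str, item: str) -> None:
    # mirrors homeassistant_api_call above; all values passed in below are placeholders
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
    }
    data = {"entity_id": entity_id, "item": item}
    async with request("POST", urljoin(base_url, "services/todo/add_item"), headers=headers, json=data) as response:
        response.raise_for_status()
asyncio.run(add_todo_item("http://homeassistant.local:8123/api/", "LONG_LIVED_TOKEN", "todo.shopping_list", "Milk (2 pcs)"))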

View File

@@ -0,0 +1,116 @@
import json
import random
from datetime import timedelta
import requests
from django.utils.timezone import now
from cookbook.models import CookingMachine
# Tandoor is not affiliated in any way or form with the holders of trademarks or other rights associated with the mentioned names. All mentioned protected names are used purely to identify a certain device or integration to the user.
class HomeConnectCookit:
AUTH_AUTHORIZE_URL = 'https://api.home-connect.com/security/oauth/authorize'
AUTH_TOKEN_URL = 'https://api.home-connect.com/security/oauth/token'
AUTH_REFRESH_URL = 'https://api.home-connect.com/security/oauth/token'
RECIPE_API_URL = 'https://prod.reu.rest.homeconnectegw.com/user-generated-recipes/server/api/v1/recipes'
IMAGE_API_URL = 'https://prod.reu.rest.homeconnectegw.com/user-generated-recipes/server/api/v1/images'
CLIENT_ID = '' # TODO load from .env settings
_CLIENT_SECRET = '' # TODO load from .env settings
_cooking_machine = None
def __init__(self, cooking_machine):
self._cooking_machine = cooking_machine
def get_auth_link(self):
return f"{self.AUTH_AUTHORIZE_URL}?client_id={self.CLIENT_ID}&response_type=code&scope=IdentifyAppliance%20Settings&state={random.randint(100000, 999999)}"
def _validate_token(self):
if self._cooking_machine.access_token is None and self._cooking_machine.refresh_token is None:
return False # user needs to login
elif self._cooking_machine.access_token_expiry < now() + timedelta(minutes=10):
return False # refresh token
def _refresh_access_token(self):
token_response = requests.post(self.AUTH_REFRESH_URL, {
'grant_type': 'refresh_token',
'refresh_token': self._cooking_machine.refresh_token,
'client_secret': self._CLIENT_SECRET,
})
if token_response.status_code == 200:
token_response_body = json.loads(token_response.content)
self._cooking_machine.access_token = token_response_body['access_token']
self._cooking_machine.access_token_expiry = now() + timedelta(seconds=(token_response_body['expires_in'] - (60 * 10)))
self._cooking_machine.refresh_token = token_response_body['refresh_token']
self._cooking_machine.refresh_token_expiry = now() + timedelta(days=58)
self._cooking_machine.save()
def get_access_token(self, code):
token_response = requests.post(self.AUTH_TOKEN_URL, {
'grant_type': 'authorization_code',
'code': code,
'client_id': self.CLIENT_ID,
'client_secret': self._CLIENT_SECRET,
})
if token_response.status_code == 200:
token_response_body = json.loads(token_response.content)
self._cooking_machine.access_token = token_response_body['access_token']
self._cooking_machine.access_token_expiry = now() + timedelta(seconds=(token_response_body['expires_in'] - (60 * 10)))
self._cooking_machine.refresh_token = token_response_body['refresh_token']
self._cooking_machine.refresh_token_expiry = now() + timedelta(days=58)
self._cooking_machine.save()
def _get_default_headers(self):
auth_token = ''
return {
'authorization': f'Bearer {self._cooking_machine.access_token}',
'accept-language': "en-US",
"referer": "https://prod.reu.rest.homeconnectegw.com/user-generated- recipes/client/editor/recipedetails",
"user-agent": "Mozilla/5.0 (Linux; Android 10; Android SDK built for x86 Build/QSR1.210802.001; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/74.0.3729.185 Mobile Safari/537.36",
"x-requested-with": "com.bshg.homeconnect.android.release"
}
def push_recipe(self, recipe):
data = {
"title": recipe.name,
"description": recipe.description,
"ingredients": [],
"steps": [],
"durations": {
"totalTime": (recipe.working_time + recipe.waiting_time) * 60,
"cookingTime": 0, # recipe.waiting_time * 60, #TODO cooking time must be sum of step duration attribute, otherwise creation fails
"preparationTime": recipe.working_time * 60,
},
"keywords": [],
"servings": {
"amount": int(recipe.servings),
"unit": 'portion', # required
},
"servingTipsStep": {
"textInstructions": "Serve"
},
"complexityLevel": "medium",
"image": {
"url": "https://media3.bsh-group.com/Recipes/800x480/17062805_210217_My-own-recipe_Picture_188ppi.jpg",
"mimeType": "image/jpeg"
# TODO add image upload
},
"accessories": [], # TODO add once tandoor supports tools
"legalDisclaimerApproved": True # TODO force user to approve disclaimer
}
for step in recipe.steps.all():
data['steps'].append({
"textInstructions": step.instruction
})
for i in step.ingredients.all():
data['ingredients'].append({
"amount": int(i.amount),
"unit": i.unit.name,
"name": i.food.name,
})
# TODO create synced recipe
response = requests.post(f'{self.RECIPE_API_URL}', json=data, headers=self._get_default_headers())
return response
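A rough usage sketch of the flow this class implements; the record lookup, the authorization code and the recipe are placeholders, and CLIENT_ID/_CLIENT_SECRET would still need to be configured:
from cookbook.models import CookingMachine, Recipe
machine = CookingMachine.objects.first()      # hypothetical existing record
hc = HomeConnectCookit(machine)
# 1. send the user to the authorization URL
print(hc.get_auth_link())
# 2. after the redirect, exchange the returned code for tokens
hc.get_access_token("code-from-redirect")     # placeholder authorization code
# 3. push a recipe once a valid access token is stored
recipe = Recipe.objects.first()               # any recipe in the space
response = hc.push_recipe(recipe)
print(response.status_code)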

View File

@@ -1,6 +1,5 @@
from datetime import datetime
from allauth.account.forms import ResetPasswordForm, SignupForm
from django import forms
from django.conf import settings
from django.core.exceptions import ValidationError
@@ -10,19 +9,18 @@ from django_scopes import scopes_disabled
from django_scopes.forms import SafeModelChoiceField, SafeModelMultipleChoiceField
from hcaptcha.fields import hCaptchaField
from .models import Comment, InviteLink, Keyword, Recipe, SearchPreference, Space, Storage, Sync, User, UserPreference, ConnectorConfig
from .models import (Comment, Food, InviteLink, Keyword, MealPlan, MealType, Recipe, RecipeBook,
RecipeBookEntry, SearchPreference, Space, Storage, Sync, User, UserPreference)
class SelectWidget(widgets.Select):
class Media:
js = ('custom/js/form_select.js', )
js = ('custom/js/form_select.js',)
class MultiSelectWidget(widgets.SelectMultiple):
class Media:
js = ('custom/js/form_multiselect.js', )
js = ('custom/js/form_multiselect.js',)
# Yes there are some stupid browsers that still don't support this but
@@ -35,6 +33,64 @@ class DateWidget(forms.DateInput):
super().__init__(**kwargs)
class UserPreferenceForm(forms.ModelForm):
prefix = 'preference'
def __init__(self, *args, **kwargs):
space = kwargs.pop('space')
super().__init__(*args, **kwargs)
self.fields['plan_share'].queryset = User.objects.filter(userspace__space=space).all()
class Meta:
model = UserPreference
fields = (
'default_unit', 'use_fractions', 'use_kj', 'theme', 'nav_color',
'sticky_navbar', 'default_page', 'plan_share', 'ingredient_decimals', 'comments', 'left_handed',
)
labels = {
'default_unit': _('Default unit'),
'use_fractions': _('Use fractions'),
'use_kj': _('Use KJ'),
'theme': _('Theme'),
'nav_color': _('Navbar color'),
'sticky_navbar': _('Sticky navbar'),
'default_page': _('Default page'),
'plan_share': _('Plan sharing'),
'ingredient_decimals': _('Ingredient decimal places'),
'shopping_auto_sync': _('Shopping list auto sync period'),
'comments': _('Comments'),
'left_handed': _('Left-handed mode')
}
help_texts = {
'nav_color': _('Color of the top navigation bar. Not all colors work with all themes, just try them out!'),
'default_unit': _('Default Unit to be used when inserting a new ingredient into a recipe.'),
'use_fractions': _(
'Enables support for fractions in ingredient amounts (e.g. convert decimals to fractions automatically)'),
'use_kj': _('Display nutritional energy amounts in joules instead of calories'),
'plan_share': _('Users with whom newly created meal plans should be shared by default.'),
'shopping_share': _('Users with whom to share shopping lists.'),
'ingredient_decimals': _('Number of decimals to round ingredients.'),
'comments': _('If you want to be able to create and see comments underneath recipes.'),
'shopping_auto_sync': _(
'Setting to 0 will disable auto sync. When viewing a shopping list the list is updated every set seconds to sync changes someone else might have made. Useful when shopping with multiple people but might use a little bit '
'of mobile data. If lower than instance limit it is reset when saving.'
),
'sticky_navbar': _('Makes the navbar stick to the top of the page.'),
'mealplan_autoadd_shopping': _('Automatically add meal plan ingredients to shopping list.'),
'mealplan_autoexclude_onhand': _('Exclude ingredients that are on hand.'),
'left_handed': _('Will optimize the UI for use with your left hand.')
}
widgets = {
'plan_share': MultiSelectWidget,
'shopping_share': MultiSelectWidget,
}
class UserNameForm(forms.ModelForm):
prefix = 'name'
@@ -42,7 +98,9 @@ class UserNameForm(forms.ModelForm):
model = User
fields = ('first_name', 'last_name')
help_texts = {'first_name': _('Both fields are optional. If none are given the username will be displayed instead')}
help_texts = {
'first_name': _('Both fields are optional. If none are given the username will be displayed instead')
}
class ExternalRecipeForm(forms.ModelForm):
@@ -56,14 +114,23 @@ class ExternalRecipeForm(forms.ModelForm):
class Meta:
model = Recipe
fields = ('name', 'description', 'servings', 'working_time', 'waiting_time', 'file_path', 'file_uid', 'keywords')
fields = (
'name', 'description', 'servings', 'working_time', 'waiting_time',
'file_path', 'file_uid', 'keywords'
)
labels = {
'name': _('Name'), 'keywords': _('Keywords'), 'working_time': _('Preparation time in minutes'), 'waiting_time': _('Waiting time (cooking/baking) in minutes'),
'file_path': _('Path'), 'file_uid': _('Storage UID'),
'name': _('Name'),
'keywords': _('Keywords'),
'working_time': _('Preparation time in minutes'),
'waiting_time': _('Waiting time (cooking/baking) in minutes'),
'file_path': _('Path'),
'file_uid': _('Storage UID'),
}
widgets = {'keywords': MultiSelectWidget}
field_classes = {'keywords': SafeModelMultipleChoiceField, }
field_classes = {
'keywords': SafeModelMultipleChoiceField,
}
class ImportExportBase(forms.Form):
@@ -87,39 +154,23 @@ class ImportExportBase(forms.Form):
COOKBOOKAPP = 'COOKBOOKAPP'
COPYMETHAT = 'COPYMETHAT'
COOKMATE = 'COOKMATE'
REZEPTSUITEDE = 'REZEPTSUITEDE'
PDF = 'PDF'
type = forms.ChoiceField(choices=((DEFAULT, _('Default')), (PAPRIKA, 'Paprika'), (NEXTCLOUD, 'Nextcloud Cookbook'), (MEALIE, 'Mealie'), (CHOWDOWN, 'Chowdown'),
(SAFFRON, 'Saffron'), (CHEFTAP, 'ChefTap'), (PEPPERPLATE, 'Pepperplate'), (RECETTETEK, 'RecetteTek'), (RECIPESAGE, 'Recipe Sage'),
(DOMESTICA, 'Domestica'), (MEALMASTER, 'MealMaster'), (REZKONV, 'RezKonv'), (OPENEATS, 'Openeats'), (RECIPEKEEPER, 'Recipe Keeper'),
(PLANTOEAT, 'Plantoeat'), (COOKBOOKAPP, 'CookBookApp'), (COPYMETHAT, 'CopyMeThat'), (PDF, 'PDF'), (MELARECIPES, 'Melarecipes'),
(COOKMATE, 'Cookmate'), (REZEPTSUITEDE, 'Recipesuite.de')))
class MultipleFileInput(forms.ClearableFileInput):
allow_multiple_selected = True
class MultipleFileField(forms.FileField):
def __init__(self, *args, **kwargs):
kwargs.setdefault("widget", MultipleFileInput())
super().__init__(*args, **kwargs)
def clean(self, data, initial=None):
single_file_clean = super().clean
if isinstance(data, (list, tuple)):
result = [single_file_clean(d, initial) for d in data]
else:
result = single_file_clean(data, initial)
return result
type = forms.ChoiceField(choices=(
(DEFAULT, _('Default')), (PAPRIKA, 'Paprika'), (NEXTCLOUD, 'Nextcloud Cookbook'),
(MEALIE, 'Mealie'), (CHOWDOWN, 'Chowdown'), (SAFFRON, 'Saffron'), (CHEFTAP, 'ChefTap'),
(PEPPERPLATE, 'Pepperplate'), (RECETTETEK, 'RecetteTek'), (RECIPESAGE, 'Recipe Sage'), (DOMESTICA, 'Domestica'),
(MEALMASTER, 'MealMaster'), (REZKONV, 'RezKonv'), (OPENEATS, 'Openeats'), (RECIPEKEEPER, 'Recipe Keeper'),
(PLANTOEAT, 'Plantoeat'), (COOKBOOKAPP, 'CookBookApp'), (COPYMETHAT, 'CopyMeThat'), (PDF, 'PDF'), (MELARECIPES, 'Melarecipes'),
(COOKMATE, 'Cookmate')
))
class ImportForm(ImportExportBase):
files = MultipleFileField(required=True)
duplicates = forms.BooleanField(help_text=_('To prevent duplicates recipes with the same name as existing ones are ignored. Check this box to import everything.'),
required=False)
files = forms.FileField(required=True, widget=forms.ClearableFileInput(attrs={'multiple': True}))
duplicates = forms.BooleanField(help_text=_(
'To prevent duplicates recipes with the same name as existing ones are ignored. Check this box to import everything.'),
required=False)
class ExportForm(ImportExportBase):
@@ -138,71 +189,59 @@ class CommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ('text', )
fields = ('text',)
labels = {'text': _('Add your comment: '), }
widgets = {'text': forms.Textarea(attrs={'rows': 2, 'cols': 15}), }
labels = {
'text': _('Add your comment: '),
}
widgets = {
'text': forms.Textarea(attrs={'rows': 2, 'cols': 15}),
}
class StorageForm(forms.ModelForm):
username = forms.CharField(widget=forms.TextInput(attrs={'autocomplete': 'new-password'}), required=False)
password = forms.CharField(widget=forms.TextInput(attrs={'autocomplete': 'new-password', 'type': 'password'}),
required=False,
help_text=_('Leave empty for dropbox and enter app password for nextcloud.'))
token = forms.CharField(widget=forms.TextInput(attrs={'autocomplete': 'new-password', 'type': 'password'}),
required=False,
help_text=_('Leave empty for nextcloud and enter api token for dropbox.'))
username = forms.CharField(
widget=forms.TextInput(attrs={'autocomplete': 'new-password'}),
required=False
)
password = forms.CharField(
widget=forms.TextInput(attrs={'autocomplete': 'new-password', 'type': 'password'}),
required=False,
help_text=_('Leave empty for dropbox and enter app password for nextcloud.')
)
token = forms.CharField(
widget=forms.TextInput(
attrs={'autocomplete': 'new-password', 'type': 'password'}
),
required=False,
help_text=_('Leave empty for nextcloud and enter api token for dropbox.')
)
class Meta:
model = Storage
fields = ('name', 'method', 'username', 'password', 'token', 'url', 'path')
help_texts = {'url': _('Leave empty for dropbox and enter only base url for nextcloud (<code>/remote.php/webdav/</code> is added automatically)'), }
help_texts = {
'url': _(
'Leave empty for dropbox and enter only base url for nextcloud (<code>/remote.php/webdav/</code> is added automatically)'),
}
# TODO: Deprecate
class RecipeBookEntryForm(forms.ModelForm):
prefix = 'bookmark'
class ConnectorConfigForm(forms.ModelForm):
enabled = forms.BooleanField(
help_text="Is the connector enabled",
required=False,
)
on_shopping_list_entry_created_enabled = forms.BooleanField(
help_text="Enable action for ShoppingListEntry created events",
required=False,
)
on_shopping_list_entry_updated_enabled = forms.BooleanField(
help_text="Enable action for ShoppingListEntry updated events",
required=False,
)
on_shopping_list_entry_deleted_enabled = forms.BooleanField(
help_text="Enable action for ShoppingListEntry deleted events",
required=False,
)
update_token = forms.CharField(
widget=forms.TextInput(attrs={'autocomplete': 'update-token', 'type': 'password'}),
required=False,
help_text=_('<a href="https://www.home-assistant.io/docs/authentication/#your-account-profile">Long Lived Access Token</a> for your HomeAssistant instance')
)
url = forms.URLField(
required=False,
help_text=_('Something like http://homeassistant.local:8123/api'),
)
def __init__(self, *args, **kwargs):
space = kwargs.pop('space')
super().__init__(*args, **kwargs)
self.fields['book'].queryset = RecipeBook.objects.filter(space=space).all()
class Meta:
model = ConnectorConfig
model = RecipeBookEntry
fields = ('book',)
fields = (
'name', 'type', 'enabled', 'on_shopping_list_entry_created_enabled', 'on_shopping_list_entry_updated_enabled',
'on_shopping_list_entry_deleted_enabled', 'url', 'todo_entity',
)
help_texts = {
'url': _('http://homeassistant.local:8123/api for example'),
field_classes = {
'book': SafeModelChoiceField,
}
@@ -217,14 +256,25 @@ class SyncForm(forms.ModelForm):
model = Sync
fields = ('storage', 'path', 'active')
field_classes = {'storage': SafeModelChoiceField, }
field_classes = {
'storage': SafeModelChoiceField,
}
labels = {'storage': _('Storage'), 'path': _('Path'), 'active': _('Active')}
labels = {
'storage': _('Storage'),
'path': _('Path'),
'active': _('Active')
}
# TODO deprecate
class BatchEditForm(forms.Form):
search = forms.CharField(label=_('Search String'))
keywords = forms.ModelMultipleChoiceField(queryset=Keyword.objects.none(), required=False, widget=MultiSelectWidget)
keywords = forms.ModelMultipleChoiceField(
queryset=Keyword.objects.none(),
required=False,
widget=MultiSelectWidget
)
def __init__(self, *args, **kwargs):
space = kwargs.pop('space')
@@ -233,7 +283,6 @@ class BatchEditForm(forms.Form):
class ImportRecipeForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
space = kwargs.pop('space')
super().__init__(*args, **kwargs)
@@ -243,13 +292,63 @@ class ImportRecipeForm(forms.ModelForm):
model = Recipe
fields = ('name', 'keywords', 'file_path', 'file_uid')
labels = {'name': _('Name'), 'keywords': _('Keywords'), 'file_path': _('Path'), 'file_uid': _('File ID'), }
labels = {
'name': _('Name'),
'keywords': _('Keywords'),
'file_path': _('Path'),
'file_uid': _('File ID'),
}
widgets = {'keywords': MultiSelectWidget}
field_classes = {'keywords': SafeModelChoiceField, }
field_classes = {
'keywords': SafeModelChoiceField,
}
# TODO deprecate
class MealPlanForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
space = kwargs.pop('space')
super().__init__(*args, **kwargs)
self.fields['recipe'].queryset = Recipe.objects.filter(space=space).all()
self.fields['meal_type'].queryset = MealType.objects.filter(space=space).all()
self.fields['shared'].queryset = User.objects.filter(userpreference__space=space).all()
def clean(self):
cleaned_data = super(MealPlanForm, self).clean()
if cleaned_data['title'] == '' and cleaned_data['recipe'] is None:
raise forms.ValidationError(
_('You must provide at least a recipe or a title.')
)
return cleaned_data
class Meta:
model = MealPlan
fields = (
'recipe', 'title', 'meal_type', 'note',
'servings', 'date', 'shared'
)
help_texts = {
'shared': _('You can list default users to share recipes with in the settings.'),
'note': _('You can use markdown to format this field. See the <a href="/docs/markdown/">docs here</a>')
}
widgets = {
'recipe': SelectWidget,
'date': DateWidget,
'shared': MultiSelectWidget
}
field_classes = {
'recipe': SafeModelChoiceField,
'meal_type': SafeModelChoiceField,
'shared': SafeModelMultipleChoiceField,
}
class InviteLinkForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
user = kwargs.pop('user')
super().__init__(*args, **kwargs)
@@ -257,8 +356,8 @@ class InviteLinkForm(forms.ModelForm):
def clean(self):
space = self.cleaned_data['space']
if space.max_users != 0 and (UserPreference.objects.filter(space=space).count()
+ InviteLink.objects.filter(valid_until__gte=datetime.today(), used_by=None, space=space).count()) >= space.max_users:
if space.max_users != 0 and (UserPreference.objects.filter(space=space).count() +
InviteLink.objects.filter(valid_until__gte=datetime.today(), used_by=None, space=space).count()) >= space.max_users:
raise ValidationError(_('Maximum number of users for this space reached.'))
def clean_email(self):
@@ -272,8 +371,12 @@ class InviteLinkForm(forms.ModelForm):
class Meta:
model = InviteLink
fields = ('email', 'group', 'valid_until', 'space')
help_texts = {'email': _('An email address is not required but if present the invite link will be sent to the user.'), }
field_classes = {'space': SafeModelChoiceField, }
help_texts = {
'email': _('An email address is not required but if present the invite link will be sent to the user.'),
}
field_classes = {
'space': SafeModelChoiceField,
}
class SpaceCreateForm(forms.Form):
@@ -293,12 +396,12 @@ class SpaceJoinForm(forms.Form):
token = forms.CharField()
class AllAuthSignupForm(SignupForm):
class AllAuthSignupForm(forms.Form):
captcha = hCaptchaField()
terms = forms.BooleanField(label=_('Accept Terms and Privacy'))
def __init__(self, **kwargs):
super().__init__(**kwargs)
super(AllAuthSignupForm, self).__init__(**kwargs)
if settings.PRIVACY_URL == '' and settings.TERMS_URL == '':
self.fields.pop('terms')
if settings.HCAPTCHA_SECRET == '':
@@ -308,50 +411,134 @@ class AllAuthSignupForm(SignupForm):
pass
class CustomPasswordResetForm(ResetPasswordForm):
captcha = hCaptchaField()
def __init__(self, **kwargs):
super(CustomPasswordResetForm, self).__init__(**kwargs)
if settings.HCAPTCHA_SECRET == '':
self.fields.pop('captcha')
class UserCreateForm(forms.Form):
name = forms.CharField(label='Username')
password = forms.CharField(widget=forms.TextInput(attrs={'autocomplete': 'new-password', 'type': 'password'}))
password_confirm = forms.CharField(widget=forms.TextInput(attrs={'autocomplete': 'new-password', 'type': 'password'}))
password = forms.CharField(
widget=forms.TextInput(
attrs={'autocomplete': 'new-password', 'type': 'password'}
)
)
password_confirm = forms.CharField(
widget=forms.TextInput(
attrs={'autocomplete': 'new-password', 'type': 'password'}
)
)
class SearchPreferenceForm(forms.ModelForm):
prefix = 'search'
trigram_threshold = forms.DecimalField(min_value=0.01,
max_value=1,
decimal_places=2,
trigram_threshold = forms.DecimalField(min_value=0.01, max_value=1, decimal_places=2,
widget=NumberInput(attrs={'class': "form-control-range", 'type': 'range'}),
help_text=_('Determines how fuzzy a search is if it uses trigram similarity matching (e.g. low values mean more typos are ignored).'))
help_text=_(
'Determines how fuzzy a search is if it uses trigram similarity matching (e.g. low values mean more typos are ignored).'))
preset = forms.CharField(widget=forms.HiddenInput(), required=False)
class Meta:
model = SearchPreference
fields = ('search', 'lookup', 'unaccent', 'icontains', 'istartswith', 'trigram', 'fulltext', 'trigram_threshold')
fields = (
'search', 'lookup', 'unaccent', 'icontains', 'istartswith', 'trigram', 'fulltext', 'trigram_threshold')
help_texts = {
'search': _('Select type method of search. Click <a href="/docs/search/">here</a> for full description of choices.'), 'lookup':
_('Use fuzzy matching on units, keywords and ingredients when editing and importing recipes.'), 'unaccent':
_('Fields to search ignoring accents. Selecting this option can improve or degrade search quality depending on language'), 'icontains':
_("Fields to search for partial matches. (e.g. searching for 'Pie' will return 'pie' and 'piece' and 'soapie')"), 'istartswith':
_("Fields to search for beginning of word matches. (e.g. searching for 'sa' will return 'salad' and 'sandwich')"), 'trigram':
_("Fields to 'fuzzy' search. (e.g. searching for 'recpie' will find 'recipe'.) Note: this option will conflict with 'web' and 'raw' methods of search."), 'fulltext':
_("Fields to full text search. Note: 'web', 'phrase', and 'raw' search methods only function with fulltext fields."),
'search': _(
'Select type method of search. Click <a href="/docs/search/">here</a> for full description of choices.'),
'lookup': _('Use fuzzy matching on units, keywords and ingredients when editing and importing recipes.'),
'unaccent': _(
'Fields to search ignoring accents. Selecting this option can improve or degrade search quality depending on language'),
'icontains': _(
"Fields to search for partial matches. (e.g. searching for 'Pie' will return 'pie' and 'piece' and 'soapie')"),
'istartswith': _(
"Fields to search for beginning of word matches. (e.g. searching for 'sa' will return 'salad' and 'sandwich')"),
'trigram': _(
"Fields to 'fuzzy' search. (e.g. searching for 'recpie' will find 'recipe'.) Note: this option will conflict with 'web' and 'raw' methods of search."),
'fulltext': _(
"Fields to full text search. Note: 'web', 'phrase', and 'raw' search methods only function with fulltext fields."),
}
labels = {
'search': _('Search Method'), 'lookup': _('Fuzzy Lookups'), 'unaccent': _('Ignore Accent'), 'icontains': _("Partial Match"), 'istartswith': _("Starts With"),
'trigram': _("Fuzzy Search"), 'fulltext': _("Full Text")
'search': _('Search Method'),
'lookup': _('Fuzzy Lookups'),
'unaccent': _('Ignore Accent'),
'icontains': _("Partial Match"),
'istartswith': _("Starts With"),
'trigram': _("Fuzzy Search"),
'fulltext': _("Full Text")
}
widgets = {
'search': SelectWidget, 'unaccent': MultiSelectWidget, 'icontains': MultiSelectWidget, 'istartswith': MultiSelectWidget, 'trigram': MultiSelectWidget, 'fulltext':
MultiSelectWidget,
'search': SelectWidget,
'unaccent': MultiSelectWidget,
'icontains': MultiSelectWidget,
'istartswith': MultiSelectWidget,
'trigram': MultiSelectWidget,
'fulltext': MultiSelectWidget,
}
class ShoppingPreferenceForm(forms.ModelForm):
prefix = 'shopping'
class Meta:
model = UserPreference
fields = (
'shopping_share', 'shopping_auto_sync', 'mealplan_autoadd_shopping', 'mealplan_autoexclude_onhand',
'mealplan_autoinclude_related', 'shopping_add_onhand', 'default_delay', 'filter_to_supermarket', 'shopping_recent_days', 'csv_delim', 'csv_prefix'
)
help_texts = {
'shopping_share': _('Users will see all items you add to your shopping list. They must add you to see items on their list.'),
'shopping_auto_sync': _(
'Setting to 0 will disable auto sync. When viewing a shopping list the list is updated every set seconds to sync changes someone else might have made. Useful when shopping with multiple people but might use a little bit '
'of mobile data. If lower than instance limit it is reset when saving.'
),
'mealplan_autoadd_shopping': _('Automatically add meal plan ingredients to shopping list.'),
'mealplan_autoinclude_related': _('When adding a meal plan to the shopping list (manually or automatically), include all related recipes.'),
'mealplan_autoexclude_onhand': _('When adding a meal plan to the shopping list (manually or automatically), exclude ingredients that are on hand.'),
'default_delay': _('Default number of hours to delay a shopping list entry.'),
'filter_to_supermarket': _('Filter shopping list to only include supermarket categories.'),
'shopping_recent_days': _('Days of recent shopping list entries to display.'),
'shopping_add_onhand': _("Mark food 'On Hand' when checked off shopping list."),
'csv_delim': _('Delimiter to use for CSV exports.'),
'csv_prefix': _('Prefix to add when copying list to the clipboard.'),
}
labels = {
'shopping_share': _('Share Shopping List'),
'shopping_auto_sync': _('Autosync'),
'mealplan_autoadd_shopping': _('Auto Add Meal Plan'),
'mealplan_autoexclude_onhand': _('Exclude On Hand'),
'mealplan_autoinclude_related': _('Include Related'),
'default_delay': _('Default Delay Hours'),
'filter_to_supermarket': _('Filter to Supermarket'),
'shopping_recent_days': _('Recent Days'),
'csv_delim': _('CSV Delimiter'),
"csv_prefix_label": _("List Prefix"),
'shopping_add_onhand': _("Auto On Hand"),
}
widgets = {
'shopping_share': MultiSelectWidget
}
class SpacePreferenceForm(forms.ModelForm):
prefix = 'space'
reset_food_inherit = forms.BooleanField(label=_("Reset Food Inheritance"), initial=False, required=False,
help_text=_("Reset all food to inherit the fields configured."))
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs) # populates the post
self.fields['food_inherit'].queryset = Food.inheritable_fields
class Meta:
model = Space
fields = ('food_inherit', 'reset_food_inherit', 'show_facet_count')
help_texts = {
'food_inherit': _('Fields on food that should be inherited by default.'),
'show_facet_count': _('Show recipe counts on search filters'), }
widgets = {
'food_inherit': MultiSelectWidget
}

View File

@@ -1,10 +1,11 @@
import datetime
from gettext import gettext as _
from django.conf import settings
from allauth.account.adapter import DefaultAccountAdapter
from django.conf import settings
from django.contrib import messages
from django.core.cache import caches
from gettext import gettext as _
from cookbook.models import InviteLink
@@ -16,13 +17,10 @@ class AllAuthCustomAdapter(DefaultAccountAdapter):
Whether to allow sign-ups.
"""
signup_token = False
if 'signup_token' in request.session and InviteLink.objects.filter(
valid_until__gte=datetime.datetime.today(), used_by=None, uuid=request.session['signup_token']).exists():
if 'signup_token' in request.session and InviteLink.objects.filter(valid_until__gte=datetime.datetime.today(), used_by=None, uuid=request.session['signup_token']).exists():
signup_token = True
if request.resolver_match.view_name == 'account_signup' and not settings.ENABLE_SIGNUP and not signup_token:
return False
elif request.resolver_match.view_name == 'socialaccount_signup' and len(settings.SOCIAL_PROVIDERS) < 1:
if (request.resolver_match.view_name == 'account_signup' or request.resolver_match.view_name == 'socialaccount_signup') and not settings.ENABLE_SIGNUP and not signup_token:
return False
else:
return super(AllAuthCustomAdapter, self).is_open_for_signup(request)
@@ -35,7 +33,7 @@ class AllAuthCustomAdapter(DefaultAccountAdapter):
if c == default:
try:
super(AllAuthCustomAdapter, self).send_mail(template_prefix, email, context)
except Exception:  # don't fail signup just because the confirmation mail could not be sent
except Exception:  # don't fail signup just because the confirmation mail could not be sent
pass
else:
messages.add_message(self.request, messages.ERROR, _('In order to prevent spam, the requested email was not send. Please wait a few minutes and try again.'))

View File

@@ -0,0 +1,8 @@
from django.test.runner import DiscoverRunner
from django_scopes import scopes_disabled
class CustomTestRunner(DiscoverRunner):
def run_tests(self, *args, **kwargs):
with scopes_disabled():
return super().run_tests(*args, **kwargs)
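Such a runner is typically activated through Django's TEST_RUNNER setting; the dotted path below is an assumption, since the file name is not shown in this diff:
# settings.py
TEST_RUNNER = 'cookbook.helper.CustomTestRunner.CustomTestRunner'  # dotted path is an assumption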

View File

@@ -1,12 +1,4 @@
import socket
from urllib.parse import urlparse
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator
from django.db.models import Func
from ipaddress import ip_address
from recipes import settings
class Round(Func):
@@ -15,34 +7,7 @@ class Round(Func):
def str2bool(v):
if isinstance(v, bool) or v is None:
if type(v) == bool or v is None:
return v
else:
return v.lower() in ("yes", "true", "1")
"""
validates a URL that is supposed to be imported
checks that the protocol used is http(s) and that no local address is accessed
@:param url to test
@:return true if url is valid, false otherwise
"""
def validate_import_url(url):
try:
validator = URLValidator(schemes=['http', 'https'])
validator(url)
except ValidationError:
# if schema is not http or https, consider url invalid
return False
# resolve IP address of url
try:
url_ip_address = ip_address(str(socket.gethostbyname(urlparse(url).hostname)))
except (ValueError, AttributeError, TypeError, Exception) as e:
# if ip cannot be parsed, consider url invalid
return False
# validate that IP is neither private nor any other special address
return not any([url_ip_address.is_private, url_ip_address.is_reserved, url_ip_address.is_loopback, url_ip_address.is_multicast, url_ip_address.is_link_local, ])
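For illustration, the checks above behave roughly as follows (results for the public hostname depend on DNS resolution):
validate_import_url('https://example.com/recipe')   # True: http(s) scheme, public address
validate_import_url('ftp://example.com/recipe')     # False: only http and https are allowed
validate_import_url('http://127.0.0.1/recipe')      # False: loopback address
validate_import_url('http://192.168.1.10/recipe')   # False: private address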

View File

@@ -1,4 +1,6 @@
import cookbook.helper.dal
from cookbook.helper.AllAuthCustomAdapter import AllAuthCustomAdapter
__all__ = [
'dal',
]

View File

@@ -1,227 +0,0 @@
import re
from django.core.cache import caches
from django.db.models.functions import Lower
from cookbook.models import Automation
class AutomationEngine:
request = None
source = None
use_cache = None
food_aliases = None
keyword_aliases = None
unit_aliases = None
never_unit = None
transpose_words = None
regex_replace = {
Automation.DESCRIPTION_REPLACE: None,
Automation.INSTRUCTION_REPLACE: None,
Automation.FOOD_REPLACE: None,
Automation.UNIT_REPLACE: None,
Automation.NAME_REPLACE: None,
}
def __init__(self, request, use_cache=True, source=None):
self.request = request
self.use_cache = use_cache
if not source:
self.source = "default_string_to_avoid_false_regex_match"
else:
self.source = source
def apply_keyword_automation(self, keyword):
keyword = keyword.strip()
if self.use_cache and self.keyword_aliases is None:
self.keyword_aliases = {}
KEYWORD_CACHE_KEY = f'automation_keyword_alias_{self.request.space.pk}'
if c := caches['default'].get(KEYWORD_CACHE_KEY, None):
self.keyword_aliases = c
caches['default'].touch(KEYWORD_CACHE_KEY, 30)
else:
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.KEYWORD_ALIAS).only('param_1', 'param_2').order_by('order').all():
self.keyword_aliases[a.param_1.lower()] = a.param_2
caches['default'].set(KEYWORD_CACHE_KEY, self.keyword_aliases, 30)
else:
self.keyword_aliases = {}
if self.keyword_aliases:
try:
keyword = self.keyword_aliases[keyword.lower()]
except KeyError:
pass
else:
if automation := Automation.objects.filter(space=self.request.space, type=Automation.KEYWORD_ALIAS, param_1__iexact=keyword, disabled=False).order_by('order').first():
return automation.param_2
return keyword
def apply_unit_automation(self, unit):
unit = unit.strip()
if self.use_cache and self.unit_aliases is None:
self.unit_aliases = {}
UNIT_CACHE_KEY = f'automation_unit_alias_{self.request.space.pk}'
if c := caches['default'].get(UNIT_CACHE_KEY, None):
self.unit_aliases = c
caches['default'].touch(UNIT_CACHE_KEY, 30)
else:
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.UNIT_ALIAS).only('param_1', 'param_2').order_by('order').all():
self.unit_aliases[a.param_1.lower()] = a.param_2
caches['default'].set(UNIT_CACHE_KEY, self.unit_aliases, 30)
else:
self.unit_aliases = {}
if self.unit_aliases:
try:
unit = self.unit_aliases[unit.lower()]
except KeyError:
pass
else:
if automation := Automation.objects.filter(space=self.request.space, type=Automation.UNIT_ALIAS, param_1__iexact=unit, disabled=False).order_by('order').first():
return automation.param_2
return self.apply_regex_replace_automation(unit, Automation.UNIT_REPLACE)
def apply_food_automation(self, food):
food = food.strip()
if self.use_cache and self.food_aliases is None:
self.food_aliases = {}
FOOD_CACHE_KEY = f'automation_food_alias_{self.request.space.pk}'
if c := caches['default'].get(FOOD_CACHE_KEY, None):
self.food_aliases = c
caches['default'].touch(FOOD_CACHE_KEY, 30)
else:
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.FOOD_ALIAS).only('param_1', 'param_2').order_by('order').all():
self.food_aliases[a.param_1.lower()] = a.param_2
caches['default'].set(FOOD_CACHE_KEY, self.food_aliases, 30)
else:
self.food_aliases = {}
if self.food_aliases:
try:
return self.food_aliases[food.lower()]
except KeyError:
return self.apply_regex_replace_automation(food, Automation.FOOD_REPLACE)
else:
if automation := Automation.objects.filter(space=self.request.space, type=Automation.FOOD_ALIAS, param_1__iexact=food, disabled=False).order_by('order').first():
return automation.param_2
return self.apply_regex_replace_automation(food, Automation.FOOD_REPLACE)
def apply_never_unit_automation(self, tokens):
"""
Moves a string that should never be treated as a unit to the next token and optionally inserts a default unit in its place
e.g. NEVER_UNIT: param1: egg, param2: None would modify ['1', 'egg', 'white'] to ['1', '', 'egg', 'white']
or NEVER_UNIT: param1: egg, param2: pcs would modify ['1', 'egg', 'yolk'] to ['1', 'pcs', 'egg', 'yolk']
:param1 string: string that should never be considered a unit, will be moved to tokens[2]
:param2 (optional) unit as string: will insert unit string into tokens[1]
:return: tokens as list (possibly changed by automation)
"""
if self.use_cache and self.never_unit is None:
self.never_unit = {}
NEVER_UNIT_CACHE_KEY = f'automation_never_unit_{self.request.space.pk}'
if c := caches['default'].get(NEVER_UNIT_CACHE_KEY, None):
self.never_unit = c
caches['default'].touch(NEVER_UNIT_CACHE_KEY, 30)
else:
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.NEVER_UNIT).only('param_1', 'param_2').order_by('order').all():
self.never_unit[a.param_1.lower()] = a.param_2
caches['default'].set(NEVER_UNIT_CACHE_KEY, self.never_unit, 30)
else:
self.never_unit = {}
new_unit = None
alt_unit = self.apply_unit_automation(tokens[1])
never_unit = False
if self.never_unit:
try:
new_unit = self.never_unit[tokens[1].lower()]
never_unit = True
except KeyError:
return tokens
else:
if a := Automation.objects.annotate(param_1_lower=Lower('param_1')).filter(space=self.request.space, type=Automation.NEVER_UNIT, param_1_lower__in=[
tokens[1].lower(), alt_unit.lower()], disabled=False).order_by('order').first():
new_unit = a.param_2
never_unit = True
if never_unit:
tokens.insert(1, new_unit)
return tokens
def apply_transpose_automation(self, string):
"""
If two words (param_1 & param_2) are detected in sequence, swap their position in the ingredient string
:param 1: first word to detect
:param 2: second word to detect
return: new ingredient string
"""
if self.use_cache and self.transpose_words is None:
self.transpose_words = {}
TRANSPOSE_WORDS_CACHE_KEY = f'automation_transpose_words_{self.request.space.pk}'
if c := caches['default'].get(TRANSPOSE_WORDS_CACHE_KEY, None):
self.transpose_words = c
caches['default'].touch(TRANSPOSE_WORDS_CACHE_KEY, 30)
else:
i = 0
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.TRANSPOSE_WORDS).only(
'param_1', 'param_2').order_by('order').all()[:512]:
self.transpose_words[i] = [a.param_1.lower(), a.param_2.lower()]
i += 1
caches['default'].set(TRANSPOSE_WORDS_CACHE_KEY, self.transpose_words, 30)
else:
self.transpose_words = {}
tokens = [x.lower() for x in string.replace(',', ' ').split()]
if self.transpose_words:
for key, value in self.transpose_words.items():
if value[0] in tokens and value[1] in tokens:
string = re.sub(rf"\b({value[0]})\W*({value[1]})\b", r"\2 \1", string, flags=re.IGNORECASE)
else:
for rule in Automation.objects.filter(space=self.request.space, type=Automation.TRANSPOSE_WORDS, disabled=False) \
.annotate(param_1_lower=Lower('param_1'), param_2_lower=Lower('param_2')) \
.filter(param_1_lower__in=tokens, param_2_lower__in=tokens).order_by('order')[:512]:
if rule.param_1 in tokens and rule.param_2 in tokens:
string = re.sub(rf"\b({rule.param_1})\W*({rule.param_2})\b", r"\2 \1", string, flags=re.IGNORECASE)
return string
def apply_regex_replace_automation(self, string, automation_type):
# TODO add warning - maybe on SPACE page? when a max of 512 automations of a specific type is exceeded (ALIAS types excluded?)
"""
Replaces strings in a recipe field when the recipe's source matches
field_type are Automation.type that apply regex replacements
Automation.DESCRIPTION_REPLACE
Automation.INSTRUCTION_REPLACE
Automation.FOOD_REPLACE
Automation.UNIT_REPLACE
Automation.NAME_REPLACE
regex replacement utilizes the following fields from the Automation model
:param 1: source that should apply the automation in regex format ('.*' for all)
:param 2: regex pattern to match ()
:param 3: replacement string (leave blank to delete)
return: new string
"""
if self.use_cache and self.regex_replace[automation_type] is None:
self.regex_replace[automation_type] = {}
REGEX_REPLACE_CACHE_KEY = f'automation_regex_replace_{self.request.space.pk}'
if c := caches['default'].get(REGEX_REPLACE_CACHE_KEY, None):
self.regex_replace[automation_type] = c[automation_type]
caches['default'].touch(REGEX_REPLACE_CACHE_KEY, 30)
else:
i = 0
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=automation_type).only(
'param_1', 'param_2', 'param_3').order_by('order').all()[:512]:
self.regex_replace[automation_type][i] = [a.param_1, a.param_2, a.param_3]
i += 1
caches['default'].set(REGEX_REPLACE_CACHE_KEY, self.regex_replace, 30)
else:
self.regex_replace[automation_type] = {}
if self.regex_replace[automation_type]:
for rule in self.regex_replace[automation_type].values():
if re.match(rule[0], (self.source)[:512]):
string = re.sub(rule[1], rule[2], string, flags=re.IGNORECASE)
else:
for rule in Automation.objects.filter(space=self.request.space, disabled=False, type=automation_type).only(
'param_1', 'param_2', 'param_3').order_by('order').all()[:512]:
if re.match(rule.param_1, (self.source)[:512]):
string = re.sub(rule.param_2, rule.param_3, string, flags=re.IGNORECASE)
return string
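As a plain-regex illustration of the replace mechanic described in the docstring above (the rule values are made up):
import re
source = 'https://example.com/some-recipe'   # where the recipe was imported from
string = 'Preheat the oven. Visit our website for more recipes!'
# hypothetical rule: (param_1 = source pattern, param_2 = pattern to replace, param_3 = replacement)
rule = ('.*example.com.*', r'Visit our website for more recipes!', '')
if re.match(rule[0], source[:512]):
    string = re.sub(rule[1], rule[2], string, flags=re.IGNORECASE)
print(string)  # 'Preheat the oven. '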

View File

@@ -1,11 +0,0 @@
class CacheHelper:
space = None
BASE_UNITS_CACHE_KEY = None
PROPERTY_TYPE_CACHE_KEY = None
def __init__(self, space):
self.space = space
self.BASE_UNITS_CACHE_KEY = f'SPACE_{space.id}_BASE_UNITS'
self.PROPERTY_TYPE_CACHE_KEY = f'SPACE_{space.id}_PROPERTY_TYPES'
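Usage is presumably along these lines; the call itself is hypothetical and not shown in this diff:
from django.core.cache import caches
helper = CacheHelper(space)   # space: a cookbook.models.Space instance
base_units = caches['default'].get(helper.BASE_UNITS_CACHE_KEY, None)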

View File

@@ -11,5 +11,4 @@ def context_settings(request):
'PRIVACY_URL': settings.PRIVACY_URL,
'IMPRINT_URL': settings.IMPRINT_URL,
'SHOPPING_MIN_AUTOSYNC_INTERVAL': settings.SHOPPING_MIN_AUTOSYNC_INTERVAL,
'DISABLE_EXTERNAL_CONNECTORS': settings.DISABLE_EXTERNAL_CONNECTORS,
}

cookbook/helper/dal.py Normal file (34 lines added)
View File

@@ -0,0 +1,34 @@
from cookbook.models import Food, Keyword, Recipe, Unit
from dal import autocomplete
class BaseAutocomplete(autocomplete.Select2QuerySetView):
model = None
def get_queryset(self):
if not self.request.user.is_authenticated:
return self.model.objects.none()
qs = self.model.objects.filter(space=self.request.space).all()
if self.q:
qs = qs.filter(name__icontains=self.q)
return qs
class KeywordAutocomplete(BaseAutocomplete):
model = Keyword
class IngredientsAutocomplete(BaseAutocomplete):
model = Food
class RecipeAutocomplete(BaseAutocomplete):
model = Recipe
class UnitAutocomplete(BaseAutocomplete):
model = Unit
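These django-autocomplete-light views are class based, so they would be wired up in urls.py roughly like this; the URL paths and names are assumptions, not taken from the actual urls.py:
from django.urls import path
from cookbook.helper.dal import IngredientsAutocomplete, KeywordAutocomplete
urlpatterns = [
    path('dal/keyword/', KeywordAutocomplete.as_view(), name='dal_keyword'),   # path/name assumed
    path('dal/food/', IngredientsAutocomplete.as_view(), name='dal_food'),     # path/name assumed
]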

View File

@@ -1,19 +0,0 @@
import json
def get_all_nutrient_types():
f = open('') # <--- download the foundation food or any other dataset and retrieve all nutrition ID's from it https://fdc.nal.usda.gov/download-datasets.html
json_data = json.loads(f.read())
nutrients = {}
for food in json_data['FoundationFoods']:
for entry in food['foodNutrients']:
nutrients[entry['nutrient']['id']] = {'name': entry['nutrient']['name'], 'unit': entry['nutrient']['unitName']}
nutrient_ids = list(nutrients.keys())
nutrient_ids.sort()
for nid in nutrient_ids:
print('{', f'value: {nid}, text: "{nutrients[nid]["name"]} [{nutrients[nid]["unit"]}] ({nid})"', '},')
get_all_nutrient_types()

View File

@@ -1,7 +1,8 @@
import os
from io import BytesIO
import sys
from PIL import Image
from io import BytesIO
def rescale_image_jpeg(image_object, base_width=1020):
@@ -10,7 +11,7 @@ def rescale_image_jpeg(image_object, base_width=1020):
width_percent = (base_width / float(img.size[0]))
height = int((float(img.size[1]) * float(width_percent)))
img = img.resize((base_width, height), Image.LANCZOS)
img = img.resize((base_width, height), Image.ANTIALIAS)
img_bytes = BytesIO()
img.save(img_bytes, 'JPEG', quality=90, optimize=True, icc_profile=icc_profile)
@@ -21,7 +22,7 @@ def rescale_image_png(image_object, base_width=1020):
image_object = Image.open(image_object)
wpercent = (base_width / float(image_object.size[0]))
hsize = int((float(image_object.size[1]) * float(wpercent)))
img = image_object.resize((base_width, hsize), Image.LANCZOS)
img = image_object.resize((base_width, hsize), Image.ANTIALIAS)
im_io = BytesIO()
img.save(im_io, 'PNG', quality=90)
@@ -39,12 +40,7 @@ def get_filetype(name):
# TODO also add env variable to define which image sizes should be compressed
# filetype argument cannot be optional, otherwise this function will treat all images as if they were a jpeg
# Because it's no longer optional, there is no reason to return it
def handle_image(request, image_object, filetype):
try:
Image.open(image_object).verify()
except Exception:
return None
def handle_image(request, image_object, filetype):
if (image_object.size / 1000) > 500: # if larger than 500 kb compress
if filetype == '.jpeg' or filetype == '.jpg':
return rescale_image_jpeg(image_object)

View File

@@ -2,16 +2,18 @@ import re
import string
import unicodedata
from cookbook.helper.automation_helper import AutomationEngine
from cookbook.models import Food, Ingredient, Unit
from django.core.cache import caches
from cookbook.models import Unit, Food, Automation, Ingredient
class IngredientParser:
request = None
ignore_rules = False
automation = None
food_aliases = {}
unit_aliases = {}
def __init__(self, request, cache_mode=True, ignore_automations=False):
def __init__(self, request, cache_mode, ignore_automations=False):
"""
Initialize ingredient parser
:param request: request context (to control caching, rule ownership, etc.)
@@ -20,8 +22,65 @@ class IngredientParser:
"""
self.request = request
self.ignore_rules = ignore_automations
if not self.ignore_rules:
self.automation = AutomationEngine(self.request, use_cache=cache_mode)
if cache_mode:
FOOD_CACHE_KEY = f'automation_food_alias_{self.request.space.pk}'
if c := caches['default'].get(FOOD_CACHE_KEY, None):
self.food_aliases = c
caches['default'].touch(FOOD_CACHE_KEY, 30)
else:
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.FOOD_ALIAS).only('param_1', 'param_2').all():
self.food_aliases[a.param_1] = a.param_2
caches['default'].set(FOOD_CACHE_KEY, self.food_aliases, 30)
UNIT_CACHE_KEY = f'automation_unit_alias_{self.request.space.pk}'
if c := caches['default'].get(UNIT_CACHE_KEY, None):
self.unit_aliases = c
caches['default'].touch(UNIT_CACHE_KEY, 30)
else:
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.UNIT_ALIAS).only('param_1', 'param_2').all():
self.unit_aliases[a.param_1] = a.param_2
caches['default'].set(UNIT_CACHE_KEY, self.unit_aliases, 30)
else:
self.food_aliases = {}
self.unit_aliases = {}
def apply_food_automation(self, food):
"""
Apply food alias automations to passed food
:param food: food as string
:return: food as string (possibly changed by automation)
"""
if self.ignore_rules:
return food
else:
if self.food_aliases:
try:
return self.food_aliases[food]
except KeyError:
return food
else:
if automation := Automation.objects.filter(space=self.request.space, type=Automation.FOOD_ALIAS, param_1=food, disabled=False).first():
return automation.param_2
return food
def apply_unit_automation(self, unit):
"""
Apply unit alias automations to passed unit
:param unit: unit as string
:return: unit as string (possibly changed by automation)
"""
if self.ignore_rules:
return unit
else:
if self.unit_aliases:
try:
return self.unit_aliases[unit]
except KeyError:
return unit
else:
if automation := Automation.objects.filter(space=self.request.space, type=Automation.UNIT_ALIAS, param_1=unit, disabled=False).first():
return automation.param_2
return unit
def get_unit(self, unit):
"""
@@ -32,10 +91,7 @@ class IngredientParser:
if not unit:
return None
if len(unit) > 0:
if self.ignore_rules:
u, created = Unit.objects.get_or_create(name=unit.strip(), space=self.request.space)
else:
u, created = Unit.objects.get_or_create(name=self.automation.apply_unit_automation(unit), space=self.request.space)
u, created = Unit.objects.get_or_create(name=self.apply_unit_automation(unit), space=self.request.space)
return u
return None
@@ -48,10 +104,7 @@ class IngredientParser:
if not food:
return None
if len(food) > 0:
if self.ignore_rules:
f, created = Food.objects.get_or_create(name=food.strip(), space=self.request.space)
else:
f, created = Food.objects.get_or_create(name=self.automation.apply_food_automation(food), space=self.request.space)
f, created = Food.objects.get_or_create(name=self.apply_food_automation(food), space=self.request.space)
return f
return None
@@ -73,17 +126,15 @@ class IngredientParser:
amount = 0
unit = None
note = ''
if x.strip() == '':
return amount, unit, note
did_check_frac = False
end = 0
while (end < len(x) and (x[end] in string.digits
or (
(x[end] == '.' or x[end] == ',' or x[end] == '/')
and end + 1 < len(x)
and x[end + 1] in string.digits
))):
(x[end] == '.' or x[end] == ',' or x[end] == '/')
and end + 1 < len(x)
and x[end + 1] in string.digits
))):
end += 1
if end > 0:
if "/" in x[:end]:
@@ -107,8 +158,7 @@ class IngredientParser:
if unit is not None and unit.strip() == '':
unit = None
if unit is not None and (unit.startswith('(') or unit.startswith(
'-')): # I don't know of any unit that starts with ( or -, so it's likely an alternative like "1L (500ml) Water" or "2-3"
if unit is not None and (unit.startswith('(') or unit.startswith('-')): # I don't know of any unit that starts with ( or -, so it's likely an alternative like "1L (500ml) Water" or "2-3"
unit = None
note = x
return amount, unit, note
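The loop above accepts digits plus '.', ',' and '/' only while another digit follows, so the scan stops exactly where the unit or note begins. A simplified standalone sketch of that scan (the fraction and comma handling that follows in the real method is left out here):

import string

def split_leading_amount(x):
    # scan digits and . , / separators that are followed by another digit
    end = 0
    while end < len(x) and (x[end] in string.digits
                            or (x[end] in '.,/' and end + 1 < len(x) and x[end + 1] in string.digits)):
        end += 1
    return x[:end], x[end:].strip()

print(split_leading_amount('1/2 kg flour'))     # ('1/2', 'kg flour')
print(split_leading_amount('2,5l milk'))        # ('2,5', 'l milk')
print(split_leading_amount('a pinch of salt'))  # ('', 'a pinch of salt')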
@@ -169,9 +219,6 @@ class IngredientParser:
if len(ingredient) == 0:
raise ValueError('string to parse cannot be empty')
if len(ingredient) > 512:
raise ValueError('cannot parse ingredients with more than 512 characters')
# some people/languages put amount and unit at the end of the ingredient string
# if something like this is detected move it to the beginning so the parser can handle it
if len(ingredient) < 1000 and re.search(r'^([^\W\d_])+(.)*[1-9](\d)*\s*([^\W\d_])+', ingredient):
@@ -181,24 +228,13 @@ class IngredientParser:
# if the string contains parentheses early on, remove them and place them at the end
# because it's likely some kind of note
if re.match('(.){1,6}\\s\\((.[^\\(\\)])+\\)\\s', ingredient):
match = re.search('\\((.[^\\(])+\\)', ingredient)
if re.match('(.){1,6}\s\((.[^\(\)])+\)\s', ingredient):
match = re.search('\((.[^\(])+\)', ingredient)
ingredient = ingredient[:match.start()] + ingredient[match.end():] + ' ' + ingredient[match.start():match.end()]
# leading spaces before commas result in extra tokens, clean them out
ingredient = ingredient.replace(' ,', ',')
# handle "(from) - (to)" amounts by using the minimum amount and adding the range to the description
# "10.5 - 200 g XYZ" => "100 g XYZ (10.5 - 200)"
ingredient = re.sub("^(\\d+|\\d+[\\.,]\\d+) - (\\d+|\\d+[\\.,]\\d+) (.*)", "\\1 \\3 (\\1 - \\2)", ingredient)
# if amount and unit are connected add space in between
if re.match('([0-9])+([A-z])+\\s', ingredient):
ingredient = re.sub(r'(?<=([a-z])|\d)(?=(?(1)\d|[a-z]))', ' ', ingredient)
if not self.ignore_rules:
ingredient = self.automation.apply_transpose_automation(ingredient)
tokens = ingredient.split() # split at each space into tokens
if len(tokens) == 1:
# there is only one argument, that must be the food
@@ -211,8 +247,6 @@ class IngredientParser:
# three arguments: if it already has a unit there can't be
# a fraction for the amount
if len(tokens) > 2:
if not self.ignore_rules:
tokens = self.automation.apply_never_unit_automation(tokens)
try:
if unit is not None:
# a unit is already found, no need to try the second argument for a fraction
@@ -259,11 +293,10 @@ class IngredientParser:
if unit_note not in note:
note += ' ' + unit_note
if unit and not self.ignore_rules:
unit = self.automation.apply_unit_automation(unit)
if unit:
unit = self.apply_unit_automation(unit.strip())
if food and not self.ignore_rules:
food = self.automation.apply_food_automation(food)
food = self.apply_food_automation(food.strip())
if len(food) > Food._meta.get_field('name').max_length: # test if food name is too long
# try splitting it at a space and taking only the first arg
if len(food.split()) > 1 and len(food.split()[0]) < Food._meta.get_field('name').max_length:
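For reference, the length guard above falls back to the first word when the parsed food name exceeds the model field. A small sketch of that fallback, with max_length hard-coded instead of read from Food._meta, and plain truncation assumed for the branch not visible in this hunk:

def shorten_food_name(food, max_length=128):
    # fall back to the first word if the full name is too long; otherwise truncate (assumption)
    if len(food) <= max_length:
        return food
    words = food.split()
    if len(words) > 1 and len(words[0]) < max_length:
        return words[0]
    return food[:max_length]

print(shorten_food_name('organic stone ground whole wheat flour ' * 5))  # 'organic'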

View File

@@ -35,7 +35,6 @@ Negative examples:
u'<p>del.icio.us</p>'
"""
from xml.etree.ElementTree import Element
import markdown
@@ -65,7 +64,7 @@ class UrlizePattern(markdown.inlinepatterns.Pattern):
else:
url = 'http://' + url
el = Element("a")
el = markdown.util.etree.Element("a")
el.set('href', url)
el.text = markdown.util.AtomicString(text)
return el
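The hunk above swaps the removed markdown.util.etree helper for the standard-library Element while building the anchor tag the same way. A runnable sketch using only xml.etree (the protocol check is simplified here, and a plain str stands in for markdown.util.AtomicString):

from xml.etree.ElementTree import Element, tostring

def make_link(url, text):
    # build an <a> element the way the urlize pattern does
    if '://' not in url:
        url = 'http://' + url
    el = Element('a')
    el.set('href', url)
    el.text = text
    return el

print(tostring(make_link('del.icio.us', 'del.icio.us')).decode())
# <a href="http://del.icio.us">del.icio.us</a>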

View File

@@ -1,496 +0,0 @@
import traceback
from collections import defaultdict
from decimal import Decimal
from cookbook.models import (Food, FoodProperty, Property, PropertyType, Supermarket,
SupermarketCategory, SupermarketCategoryRelation, Unit, UnitConversion)
import re
class OpenDataImportResponse:
total_created = 0
total_updated = 0
total_untouched = 0
total_errored = 0
def to_dict(self):
return {'total_created': self.total_created, 'total_updated': self.total_updated, 'total_untouched': self.total_untouched, 'total_errored': self.total_errored}
class OpenDataImporter:
request = None
data = {}
slug_id_cache = {}
update_existing = False
use_metric = True
def __init__(self, request, data, update_existing=False, use_metric=True):
self.request = request
self.data = data
self.update_existing = update_existing
self.use_metric = use_metric
def _update_slug_cache(self, object_class, datatype):
self.slug_id_cache[datatype] = dict(object_class.objects.filter(space=self.request.space, open_data_slug__isnull=False).values_list('open_data_slug', 'id', ))
@staticmethod
def _is_obj_identical(field_list, obj, existing_obj):
"""
checks if the obj meant for import is identical to an already existing one
:param field_list: list of field names to check
:type field_list: list[str]
:param obj: object meant for import
:type obj: Object
:param existing_obj: object already in DB
:type existing_obj: Object
:return: if objects are identical
:rtype: bool
"""
for field in field_list:
if isinstance(getattr(obj, field), float) or isinstance(getattr(obj, field), Decimal):
if abs(float(getattr(obj, field)) - float(existing_obj[field])) > 0.001: # convert both to float and check if basically equal
print(f'comparing FLOAT {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
return False
elif getattr(obj, field) != existing_obj[field]:
print(f'comparing {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
return False
return True
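_is_obj_identical compares numeric fields with a 0.001 tolerance so Decimal/float rounding does not trigger needless updates, and everything else with plain equality. A standalone sketch of the same check working on two dicts instead of a model instance and a values() row:

from decimal import Decimal

def is_identical(field_list, obj, existing, tol=0.001):
    # True when every listed field matches, allowing a small tolerance for numeric fields
    for field in field_list:
        a, b = obj[field], existing[field]
        if isinstance(a, (float, Decimal)):
            if abs(float(a) - float(b)) > tol:
                return False
        elif a != b:
            return False
    return True

print(is_identical(['name', 'amount'], {'name': 'Gram', 'amount': Decimal('1.0004')}, {'name': 'Gram', 'amount': 1.0}))  # True
print(is_identical(['name'], {'name': 'Gram'}, {'name': 'Grams'}))  # False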
@staticmethod
def _merge_if_conflicting(model_type, obj, existing_data_slugs, existing_data_names):
"""
sometimes there might be two objects conflicting for open data import (one has the slug, the other the name)
this function checks if that is the case and merges the two objects if possible
:param model_type: type of model to check/merge
:type model_type: Model
:param obj: object that should be created/updated
:type obj: Model
:param existing_data_slugs: dict of open data slugs mapped to objects
:type existing_data_slugs: dict
:param existing_data_names: dict of names mapped to objects
:type existing_data_names: dict
:return: true if merge was successful or not necessary else false
:rtype: bool
"""
if obj.open_data_slug in existing_data_slugs and obj.name in existing_data_names and existing_data_slugs[obj.open_data_slug]['pk'] != existing_data_names[obj.name]['pk']:
try:
source_obj = model_type.objects.get(pk=existing_data_slugs[obj.open_data_slug]['pk'])
del existing_data_slugs[obj.open_data_slug]
source_obj.merge_into(model_type.objects.get(pk=existing_data_names[obj.name]['pk']))
return True
except RuntimeError:
return False # in the edge case (e.g. parent/child) that an object cannot be merged don't update it for now
else:
return True
@staticmethod
def _get_existing_obj(obj, existing_data_slugs, existing_data_names):
"""
gets the existing object from slug or name cache
:param obj: object that should be found
:type obj: Model
:param existing_data_slugs: dict of open data slugs mapped to objects
:type existing_data_slugs: dict
:param existing_data_names: dict of names mapped to objects
:type existing_data_names: dict
:return: existing object
:rtype: dict
"""
existing_obj = None
if obj.open_data_slug in existing_data_slugs:
existing_obj = existing_data_slugs[obj.open_data_slug]
elif obj.name in existing_data_names:
existing_obj = existing_data_names[obj.name]
return existing_obj
def import_units(self):
od_response = OpenDataImportResponse()
datatype = 'unit'
model_type = Unit
field_list = ['name', 'plural_name', 'base_unit', 'open_data_slug']
existing_data_slugs = {}
existing_data_names = {}
for obj in model_type.objects.filter(space=self.request.space).values('pk', *field_list):
existing_data_slugs[obj['open_data_slug']] = obj
existing_data_names[obj['name']] = obj
update_list = []
create_list = []
for u in list(self.data[datatype].keys()):
obj = model_type(
name=self.data[datatype][u]['name'],
plural_name=self.data[datatype][u]['plural_name'],
base_unit=self.data[datatype][u]['base_unit'].lower() if self.data[datatype][u]['base_unit'] != '' else None,
open_data_slug=u,
space=self.request.space
)
if obj.open_data_slug in existing_data_slugs or obj.name in existing_data_names:
if not self._merge_if_conflicting(model_type, obj, existing_data_slugs, existing_data_names):
od_response.total_errored += 1
continue # if conflicting objects exist and cannot be merged skip object
existing_obj = self._get_existing_obj(obj, existing_data_slugs, existing_data_names)
if not self._is_obj_identical(field_list, obj, existing_obj):
obj.pk = existing_obj['pk']
update_list.append(obj)
else:
od_response.total_untouched += 1
else:
create_list.append(obj)
if self.update_existing and len(update_list) > 0:
model_type.objects.bulk_update(update_list, field_list)
od_response.total_updated += len(update_list)
if len(create_list) > 0:
model_type.objects.bulk_create(create_list, update_conflicts=True, update_fields=field_list, unique_fields=('space', 'name',))
od_response.total_created += len(create_list)
return od_response
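import_units and the importers that follow share one pattern: index the existing rows by open_data_slug and by name, then sort every incoming record into a create list, an update list (match found but fields differ) or an untouched counter. A pure-Python sketch of that partitioning, with records as plain dicts and compare() standing in for _is_obj_identical:

def partition(incoming, existing_by_slug, existing_by_name, compare):
    # split incoming records into (create, update, untouched) using the two lookup indexes
    create, update, untouched = [], [], 0
    for rec in incoming:
        match = existing_by_slug.get(rec['open_data_slug']) or existing_by_name.get(rec['name'])
        if match is None:
            create.append(rec)
        elif compare(rec, match):
            untouched += 1
        else:
            update.append({**rec, 'pk': match['pk']})
    return create, update, untouched

existing = {'pk': 1, 'name': 'Gram', 'open_data_slug': 'gram'}
print(partition([{'name': 'Gram', 'open_data_slug': 'gram'}, {'name': 'Litre', 'open_data_slug': 'litre'}],
                {'gram': existing}, {'Gram': existing}, lambda a, b: a['name'] == b['name']))
# ([{'name': 'Litre', 'open_data_slug': 'litre'}], [], 1)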
def import_category(self):
od_response = OpenDataImportResponse()
datatype = 'category'
model_type = SupermarketCategory
field_list = ['name', 'open_data_slug']
existing_data_slugs = {}
existing_data_names = {}
for obj in model_type.objects.filter(space=self.request.space).values('pk', *field_list):
existing_data_slugs[obj['open_data_slug']] = obj
existing_data_names[obj['name']] = obj
update_list = []
create_list = []
for k in list(self.data[datatype].keys()):
obj = model_type(
name=self.data[datatype][k]['name'],
open_data_slug=k,
space=self.request.space
)
if obj.open_data_slug in existing_data_slugs or obj.name in existing_data_names:
if not self._merge_if_conflicting(model_type, obj, existing_data_slugs, existing_data_names):
od_response.total_errored += 1
continue # if conflicting objects exist and cannot be merged skip object
existing_obj = self._get_existing_obj(obj, existing_data_slugs, existing_data_names)
if not self._is_obj_identical(field_list, obj, existing_obj):
obj.pk = existing_obj['pk']
update_list.append(obj)
else:
od_response.total_untouched += 1
else:
create_list.append(obj)
if self.update_existing and len(update_list) > 0:
model_type.objects.bulk_update(update_list, field_list)
od_response.total_updated += len(update_list)
if len(create_list) > 0:
model_type.objects.bulk_create(create_list, update_conflicts=True, update_fields=field_list, unique_fields=('space', 'name',))
od_response.total_created += len(create_list)
return od_response
def import_property(self):
od_response = OpenDataImportResponse()
datatype = 'property'
model_type = PropertyType
field_list = ['name', 'unit', 'fdc_id', 'open_data_slug']
existing_data_slugs = {}
existing_data_names = {}
for obj in model_type.objects.filter(space=self.request.space).values('pk', *field_list):
existing_data_slugs[obj['open_data_slug']] = obj
existing_data_names[obj['name']] = obj
update_list = []
create_list = []
for k in list(self.data[datatype].keys()):
obj = model_type(
name=self.data[datatype][k]['name'],
unit=self.data[datatype][k]['unit'],
fdc_id=self.data[datatype][k]['fdc_id'],
open_data_slug=k,
space=self.request.space
)
if obj.open_data_slug in existing_data_slugs or obj.name in existing_data_names:
if not self._merge_if_conflicting(model_type, obj, existing_data_slugs, existing_data_names):
od_response.total_errored += 1
continue # if conflicting objects exist and cannot be merged skip object
existing_obj = self._get_existing_obj(obj, existing_data_slugs, existing_data_names)
if not self._is_obj_identical(field_list, obj, existing_obj):
obj.pk = existing_obj['pk']
update_list.append(obj)
else:
od_response.total_untouched += 1
else:
create_list.append(obj)
if self.update_existing and len(update_list) > 0:
model_type.objects.bulk_update(update_list, field_list)
od_response.total_updated += len(update_list)
if len(create_list) > 0:
model_type.objects.bulk_create(create_list, update_conflicts=True, update_fields=field_list, unique_fields=('space', 'name',))
od_response.total_created += len(create_list)
return od_response
def import_supermarket(self):
od_response = OpenDataImportResponse()
datatype = 'store'
model_type = Supermarket
field_list = ['name', 'open_data_slug']
existing_data_slugs = {}
existing_data_names = {}
for obj in model_type.objects.filter(space=self.request.space).values('pk', *field_list):
existing_data_slugs[obj['open_data_slug']] = obj
existing_data_names[obj['name']] = obj
update_list = []
create_list = []
self._update_slug_cache(SupermarketCategory, 'category')
for k in list(self.data[datatype].keys()):
obj = model_type(
name=self.data[datatype][k]['name'],
open_data_slug=k,
space=self.request.space
)
if obj.open_data_slug in existing_data_slugs or obj.name in existing_data_names:
if not self._merge_if_conflicting(model_type, obj, existing_data_slugs, existing_data_names):
od_response.total_errored += 1
continue # if conflicting objects exist and cannot be merged skip object
existing_obj = self._get_existing_obj(obj, existing_data_slugs, existing_data_names)
if not self._is_obj_identical(field_list, obj, existing_obj):
obj.pk = existing_obj['pk']
update_list.append(obj)
else:
od_response.total_untouched += 1
else:
create_list.append(obj)
if self.update_existing and len(update_list) > 0:
model_type.objects.bulk_update(update_list, field_list)
od_response.total_updated += len(update_list)
if len(create_list) > 0:
model_type.objects.bulk_create(create_list, update_conflicts=True, update_fields=field_list, unique_fields=('space', 'name',))
od_response.total_created += len(create_list)
# always add open data slug if matching supermarket is found, otherwise relation might fail
self._update_slug_cache(Supermarket, 'store')
for k in list(self.data[datatype].keys()):
relations = []
order = 0
for c in self.data[datatype][k]['categories']:
relations.append(
SupermarketCategoryRelation(
supermarket_id=self.slug_id_cache[datatype][k],
category_id=self.slug_id_cache['category'][c],
order=order,
)
)
order += 1
SupermarketCategoryRelation.objects.bulk_create(relations, ignore_conflicts=True, unique_fields=('supermarket', 'category',))
return od_response
def import_food(self):
od_response = OpenDataImportResponse()
datatype = 'food'
model_type = Food
field_list = ['name', 'open_data_slug']
existing_data_slugs = {}
existing_data_names = {}
for obj in model_type.objects.filter(space=self.request.space).values('pk', *field_list):
existing_data_slugs[obj['open_data_slug']] = obj
existing_data_names[obj['name']] = obj
update_list = []
create_list = []
self._update_slug_cache(Unit, 'unit')
self._update_slug_cache(PropertyType, 'property')
self._update_slug_cache(SupermarketCategory, 'category')
unit_g = Unit.objects.filter(space=self.request.space, base_unit__iexact='g').first()
for k in list(self.data[datatype].keys()):
obj_dict = {
'name': self.data[datatype][k]['name'],
'plural_name': self.data[datatype][k]['plural_name'] if self.data[datatype][k]['plural_name'] != '' else None,
'supermarket_category_id': self.slug_id_cache['category'][self.data[datatype][k]['store_category']],
'fdc_id': re.sub(r'\D', '', self.data[datatype][k]['fdc_id']) if self.data[datatype][k]['fdc_id'] != '' else None,
'open_data_slug': k,
'properties_food_unit_id': None,
'space_id': self.request.space.id,
}
if unit_g:
obj_dict['properties_food_unit_id'] = unit_g.id
obj = model_type(**obj_dict)
if obj.open_data_slug in existing_data_slugs or obj.name in existing_data_names:
if not self._merge_if_conflicting(model_type, obj, existing_data_slugs, existing_data_names):
od_response.total_errored += 1
continue # if conflicting objects exist and cannot be merged skip object
existing_obj = self._get_existing_obj(obj, existing_data_slugs, existing_data_names)
if not self._is_obj_identical(field_list, obj, existing_obj):
obj.pk = existing_obj['pk']
update_list.append(obj)
else:
od_response.total_untouched += 1
else:
create_list.append({'data': obj_dict})
if self.update_existing and len(update_list) > 0:
model_type.objects.bulk_update(update_list, field_list)
od_response.total_updated += len(update_list)
if len(create_list) > 0:
Food.load_bulk(create_list, None)
od_response.total_created += len(create_list)
# --------------- PROPERTY STUFF -----------------------
model_type = Property
field_list = ['property_type_id', 'property_amount', 'open_data_food_slug']
existing_data_slugs = {}
existing_data_property_types = {}
for obj in model_type.objects.filter(space=self.request.space).values('pk', *field_list):
existing_data_slugs[obj['open_data_food_slug']] = obj
existing_data_property_types[obj['property_type_id']] = obj
update_list = []
create_list = []
self._update_slug_cache(Food, 'food')
for k in list(self.data['food'].keys()):
for fp in self.data['food'][k]['properties']['type_values']:
obj = model_type(
property_type_id=self.slug_id_cache['property'][fp['property_type']],
property_amount=fp['property_value'],
open_data_food_slug=k,
space=self.request.space,
)
if obj.open_data_food_slug in existing_data_slugs and obj.property_type_id in existing_data_property_types and existing_data_slugs[obj.open_data_food_slug] == existing_data_property_types[obj.property_type_id]:
existing_obj = existing_data_slugs[obj.open_data_food_slug]
if not self._is_obj_identical(field_list, obj, existing_obj):
obj.pk = existing_obj['pk']
update_list.append(obj)
else:
create_list.append(obj)
if self.update_existing and len(update_list) > 0:
model_type.objects.bulk_update(update_list, field_list)
if len(create_list) > 0:
model_type.objects.bulk_create(create_list, ignore_conflicts=True, unique_fields=('space', 'open_data_food_slug', 'property_type',))
linked_properties = list(FoodProperty.objects.filter(food__space=self.request.space).values_list('property_id', flat=True).all())
property_food_relation_list = []
for p in model_type.objects.filter(space=self.request.space, open_data_food_slug__isnull=False).values_list('open_data_food_slug', 'id', ):
if p[1] == 147:
pass  # no-op; appears to be a leftover debugging hook
# slug_id_cache should always exist, don't create relations for already linked properties (ignore_conflicts would do that as well but this is more performant)
if p[0] in self.slug_id_cache['food'] and p[1] not in linked_properties:
property_food_relation_list.append(Food.properties.through(food_id=self.slug_id_cache['food'][p[0]], property_id=p[1]))
FoodProperty.objects.bulk_create(property_food_relation_list, unique_fields=('food_id', 'property_id',))
return od_response
def import_conversion(self):
od_response = OpenDataImportResponse()
datatype = 'conversion'
model_type = UnitConversion
field_list = ['base_amount', 'base_unit_id', 'converted_amount', 'converted_unit_id', 'food_id', 'open_data_slug']
self._update_slug_cache(Food, 'food')
self._update_slug_cache(Unit, 'unit')
existing_data_slugs = {}
existing_data_foods = defaultdict(list)
for obj in model_type.objects.filter(space=self.request.space).values('pk', *field_list):
existing_data_slugs[obj['open_data_slug']] = obj
existing_data_foods[obj['food_id']].append(obj)
update_list = []
create_list = []
for k in list(self.data[datatype].keys()):
# try/except here because sometimes key "k" is not set in the food cache
try:
obj = model_type(
base_amount=Decimal(self.data[datatype][k]['base_amount']),
base_unit_id=self.slug_id_cache['unit'][self.data[datatype][k]['base_unit']],
converted_amount=Decimal(self.data[datatype][k]['converted_amount']),
converted_unit_id=self.slug_id_cache['unit'][self.data[datatype][k]['converted_unit']],
food_id=self.slug_id_cache['food'][self.data[datatype][k]['food']],
open_data_slug=k,
space=self.request.space,
created_by_id=self.request.user.id,
)
if obj.open_data_slug in existing_data_slugs:
existing_obj = existing_data_slugs[obj.open_data_slug]
if not self._is_obj_identical(field_list, obj, existing_obj):
obj.pk = existing_obj['pk']
update_list.append(obj)
else:
od_response.total_untouched += 1
else:
matching_existing_found = False
if obj.food_id in existing_data_foods:
for edf in existing_data_foods[obj.food_id]:
if obj.base_unit_id == edf['base_unit_id'] and obj.converted_unit_id == edf['converted_unit_id']:
matching_existing_found = True
if not self._is_obj_identical(field_list, obj, edf):
obj.pk = edf['pk']
update_list.append(obj)
else:
od_response.total_untouched += 1
if not matching_existing_found:
create_list.append(obj)
except KeyError as e:
traceback.print_exc()
od_response.total_errored += 1
print(self.data[datatype][k]['food'] + ' is not in self.slug_id_cache["food"]')
if self.update_existing and len(update_list) > 0:
od_response.total_updated = model_type.objects.bulk_update(update_list, field_list)
od_response.total_errored += len(update_list) - od_response.total_updated
if len(create_list) > 0:
objs_created = model_type.objects.bulk_create(create_list, ignore_conflicts=True, unique_fields=('space', 'base_unit', 'converted_unit', 'food', 'open_data_slug'))
od_response.total_created = len(objs_created)
od_response.total_errored += len(create_list) - od_response.total_created
return od_response

View File

@@ -4,16 +4,16 @@ from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import user_passes_test
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from django.http import HttpResponseRedirect
from django.urls import reverse, reverse_lazy
from django.utils.translation import gettext as _
from oauth2_provider.contrib.rest_framework import TokenHasReadWriteScope, TokenHasScope
from oauth2_provider.contrib.rest_framework import TokenHasScope, TokenHasReadWriteScope
from oauth2_provider.models import AccessToken
from rest_framework import permissions
from rest_framework.permissions import SAFE_METHODS
from cookbook.models import Recipe, ShareLink, UserSpace
from cookbook.models import ShareLink, Recipe, UserSpace
def get_allowed_groups(groups_required):
@@ -75,7 +75,7 @@ def is_object_owner(user, obj):
if not user.is_authenticated:
return False
try:
return obj.get_owner() == 'orphan' or obj.get_owner() == user
return obj.get_owner() == user
except Exception:
return False
@@ -123,7 +123,7 @@ def share_link_valid(recipe, share):
return c
if link := ShareLink.objects.filter(recipe=recipe, uuid=share, abuse_blocked=False).first():
if 0 < settings.SHARING_LIMIT < link.request_count and not link.space.no_sharing_limit:
if 0 < settings.SHARING_LIMIT < link.request_count:
return False
link.request_count += 1
link.save()
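share_link_valid counts requests per link and rejects the link once a positive SHARING_LIMIT is exceeded; this diff adds/removes the no_sharing_limit exemption for the space. A tiny sketch of just that check, with the limit passed in rather than read from Django settings:

def share_link_allowed(request_count, sharing_limit, no_sharing_limit=False):
    # False once a positive limit has been exceeded and the space is not exempt
    return not (0 < sharing_limit < request_count and not no_sharing_limit)

print(share_link_allowed(5, 0))            # True  - a limit of 0 means unlimited
print(share_link_allowed(101, 100))        # False - over the limit
print(share_link_allowed(101, 100, True))  # True  - space exempt from the limit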
@@ -255,6 +255,9 @@ class CustomIsShared(permissions.BasePermission):
return request.user.is_authenticated
def has_object_permission(self, request, view, obj):
# # temporary hack to make old shopping list work with new shopping list
# if obj.__class__.__name__ in ['ShoppingList', 'ShoppingListEntry']:
# return is_object_shared(request.user, obj) or obj.created_by in list(request.user.get_shopping_share())
return is_object_shared(request.user, obj)
@@ -319,8 +322,7 @@ class CustomRecipePermission(permissions.BasePermission):
def has_permission(self, request, view): # user is either at least a guest or a share link is given and the request is safe
share = request.query_params.get('share', None)
return ((has_group_permission(request.user, ['guest']) and request.method in SAFE_METHODS) or has_group_permission(
request.user, ['user'])) or (share and request.method in SAFE_METHODS and 'pk' in view.kwargs)
return has_group_permission(request.user, ['guest']) or (share and request.method in SAFE_METHODS and 'pk' in view.kwargs)
def has_object_permission(self, request, view, obj):
share = request.query_params.get('share', None)
@@ -330,8 +332,7 @@ class CustomRecipePermission(permissions.BasePermission):
if obj.private:
return ((obj.created_by == request.user) or (request.user in obj.shared.all())) and obj.space == request.space
else:
return ((has_group_permission(request.user, ['guest']) and request.method in SAFE_METHODS)
or has_group_permission(request.user, ['user'])) and obj.space == request.space
return has_group_permission(request.user, ['guest']) and obj.space == request.space
class CustomUserPermission(permissions.BasePermission):
@@ -360,7 +361,7 @@ class CustomTokenHasScope(TokenHasScope):
"""
def has_permission(self, request, view):
if isinstance(request.auth, AccessToken):
if type(request.auth) == AccessToken:
return super().has_permission(request, view)
else:
return request.user.is_authenticated
@@ -374,7 +375,7 @@ class CustomTokenHasReadWriteScope(TokenHasReadWriteScope):
"""
def has_permission(self, request, view):
if isinstance(request.auth, AccessToken):
if type(request.auth) == AccessToken:
return super().has_permission(request, view)
else:
return True
@@ -433,10 +434,3 @@ def switch_user_active_space(user, space):
return us
except ObjectDoesNotExist:
return None
class IsReadOnlyDRF(permissions.BasePermission):
message = 'You cannot interact with this object as it is not owned by you!'
def has_permission(self, request, view):
return request.method in SAFE_METHODS

View File

@@ -1,78 +0,0 @@
from django.core.cache import caches
from cookbook.helper.cache_helper import CacheHelper
from cookbook.helper.unit_conversion_helper import UnitConversionHelper
from cookbook.models import PropertyType
class FoodPropertyHelper:
space = None
def __init__(self, space):
"""
Helper to perform food property calculations
:param space: space to limit scope to
"""
self.space = space
def calculate_recipe_properties(self, recipe):
"""
Calculate all food properties for a given recipe.
:param recipe: recipe to calculate properties for
:return: dict with property keys and total/food values for each available property
"""
ingredients = []
computed_properties = {}
for s in recipe.steps.all():
ingredients += s.ingredients.all()
property_types = caches['default'].get(CacheHelper(self.space).PROPERTY_TYPE_CACHE_KEY, None)
if not property_types:
property_types = PropertyType.objects.filter(space=self.space).all()
# cache is cleared on property type save signal so long duration is fine
caches['default'].set(CacheHelper(self.space).PROPERTY_TYPE_CACHE_KEY, property_types, 60 * 60)
for fpt in property_types:
computed_properties[fpt.id] = {'id': fpt.id, 'name': fpt.name, 'description': fpt.description,
'unit': fpt.unit, 'order': fpt.order, 'food_values': {}, 'total_value': 0, 'missing_value': False}
uch = UnitConversionHelper(self.space)
for i in ingredients:
if i.food is not None:
conversions = uch.get_conversions(i)
for pt in property_types:
found_property = False
if i.food.properties_food_amount == 0 or i.food.properties_food_unit is None: # if food is configured incorrectly
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': i.food.name, 'value': None}
computed_properties[pt.id]['missing_value'] = True
else:
for p in i.food.properties.all():
if p.property_type == pt and p.property_amount is not None:
for c in conversions:
if c.unit == i.food.properties_food_unit:
found_property = True
computed_properties[pt.id]['total_value'] += (c.amount / i.food.properties_food_amount) * p.property_amount
computed_properties[pt.id]['food_values'] = self.add_or_create(
computed_properties[p.property_type.id]['food_values'], c.food.id, (c.amount / i.food.properties_food_amount) * p.property_amount, c.food)
if not found_property:
if i.amount == 0: # don't count ingredients without an amount as missing
computed_properties[pt.id]['missing_value'] = computed_properties[pt.id]['missing_value'] or False # don't override if another food was already missing
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': i.food.name, 'value': 0}
else:
computed_properties[pt.id]['missing_value'] = True
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': i.food.name, 'value': None}
return computed_properties
# small dict helper to add to an existing key or create a new one; there is probably a better way of doing this
# TODO move to a central helper? --> use defaultdict
@staticmethod
def add_or_create(d, key, value, food):
if key in d and d[key]['value']:
d[key]['value'] += value
else:
d[key] = {'id': food.id, 'food': food.name, 'value': value}
return d
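The TODO above suggests replacing add_or_create with a defaultdict; one possible shape of that refactor is sketched below (the None handling for missing values is left out, so this is only an illustration of the suggestion, not a drop-in replacement):

from collections import defaultdict

def accumulate(entries):
    # sum property values per food id; defaultdict removes the add-or-create branching
    totals = defaultdict(lambda: {'food': None, 'value': 0})
    for food_id, food_name, value in entries:
        node = totals[food_id]
        node['food'] = food_name
        node['value'] += value
    return dict(totals)

print(accumulate([(1, 'Flour', 10.0), (1, 'Flour', 2.5), (2, 'Milk', 3.0)]))
# {1: {'food': 'Flour', 'value': 12.5}, 2: {'food': 'Milk', 'value': 3.0}}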

View File

@@ -0,0 +1,191 @@
# import json
# import re
# from json import JSONDecodeError
# from urllib.parse import unquote
# from bs4 import BeautifulSoup
# from bs4.element import Tag
# from recipe_scrapers import scrape_html, scrape_me
# from recipe_scrapers._exceptions import NoSchemaFoundInWildMode
# from recipe_scrapers._utils import get_host_name, normalize_string
# from cookbook.helper import recipe_url_import as helper
# from cookbook.helper.scrapers.scrapers import text_scraper
# def get_recipe_from_source(text, url, request):
# def build_node(k, v):
# if isinstance(v, dict):
# node = {
# 'name': k,
# 'value': k,
# 'children': get_children_dict(v)
# }
# elif isinstance(v, list):
# node = {
# 'name': k,
# 'value': k,
# 'children': get_children_list(v)
# }
# else:
# node = {
# 'name': k + ": " + normalize_string(str(v)),
# 'value': normalize_string(str(v))
# }
# return node
# def get_children_dict(children):
# kid_list = []
# for k, v in children.items():
# kid_list.append(build_node(k, v))
# return kid_list
# def get_children_list(children):
# kid_list = []
# for kid in children:
# if type(kid) == list:
# node = {
# 'name': "unknown list",
# 'value': "unknown list",
# 'children': get_children_list(kid)
# }
# kid_list.append(node)
# elif type(kid) == dict:
# for k, v in kid.items():
# kid_list.append(build_node(k, v))
# else:
# kid_list.append({
# 'name': normalize_string(str(kid)),
# 'value': normalize_string(str(kid))
# })
# return kid_list
# recipe_tree = []
# parse_list = []
# soup = BeautifulSoup(text, "html.parser")
# html_data = get_from_html(soup)
# images = get_images_from_source(soup, url)
# text = unquote(text)
# scrape = None
# if url and not text:
# try:
# scrape = scrape_me(url_path=url, wild_mode=True)
# except(NoSchemaFoundInWildMode):
# pass
# if not scrape:
# try:
# parse_list.append(remove_graph(json.loads(text)))
# if not url and 'url' in parse_list[0]:
# url = parse_list[0]['url']
# scrape = text_scraper("<script type='application/ld+json'>" + text + "</script>", url=url)
# except JSONDecodeError:
# for el in soup.find_all('script', type='application/ld+json'):
# el = remove_graph(el)
# if not url and 'url' in el:
# url = el['url']
# if type(el) == list:
# for le in el:
# parse_list.append(le)
# elif type(el) == dict:
# parse_list.append(el)
# for el in soup.find_all(type='application/json'):
# el = remove_graph(el)
# if type(el) == list:
# for le in el:
# parse_list.append(le)
# elif type(el) == dict:
# parse_list.append(el)
# scrape = text_scraper(text, url=url)
# recipe_json = helper.get_from_scraper(scrape, request)
# # TODO: DEPRECATE recipe_tree & html_data. first validate it isn't used anywhere
# for el in parse_list:
# temp_tree = []
# if isinstance(el, Tag):
# try:
# el = json.loads(el.string)
# except TypeError:
# continue
# for k, v in el.items():
# if isinstance(v, dict):
# node = {
# 'name': k,
# 'value': k,
# 'children': get_children_dict(v)
# }
# elif isinstance(v, list):
# node = {
# 'name': k,
# 'value': k,
# 'children': get_children_list(v)
# }
# else:
# node = {
# 'name': k + ": " + normalize_string(str(v)),
# 'value': normalize_string(str(v))
# }
# temp_tree.append(node)
# if '@type' in el and el['@type'] == 'Recipe':
# recipe_tree += [{'name': 'ld+json', 'children': temp_tree}]
# else:
# recipe_tree += [{'name': 'json', 'children': temp_tree}]
# return recipe_json, recipe_tree, html_data, images
# def get_from_html(soup):
# INVISIBLE_ELEMS = ('style', 'script', 'head', 'title')
# html = []
# for s in soup.strings:
# if ((s.parent.name not in INVISIBLE_ELEMS) and (len(s.strip()) > 0)):
# html.append(s)
# return html
# def get_images_from_source(soup, url):
# sources = ['src', 'srcset', 'data-src']
# images = []
# img_tags = soup.find_all('img')
# if url:
# site = get_host_name(url)
# prot = url.split(':')[0]
# urls = []
# for img in img_tags:
# for src in sources:
# try:
# urls.append(img[src])
# except KeyError:
# pass
# for u in urls:
# u = u.split('?')[0]
# filename = re.search(r'/([\w_-]+[.](jpg|jpeg|gif|png))$', u)
# if filename:
# if (('http' not in u) and (url)):
# # sometimes an image source can be relative
# # if it is, prepend the base url
# u = '{}://{}{}'.format(prot, site, u)
# if 'http' in u:
# images.append(u)
# return images
# def remove_graph(el):
# # recipes type might be wrapped in @graph type
# if isinstance(el, Tag):
# try:
# el = json.loads(el.string)
# if '@graph' in el:
# for x in el['@graph']:
# if '@type' in x and x['@type'] == 'Recipe':
# el = x
# except (TypeError, JSONDecodeError):
# pass
# return el
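remove_graph in the commented block above unwraps ld+json documents that hide the recipe inside an @graph array. A standalone sketch of that unwrapping for data that is already parsed into a dict (the BeautifulSoup Tag and JSON-decoding branches are left out):

def unwrap_graph(el):
    # return the Recipe node when the document wraps its nodes in an @graph list
    if isinstance(el, dict) and '@graph' in el:
        for node in el['@graph']:
            if isinstance(node, dict) and node.get('@type') == 'Recipe':
                return node
    return el

doc = {'@context': 'https://schema.org', '@graph': [{'@type': 'WebSite'}, {'@type': 'Recipe', 'name': 'Soup'}]}
print(unwrap_graph(doc))  # {'@type': 'Recipe', 'name': 'Soup'}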

View File

@@ -1,11 +1,14 @@
import json
from collections import Counter
from datetime import date, timedelta
from django.contrib.postgres.search import SearchQuery, SearchRank, SearchVector, TrigramSimilarity
from django.core.cache import cache
from django.db.models import Avg, Case, Count, Exists, F, Max, OuterRef, Q, Subquery, Value, When
from django.core.cache import caches
from django.db.models import (Avg, Case, Count, Exists, F, Func, Max, OuterRef, Q, Subquery, Value, When, FilteredRelation)
from django.db.models.functions import Coalesce, Lower, Substr
from django.utils import timezone, translation
from django.utils.translation import gettext as _
from cookbook.helper.HelperFunctions import Round, str2bool
from cookbook.managers import DICTIONARY
@@ -14,25 +17,20 @@ from cookbook.models import (CookLog, CustomFilter, Food, Keyword, Recipe, Searc
from recipes import settings
# TODO create extensive tests to make sure ORs ANDs and various filters, sorting, etc work as expected
# TODO consider creating a simpleListRecipe API that only includes minimum of recipe info and minimal filtering
class RecipeSearch():
_postgres = settings.DATABASES['default']['ENGINE'] == 'django.db.backends.postgresql'
_postgres = settings.DATABASES['default']['ENGINE'] in ['django.db.backends.postgresql_psycopg2', 'django.db.backends.postgresql']
def __init__(self, request, **params):
self._request = request
self._queryset = None
if f := params.get('filter', None):
custom_filter = (
CustomFilter.objects.filter(id=f, space=self._request.space)
.filter(Q(created_by=self._request.user) | Q(shared=self._request.user) | Q(recipebook__shared=self._request.user))
.first()
)
custom_filter = CustomFilter.objects.filter(id=f, space=self._request.space).filter(Q(created_by=self._request.user) |
Q(shared=self._request.user) | Q(recipebook__shared=self._request.user)).first()
if custom_filter:
self._params = {**json.loads(custom_filter.search)}
self._original_params = {**(params or {})}
# json.loads casts rating as an integer, expecting string
if isinstance(self._params.get('rating', None), int):
self._params['rating'] = str(self._params['rating'])
else:
self._params = {**(params or {})}
else:
@@ -47,8 +45,7 @@ class RecipeSearch():
cache.set(CACHE_KEY, self._search_prefs, timeout=10)
else:
self._search_prefs = SearchPreference()
self._string = self._params.get('query').strip(
) if self._params.get('query', None) else None
self._string = self._params.get('query').strip() if self._params.get('query', None) else None
self._rating = self._params.get('rating', None)
self._keywords = {
'or': self._params.get('keywords_or', None) or self._params.get('keywords', None),
@@ -77,8 +74,7 @@ class RecipeSearch():
self._random = str2bool(self._params.get('random', False))
self._new = str2bool(self._params.get('new', False))
self._num_recent = int(self._params.get('num_recent', 0))
self._include_children = str2bool(
self._params.get('include_children', None))
self._include_children = str2bool(self._params.get('include_children', None))
self._timescooked = self._params.get('timescooked', None)
self._cookedon = self._params.get('cookedon', None)
self._createdon = self._params.get('createdon', None)
@@ -86,9 +82,9 @@ class RecipeSearch():
self._viewedon = self._params.get('viewedon', None)
self._makenow = self._params.get('makenow', None)
# this supports a hidden feature to find recipes missing X ingredients
if isinstance(self._makenow, bool) and self._makenow == True:
if type(self._makenow) == bool and self._makenow == True:
self._makenow = 0
elif isinstance(self._makenow, str) and self._makenow in ["yes", "true"]:
elif type(self._makenow) == str and self._makenow in ["yes", "true"]:
self._makenow = 0
else:
try:
@@ -145,7 +141,7 @@ class RecipeSearch():
self.unit_filters(units=self._units)
self._makenow_filter(missing=self._makenow)
self.string_filters(string=self._string)
return self._queryset.filter(space=self._request.space).order_by(*self.orderby)
return self._queryset.filter(space=self._request.space).distinct().order_by(*self.orderby)
def _sort_includes(self, *args):
for x in args:
@@ -161,7 +157,7 @@ class RecipeSearch():
else:
order = []
# TODO add userpreference for default sort order and replace '-favorite'
default_order = ['name']
default_order = ['-name']
# recent and new_recipe are always first; they float a few recipes to the top
if self._num_recent:
order += ['-recent']
@@ -170,6 +166,7 @@ class RecipeSearch():
# if a sort order is provided by user - use that order
if self._sort_order:
if not isinstance(self._sort_order, list):
order += [self._sort_order]
else:
@@ -185,10 +182,8 @@ class RecipeSearch():
# otherwise sort by the remaining order_by attributes or favorite by default
else:
order += default_order
order[:] = [Lower('name').asc() if x ==
'name' else x for x in order]
order[:] = [Lower('name').desc() if x ==
'-name' else x for x in order]
order[:] = [Lower('name').asc() if x == 'name' else x for x in order]
order[:] = [Lower('name').desc() if x == '-name' else x for x in order]
self.orderby = order
def string_filters(self, string=None):
@@ -205,8 +200,7 @@ class RecipeSearch():
for f in self._filters:
query_filter |= f
# this creates duplicate records which can screw up other aggregates, see makenow for workaround
self._queryset = self._queryset.filter(query_filter).distinct()
self._queryset = self._queryset.filter(query_filter).distinct() # this creates duplicate records which can screw up other aggregates, see makenow for workaround
if self._fulltext_include:
if self._fuzzy_match is None:
self._queryset = self._queryset.annotate(score=Coalesce(Max(self.search_rank), 0.0))
@@ -234,13 +228,12 @@ class RecipeSearch():
default = timezone.now() - timedelta(days=100000)
else:
default = timezone.now()
self._queryset = self._queryset.annotate(
lastcooked=Coalesce(Max(Case(When(cooklog__created_by=self._request.user, cooklog__space=self._request.space, then='cooklog__created_at'))), Value(default))
)
self._queryset = self._queryset.annotate(lastcooked=Coalesce(
Max(Case(When(cooklog__created_by=self._request.user, cooklog__space=self._request.space, then='cooklog__created_at'))), Value(default)))
if cooked_date is None:
return
cooked_date = date(*[int(x)for x in cooked_date.split('-') if x != ''])
cooked_date = date(*[int(x) for x in cooked_date.split('-') if x != ''])
if lessthan:
self._queryset = self._queryset.filter(lastcooked__date__lte=cooked_date).exclude(lastcooked=default)
@@ -261,7 +254,7 @@ class RecipeSearch():
if updated_date is None:
return
lessthan = '-' in updated_date[:1]
updated_date = date(*[int(x)for x in updated_date.split('-') if x != ''])
updated_date = date(*[int(x) for x in updated_date.split('-') if x != ''])
if lessthan:
self._queryset = self._queryset.filter(updated_at__date__lte=updated_date)
else:
@@ -270,13 +263,12 @@ class RecipeSearch():
def _viewed_on_filter(self, viewed_date=None):
if self._sort_includes('lastviewed') or viewed_date:
longTimeAgo = timezone.now() - timedelta(days=100000)
self._queryset = self._queryset.annotate(
lastviewed=Coalesce(Max(Case(When(viewlog__created_by=self._request.user, viewlog__space=self._request.space, then='viewlog__created_at'))), Value(longTimeAgo))
)
self._queryset = self._queryset.annotate(lastviewed=Coalesce(
Max(Case(When(viewlog__created_by=self._request.user, viewlog__space=self._request.space, then='viewlog__created_at'))), Value(longTimeAgo)))
if viewed_date is None:
return
lessthan = '-' in viewed_date[:1]
viewed_date = date(*[int(x)for x in viewed_date.split('-') if x != ''])
viewed_date = date(*[int(x) for x in viewed_date.split('-') if x != ''])
if lessthan:
self._queryset = self._queryset.filter(lastviewed__date__lte=viewed_date).exclude(lastviewed=longTimeAgo)
@@ -287,11 +279,9 @@ class RecipeSearch():
# TODO make new days a user-setting
if not self._new:
return
self._queryset = self._queryset.annotate(
new_recipe=Case(
When(created_at__gte=(timezone.now() - timedelta(days=new_days)), then=('pk')),
default=Value(0),
)
self._queryset = (
self._queryset.annotate(new_recipe=Case(
When(created_at__gte=(timezone.now() - timedelta(days=new_days)), then=('pk')), default=Value(0), ))
)
def _recently_viewed(self, num_recent=None):
@@ -301,25 +291,19 @@ class RecipeSearch():
Max(Case(When(viewlog__created_by=self._request.user, viewlog__space=self._request.space, then='viewlog__pk'))), Value(0)))
return
num_recent_recipes = (
ViewLog.objects.filter(created_by=self._request.user, space=self._request.space)
.values('recipe').annotate(recent=Max('created_at')).order_by('-recent')[:num_recent]
)
num_recent_recipes = ViewLog.objects.filter(created_by=self._request.user, space=self._request.space).values(
'recipe').annotate(recent=Max('created_at')).order_by('-recent')[:num_recent]
self._queryset = self._queryset.annotate(recent=Coalesce(Max(Case(When(pk__in=num_recent_recipes.values('recipe'), then='viewlog__pk'))), Value(0)))
def _favorite_recipes(self, times_cooked=None):
if self._sort_includes('favorite') or times_cooked:
less_than = '-' in (str(times_cooked) or []) and not self._sort_includes('-favorite')
less_than = '-' in (times_cooked or []) or not self._sort_includes('-favorite')
if less_than:
default = 1000
else:
default = 0
favorite_recipes = (
CookLog.objects.filter(created_by=self._request.user, space=self._request.space, recipe=OuterRef('pk'))
.values('recipe')
.annotate(count=Count('pk', distinct=True))
.values('count')
)
favorite_recipes = CookLog.objects.filter(created_by=self._request.user, space=self._request.space, recipe=OuterRef('pk')
).values('recipe').annotate(count=Count('pk', distinct=True)).values('count')
self._queryset = self._queryset.annotate(favorite=Coalesce(Subquery(favorite_recipes), default))
if times_cooked is None:
return
@@ -327,7 +311,7 @@ class RecipeSearch():
if times_cooked == '0':
self._queryset = self._queryset.filter(favorite=0)
elif less_than:
self._queryset = self._queryset.filter(favorite__lte=int(times_cooked.replace('-', ''))).exclude(favorite=0)
self._queryset = self._queryset.filter(favorite__lte=int(times_cooked[1:])).exclude(favorite=0)
else:
self._queryset = self._queryset.filter(favorite__gte=int(times_cooked))
@@ -409,9 +393,8 @@ class RecipeSearch():
def rating_filter(self, rating=None):
if rating or self._sort_includes('rating'):
lessthan = '-' in (rating or [])
reverse = 'rating' in (self._sort_order or []) and '-rating' not in (self._sort_order or [])
if lessthan or reverse:
lessthan = self._sort_includes('-rating') or '-' in (rating or [])
if lessthan:
default = 100
else:
default = 0
@@ -463,7 +446,7 @@ class RecipeSearch():
if not steps:
return
if not isinstance(steps, list):
steps = [steps]
steps = [steps]
self._queryset = self._queryset.filter(steps__id__in=steps)
def build_fulltext_filters(self, string=None):
@@ -520,62 +503,263 @@ class RecipeSearch():
trigram += TrigramSimilarity(f, self._string)
else:
trigram = TrigramSimilarity(f, self._string)
self._fuzzy_match = (
Recipe.objects.annotate(trigram=trigram)
.distinct()
.annotate(simularity=Max('trigram'))
.values('id', 'simularity')
.filter(simularity__gt=self._search_prefs.trigram_threshold)
)
self._fuzzy_match = Recipe.objects.annotate(trigram=trigram).distinct(
).annotate(simularity=Max('trigram')).values('id', 'simularity').filter(simularity__gt=self._search_prefs.trigram_threshold)
self._filters += [Q(pk__in=self._fuzzy_match.values('pk'))]
def _makenow_filter(self, missing=None):
if missing is None or (isinstance(missing, bool) and missing == False):
if missing is None or (type(missing) == bool and missing == False):
return
shopping_users = [*self._request.user.get_shopping_share(), self._request.user]
onhand_filter = (
Q(steps__ingredients__food__onhand_users__in=shopping_users) # food onhand
# or substitute food onhand
| Q(steps__ingredients__food__substitute__onhand_users__in=shopping_users)
| Q(steps__ingredients__food__in=self.__children_substitute_filter(shopping_users))
| Q(steps__ingredients__food__in=self.__sibling_substitute_filter(shopping_users))
Q(steps__ingredients__food__onhand_users__in=shopping_users) # food onhand
| Q(steps__ingredients__food__substitute__onhand_users__in=shopping_users) # or substitute food onhand
| Q(steps__ingredients__food__in=self.__children_substitute_filter(shopping_users))
| Q(steps__ingredients__food__in=self.__sibling_substitute_filter(shopping_users))
)
makenow_recipes = Recipe.objects.annotate(
count_food=Count('steps__ingredients__food__pk', filter=Q(steps__ingredients__food__isnull=False), distinct=True),
count_onhand=Count('steps__ingredients__food__pk', filter=onhand_filter, distinct=True),
count_ignore_shopping=Count(
'steps__ingredients__food__pk', filter=Q(steps__ingredients__food__ignore_shopping=True, steps__ingredients__food__recipe__isnull=True), distinct=True
),
count_ignore_shopping=Count('steps__ingredients__food__pk', filter=Q(steps__ingredients__food__ignore_shopping=True,
steps__ingredients__food__recipe__isnull=True), distinct=True),
has_child_sub=Case(When(steps__ingredients__food__in=self.__children_substitute_filter(shopping_users), then=Value(1)), default=Value(0)),
has_sibling_sub=Case(When(steps__ingredients__food__in=self.__sibling_substitute_filter(shopping_users), then=Value(1)), default=Value(0))
).annotate(missingfood=F('count_food') - F('count_onhand') - F('count_ignore_shopping')).filter(missingfood__lte=missing)
).annotate(missingfood=F('count_food') - F('count_onhand') - F('count_ignore_shopping')).filter(missingfood=missing)
self._queryset = self._queryset.distinct().filter(id__in=makenow_recipes.values('id'))
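The make-now filter annotates each recipe with three counts and keeps those where count_food - count_onhand - count_ignore_shopping stays at or below the allowed number of missing foods (the older line compared with == instead of <=). A worked pure-Python version of that arithmetic:

def missing_foods(count_food, count_onhand, count_ignore_shopping):
    # number of required foods that are neither on hand nor ignored for shopping
    return count_food - count_onhand - count_ignore_shopping

def make_now(recipes, missing=0):
    # keep recipes whose missing-food count is at most `missing` (mirrors missingfood__lte)
    return [r['id'] for r in recipes
            if missing_foods(r['count_food'], r['count_onhand'], r['count_ignore_shopping']) <= missing]

recipes = [
    {'id': 1, 'count_food': 5, 'count_onhand': 5, 'count_ignore_shopping': 0},  # nothing missing
    {'id': 2, 'count_food': 6, 'count_onhand': 4, 'count_ignore_shopping': 1},  # one food missing
]
print(make_now(recipes))             # [1]
print(make_now(recipes, missing=1))  # [1, 2]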
@staticmethod
def __children_substitute_filter(shopping_users=None):
children_onhand_subquery = Food.objects.filter(path__startswith=OuterRef('path'), depth__gt=OuterRef('depth'), onhand_users__in=shopping_users)
return (
Food.objects.exclude( # list of foods that are onhand and children of: foods that are not onhand and are set to use children as substitutes
Q(onhand_users__in=shopping_users) | Q(ignore_shopping=True, recipe__isnull=True) | Q(substitute__onhand_users__in=shopping_users)
)
.exclude(depth=1, numchild=0)
.filter(substitute_children=True)
.annotate(child_onhand_count=Exists(children_onhand_subquery))
.filter(child_onhand_count=True)
children_onhand_subquery = Food.objects.filter(
path__startswith=OuterRef('path'),
depth__gt=OuterRef('depth'),
onhand_users__in=shopping_users
)
return Food.objects.exclude( # list of foods that are onhand and children of: foods that are not onhand and are set to use children as substitutes
Q(onhand_users__in=shopping_users)
| Q(ignore_shopping=True, recipe__isnull=True)
| Q(substitute__onhand_users__in=shopping_users)
).exclude(depth=1, numchild=0
).filter(substitute_children=True
).annotate(child_onhand_count=Exists(children_onhand_subquery)
).filter(child_onhand_count=True)
@staticmethod
def __sibling_substitute_filter(shopping_users=None):
sibling_onhand_subquery = Food.objects.filter(
path__startswith=Substr(OuterRef('path'), 1, Food.steplen * (OuterRef('depth') - 1)), depth=OuterRef('depth'), onhand_users__in=shopping_users
path__startswith=Substr(OuterRef('path'), 1, Food.steplen * (OuterRef('depth') - 1)),
depth=OuterRef('depth'),
onhand_users__in=shopping_users
)
return (
Food.objects.exclude( # list of foods that are onhand and siblings of: foods that are not onhand and are set to use siblings as substitutes
Q(onhand_users__in=shopping_users) | Q(ignore_shopping=True, recipe__isnull=True) | Q(substitute__onhand_users__in=shopping_users)
)
.exclude(depth=1, numchild=0)
.filter(substitute_siblings=True)
.annotate(sibling_onhand=Exists(sibling_onhand_subquery))
.filter(sibling_onhand=True)
return Food.objects.exclude( # list of foods that are onhand and siblings of: foods that are not onhand and are set to use siblings as substitutes
Q(onhand_users__in=shopping_users)
| Q(ignore_shopping=True, recipe__isnull=True)
| Q(substitute__onhand_users__in=shopping_users)
).exclude(depth=1, numchild=0
).filter(substitute_siblings=True
).annotate(sibling_onhand=Exists(sibling_onhand_subquery)
).filter(sibling_onhand=True)
class RecipeFacet():
class CacheEmpty(Exception):
pass
def __init__(self, request, queryset=None, hash_key=None, cache_timeout=3600):
if hash_key is None and queryset is None:
raise ValueError(_("One of queryset or hash_key must be provided"))
self._request = request
self._queryset = queryset
self.hash_key = hash_key or str(hash(self._queryset.query))
self._SEARCH_CACHE_KEY = f"recipes_filter_{self.hash_key}"
self._cache_timeout = cache_timeout
self._cache = caches['default'].get(self._SEARCH_CACHE_KEY, {})
if self._cache is None and self._queryset is None:
raise self.CacheEmpty("No queryset provided and cache empty")
self.Keywords = self._cache.get('Keywords', None)
self.Foods = self._cache.get('Foods', None)
self.Books = self._cache.get('Books', None)
self.Ratings = self._cache.get('Ratings', None)
# TODO Move Recent to recipe annotation/serializer: requires changes in RecipeSearch(), RecipeSearchView.vue and the serializer
self.Recent = self._cache.get('Recent', None)
if self._queryset is not None:
self._recipe_list = list(self._queryset.values_list('id', flat=True))
self._search_params = {
'keyword_list': self._request.query_params.getlist('keywords', []),
'food_list': self._request.query_params.getlist('foods', []),
'book_list': self._request.query_params.getlist('book', []),
'search_keywords_or': str2bool(self._request.query_params.get('keywords_or', True)),
'search_foods_or': str2bool(self._request.query_params.get('foods_or', True)),
'search_books_or': str2bool(self._request.query_params.get('books_or', True)),
'space': self._request.space,
}
elif self.hash_key is not None:
self._recipe_list = self._cache.get('recipe_list', [])
self._search_params = {
'keyword_list': self._cache.get('keyword_list', None),
'food_list': self._cache.get('food_list', None),
'book_list': self._cache.get('book_list', None),
'search_keywords_or': self._cache.get('search_keywords_or', None),
'search_foods_or': self._cache.get('search_foods_or', None),
'search_books_or': self._cache.get('search_books_or', None),
'space': self._cache.get('space', None),
}
self._cache = {
**self._search_params,
'recipe_list': self._recipe_list,
'Ratings': self.Ratings,
'Recent': self.Recent,
'Keywords': self.Keywords,
'Foods': self.Foods,
'Books': self.Books
}
caches['default'].set(self._SEARCH_CACHE_KEY, self._cache, self._cache_timeout)
def get_facets(self, from_cache=False):
if from_cache:
return {
'cache_key': self.hash_key or '',
'Ratings': self.Ratings or {},
'Recent': self.Recent or [],
'Keywords': self.Keywords or [],
'Foods': self.Foods or [],
'Books': self.Books or []
}
return {
'cache_key': self.hash_key,
'Ratings': self.get_ratings(),
'Recent': self.get_recent(),
'Keywords': self.get_keywords(),
'Foods': self.get_foods(),
'Books': self.get_books()
}
def set_cache(self, key, value):
self._cache = {**self._cache, key: value}
caches['default'].set(
self._SEARCH_CACHE_KEY,
self._cache,
self._cache_timeout
)
def get_books(self):
if self.Books is None:
self.Books = []
return self.Books
def get_keywords(self):
if self.Keywords is None:
if self._search_params['search_keywords_or']:
keywords = Keyword.objects.filter(space=self._request.space).distinct()
else:
keywords = Keyword.objects.filter(Q(recipe__in=self._recipe_list) | Q(depth=1)).filter(space=self._request.space).distinct()
# set keywords to root objects only
keywords = self._keyword_queryset(keywords)
self.Keywords = [{**x, 'children': None} if x['numchild'] > 0 else x for x in list(keywords)]
self.set_cache('Keywords', self.Keywords)
return self.Keywords
def get_foods(self):
if self.Foods is None:
# if using an OR search, annotate all foods; otherwise, just those that appear in the results
if self._search_params['search_foods_or']:
foods = Food.objects.filter(space=self._request.space).distinct()
else:
foods = Food.objects.filter(Q(ingredient__step__recipe__in=self._recipe_list) | Q(depth=1)).filter(space=self._request.space).distinct()
# set foods to root objects only
foods = self._food_queryset(foods)
self.Foods = [{**x, 'children': None} if x['numchild'] > 0 else x for x in list(foods)]
self.set_cache('Foods', self.Foods)
return self.Foods
def get_books(self):
if self.Books is None:
self.Books = []
return self.Books
def get_ratings(self):
if self.Ratings is None:
if not self._request.space.demo and self._request.space.show_facet_count:
if self._queryset is None:
self._queryset = Recipe.objects.filter(id__in=self._recipe_list)
rating_qs = self._queryset.annotate(rating=Round(Avg(Case(When(cooklog__created_by=self._request.user, then='cooklog__rating'), default=Value(0)))))
self.Ratings = dict(Counter(r.rating for r in rating_qs))
else:
self.Rating = {}
self.set_cache('Ratings', self.Ratings)
return self.Ratings
def get_recent(self):
if self.Recent is None:
# TODO make days of recent recipe a setting
recent_recipes = ViewLog.objects.filter(created_by=self._request.user, space=self._request.space, created_at__gte=timezone.now() - timedelta(days=14)
).values_list('recipe__pk', flat=True)
self.Recent = list(recent_recipes)
self.set_cache('Recent', self.Recent)
return self.Recent
def add_food_children(self, id):
try:
food = Food.objects.get(id=id)
nodes = food.get_ancestors()
except Food.DoesNotExist:
return self.get_facets()
foods = self._food_queryset(food.get_children(), food)
deep_search = self.Foods
for node in nodes:
index = next((i for i, x in enumerate(deep_search) if x["id"] == node.id), None)
deep_search = deep_search[index]['children']
index = next((i for i, x in enumerate(deep_search) if x["id"] == food.id), None)
deep_search[index]['children'] = [{**x, 'children': None} if x['numchild'] > 0 else x for x in list(foods)]
self.set_cache('Foods', self.Foods)
return self.get_facets()
def add_keyword_children(self, id):
try:
keyword = Keyword.objects.get(id=id)
nodes = keyword.get_ancestors()
except Keyword.DoesNotExist:
return self.get_facets()
keywords = self._keyword_queryset(keyword.get_children(), keyword)
deep_search = self.Keywords
for node in nodes:
index = next((i for i, x in enumerate(deep_search) if x["id"] == node.id), None)
deep_search = deep_search[index]['children']
index = next((i for i, x in enumerate(deep_search) if x["id"] == keyword.id), None)
deep_search[index]['children'] = [{**x, 'children': None} if x['numchild'] > 0 else x for x in list(keywords)]
self.set_cache('Keywords', self.Keywords)
return self.get_facets()
def _recipe_count_queryset(self, field, depth=1, steplen=4):
return Recipe.objects.filter(**{f'{field}__path__startswith': OuterRef('path'), f'{field}__depth__gte': depth}, id__in=self._recipe_list, space=self._request.space
).annotate(count=Coalesce(Func('pk', function='Count'), 0)).values('count')
def _keyword_queryset(self, queryset, keyword=None):
depth = getattr(keyword, 'depth', 0) + 1
steplen = depth * Keyword.steplen
if not self._request.space.demo and self._request.space.show_facet_count:
return queryset.annotate(count=Coalesce(Subquery(self._recipe_count_queryset('keywords', depth, steplen)), 0)
).filter(depth=depth, count__gt=0
).values('id', 'name', 'count', 'numchild').order_by(Lower('name').asc())[:200]
else:
return queryset.filter(depth=depth).values('id', 'name', 'numchild').order_by(Lower('name').asc())
def _food_queryset(self, queryset, food=None):
depth = getattr(food, 'depth', 0) + 1
steplen = depth * Food.steplen
if not self._request.space.demo and self._request.space.show_facet_count:
return queryset.annotate(count=Coalesce(Subquery(self._recipe_count_queryset('steps__ingredients__food', depth, steplen)), 0)
).filter(depth__lte=depth, count__gt=0
).values('id', 'name', 'count', 'numchild').order_by(Lower('name').asc())[:200]
else:
return queryset.filter(depth__lte=depth).values('id', 'name', 'numchild').order_by(Lower('name').asc())
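For orientation, a minimal pure-Python sketch of the lazy facet tree that add_food_children()/add_keyword_children() above maintain: entries start with children=None and are filled in only when a node is expanded. The ids, names and the attach_children helper are made up for illustration.

facets = [{'id': 1, 'name': 'Vegetable', 'numchild': 2, 'children': None}]

def attach_children(facet_list, ancestor_ids, node_id, children):
    # walk down the already-expanded ancestors, then attach children to the requested node
    level = facet_list
    for ancestor in ancestor_ids:
        index = next((i for i, x in enumerate(level) if x['id'] == ancestor), None)
        level = level[index]['children']
    index = next((i for i, x in enumerate(level) if x['id'] == node_id), None)
    level[index]['children'] = children

attach_children(facets, [], 1, [{'id': 5, 'name': 'Carrot', 'numchild': 0, 'children': None}])
print(facets[0]['children'][0]['name'])  # Carrot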

View File

@@ -1,6 +1,7 @@
import random
import re
import traceback
from html import unescape
from unicodedata import decomposition
from django.utils.dateparse import parse_duration
from django.utils.translation import gettext as _
@@ -9,34 +10,16 @@ from isodate.isoerror import ISO8601Error
from pytube import YouTube
from recipe_scrapers._utils import get_host_name, get_minutes
from cookbook.helper.automation_helper import AutomationEngine
from cookbook.helper import recipe_url_import as helper
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.models import Automation, Keyword, PropertyType
from cookbook.models import Keyword
# from recipe_scrapers._utils import get_minutes ## temporary until/unless upstream incorporates get_minutes() PR
def get_from_scraper(scrape, request):
# converting the scrape_html object to the existing json format based on ld+json
recipe_json = {'steps': [], 'internal': True}
keywords = []
# assign source URL
try:
source_url = scrape.canonical_url()
except Exception:
try:
source_url = scrape.url
except Exception:
pass
if source_url:
recipe_json['source_url'] = source_url
try:
keywords.append(source_url.replace('http://', '').replace('https://', '').split('/')[0])
except Exception:
recipe_json['source_url'] = ''
automation_engine = AutomationEngine(request, source=recipe_json.get('source_url'))
# assign recipe name
# converting the scrape_me object to the existing json format based on ld+json
recipe_json = {}
try:
recipe_json['name'] = parse_name(scrape.title()[:128] or None)
except Exception:
@@ -47,13 +30,6 @@ def get_from_scraper(scrape, request):
except Exception:
recipe_json['name'] = ''
if isinstance(recipe_json['name'], list) and len(recipe_json['name']) > 0:
recipe_json['name'] = recipe_json['name'][0]
recipe_json['name'] = automation_engine.apply_regex_replace_automation(recipe_json['name'], Automation.NAME_REPLACE)
# assign recipe description
# TODO notify the user if the limit is reached - descriptions longer than 256 characters will be truncated
try:
description = scrape.description() or None
except Exception:
@@ -64,20 +40,16 @@ def get_from_scraper(scrape, request):
except Exception:
description = ''
recipe_json['description'] = parse_description(description)
recipe_json['description'] = automation_engine.apply_regex_replace_automation(recipe_json['description'], Automation.DESCRIPTION_REPLACE)
recipe_json['internal'] = True
# assign servings attributes
try:
# don't use scrape.yields() as this will always return "x servings" or "x items", should be improved in scrapers directly
servings = scrape.schema.data.get('recipeYield') or 1
servings = scrape.schema.data.get('recipeYield') or 1 # don't use scrape.yields() as this will always return "x servings" or "x items", should be improved in scrapers directly
except Exception:
servings = 1
recipe_json['servings'] = parse_servings(servings)
recipe_json['servings_text'] = parse_servings_text(servings)
# assign time attributes
try:
recipe_json['working_time'] = get_minutes(scrape.prep_time()) or 0
except Exception:
@@ -102,7 +74,6 @@ def get_from_scraper(scrape, request):
except Exception:
pass
# assign image
try:
recipe_json['image'] = parse_image(scrape.image()) or None
except Exception:
@@ -113,7 +84,7 @@ def get_from_scraper(scrape, request):
except Exception:
recipe_json['image'] = ''
# assign keywords
keywords = []
try:
if scrape.schema.data.get("keywords"):
keywords += listify_keywords(scrape.schema.data.get("keywords"))
@@ -139,59 +110,59 @@ def get_from_scraper(scrape, request):
pass
try:
if scrape.author():
keywords.append(scrape.author())
source_url = scrape.canonical_url()
except Exception:
pass
try:
source_url = scrape.url
except Exception:
pass
if source_url:
recipe_json['source_url'] = source_url
try:
keywords.append(source_url.replace('http://', '').replace('https://', '').split('/')[0])
except Exception:
pass
try:
recipe_json['keywords'] = parse_keywords(list(set(map(str.casefold, keywords))), request)
recipe_json['keywords'] = parse_keywords(list(set(map(str.casefold, keywords))), request.space)
except AttributeError:
recipe_json['keywords'] = keywords
ingredient_parser = IngredientParser(request, True)
# assign steps
recipe_json['steps'] = []
try:
for i in parse_instructions(scrape.instructions()):
recipe_json['steps'].append({
'instruction': i,
'ingredients': [],
'show_ingredients_table': request.user.userpreference.show_step_ingredients,
})
recipe_json['steps'].append({'instruction': i, 'ingredients': [], })
except Exception:
pass
if len(recipe_json['steps']) == 0:
recipe_json['steps'].append({
'instruction': '',
'ingredients': [],
})
recipe_json['steps'].append({'instruction': '', 'ingredients': [], })
recipe_json['description'] = recipe_json['description'][:512]
if len(recipe_json['description']) > 256: # split at 256 as long descriptions don't look good on recipe cards
recipe_json['steps'][0]['instruction'] = f"*{recipe_json['description']}* \n\n" + recipe_json['steps'][0]['instruction']
if len(parse_description(description)) > 256: # split at 256 as long descriptions don't look good on recipe cards
recipe_json['steps'][0]['instruction'] = f'*{parse_description(description)}* \n\n' + recipe_json['steps'][0]['instruction']
else:
recipe_json['description'] = parse_description(description)[:512]
try:
for x in scrape.ingredients():
if x.strip() != '':
try:
amount, unit, ingredient, note = ingredient_parser.parse(x)
ingredient = {
'amount': amount,
'food': {
'name': ingredient,
},
'unit': None,
'note': note,
'original_text': x
}
if unit:
ingredient['unit'] = {
'name': unit,
}
recipe_json['steps'][0]['ingredients'].append(ingredient)
except Exception:
recipe_json['steps'][0]['ingredients'].append({
try:
amount, unit, ingredient, note = ingredient_parser.parse(x)
ingredient = {
'amount': amount,
'food': {
'name': ingredient,
},
'unit': None,
'note': note,
'original_text': x
}
if unit:
ingredient['unit'] = {'name': unit, }
recipe_json['steps'][0]['ingredients'].append(ingredient)
except Exception:
recipe_json['steps'][0]['ingredients'].append(
{
'amount': 0,
'unit': None,
'food': {
@@ -199,48 +170,14 @@ def get_from_scraper(scrape, request):
},
'note': '',
'original_text': x
})
}
)
except Exception:
pass
try:
recipe_json['properties'] = get_recipe_properties(request.space, scrape.schema.nutrients())
print(recipe_json['properties'])
except Exception:
traceback.print_exc()
pass
for s in recipe_json['steps']:
s['instruction'] = automation_engine.apply_regex_replace_automation(s['instruction'], Automation.INSTRUCTION_REPLACE)
# re.sub(a.param_2, a.param_3, s['instruction'])
return recipe_json
def get_recipe_properties(space, property_data):
# {'servingSize': '1', 'calories': '302 kcal', 'proteinContent': '7,66g', 'fatContent': '11,56g', 'carbohydrateContent': '41,33g'}
properties = {
"property-calories": "calories",
"property-carbohydrates": "carbohydrateContent",
"property-proteins": "proteinContent",
"property-fats": "fatContent",
}
recipe_properties = []
for pt in PropertyType.objects.filter(space=space, open_data_slug__in=list(properties.keys())).all():
for p in list(properties.keys()):
if pt.open_data_slug == p:
if properties[p] in property_data:
recipe_properties.append({
'property_type': {
'id': pt.id,
'name': pt.name,
},
'property_amount': parse_servings(property_data[properties[p]]) / parse_servings(property_data['servingSize']),
})
return recipe_properties
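A worked example of the per-serving arithmetic used by get_recipe_properties(), with a hypothetical nutrient payload and a leading_int() stand-in for parse_servings():

import re

property_data = {'servingSize': '4', 'calories': '1208 kcal'}  # hypothetical schema.org nutrients

def leading_int(value):
    # same idea as parse_servings(): keep the first integer found in the string
    return int(re.search(r'\d+', str(value)).group())

per_serving = leading_int(property_data['calories']) / leading_int(property_data['servingSize'])
print(per_serving)  # 302.0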
def get_from_youtube_scraper(url, request):
"""A YouTube Information Scraper."""
kw, created = Keyword.objects.get_or_create(name='YouTube', space=request.space)
@@ -252,27 +189,21 @@ def get_from_youtube_scraper(url, request):
'working_time': 0,
'waiting_time': 0,
'image': "",
'keywords': [{
'name': kw.name,
'label': kw.name,
'id': kw.pk
}],
'keywords': [{'name': kw.name, 'label': kw.name, 'id': kw.pk}],
'source_url': url,
'steps': [{
'ingredients': [],
'instruction': ''
}]
'steps': [
{
'ingredients': [],
'instruction': ''
}
]
}
try:
automation_engine = AutomationEngine(request, source=url)
video = YouTube(url)
video.streams.first() # this is required to execute some kind of generator/web request that fetches the description
default_recipe_json['name'] = automation_engine.apply_regex_replace_automation(video.title, Automation.NAME_REPLACE)
video = YouTube(url=url)
default_recipe_json['name'] = video.title
default_recipe_json['image'] = video.thumbnail_url
if video.description:
default_recipe_json['steps'][0]['instruction'] = automation_engine.apply_regex_replace_automation(video.description, Automation.INSTRUCTION_REPLACE)
default_recipe_json['steps'][0]['instruction'] = video.description
except Exception:
pass
@@ -280,7 +211,7 @@ def get_from_youtube_scraper(url, request):
def parse_name(name):
if isinstance(name, list):
if type(name) == list:
try:
name = name[0]
except Exception:
@@ -293,27 +224,10 @@ def parse_description(description):
def clean_instruction_string(instruction):
# handle HTML tags that can be converted to markup
normalized_string = instruction \
.replace("<nobr>", "**") \
.replace("</nobr>", "**") \
.replace("<strong>", "**") \
.replace("</strong>", "**")
normalized_string = normalize_string(normalized_string)
normalized_string = normalize_string(instruction)
normalized_string = normalized_string.replace('\n', ' \n')
normalized_string = normalized_string.replace(' \n \n', '\n\n')
# handle unsupported, special UTF8 character in Thermomix-specific instructions,
# that happen in nearly every recipe on Cookidoo, Zaubertopf Club, Rezeptwelt
# and in Thermomix-specific recipes on many other sites
return normalized_string \
.replace("", _('reverse rotation')) \
.replace("", _('careful rotation')) \
.replace("", _('knead')) \
.replace("Andicken ", _('thicken')) \
.replace("Erwärmen ", _('warm up')) \
.replace("Fermentieren ", _('ferment')) \
.replace("Sous-vide ", _("sous-vide"))
return normalized_string
def parse_instructions(instructions):
@@ -324,16 +238,16 @@ def parse_instructions(instructions):
"""
instruction_list = []
if isinstance(instructions, list):
if type(instructions) == list:
for i in instructions:
if isinstance(i, str):
if type(i) == str:
instruction_list.append(clean_instruction_string(i))
else:
if 'text' in i:
instruction_list.append(clean_instruction_string(i['text']))
elif 'itemListElement' in i:
for ile in i['itemListElement']:
if isinstance(ile, str):
if type(ile) == str:
instruction_list.append(clean_instruction_string(ile))
elif 'text' in ile:
instruction_list.append(clean_instruction_string(ile['text']))
@@ -349,13 +263,13 @@ def parse_image(image):
# check if list of images is returned, take first if so
if not image:
return None
if isinstance(image, list):
if type(image) == list:
for pic in image:
if (isinstance(pic, str)) and (pic[:4] == 'http'):
if (type(pic) == str) and (pic[:4] == 'http'):
image = pic
elif 'url' in pic:
image = pic['url']
elif isinstance(image, dict):
elif type(image) == dict:
if 'url' in image:
image = image['url']
@@ -366,30 +280,25 @@ def parse_image(image):
def parse_servings(servings):
if isinstance(servings, str):
if type(servings) == str:
try:
servings = int(re.search(r'\d+', servings).group())
except AttributeError:
servings = 1
elif isinstance(servings, list):
elif type(servings) == list:
try:
servings = int(re.findall(r'\b\d+\b', str(servings[0]))[0])
except (KeyError, IndexError):
servings = int(re.findall(r'\b\d+\b', servings[0])[0])
except KeyError:
servings = 1
return servings
def parse_servings_text(servings):
if isinstance(servings, str):
if type(servings) == str:
try:
servings = re.sub("\\d+", '', servings).strip()
servings = re.sub("\d+", '', servings).strip()
except Exception:
servings = ''
if isinstance(servings, list):
try:
servings = parse_servings_text(servings[1])
except Exception:
pass
return str(servings)[:32]
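A quick usage sketch of the servings helpers above, assuming they are imported from cookbook.helper.recipe_url_import as elsewhere in this repo (expected outputs in comments):

from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text

print(parse_servings('Serves 4 people'))     # 4
print(parse_servings(['6 portions']))        # 6
print(parse_servings_text('4 servings'))     # 'servings'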
@@ -402,7 +311,7 @@ def parse_time(recipe_time):
recipe_time = round(iso_parse_duration(recipe_time).seconds / 60)
except ISO8601Error:
try:
if (isinstance(recipe_time, list) and len(recipe_time) > 0):
if (type(recipe_time) == list and len(recipe_time) > 0):
recipe_time = recipe_time[0]
recipe_time = round(parse_duration(recipe_time).seconds / 60)
except AttributeError:
@@ -411,18 +320,13 @@ def parse_time(recipe_time):
return recipe_time
def parse_keywords(keyword_json, request):
def parse_keywords(keyword_json, space):
keywords = []
automation_engine = AutomationEngine(request)
# keywords as list
for kw in keyword_json:
kw = normalize_string(kw)
# if alias exists use that instead
if len(kw) != 0:
kw = automation_engine.apply_keyword_automation(kw)
if k := Keyword.objects.filter(name__iexact=kw, space=request.space).first():
if k := Keyword.objects.filter(name=kw, space=space).first():
keywords.append({'label': str(k), 'name': k.name, 'id': k.id})
else:
keywords.append({'label': kw, 'name': kw})
@@ -433,15 +337,15 @@ def parse_keywords(keyword_json, request):
def listify_keywords(keyword_list):
# keywords as string
try:
if isinstance(keyword_list[0], dict):
if type(keyword_list[0]) == dict:
return keyword_list
except (KeyError, IndexError):
pass
if isinstance(keyword_list, str):
if type(keyword_list) == str:
keyword_list = keyword_list.split(',')
# keywords as string in list
if (isinstance(keyword_list, list) and len(keyword_list) == 1 and ',' in keyword_list[0]):
if (type(keyword_list) == list and len(keyword_list) == 1 and ',' in keyword_list[0]):
keyword_list = keyword_list[0].split(',')
return [x.strip() for x in keyword_list]
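A short sketch of the inputs listify_keywords() accepts and what it returns (same import path assumed):

from cookbook.helper.recipe_url_import import listify_keywords

print(listify_keywords('vegan, quick, dinner'))  # ['vegan', 'quick', 'dinner']
print(listify_keywords(['vegan, quick']))        # ['vegan', 'quick']
print(listify_keywords([{'name': 'vegan'}]))     # returned unchanged: already a list of dicts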
@@ -458,7 +362,10 @@ def normalize_string(string):
def iso_duration_to_minutes(string):
match = re.match(r'P((?P<years>\d+)Y)?((?P<months>\d+)M)?((?P<weeks>\d+)W)?((?P<days>\d+)D)?T((?P<hours>\d+)H)?((?P<minutes>\d+)M)?((?P<seconds>\d+)S)?', string).groupdict()
match = re.match(
r'P((?P<years>\d+)Y)?((?P<months>\d+)M)?((?P<weeks>\d+)W)?((?P<days>\d+)D)?T((?P<hours>\d+)H)?((?P<minutes>\d+)M)?((?P<seconds>\d+)S)?',
string
).groupdict()
return int(match['days'] or 0) * 24 * 60 + int(match['hours'] or 0) * 60 + int(match['minutes'] or 0)
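Two examples of the ISO 8601 duration parsing above (import path assumed as before):

from cookbook.helper.recipe_url_import import iso_duration_to_minutes

print(iso_duration_to_minutes('PT1H30M'))  # 90
print(iso_duration_to_minutes('P1DT2H'))   # 1560  (1 day + 2 hours)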
@@ -489,18 +396,3 @@ def get_images_from_soup(soup, url):
if 'http' in u:
images.append(u)
return images
def clean_dict(input_dict, key):
if isinstance(input_dict, dict):
for x in list(input_dict):
if x == key:
del input_dict[x]
elif isinstance(input_dict[x], dict):
input_dict[x] = clean_dict(input_dict[x], key)
elif isinstance(input_dict[x], list):
temp_list = []
for e in input_dict[x]:
temp_list.append(clean_dict(e, key))
return input_dict
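A small example of clean_dict() stripping a key recursively; since dicts are mutated in place, entries nested inside lists are cleaned as well:

from cookbook.helper.recipe_url_import import clean_dict

data = {'@type': 'Recipe', 'name': 'Soup', 'steps': [{'@type': 'HowToStep', 'text': 'Chop'}]}
clean_dict(data, '@type')
print(data)  # {'name': 'Soup', 'steps': [{'text': 'Chop'}]}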

View File

@@ -1,6 +1,8 @@
from django.urls import reverse
from django_scopes import scope, scopes_disabled
from oauth2_provider.contrib.rest_framework import OAuth2Authentication
from rest_framework.authentication import TokenAuthentication
from rest_framework.authtoken.models import Token
from rest_framework.exceptions import AuthenticationFailed
from cookbook.views import views
@@ -48,6 +50,7 @@ class ScopeMiddleware:
return views.no_groups(request)
request.space = user_space.space
# with scopes_disabled():
with scope(space=request.space):
return self.get_response(request)
else:

View File

@@ -0,0 +1,68 @@
import json
from recipe_scrapers._abstract import AbstractScraper
class CooksIllustrated(AbstractScraper):
@classmethod
def host(cls, site='cooksillustrated'):
return {
'cooksillustrated': f"{site}.com",
'americastestkitchen': f"{site}.com",
'cookscountry': f"{site}.com",
}.get(site)
def title(self):
return self.schema.title()
def image(self):
return self.schema.image()
def total_time(self):
if not self.recipe:
self.get_recipe()
return self.recipe['recipeTimeNote']
def yields(self):
if not self.recipe:
self.get_recipe()
return self.recipe['yields']
def ingredients(self):
if not self.recipe:
self.get_recipe()
ingredients = []
for group in self.recipe['ingredientGroups']:
ingredients += group['fields']['recipeIngredientItems']
return [
"{} {} {}{}".format(
i['fields']['qty'] or '',
i['fields']['measurement'] or '',
i['fields']['ingredient']['fields']['title'] or '',
i['fields']['postText'] or ''
)
for i in ingredients
]
def instructions(self):
if not self.recipe:
self.get_recipe()
if self.recipe.get('headnote', False):
i = ['Note: ' + self.recipe.get('headnote', '')]
else:
i = []
return "\n".join(
i
+ [self.recipe.get('whyThisWorks', '')]
+ [
instruction['fields']['content']
for instruction in self.recipe['instructions']
]
)
def nutrients(self):
raise NotImplementedError("This should be implemented.")
def get_recipe(self):
j = json.loads(self.soup.find(type='application/json').string)
name = list(j['props']['initialState']['content']['documents'])[0]
self.recipe = j['props']['initialState']['content']['documents'][name]
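For illustration, a minimal sketch of the embedded-JSON lookup get_recipe() relies on, using a made-up Next.js-style payload (real pages carry much larger documents):

import json
from bs4 import BeautifulSoup

html = ('<script type="application/json">'
        '{"props": {"initialState": {"content": {"documents": {"doc-1": {"yields": "4"}}}}}}'
        '</script>')
soup = BeautifulSoup(html, 'html.parser')
j = json.loads(soup.find(type='application/json').string)
name = list(j['props']['initialState']['content']['documents'])[0]
print(j['props']['initialState']['content']['documents'][name])  # {'yields': '4'}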

View File

@@ -0,0 +1,43 @@
from json import JSONDecodeError
from bs4 import BeautifulSoup
from recipe_scrapers import SCRAPERS, get_host_name
from recipe_scrapers._factory import SchemaScraperFactory
from recipe_scrapers._schemaorg import SchemaOrg
from .cooksillustrated import CooksIllustrated
CUSTOM_SCRAPERS = {
CooksIllustrated.host(site="cooksillustrated"): CooksIllustrated,
CooksIllustrated.host(site="americastestkitchen"): CooksIllustrated,
CooksIllustrated.host(site="cookscountry"): CooksIllustrated,
}
SCRAPERS.update(CUSTOM_SCRAPERS)
def text_scraper(text, url=None):
domain = None
if url:
domain = get_host_name(url)
if domain in SCRAPERS:
scraper_class = SCRAPERS[domain]
else:
scraper_class = SchemaScraperFactory.SchemaScraper
class TextScraper(scraper_class):
def __init__(
self,
html=None,
url=None,
):
self.wild_mode = False
self.meta_http_equiv = False
self.soup = BeautifulSoup(html, "html.parser")
self.url = url
self.recipe = None
try:
self.schema = SchemaOrg(html)
except (JSONDecodeError, AttributeError):
pass
return TextScraper(url=url, html=text)
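A hedged usage sketch of text_scraper() with inline schema.org markup; the exact SchemaOrg parsing behaviour depends on the installed recipe_scrapers version:

from cookbook.helper.scrapers.scrapers import text_scraper

html = '''<html><head><script type="application/ld+json">
{"@context": "https://schema.org", "@type": "Recipe", "name": "Pancakes", "recipeYield": "4"}
</script></head><body></body></html>'''

scraper = text_scraper(html, url='https://example.org/pancakes')
print(scraper.schema.data.get('name'))         # Pancakes
print(scraper.schema.data.get('recipeYield'))  # 4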

View File

@@ -1,12 +1,16 @@
from datetime import timedelta
from decimal import Decimal
from django.contrib.postgres.aggregates import ArrayAgg
from django.db.models import F, OuterRef, Q, Subquery, Value
from django.db.models.functions import Coalesce
from django.utils import timezone
from django.utils.translation import gettext as _
from cookbook.helper.HelperFunctions import Round, str2bool
from cookbook.models import (Ingredient, MealPlan, Recipe, ShoppingListEntry, ShoppingListRecipe,
SupermarketCategoryRelation)
from recipes import settings
def shopping_helper(qs, request):
@@ -26,6 +30,9 @@ def shopping_helper(qs, request):
elif checked in ['true', 1, '1']:
qs = qs.filter(checked=True)
elif checked in ['recent']:
today_start = timezone.now().replace(hour=0, minute=0, second=0)
week_ago = today_start - timedelta(days=user.userpreference.shopping_recent_days)
qs = qs.filter(Q(checked=False) | Q(completed_at__gte=week_ago))
supermarket_order = ['checked'] + supermarket_order
return qs.distinct().order_by(*supermarket_order).select_related('unit', 'food', 'ingredient', 'created_by', 'list_recipe', 'list_recipe__mealplan', 'list_recipe__recipe')
@@ -40,8 +47,6 @@ class RecipeShoppingEditor():
self.mealplan = self._kwargs.get('mealplan', None)
if type(self.mealplan) in [int, float]:
self.mealplan = MealPlan.objects.filter(id=self.mealplan, space=self.space)
if isinstance(self.mealplan, dict):
self.mealplan = MealPlan.objects.filter(id=self.mealplan['id'], space=self.space).first()
self.id = self._kwargs.get('id', None)
self._shopping_list_recipe = self.get_shopping_list_recipe(self.id, self.created_by, self.space)
@@ -62,12 +67,11 @@ class RecipeShoppingEditor():
@property
def _recipe_servings(self):
return getattr(self.recipe, 'servings', None) or getattr(getattr(self.mealplan, 'recipe', None), 'servings',
None) or getattr(getattr(self._shopping_list_recipe, 'recipe', None), 'servings', None)
return getattr(self.recipe, 'servings', None) or getattr(getattr(self.mealplan, 'recipe', None), 'servings', None) or getattr(getattr(self._shopping_list_recipe, 'recipe', None), 'servings', None)
@property
def _servings_factor(self):
return Decimal(self.servings) / Decimal(self._recipe_servings)
return Decimal(self.servings)/Decimal(self._recipe_servings)
@property
def _shared_users(self):
@@ -75,17 +79,18 @@ class RecipeShoppingEditor():
@staticmethod
def get_shopping_list_recipe(id, user, space):
return ShoppingListRecipe.objects.filter(id=id).filter(entries__space=space).filter(
Q(entries__created_by=user)
return ShoppingListRecipe.objects.filter(id=id).filter(Q(shoppinglist__space=space) | Q(entries__space=space)).filter(
Q(shoppinglist__created_by=user)
| Q(shoppinglist__shared=user)
| Q(entries__created_by=user)
| Q(entries__created_by__in=list(user.get_shopping_share()))
).prefetch_related('entries').first()
def get_recipe_ingredients(self, id, exclude_onhand=False):
if exclude_onhand:
return Ingredient.objects.filter(step__recipe__id=id, food__ignore_shopping=False, space=self.space).exclude(
food__onhand_users__id__in=[x.id for x in self._shared_users])
return Ingredient.objects.filter(step__recipe__id=id, food__ignore_shopping=False, space=self.space).exclude(food__onhand_users__id__in=[x.id for x in self._shared_users])
else:
return Ingredient.objects.filter(step__recipe__id=id, food__ignore_shopping=False, space=self.space)
return Ingredient.objects.filter(step__recipe__id=id, food__ignore_shopping=False, space=self.space)
@property
def _include_related(self):
@@ -102,10 +107,7 @@ class RecipeShoppingEditor():
self.servings = float(servings)
if mealplan := kwargs.get('mealplan', None):
if isinstance(mealplan, dict):
self.mealplan = MealPlan.objects.filter(id=mealplan['id'], space=self.space).first()
else:
self.mealplan = mealplan
self.mealplan = mealplan
self.recipe = mealplan.recipe
elif recipe := kwargs.get('recipe', None):
self.recipe = recipe
@@ -163,14 +165,14 @@ class RecipeShoppingEditor():
try:
self._shopping_list_recipe.delete()
return True
except BaseException:
except:
return False
def _add_ingredients(self, ingredients=None):
if not ingredients:
return
elif isinstance(ingredients, list):
ingredients = Ingredient.objects.filter(id__in=ingredients, food__ignore_shopping=False)
elif type(ingredients) == list:
ingredients = Ingredient.objects.filter(id__in=ingredients)
existing = self._shopping_list_recipe.entries.filter(ingredient__in=ingredients).values_list('ingredient__pk', flat=True)
add_ingredients = ingredients.exclude(id__in=existing)
@@ -192,3 +194,120 @@ class RecipeShoppingEditor():
to_delete = self._shopping_list_recipe.entries.exclude(ingredient__in=ingredients)
ShoppingListEntry.objects.filter(id__in=to_delete).delete()
self._shopping_list_recipe = self.get_shopping_list_recipe(self.id, self.created_by, self.space)
# # TODO refactor as class
# def list_from_recipe(list_recipe=None, recipe=None, mealplan=None, servings=None, ingredients=None, created_by=None, space=None, append=False):
# """
# Creates ShoppingListRecipe and associated ShoppingListEntrys from a recipe or a meal plan with a recipe
# :param list_recipe: Modify an existing ShoppingListRecipe
# :param recipe: Recipe to use as list of ingredients. One of [recipe, mealplan] are required
# :param mealplan: alternatively use a mealplan recipe as source of ingredients
# :param servings: Optional: Number of servings to use to scale shoppinglist. If servings = 0 an existing recipe list will be deleted
# :param ingredients: Ingredients, list of ingredient IDs to include on the shopping list. When not provided all ingredients will be used
# :param append: If False will remove any entries not included with ingredients, when True will append ingredients to the shopping list
# """
# r = recipe or getattr(mealplan, 'recipe', None) or getattr(list_recipe, 'recipe', None)
# if not r:
# raise ValueError(_("You must supply a recipe or mealplan"))
# created_by = created_by or getattr(ShoppingListEntry.objects.filter(list_recipe=list_recipe).first(), 'created_by', None)
# if not created_by:
# raise ValueError(_("You must supply a created_by"))
# try:
# servings = float(servings)
# except (ValueError, TypeError):
# servings = getattr(mealplan, 'servings', 1.0)
# servings_factor = servings / r.servings
# shared_users = list(created_by.get_shopping_share())
# shared_users.append(created_by)
# if list_recipe:
# created = False
# else:
# list_recipe = ShoppingListRecipe.objects.create(recipe=r, mealplan=mealplan, servings=servings)
# created = True
# related_step_ing = []
# if servings == 0 and not created:
# list_recipe.delete()
# return []
# elif ingredients:
# ingredients = Ingredient.objects.filter(pk__in=ingredients, space=space)
# else:
# ingredients = Ingredient.objects.filter(step__recipe=r, food__ignore_shopping=False, space=space)
# if exclude_onhand := created_by.userpreference.mealplan_autoexclude_onhand:
# ingredients = ingredients.exclude(food__onhand_users__id__in=[x.id for x in shared_users])
# if related := created_by.userpreference.mealplan_autoinclude_related:
# # TODO: add levels of related recipes (related recipes of related recipes) to use when auto-adding mealplans
# related_recipes = r.get_related_recipes()
# for x in related_recipes:
# # related recipe is a Step serving size is driven by recipe serving size
# # TODO once/if Steps can have a serving size this needs to be refactored
# if exclude_onhand:
#             # if steps are used more than once in a recipe or subrecipe - I don't think this results in the desired behavior
# related_step_ing += Ingredient.objects.filter(step__recipe=x, space=space).exclude(food__onhand_users__id__in=[x.id for x in shared_users]).values_list('id', flat=True)
# else:
# related_step_ing += Ingredient.objects.filter(step__recipe=x, space=space).values_list('id', flat=True)
# x_ing = []
# if ingredients.filter(food__recipe=x).exists():
# for ing in ingredients.filter(food__recipe=x):
# if exclude_onhand:
# x_ing = Ingredient.objects.filter(step__recipe=x, food__ignore_shopping=False, space=space).exclude(food__onhand_users__id__in=[x.id for x in shared_users])
# else:
# x_ing = Ingredient.objects.filter(step__recipe=x, food__ignore_shopping=False, space=space).exclude(food__ignore_shopping=True)
# for i in [x for x in x_ing]:
# ShoppingListEntry.objects.create(
# list_recipe=list_recipe,
# food=i.food,
# unit=i.unit,
# ingredient=i,
# amount=i.amount * Decimal(servings_factor),
# created_by=created_by,
# space=space,
# )
#         # don't add foods to the shopping list that are actually recipes, as those will be added as ingredients
# ingredients = ingredients.exclude(food__recipe=x)
# add_ingredients = list(ingredients.values_list('id', flat=True)) + related_step_ing
# if not append:
# existing_list = ShoppingListEntry.objects.filter(list_recipe=list_recipe)
# # delete shopping list entries not included in ingredients
# existing_list.exclude(ingredient__in=ingredients).delete()
# # add shopping list entries that did not previously exist
# add_ingredients = set(add_ingredients) - set(existing_list.values_list('ingredient__id', flat=True))
# add_ingredients = Ingredient.objects.filter(id__in=add_ingredients, space=space)
# # if servings have changed, update the ShoppingListRecipe and existing Entries
# if servings <= 0:
# servings = 1
# if not created and list_recipe.servings != servings:
# update_ingredients = set(ingredients.values_list('id', flat=True)) - set(add_ingredients.values_list('id', flat=True))
# list_recipe.servings = servings
# list_recipe.save()
# for sle in ShoppingListEntry.objects.filter(list_recipe=list_recipe, ingredient__id__in=update_ingredients):
# sle.amount = sle.ingredient.amount * Decimal(servings_factor)
# sle.save()
# # add any missing Entries
# for i in [x for x in add_ingredients if x.food]:
# ShoppingListEntry.objects.create(
# list_recipe=list_recipe,
# food=i.food,
# unit=i.unit,
# ingredient=i,
# amount=i.amount * Decimal(servings_factor),
# created_by=created_by,
# space=space,
# )
# # return all shopping list items
# return list_recipe
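The scaling applied above (and in the commented-out legacy helper) boils down to a Decimal ratio; a minimal sketch with made-up numbers:

from decimal import Decimal

recipe_servings = Decimal('4')       # servings the recipe was written for
requested_servings = Decimal('6')    # servings on the meal plan / shopping list
servings_factor = requested_servings / recipe_servings
print(Decimal('250') * servings_factor)  # 375.0 -> a 250 g ingredient entry becomes 375 g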

View File

@@ -2,6 +2,7 @@ from gettext import gettext as _
import bleach
import markdown as md
from bleach_allowlist import markdown_attrs, markdown_tags
from jinja2 import Template, TemplateSyntaxError, UndefinedError
from markdown.extensions.tables import TableExtension
@@ -14,34 +15,17 @@ class IngredientObject(object):
unit = ""
food = ""
note = ""
numeric_amount = 0
def __init__(self, ingredient):
if ingredient.no_amount:
self.amount = ""
else:
self.amount = f"<scalable-number v-bind:number='{bleach.clean(str(ingredient.amount))}' v-bind:factor='ingredient_factor'></scalable-number>"
self.numeric_amount = float(ingredient.amount)
if ingredient.unit:
if ingredient.unit.plural_name in (None, ""):
self.unit = bleach.clean(str(ingredient.unit))
else:
if ingredient.always_use_plural_unit or ingredient.amount > 1 and not ingredient.no_amount:
self.unit = bleach.clean(ingredient.unit.plural_name)
else:
self.unit = bleach.clean(str(ingredient.unit))
self.unit = bleach.clean(str(ingredient.unit))
else:
self.unit = ""
if ingredient.food:
if ingredient.food.plural_name in (None, ""):
self.food = bleach.clean(str(ingredient.food))
else:
if ingredient.always_use_plural_food or ingredient.amount > 1 and not ingredient.no_amount:
self.food = bleach.clean(str(ingredient.food.plural_name))
else:
self.food = bleach.clean(str(ingredient.food))
else:
self.food = ""
self.food = bleach.clean(str(ingredient.food))
self.note = bleach.clean(str(ingredient.note))
def __str__(self):
@@ -54,17 +38,9 @@ class IngredientObject(object):
def render_instructions(step): # TODO deduplicate markdown cleanup code
instructions = step.instruction
tags = {
"h1", "h2", "h3", "h4", "h5", "h6",
"b", "i", "strong", "em", "tt",
"p", "br",
"span", "div", "blockquote", "code", "pre", "hr",
"ul", "ol", "li", "dd", "dt",
"img",
"a",
"sub", "sup",
'pre', 'table', 'td', 'tr', 'th', 'tbody', 'style', 'thead'
}
tags = markdown_tags + [
'pre', 'table', 'td', 'tr', 'th', 'tbody', 'style', 'thead', 'img'
]
parsed_md = md.markdown(
instructions,
extensions=[
@@ -72,11 +48,7 @@ def render_instructions(step): # TODO deduplicate markdown cleanup code
UrlizeExtension(), MarkdownFormatExtension()
]
)
markdown_attrs = {
"*": ["id", "class", 'width', 'height'],
"img": ["src", "alt", "title"],
"a": ["href", "alt", "title"],
}
markdown_attrs['*'] = markdown_attrs['*'] + ['class', 'width', 'height']
instructions = bleach.clean(parsed_md, tags, markdown_attrs)
@@ -85,12 +57,9 @@ def render_instructions(step): # TODO deduplicate markdown cleanup code
for i in step.ingredients.all():
ingredients.append(IngredientObject(i))
def scale(number):
return f"<scalable-number v-bind:number='{bleach.clean(str(number))}' v-bind:factor='ingredient_factor'></scalable-number>"
try:
template = Template(instructions)
instructions = template.render(ingredients=ingredients, scale=scale)
instructions = template.render(ingredients=ingredients)
except TemplateSyntaxError:
return _('Could not parse template code.') + ' Error: Template Syntax broken'
except UndefinedError:
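A standalone sketch of the Jinja2 hooks exposed above; the newer variant of this file additionally passes scale(), and a plain string list stands in for the IngredientObject instances:

from jinja2 import Template

def scale(number):
    # mirrors the helper above: emit a scalable-number tag for the frontend
    return f"<scalable-number v-bind:number='{number}' v-bind:factor='ingredient_factor'></scalable-number>"

instruction = "Reduce to {{ scale(250) }} ml, then add {{ ingredients[0] }}."
print(Template(instruction).render(ingredients=['500 ml stock'], scale=scale))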

View File

@@ -1,142 +0,0 @@
from django.core.cache import caches
from decimal import Decimal
from cookbook.helper.cache_helper import CacheHelper
from cookbook.models import Ingredient, Unit
CONVERSION_TABLE = {
'weight': {
'g': 1000,
'kg': 1,
'ounce': 35.274,
'pound': 2.20462
},
'volume': {
'ml': 1000,
'l': 1,
'fluid_ounce': 33.814,
'pint': 2.11338,
'quart': 1.05669,
'gallon': 0.264172,
'tbsp': 67.628,
'tsp': 202.884,
'us_cup': 4.22675,
'imperial_fluid_ounce': 35.1951,
'imperial_pint': 1.75975,
'imperial_quart': 0.879877,
'imperial_gallon': 0.219969,
'imperial_tbsp': 56.3121,
'imperial_tsp': 168.936,
},
}
BASE_UNITS_WEIGHT = list(CONVERSION_TABLE['weight'].keys())
BASE_UNITS_VOLUME = list(CONVERSION_TABLE['volume'].keys())
class ConversionException(Exception):
pass
class UnitConversionHelper:
space = None
def __init__(self, space):
"""
Initializes unit conversion helper
:param space: space to perform conversions on
"""
self.space = space
@staticmethod
def convert_from_to(from_unit, to_unit, amount):
"""
Convert from one base unit to another. Throws ConversionException if trying to convert between different systems (weight/volume) or if units are not supported.
:param from_unit: str unit to convert from
:param to_unit: str unit to convert to
:param amount: amount to convert
:return: Decimal converted amount
"""
system = None
if from_unit in BASE_UNITS_WEIGHT and to_unit in BASE_UNITS_WEIGHT:
system = 'weight'
if from_unit in BASE_UNITS_VOLUME and to_unit in BASE_UNITS_VOLUME:
system = 'volume'
if not system:
raise ConversionException('Trying to convert units that do not exist or are not in the same unit system (weight/volume)')
return Decimal(amount / Decimal(CONVERSION_TABLE[system][from_unit] / CONVERSION_TABLE[system][to_unit]))
def base_conversions(self, ingredient_list):
"""
Calculates all possible base unit conversions for each ingredient given.
Converts to all common base units IF they exist in the unit database of the space.
For useful results all ingredients passed should be of the same food, otherwise filtering afterwards might be required.
:param ingredient_list: list of ingredients to convert
:return: ingredient list with appended conversions
"""
base_conversion_ingredient_list = ingredient_list.copy()
for i in ingredient_list:
try:
conversion_unit = i.unit.name
if i.unit.base_unit:
conversion_unit = i.unit.base_unit
# TODO allow setting which units to convert to? possibly only once conversions become visible
units = caches['default'].get(CacheHelper(self.space).BASE_UNITS_CACHE_KEY, None)
if not units:
units = Unit.objects.filter(space=self.space, base_unit__in=(BASE_UNITS_VOLUME + BASE_UNITS_WEIGHT)).all()
caches['default'].set(CacheHelper(self.space).BASE_UNITS_CACHE_KEY, units, 60 * 60) # cache is cleared on unit save signal so long duration is fine
for u in units:
try:
ingredient = Ingredient(amount=self.convert_from_to(conversion_unit, u.base_unit, i.amount), unit=u, food=ingredient_list[0].food, )
if not any((x.unit.name == ingredient.unit.name or x.unit.base_unit == ingredient.unit.name) for x in base_conversion_ingredient_list):
base_conversion_ingredient_list.append(ingredient)
except ConversionException:
pass
except Exception:
pass
return base_conversion_ingredient_list
def get_conversions(self, ingredient):
"""
Converts an ingredient to all possible conversions based on the custom unit conversion database.
After that passes conversion to UnitConversionHelper.base_conversions() to get all base conversions possible.
:param ingredient: Ingredient object
:return: list of ingredients with all possible custom and base conversions
"""
conversions = [ingredient]
if ingredient.unit:
for c in ingredient.unit.unit_conversion_base_relation.all():
if c.space == self.space:
r = self._uc_convert(c, ingredient.amount, ingredient.unit, ingredient.food)
if r and r not in conversions:
conversions.append(r)
for c in ingredient.unit.unit_conversion_converted_relation.all():
if c.space == self.space:
r = self._uc_convert(c, ingredient.amount, ingredient.unit, ingredient.food)
if r and r not in conversions:
conversions.append(r)
conversions = self.base_conversions(conversions)
return conversions
def _uc_convert(self, uc, amount, unit, food):
"""
Helper to calculate values for custom unit conversions.
Converts given base values using the passed UnitConversion object into a converted Ingredient
:param uc: UnitConversion object
:param amount: base amount
:param unit: base unit
:param food: base food
:return: converted ingredient object from base amount/unit/food
"""
if uc.food is None or uc.food == food:
if unit == uc.base_unit:
return Ingredient(amount=amount * (uc.converted_amount / uc.base_amount), unit=uc.converted_unit, food=food, space=self.space)
else:
return Ingredient(amount=amount * (uc.base_amount / uc.converted_amount), unit=uc.base_unit, food=food, space=self.space)
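A worked example of convert_from_to() using the table above: converting 100 g to ounces divides the amount by the ratio of the two base factors.

from decimal import Decimal

# CONVERSION_TABLE['weight']['g'] = 1000, CONVERSION_TABLE['weight']['ounce'] = 35.274
amount_g = Decimal('100')
factor = Decimal(1000 / 35.274)   # same float division as in convert_from_to()
print(amount_g / factor)          # ~3.5274 ounces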

View File

@@ -36,7 +36,7 @@ class ChefTap(Integration):
recipe = Recipe.objects.create(name=title, created_by=self.request.user, internal=True, space=self.request.space, )
step = Step.objects.create(instruction='\n'.join(directions), space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,)
step = Step.objects.create(instruction='\n'.join(directions), space=self.request.space,)
if source_url != '':
step.instruction += '\n' + source_url

View File

@@ -4,7 +4,6 @@ from zipfile import ZipFile
from cookbook.helper.image_processing import get_filetype
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text, parse_time
from cookbook.integration.integration import Integration
from cookbook.models import Ingredient, Keyword, Recipe, Step
@@ -20,10 +19,6 @@ class Chowdown(Integration):
direction_mode = False
description_mode = False
description = None
prep_time = None
serving = None
ingredients = []
directions = []
descriptions = []
@@ -31,12 +26,6 @@ class Chowdown(Integration):
line = fl.decode("utf-8")
if 'title:' in line:
title = line.replace('title:', '').replace('"', '').strip()
if 'description:' in line:
description = line.replace('description:', '').replace('"', '').strip()
if 'prep_time:' in line:
prep_time = line.replace('prep_time:', '').replace('"', '').strip()
if 'yield:' in line:
serving = line.replace('yield:', '').replace('"', '').strip()
if 'image:' in line:
image = line.replace('image:', '').strip()
if 'tags:' in line:
@@ -59,43 +48,15 @@ class Chowdown(Integration):
descriptions.append(line)
recipe = Recipe.objects.create(name=title, created_by=self.request.user, internal=True, space=self.request.space)
if description:
recipe.description = description
for k in tags.split(','):
print(f'adding keyword {k.strip()}')
keyword, created = Keyword.objects.get_or_create(name=k.strip(), space=self.request.space)
recipe.keywords.add(keyword)
ingredients_added = False
for direction in directions:
if len(direction.strip()) > 0:
step = Step.objects.create(
instruction=direction, name='', space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
)
else:
step = Step.objects.create(
instruction=direction, space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
)
if not ingredients_added:
ingredients_added = True
ingredient_parser = IngredientParser(self.request, True)
for ingredient in ingredients:
if len(ingredient.strip()) > 0:
amount, unit, food, note = ingredient_parser.parse(ingredient)
f = ingredient_parser.get_food(food)
u = ingredient_parser.get_unit(unit)
step.ingredients.add(Ingredient.objects.create(
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
))
recipe.steps.add(step)
if serving:
recipe.servings = parse_servings(serving)
recipe.servings_text = 'servings'
if prep_time:
recipe.working_time = parse_time(prep_time)
step = Step.objects.create(
instruction='\n'.join(directions) + '\n\n' + '\n'.join(descriptions), space=self.request.space,
)
ingredient_parser = IngredientParser(self.request, True)
for ingredient in ingredients:
@@ -115,7 +76,6 @@ class Chowdown(Integration):
if re.match(f'^images/{image}$', z.filename):
self.import_recipe_image(recipe, BytesIO(import_zip.read(z.filename)), filetype=get_filetype(z.filename))
recipe.save()
return recipe
def get_file_from_recipe(self, recipe):

View File

@@ -1,15 +1,20 @@
import base64
import gzip
import json
import re
from gettext import gettext as _
from io import BytesIO
import requests
import validators
import yaml
from cookbook.helper.HelperFunctions import validate_import_url
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.recipe_url_import import (get_from_scraper, get_images_from_soup,
iso_duration_to_minutes)
from recipe_scrapers import scrape_html
from cookbook.helper.scrapers.scrapers import text_scraper
from cookbook.integration.integration import Integration
from cookbook.models import Ingredient, Recipe, Step
from cookbook.models import Ingredient, Keyword, Recipe, Step
class CookBookApp(Integration):
@@ -20,7 +25,8 @@ class CookBookApp(Integration):
def get_recipe_from_file(self, file):
recipe_html = file.getvalue().decode("utf-8")
scrape = scrape_html(html=recipe_html, org_url="https://cookbookapp.import", supported_only=False)
# recipe_json, recipe_tree, html_data, images = get_recipe_from_source(recipe_html, 'CookBookApp', self.request)
scrape = text_scraper(text=recipe_html)
recipe_json = get_from_scraper(scrape, self.request)
images = list(dict.fromkeys(get_images_from_soup(scrape.soup, None)))
@@ -31,7 +37,7 @@ class CookBookApp(Integration):
try:
recipe.servings = re.findall('([0-9])+', recipe_json['recipeYield'])[0]
except Exception:
except Exception as e:
pass
try:
@@ -41,8 +47,7 @@ class CookBookApp(Integration):
pass
# assuming import files only contain single step
step = Step.objects.create(instruction=recipe_json['steps'][0]['instruction'], space=self.request.space,
show_ingredients_table=self.request.user.userpreference.show_step_ingredients, )
step = Step.objects.create(instruction=recipe_json['steps'][0]['instruction'], space=self.request.space, )
if 'nutrition' in recipe_json:
step.instruction = step.instruction + '\n\n' + recipe_json['nutrition']
@@ -57,13 +62,13 @@ class CookBookApp(Integration):
if unit := ingredient.get('unit', None):
u = ingredient_parser.get_unit(unit.get('name', None))
step.ingredients.add(Ingredient.objects.create(
food=f, unit=u, amount=ingredient.get('amount', None), note=ingredient.get('note', None), original_text=ingredient.get('original_text', None), space=self.request.space,
food=f, unit=u, amount=ingredient.get('amount', None), note=ingredient.get('note', None), original_text=ingredient.get('original_text', None), space=self.request.space,
))
if len(images) > 0:
try:
url = images[0]
if validate_import_url(url):
if validators.url(url, public=True):
response = requests.get(url)
self.import_recipe_image(recipe, BytesIO(response.content))
except Exception as e:

View File

@@ -1,12 +1,17 @@
import base64
import json
from io import BytesIO
import requests
from gettext import gettext as _
import requests
import validators
from lxml import etree
from cookbook.helper.HelperFunctions import validate_import_url
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text, parse_time
from cookbook.helper.recipe_url_import import parse_servings, parse_time, parse_servings_text
from cookbook.integration.integration import Integration
from cookbook.models import Ingredient, Recipe, Step
from cookbook.models import Ingredient, Keyword, Recipe, Step
class Cookmate(Integration):
@@ -45,7 +50,7 @@ class Cookmate(Integration):
for step in recipe_text.getchildren():
if step.text:
step = Step.objects.create(
instruction=step.text.strip(), space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
instruction=step.text.strip(), space=self.request.space,
)
recipe.steps.add(step)
@@ -69,7 +74,7 @@ class Cookmate(Integration):
if recipe_xml.find('imageurl') is not None:
try:
url = recipe_xml.find('imageurl').text.strip()
if validate_import_url(url):
if validators.url(url, public=True):
response = requests.get(url)
self.import_recipe_image(recipe, BytesIO(response.content))
except Exception as e:

View File

@@ -1,3 +1,4 @@
import re
from io import BytesIO
from zipfile import ZipFile
@@ -25,13 +26,12 @@ class CopyMeThat(Integration):
except AttributeError:
source = None
recipe = Recipe.objects.create(name=file.find("div", {"id": "name"}).text.strip(
)[:128], source_url=source, created_by=self.request.user, internal=True, space=self.request.space, )
recipe = Recipe.objects.create(name=file.find("div", {"id": "name"}).text.strip()[:128], source_url=source, created_by=self.request.user, internal=True, space=self.request.space, )
for category in file.find_all("span", {"class": "recipeCategory"}):
keyword, created = Keyword.objects.get_or_create(name=category.text, space=self.request.space)
recipe.keywords.add(keyword)
try:
recipe.servings = parse_servings(file.find("a", {"id": "recipeYield"}).text.strip())
recipe.working_time = iso_duration_to_minutes(file.find("span", {"meta": "prepTime"}).text.strip())
@@ -51,7 +51,7 @@ class CopyMeThat(Integration):
except AttributeError:
pass
step = Step.objects.create(instruction='', space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients, )
step = Step.objects.create(instruction='', space=self.request.space, )
ingredient_parser = IngredientParser(self.request, True)
@@ -61,14 +61,7 @@ class CopyMeThat(Integration):
if not isinstance(ingredient, Tag) or not ingredient.text.strip() or "recipeIngredient_spacer" in ingredient['class']:
continue
if any(x in ingredient['class'] for x in ["recipeIngredient_subheader", "recipeIngredient_note"]):
step.ingredients.add(
Ingredient.objects.create(
is_header=True,
note=ingredient.text.strip()[
:256],
original_text=ingredient.text.strip(),
space=self.request.space,
))
step.ingredients.add(Ingredient.objects.create(is_header=True, note=ingredient.text.strip()[:256], original_text=ingredient.text.strip(), space=self.request.space, ))
else:
amount, unit, food, note = ingredient_parser.parse(ingredient.text.strip())
f = ingredient_parser.get_food(food)
@@ -85,7 +78,7 @@ class CopyMeThat(Integration):
step.save()
recipe.steps.add(step)
step = Step.objects.create(instruction='', space=self.request.space, )
step.name = instruction.text.strip()[:128]
else:
step.instruction += instruction.text.strip() + ' \n\n'

View File

@@ -1,5 +1,4 @@
import json
import traceback
from io import BytesIO, StringIO
from re import match
from zipfile import ZipFile
@@ -20,10 +19,7 @@ class Default(Integration):
recipe = self.decode_recipe(recipe_string)
images = list(filter(lambda v: match('image.*', v), recipe_zip.namelist()))
if images:
try:
self.import_recipe_image(recipe, BytesIO(recipe_zip.read(images[0])), filetype=get_filetype(images[0]))
except AttributeError:
traceback.print_exc()
self.import_recipe_image(recipe, BytesIO(recipe_zip.read(images[0])), filetype=get_filetype(images[0]))
return recipe
def decode_recipe(self, string):
@@ -58,7 +54,7 @@ class Default(Integration):
try:
recipe_zip_obj.writestr(f'image{get_filetype(r.image.file.name)}', r.image.file.read())
except (ValueError, FileNotFoundError):
except ValueError:
pass
recipe_zip_obj.close()
@@ -71,4 +67,4 @@ class Default(Integration):
export_zip_obj.close()
return [[self.get_export_file_name(), export_zip_stream.getvalue()]]
return [[ self.get_export_file_name(), export_zip_stream.getvalue() ]]

View File

@@ -28,7 +28,7 @@ class Domestica(Integration):
recipe.save()
step = Step.objects.create(
instruction=file['directions'], space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
instruction=file['directions'], space=self.request.space,
)
if file['source'] != '':

View File

@@ -1,11 +1,16 @@
import time
import datetime
import json
import traceback
import uuid
from io import BytesIO
from io import BytesIO, StringIO
from zipfile import BadZipFile, ZipFile
from bs4 import Tag
import lxml
from django.core.cache import cache
import datetime
from bs4 import Tag
from django.core.exceptions import ObjectDoesNotExist
from django.core.files import File
from django.db import IntegrityError
@@ -15,9 +20,11 @@ from django.utils.translation import gettext as _
from django_scopes import scope
from lxml import etree
from cookbook.helper.image_processing import handle_image
from cookbook.forms import ImportExportBase
from cookbook.helper.image_processing import get_filetype, handle_image
from cookbook.models import Keyword, Recipe
from recipes.settings import DEBUG, EXPORT_FILE_CACHE_DURATION
from recipes.settings import DEBUG
from recipes.settings import EXPORT_FILE_CACHE_DURATION
class Integration:
@@ -37,6 +44,7 @@ class Integration:
self.ignored_recipes = []
description = f'Imported by {request.user.get_user_display_name()} at {date_format(datetime.datetime.now(), "DATETIME_FORMAT")}. Type: {export_type}'
icon = '📥'
try:
last_kw = Keyword.objects.filter(name__regex=r'^(Import [0-9]+)', space=request.space).latest('created_at')
@@ -49,19 +57,23 @@ class Integration:
self.keyword = parent.add_child(
name=name,
description=description,
icon=icon,
space=request.space
)
except (IntegrityError, ValueError): # in case, for whatever reason, the name does exist append UUID to it. Not nice but works for now.
self.keyword = parent.add_child(
name=f'{name} {str(uuid.uuid4())[0:8]}',
description=description,
icon=icon,
space=request.space
)
def do_export(self, recipes, el):
with scope(space=self.request.space):
el.total_recipes = len(recipes)
el.total_recipes = len(recipes)
el.cache_duration = EXPORT_FILE_CACHE_DURATION
el.save()
@@ -73,7 +85,7 @@ class Integration:
export_file = file
else:
# zip the files if there is more than one file
#zip the files if there is more than one file
export_filename = self.get_export_file_name()
export_stream = BytesIO()
export_obj = ZipFile(export_stream, 'w')
@@ -84,7 +96,8 @@ class Integration:
export_obj.close()
export_file = export_stream.getvalue()
cache.set('export_file_' + str(el.pk), {'filename': export_filename, 'file': export_file}, EXPORT_FILE_CACHE_DURATION)
cache.set('export_file_'+str(el.pk), {'filename': export_filename, 'file': export_file}, EXPORT_FILE_CACHE_DURATION)
el.running = False
el.save()
@@ -92,6 +105,7 @@ class Integration:
response['Content-Disposition'] = 'attachment; filename="' + export_filename + '"'
return response
def import_file_name_filter(self, zip_info_object):
"""
Since zipfile.namelist() returns all files in all subdirectories this function allows filtering of files
@@ -155,7 +169,7 @@ class Integration:
for z in file_list:
try:
if not hasattr(z, 'filename') or isinstance(z, Tag):
if not hasattr(z, 'filename') or type(z) == Tag:
recipe = self.get_recipe_from_file(z)
else:
recipe = self.get_recipe_from_file(BytesIO(import_zip.read(z.filename)))
@@ -168,7 +182,7 @@ class Integration:
traceback.print_exc()
self.handle_exception(e, log=il, message=f'-------------------- \nERROR \n{e}\n--------------------\n')
import_zip.close()
elif '.json' in f['name'] or '.xml' in f['name'] or '.txt' in f['name'] or '.mmf' in f['name'] or '.rk' in f['name'] or '.melarecipe' in f['name']:
elif '.json' in f['name'] or '.txt' in f['name'] or '.mmf' in f['name'] or '.rk' in f['name'] or '.melarecipe' in f['name']:
data_list = self.split_recipe_file(f['file'])
il.total_recipes += len(data_list)
for d in data_list:
@@ -289,6 +303,7 @@ class Integration:
if DEBUG:
traceback.print_exc()
def get_export_file_name(self, format='zip'):
return "export_{}.{}".format(datetime.datetime.now().strftime("%Y-%m-%d"), format)

View File

@@ -5,9 +5,8 @@ from zipfile import ZipFile
from cookbook.helper.image_processing import get_filetype
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text, parse_time
from cookbook.integration.integration import Integration
from cookbook.models import Ingredient, Keyword, Recipe, Step
from cookbook.models import Ingredient, Recipe, Step
class Mealie(Integration):
@@ -24,66 +23,41 @@ class Mealie(Integration):
name=recipe_json['name'].strip(), description=description,
created_by=self.request.user, internal=True, space=self.request.space)
# TODO parse times (given in PT2H3M )
# @vabene check recipe_url_import.iso_duration_to_minutes I think it does what you are looking for
ingredients_added = False
for s in recipe_json['recipe_instructions']:
step = Step.objects.create(instruction=s['text'], space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients, )
recipe.steps.add(step)
step = recipe.steps.first()
if not step: # if there is no step in the exported data
step = Step.objects.create(instruction='', space=self.request.space, )
recipe.steps.add(step)
if len(recipe_json['description'].strip()) > 500:
step.instruction = recipe_json['description'].strip() + '\n\n' + step.instruction
ingredient_parser = IngredientParser(self.request, True)
for ingredient in recipe_json['recipe_ingredient']:
try:
if ingredient['food']:
f = ingredient_parser.get_food(ingredient['food'])
u = ingredient_parser.get_unit(ingredient['unit'])
amount = ingredient['quantity']
note = ingredient['note']
original_text = None
else:
amount, unit, food, note = ingredient_parser.parse(ingredient['note'])
f = ingredient_parser.get_food(food)
u = ingredient_parser.get_unit(unit)
original_text = ingredient['note']
step.ingredients.add(Ingredient.objects.create(
food=f, unit=u, amount=amount, note=note, original_text=original_text, space=self.request.space,
))
except Exception:
pass
if 'tags' in recipe_json and len(recipe_json['tags']) > 0:
for k in recipe_json['tags']:
if 'name' in k:
keyword, created = Keyword.objects.get_or_create(name=k['name'].strip(), space=self.request.space)
recipe.keywords.add(keyword)
if 'notes' in recipe_json and len(recipe_json['notes']) > 0:
notes_text = "#### Notes \n\n"
for n in recipe_json['notes']:
notes_text += f'{n["text"]} \n'
step = Step.objects.create(
instruction=notes_text, space=self.request.space,
instruction=s['text'], space=self.request.space,
)
if not ingredients_added:
ingredients_added = True
if len(recipe_json['description'].strip()) > 500:
step.instruction = recipe_json['description'].strip() + '\n\n' + step.instruction
ingredient_parser = IngredientParser(self.request, True)
for ingredient in recipe_json['recipe_ingredient']:
try:
if ingredient['food']:
f = ingredient_parser.get_food(ingredient['food'])
u = ingredient_parser.get_unit(ingredient['unit'])
amount = ingredient['quantity']
note = ingredient['note']
original_text = None
else:
amount, unit, food, note = ingredient_parser.parse(ingredient['note'])
f = ingredient_parser.get_food(food)
u = ingredient_parser.get_unit(unit)
original_text = ingredient['note']
step.ingredients.add(Ingredient.objects.create(
food=f, unit=u, amount=amount, note=note, original_text=original_text, space=self.request.space,
))
except Exception:
pass
recipe.steps.add(step)
if 'recipe_yield' in recipe_json:
recipe.servings = parse_servings(recipe_json['recipe_yield'])
recipe.servings_text = parse_servings_text(recipe_json['recipe_yield'])
if 'total_time' in recipe_json and recipe_json['total_time'] is not None:
recipe.working_time = parse_time(recipe_json['total_time'])
if 'org_url' in recipe_json:
recipe.source_url = recipe_json['org_url']
recipe.save()
for f in self.files:
if '.zip' in f['name']:
import_zip = ZipFile(f['file'])

View File

@@ -22,7 +22,7 @@ class MealMaster(Integration):
if 'Yield:' in line:
servings_text = line.replace('Yield:', '').strip()
else:
if re.match(r'\s{2,}([0-9])+', line):
if re.match('\s{2,}([0-9])+', line):
ingredients.append(line.strip())
else:
directions.append(line.strip())
@@ -39,7 +39,7 @@ class MealMaster(Integration):
recipe.keywords.add(keyword)
step = Step.objects.create(
instruction='\n'.join(directions) + '\n\n', space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
instruction='\n'.join(directions) + '\n\n', space=self.request.space,
)
ingredient_parser = IngredientParser(self.request, True)

View File

@@ -57,7 +57,7 @@ class MelaRecipes(Integration):
recipe.source_url = recipe_json['link']
step = Step.objects.create(
instruction=instruction, space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients
instruction=instruction, space=self.request.space,
)
ingredient_parser = IngredientParser(self.request, True)

View File

@@ -1,15 +1,13 @@
import json
import re
from io import BytesIO, StringIO
from io import BytesIO
from zipfile import ZipFile
from PIL import Image
from cookbook.helper.image_processing import get_filetype
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.recipe_url_import import iso_duration_to_minutes
from cookbook.integration.integration import Integration
from cookbook.models import Ingredient, Keyword, NutritionInformation, Recipe, Step
from cookbook.models import Ingredient, Keyword, Recipe, Step, NutritionInformation
class NextcloudCookbook(Integration):
@@ -52,28 +50,23 @@ class NextcloudCookbook(Integration):
ingredients_added = False
for s in recipe_json['recipeInstructions']:
if 'text' in s:
step = Step.objects.create(
instruction=s['text'], name=s['name'], space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
)
else:
step = Step.objects.create(
instruction=s, space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
)
step = Step.objects.create(
instruction=s, space=self.request.space,
)
if not ingredients_added:
if len(recipe_json['description'].strip()) > 500:
step.instruction = recipe_json['description'].strip() + '\n\n' + step.instruction
ingredient_parser = IngredientParser(self.request, True)
if ingredients_added == False:
ingredients_added = True
ingredient_parser = IngredientParser(self.request, True)
for ingredient in recipe_json['recipeIngredient']:
ingredients_added = True
if ingredient.startswith('##'):
subheader = ingredient.replace('##', '', 1)
step.ingredients.add(Ingredient.objects.create(note=subheader, is_header=True, no_amount=True, space=self.request.space))
else:
amount, unit, food, note = ingredient_parser.parse(ingredient)
f = ingredient_parser.get_food(food)
u = ingredient_parser.get_unit(unit)
step.ingredients.add(Ingredient.objects.create(
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,))
amount, unit, food, note = ingredient_parser.parse(ingredient)
f = ingredient_parser.get_food(food)
u = ingredient_parser.get_unit(unit)
step.ingredients.add(Ingredient.objects.create(
food=f, unit=u, amount=amount, note=note, original_text=ingredient, space=self.request.space,
))
recipe.steps.add(step)
if 'nutrition' in recipe_json:
@@ -91,7 +84,7 @@ class NextcloudCookbook(Integration):
if nutrition != {}:
recipe.nutrition = NutritionInformation.objects.create(**nutrition, space=self.request.space)
recipe.save()
except Exception:
except Exception as e:
pass
for f in self.files:
@@ -103,90 +96,5 @@ class NextcloudCookbook(Integration):
return recipe
def formatTime(self, min):
h = min // 60
m = min % 60
return f'PT{h}H{m}M0S'
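
formatTime above packs a minute count into an ISO-8601-style duration string for the Nextcloud JSON. A standalone re-implementation with a couple of worked values (the function name and asserts are illustrative, not part of the codebase):

def format_time(minutes: int) -> str:
    # 90 minutes -> 1 h 30 min -> 'PT1H30M0S', matching the exporter above.
    hours, mins = divmod(minutes, 60)
    return f'PT{hours}H{mins}M0S'

assert format_time(90) == 'PT1H30M0S'
assert format_time(45) == 'PT0H45M0S'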
def get_file_from_recipe(self, recipe):
export = {}
export['name'] = recipe.name
export['description'] = recipe.description
export['url'] = recipe.source_url
export['prepTime'] = self.formatTime(recipe.working_time)
export['cookTime'] = self.formatTime(recipe.waiting_time)
export['totalTime'] = self.formatTime(recipe.working_time + recipe.waiting_time)
export['recipeYield'] = recipe.servings
export['image'] = f'/Recipes/{recipe.name}/full.jpg'
export['imageUrl'] = f'/Recipes/{recipe.name}/full.jpg'
recipeKeyword = []
for k in recipe.keywords.all():
recipeKeyword.append(k.name)
export['keywords'] = recipeKeyword
recipeInstructions = []
recipeIngredient = []
for s in recipe.steps.all():
recipeInstructions.append(s.instruction)
for i in s.ingredients.all():
recipeIngredient.append(f'{float(i.amount)} {i.unit} {i.food}')
export['recipeIngredient'] = recipeIngredient
export['recipeInstructions'] = recipeInstructions
return "recipe.json", json.dumps(export)
def get_files_from_recipes(self, recipes, el, cookie):
export_zip_stream = BytesIO()
export_zip_obj = ZipFile(export_zip_stream, 'w')
for recipe in recipes:
if recipe.internal and recipe.space == self.request.space:
recipe_stream = StringIO()
filename, data = self.get_file_from_recipe(recipe)
recipe_stream.write(data)
export_zip_obj.writestr(f'{recipe.name}/{filename}', recipe_stream.getvalue())
recipe_stream.close()
try:
imageByte = recipe.image.file.read()
export_zip_obj.writestr(f'{recipe.name}/full.jpg', self.getJPEG(imageByte))
export_zip_obj.writestr(f'{recipe.name}/thumb.jpg', self.getThumb(171, imageByte))
export_zip_obj.writestr(f'{recipe.name}/thumb16.jpg', self.getThumb(16, imageByte))
except ValueError:
pass
el.exported_recipes += 1
el.msg += self.get_recipe_processed_msg(recipe)
el.save()
export_zip_obj.close()
return [[self.get_export_file_name(), export_zip_stream.getvalue()]]
def getJPEG(self, imageByte):
image = Image.open(BytesIO(imageByte))
image = image.convert('RGB')
bytes = BytesIO()
image.save(bytes, "JPEG")
return bytes.getvalue()
def getThumb(self, size, imageByte):
image = Image.open(BytesIO(imageByte))
w, h = image.size
m = min(w, h)
image = image.crop(((w - m) // 2, (h - m) // 2, (w + m) // 2, (h + m) // 2))
image = image.resize([size, size], Image.Resampling.LANCZOS)
image = image.convert('RGB')
bytes = BytesIO()
image.save(bytes, "JPEG")
return bytes.getvalue()
raise NotImplementedError('Method not implemented in storage integration')
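
getThumb above center-crops the image to a square before resizing, so thumbnails are never distorted. A small Pillow sketch of the same crop-then-resize idea on a generated in-memory image (assumes a recent Pillow with Image.Resampling; the sample image is synthetic, not a recipe photo):

from io import BytesIO
from PIL import Image

# Build a dummy 300x200 image, then center-crop to a square and resize to
# 171px, mirroring the thumbnail logic in the exporter above.
source = Image.new('RGB', (300, 200), 'white')
w, h = source.size
m = min(w, h)
square = source.crop(((w - m) // 2, (h - m) // 2, (w + m) // 2, (h + m) // 2))
thumb = square.resize((171, 171), Image.Resampling.LANCZOS)

buffer = BytesIO()
thumb.save(buffer, 'JPEG')
print(len(buffer.getvalue()), 'bytes of JPEG data')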

View File

@@ -1,59 +1,27 @@
import json
from django.utils.translation import gettext as _
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.integration.integration import Integration
from cookbook.models import Comment, CookLog, Ingredient, Keyword, Recipe, Step
from cookbook.models import Ingredient, Recipe, Step
class OpenEats(Integration):
def get_recipe_from_file(self, file):
description = file['info']
description_max_length = Recipe._meta.get_field('description').max_length
if len(description) > description_max_length:
description = description[0:description_max_length]
recipe = Recipe.objects.create(name=file['name'].strip(), description=description, created_by=self.request.user, internal=True,
recipe = Recipe.objects.create(name=file['name'].strip(), created_by=self.request.user, internal=True,
servings=file['servings'], space=self.request.space, waiting_time=file['cook_time'], working_time=file['prep_time'])
instructions = ''
if file["info"] != '':
instructions += file["info"]
if file["directions"] != '':
instructions += file["directions"]
if file["source"] != '':
instructions += '\n' + _('Recipe source:') + f'[{file["source"]}]({file["source"]})'
instructions += file["source"]
cuisine_keyword, created = Keyword.objects.get_or_create(name="Cuisine", space=self.request.space)
if file["cuisine"] != '':
keyword, created = Keyword.objects.get_or_create(name=file["cuisine"].strip(), space=self.request.space)
if created:
keyword.move(cuisine_keyword, pos="last-child")
recipe.keywords.add(keyword)
course_keyword, created = Keyword.objects.get_or_create(name="Course", space=self.request.space)
if file["course"] != '':
keyword, created = Keyword.objects.get_or_create(name=file["course"].strip(), space=self.request.space)
if created:
keyword.move(course_keyword, pos="last-child")
recipe.keywords.add(keyword)
for tag in file["tags"]:
keyword, created = Keyword.objects.get_or_create(name=tag.strip(), space=self.request.space)
recipe.keywords.add(keyword)
for comment in file['comments']:
Comment.objects.create(recipe=recipe, text=comment['text'], created_by=self.request.user)
CookLog.objects.create(recipe=recipe, rating=comment['rating'], created_by=self.request.user, space=self.request.space)
if file["photo"] != '':
recipe.image = f'recipes/openeats-import/{file["photo"]}'
recipe.save()
step = Step.objects.create(instruction=instructions, space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,)
step = Step.objects.create(instruction=instructions, space=self.request.space,)
ingredient_parser = IngredientParser(self.request, True)
for ingredient in file['ingredients']:
@@ -70,9 +38,6 @@ class OpenEats(Integration):
recipe_json = json.loads(file.read())
recipe_dict = {}
ingredient_group_dict = {}
cuisine_group_dict = {}
course_group_dict = {}
tag_group_dict = {}
for o in recipe_json:
if o['model'] == 'recipe.recipe':
@@ -85,27 +50,11 @@ class OpenEats(Integration):
'cook_time': o['fields']['cook_time'],
'servings': o['fields']['servings'],
'ingredients': [],
'photo': o['fields']['photo'],
'cuisine': o['fields']['cuisine'],
'course': o['fields']['course'],
'tags': o['fields']['tags'],
'comments': [],
}
if o['model'] == 'ingredient.ingredientgroup':
ingredient_group_dict[o['pk']] = o['fields']['recipe']
if o['model'] == 'recipe_groups.cuisine':
cuisine_group_dict[o['pk']] = o['fields']['title']
if o['model'] == 'recipe_groups.course':
course_group_dict[o['pk']] = o['fields']['title']
if o['model'] == 'recipe_groups.tag':
tag_group_dict[o['pk']] = o['fields']['title']
for o in recipe_json:
if o['model'] == 'rating.rating':
recipe_dict[o['fields']['recipe']]["comments"].append({
"text": o['fields']['comment'],
"rating": o['fields']['rating']
})
if o['model'] == 'ingredient.ingredient':
ingredient = {
'food': o['fields']['title'],
@@ -114,15 +63,6 @@ class OpenEats(Integration):
}
recipe_dict[ingredient_group_dict[o['fields']['ingredient_group']]]['ingredients'].append(ingredient)
for k, r in recipe_dict.items():
if r["cuisine"] in cuisine_group_dict:
r["cuisine"] = cuisine_group_dict[r["cuisine"]]
if r["course"] in course_group_dict:
r["course"] = course_group_dict[r["course"]]
for index in range(len(r["tags"])):
if r["tags"][index] in tag_group_dict:
r["tags"][index] = tag_group_dict[r["tags"][index]]
return list(recipe_dict.values())
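
The importer first indexes the cuisine, course and tag fixture rows by primary key, then swaps the numeric foreign keys on each recipe for their titles. A compact sketch of that id-to-title lookup over hypothetical fixture rows in the same Django dumpdata shape:

# Hypothetical fixture rows in the dumpdata shape the importer reads.
fixture = [
    {'model': 'recipe_groups.cuisine', 'pk': 1, 'fields': {'title': 'Italian'}},
    {'model': 'recipe_groups.course', 'pk': 2, 'fields': {'title': 'Dinner'}},
    {'model': 'recipe.recipe', 'pk': 10, 'fields': {'cuisine': 1, 'course': 2}},
]

cuisines = {o['pk']: o['fields']['title'] for o in fixture if o['model'] == 'recipe_groups.cuisine'}
courses = {o['pk']: o['fields']['title'] for o in fixture if o['model'] == 'recipe_groups.course'}

for o in fixture:
    if o['model'] == 'recipe.recipe':
        # Replace the numeric foreign keys with readable titles, as above.
        print(cuisines.get(o['fields']['cuisine']), courses.get(o['fields']['course']))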
def get_file_from_recipe(self, recipe):

View File

@@ -5,9 +5,6 @@ import re
from gettext import gettext as _
from io import BytesIO
import requests
from cookbook.helper.HelperFunctions import validate_import_url
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text
from cookbook.integration.integration import Integration
@@ -58,7 +55,7 @@ class Paprika(Integration):
pass
step = Step.objects.create(
instruction=instructions, space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
instruction=instructions, space=self.request.space,
)
if 'description' in recipe_json and len(recipe_json['description'].strip()) > 500:
@@ -84,14 +81,7 @@ class Paprika(Integration):
recipe.steps.add(step)
try:
if recipe_json.get("image_url", None):
url = recipe_json.get("image_url", None)
if validate_import_url(url):
response = requests.get(url)
self.import_recipe_image(recipe, BytesIO(response.content))
except Exception:
if recipe_json.get("photo_data", None):
self.import_recipe_image(recipe, BytesIO(base64.b64decode(recipe_json['photo_data'])), filetype='.jpeg')
if recipe_json.get("photo_data", None):
self.import_recipe_image(recipe, BytesIO(base64.b64decode(recipe_json['photo_data'])), filetype='.jpeg')
return recipe
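
The image download above is gated behind validate_import_url, whose implementation is not shown in this diff. Purely as a hedged illustration of the kind of guard such a helper might apply (a hypothetical stand-in, not the project's actual check):

from urllib.parse import urlparse

def looks_like_safe_import_url(url: str) -> bool:
    # Hypothetical stand-in for validate_import_url: accept only absolute
    # http(s) URLs with a hostname; the real helper may check more than this.
    parsed = urlparse(url)
    return parsed.scheme in ('http', 'https') and bool(parsed.netloc)

print(looks_like_safe_import_url('https://example.com/image.jpg'))  # True
print(looks_like_safe_import_url('file:///etc/passwd'))             # False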

View File

@@ -1,11 +1,21 @@
import json
from io import BytesIO
from re import match
from zipfile import ZipFile
import asyncio
import django.core.management.commands.runserver as runserver
from asgiref.sync import sync_to_async
from pyppeteer import launch
from cookbook.integration.integration import Integration
from rest_framework.renderers import JSONRenderer
from cookbook.helper.image_processing import get_filetype
from cookbook.integration.integration import Integration
from cookbook.serializer import RecipeExportSerializer
from cookbook.models import ExportLog
from asgiref.sync import sync_to_async
import django.core.management.commands.runserver as runserver
import logging
class PDFexport(Integration):
@@ -32,6 +42,7 @@ class PDFexport(Integration):
}
}
files = []
for recipe in recipes:
@@ -39,18 +50,20 @@ class PDFexport(Integration):
await page.emulateMedia('print')
await page.setCookie(cookies)
await page.goto('http://' + cmd.default_addr + ':' + cmd.default_port + '/view/recipe/' + str(recipe.id), {'waitUntil': 'domcontentloaded'})
await page.waitForSelector('#printReady')
await page.goto('http://'+cmd.default_addr+':'+cmd.default_port+'/view/recipe/'+str(recipe.id), {'waitUntil': 'domcontentloaded'})
await page.waitForSelector('#printReady');
files.append([recipe.name + '.pdf', await page.pdf(options)])
await page.close()
await page.close();
el.exported_recipes += 1
el.msg += self.get_recipe_processed_msg(recipe)
await sync_to_async(el.save, thread_sensitive=True)()
await browser.close()
return files
def get_files_from_recipes(self, recipes, el, cookie):
return asyncio.run(self.get_files_from_recipes_async(recipes, el, cookie))
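
The PDF export drives an async pyppeteer session from synchronous Django code by wrapping it in asyncio.run. A stripped-down sketch of that sync-to-async bridge, with the real page rendering replaced by a placeholder coroutine (names and payloads are invented):

import asyncio

async def render_recipes(recipe_ids):
    # Placeholder for the pyppeteer work: open a page per recipe, wait for
    # '#printReady', collect the PDF bytes.
    files = []
    for recipe_id in recipe_ids:
        await asyncio.sleep(0)  # stands in for page.goto / page.pdf
        files.append((f'recipe-{recipe_id}.pdf', b'%PDF-...'))
    return files

def get_files_from_recipes(recipe_ids):
    # Synchronous entry point, same bridge as the integration above.
    return asyncio.run(render_recipes(recipe_ids))

print(get_files_from_recipes([1, 2]))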

View File

@@ -35,7 +35,7 @@ class Pepperplate(Integration):
recipe = Recipe.objects.create(name=title, description=description, created_by=self.request.user, internal=True, space=self.request.space)
step = Step.objects.create(
instruction='\n'.join(directions) + '\n\n', space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
instruction='\n'.join(directions) + '\n\n', space=self.request.space,
)
ingredient_parser = IngredientParser(self.request, True)

View File

@@ -2,9 +2,7 @@ from io import BytesIO
import requests
from cookbook.helper.HelperFunctions import validate_import_url
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text, parse_time
from cookbook.integration.integration import Integration
from cookbook.models import Ingredient, Keyword, Recipe, Step
@@ -19,41 +17,35 @@ class Plantoeat(Integration):
tags = None
ingredients = []
directions = []
fields = {}
description = ''
for line in file.replace('\r', '').split('\n'):
if line.strip() != '':
if 'Title:' in line:
fields['name'] = line.replace('Title:', '').replace('"', '').strip()
title = line.replace('Title:', '').replace('"', '').strip()
if 'Description:' in line:
fields['description'] = line.replace('Description:', '').strip()
if 'Serves:' in line:
fields['servings'] = parse_servings(line.replace('Serves:', '').strip())
if 'Source:' in line:
fields['source_url'] = line.replace('Source:', '').strip()
description = line.replace('Description:', '').strip()
if 'Source:' in line or 'Serves:' in line or 'Prep Time:' in line or 'Cook Time:' in line:
directions.append(line.strip() + '\n')
if 'Photo Url:' in line:
image_url = line.replace('Photo Url:', '').strip()
if 'Prep Time:' in line:
fields['working_time'] = parse_time(line.replace('Prep Time:', '').strip())
if 'Cook Time:' in line:
fields['waiting_time'] = parse_time(line.replace('Cook Time:', '').strip())
if 'Tags:' in line:
tags = line.replace('Tags:', '').strip()
if ingredient_mode:
if len(line) > 2 and 'Instructions:' not in line:
ingredients.append(line.strip())
if direction_mode:
if len(line) > 2:
directions.append(line.strip() + '\n')
if 'Ingredients:' in line:
ingredient_mode = True
if 'Directions:' in line:
ingredient_mode = False
direction_mode = True
if ingredient_mode:
if len(line) > 2 and 'Ingredients:' not in line:
ingredients.append(line.strip())
if direction_mode:
if len(line) > 2:
directions.append(line.strip() + '\n')
recipe = Recipe.objects.create(**fields, created_by=self.request.user, internal=True, space=self.request.space)
recipe = Recipe.objects.create(name=title, description=description, created_by=self.request.user, internal=True, space=self.request.space)
step = Step.objects.create(
instruction='\n'.join(directions) + '\n\n', space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
instruction='\n'.join(directions) + '\n\n', space=self.request.space,
)
if tags:
@@ -75,9 +67,8 @@ class Plantoeat(Integration):
if image_url:
try:
if validate_import_url(image_url):
response = requests.get(image_url)
self.import_recipe_image(recipe, BytesIO(response.content))
response = requests.get(image_url)
self.import_recipe_image(recipe, BytesIO(response.content))
except Exception as e:
print('failed to import image ', str(e))

View File

@@ -5,10 +5,9 @@ from io import BytesIO
from zipfile import ZipFile
import requests
import validators
from django.utils.translation import gettext as _
from cookbook.helper.HelperFunctions import validate_import_url
from cookbook.helper.image_processing import get_filetype
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.integration.integration import Integration
@@ -47,7 +46,7 @@ class RecetteTek(Integration):
if not instructions:
instructions = ''
step = Step.objects.create(instruction=instructions, space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,)
step = Step.objects.create(instruction=instructions, space=self.request.space,)
# Append the original import url to the step (if it exists)
try:
@@ -62,7 +61,7 @@ class RecetteTek(Integration):
ingredient_parser = IngredientParser(self.request, True)
for ingredient in file['ingredients'].split('\n'):
if len(ingredient.strip()) > 0:
amount, unit, food, note = ingredient_parser.parse(ingredient.strip())
amount, unit, food, note = ingredient_parser.parse(food)
f = ingredient_parser.get_food(ingredient)
u = ingredient_parser.get_unit(unit)
step.ingredients.add(Ingredient.objects.create(
@@ -126,7 +125,7 @@ class RecetteTek(Integration):
else:
if file['originalPicture'] != '':
url = file['originalPicture']
if validate_import_url(url):
if validators.url(url, public=True):
response = requests.get(url)
if imghdr.what(BytesIO(response.content)) is not None:
self.import_recipe_image(recipe, BytesIO(response.content), filetype=get_filetype(file['originalPicture']))

View File

@@ -41,7 +41,7 @@ class RecipeKeeper(Integration):
except AttributeError:
pass
step = Step.objects.create(instruction='', space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients, )
step = Step.objects.create(instruction='', space=self.request.space, )
ingredient_parser = IngredientParser(self.request, True)
for ingredient in file.find("div", {"itemprop": "recipeIngredients"}).findChildren("p"):
@@ -58,13 +58,6 @@ class RecipeKeeper(Integration):
if s.text == "":
continue
step.instruction += s.text + ' \n'
step.save()
for s in file.find("div", {"itemprop": "recipeNotes"}).find_all("p"):
if s.text == "":
continue
step.instruction += s.text + ' \n'
step.save()
if file.find("span", {"itemprop": "recipeSource"}).text != '':
step.instruction += "\n\n" + _("Imported from") + ": " + file.find("span", {"itemprop": "recipeSource"}).text

View File

@@ -2,10 +2,9 @@ import json
from io import BytesIO
import requests
import validators
from cookbook.helper.HelperFunctions import validate_import_url
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text, parse_time
from cookbook.integration.integration import Integration
from cookbook.models import Ingredient, Recipe, Step
@@ -19,27 +18,25 @@ class RecipeSage(Integration):
created_by=self.request.user, internal=True,
space=self.request.space)
if file['recipeYield'] != '':
recipe.servings = parse_servings(file['recipeYield'])
recipe.servings_text = parse_servings_text(file['recipeYield'])
try:
if 'totalTime' in file and file['totalTime'] != '':
recipe.working_time = parse_time(file['totalTime'])
if file['recipeYield'] != '':
recipe.servings = int(file['recipeYield'])
if 'timePrep' in file and file['prepTime'] != '':
recipe.working_time = parse_time(file['timePrep'])
recipe.waiting_time = parse_time(file['totalTime']) - parse_time(file['timePrep'])
if file['totalTime'] != '':
recipe.waiting_time = int(file['totalTime']) - int(file['timePrep'])
if file['prepTime'] != '':
recipe.working_time = int(file['timePrep'])
recipe.save()
except Exception as e:
print('failed to parse time ', str(e))
recipe.save()
print('failed to parse yield or time ', str(e))
ingredient_parser = IngredientParser(self.request, True)
ingredients_added = False
for s in file['recipeInstructions']:
step = Step.objects.create(
instruction=s['text'], space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
instruction=s['text'], space=self.request.space,
)
if not ingredients_added:
ingredients_added = True
@@ -56,7 +53,7 @@ class RecipeSage(Integration):
if len(file['image']) > 0:
try:
url = file['image'][0]
if validate_import_url(url):
if validators.url(url, public=True):
response = requests.get(url)
self.import_recipe_image(recipe, BytesIO(response.content))
except Exception as e:

View File

@@ -1,78 +0,0 @@
import base64
from io import BytesIO
from lxml import etree
from cookbook.helper.ingredient_parser import IngredientParser
from cookbook.helper.recipe_url_import import parse_servings, parse_servings_text
from cookbook.integration.integration import Integration
from cookbook.models import Ingredient, Keyword, Recipe, Step
class Rezeptsuitede(Integration):
def split_recipe_file(self, file):
return etree.parse(file).getroot().getchildren()
def get_recipe_from_file(self, file):
recipe_xml = file
recipe = Recipe.objects.create(
name=recipe_xml.find('head').attrib['title'].strip(),
created_by=self.request.user, internal=True, space=self.request.space)
try:
if recipe_xml.find('head').attrib['servingtype']:
recipe.servings = parse_servings(recipe_xml.find('head').attrib['servingtype'].strip())
recipe.servings_text = parse_servings_text(recipe_xml.find('head').attrib['servingtype'].strip())
except KeyError:
pass
if recipe_xml.find('remark') is not None: # description is a list of <li>'s with text
if recipe_xml.find('remark').find('line') is not None:
recipe.description = recipe_xml.find('remark').find('line').text[:512]
for prep in recipe_xml.findall('preparation'):
try:
if prep.find('step').text:
step = Step.objects.create(
instruction=prep.find('step').text.strip(), space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
)
recipe.steps.add(step)
except Exception:
pass
ingredient_parser = IngredientParser(self.request, True)
if recipe_xml.find('part').find('ingredient') is not None:
ingredient_step = recipe.steps.first()
if ingredient_step is None:
ingredient_step = Step.objects.create(space=self.request.space, instruction='')
for ingredient in recipe_xml.find('part').findall('ingredient'):
f = ingredient_parser.get_food(ingredient.attrib['item'])
u = ingredient_parser.get_unit(ingredient.attrib['unit'])
amount = 0
if ingredient.attrib['qty'].strip() != '':
try:
amount, unit, note = ingredient_parser.parse_amount(ingredient.attrib['qty'])
except ValueError: # sometimes quantities contain words which can't be parsed
pass
ingredient_step.ingredients.add(Ingredient.objects.create(food=f, unit=u, amount=amount, space=self.request.space, ))
try:
k, created = Keyword.objects.get_or_create(name=recipe_xml.find('head').find('cat').text.strip(), space=self.request.space)
recipe.keywords.add(k)
except Exception:
pass
recipe.save()
try:
self.import_recipe_image(recipe, BytesIO(base64.b64decode(recipe_xml.find('head').find('picbin').text)), filetype='.jpeg')
except BaseException:
pass
return recipe
def get_file_from_recipe(self, recipe):
raise NotImplementedError('Method not implemented in storage integration')

View File

@@ -38,7 +38,7 @@ class RezKonv(Integration):
recipe.keywords.add(keyword)
step = Step.objects.create(
instruction=' \n'.join(directions) + '\n\n', space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients,
instruction=' \n'.join(directions) + '\n\n', space=self.request.space,
)
ingredient_parser = IngredientParser(self.request, True)
@@ -60,8 +60,8 @@ class RezKonv(Integration):
def split_recipe_file(self, file):
recipe_list = []
current_recipe = ''
# TODO build algorithm to try through encodings and fail if none work, use for all importers
# encoding_list = ['windows-1250', 'latin-1']
encoding_list = ['windows-1250',
'latin-1'] # TODO build algorithm to try through encodings and fail if none work, use for all importers
encoding = 'windows-1250'
for fl in file.readlines():
try:
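
The TODO above asks for a reusable "try several encodings, fail if none work" routine. A hedged sketch of what such a fallback decoder could look like (the helper name and encoding list are assumptions, not existing project code):

def decode_with_fallback(raw: bytes, encodings=('windows-1250', 'latin-1')) -> str:
    # Try each candidate encoding in order and return the first that decodes
    # cleanly; raise if none of them work, as the TODO suggests.
    for encoding in encodings:
        try:
            return raw.decode(encoding)
        except UnicodeDecodeError:
            continue
    raise ValueError(f'none of the encodings {encodings} could decode the input')

print(decode_with_fallback('Gemüse'.encode('latin-1')))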

View File

@@ -43,7 +43,7 @@ class Saffron(Integration):
recipe = Recipe.objects.create(name=title, description=description, created_by=self.request.user, internal=True, space=self.request.space, )
step = Step.objects.create(instruction='\n'.join(directions), space=self.request.space, show_ingredients_table=self.request.user.userpreference.show_step_ingredients, )
step = Step.objects.create(instruction='\n'.join(directions), space=self.request.space, )
ingredient_parser = IngredientParser(self.request, True)
for ingredient in ingredients:
@@ -59,11 +59,11 @@ class Saffron(Integration):
def get_file_from_recipe(self, recipe):
data = "Title: " + recipe.name if recipe.name else "" + "\n"
data += "Description: " + recipe.description if recipe.description else "" + "\n"
data = "Title: "+recipe.name if recipe.name else ""+"\n"
data += "Description: "+recipe.description if recipe.description else ""+"\n"
data += "Source: \n"
data += "Original URL: \n"
data += "Yield: " + str(recipe.servings) + "\n"
data += "Yield: "+str(recipe.servings)+"\n"
data += "Cookbook: \n"
data += "Section: \n"
data += "Image: \n"
@@ -78,13 +78,13 @@ class Saffron(Integration):
data += "Ingredients: \n"
for ingredient in recipeIngredient:
data += ingredient + "\n"
data += ingredient+"\n"
data += "Instructions: \n"
for instruction in recipeInstructions:
data += instruction + "\n"
data += instruction+"\n"
return recipe.name + '.txt', data
return recipe.name+'.txt', data
def get_files_from_recipes(self, recipes, el, cookie):
files = []
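
get_file_from_recipe above builds Saffron's plain-text layout line by line. A tiny standalone sketch that produces the same section order from a plain dictionary (the sample recipe data is invented):

def saffron_text(recipe: dict) -> str:
    # Emit the "Title / Description / ... / Ingredients / Instructions"
    # layout the exporter above assembles field by field.
    lines = [
        f"Title: {recipe.get('name', '')}",
        f"Description: {recipe.get('description', '')}",
        "Source:",
        "Original URL:",
        f"Yield: {recipe.get('servings', '')}",
        "Cookbook:",
        "Section:",
        "Image:",
        "Ingredients:",
        *recipe.get('ingredients', []),
        "Instructions:",
        *recipe.get('instructions', []),
    ]
    return '\n'.join(lines) + '\n'

print(saffron_text({'name': 'Pancakes', 'servings': 4,
                    'ingredients': ['200 g flour', '2 eggs'],
                    'instructions': ['Mix everything.', 'Fry in a pan.']}))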

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff