Mirror of https://github.com/TandoorRecipes/recipes.git (synced 2026-01-11 09:07:12 -05:00)
Merge branch 'develop' into feature/2402-make-now-count
@@ -1,11 +1,10 @@
import datetime

from django.conf import settings
from gettext import gettext as _

from allauth.account.adapter import DefaultAccountAdapter
from django.conf import settings
from django.contrib import messages
from django.core.cache import caches
from gettext import gettext as _

from cookbook.models import InviteLink

@@ -17,10 +16,13 @@ class AllAuthCustomAdapter(DefaultAccountAdapter):
Whether to allow sign-ups.
"""
signup_token = False
if 'signup_token' in request.session and InviteLink.objects.filter(valid_until__gte=datetime.datetime.today(), used_by=None, uuid=request.session['signup_token']).exists():
if 'signup_token' in request.session and InviteLink.objects.filter(
valid_until__gte=datetime.datetime.today(), used_by=None, uuid=request.session['signup_token']).exists():
signup_token = True

if (request.resolver_match.view_name == 'account_signup' or request.resolver_match.view_name == 'socialaccount_signup') and not settings.ENABLE_SIGNUP and not signup_token:
if request.resolver_match.view_name == 'account_signup' and not settings.ENABLE_SIGNUP and not signup_token:
return False
elif request.resolver_match.view_name == 'socialaccount_signup' and len(settings.SOCIAL_PROVIDERS) < 1:
return False
else:
return super(AllAuthCustomAdapter, self).is_open_for_signup(request)
@@ -33,7 +35,7 @@ class AllAuthCustomAdapter(DefaultAccountAdapter):
if c == default:
try:
super(AllAuthCustomAdapter, self).send_mail(template_prefix, email, context)
except Exception: # dont fail signup just because confirmation mail could not be send
except Exception: # dont fail signup just because confirmation mail could not be send
pass
else:
messages.add_message(self.request, messages.ERROR, _('In order to prevent spam, the requested email was not send. Please wait a few minutes and try again.'))

@@ -1,6 +1,3 @@
import cookbook.helper.dal
from cookbook.helper.AllAuthCustomAdapter import AllAuthCustomAdapter

__all__ = [
'dal',
]

227 cookbook/helper/automation_helper.py (new file)
@@ -0,0 +1,227 @@
|
||||
import re
|
||||
|
||||
from django.core.cache import caches
|
||||
from django.db.models.functions import Lower
|
||||
|
||||
from cookbook.models import Automation
|
||||
|
||||
|
||||
class AutomationEngine:
|
||||
request = None
|
||||
source = None
|
||||
use_cache = None
|
||||
food_aliases = None
|
||||
keyword_aliases = None
|
||||
unit_aliases = None
|
||||
never_unit = None
|
||||
transpose_words = None
|
||||
regex_replace = {
|
||||
Automation.DESCRIPTION_REPLACE: None,
|
||||
Automation.INSTRUCTION_REPLACE: None,
|
||||
Automation.FOOD_REPLACE: None,
|
||||
Automation.UNIT_REPLACE: None,
|
||||
Automation.NAME_REPLACE: None,
|
||||
}
|
||||
|
||||
def __init__(self, request, use_cache=True, source=None):
|
||||
self.request = request
|
||||
self.use_cache = use_cache
|
||||
if not source:
|
||||
self.source = "default_string_to_avoid_false_regex_match"
|
||||
else:
|
||||
self.source = source
|
||||
|
||||
def apply_keyword_automation(self, keyword):
|
||||
keyword = keyword.strip()
|
||||
if self.use_cache and self.keyword_aliases is None:
|
||||
self.keyword_aliases = {}
|
||||
KEYWORD_CACHE_KEY = f'automation_keyword_alias_{self.request.space.pk}'
|
||||
if c := caches['default'].get(KEYWORD_CACHE_KEY, None):
|
||||
self.keyword_aliases = c
|
||||
caches['default'].touch(KEYWORD_CACHE_KEY, 30)
|
||||
else:
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.KEYWORD_ALIAS).only('param_1', 'param_2').order_by('order').all():
|
||||
self.keyword_aliases[a.param_1.lower()] = a.param_2
|
||||
caches['default'].set(KEYWORD_CACHE_KEY, self.keyword_aliases, 30)
|
||||
else:
|
||||
self.keyword_aliases = {}
|
||||
if self.keyword_aliases:
|
||||
try:
|
||||
keyword = self.keyword_aliases[keyword.lower()]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
if automation := Automation.objects.filter(space=self.request.space, type=Automation.KEYWORD_ALIAS, param_1__iexact=keyword, disabled=False).order_by('order').first():
|
||||
return automation.param_2
|
||||
return keyword
|
||||
|
||||
def apply_unit_automation(self, unit):
|
||||
unit = unit.strip()
|
||||
if self.use_cache and self.unit_aliases is None:
|
||||
self.unit_aliases = {}
|
||||
UNIT_CACHE_KEY = f'automation_unit_alias_{self.request.space.pk}'
|
||||
if c := caches['default'].get(UNIT_CACHE_KEY, None):
|
||||
self.unit_aliases = c
|
||||
caches['default'].touch(UNIT_CACHE_KEY, 30)
|
||||
else:
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.UNIT_ALIAS).only('param_1', 'param_2').order_by('order').all():
|
||||
self.unit_aliases[a.param_1.lower()] = a.param_2
|
||||
caches['default'].set(UNIT_CACHE_KEY, self.unit_aliases, 30)
|
||||
else:
|
||||
self.unit_aliases = {}
|
||||
if self.unit_aliases:
|
||||
try:
|
||||
unit = self.unit_aliases[unit.lower()]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
if automation := Automation.objects.filter(space=self.request.space, type=Automation.UNIT_ALIAS, param_1__iexact=unit, disabled=False).order_by('order').first():
|
||||
return automation.param_2
|
||||
return self.apply_regex_replace_automation(unit, Automation.UNIT_REPLACE)
|
||||
|
||||
def apply_food_automation(self, food):
|
||||
food = food.strip()
|
||||
if self.use_cache and self.food_aliases is None:
|
||||
self.food_aliases = {}
|
||||
FOOD_CACHE_KEY = f'automation_food_alias_{self.request.space.pk}'
|
||||
if c := caches['default'].get(FOOD_CACHE_KEY, None):
|
||||
self.food_aliases = c
|
||||
caches['default'].touch(FOOD_CACHE_KEY, 30)
|
||||
else:
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.FOOD_ALIAS).only('param_1', 'param_2').order_by('order').all():
|
||||
self.food_aliases[a.param_1.lower()] = a.param_2
|
||||
caches['default'].set(FOOD_CACHE_KEY, self.food_aliases, 30)
|
||||
else:
|
||||
self.food_aliases = {}
|
||||
|
||||
if self.food_aliases:
|
||||
try:
|
||||
return self.food_aliases[food.lower()]
|
||||
except KeyError:
|
||||
return food
|
||||
else:
|
||||
if automation := Automation.objects.filter(space=self.request.space, type=Automation.FOOD_ALIAS, param_1__iexact=food, disabled=False).order_by('order').first():
|
||||
return automation.param_2
|
||||
return self.apply_regex_replace_automation(food, Automation.FOOD_REPLACE)
|
||||
|
||||
def apply_never_unit_automation(self, tokens):
|
||||
"""
|
||||
Moves a string that should never be treated as a unit to next token and optionally replaced with default unit
|
||||
e.g. NEVER_UNIT: param1: egg, param2: None would modify ['1', 'egg', 'white'] to ['1', '', 'egg', 'white']
|
||||
or NEVER_UNIT: param1: egg, param2: pcs would modify ['1', 'egg', 'yolk'] to ['1', 'pcs', 'egg', 'yolk']
|
||||
:param1 string: string that should never be considered a unit, will be moved to token[2]
|
||||
:param2 (optional) unit as string: will insert unit string into token[1]
|
||||
:return: unit as string (possibly changed by automation)
|
||||
"""
|
||||
|
||||
if self.use_cache and self.never_unit is None:
|
||||
self.never_unit = {}
|
||||
NEVER_UNIT_CACHE_KEY = f'automation_never_unit_{self.request.space.pk}'
|
||||
if c := caches['default'].get(NEVER_UNIT_CACHE_KEY, None):
|
||||
self.never_unit = c
|
||||
caches['default'].touch(NEVER_UNIT_CACHE_KEY, 30)
|
||||
else:
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.NEVER_UNIT).only('param_1', 'param_2').order_by('order').all():
|
||||
self.never_unit[a.param_1.lower()] = a.param_2
|
||||
caches['default'].set(NEVER_UNIT_CACHE_KEY, self.never_unit, 30)
|
||||
else:
|
||||
self.never_unit = {}
|
||||
|
||||
new_unit = None
|
||||
alt_unit = self.apply_unit_automation(tokens[1])
|
||||
never_unit = False
|
||||
if self.never_unit:
|
||||
try:
|
||||
new_unit = self.never_unit[tokens[1].lower()]
|
||||
never_unit = True
|
||||
except KeyError:
|
||||
return tokens
|
||||
else:
|
||||
if a := Automation.objects.annotate(param_1_lower=Lower('param_1')).filter(space=self.request.space, type=Automation.NEVER_UNIT, param_1_lower__in=[
|
||||
tokens[1].lower(), alt_unit.lower()], disabled=False).order_by('order').first():
|
||||
new_unit = a.param_2
|
||||
never_unit = True
|
||||
|
||||
if never_unit:
|
||||
tokens.insert(1, new_unit)
|
||||
return tokens
|
||||
|
||||
def apply_transpose_automation(self, string):
|
||||
"""
|
||||
If two words (param_1 & param_2) are detected in sequence, swap their position in the ingredient string
|
||||
:param 1: first word to detect
|
||||
:param 2: second word to detect
|
||||
return: new ingredient string
|
||||
"""
|
||||
if self.use_cache and self.transpose_words is None:
|
||||
self.transpose_words = {}
|
||||
TRANSPOSE_WORDS_CACHE_KEY = f'automation_transpose_words_{self.request.space.pk}'
|
||||
if c := caches['default'].get(TRANSPOSE_WORDS_CACHE_KEY, None):
|
||||
self.transpose_words = c
|
||||
caches['default'].touch(TRANSPOSE_WORDS_CACHE_KEY, 30)
|
||||
else:
|
||||
i = 0
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.TRANSPOSE_WORDS).only(
|
||||
'param_1', 'param_2').order_by('order').all()[:512]:
|
||||
self.transpose_words[i] = [a.param_1.lower(), a.param_2.lower()]
|
||||
i += 1
|
||||
caches['default'].set(TRANSPOSE_WORDS_CACHE_KEY, self.transpose_words, 30)
|
||||
else:
|
||||
self.transpose_words = {}
|
||||
|
||||
tokens = [x.lower() for x in string.replace(',', ' ').split()]
|
||||
if self.transpose_words:
|
||||
for key, value in self.transpose_words.items():
|
||||
if value[0] in tokens and value[1] in tokens:
|
||||
string = re.sub(rf"\b({value[0]})\W*({value[1]})\b", r"\2 \1", string, flags=re.IGNORECASE)
|
||||
else:
|
||||
for rule in Automation.objects.filter(space=self.request.space, type=Automation.TRANSPOSE_WORDS, disabled=False) \
|
||||
.annotate(param_1_lower=Lower('param_1'), param_2_lower=Lower('param_2')) \
|
||||
.filter(param_1_lower__in=tokens, param_2_lower__in=tokens).order_by('order')[:512]:
|
||||
if rule.param_1 in tokens and rule.param_2 in tokens:
|
||||
string = re.sub(rf"\b({rule.param_1})\W*({rule.param_2})\b", r"\2 \1", string, flags=re.IGNORECASE)
|
||||
return string
|
||||
|
||||
def apply_regex_replace_automation(self, string, automation_type):
|
||||
# TODO add warning - maybe on SPACE page? when a max of 512 automations of a specific type is exceeded (ALIAS types excluded?)
|
||||
"""
|
||||
Replaces strings in a recipe field that are from a matched source
|
||||
field_type are Automation.type that apply regex replacements
|
||||
Automation.DESCRIPTION_REPLACE
|
||||
Automation.INSTRUCTION_REPLACE
|
||||
Automation.FOOD_REPLACE
|
||||
Automation.UNIT_REPLACE
|
||||
Automation.NAME_REPLACE
|
||||
|
||||
regex replacment utilized the following fields from the Automation model
|
||||
:param 1: source that should apply the automation in regex format ('.*' for all)
|
||||
:param 2: regex pattern to match ()
|
||||
:param 3: replacement string (leave blank to delete)
|
||||
return: new string
|
||||
"""
|
||||
if self.use_cache and self.regex_replace[automation_type] is None:
|
||||
self.regex_replace[automation_type] = {}
|
||||
REGEX_REPLACE_CACHE_KEY = f'automation_regex_replace_{self.request.space.pk}'
|
||||
if c := caches['default'].get(REGEX_REPLACE_CACHE_KEY, None):
|
||||
self.regex_replace[automation_type] = c[automation_type]
|
||||
caches['default'].touch(REGEX_REPLACE_CACHE_KEY, 30)
|
||||
else:
|
||||
i = 0
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=automation_type).only(
|
||||
'param_1', 'param_2', 'param_3').order_by('order').all()[:512]:
|
||||
self.regex_replace[automation_type][i] = [a.param_1, a.param_2, a.param_3]
|
||||
i += 1
|
||||
caches['default'].set(REGEX_REPLACE_CACHE_KEY, self.regex_replace, 30)
|
||||
else:
|
||||
self.regex_replace[automation_type] = {}
|
||||
|
||||
if self.regex_replace[automation_type]:
|
||||
for rule in self.regex_replace[automation_type].values():
|
||||
if re.match(rule[0], (self.source)[:512]):
|
||||
string = re.sub(rule[1], rule[2], string, flags=re.IGNORECASE)
|
||||
else:
|
||||
for rule in Automation.objects.filter(space=self.request.space, disabled=False, type=automation_type).only(
|
||||
'param_1', 'param_2', 'param_3').order_by('order').all()[:512]:
|
||||
if re.match(rule.param_1, (self.source)[:512]):
|
||||
string = re.sub(rule.param_2, rule.param_3, string, flags=re.IGNORECASE)
|
||||
return string
|
||||
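Note: the alias methods above (keyword, unit, food) all follow the same pattern — load the space's alias automations once, keep them in a short-lived cache keyed by space, and fall back to a per-item query when caching is off. A minimal, framework-free sketch of that pattern follows; the class name, sample rows, and TTL handling below are illustrative assumptions, not part of this changeset.

# Illustrative sketch only (not project code): the shared alias-lookup pattern
# of the AutomationEngine methods above — load aliases once, keep them for a
# short TTL, and answer lookups case-insensitively with the original value as
# the fallback when no automation matches.
import time

class AliasCacheSketch:
    def __init__(self, ttl_seconds=30):
        self.ttl = ttl_seconds
        self._aliases = {}       # lower-cased param_1 -> param_2
        self._loaded_at = None

    def _load(self):
        # stand-in for Automation.objects.filter(space=..., disabled=False, type=...)
        rows = [('tbsp.', 'tbsp'), ('Soup Spoon', 'tbsp')]
        self._aliases = {param_1.lower(): param_2 for param_1, param_2 in rows}
        self._loaded_at = time.monotonic()

    def resolve(self, value):
        value = value.strip()
        if self._loaded_at is None or time.monotonic() - self._loaded_at > self.ttl:
            self._load()
        return self._aliases.get(value.lower(), value)

aliases = AliasCacheSketch()
print(aliases.resolve('Soup Spoon'))  # -> 'tbsp' (alias matched)
print(aliases.resolve('cup'))         # -> 'cup'  (no automation, value passes through)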
11 cookbook/helper/cache_helper.py (new file)
@@ -0,0 +1,11 @@
class CacheHelper:
space = None

BASE_UNITS_CACHE_KEY = None
PROPERTY_TYPE_CACHE_KEY = None

def __init__(self, space):
self.space = space

self.BASE_UNITS_CACHE_KEY = f'SPACE_{space.id}_BASE_UNITS'
self.PROPERTY_TYPE_CACHE_KEY = f'SPACE_{space.id}_PROPERTY_TYPES'

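For context, CacheHelper only builds per-space key names; the actual reads and writes go through Django's low-level cache API, the same get/touch/set calls used in automation_helper.py above. A minimal sketch is below; the inline locmem configuration and the sample unit list are assumptions so the snippet runs outside the project.

# Illustrative sketch only: per-space cache keys (as CacheHelper builds them)
# used with Django's low-level cache API. The locmem backend configured here
# is just to make the snippet self-contained; it is not the project's setup.
from django.conf import settings

if not settings.configured:
    settings.configure(CACHES={'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}})

from django.core.cache import caches

space_id = 1  # hypothetical space id
BASE_UNITS_CACHE_KEY = f'SPACE_{space_id}_BASE_UNITS'

base_units = caches['default'].get(BASE_UNITS_CACHE_KEY, None)
if base_units is None:
    base_units = ['g', 'kg', 'ml', 'l']                      # stand-in for a database query
    caches['default'].set(BASE_UNITS_CACHE_KEY, base_units, 60 * 60)
else:
    caches['default'].touch(BASE_UNITS_CACHE_KEY, 60 * 60)   # refresh the TTL on a cache hit

print(base_units)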
@@ -1,8 +1,7 @@
import os
import sys
from io import BytesIO

from PIL import Image
from io import BytesIO


def rescale_image_jpeg(image_object, base_width=1020):
@@ -40,7 +39,12 @@ def get_filetype(name):
# TODO also add env variable to define which images sizes should be compressed
# filetype argument can not be optional, otherwise this function will treat all images as if they were a jpeg
# Because it's no longer optional, no reason to return it
def handle_image(request, image_object, filetype):
def handle_image(request, image_object, filetype):
try:
Image.open(image_object).verify()
except Exception:
return None

if (image_object.size / 1000) > 500: # if larger than 500 kb compress
if filetype == '.jpeg' or filetype == '.jpg':
return rescale_image_jpeg(image_object)

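rescale_image_jpeg itself is unchanged in this hunk. For reference, a width-based JPEG rescale with Pillow typically looks like the sketch below; the helper name, output quality, and usage values are illustrative assumptions, not the project's implementation (only the base_width=1020 default appears in the diff above).

# Illustrative sketch only: a width-based JPEG rescale similar in spirit to
# rescale_image_jpeg(image_object, base_width=1020) above.
from io import BytesIO
from PIL import Image

def rescale_jpeg_sketch(image_object, base_width=1020):
    img = Image.open(image_object)
    if img.width > base_width:
        ratio = base_width / float(img.width)
        # LANCZOS resampling keeps quality reasonable when downscaling
        img = img.resize((base_width, int(img.height * ratio)), Image.Resampling.LANCZOS)
    out = BytesIO()
    img.convert('RGB').save(out, format='JPEG', quality=85)
    out.seek(0)
    return out

# usage: shrink an oversized in-memory image
big = BytesIO()
Image.new('RGB', (3000, 2000), 'white').save(big, format='JPEG')
big.seek(0)
print(Image.open(rescale_jpeg_sketch(big)).size)  # -> (1020, 680)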
@@ -2,18 +2,16 @@ import re
|
||||
import string
|
||||
import unicodedata
|
||||
|
||||
from django.core.cache import caches
|
||||
|
||||
from cookbook.models import Unit, Food, Automation, Ingredient
|
||||
from cookbook.helper.automation_helper import AutomationEngine
|
||||
from cookbook.models import Food, Ingredient, Unit
|
||||
|
||||
|
||||
class IngredientParser:
|
||||
request = None
|
||||
ignore_rules = False
|
||||
food_aliases = {}
|
||||
unit_aliases = {}
|
||||
automation = None
|
||||
|
||||
def __init__(self, request, cache_mode, ignore_automations=False):
|
||||
def __init__(self, request, cache_mode=True, ignore_automations=False):
|
||||
"""
|
||||
Initialize ingredient parser
|
||||
:param request: request context (to control caching, rule ownership, etc.)
|
||||
@@ -22,65 +20,8 @@ class IngredientParser:
|
||||
"""
|
||||
self.request = request
|
||||
self.ignore_rules = ignore_automations
|
||||
if cache_mode:
|
||||
FOOD_CACHE_KEY = f'automation_food_alias_{self.request.space.pk}'
|
||||
if c := caches['default'].get(FOOD_CACHE_KEY, None):
|
||||
self.food_aliases = c
|
||||
caches['default'].touch(FOOD_CACHE_KEY, 30)
|
||||
else:
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.FOOD_ALIAS).only('param_1', 'param_2').order_by('order').all():
|
||||
self.food_aliases[a.param_1] = a.param_2
|
||||
caches['default'].set(FOOD_CACHE_KEY, self.food_aliases, 30)
|
||||
|
||||
UNIT_CACHE_KEY = f'automation_unit_alias_{self.request.space.pk}'
|
||||
if c := caches['default'].get(UNIT_CACHE_KEY, None):
|
||||
self.unit_aliases = c
|
||||
caches['default'].touch(UNIT_CACHE_KEY, 30)
|
||||
else:
|
||||
for a in Automation.objects.filter(space=self.request.space, disabled=False, type=Automation.UNIT_ALIAS).only('param_1', 'param_2').order_by('order').all():
|
||||
self.unit_aliases[a.param_1] = a.param_2
|
||||
caches['default'].set(UNIT_CACHE_KEY, self.unit_aliases, 30)
|
||||
else:
|
||||
self.food_aliases = {}
|
||||
self.unit_aliases = {}
|
||||
|
||||
def apply_food_automation(self, food):
|
||||
"""
|
||||
Apply food alias automations to passed food
|
||||
:param food: unit as string
|
||||
:return: food as string (possibly changed by automation)
|
||||
"""
|
||||
if self.ignore_rules:
|
||||
return food
|
||||
else:
|
||||
if self.food_aliases:
|
||||
try:
|
||||
return self.food_aliases[food]
|
||||
except KeyError:
|
||||
return food
|
||||
else:
|
||||
if automation := Automation.objects.filter(space=self.request.space, type=Automation.FOOD_ALIAS, param_1=food, disabled=False).order_by('order').first():
|
||||
return automation.param_2
|
||||
return food
|
||||
|
||||
def apply_unit_automation(self, unit):
|
||||
"""
|
||||
Apply unit alias automations to passed unit
|
||||
:param unit: unit as string
|
||||
:return: unit as string (possibly changed by automation)
|
||||
"""
|
||||
if self.ignore_rules:
|
||||
return unit
|
||||
else:
|
||||
if self.unit_aliases:
|
||||
try:
|
||||
return self.unit_aliases[unit]
|
||||
except KeyError:
|
||||
return unit
|
||||
else:
|
||||
if automation := Automation.objects.filter(space=self.request.space, type=Automation.UNIT_ALIAS, param_1=unit, disabled=False).order_by('order').first():
|
||||
return automation.param_2
|
||||
return unit
|
||||
if not self.ignore_rules:
|
||||
self.automation = AutomationEngine(self.request, use_cache=cache_mode)
|
||||
|
||||
def get_unit(self, unit):
|
||||
"""
|
||||
@@ -91,7 +32,10 @@ class IngredientParser:
|
||||
if not unit:
|
||||
return None
|
||||
if len(unit) > 0:
|
||||
u, created = Unit.objects.get_or_create(name=self.apply_unit_automation(unit), space=self.request.space)
|
||||
if self.ignore_rules:
|
||||
u, created = Unit.objects.get_or_create(name=unit.strip(), space=self.request.space)
|
||||
else:
|
||||
u, created = Unit.objects.get_or_create(name=self.automation.apply_unit_automation(unit), space=self.request.space)
|
||||
return u
|
||||
return None
|
||||
|
||||
@@ -104,7 +48,10 @@ class IngredientParser:
|
||||
if not food:
|
||||
return None
|
||||
if len(food) > 0:
|
||||
f, created = Food.objects.get_or_create(name=self.apply_food_automation(food), space=self.request.space)
|
||||
if self.ignore_rules:
|
||||
f, created = Food.objects.get_or_create(name=food.strip(), space=self.request.space)
|
||||
else:
|
||||
f, created = Food.objects.get_or_create(name=self.automation.apply_food_automation(food), space=self.request.space)
|
||||
return f
|
||||
return None
|
||||
|
||||
@@ -133,10 +80,10 @@ class IngredientParser:
|
||||
end = 0
|
||||
while (end < len(x) and (x[end] in string.digits
|
||||
or (
|
||||
(x[end] == '.' or x[end] == ',' or x[end] == '/')
|
||||
and end + 1 < len(x)
|
||||
and x[end + 1] in string.digits
|
||||
))):
|
||||
(x[end] == '.' or x[end] == ',' or x[end] == '/')
|
||||
and end + 1 < len(x)
|
||||
and x[end + 1] in string.digits
|
||||
))):
|
||||
end += 1
|
||||
if end > 0:
|
||||
if "/" in x[:end]:
|
||||
@@ -160,7 +107,8 @@ class IngredientParser:
|
||||
if unit is not None and unit.strip() == '':
|
||||
unit = None
|
||||
|
||||
if unit is not None and (unit.startswith('(') or unit.startswith('-')): # i dont know any unit that starts with ( or - so its likely an alternative like 1L (500ml) Water or 2-3
|
||||
if unit is not None and (unit.startswith('(') or unit.startswith(
|
||||
'-')): # i dont know any unit that starts with ( or - so its likely an alternative like 1L (500ml) Water or 2-3
|
||||
unit = None
|
||||
note = x
|
||||
return amount, unit, note
|
||||
@@ -230,8 +178,8 @@ class IngredientParser:
|
||||
|
||||
# if the string contains parenthesis early on remove it and place it at the end
|
||||
# because its likely some kind of note
|
||||
if re.match('(.){1,6}\s\((.[^\(\)])+\)\s', ingredient):
|
||||
match = re.search('\((.[^\(])+\)', ingredient)
|
||||
if re.match('(.){1,6}\\s\\((.[^\\(\\)])+\\)\\s', ingredient):
|
||||
match = re.search('\\((.[^\\(])+\\)', ingredient)
|
||||
ingredient = ingredient[:match.start()] + ingredient[match.end():] + ' ' + ingredient[match.start():match.end()]
|
||||
|
||||
# leading spaces before commas result in extra tokens, clean them out
|
||||
@@ -239,12 +187,15 @@ class IngredientParser:
|
||||
|
||||
# handle "(from) - (to)" amounts by using the minimum amount and adding the range to the description
|
||||
# "10.5 - 200 g XYZ" => "100 g XYZ (10.5 - 200)"
|
||||
ingredient = re.sub("^(\d+|\d+[\\.,]\d+) - (\d+|\d+[\\.,]\d+) (.*)", "\\1 \\3 (\\1 - \\2)", ingredient)
|
||||
ingredient = re.sub("^(\\d+|\\d+[\\.,]\\d+) - (\\d+|\\d+[\\.,]\\d+) (.*)", "\\1 \\3 (\\1 - \\2)", ingredient)
|
||||
|
||||
# if amount and unit are connected add space in between
|
||||
if re.match('([0-9])+([A-z])+\s', ingredient):
|
||||
if re.match('([0-9])+([A-z])+\\s', ingredient):
|
||||
ingredient = re.sub(r'(?<=([a-z])|\d)(?=(?(1)\d|[a-z]))', ' ', ingredient)
|
||||
|
||||
if not self.ignore_rules:
|
||||
ingredient = self.automation.apply_transpose_automation(ingredient)
|
||||
|
||||
tokens = ingredient.split() # split at each space into tokens
|
||||
if len(tokens) == 1:
|
||||
# there only is one argument, that must be the food
|
||||
@@ -257,6 +208,8 @@ class IngredientParser:
|
||||
# three arguments if it already has a unit there can't be
|
||||
# a fraction for the amount
|
||||
if len(tokens) > 2:
|
||||
if not self.ignore_rules:
|
||||
tokens = self.automation.apply_never_unit_automation(tokens)
|
||||
try:
|
||||
if unit is not None:
|
||||
# a unit is already found, no need to try the second argument for a fraction
|
||||
@@ -303,10 +256,11 @@ class IngredientParser:
|
||||
if unit_note not in note:
|
||||
note += ' ' + unit_note
|
||||
|
||||
if unit:
|
||||
unit = self.apply_unit_automation(unit.strip())
|
||||
if unit and not self.ignore_rules:
|
||||
unit = self.automation.apply_unit_automation(unit)
|
||||
|
||||
food = self.apply_food_automation(food.strip())
|
||||
if food and not self.ignore_rules:
|
||||
food = self.automation.apply_food_automation(food)
|
||||
if len(food) > Food._meta.get_field('name').max_length: # test if food name is to long
|
||||
# try splitting it at a space and taking only the first arg
|
||||
if len(food.split()) > 1 and len(food.split()[0]) < Food._meta.get_field('name').max_length:
|
||||
|
||||
204 cookbook/helper/open_data_importer.py (new file)
@@ -0,0 +1,204 @@
|
||||
from cookbook.models import (Food, FoodProperty, Property, PropertyType, Supermarket,
|
||||
SupermarketCategory, SupermarketCategoryRelation, Unit, UnitConversion)
|
||||
|
||||
|
||||
class OpenDataImporter:
|
||||
request = None
|
||||
data = {}
|
||||
slug_id_cache = {}
|
||||
update_existing = False
|
||||
use_metric = True
|
||||
|
||||
def __init__(self, request, data, update_existing=False, use_metric=True):
|
||||
self.request = request
|
||||
self.data = data
|
||||
self.update_existing = update_existing
|
||||
self.use_metric = use_metric
|
||||
|
||||
def _update_slug_cache(self, object_class, datatype):
|
||||
self.slug_id_cache[datatype] = dict(object_class.objects.filter(space=self.request.space, open_data_slug__isnull=False).values_list('open_data_slug', 'id', ))
|
||||
|
||||
def import_units(self):
|
||||
datatype = 'unit'
|
||||
|
||||
insert_list = []
|
||||
for u in list(self.data[datatype].keys()):
|
||||
insert_list.append(Unit(
|
||||
name=self.data[datatype][u]['name'],
|
||||
plural_name=self.data[datatype][u]['plural_name'],
|
||||
base_unit=self.data[datatype][u]['base_unit'] if self.data[datatype][u]['base_unit'] != '' else None,
|
||||
open_data_slug=u,
|
||||
space=self.request.space
|
||||
))
|
||||
|
||||
if self.update_existing:
|
||||
return Unit.objects.bulk_create(insert_list, update_conflicts=True, update_fields=(
|
||||
'name', 'plural_name', 'base_unit', 'open_data_slug'), unique_fields=('space', 'name',))
|
||||
else:
|
||||
return Unit.objects.bulk_create(insert_list, update_conflicts=True, update_fields=('open_data_slug',), unique_fields=('space', 'name',))
|
||||
|
||||
def import_category(self):
|
||||
datatype = 'category'
|
||||
|
||||
insert_list = []
|
||||
for k in list(self.data[datatype].keys()):
|
||||
insert_list.append(SupermarketCategory(
|
||||
name=self.data[datatype][k]['name'],
|
||||
open_data_slug=k,
|
||||
space=self.request.space
|
||||
))
|
||||
|
||||
return SupermarketCategory.objects.bulk_create(insert_list, update_conflicts=True, update_fields=('open_data_slug',), unique_fields=('space', 'name',))
|
||||
|
||||
def import_property(self):
|
||||
datatype = 'property'
|
||||
|
||||
insert_list = []
|
||||
for k in list(self.data[datatype].keys()):
|
||||
insert_list.append(PropertyType(
|
||||
name=self.data[datatype][k]['name'],
|
||||
unit=self.data[datatype][k]['unit'],
|
||||
open_data_slug=k,
|
||||
space=self.request.space
|
||||
))
|
||||
|
||||
return PropertyType.objects.bulk_create(insert_list, update_conflicts=True, update_fields=('open_data_slug',), unique_fields=('space', 'name',))
|
||||
|
||||
def import_supermarket(self):
|
||||
datatype = 'store'
|
||||
|
||||
self._update_slug_cache(SupermarketCategory, 'category')
|
||||
insert_list = []
|
||||
for k in list(self.data[datatype].keys()):
|
||||
insert_list.append(Supermarket(
|
||||
name=self.data[datatype][k]['name'],
|
||||
open_data_slug=k,
|
||||
space=self.request.space
|
||||
))
|
||||
|
||||
# always add open data slug if matching supermarket is found, otherwise relation might fail
|
||||
supermarkets = Supermarket.objects.bulk_create(insert_list, unique_fields=('space', 'name',), update_conflicts=True, update_fields=('open_data_slug',))
|
||||
self._update_slug_cache(Supermarket, 'store')
|
||||
|
||||
insert_list = []
|
||||
for k in list(self.data[datatype].keys()):
|
||||
relations = []
|
||||
order = 0
|
||||
for c in self.data[datatype][k]['categories']:
|
||||
relations.append(
|
||||
SupermarketCategoryRelation(
|
||||
supermarket_id=self.slug_id_cache[datatype][k],
|
||||
category_id=self.slug_id_cache['category'][c],
|
||||
order=order,
|
||||
)
|
||||
)
|
||||
order += 1
|
||||
|
||||
SupermarketCategoryRelation.objects.bulk_create(relations, ignore_conflicts=True, unique_fields=('supermarket', 'category',))
|
||||
|
||||
return supermarkets
|
||||
|
||||
def import_food(self):
|
||||
identifier_list = []
|
||||
datatype = 'food'
|
||||
for k in list(self.data[datatype].keys()):
|
||||
identifier_list.append(self.data[datatype][k]['name'])
|
||||
identifier_list.append(self.data[datatype][k]['plural_name'])
|
||||
|
||||
existing_objects_flat = []
|
||||
existing_objects = {}
|
||||
for f in Food.objects.filter(space=self.request.space).filter(name__in=identifier_list).values_list('id', 'name', 'plural_name'):
|
||||
existing_objects_flat.append(f[1])
|
||||
existing_objects_flat.append(f[2])
|
||||
existing_objects[f[1]] = f
|
||||
existing_objects[f[2]] = f
|
||||
|
||||
self._update_slug_cache(Unit, 'unit')
|
||||
self._update_slug_cache(PropertyType, 'property')
|
||||
|
||||
insert_list = []
|
||||
update_list = []
|
||||
update_field_list = []
|
||||
for k in list(self.data[datatype].keys()):
|
||||
if not (self.data[datatype][k]['name'] in existing_objects_flat or self.data[datatype][k]['plural_name'] in existing_objects_flat):
|
||||
insert_list.append({'data': {
|
||||
'name': self.data[datatype][k]['name'],
|
||||
'plural_name': self.data[datatype][k]['plural_name'] if self.data[datatype][k]['plural_name'] != '' else None,
|
||||
'supermarket_category_id': self.slug_id_cache['category'][self.data[datatype][k]['store_category']],
|
||||
'fdc_id': self.data[datatype][k]['fdc_id'] if self.data[datatype][k]['fdc_id'] != '' else None,
|
||||
'open_data_slug': k,
|
||||
'space': self.request.space.id,
|
||||
}})
|
||||
else:
|
||||
if self.data[datatype][k]['name'] in existing_objects:
|
||||
existing_food_id = existing_objects[self.data[datatype][k]['name']][0]
|
||||
else:
|
||||
existing_food_id = existing_objects[self.data[datatype][k]['plural_name']][0]
|
||||
|
||||
if self.update_existing:
|
||||
update_field_list = ['name', 'plural_name', 'preferred_unit_id', 'preferred_shopping_unit_id', 'supermarket_category_id', 'fdc_id', 'open_data_slug', ]
|
||||
update_list.append(Food(
|
||||
id=existing_food_id,
|
||||
name=self.data[datatype][k]['name'],
|
||||
plural_name=self.data[datatype][k]['plural_name'] if self.data[datatype][k]['plural_name'] != '' else None,
|
||||
supermarket_category_id=self.slug_id_cache['category'][self.data[datatype][k]['store_category']],
|
||||
fdc_id=self.data[datatype][k]['fdc_id'] if self.data[datatype][k]['fdc_id'] != '' else None,
|
||||
open_data_slug=k,
|
||||
))
|
||||
else:
|
||||
update_field_list = ['open_data_slug', ]
|
||||
update_list.append(Food(id=existing_food_id, open_data_slug=k, ))
|
||||
|
||||
Food.load_bulk(insert_list, None)
|
||||
if len(update_list) > 0:
|
||||
Food.objects.bulk_update(update_list, update_field_list)
|
||||
|
||||
self._update_slug_cache(Food, 'food')
|
||||
|
||||
food_property_list = []
|
||||
# alias_list = []
|
||||
|
||||
for k in list(self.data[datatype].keys()):
|
||||
for fp in self.data[datatype][k]['properties']['type_values']:
|
||||
# try catch here because somettimes key "k" is not set for he food cache
|
||||
try:
|
||||
food_property_list.append(Property(
|
||||
property_type_id=self.slug_id_cache['property'][fp['property_type']],
|
||||
property_amount=fp['property_value'],
|
||||
import_food_id=self.slug_id_cache['food'][k],
|
||||
space=self.request.space,
|
||||
))
|
||||
except KeyError:
|
||||
print(str(k) + ' is not in self.slug_id_cache["food"]')
|
||||
|
||||
Property.objects.bulk_create(food_property_list, ignore_conflicts=True, unique_fields=('space', 'import_food_id', 'property_type',))
|
||||
|
||||
property_food_relation_list = []
|
||||
for p in Property.objects.filter(space=self.request.space, import_food_id__isnull=False).values_list('import_food_id', 'id', ):
|
||||
property_food_relation_list.append(Food.properties.through(food_id=p[0], property_id=p[1]))
|
||||
|
||||
FoodProperty.objects.bulk_create(property_food_relation_list, ignore_conflicts=True, unique_fields=('food_id', 'property_id',))
|
||||
|
||||
return insert_list + update_list
|
||||
|
||||
def import_conversion(self):
|
||||
datatype = 'conversion'
|
||||
|
||||
insert_list = []
|
||||
for k in list(self.data[datatype].keys()):
|
||||
# try catch here because sometimes key "k" is not set for he food cache
|
||||
try:
|
||||
insert_list.append(UnitConversion(
|
||||
base_amount=self.data[datatype][k]['base_amount'],
|
||||
base_unit_id=self.slug_id_cache['unit'][self.data[datatype][k]['base_unit']],
|
||||
converted_amount=self.data[datatype][k]['converted_amount'],
|
||||
converted_unit_id=self.slug_id_cache['unit'][self.data[datatype][k]['converted_unit']],
|
||||
food_id=self.slug_id_cache['food'][self.data[datatype][k]['food']],
|
||||
open_data_slug=k,
|
||||
space=self.request.space,
|
||||
created_by=self.request.user,
|
||||
))
|
||||
except KeyError:
|
||||
print(str(k) + ' is not in self.slug_id_cache["food"]')
|
||||
|
||||
return UnitConversion.objects.bulk_create(insert_list, ignore_conflicts=True, unique_fields=('space', 'base_unit', 'converted_unit', 'food', 'open_data_slug'))
|
||||
@@ -4,16 +4,16 @@ from django.conf import settings
|
||||
from django.contrib import messages
|
||||
from django.contrib.auth.decorators import user_passes_test
|
||||
from django.core.cache import cache
|
||||
from django.core.exceptions import ValidationError, ObjectDoesNotExist
|
||||
from django.core.exceptions import ObjectDoesNotExist, ValidationError
|
||||
from django.http import HttpResponseRedirect
|
||||
from django.urls import reverse, reverse_lazy
|
||||
from django.utils.translation import gettext as _
|
||||
from oauth2_provider.contrib.rest_framework import TokenHasScope, TokenHasReadWriteScope
|
||||
from oauth2_provider.contrib.rest_framework import TokenHasReadWriteScope, TokenHasScope
|
||||
from oauth2_provider.models import AccessToken
|
||||
from rest_framework import permissions
|
||||
from rest_framework.permissions import SAFE_METHODS
|
||||
|
||||
from cookbook.models import ShareLink, Recipe, UserSpace
|
||||
from cookbook.models import Recipe, ShareLink, UserSpace
|
||||
|
||||
|
||||
def get_allowed_groups(groups_required):
|
||||
@@ -255,9 +255,6 @@ class CustomIsShared(permissions.BasePermission):
|
||||
return request.user.is_authenticated
|
||||
|
||||
def has_object_permission(self, request, view, obj):
|
||||
# # temporary hack to make old shopping list work with new shopping list
|
||||
# if obj.__class__.__name__ in ['ShoppingList', 'ShoppingListEntry']:
|
||||
# return is_object_shared(request.user, obj) or obj.created_by in list(request.user.get_shopping_share())
|
||||
return is_object_shared(request.user, obj)
|
||||
|
||||
|
||||
@@ -322,7 +319,8 @@ class CustomRecipePermission(permissions.BasePermission):
|
||||
|
||||
def has_permission(self, request, view): # user is either at least a guest or a share link is given and the request is safe
|
||||
share = request.query_params.get('share', None)
|
||||
return has_group_permission(request.user, ['guest']) or (share and request.method in SAFE_METHODS and 'pk' in view.kwargs)
|
||||
return ((has_group_permission(request.user, ['guest']) and request.method in SAFE_METHODS) or has_group_permission(
|
||||
request.user, ['user'])) or (share and request.method in SAFE_METHODS and 'pk' in view.kwargs)
|
||||
|
||||
def has_object_permission(self, request, view, obj):
|
||||
share = request.query_params.get('share', None)
|
||||
@@ -332,7 +330,8 @@ class CustomRecipePermission(permissions.BasePermission):
|
||||
if obj.private:
|
||||
return ((obj.created_by == request.user) or (request.user in obj.shared.all())) and obj.space == request.space
|
||||
else:
|
||||
return has_group_permission(request.user, ['guest']) and obj.space == request.space
|
||||
return ((has_group_permission(request.user, ['guest']) and request.method in SAFE_METHODS)
|
||||
or has_group_permission(request.user, ['user'])) and obj.space == request.space
|
||||
|
||||
|
||||
class CustomUserPermission(permissions.BasePermission):
|
||||
@@ -361,7 +360,7 @@ class CustomTokenHasScope(TokenHasScope):
|
||||
"""
|
||||
|
||||
def has_permission(self, request, view):
|
||||
if type(request.auth) == AccessToken:
|
||||
if isinstance(request.auth, AccessToken):
|
||||
return super().has_permission(request, view)
|
||||
else:
|
||||
return request.user.is_authenticated
|
||||
@@ -375,7 +374,7 @@ class CustomTokenHasReadWriteScope(TokenHasReadWriteScope):
|
||||
"""
|
||||
|
||||
def has_permission(self, request, view):
|
||||
if type(request.auth) == AccessToken:
|
||||
if isinstance(request.auth, AccessToken):
|
||||
return super().has_permission(request, view)
|
||||
else:
|
||||
return True
|
||||
@@ -434,3 +433,10 @@ def switch_user_active_space(user, space):
|
||||
return us
|
||||
except ObjectDoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
class IsReadOnlyDRF(permissions.BasePermission):
|
||||
message = 'You cannot interact with this object as it is not owned by you!'
|
||||
|
||||
def has_permission(self, request, view):
|
||||
return request.method in SAFE_METHODS
|
||||
|
||||
74 cookbook/helper/property_helper.py (new file)
@@ -0,0 +1,74 @@
|
||||
from django.core.cache import caches
|
||||
|
||||
from cookbook.helper.cache_helper import CacheHelper
|
||||
from cookbook.helper.unit_conversion_helper import UnitConversionHelper
|
||||
from cookbook.models import PropertyType
|
||||
|
||||
|
||||
class FoodPropertyHelper:
|
||||
space = None
|
||||
|
||||
def __init__(self, space):
|
||||
"""
|
||||
Helper to perform food property calculations
|
||||
:param space: space to limit scope to
|
||||
"""
|
||||
self.space = space
|
||||
|
||||
def calculate_recipe_properties(self, recipe):
|
||||
"""
|
||||
Calculate all food properties for a given recipe.
|
||||
:param recipe: recipe to calculate properties for
|
||||
:return: dict of with property keys and total/food values for each property available
|
||||
"""
|
||||
ingredients = []
|
||||
computed_properties = {}
|
||||
|
||||
for s in recipe.steps.all():
|
||||
ingredients += s.ingredients.all()
|
||||
|
||||
property_types = caches['default'].get(CacheHelper(self.space).PROPERTY_TYPE_CACHE_KEY, None)
|
||||
|
||||
if not property_types:
|
||||
property_types = PropertyType.objects.filter(space=self.space).all()
|
||||
# cache is cleared on property type save signal so long duration is fine
|
||||
caches['default'].set(CacheHelper(self.space).PROPERTY_TYPE_CACHE_KEY, property_types, 60 * 60)
|
||||
|
||||
for fpt in property_types:
|
||||
computed_properties[fpt.id] = {'id': fpt.id, 'name': fpt.name, 'description': fpt.description,
|
||||
'unit': fpt.unit, 'order': fpt.order, 'food_values': {}, 'total_value': 0, 'missing_value': False}
|
||||
|
||||
uch = UnitConversionHelper(self.space)
|
||||
|
||||
for i in ingredients:
|
||||
if i.food is not None:
|
||||
conversions = uch.get_conversions(i)
|
||||
for pt in property_types:
|
||||
found_property = False
|
||||
if i.food.properties_food_amount == 0 or i.food.properties_food_unit is None:
|
||||
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': i.food.name, 'value': 0}
|
||||
computed_properties[pt.id]['missing_value'] = i.food.properties_food_unit is None
|
||||
else:
|
||||
for p in i.food.properties.all():
|
||||
if p.property_type == pt:
|
||||
for c in conversions:
|
||||
if c.unit == i.food.properties_food_unit:
|
||||
found_property = True
|
||||
computed_properties[pt.id]['total_value'] += (c.amount / i.food.properties_food_amount) * p.property_amount
|
||||
computed_properties[pt.id]['food_values'] = self.add_or_create(
|
||||
computed_properties[p.property_type.id]['food_values'], c.food.id, (c.amount / i.food.properties_food_amount) * p.property_amount, c.food)
|
||||
if not found_property:
|
||||
computed_properties[pt.id]['missing_value'] = True
|
||||
computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': i.food.name, 'value': 0}
|
||||
|
||||
return computed_properties
|
||||
|
||||
# small dict helper to add to existing key or create new, probably a better way of doing this
|
||||
# TODO move to central helper ?
|
||||
@staticmethod
|
||||
def add_or_create(d, key, value, food):
|
||||
if key in d:
|
||||
d[key]['value'] += value
|
||||
else:
|
||||
d[key] = {'id': food.id, 'food': food.name, 'value': value}
|
||||
return d
|
||||
@@ -1,191 +0,0 @@
|
||||
# import json
|
||||
# import re
|
||||
# from json import JSONDecodeError
|
||||
# from urllib.parse import unquote
|
||||
|
||||
# from bs4 import BeautifulSoup
|
||||
# from bs4.element import Tag
|
||||
# from recipe_scrapers import scrape_html, scrape_me
|
||||
# from recipe_scrapers._exceptions import NoSchemaFoundInWildMode
|
||||
# from recipe_scrapers._utils import get_host_name, normalize_string
|
||||
|
||||
# from cookbook.helper import recipe_url_import as helper
|
||||
# from cookbook.helper.scrapers.scrapers import text_scraper
|
||||
|
||||
|
||||
# def get_recipe_from_source(text, url, request):
|
||||
# def build_node(k, v):
|
||||
# if isinstance(v, dict):
|
||||
# node = {
|
||||
# 'name': k,
|
||||
# 'value': k,
|
||||
# 'children': get_children_dict(v)
|
||||
# }
|
||||
# elif isinstance(v, list):
|
||||
# node = {
|
||||
# 'name': k,
|
||||
# 'value': k,
|
||||
# 'children': get_children_list(v)
|
||||
# }
|
||||
# else:
|
||||
# node = {
|
||||
# 'name': k + ": " + normalize_string(str(v)),
|
||||
# 'value': normalize_string(str(v))
|
||||
# }
|
||||
# return node
|
||||
|
||||
# def get_children_dict(children):
|
||||
# kid_list = []
|
||||
# for k, v in children.items():
|
||||
# kid_list.append(build_node(k, v))
|
||||
# return kid_list
|
||||
|
||||
# def get_children_list(children):
|
||||
# kid_list = []
|
||||
# for kid in children:
|
||||
# if type(kid) == list:
|
||||
# node = {
|
||||
# 'name': "unknown list",
|
||||
# 'value': "unknown list",
|
||||
# 'children': get_children_list(kid)
|
||||
# }
|
||||
# kid_list.append(node)
|
||||
# elif type(kid) == dict:
|
||||
# for k, v in kid.items():
|
||||
# kid_list.append(build_node(k, v))
|
||||
# else:
|
||||
# kid_list.append({
|
||||
# 'name': normalize_string(str(kid)),
|
||||
# 'value': normalize_string(str(kid))
|
||||
# })
|
||||
# return kid_list
|
||||
|
||||
# recipe_tree = []
|
||||
# parse_list = []
|
||||
# soup = BeautifulSoup(text, "html.parser")
|
||||
# html_data = get_from_html(soup)
|
||||
# images = get_images_from_source(soup, url)
|
||||
# text = unquote(text)
|
||||
# scrape = None
|
||||
|
||||
# if url and not text:
|
||||
# try:
|
||||
# scrape = scrape_me(url_path=url, wild_mode=True)
|
||||
# except(NoSchemaFoundInWildMode):
|
||||
# pass
|
||||
|
||||
# if not scrape:
|
||||
# try:
|
||||
# parse_list.append(remove_graph(json.loads(text)))
|
||||
# if not url and 'url' in parse_list[0]:
|
||||
# url = parse_list[0]['url']
|
||||
# scrape = text_scraper("<script type='application/ld+json'>" + text + "</script>", url=url)
|
||||
|
||||
# except JSONDecodeError:
|
||||
# for el in soup.find_all('script', type='application/ld+json'):
|
||||
# el = remove_graph(el)
|
||||
# if not url and 'url' in el:
|
||||
# url = el['url']
|
||||
# if type(el) == list:
|
||||
# for le in el:
|
||||
# parse_list.append(le)
|
||||
# elif type(el) == dict:
|
||||
# parse_list.append(el)
|
||||
# for el in soup.find_all(type='application/json'):
|
||||
# el = remove_graph(el)
|
||||
# if type(el) == list:
|
||||
# for le in el:
|
||||
# parse_list.append(le)
|
||||
# elif type(el) == dict:
|
||||
# parse_list.append(el)
|
||||
# scrape = text_scraper(text, url=url)
|
||||
|
||||
# recipe_json = helper.get_from_scraper(scrape, request)
|
||||
|
||||
# # TODO: DEPRECATE recipe_tree & html_data. first validate it isn't used anywhere
|
||||
# for el in parse_list:
|
||||
# temp_tree = []
|
||||
# if isinstance(el, Tag):
|
||||
# try:
|
||||
# el = json.loads(el.string)
|
||||
# except TypeError:
|
||||
# continue
|
||||
|
||||
# for k, v in el.items():
|
||||
# if isinstance(v, dict):
|
||||
# node = {
|
||||
# 'name': k,
|
||||
# 'value': k,
|
||||
# 'children': get_children_dict(v)
|
||||
# }
|
||||
# elif isinstance(v, list):
|
||||
# node = {
|
||||
# 'name': k,
|
||||
# 'value': k,
|
||||
# 'children': get_children_list(v)
|
||||
# }
|
||||
# else:
|
||||
# node = {
|
||||
# 'name': k + ": " + normalize_string(str(v)),
|
||||
# 'value': normalize_string(str(v))
|
||||
# }
|
||||
# temp_tree.append(node)
|
||||
|
||||
# if '@type' in el and el['@type'] == 'Recipe':
|
||||
# recipe_tree += [{'name': 'ld+json', 'children': temp_tree}]
|
||||
# else:
|
||||
# recipe_tree += [{'name': 'json', 'children': temp_tree}]
|
||||
|
||||
# return recipe_json, recipe_tree, html_data, images
|
||||
|
||||
|
||||
# def get_from_html(soup):
|
||||
# INVISIBLE_ELEMS = ('style', 'script', 'head', 'title')
|
||||
# html = []
|
||||
# for s in soup.strings:
|
||||
# if ((s.parent.name not in INVISIBLE_ELEMS) and (len(s.strip()) > 0)):
|
||||
# html.append(s)
|
||||
# return html
|
||||
|
||||
|
||||
# def get_images_from_source(soup, url):
|
||||
# sources = ['src', 'srcset', 'data-src']
|
||||
# images = []
|
||||
# img_tags = soup.find_all('img')
|
||||
# if url:
|
||||
# site = get_host_name(url)
|
||||
# prot = url.split(':')[0]
|
||||
|
||||
# urls = []
|
||||
# for img in img_tags:
|
||||
# for src in sources:
|
||||
# try:
|
||||
# urls.append(img[src])
|
||||
# except KeyError:
|
||||
# pass
|
||||
|
||||
# for u in urls:
|
||||
# u = u.split('?')[0]
|
||||
# filename = re.search(r'/([\w_-]+[.](jpg|jpeg|gif|png))$', u)
|
||||
# if filename:
|
||||
# if (('http' not in u) and (url)):
|
||||
# # sometimes an image source can be relative
|
||||
# # if it is provide the base url
|
||||
# u = '{}://{}{}'.format(prot, site, u)
|
||||
# if 'http' in u:
|
||||
# images.append(u)
|
||||
# return images
|
||||
|
||||
|
||||
# def remove_graph(el):
|
||||
# # recipes type might be wrapped in @graph type
|
||||
# if isinstance(el, Tag):
|
||||
# try:
|
||||
# el = json.loads(el.string)
|
||||
# if '@graph' in el:
|
||||
# for x in el['@graph']:
|
||||
# if '@type' in x and x['@type'] == 'Recipe':
|
||||
# el = x
|
||||
# except (TypeError, JSONDecodeError):
|
||||
# pass
|
||||
# return el
|
||||
@@ -1,14 +1,11 @@
|
||||
import json
|
||||
from collections import Counter
|
||||
from datetime import date, timedelta
|
||||
|
||||
from django.contrib.postgres.search import SearchQuery, SearchRank, SearchVector, TrigramSimilarity
|
||||
from django.core.cache import cache
|
||||
from django.core.cache import caches
|
||||
from django.db.models import (Avg, Case, Count, Exists, F, Func, Max, OuterRef, Q, Subquery, Value, When, FilteredRelation)
|
||||
from django.db.models import Avg, Case, Count, Exists, F, Max, OuterRef, Q, Subquery, Value, When
|
||||
from django.db.models.functions import Coalesce, Lower, Substr
|
||||
from django.utils import timezone, translation
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from cookbook.helper.HelperFunctions import Round, str2bool
|
||||
from cookbook.managers import DICTIONARY
|
||||
@@ -17,7 +14,6 @@ from cookbook.models import (CookLog, CustomFilter, Food, Keyword, Recipe, Searc
|
||||
from recipes import settings
|
||||
|
||||
|
||||
# TODO create extensive tests to make sure ORs ANDs and various filters, sorting, etc work as expected
|
||||
# TODO consider creating a simpleListRecipe API that only includes minimum of recipe info and minimal filtering
|
||||
class RecipeSearch():
|
||||
_postgres = settings.DATABASES['default']['ENGINE'] in ['django.db.backends.postgresql_psycopg2', 'django.db.backends.postgresql']
|
||||
@@ -26,11 +22,17 @@ class RecipeSearch():
|
||||
self._request = request
|
||||
self._queryset = None
|
||||
if f := params.get('filter', None):
|
||||
custom_filter = CustomFilter.objects.filter(id=f, space=self._request.space).filter(Q(created_by=self._request.user) |
|
||||
Q(shared=self._request.user) | Q(recipebook__shared=self._request.user)).first()
|
||||
custom_filter = (
|
||||
CustomFilter.objects.filter(id=f, space=self._request.space)
|
||||
.filter(Q(created_by=self._request.user) | Q(shared=self._request.user) | Q(recipebook__shared=self._request.user))
|
||||
.first()
|
||||
)
|
||||
if custom_filter:
|
||||
self._params = {**json.loads(custom_filter.search)}
|
||||
self._original_params = {**(params or {})}
|
||||
# json.loads casts rating as an integer, expecting string
|
||||
if isinstance(self._params.get('rating', None), int):
|
||||
self._params['rating'] = str(self._params['rating'])
|
||||
else:
|
||||
self._params = {**(params or {})}
|
||||
else:
|
||||
@@ -45,7 +47,8 @@ class RecipeSearch():
|
||||
cache.set(CACHE_KEY, self._search_prefs, timeout=10)
|
||||
else:
|
||||
self._search_prefs = SearchPreference()
|
||||
self._string = self._params.get('query').strip() if self._params.get('query', None) else None
|
||||
self._string = self._params.get('query').strip(
|
||||
) if self._params.get('query', None) else None
|
||||
self._rating = self._params.get('rating', None)
|
||||
self._keywords = {
|
||||
'or': self._params.get('keywords_or', None) or self._params.get('keywords', None),
|
||||
@@ -74,7 +77,8 @@ class RecipeSearch():
|
||||
self._random = str2bool(self._params.get('random', False))
|
||||
self._new = str2bool(self._params.get('new', False))
|
||||
self._num_recent = int(self._params.get('num_recent', 0))
|
||||
self._include_children = str2bool(self._params.get('include_children', None))
|
||||
self._include_children = str2bool(
|
||||
self._params.get('include_children', None))
|
||||
self._timescooked = self._params.get('timescooked', None)
|
||||
self._cookedon = self._params.get('cookedon', None)
|
||||
self._createdon = self._params.get('createdon', None)
|
||||
@@ -82,9 +86,9 @@ class RecipeSearch():
|
||||
self._viewedon = self._params.get('viewedon', None)
|
||||
self._makenow = self._params.get('makenow', None)
|
||||
# this supports hidden feature to find recipes missing X ingredients
|
||||
if type(self._makenow) == bool and self._makenow == True:
|
||||
if isinstance(self._makenow, bool) and self._makenow == True:
|
||||
self._makenow = 0
|
||||
elif type(self._makenow) == str and self._makenow in ["yes", "true"]:
|
||||
elif isinstance(self._makenow, str) and self._makenow in ["yes", "true"]:
|
||||
self._makenow = 0
|
||||
else:
|
||||
try:
|
||||
@@ -141,7 +145,7 @@ class RecipeSearch():
|
||||
self.unit_filters(units=self._units)
|
||||
self._makenow_filter(missing=self._makenow)
|
||||
self.string_filters(string=self._string)
|
||||
return self._queryset.filter(space=self._request.space).distinct().order_by(*self.orderby)
|
||||
return self._queryset.filter(space=self._request.space).order_by(*self.orderby)
|
||||
|
||||
def _sort_includes(self, *args):
|
||||
for x in args:
|
||||
@@ -157,7 +161,7 @@ class RecipeSearch():
|
||||
else:
|
||||
order = []
|
||||
# TODO add userpreference for default sort order and replace '-favorite'
|
||||
default_order = ['-name']
|
||||
default_order = ['name']
|
||||
# recent and new_recipe are always first; they float a few recipes to the top
|
||||
if self._num_recent:
|
||||
order += ['-recent']
|
||||
@@ -166,7 +170,6 @@ class RecipeSearch():
|
||||
|
||||
# if a sort order is provided by user - use that order
|
||||
if self._sort_order:
|
||||
|
||||
if not isinstance(self._sort_order, list):
|
||||
order += [self._sort_order]
|
||||
else:
|
||||
@@ -182,8 +185,10 @@ class RecipeSearch():
|
||||
# otherwise sort by the remaining order_by attributes or favorite by default
|
||||
else:
|
||||
order += default_order
|
||||
order[:] = [Lower('name').asc() if x == 'name' else x for x in order]
|
||||
order[:] = [Lower('name').desc() if x == '-name' else x for x in order]
|
||||
order[:] = [Lower('name').asc() if x ==
|
||||
'name' else x for x in order]
|
||||
order[:] = [Lower('name').desc() if x ==
|
||||
'-name' else x for x in order]
|
||||
self.orderby = order
|
||||
|
||||
def string_filters(self, string=None):
|
||||
@@ -200,7 +205,8 @@ class RecipeSearch():
|
||||
for f in self._filters:
|
||||
query_filter |= f
|
||||
|
||||
self._queryset = self._queryset.filter(query_filter).distinct() # this creates duplicate records which can screw up other aggregates, see makenow for workaround
|
||||
# this creates duplicate records which can screw up other aggregates, see makenow for workaround
|
||||
self._queryset = self._queryset.filter(query_filter).distinct()
|
||||
if self._fulltext_include:
|
||||
if self._fuzzy_match is None:
|
||||
self._queryset = self._queryset.annotate(score=Coalesce(Max(self.search_rank), 0.0))
|
||||
@@ -228,12 +234,13 @@ class RecipeSearch():
|
||||
default = timezone.now() - timedelta(days=100000)
|
||||
else:
|
||||
default = timezone.now()
|
||||
self._queryset = self._queryset.annotate(lastcooked=Coalesce(
|
||||
Max(Case(When(cooklog__created_by=self._request.user, cooklog__space=self._request.space, then='cooklog__created_at'))), Value(default)))
|
||||
self._queryset = self._queryset.annotate(
|
||||
lastcooked=Coalesce(Max(Case(When(cooklog__created_by=self._request.user, cooklog__space=self._request.space, then='cooklog__created_at'))), Value(default))
|
||||
)
|
||||
if cooked_date is None:
|
||||
return
|
||||
|
||||
cooked_date = date(*[int(x) for x in cooked_date.split('-') if x != ''])
|
||||
cooked_date = date(*[int(x)for x in cooked_date.split('-') if x != ''])
|
||||
|
||||
if lessthan:
|
||||
self._queryset = self._queryset.filter(lastcooked__date__lte=cooked_date).exclude(lastcooked=default)
|
||||
@@ -254,7 +261,7 @@ class RecipeSearch():
|
||||
if updated_date is None:
|
||||
return
|
||||
lessthan = '-' in updated_date[:1]
|
||||
updated_date = date(*[int(x) for x in updated_date.split('-') if x != ''])
|
||||
updated_date = date(*[int(x)for x in updated_date.split('-') if x != ''])
|
||||
if lessthan:
|
||||
self._queryset = self._queryset.filter(updated_at__date__lte=updated_date)
|
||||
else:
|
||||
@@ -263,12 +270,13 @@ class RecipeSearch():
|
||||
def _viewed_on_filter(self, viewed_date=None):
|
||||
if self._sort_includes('lastviewed') or viewed_date:
|
||||
longTimeAgo = timezone.now() - timedelta(days=100000)
|
||||
self._queryset = self._queryset.annotate(lastviewed=Coalesce(
|
||||
Max(Case(When(viewlog__created_by=self._request.user, viewlog__space=self._request.space, then='viewlog__created_at'))), Value(longTimeAgo)))
|
||||
self._queryset = self._queryset.annotate(
|
||||
lastviewed=Coalesce(Max(Case(When(viewlog__created_by=self._request.user, viewlog__space=self._request.space, then='viewlog__created_at'))), Value(longTimeAgo))
|
||||
)
|
||||
if viewed_date is None:
|
||||
return
|
||||
lessthan = '-' in viewed_date[:1]
|
||||
viewed_date = date(*[int(x) for x in viewed_date.split('-') if x != ''])
|
||||
viewed_date = date(*[int(x)for x in viewed_date.split('-') if x != ''])
|
||||
|
||||
if lessthan:
|
||||
self._queryset = self._queryset.filter(lastviewed__date__lte=viewed_date).exclude(lastviewed=longTimeAgo)
|
||||
@@ -279,9 +287,11 @@ class RecipeSearch():
|
||||
# TODO make new days a user-setting
|
||||
if not self._new:
|
||||
return
|
||||
self._queryset = (
|
||||
self._queryset.annotate(new_recipe=Case(
|
||||
When(created_at__gte=(timezone.now() - timedelta(days=new_days)), then=('pk')), default=Value(0), ))
|
||||
self._queryset = self._queryset.annotate(
|
||||
new_recipe=Case(
|
||||
When(created_at__gte=(timezone.now() - timedelta(days=new_days)), then=('pk')),
|
||||
default=Value(0),
|
||||
)
|
||||
)
|
||||
|
||||
def _recently_viewed(self, num_recent=None):
|
||||
@@ -291,19 +301,25 @@ class RecipeSearch():
|
||||
Max(Case(When(viewlog__created_by=self._request.user, viewlog__space=self._request.space, then='viewlog__pk'))), Value(0)))
|
||||
return
|
||||
|
||||
num_recent_recipes = ViewLog.objects.filter(created_by=self._request.user, space=self._request.space).values(
|
||||
'recipe').annotate(recent=Max('created_at')).order_by('-recent')[:num_recent]
|
||||
num_recent_recipes = (
|
||||
ViewLog.objects.filter(created_by=self._request.user, space=self._request.space)
|
||||
.values('recipe').annotate(recent=Max('created_at')).order_by('-recent')[:num_recent]
|
||||
)
|
||||
self._queryset = self._queryset.annotate(recent=Coalesce(Max(Case(When(pk__in=num_recent_recipes.values('recipe'), then='viewlog__pk'))), Value(0)))
|
||||
|
||||
def _favorite_recipes(self, times_cooked=None):
|
||||
if self._sort_includes('favorite') or times_cooked:
|
||||
less_than = '-' in (times_cooked or []) or not self._sort_includes('-favorite')
|
||||
less_than = '-' in (times_cooked or []) and not self._sort_includes('-favorite')
|
||||
if less_than:
|
||||
default = 1000
|
||||
else:
|
||||
default = 0
|
||||
favorite_recipes = CookLog.objects.filter(created_by=self._request.user, space=self._request.space, recipe=OuterRef('pk')
|
||||
).values('recipe').annotate(count=Count('pk', distinct=True)).values('count')
|
||||
favorite_recipes = (
|
||||
CookLog.objects.filter(created_by=self._request.user, space=self._request.space, recipe=OuterRef('pk'))
|
||||
.values('recipe')
|
||||
.annotate(count=Count('pk', distinct=True))
|
||||
.values('count')
|
||||
)
|
||||
self._queryset = self._queryset.annotate(favorite=Coalesce(Subquery(favorite_recipes), default))
|
||||
if times_cooked is None:
|
||||
return
|
||||
@@ -311,7 +327,7 @@ class RecipeSearch():
|
||||
if times_cooked == '0':
|
||||
self._queryset = self._queryset.filter(favorite=0)
|
||||
elif less_than:
|
||||
self._queryset = self._queryset.filter(favorite__lte=int(times_cooked[1:])).exclude(favorite=0)
|
||||
self._queryset = self._queryset.filter(favorite__lte=int(times_cooked.replace('-', ''))).exclude(favorite=0)
|
||||
else:
|
||||
self._queryset = self._queryset.filter(favorite__gte=int(times_cooked))
|
||||
|
||||
@@ -393,8 +409,9 @@ class RecipeSearch():
|
||||
|
||||
def rating_filter(self, rating=None):
|
||||
if rating or self._sort_includes('rating'):
|
||||
lessthan = self._sort_includes('-rating') or '-' in (rating or [])
|
||||
if lessthan:
|
||||
lessthan = '-' in (rating or [])
|
||||
reverse = 'rating' in (self._sort_order or []) and '-rating' not in (self._sort_order or [])
|
||||
if lessthan or reverse:
|
||||
default = 100
|
||||
else:
|
||||
default = 0
|
||||
@@ -446,7 +463,7 @@ class RecipeSearch():
        if not steps:
            return
        if not isinstance(steps, list):
            steps = [steps]
        self._queryset = self._queryset.filter(steps__id__in=steps)

def build_fulltext_filters(self, string=None):
|
||||
@@ -503,26 +520,33 @@ class RecipeSearch():
                    trigram += TrigramSimilarity(f, self._string)
                else:
                    trigram = TrigramSimilarity(f, self._string)
            self._fuzzy_match = Recipe.objects.annotate(trigram=trigram).distinct(
            ).annotate(simularity=Max('trigram')).values('id', 'simularity').filter(simularity__gt=self._search_prefs.trigram_threshold)
            self._fuzzy_match = (
                Recipe.objects.annotate(trigram=trigram)
                .distinct()
                .annotate(simularity=Max('trigram'))
                .values('id', 'simularity')
                .filter(simularity__gt=self._search_prefs.trigram_threshold)
            )
            self._filters += [Q(pk__in=self._fuzzy_match.values('pk'))]

    def _makenow_filter(self, missing=None):
        if missing is None or (type(missing) == bool and missing == False):
        if missing is None or (isinstance(missing, bool) and missing == False):
            return
        shopping_users = [*self._request.user.get_shopping_share(), self._request.user]

        onhand_filter = (
            Q(steps__ingredients__food__onhand_users__in=shopping_users)  # food onhand
            | Q(steps__ingredients__food__substitute__onhand_users__in=shopping_users)  # or substitute food onhand
            | Q(steps__ingredients__food__in=self.__children_substitute_filter(shopping_users))
            | Q(steps__ingredients__food__in=self.__sibling_substitute_filter(shopping_users))
            Q(steps__ingredients__food__onhand_users__in=shopping_users)  # food onhand
            # or substitute food onhand
            | Q(steps__ingredients__food__substitute__onhand_users__in=shopping_users)
            | Q(steps__ingredients__food__in=self.__children_substitute_filter(shopping_users))
            | Q(steps__ingredients__food__in=self.__sibling_substitute_filter(shopping_users))
        )
        makenow_recipes = Recipe.objects.annotate(
            count_food=Count('steps__ingredients__food__pk', filter=Q(steps__ingredients__food__isnull=False), distinct=True),
            count_onhand=Count('steps__ingredients__food__pk', filter=onhand_filter, distinct=True),
            count_ignore_shopping=Count('steps__ingredients__food__pk', filter=Q(steps__ingredients__food__ignore_shopping=True,
                                                                                 steps__ingredients__food__recipe__isnull=True), distinct=True),
            count_ignore_shopping=Count(
                'steps__ingredients__food__pk', filter=Q(steps__ingredients__food__ignore_shopping=True, steps__ingredients__food__recipe__isnull=True), distinct=True
            ),
            has_child_sub=Case(When(steps__ingredients__food__in=self.__children_substitute_filter(shopping_users), then=Value(1)), default=Value(0)),
            has_sibling_sub=Case(When(steps__ingredients__food__in=self.__sibling_substitute_filter(shopping_users), then=Value(1)), default=Value(0))
        ).annotate(missingfood=F('count_food') - F('count_onhand') - F('count_ignore_shopping')).filter(missingfood__lte=missing)
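# Editor's illustrative sketch (not part of the commit): how the make-now annotation
# above resolves for one hypothetical recipe with 6 distinct foods, 4 of them onhand
# (directly, via a substitute, or via child/sibling substitutes) and 1 flagged
# ignore_shopping. All numbers here are assumptions for illustration only.
count_food, count_onhand, count_ignore_shopping = 6, 4, 1
missingfood = count_food - count_onhand - count_ignore_shopping
assert missingfood == 1  # kept by .filter(missingfood__lte=missing) whenever missing >= 1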
@@ -530,236 +554,28 @@ class RecipeSearch():
|
||||
|
||||
@staticmethod
|
||||
def __children_substitute_filter(shopping_users=None):
|
||||
children_onhand_subquery = Food.objects.filter(
|
||||
path__startswith=OuterRef('path'),
|
||||
depth__gt=OuterRef('depth'),
|
||||
onhand_users__in=shopping_users
|
||||
children_onhand_subquery = Food.objects.filter(path__startswith=OuterRef('path'), depth__gt=OuterRef('depth'), onhand_users__in=shopping_users)
|
||||
return (
|
||||
Food.objects.exclude( # list of foods that are onhand and children of: foods that are not onhand and are set to use children as substitutes
|
||||
Q(onhand_users__in=shopping_users) | Q(ignore_shopping=True, recipe__isnull=True) | Q(substitute__onhand_users__in=shopping_users)
|
||||
)
|
||||
.exclude(depth=1, numchild=0)
|
||||
.filter(substitute_children=True)
|
||||
.annotate(child_onhand_count=Exists(children_onhand_subquery))
|
||||
.filter(child_onhand_count=True)
|
||||
)
|
||||
return Food.objects.exclude( # list of foods that are onhand and children of: foods that are not onhand and are set to use children as substitutes
|
||||
Q(onhand_users__in=shopping_users)
|
||||
| Q(ignore_shopping=True, recipe__isnull=True)
|
||||
| Q(substitute__onhand_users__in=shopping_users)
|
||||
).exclude(depth=1, numchild=0
|
||||
).filter(substitute_children=True
|
||||
).annotate(child_onhand_count=Exists(children_onhand_subquery)
|
||||
).filter(child_onhand_count=True)
|
||||
|
||||
@staticmethod
|
||||
def __sibling_substitute_filter(shopping_users=None):
|
||||
sibling_onhand_subquery = Food.objects.filter(
|
||||
path__startswith=Substr(OuterRef('path'), 1, Food.steplen * (OuterRef('depth') - 1)),
|
||||
depth=OuterRef('depth'),
|
||||
onhand_users__in=shopping_users
|
||||
path__startswith=Substr(OuterRef('path'), 1, Food.steplen * (OuterRef('depth') - 1)), depth=OuterRef('depth'), onhand_users__in=shopping_users
|
||||
)
|
||||
return Food.objects.exclude( # list of foods that are onhand and siblings of: foods that are not onhand and are set to use siblings as substitutes
|
||||
Q(onhand_users__in=shopping_users)
|
||||
| Q(ignore_shopping=True, recipe__isnull=True)
|
||||
| Q(substitute__onhand_users__in=shopping_users)
|
||||
).exclude(depth=1, numchild=0
|
||||
).filter(substitute_siblings=True
|
||||
).annotate(sibling_onhand=Exists(sibling_onhand_subquery)
|
||||
).filter(sibling_onhand=True)
|
||||
|
||||
|
||||
class RecipeFacet():
|
||||
class CacheEmpty(Exception):
|
||||
pass
|
||||
|
||||
def __init__(self, request, queryset=None, hash_key=None, cache_timeout=3600):
|
||||
if hash_key is None and queryset is None:
|
||||
raise ValueError(_("One of queryset or hash_key must be provided"))
|
||||
|
||||
self._request = request
|
||||
self._queryset = queryset
|
||||
self.hash_key = hash_key or str(hash(self._queryset.query))
|
||||
self._SEARCH_CACHE_KEY = f"recipes_filter_{self.hash_key}"
|
||||
self._cache_timeout = cache_timeout
|
||||
self._cache = caches['default'].get(self._SEARCH_CACHE_KEY, {})
|
||||
if self._cache is None and self._queryset is None:
|
||||
raise self.CacheEmpty("No queryset provided and cache empty")
|
||||
|
||||
self.Keywords = self._cache.get('Keywords', None)
|
||||
self.Foods = self._cache.get('Foods', None)
|
||||
self.Books = self._cache.get('Books', None)
|
||||
self.Ratings = self._cache.get('Ratings', None)
|
||||
# TODO Move Recent to recipe annotation/serializer: requrires change in RecipeSearch(), RecipeSearchView.vue and serializer
|
||||
self.Recent = self._cache.get('Recent', None)
|
||||
|
||||
if self._queryset is not None:
|
||||
self._recipe_list = list(self._queryset.values_list('id', flat=True))
|
||||
self._search_params = {
|
||||
'keyword_list': self._request.query_params.getlist('keywords', []),
|
||||
'food_list': self._request.query_params.getlist('foods', []),
|
||||
'book_list': self._request.query_params.getlist('book', []),
|
||||
'search_keywords_or': str2bool(self._request.query_params.get('keywords_or', True)),
|
||||
'search_foods_or': str2bool(self._request.query_params.get('foods_or', True)),
|
||||
'search_books_or': str2bool(self._request.query_params.get('books_or', True)),
|
||||
'space': self._request.space,
|
||||
}
|
||||
elif self.hash_key is not None:
|
||||
self._recipe_list = self._cache.get('recipe_list', [])
|
||||
self._search_params = {
|
||||
'keyword_list': self._cache.get('keyword_list', None),
|
||||
'food_list': self._cache.get('food_list', None),
|
||||
'book_list': self._cache.get('book_list', None),
|
||||
'search_keywords_or': self._cache.get('search_keywords_or', None),
|
||||
'search_foods_or': self._cache.get('search_foods_or', None),
|
||||
'search_books_or': self._cache.get('search_books_or', None),
|
||||
'space': self._cache.get('space', None),
|
||||
}
|
||||
|
||||
self._cache = {
|
||||
**self._search_params,
|
||||
'recipe_list': self._recipe_list,
|
||||
'Ratings': self.Ratings,
|
||||
'Recent': self.Recent,
|
||||
'Keywords': self.Keywords,
|
||||
'Foods': self.Foods,
|
||||
'Books': self.Books
|
||||
|
||||
}
|
||||
caches['default'].set(self._SEARCH_CACHE_KEY, self._cache, self._cache_timeout)
|
||||
|
||||
def get_facets(self, from_cache=False):
|
||||
if from_cache:
|
||||
return {
|
||||
'cache_key': self.hash_key or '',
|
||||
'Ratings': self.Ratings or {},
|
||||
'Recent': self.Recent or [],
|
||||
'Keywords': self.Keywords or [],
|
||||
'Foods': self.Foods or [],
|
||||
'Books': self.Books or []
|
||||
}
|
||||
return {
|
||||
'cache_key': self.hash_key,
|
||||
'Ratings': self.get_ratings(),
|
||||
'Recent': self.get_recent(),
|
||||
'Keywords': self.get_keywords(),
|
||||
'Foods': self.get_foods(),
|
||||
'Books': self.get_books()
|
||||
}
|
||||
|
||||
def set_cache(self, key, value):
|
||||
self._cache = {**self._cache, key: value}
|
||||
caches['default'].set(
|
||||
self._SEARCH_CACHE_KEY,
|
||||
self._cache,
|
||||
self._cache_timeout
|
||||
return (
|
||||
Food.objects.exclude( # list of foods that are onhand and siblings of: foods that are not onhand and are set to use siblings as substitutes
|
||||
Q(onhand_users__in=shopping_users) | Q(ignore_shopping=True, recipe__isnull=True) | Q(substitute__onhand_users__in=shopping_users)
|
||||
)
|
||||
.exclude(depth=1, numchild=0)
|
||||
.filter(substitute_siblings=True)
|
||||
.annotate(sibling_onhand=Exists(sibling_onhand_subquery))
|
||||
.filter(sibling_onhand=True)
|
||||
)
|
||||
|
||||
def get_books(self):
|
||||
if self.Books is None:
|
||||
self.Books = []
|
||||
return self.Books
|
||||
|
||||
def get_keywords(self):
|
||||
if self.Keywords is None:
|
||||
if self._search_params['search_keywords_or']:
|
||||
keywords = Keyword.objects.filter(space=self._request.space).distinct()
|
||||
else:
|
||||
keywords = Keyword.objects.filter(Q(recipe__in=self._recipe_list) | Q(depth=1)).filter(space=self._request.space).distinct()
|
||||
|
||||
# set keywords to root objects only
|
||||
keywords = self._keyword_queryset(keywords)
|
||||
self.Keywords = [{**x, 'children': None} if x['numchild'] > 0 else x for x in list(keywords)]
|
||||
self.set_cache('Keywords', self.Keywords)
|
||||
return self.Keywords
|
||||
|
||||
def get_foods(self):
|
||||
if self.Foods is None:
|
||||
# # if using an OR search, will annotate all keywords, otherwise, just those that appear in results
|
||||
if self._search_params['search_foods_or']:
|
||||
foods = Food.objects.filter(space=self._request.space).distinct()
|
||||
else:
|
||||
foods = Food.objects.filter(Q(ingredient__step__recipe__in=self._recipe_list) | Q(depth=1)).filter(space=self._request.space).distinct()
|
||||
|
||||
# set keywords to root objects only
|
||||
foods = self._food_queryset(foods)
|
||||
|
||||
self.Foods = [{**x, 'children': None} if x['numchild'] > 0 else x for x in list(foods)]
|
||||
self.set_cache('Foods', self.Foods)
|
||||
return self.Foods
|
||||
|
||||
def get_books(self):
|
||||
if self.Books is None:
|
||||
self.Books = []
|
||||
return self.Books
|
||||
|
||||
def get_ratings(self):
|
||||
if self.Ratings is None:
|
||||
if not self._request.space.demo and self._request.space.show_facet_count:
|
||||
if self._queryset is None:
|
||||
self._queryset = Recipe.objects.filter(id__in=self._recipe_list)
|
||||
rating_qs = self._queryset.annotate(rating=Round(Avg(Case(When(cooklog__created_by=self._request.user, then='cooklog__rating'), default=Value(0)))))
|
||||
self.Ratings = dict(Counter(r.rating for r in rating_qs))
|
||||
else:
|
||||
self.Rating = {}
|
||||
self.set_cache('Ratings', self.Ratings)
|
||||
return self.Ratings
|
||||
|
||||
def get_recent(self):
|
||||
if self.Recent is None:
|
||||
# TODO make days of recent recipe a setting
|
||||
recent_recipes = ViewLog.objects.filter(created_by=self._request.user, space=self._request.space, created_at__gte=timezone.now() - timedelta(days=14)
|
||||
).values_list('recipe__pk', flat=True)
|
||||
self.Recent = list(recent_recipes)
|
||||
self.set_cache('Recent', self.Recent)
|
||||
return self.Recent
|
||||
|
||||
def add_food_children(self, id):
|
||||
try:
|
||||
food = Food.objects.get(id=id)
|
||||
nodes = food.get_ancestors()
|
||||
except Food.DoesNotExist:
|
||||
return self.get_facets()
|
||||
foods = self._food_queryset(food.get_children(), food)
|
||||
deep_search = self.Foods
|
||||
for node in nodes:
|
||||
index = next((i for i, x in enumerate(deep_search) if x["id"] == node.id), None)
|
||||
deep_search = deep_search[index]['children']
|
||||
index = next((i for i, x in enumerate(deep_search) if x["id"] == food.id), None)
|
||||
deep_search[index]['children'] = [{**x, 'children': None} if x['numchild'] > 0 else x for x in list(foods)]
|
||||
self.set_cache('Foods', self.Foods)
|
||||
return self.get_facets()
|
||||
|
||||
def add_keyword_children(self, id):
|
||||
try:
|
||||
keyword = Keyword.objects.get(id=id)
|
||||
nodes = keyword.get_ancestors()
|
||||
except Keyword.DoesNotExist:
|
||||
return self.get_facets()
|
||||
keywords = self._keyword_queryset(keyword.get_children(), keyword)
|
||||
deep_search = self.Keywords
|
||||
for node in nodes:
|
||||
index = next((i for i, x in enumerate(deep_search) if x["id"] == node.id), None)
|
||||
deep_search = deep_search[index]['children']
|
||||
index = next((i for i, x in enumerate(deep_search) if x["id"] == keyword.id), None)
|
||||
deep_search[index]['children'] = [{**x, 'children': None} if x['numchild'] > 0 else x for x in list(keywords)]
|
||||
self.set_cache('Keywords', self.Keywords)
|
||||
return self.get_facets()
|
||||
|
||||
def _recipe_count_queryset(self, field, depth=1, steplen=4):
|
||||
return Recipe.objects.filter(**{f'{field}__path__startswith': OuterRef('path'), f'{field}__depth__gte': depth}, id__in=self._recipe_list, space=self._request.space
|
||||
).annotate(count=Coalesce(Func('pk', function='Count'), 0)).values('count')
|
||||
|
||||
def _keyword_queryset(self, queryset, keyword=None):
|
||||
depth = getattr(keyword, 'depth', 0) + 1
|
||||
steplen = depth * Keyword.steplen
|
||||
|
||||
if not self._request.space.demo and self._request.space.show_facet_count:
|
||||
return queryset.annotate(count=Coalesce(Subquery(self._recipe_count_queryset('keywords', depth, steplen)), 0)
|
||||
).filter(depth=depth, count__gt=0
|
||||
).values('id', 'name', 'count', 'numchild').order_by(Lower('name').asc())[:200]
|
||||
else:
|
||||
return queryset.filter(depth=depth).values('id', 'name', 'numchild').order_by(Lower('name').asc())
|
||||
|
||||
def _food_queryset(self, queryset, food=None):
|
||||
depth = getattr(food, 'depth', 0) + 1
|
||||
steplen = depth * Food.steplen
|
||||
|
||||
if not self._request.space.demo and self._request.space.show_facet_count:
|
||||
return queryset.annotate(count=Coalesce(Subquery(self._recipe_count_queryset('steps__ingredients__food', depth, steplen)), 0)
|
||||
).filter(depth__lte=depth, count__gt=0
|
||||
).values('id', 'name', 'count', 'numchild').order_by(Lower('name').asc())[:200]
|
||||
else:
|
||||
return queryset.filter(depth__lte=depth).values('id', 'name', 'numchild').order_by(Lower('name').asc())
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import random
|
||||
import re
|
||||
import traceback
|
||||
from html import unescape
|
||||
from unicodedata import decomposition
|
||||
|
||||
from django.utils.dateparse import parse_duration
|
||||
from django.utils.translation import gettext as _
|
||||
@@ -10,17 +9,37 @@ from isodate.isoerror import ISO8601Error
|
||||
from pytube import YouTube
|
||||
from recipe_scrapers._utils import get_host_name, get_minutes
|
||||
|
||||
from cookbook.helper import recipe_url_import as helper
|
||||
from cookbook.helper.automation_helper import AutomationEngine
|
||||
from cookbook.helper.ingredient_parser import IngredientParser
|
||||
from cookbook.models import Keyword, Automation
|
||||
|
||||
|
||||
# from recipe_scrapers._utils import get_minutes ## temporary until/unless upstream incorporates get_minutes() PR
|
||||
from cookbook.models import Automation, Keyword, PropertyType
|
||||
|
||||
|
||||
def get_from_scraper(scrape, request):
|
||||
# converting the scrape_me object to the existing json format based on ld+json
|
||||
recipe_json = {}
|
||||
|
||||
recipe_json = {
|
||||
'steps': [],
|
||||
'internal': True
|
||||
}
|
||||
keywords = []
|
||||
|
||||
# assign source URL
|
||||
try:
|
||||
source_url = scrape.canonical_url()
|
||||
except Exception:
|
||||
try:
|
||||
source_url = scrape.url
|
||||
except Exception:
|
||||
pass
|
||||
if source_url:
|
||||
recipe_json['source_url'] = source_url
|
||||
try:
|
||||
keywords.append(source_url.replace('http://', '').replace('https://', '').split('/')[0])
|
||||
except Exception:
|
||||
recipe_json['source_url'] = ''
|
||||
|
||||
automation_engine = AutomationEngine(request, source=recipe_json.get('source_url'))
|
||||
# assign recipe name
|
||||
try:
|
||||
recipe_json['name'] = parse_name(scrape.title()[:128] or None)
|
||||
except Exception:
|
||||
@@ -31,6 +50,13 @@ def get_from_scraper(scrape, request):
|
||||
except Exception:
|
||||
recipe_json['name'] = ''
|
||||
|
||||
if isinstance(recipe_json['name'], list) and len(recipe_json['name']) > 0:
|
||||
recipe_json['name'] = recipe_json['name'][0]
|
||||
|
||||
recipe_json['name'] = automation_engine.apply_regex_replace_automation(recipe_json['name'], Automation.NAME_REPLACE)
|
||||
|
||||
# assign recipe description
|
||||
# TODO notify user about limit if reached - >256 description will be truncated
|
||||
try:
|
||||
description = scrape.description() or None
|
||||
except Exception:
|
||||
@@ -41,16 +67,20 @@ def get_from_scraper(scrape, request):
|
||||
except Exception:
|
||||
description = ''
|
||||
|
||||
recipe_json['internal'] = True
|
||||
recipe_json['description'] = parse_description(description)
|
||||
recipe_json['description'] = automation_engine.apply_regex_replace_automation(recipe_json['description'], Automation.DESCRIPTION_REPLACE)
|
||||
|
||||
# assign servings attributes
|
||||
try:
|
||||
servings = scrape.schema.data.get('recipeYield') or 1 # dont use scrape.yields() as this will always return "x servings" or "x items", should be improved in scrapers directly
|
||||
# dont use scrape.yields() as this will always return "x servings" or "x items", should be improved in scrapers directly
|
||||
servings = scrape.schema.data.get('recipeYield') or 1
|
||||
except Exception:
|
||||
servings = 1
|
||||
|
||||
recipe_json['servings'] = parse_servings(servings)
|
||||
recipe_json['servings_text'] = parse_servings_text(servings)
|
||||
|
||||
# assign time attributes
|
||||
try:
|
||||
recipe_json['working_time'] = get_minutes(scrape.prep_time()) or 0
|
||||
except Exception:
|
||||
@@ -75,6 +105,7 @@ def get_from_scraper(scrape, request):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# assign image
|
||||
try:
|
||||
recipe_json['image'] = parse_image(scrape.image()) or None
|
||||
except Exception:
|
||||
@@ -85,7 +116,7 @@ def get_from_scraper(scrape, request):
|
||||
except Exception:
|
||||
recipe_json['image'] = ''
|
||||
|
||||
keywords = []
|
||||
# assign keywords
|
||||
try:
|
||||
if scrape.schema.data.get("keywords"):
|
||||
keywords += listify_keywords(scrape.schema.data.get("keywords"))
|
||||
@@ -110,54 +141,32 @@ def get_from_scraper(scrape, request):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
source_url = scrape.canonical_url()
|
||||
except Exception:
|
||||
try:
|
||||
source_url = scrape.url
|
||||
except Exception:
|
||||
pass
|
||||
if source_url:
|
||||
recipe_json['source_url'] = source_url
|
||||
try:
|
||||
keywords.append(source_url.replace('http://', '').replace('https://', '').split('/')[0])
|
||||
except Exception:
|
||||
recipe_json['source_url'] = ''
|
||||
|
||||
try:
|
||||
if scrape.author():
|
||||
keywords.append(scrape.author())
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
recipe_json['keywords'] = parse_keywords(list(set(map(str.casefold, keywords))), request.space)
|
||||
recipe_json['keywords'] = parse_keywords(list(set(map(str.casefold, keywords))), request)
|
||||
except AttributeError:
|
||||
recipe_json['keywords'] = keywords
|
||||
|
||||
ingredient_parser = IngredientParser(request, True)
|
||||
|
||||
recipe_json['steps'] = []
|
||||
# assign steps
|
||||
try:
|
||||
for i in parse_instructions(scrape.instructions()):
|
||||
recipe_json['steps'].append({'instruction': i, 'ingredients': [], })
|
||||
recipe_json['steps'].append({'instruction': i, 'ingredients': [], 'show_ingredients_table': request.user.userpreference.show_step_ingredients, })
|
||||
except Exception:
|
||||
pass
|
||||
if len(recipe_json['steps']) == 0:
|
||||
recipe_json['steps'].append({'instruction': '', 'ingredients': [], })
|
||||
|
||||
parsed_description = parse_description(description)
|
||||
# TODO notify user about limit if reached
|
||||
# limits exist to limit the attack surface for dos style attacks
|
||||
automations = Automation.objects.filter(type=Automation.DESCRIPTION_REPLACE, space=request.space, disabled=False).only('param_1', 'param_2', 'param_3').all().order_by('order')[:512]
|
||||
for a in automations:
|
||||
if re.match(a.param_1, (recipe_json['source_url'])[:512]):
|
||||
parsed_description = re.sub(a.param_2, a.param_3, parsed_description, count=1)
|
||||
|
||||
if len(parsed_description) > 256: # split at 256 as long descriptions don't look good on recipe cards
|
||||
recipe_json['steps'][0]['instruction'] = f'*{parsed_description}* \n\n' + recipe_json['steps'][0]['instruction']
|
||||
if len(recipe_json['description']) > 256: # split at 256 as long descriptions don't look good on recipe cards
|
||||
recipe_json['steps'][0]['instruction'] = f"*{recipe_json['description']}* \n\n" + recipe_json['steps'][0]['instruction']
|
||||
else:
|
||||
recipe_json['description'] = parsed_description[:512]
|
||||
recipe_json['description'] = recipe_json['description'][:512]
|
||||
|
||||
try:
|
||||
for x in scrape.ingredients():
|
||||
@@ -191,16 +200,44 @@ def get_from_scraper(scrape, request):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
    if recipe_json['source_url']:
        automations = Automation.objects.filter(type=Automation.INSTRUCTION_REPLACE, space=request.space, disabled=False).only('param_1', 'param_2', 'param_3').order_by('order').all()[:512]
        for a in automations:
            if re.match(a.param_1, (recipe_json['source_url'])[:512]):
                for s in recipe_json['steps']:
                    s['instruction'] = re.sub(a.param_2, a.param_3, s['instruction'])
    try:
        recipe_json['properties'] = get_recipe_properties(request.space, scrape.schema.nutrients())
        print(recipe_json['properties'])
    except Exception:
        traceback.print_exc()
        pass

    for s in recipe_json['steps']:
        s['instruction'] = automation_engine.apply_regex_replace_automation(s['instruction'], Automation.INSTRUCTION_REPLACE)
        # re.sub(a.param_2, a.param_3, s['instruction'])

    return recipe_json

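# Editor's illustrative sketch (not part of the commit): the regex semantics the new
# AutomationEngine.apply_regex_replace_automation() call is expected to keep from the
# inline loop above -- param_1 is matched against the source URL, param_2/param_3 feed
# re.sub(). The automation values and URL below are hypothetical.
import re

source_url = 'https://example.com/recipes/pancakes'
automation = {'param_1': r'https://example\.com', 'param_2': r' - Example Blog$', 'param_3': ''}
name = 'Fluffy Pancakes - Example Blog'
if re.match(automation['param_1'], source_url[:512]):
    name = re.sub(automation['param_2'], automation['param_3'], name)
assert name == 'Fluffy Pancakes'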
def get_recipe_properties(space, property_data):
    # {'servingSize': '1', 'calories': '302 kcal', 'proteinContent': '7,66g', 'fatContent': '11,56g', 'carbohydrateContent': '41,33g'}
    properties = {
        "property-calories": "calories",
        "property-carbohydrates": "carbohydrateContent",
        "property-proteins": "proteinContent",
        "property-fats": "fatContent",
    }
    recipe_properties = []
    for pt in PropertyType.objects.filter(space=space, open_data_slug__in=list(properties.keys())).all():
        for p in list(properties.keys()):
            if pt.open_data_slug == p:
                if properties[p] in property_data:
                    recipe_properties.append({
                        'property_type': {
                            'id': pt.id,
                            'name': pt.name,
                        },
                        'property_amount': parse_servings(property_data[properties[p]]) / float(property_data['servingSize']),
                    })

    return recipe_properties

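# Editor's illustrative sketch (not part of the commit): with the sample nutrient dict
# from the comment above and a space that has a PropertyType whose open_data_slug is
# 'property-calories' (name assumed here), get_recipe_properties() would return roughly
# [{'property_type': {'id': <pk>, 'name': 'Calories'}, 'property_amount': 302.0}],
# since parse_servings('302 kcal') extracts 302 and servingSize is '1'.
property_data = {'servingSize': '1', 'calories': '302 kcal'}
property_amount = 302 / float(property_data['servingSize'])  # mirrors parse_servings(...) / float(servingSize)
assert property_amount == 302.0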
|
||||
def get_from_youtube_scraper(url, request):
|
||||
"""A YouTube Information Scraper."""
|
||||
kw, created = Keyword.objects.get_or_create(name='YouTube', space=request.space)
|
||||
@@ -222,11 +259,14 @@ def get_from_youtube_scraper(url, request):
|
||||
]
|
||||
}
|
||||
|
||||
# TODO add automation here
|
||||
try:
|
||||
automation_engine = AutomationEngine(request, source=url)
|
||||
video = YouTube(url=url)
|
||||
default_recipe_json['name'] = video.title
|
||||
default_recipe_json['name'] = automation_engine.apply_regex_replace_automation(video.title, Automation.NAME_REPLACE)
|
||||
default_recipe_json['image'] = video.thumbnail_url
|
||||
default_recipe_json['steps'][0]['instruction'] = video.description
|
||||
default_recipe_json['steps'][0]['instruction'] = automation_engine.apply_regex_replace_automation(video.description, Automation.INSTRUCTION_REPLACE)
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -234,7 +274,7 @@ def get_from_youtube_scraper(url, request):
|
||||
|
||||
|
||||
def parse_name(name):
|
||||
if type(name) == list:
|
||||
if isinstance(name, list):
|
||||
try:
|
||||
name = name[0]
|
||||
except Exception:
|
||||
@@ -278,16 +318,16 @@ def parse_instructions(instructions):
|
||||
"""
|
||||
instruction_list = []
|
||||
|
||||
if type(instructions) == list:
|
||||
if isinstance(instructions, list):
|
||||
for i in instructions:
|
||||
if type(i) == str:
|
||||
if isinstance(i, str):
|
||||
instruction_list.append(clean_instruction_string(i))
|
||||
else:
|
||||
if 'text' in i:
|
||||
instruction_list.append(clean_instruction_string(i['text']))
|
||||
elif 'itemListElement' in i:
|
||||
for ile in i['itemListElement']:
|
||||
if type(ile) == str:
|
||||
if isinstance(ile, str):
|
||||
instruction_list.append(clean_instruction_string(ile))
|
||||
elif 'text' in ile:
|
||||
instruction_list.append(clean_instruction_string(ile['text']))
|
||||
@@ -303,13 +343,13 @@ def parse_image(image):
|
||||
# check if list of images is returned, take first if so
|
||||
if not image:
|
||||
return None
|
||||
if type(image) == list:
|
||||
if isinstance(image, list):
|
||||
for pic in image:
|
||||
if (type(pic) == str) and (pic[:4] == 'http'):
|
||||
if (isinstance(pic, str)) and (pic[:4] == 'http'):
|
||||
image = pic
|
||||
elif 'url' in pic:
|
||||
image = pic['url']
|
||||
elif type(image) == dict:
|
||||
elif isinstance(image, dict):
|
||||
if 'url' in image:
|
||||
image = image['url']
|
||||
|
||||
@@ -320,12 +360,12 @@ def parse_image(image):
|
||||
|
||||
|
||||
def parse_servings(servings):
|
||||
if type(servings) == str:
|
||||
if isinstance(servings, str):
|
||||
try:
|
||||
servings = int(re.search(r'\d+', servings).group())
|
||||
except AttributeError:
|
||||
servings = 1
|
||||
elif type(servings) == list:
|
||||
elif isinstance(servings, list):
|
||||
try:
|
||||
servings = int(re.findall(r'\b\d+\b', servings[0])[0])
|
||||
except KeyError:
|
||||
@@ -334,12 +374,12 @@ def parse_servings(servings):


def parse_servings_text(servings):
    if type(servings) == str:
    if isinstance(servings, str):
        try:
            servings = re.sub("\d+", '', servings).strip()
            servings = re.sub("\\d+", '', servings).strip()
        except Exception:
            servings = ''
    if type(servings) == list:
    if isinstance(servings, list):
        try:
            servings = parse_servings_text(servings[1])
        except Exception:
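# Editor's illustrative sketch (not part of the commit): how the two helpers above split
# a scraped yield string, using the same regex calls shown in the diff.
import re

servings_raw = '4 servings'
servings = int(re.search(r'\d+', servings_raw).group())    # parse_servings -> 4
servings_text = re.sub(r'\d+', '', servings_raw).strip()   # parse_servings_text -> 'servings'
assert (servings, servings_text) == (4, 'servings')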
@@ -356,7 +396,7 @@ def parse_time(recipe_time):
|
||||
recipe_time = round(iso_parse_duration(recipe_time).seconds / 60)
|
||||
except ISO8601Error:
|
||||
try:
|
||||
if (type(recipe_time) == list and len(recipe_time) > 0):
|
||||
if (isinstance(recipe_time, list) and len(recipe_time) > 0):
|
||||
recipe_time = recipe_time[0]
|
||||
recipe_time = round(parse_duration(recipe_time).seconds / 60)
|
||||
except AttributeError:
|
||||
@@ -365,13 +405,18 @@ def parse_time(recipe_time):
|
||||
return recipe_time
|
||||
|
||||
|
||||
def parse_keywords(keyword_json, space):
|
||||
def parse_keywords(keyword_json, request):
|
||||
keywords = []
|
||||
automation_engine = AutomationEngine(request)
|
||||
|
||||
# keywords as list
|
||||
for kw in keyword_json:
|
||||
kw = normalize_string(kw)
|
||||
# if alias exists use that instead
|
||||
|
||||
if len(kw) != 0:
|
||||
if k := Keyword.objects.filter(name=kw, space=space).first():
|
||||
automation_engine.apply_keyword_automation(kw)
|
||||
if k := Keyword.objects.filter(name=kw, space=request.space).first():
|
||||
keywords.append({'label': str(k), 'name': k.name, 'id': k.id})
|
||||
else:
|
||||
keywords.append({'label': kw, 'name': kw})
|
||||
@@ -382,15 +427,15 @@ def parse_keywords(keyword_json, space):
|
||||
def listify_keywords(keyword_list):
|
||||
# keywords as string
|
||||
try:
|
||||
if type(keyword_list[0]) == dict:
|
||||
if isinstance(keyword_list[0], dict):
|
||||
return keyword_list
|
||||
except (KeyError, IndexError):
|
||||
pass
|
||||
if type(keyword_list) == str:
|
||||
if isinstance(keyword_list, str):
|
||||
keyword_list = keyword_list.split(',')
|
||||
|
||||
# keywords as string in list
|
||||
if (type(keyword_list) == list and len(keyword_list) == 1 and ',' in keyword_list[0]):
|
||||
if (isinstance(keyword_list, list) and len(keyword_list) == 1 and ',' in keyword_list[0]):
|
||||
keyword_list = keyword_list[0].split(',')
|
||||
return [x.strip() for x in keyword_list]
|
||||
|
||||
@@ -444,13 +489,13 @@ def get_images_from_soup(soup, url):
|
||||
|
||||
|
||||
def clean_dict(input_dict, key):
|
||||
if type(input_dict) == dict:
|
||||
if isinstance(input_dict, dict):
|
||||
for x in list(input_dict):
|
||||
if x == key:
|
||||
del input_dict[x]
|
||||
elif type(input_dict[x]) == dict:
|
||||
elif isinstance(input_dict[x], dict):
|
||||
input_dict[x] = clean_dict(input_dict[x], key)
|
||||
elif type(input_dict[x]) == list:
|
||||
elif isinstance(input_dict[x], list):
|
||||
temp_list = []
|
||||
for e in input_dict[x]:
|
||||
temp_list.append(clean_dict(e, key))
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
from django.urls import reverse
|
||||
from django_scopes import scope, scopes_disabled
|
||||
from oauth2_provider.contrib.rest_framework import OAuth2Authentication
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.authtoken.models import Token
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
|
||||
from cookbook.views import views
|
||||
@@ -50,7 +48,6 @@ class ScopeMiddleware:
|
||||
return views.no_groups(request)
|
||||
|
||||
request.space = user_space.space
|
||||
# with scopes_disabled():
|
||||
with scope(space=request.space):
|
||||
return self.get_response(request)
|
||||
else:
|
||||
|
||||
@@ -1,16 +1,13 @@
|
||||
from datetime import timedelta
|
||||
from decimal import Decimal
|
||||
|
||||
from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.db.models import F, OuterRef, Q, Subquery, Value
|
||||
from django.db.models.functions import Coalesce
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from cookbook.helper.HelperFunctions import Round, str2bool
|
||||
from cookbook.models import (Ingredient, MealPlan, Recipe, ShoppingListEntry, ShoppingListRecipe,
|
||||
SupermarketCategoryRelation)
|
||||
from recipes import settings
|
||||
|
||||
|
||||
def shopping_helper(qs, request):
|
||||
@@ -47,7 +44,7 @@ class RecipeShoppingEditor():
|
||||
self.mealplan = self._kwargs.get('mealplan', None)
|
||||
if type(self.mealplan) in [int, float]:
|
||||
self.mealplan = MealPlan.objects.filter(id=self.mealplan, space=self.space)
|
||||
if type(self.mealplan) == dict:
|
||||
if isinstance(self.mealplan, dict):
|
||||
self.mealplan = MealPlan.objects.filter(id=self.mealplan['id'], space=self.space).first()
|
||||
self.id = self._kwargs.get('id', None)
|
||||
|
||||
@@ -69,11 +66,12 @@ class RecipeShoppingEditor():
|
||||
|
||||
@property
|
||||
def _recipe_servings(self):
|
||||
return getattr(self.recipe, 'servings', None) or getattr(getattr(self.mealplan, 'recipe', None), 'servings', None) or getattr(getattr(self._shopping_list_recipe, 'recipe', None), 'servings', None)
|
||||
return getattr(self.recipe, 'servings', None) or getattr(getattr(self.mealplan, 'recipe', None), 'servings',
|
||||
None) or getattr(getattr(self._shopping_list_recipe, 'recipe', None), 'servings', None)
|
||||
|
||||
@property
|
||||
def _servings_factor(self):
|
||||
return Decimal(self.servings)/Decimal(self._recipe_servings)
|
||||
return Decimal(self.servings) / Decimal(self._recipe_servings)
|
||||
|
||||
@property
|
||||
def _shared_users(self):
|
||||
@@ -90,9 +88,10 @@ class RecipeShoppingEditor():
|
||||
|
||||
def get_recipe_ingredients(self, id, exclude_onhand=False):
|
||||
if exclude_onhand:
|
||||
return Ingredient.objects.filter(step__recipe__id=id, food__ignore_shopping=False, space=self.space).exclude(food__onhand_users__id__in=[x.id for x in self._shared_users])
|
||||
return Ingredient.objects.filter(step__recipe__id=id, food__ignore_shopping=False, space=self.space).exclude(
|
||||
food__onhand_users__id__in=[x.id for x in self._shared_users])
|
||||
else:
|
||||
return Ingredient.objects.filter(step__recipe__id=id, food__ignore_shopping=False, space=self.space)
|
||||
return Ingredient.objects.filter(step__recipe__id=id, food__ignore_shopping=False, space=self.space)
|
||||
|
||||
@property
|
||||
def _include_related(self):
|
||||
@@ -109,7 +108,7 @@ class RecipeShoppingEditor():
|
||||
self.servings = float(servings)
|
||||
|
||||
if mealplan := kwargs.get('mealplan', None):
|
||||
if type(mealplan) == dict:
|
||||
if isinstance(mealplan, dict):
|
||||
self.mealplan = MealPlan.objects.filter(id=mealplan['id'], space=self.space).first()
|
||||
else:
|
||||
self.mealplan = mealplan
|
||||
@@ -170,14 +169,14 @@ class RecipeShoppingEditor():
|
||||
try:
|
||||
self._shopping_list_recipe.delete()
|
||||
return True
|
||||
except:
|
||||
except BaseException:
|
||||
return False
|
||||
|
||||
def _add_ingredients(self, ingredients=None):
|
||||
if not ingredients:
|
||||
return
|
||||
elif type(ingredients) == list:
|
||||
ingredients = Ingredient.objects.filter(id__in=ingredients)
|
||||
elif isinstance(ingredients, list):
|
||||
ingredients = Ingredient.objects.filter(id__in=ingredients, food__ignore_shopping=False)
|
||||
existing = self._shopping_list_recipe.entries.filter(ingredient__in=ingredients).values_list('ingredient__pk', flat=True)
|
||||
add_ingredients = ingredients.exclude(id__in=existing)
|
||||
|
||||
@@ -199,120 +198,3 @@ class RecipeShoppingEditor():
|
||||
to_delete = self._shopping_list_recipe.entries.exclude(ingredient__in=ingredients)
|
||||
ShoppingListEntry.objects.filter(id__in=to_delete).delete()
|
||||
self._shopping_list_recipe = self.get_shopping_list_recipe(self.id, self.created_by, self.space)
|
||||
|
||||
|
||||
# # TODO refactor as class
|
||||
# def list_from_recipe(list_recipe=None, recipe=None, mealplan=None, servings=None, ingredients=None, created_by=None, space=None, append=False):
|
||||
# """
|
||||
# Creates ShoppingListRecipe and associated ShoppingListEntrys from a recipe or a meal plan with a recipe
|
||||
# :param list_recipe: Modify an existing ShoppingListRecipe
|
||||
# :param recipe: Recipe to use as list of ingredients. One of [recipe, mealplan] are required
|
||||
# :param mealplan: alternatively use a mealplan recipe as source of ingredients
|
||||
# :param servings: Optional: Number of servings to use to scale shoppinglist. If servings = 0 an existing recipe list will be deleted
|
||||
# :param ingredients: Ingredients, list of ingredient IDs to include on the shopping list. When not provided all ingredients will be used
|
||||
# :param append: If False will remove any entries not included with ingredients, when True will append ingredients to the shopping list
|
||||
# """
|
||||
# r = recipe or getattr(mealplan, 'recipe', None) or getattr(list_recipe, 'recipe', None)
|
||||
# if not r:
|
||||
# raise ValueError(_("You must supply a recipe or mealplan"))
|
||||
|
||||
# created_by = created_by or getattr(ShoppingListEntry.objects.filter(list_recipe=list_recipe).first(), 'created_by', None)
|
||||
# if not created_by:
|
||||
# raise ValueError(_("You must supply a created_by"))
|
||||
|
||||
# try:
|
||||
# servings = float(servings)
|
||||
# except (ValueError, TypeError):
|
||||
# servings = getattr(mealplan, 'servings', 1.0)
|
||||
|
||||
# servings_factor = servings / r.servings
|
||||
|
||||
# shared_users = list(created_by.get_shopping_share())
|
||||
# shared_users.append(created_by)
|
||||
# if list_recipe:
|
||||
# created = False
|
||||
# else:
|
||||
# list_recipe = ShoppingListRecipe.objects.create(recipe=r, mealplan=mealplan, servings=servings)
|
||||
# created = True
|
||||
|
||||
# related_step_ing = []
|
||||
# if servings == 0 and not created:
|
||||
# list_recipe.delete()
|
||||
# return []
|
||||
# elif ingredients:
|
||||
# ingredients = Ingredient.objects.filter(pk__in=ingredients, space=space)
|
||||
# else:
|
||||
# ingredients = Ingredient.objects.filter(step__recipe=r, food__ignore_shopping=False, space=space)
|
||||
|
||||
# if exclude_onhand := created_by.userpreference.mealplan_autoexclude_onhand:
|
||||
# ingredients = ingredients.exclude(food__onhand_users__id__in=[x.id for x in shared_users])
|
||||
|
||||
# if related := created_by.userpreference.mealplan_autoinclude_related:
|
||||
# # TODO: add levels of related recipes (related recipes of related recipes) to use when auto-adding mealplans
|
||||
# related_recipes = r.get_related_recipes()
|
||||
|
||||
# for x in related_recipes:
|
||||
# # related recipe is a Step serving size is driven by recipe serving size
|
||||
# # TODO once/if Steps can have a serving size this needs to be refactored
|
||||
# if exclude_onhand:
|
||||
# # if steps are used more than once in a recipe or subrecipe - I don' think this results in the desired behavior
|
||||
# related_step_ing += Ingredient.objects.filter(step__recipe=x, space=space).exclude(food__onhand_users__id__in=[x.id for x in shared_users]).values_list('id', flat=True)
|
||||
# else:
|
||||
# related_step_ing += Ingredient.objects.filter(step__recipe=x, space=space).values_list('id', flat=True)
|
||||
|
||||
# x_ing = []
|
||||
# if ingredients.filter(food__recipe=x).exists():
|
||||
# for ing in ingredients.filter(food__recipe=x):
|
||||
# if exclude_onhand:
|
||||
# x_ing = Ingredient.objects.filter(step__recipe=x, food__ignore_shopping=False, space=space).exclude(food__onhand_users__id__in=[x.id for x in shared_users])
|
||||
# else:
|
||||
# x_ing = Ingredient.objects.filter(step__recipe=x, food__ignore_shopping=False, space=space).exclude(food__ignore_shopping=True)
|
||||
# for i in [x for x in x_ing]:
|
||||
# ShoppingListEntry.objects.create(
|
||||
# list_recipe=list_recipe,
|
||||
# food=i.food,
|
||||
# unit=i.unit,
|
||||
# ingredient=i,
|
||||
# amount=i.amount * Decimal(servings_factor),
|
||||
# created_by=created_by,
|
||||
# space=space,
|
||||
# )
|
||||
# # dont' add food to the shopping list that are actually recipes that will be added as ingredients
|
||||
# ingredients = ingredients.exclude(food__recipe=x)
|
||||
|
||||
# add_ingredients = list(ingredients.values_list('id', flat=True)) + related_step_ing
|
||||
# if not append:
|
||||
# existing_list = ShoppingListEntry.objects.filter(list_recipe=list_recipe)
|
||||
# # delete shopping list entries not included in ingredients
|
||||
# existing_list.exclude(ingredient__in=ingredients).delete()
|
||||
# # add shopping list entries that did not previously exist
|
||||
# add_ingredients = set(add_ingredients) - set(existing_list.values_list('ingredient__id', flat=True))
|
||||
# add_ingredients = Ingredient.objects.filter(id__in=add_ingredients, space=space)
|
||||
|
||||
# # if servings have changed, update the ShoppingListRecipe and existing Entries
|
||||
# if servings <= 0:
|
||||
# servings = 1
|
||||
|
||||
# if not created and list_recipe.servings != servings:
|
||||
# update_ingredients = set(ingredients.values_list('id', flat=True)) - set(add_ingredients.values_list('id', flat=True))
|
||||
# list_recipe.servings = servings
|
||||
# list_recipe.save()
|
||||
# for sle in ShoppingListEntry.objects.filter(list_recipe=list_recipe, ingredient__id__in=update_ingredients):
|
||||
# sle.amount = sle.ingredient.amount * Decimal(servings_factor)
|
||||
# sle.save()
|
||||
|
||||
# # add any missing Entries
|
||||
# for i in [x for x in add_ingredients if x.food]:
|
||||
|
||||
# ShoppingListEntry.objects.create(
|
||||
# list_recipe=list_recipe,
|
||||
# food=i.food,
|
||||
# unit=i.unit,
|
||||
# ingredient=i,
|
||||
# amount=i.amount * Decimal(servings_factor),
|
||||
# created_by=created_by,
|
||||
# space=space,
|
||||
# )
|
||||
|
||||
# # return all shopping list items
|
||||
# return list_recipe
|
||||
@@ -2,7 +2,6 @@ from gettext import gettext as _
|
||||
|
||||
import bleach
|
||||
import markdown as md
|
||||
from bleach_allowlist import markdown_attrs, markdown_tags
|
||||
from jinja2 import Template, TemplateSyntaxError, UndefinedError
|
||||
from markdown.extensions.tables import TableExtension
|
||||
|
||||
@@ -53,9 +52,17 @@ class IngredientObject(object):
def render_instructions(step):  # TODO deduplicate markdown cleanup code
    instructions = step.instruction

    tags = markdown_tags + [
        'pre', 'table', 'td', 'tr', 'th', 'tbody', 'style', 'thead', 'img'
    ]
    tags = {
        "h1", "h2", "h3", "h4", "h5", "h6",
        "b", "i", "strong", "em", "tt",
        "p", "br",
        "span", "div", "blockquote", "code", "pre", "hr",
        "ul", "ol", "li", "dd", "dt",
        "img",
        "a",
        "sub", "sup",
        'pre', 'table', 'td', 'tr', 'th', 'tbody', 'style', 'thead'
    }
    parsed_md = md.markdown(
        instructions,
        extensions=[
@@ -63,7 +70,11 @@ def render_instructions(step):  # TODO deduplicate markdown cleanup code
            UrlizeExtension(), MarkdownFormatExtension()
        ]
    )
    markdown_attrs['*'] = markdown_attrs['*'] + ['class', 'width', 'height']
    markdown_attrs = {
        "*": ["id", "class", 'width', 'height'],
        "img": ["src", "alt", "title"],
        "a": ["href", "alt", "title"],
    }

    instructions = bleach.clean(parsed_md, tags, markdown_attrs)

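# Editor's illustrative sketch (not part of the commit): the effect of the tightened
# allowlists above -- bleach keeps whitelisted tags/attributes and strips the rest.
# The tag/attribute sets here are abbreviated, not the full allowlists from the diff.
import bleach

html = '<p onclick="x()">Step <b>1</b> <u>mix</u></p>'
cleaned = bleach.clean(html, tags={'p', 'b'}, attributes={'*': ['id', 'class']}, strip=True)
print(cleaned)  # expected: <p>Step <b>1</b> mix</p>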
141
cookbook/helper/unit_conversion_helper.py
Normal file
@@ -0,0 +1,141 @@
|
||||
from django.core.cache import caches
|
||||
from decimal import Decimal
|
||||
|
||||
from cookbook.helper.cache_helper import CacheHelper
|
||||
from cookbook.models import Ingredient, Unit
|
||||
|
||||
CONVERSION_TABLE = {
|
||||
'weight': {
|
||||
'g': 1000,
|
||||
'kg': 1,
|
||||
'ounce': 35.274,
|
||||
'pound': 2.20462
|
||||
},
|
||||
'volume': {
|
||||
'ml': 1000,
|
||||
'l': 1,
|
||||
'fluid_ounce': 33.814,
|
||||
'pint': 2.11338,
|
||||
'quart': 1.05669,
|
||||
'gallon': 0.264172,
|
||||
'tbsp': 67.628,
|
||||
'tsp': 202.884,
|
||||
'imperial_fluid_ounce': 35.1951,
|
||||
'imperial_pint': 1.75975,
|
||||
'imperial_quart': 0.879877,
|
||||
'imperial_gallon': 0.219969,
|
||||
'imperial_tbsp': 56.3121,
|
||||
'imperial_tsp': 168.936,
|
||||
},
|
||||
}
|
||||
|
||||
BASE_UNITS_WEIGHT = list(CONVERSION_TABLE['weight'].keys())
|
||||
BASE_UNITS_VOLUME = list(CONVERSION_TABLE['volume'].keys())
|
||||
|
||||
|
||||
class ConversionException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class UnitConversionHelper:
|
||||
space = None
|
||||
|
||||
def __init__(self, space):
|
||||
"""
|
||||
Initializes unit conversion helper
|
||||
:param space: space to perform conversions on
|
||||
"""
|
||||
self.space = space
|
||||
|
||||
@staticmethod
|
||||
def convert_from_to(from_unit, to_unit, amount):
|
||||
"""
|
||||
Convert from one base unit to another. Throws ConversionException if trying to convert between different systems (weight/volume) or if units are not supported.
|
||||
:param from_unit: str unit to convert from
|
||||
:param to_unit: str unit to convert to
|
||||
:param amount: amount to convert
|
||||
:return: Decimal converted amount
|
||||
"""
|
||||
system = None
|
||||
if from_unit in BASE_UNITS_WEIGHT and to_unit in BASE_UNITS_WEIGHT:
|
||||
system = 'weight'
|
||||
if from_unit in BASE_UNITS_VOLUME and to_unit in BASE_UNITS_VOLUME:
|
||||
system = 'volume'
|
||||
|
||||
if not system:
|
||||
raise ConversionException('Trying to convert units not existing or not in one unit system (weight/volume)')
|
||||
|
||||
return Decimal(amount / Decimal(CONVERSION_TABLE[system][from_unit] / CONVERSION_TABLE[system][to_unit]))
|
||||
|
||||
def base_conversions(self, ingredient_list):
|
||||
"""
|
||||
Calculates all possible base unit conversions for each ingredient give.
|
||||
Converts to all common base units IF they exist in the unit database of the space.
|
||||
For useful results all ingredients passed should be of the same food, otherwise filtering afterwards might be required.
|
||||
:param ingredient_list: list of ingredients to convert
|
||||
:return: ingredient list with appended conversions
|
||||
"""
|
||||
base_conversion_ingredient_list = ingredient_list.copy()
|
||||
for i in ingredient_list:
|
||||
try:
|
||||
conversion_unit = i.unit.name
|
||||
if i.unit.base_unit:
|
||||
conversion_unit = i.unit.base_unit
|
||||
|
||||
# TODO allow setting which units to convert to? possibly only once conversions become visible
|
||||
units = caches['default'].get(CacheHelper(self.space).BASE_UNITS_CACHE_KEY, None)
|
||||
if not units:
|
||||
units = Unit.objects.filter(space=self.space, base_unit__in=(BASE_UNITS_VOLUME + BASE_UNITS_WEIGHT)).all()
|
||||
caches['default'].set(CacheHelper(self.space).BASE_UNITS_CACHE_KEY, units, 60 * 60) # cache is cleared on unit save signal so long duration is fine
|
||||
|
||||
for u in units:
|
||||
try:
|
||||
ingredient = Ingredient(amount=self.convert_from_to(conversion_unit, u.base_unit, i.amount), unit=u, food=ingredient_list[0].food, )
|
||||
if not any((x.unit.name == ingredient.unit.name or x.unit.base_unit == ingredient.unit.name) for x in base_conversion_ingredient_list):
|
||||
base_conversion_ingredient_list.append(ingredient)
|
||||
except ConversionException:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return base_conversion_ingredient_list
|
||||
|
||||
def get_conversions(self, ingredient):
|
||||
"""
|
||||
Converts an ingredient to all possible conversions based on the custom unit conversion database.
|
||||
After that passes conversion to UnitConversionHelper.base_conversions() to get all base conversions possible.
|
||||
:param ingredient: Ingredient object
|
||||
:return: list of ingredients with all possible custom and base conversions
|
||||
"""
|
||||
conversions = [ingredient]
|
||||
if ingredient.unit:
|
||||
for c in ingredient.unit.unit_conversion_base_relation.all():
|
||||
if c.space == self.space:
|
||||
r = self._uc_convert(c, ingredient.amount, ingredient.unit, ingredient.food)
|
||||
if r and r not in conversions:
|
||||
conversions.append(r)
|
||||
for c in ingredient.unit.unit_conversion_converted_relation.all():
|
||||
if c.space == self.space:
|
||||
r = self._uc_convert(c, ingredient.amount, ingredient.unit, ingredient.food)
|
||||
if r and r not in conversions:
|
||||
conversions.append(r)
|
||||
|
||||
conversions = self.base_conversions(conversions)
|
||||
|
||||
return conversions
|
||||
|
||||
def _uc_convert(self, uc, amount, unit, food):
|
||||
"""
|
||||
Helper to calculate values for custom unit conversions.
|
||||
Converts given base values using the passed UnitConversion object into a converted Ingredient
|
||||
:param uc: UnitConversion object
|
||||
:param amount: base amount
|
||||
:param unit: base unit
|
||||
:param food: base food
|
||||
:return: converted ingredient object from base amount/unit/food
|
||||
"""
|
||||
if uc.food is None or uc.food == food:
|
||||
if unit == uc.base_unit:
|
||||
return Ingredient(amount=amount * (uc.converted_amount / uc.base_amount), unit=uc.converted_unit, food=food, space=self.space)
|
||||
else:
|
||||
return Ingredient(amount=amount * (uc.base_amount / uc.converted_amount), unit=uc.base_unit, food=food, space=self.space)
|
||||
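# Editor's illustrative sketch (not part of the new file above): the pure arithmetic of
# UnitConversionHelper.convert_from_to(), re-stated as a standalone helper so it can be
# checked against the CONVERSION_TABLE ratios. The helper name and reduced table are
# assumptions for illustration only.
from decimal import Decimal

def _convert(table, system, from_unit, to_unit, amount):
    # amount divided by (factor of from_unit per base unit / factor of to_unit per base unit)
    return Decimal(amount / Decimal(table[system][from_unit] / table[system][to_unit]))

table = {'weight': {'g': 1000, 'kg': 1, 'ounce': 35.274, 'pound': 2.20462}}
assert _convert(table, 'weight', 'g', 'kg', Decimal(500)) == Decimal('0.5')  # 500 g -> 0.5 kg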